code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1 value |
|---|---|---|---|---|---|
Variable: 3 step of visibility
----------------------------------
Global : accessible across packages; must be declared with a capital (uppercase) first letter.
Package : accessible inside the package; must be declared with a lowercase first letter, outside of a function.
Block : accessible inside the block/function; must be declared with a lowercase first letter, inside a function.
Use of variable case
-----------------------------------
Short names (like i, j) suit variables with a short lifespan. Longer names (like Employee) signal that the variable is used over a longer span.
Conversion
-----------------------------------
Go relies on the user to convert between numeric types explicitly.
Example: var a int8 = 22; var b int32 = 33. Computing a-b needs a conversion:
a - int8(b)
Constant
------------------------------------
constant operations can be performed at compile time
const a = 10; var b int8 = 10
a + b is possible because the compiler knows the value of a
*/
package main
import (
"fmt"
"strconv"
)
// I is Global variable
var I int = 11
// declareVariable demonstrates the different ways variables can be
// declared in Go: explicit `var` with a type, multi-assignment, short
// declaration (:=), constants, and grouped `var (...)` blocks.
func declareVariable() {
	/*
		Declaration of variables
	*/
	var name string = "ABC"
	var price1, price2 float64 = 22.22, 33.44 // declare two variables in one statement
	price3 := 30                              // short variable declaration (type inferred)
	price3 = 22                               // reassign value
	const pi float64 = 3.14                   // constant
	fmt.Println(name, price1, price2, price3, pi)
	/*
		A grouped set of variable declarations; unassigned variables
		hold their zero value (age is 0 below).
	*/
	var (
		name1 string
		age   int
	)
	name1 = "abc"
	fmt.Println(name1, age)
}
// conversionVariable demonstrates explicit numeric conversions and
// string <-> int conversions via the strconv package.
func conversionVariable() {
	/*
		int to float and float to int
	*/
	intA := 44
	floatA := 20.20
	var floatB float64 = float64(intA)
	var intB int = int(floatA) // truncates the fractional part
	fmt.Println(intA, floatA, intB, floatB)
	/*
		string to int and int to string.
		Note: string(intA) does NOT format the number; it produces the
		string containing the Unicode code point with that value.
		Use the strconv package for numeric <-> string conversion.
	*/
	var strA string = string(intA)
	var strB string = strconv.Itoa(intB)
	var strC string = "222"
	intB, _ = strconv.Atoi(strC) // error deliberately ignored in this demo
	fmt.Println(strA, strB, strC, intB)
}
// primitiveVariable demonstrates primitive numeric types and the bit
// shift operators.
func primitiveVariable() {
	/*
		bool: zero value (default) is false.
		signed int: int8, int16 ... int64
		unsigned int: uint16
	*/
	var x uint16 = 11
	fmt.Println(x)
	/*
		Shift operators: y<<3 multiplies by 8, y>>3 divides by 8.
	*/
	var y int32 = 27
	fmt.Println(y<<3, y>>3) // prints 216 3
}
// constVariable demonstrates iota, which yields successive untyped
// integer constants within a const block.
func constVariable() {
	/*
		iota starts at 0 and increments by 1 for each constant in the block
	*/
	const (
		a = iota
		b
		c
	)
	fmt.Println(a, b, c) // prints 0 1 2
}
// myVariable runs all the variable demos in this file, in order.
func myVariable() {
	declareVariable()
	conversionVariable()
	primitiveVariable()
	constVariable()
}
/*
func main() {
myVariable()
}
*/
package api
import (
. "github.com/gocircuit/circuit/gocircuit.org/render"
)
func RenderContainerPage() string {
figs := A{
"FigMkDkr": RenderFigurePngSvg("Docker elements are similar to processes.", "mkdkr", "600px"),
}
return RenderHtml("Using containers", Render(containerBody, figs))
}
// containerBody is the HTML body template for the "Using containers"
// documentation page; the {{.FigMkDkr}} placeholder is filled in by
// RenderContainerPage. (Fix: stray extraction junk after the closing
// backquote made the file invalid Go; removed.)
const containerBody = `
<h2>Using containers</h2>
<p>The container-related types and structures of the circuit API are
in a dedicated package:
<pre>
import "github.com/gocircuit/circuit/client/docker"
</pre>
<p>Container element manipulations and semantics are exactly analogous to
their <a href="api-process.html">process element</a> counterparts.
<p>Given an anchor object, new containers are created using the anchor method:
<pre>
MakeDocker(docker.Run) (docker.Container, error)
</pre>
<p>The <code>docker.Run</code> argument above is analogous to the <code>Cmd</code>
argument in the <code>MakeProc</code> method.
It specifies the execution parameters of the container, and is defined as:
<pre>
type Run struct {
Image string
Memory int64
CpuShares int64
Lxc []string
Volume []string
Dir string
Entry string
Env []string
Path string
Args []string
Scrub bool
}
</pre>
<p>Excluding the field <code>Scrub</code>, all fields exactly match the standard
Docker execution parameters which are explained in Docker's help:
<pre>
docker help run
</pre>
<p>The field <code>Scrub</code> is also analogous to its counterpart in the process execution structure <code>Cmd</code>.
If <code>Scrub</code> is set, the container element will automatically be detached from the anchor and discarded,
as soon as the underlying Docker container exits.
If <code>Scrub</code> is not set, the container element will remain attached to the anchor even after the underlying
Docker container dies.
<p>The methods of the container element interface are otherwise identical in form and meaning as those of the
process element:
<pre>
type Container interface {
Scrub()
Peek() (*docker.Stat, error)
Signal(sig string) error
Wait() (*docker.Stat, error)
Stdin() io.WriteCloser
Stdout() io.ReadCloser
Stderr() io.ReadCloser
}
</pre>
<p>Finally, the <code>docker.Stat</code> structure (not shown here for space considerations)
exactly captures all the container status variables that are available through the <code>docker inspect</code>
command.
<h4>Example</h4>
<p>The following snippet shows an example of creating a Docker container with an Ubuntu image,
which runs the <code>ls</code> command inside, while also specifying some resource limits and
mapping some file system volumes:
<pre>
proc, err := anchor.MakeDocker(
docker.Run{
Image: "ubuntu",
Memory: 1000000000,
CpuShares: 3,
Volume: []string{"/webapp", "/src/webapp:/opt/webapp:ro"},
Dir: "/",
Path: "/bin/ls",
Args: []string{"/"},
Scrub: true,
})
</pre>
{{.FigMkDkr}}
`
package gotime
import (
"time"
)
// Equalizer abstracts the Gotime comparison functions. Every *Equals
// function in this package (DateEquals, TimeEquals, SameTime, ...) has
// this signature, so callers can select a comparison strategy at runtime.
type Equalizer func(a, b time.Time) bool
// DateEquals determines whether the date portion of two Times are equal.
// This function considers two times with the same year and the same day
// of the year to be identical, ignoring time zones. Use DateEqualsTZ if
// you're concerned about time zones.
func DateEquals(a, b time.Time) bool {
return a.Year() == b.Year() && a.YearDay() == b.YearDay()
}
// DateEqualsTZ is the timezone-aware variant of DateEquals: both times
// are first normalized to UTC, then compared with DateEquals.
func DateEqualsTZ(a, b time.Time) bool {
	return DateEquals(a.UTC(), b.UTC())
}
// TimeEquals determines whether the time portion of two Times are equal.
// This function comapres the hours, minutes, and seconds, ignoring nanoseconds
// and time zones. If those are important to you, use the appropriate function.
func TimeEquals(a, b time.Time) bool {
return a.Hour() == b.Hour() && a.Minute() == b.Minute() && a.Second() == b.Second()
}
// TimeEqualsTZ is the timezone-aware variant of TimeEquals: both times
// are first normalized to UTC, then compared with TimeEquals.
func TimeEqualsTZ(a, b time.Time) bool {
	return TimeEquals(a.UTC(), b.UTC())
}
// TimeEqualsNS extends TimeEquals by additionally requiring the
// nanosecond components to match.
func TimeEqualsNS(a, b time.Time) bool {
	return a.Nanosecond() == b.Nanosecond() && TimeEquals(a, b)
}
// TimeEqualsNSTZ is the timezone-aware variant of TimeEqualsNS: both
// times are first normalized to UTC, then compared with TimeEqualsNS.
func TimeEqualsNSTZ(a, b time.Time) bool {
	return TimeEqualsNS(a.UTC(), b.UTC())
}
// SameTime determines whether two times refer to the same time, down to the second.
// This is achieved by comparing the Unix timestamps of each time. For a more accurate
// comparison, consider using time.Time.Equal(), which compares nanoseconds.
func SameTime(a, b time.Time) bool {
return a.Unix() == b.Unix()
} | equality.go | 0.87452 | 0.595934 | equality.go | starcoder |
package ses
import (
"fmt"
"net/http"
)
// Session decorates the user session data with additional fields
// describing how the session was obtained for the current request.
type Session struct {
	Data
	IsNew    bool // whether the session was newly created rather than restored (cf. Store.Save's isnew flag)
	IsCookie bool // presumably whether the token arrived via cookie — confirm against callers
}

// Data is the user defined session data that is often coupled to a
// specific application and store.
type Data interface {
	// ID returns the session id.
	ID() string
	// Tok returns the data to be encoded in the token.
	// This is usually just the session id but can be any string encoded value.
	Tok() string
	// User returns the user or account id if authenticated.
	User() string
}

// Store provides access to often persisted session data.
type Store interface {
	// New creates and returns session data that must at least have a unique id, or an error.
	New() (Data, error)
	// Get returns session data for the given token data or an error.
	Get(td string) (Data, error)
	// Save persists the given session data or returns an error.
	Save(d Data, isnew bool) error
	// Delete deletes the session data for the given token data or returns an error.
	Delete(td string) error
}
// errNoToken signals that a request carried no session token.
// NOTE(review): unreferenced within this view of the file; confirm it is
// used elsewhere in the package before removing.
var errNoToken = fmt.Errorf("no session token")

// TokenReader can read a token from a http request.
type TokenReader interface {
	ReadToken(*http.Request) string
}

// TokenWriter can write a token to a http response, usually cookies.
// The token should be cleared when called with an empty token string.
type TokenWriter interface {
	WriteToken(http.ResponseWriter, string)
}

// TokenCodec can decode and encode a token to the underlying token data.
// Data is used as string in this context, but anything can be encoded as string.
// You can wrap github.com/gorilla/securecookie as a simple and proven token codec.
// Or you can check your claims from any kind of bearer token you want to use.
type TokenCodec interface {
	DecodeToken(tok string) (td string, err error)
	EncodeToken(td string) (tok string, err error)
}

// Config represents a session token configuration with multiple codecs and helpers.
type Config struct {
	// TokenReader reads tokens from requests. Some readers do also implement token write.
	TokenReader
	// Codec is a codec list that is tried from first to last both for decoding and encoding.
	// This allows seamless key rotations for session encodings. We try older codecs to fail
	// gracefully if a newly introduced codec cannot encode the token data.
	Codec []TokenCodec
}
// HeaderReader is a token reader that reads a named http request header
type HeaderReader string
func (tr HeaderReader) ReadToken(r *http.Request) string { return r.Header.Get(string(tr)) } | ses/token.go | 0.720762 | 0.490602 | token.go | starcoder |
package core
// jwtAPIHelp is the static help text served by the account server's
// /jwt/v1/help endpoint. (Fixes: "noticy" -> "notify", matching the
// parameter name documented for activations below; stray extraction junk
// after the closing backquote removed.)
const jwtAPIHelp = `
# NATS Account Server JWT API HELP
This document describes the various URL paths that encompass the HTTP API for working
with JWTs on the NATS Account Server
## GET /jwt/v1/help
Returns this page.
## GET /jwt/v1/operator
If the server is configured with an operator JWT path, this URL will return the Operator JWT loaded at startup to find the trusted keys.
## GET /jwt/v1/accounts/<pubkey>
Retrieve an account JWT by the public key. The result is either an error or the encoded JWT.
The response contains cache control headers, and uses the JTI as the ETag.
The response has content type application/jwt and may cause a download in a browser.
The JWT is not validated for expiration or revocation. [see check below]
A 304 is returned if the request contains the appropriate If-None-Match header.
A status 404 is returned if the JWT is not found.
Four optional query parameters are supported:
* check - can be set to "true" which will tell the server to return 404 if the JWT is expired
* text - can be set to "true" to change the content type to text/plain
* decode - can be set to "true" to display the JSON for the JWT header and body
* notify - can be set to "true" to trigger a notification event if NATS is configured
## POST /jwt/v1/accounts/<pubkey> (optional)
Update, or store, an account JWT. The JWT Subject should match the pubkey.
The JWT must be signed by the operator specified in the server's configuration.
A status 400 is returned if there is a problem with the JWT or the server is in read-only mode. In rare
cases a status 500 may be returned if there was an issue saving the JWT.
If the JWT is self signed and the account server is enabled to do so, the JWT may be signed.
Optionally a status of 202 can be returned, signifying that signing happens out of band.
## GET /jwt/v1/activations/<hash>
Retrieve an activation token by its hash.
The hash is calculated by creating a string with jwtIssuer.jwtSubject.<subject> and
constructing the sha-256 hash and base32 encoding that. Where <subject> is the exported
subject, minus any wildcards, so foo.* becomes foo. The one special case is that if the
export starts with "*" or is ">" the <subject> will be set to "_".
Three optional query parameters are supported:
* text - can be set to "true" to change the content type to text/plain
* decode - can be set to "true" to display the JSON for the JWT header and body
* notify - can be set to "true" to trigger a notification event if NATS is configured
The response contains cache control headers, and uses the JTI as the ETag.
A 304 is returned if the request contains the appropriate If-None-Match header.
## POST /jwt/v1/activations
Post a new activation token a JWT.
The body of the POST should be a valid activation token, with an account subject and issuer.
Activation tokens are stored by their hash, so two tokens with the same hash will overwrite each other,
however this should only happen if the accounts and subjects match which requires either the
same export or a matching one.
A status 400 is returned if there is a problem with the JWT or saving it. In rare
cases a status 500 may be returned if there was an issue saving the JWT. Otherwise
a status 200 is returned.
`
package toml
import (
"fmt"
"strings"
"time"
)
// LocalDate represents a calendar day in no specific timezone.
type LocalDate struct {
Year int
Month int
Day int
}
// AsTime converts d into a specific time instance at midnight in zone.
func (d LocalDate) AsTime(zone *time.Location) time.Time {
return time.Date(d.Year, time.Month(d.Month), d.Day, 0, 0, 0, 0, zone)
}
// String returns RFC 3339 representation of d.
func (d LocalDate) String() string {
return fmt.Sprintf("%04d-%02d-%02d", d.Year, d.Month, d.Day)
}
// MarshalText returns RFC 3339 representation of d.
func (d LocalDate) MarshalText() ([]byte, error) {
return []byte(d.String()), nil
}
// UnmarshalText parses b as an RFC 3339 date (YYYY-MM-DD) and fills d.
// d is left unchanged when parsing fails.
func (d *LocalDate) UnmarshalText(b []byte) error {
	parsed, err := parseLocalDate(b)
	if err != nil {
		return err
	}
	*d = parsed
	return nil
}
// LocalTime represents a time of day of no specific day in no specific
// timezone.
type LocalTime struct {
	Hour       int // Hour of the day: [0; 24[
	Minute     int // Minute of the hour: [0; 60[
	Second     int // Second of the minute: [0; 60[
	Nanosecond int // Nanoseconds within the second: [0, 1000000000[
	Precision  int // Number of digits to display for Nanosecond.
}

// String returns the RFC 3339 representation of d.
// When Precision is positive, exactly Precision fractional digits are
// emitted. When Precision is zero but Nanosecond is non-zero, the minimal
// number of digits is used (trailing zeros stripped). Otherwise no
// fractional part appears.
func (d LocalTime) String() string {
	clock := fmt.Sprintf("%02d:%02d:%02d", d.Hour, d.Minute, d.Second)
	switch {
	case d.Precision > 0:
		// Fixed precision: keep the dot plus Precision digits.
		return clock + fmt.Sprintf(".%09d", d.Nanosecond)[:d.Precision+1]
	case d.Nanosecond > 0:
		// Minimal representation: strip trailing zeros (the leading dot
		// protects the left end from trimming).
		return clock + strings.Trim(fmt.Sprintf(".%09d", d.Nanosecond), "0")
	default:
		return clock
	}
}

// MarshalText returns the RFC 3339 representation of d.
func (d LocalTime) MarshalText() ([]byte, error) {
	return []byte(d.String()), nil
}
// UnmarshalText parses b as an RFC 3339 time and fills d. The whole
// input must be consumed; trailing characters yield a decode error.
// d is left unchanged when parsing fails.
func (d *LocalTime) UnmarshalText(b []byte) error {
	parsed, rest, err := parseLocalTime(b)
	if err == nil && len(rest) != 0 {
		err = newDecodeError(rest, "extra characters")
	}
	if err != nil {
		return err
	}
	*d = parsed
	return nil
}
// LocalDateTime represents a time of a specific day in no specific timezone.
type LocalDateTime struct {
	LocalDate
	LocalTime
}

// AsTime converts d into a concrete time.Time in zone.
func (d LocalDateTime) AsTime(zone *time.Location) time.Time {
	return time.Date(d.Year, time.Month(d.Month), d.Day, d.Hour, d.Minute, d.Second, d.Nanosecond, zone)
}

// String returns the RFC 3339 representation of d: date, "T", time.
func (d LocalDateTime) String() string {
	return fmt.Sprintf("%sT%s", d.LocalDate, d.LocalTime)
}

// MarshalText returns the RFC 3339 representation of d.
func (d LocalDateTime) MarshalText() ([]byte, error) {
	return []byte(d.String()), nil
}
// UnmarshalText parses b using RFC 3339 to fill d.
func (d *LocalDateTime) UnmarshalText(data []byte) error {
res, left, err := parseLocalDateTime(data)
if err == nil && len(left) != 0 {
err = newDecodeError(left, "extra characters")
}
if err != nil {
return err
}
*d = res
return nil
} | vendor/github.com/pelletier/go-toml/v2/localtime.go | 0.884819 | 0.408985 | localtime.go | starcoder |
package types
import (
"io"
"github.com/lyraproj/issue/issue"
"github.com/lyraproj/pcore/px"
)
// TypeReferenceType represents an as-yet unresolved reference to another
// type, carried as the type string that Resolve later parses.
type TypeReferenceType struct {
	typeString string
}

// TypeReferenceMetaType is the Pcore meta type describing TypeReference
// objects; it is registered in init below.
var TypeReferenceMetaType px.ObjectType

func init() {
	// Register Pcore::TypeReference with its single type_string attribute
	// and a constructor that dispatches on the argument count.
	TypeReferenceMetaType = newObjectType(`Pcore::TypeReference`,
		`Pcore::AnyType {
attributes => {
type_string => String[1]
}
}`, func(ctx px.Context, args []px.Value) px.Value {
			return newTypeReferenceType2(args...)
		})
}
// DefaultTypeReferenceType returns the shared default TypeReference (the
// `UnresolvedReference` singleton).
func DefaultTypeReferenceType() *TypeReferenceType {
	return typeReferenceTypeDefault
}

// NewTypeReferenceType creates a TypeReference wrapping typeString.
func NewTypeReferenceType(typeString string) *TypeReferenceType {
	return &TypeReferenceType{typeString}
}

// newTypeReferenceType2 is the Pcore constructor: zero arguments yields
// the default instance; one String argument yields a reference to that
// type string; anything else panics with an argument error.
func newTypeReferenceType2(args ...px.Value) *TypeReferenceType {
	switch len(args) {
	case 0:
		return DefaultTypeReferenceType()
	case 1:
		if str, ok := args[0].(stringValue); ok {
			return &TypeReferenceType{string(str)}
		}
		panic(illegalArgumentType(`TypeReference[]`, 0, `String`, args[0]))
	default:
		panic(illegalArgumentCount(`TypeReference[]`, `0 - 1`, len(args)))
	}
}
// Accept visits this type with v.
func (t *TypeReferenceType) Accept(v px.Visitor, g px.Guard) {
	v(t)
}

// Default returns the default (parameterless) TypeReference.
func (t *TypeReferenceType) Default() px.Type {
	return typeReferenceTypeDefault
}

// Equals reports whether o is a TypeReference with the same type string.
func (t *TypeReferenceType) Equals(o interface{}, g px.Guard) bool {
	if ot, ok := o.(*TypeReferenceType); ok {
		return t.typeString == ot.typeString
	}
	return false
}

// Get exposes the `type_string` attribute for reflective access.
func (t *TypeReferenceType) Get(key string) (px.Value, bool) {
	switch key {
	case `type_string`:
		return stringValue(t.typeString), true
	default:
		return nil, false
	}
}

// IsAssignable reports assignability: a TypeReference is only assignable
// from another reference with an identical type string.
func (t *TypeReferenceType) IsAssignable(o px.Type, g px.Guard) bool {
	tr, ok := o.(*TypeReferenceType)
	return ok && t.typeString == tr.typeString
}

// IsInstance always returns false: an unresolved reference has no instances.
func (t *TypeReferenceType) IsInstance(o px.Value, g px.Guard) bool {
	return false
}

// MetaType returns the Pcore meta type for TypeReference.
func (t *TypeReferenceType) MetaType() px.ObjectType {
	return TypeReferenceMetaType
}

// Name returns the simple type name.
func (t *TypeReferenceType) Name() string {
	return `TypeReference`
}

// CanSerializeAsString reports that this type serializes as a string.
func (t *TypeReferenceType) CanSerializeAsString() bool {
	return true
}

// SerializationString returns the string form used for serialization.
func (t *TypeReferenceType) SerializationString() string {
	return t.String()
}

// String returns the default string representation of the type.
func (t *TypeReferenceType) String() string {
	return px.ToString2(t, None)
}

// Parameters returns the type parameters: empty for the default instance,
// otherwise the wrapped type string.
func (t *TypeReferenceType) Parameters() []px.Value {
	if *t == *typeReferenceTypeDefault {
		return px.EmptyValues
	}
	return []px.Value{stringValue(t.typeString)}
}

// Resolve parses the wrapped type string in context c and resolves the
// result. It panics with an UnresolvedType error if resolution merely
// reproduces this same reference (i.e. the name cannot be resolved).
func (t *TypeReferenceType) Resolve(c px.Context) px.Type {
	r := c.ParseType(t.typeString)
	if rt, ok := r.(px.ResolvableType); ok {
		if tr, ok := rt.(*TypeReferenceType); ok && t.typeString == tr.typeString {
			panic(px.Error(px.UnresolvedType, issue.H{`typeString`: t.typeString}))
		}
		r = rt.Resolve(c)
	}
	return r
}

// ToString writes the formatted representation of the type to b.
func (t *TypeReferenceType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) {
	TypeToString(t, b, s, g)
}

// PType returns the meta type of this value (Type[TypeReference]).
func (t *TypeReferenceType) PType() px.Type {
	return &TypeType{t}
}

// TypeString returns the wrapped, unresolved type string.
func (t *TypeReferenceType) TypeString() string {
	return t.typeString
}
var typeReferenceTypeDefault = &TypeReferenceType{`UnresolvedReference`} | types/typereferencetype.go | 0.615088 | 0.44071 | typereferencetype.go | starcoder |
package opt
// --- Bool --------------------------------------------------------------------

// Bool is an optional type that wraps a bool. The zero value is unset.
type Bool struct {
	isSet bool
	val   bool
}

// MakeBool creates a new Bool holding v, marked as set.
func MakeBool(v bool) Bool {
	var o Bool
	o.Set(v)
	return o
}

// IsSet returns a value indicating whether the optional type's value is set.
func (p Bool) IsSet() bool {
	return p.isSet
}

// Set stores v and marks the optional as set.
func (p *Bool) Set(v bool) {
	p.isSet, p.val = true, v
}

// Get returns the underlying value wrapped by the optional type. If IsSet
// returns false, Get returns the zero value for the underlying type (false).
func (p Bool) Get() bool {
	return p.val
}
// NOTE(review): the sections below all follow one mechanical template per
// wrapped type, which suggests this file is generated; confirm, and prefer
// regenerating over hand-editing.

// --- Byte --------------------------------------------------------------------

// Byte is an optional type that wraps a byte. The zero value is unset.
type Byte struct {
	isSet bool
	val   byte
}

// MakeByte creates a new Byte with the specified value.
func MakeByte(v byte) Byte {
	return Byte{isSet: true, val: v}
}

// IsSet returns a value indicating whether the optional type's value is set.
func (p Byte) IsSet() bool {
	return p.isSet
}

// Set sets the optional type's value.
func (p *Byte) Set(v byte) {
	p.val = v
	p.isSet = true
}

// Get returns the underlying value wrapped by the optional type. If IsSet returns
// false, then Get's return value will be the zero value for the underlying type.
func (p Byte) Get() byte {
	return p.val
}

// --- Complex64 ---------------------------------------------------------------

// Complex64 is an optional type that wraps a complex64. The zero value is unset.
type Complex64 struct {
	isSet bool
	val   complex64
}

// MakeComplex64 creates a new Complex64 with the specified value.
func MakeComplex64(v complex64) Complex64 {
	return Complex64{isSet: true, val: v}
}

// IsSet returns a value indicating whether the optional type's value is set.
func (p Complex64) IsSet() bool {
	return p.isSet
}

// Set sets the optional type's value.
func (p *Complex64) Set(v complex64) {
	p.val = v
	p.isSet = true
}

// Get returns the underlying value wrapped by the optional type. If IsSet returns
// false, then Get's return value will be the zero value for the underlying type.
func (p Complex64) Get() complex64 {
	return p.val
}

// --- Complex128 --------------------------------------------------------------

// Complex128 is an optional type that wraps a complex128. The zero value is unset.
type Complex128 struct {
	isSet bool
	val   complex128
}

// MakeComplex128 creates a new Complex128 with the specified value.
func MakeComplex128(v complex128) Complex128 {
	return Complex128{isSet: true, val: v}
}

// IsSet returns a value indicating whether the optional type's value is set.
func (p Complex128) IsSet() bool {
	return p.isSet
}

// Set sets the optional type's value.
func (p *Complex128) Set(v complex128) {
	p.val = v
	p.isSet = true
}

// Get returns the underlying value wrapped by the optional type. If IsSet returns
// false, then Get's return value will be the zero value for the underlying type.
func (p Complex128) Get() complex128 {
	return p.val
}

// --- Float32 -----------------------------------------------------------------

// Float32 is an optional type that wraps a float32. The zero value is unset.
type Float32 struct {
	isSet bool
	val   float32
}

// MakeFloat32 creates a new Float32 with the specified value.
func MakeFloat32(v float32) Float32 {
	return Float32{isSet: true, val: v}
}

// IsSet returns a value indicating whether the optional type's value is set.
func (p Float32) IsSet() bool {
	return p.isSet
}

// Set sets the optional type's value.
func (p *Float32) Set(v float32) {
	p.val = v
	p.isSet = true
}

// Get returns the underlying value wrapped by the optional type. If IsSet returns
// false, then Get's return value will be the zero value for the underlying type.
func (p Float32) Get() float32 {
	return p.val
}

// --- Float64 -----------------------------------------------------------------

// Float64 is an optional type that wraps a float64. The zero value is unset.
type Float64 struct {
	isSet bool
	val   float64
}

// MakeFloat64 creates a new Float64 with the specified value.
func MakeFloat64(v float64) Float64 {
	return Float64{isSet: true, val: v}
}

// IsSet returns a value indicating whether the optional type's value is set.
func (p Float64) IsSet() bool {
	return p.isSet
}

// Set sets the optional type's value.
func (p *Float64) Set(v float64) {
	p.val = v
	p.isSet = true
}

// Get returns the underlying value wrapped by the optional type. If IsSet returns
// false, then Get's return value will be the zero value for the underlying type.
func (p Float64) Get() float64 {
	return p.val
}
// --- Int ---------------------------------------------------------------------

// Int is an optional type that wraps an int. The zero value is unset.
type Int struct {
	isSet bool
	val   int
}

// MakeInt creates a new Int with the specified value.
func MakeInt(v int) Int {
	return Int{isSet: true, val: v}
}

// IsSet returns a value indicating whether the optional type's value is set.
func (p Int) IsSet() bool {
	return p.isSet
}

// Set sets the optional type's value.
func (p *Int) Set(v int) {
	p.val = v
	p.isSet = true
}

// Get returns the underlying value wrapped by the optional type. If IsSet returns
// false, then Get's return value will be the zero value for the underlying type.
func (p Int) Get() int {
	return p.val
}

// --- Int8 --------------------------------------------------------------------

// Int8 is an optional type that wraps an int8. The zero value is unset.
type Int8 struct {
	isSet bool
	val   int8
}

// MakeInt8 creates a new Int8 with the specified value.
func MakeInt8(v int8) Int8 {
	return Int8{isSet: true, val: v}
}

// IsSet returns a value indicating whether the optional type's value is set.
func (p Int8) IsSet() bool {
	return p.isSet
}

// Set sets the optional type's value.
func (p *Int8) Set(v int8) {
	p.val = v
	p.isSet = true
}

// Get returns the underlying value wrapped by the optional type. If IsSet returns
// false, then Get's return value will be the zero value for the underlying type.
func (p Int8) Get() int8 {
	return p.val
}

// --- Int16 -------------------------------------------------------------------

// Int16 is an optional type that wraps an int16. The zero value is unset.
type Int16 struct {
	isSet bool
	val   int16
}

// MakeInt16 creates a new Int16 with the specified value.
func MakeInt16(v int16) Int16 {
	return Int16{isSet: true, val: v}
}

// IsSet returns a value indicating whether the optional type's value is set.
func (p Int16) IsSet() bool {
	return p.isSet
}

// Set sets the optional type's value.
func (p *Int16) Set(v int16) {
	p.val = v
	p.isSet = true
}

// Get returns the underlying value wrapped by the optional type. If IsSet returns
// false, then Get's return value will be the zero value for the underlying type.
func (p Int16) Get() int16 {
	return p.val
}

// --- Int32 -------------------------------------------------------------------

// Int32 is an optional type that wraps an int32. The zero value is unset.
type Int32 struct {
	isSet bool
	val   int32
}

// MakeInt32 creates a new Int32 with the specified value.
func MakeInt32(v int32) Int32 {
	return Int32{isSet: true, val: v}
}

// IsSet returns a value indicating whether the optional type's value is set.
func (p Int32) IsSet() bool {
	return p.isSet
}

// Set sets the optional type's value.
func (p *Int32) Set(v int32) {
	p.val = v
	p.isSet = true
}

// Get returns the underlying value wrapped by the optional type. If IsSet returns
// false, then Get's return value will be the zero value for the underlying type.
func (p Int32) Get() int32 {
	return p.val
}

// --- Int64 -------------------------------------------------------------------

// Int64 is an optional type that wraps an int64. The zero value is unset.
type Int64 struct {
	isSet bool
	val   int64
}

// MakeInt64 creates a new Int64 with the specified value.
func MakeInt64(v int64) Int64 {
	return Int64{isSet: true, val: v}
}

// IsSet returns a value indicating whether the optional type's value is set.
func (p Int64) IsSet() bool {
	return p.isSet
}

// Set sets the optional type's value.
func (p *Int64) Set(v int64) {
	p.val = v
	p.isSet = true
}

// Get returns the underlying value wrapped by the optional type. If IsSet returns
// false, then Get's return value will be the zero value for the underlying type.
func (p Int64) Get() int64 {
	return p.val
}
// --- Rune --------------------------------------------------------------------

// Rune is an optional type that wraps a rune. The zero value is unset.
type Rune struct {
	isSet bool
	val   rune
}

// MakeRune creates a new Rune with the specified value.
func MakeRune(v rune) Rune {
	return Rune{isSet: true, val: v}
}

// IsSet returns a value indicating whether the optional type's value is set.
func (p Rune) IsSet() bool {
	return p.isSet
}

// Set sets the optional type's value.
func (p *Rune) Set(v rune) {
	p.val = v
	p.isSet = true
}

// Get returns the underlying value wrapped by the optional type. If IsSet returns
// false, then Get's return value will be the zero value for the underlying type.
func (p Rune) Get() rune {
	return p.val
}
// --- String ------------------------------------------------------------------

// String is an optional type that wraps a string. The zero value is unset.
type String struct {
	isSet bool
	val   string
}

// MakeString creates a new String holding v, marked as set.
func MakeString(v string) String {
	var o String
	o.Set(v)
	return o
}

// IsSet returns a value indicating whether the optional type's value is set.
func (p String) IsSet() bool {
	return p.isSet
}

// Set stores v and marks the optional as set.
func (p *String) Set(v string) {
	p.isSet, p.val = true, v
}

// Get returns the underlying value wrapped by the optional type. If IsSet
// returns false, Get returns the zero value for the underlying type ("").
func (p String) Get() string {
	return p.val
}
// --- Uint --------------------------------------------------------------------

// Uint is an optional type that wraps a uint. The zero value is unset.
type Uint struct {
	isSet bool
	val   uint
}

// MakeUint creates a new Uint with the specified value.
func MakeUint(v uint) Uint {
	return Uint{isSet: true, val: v}
}

// IsSet returns a value indicating whether the optional type's value is set.
func (p Uint) IsSet() bool {
	return p.isSet
}

// Set sets the optional type's value.
func (p *Uint) Set(v uint) {
	p.val = v
	p.isSet = true
}

// Get returns the underlying value wrapped by the optional type. If IsSet returns
// false, then Get's return value will be the zero value for the underlying type.
func (p Uint) Get() uint {
	return p.val
}

// --- Uint8 -------------------------------------------------------------------

// Uint8 is an optional type that wraps a uint8. The zero value is unset.
type Uint8 struct {
	isSet bool
	val   uint8
}

// MakeUint8 creates a new Uint8 with the specified value.
func MakeUint8(v uint8) Uint8 {
	return Uint8{isSet: true, val: v}
}

// IsSet returns a value indicating whether the optional type's value is set.
func (p Uint8) IsSet() bool {
	return p.isSet
}

// Set sets the optional type's value.
func (p *Uint8) Set(v uint8) {
	p.val = v
	p.isSet = true
}

// Get returns the underlying value wrapped by the optional type. If IsSet returns
// false, then Get's return value will be the zero value for the underlying type.
func (p Uint8) Get() uint8 {
	return p.val
}

// --- Uint16 ------------------------------------------------------------------

// Uint16 is an optional type that wraps a uint16. The zero value is unset.
type Uint16 struct {
	isSet bool
	val   uint16
}

// MakeUint16 creates a new Uint16 with the specified value.
func MakeUint16(v uint16) Uint16 {
	return Uint16{isSet: true, val: v}
}

// IsSet returns a value indicating whether the optional type's value is set.
func (p Uint16) IsSet() bool {
	return p.isSet
}

// Set sets the optional type's value.
func (p *Uint16) Set(v uint16) {
	p.val = v
	p.isSet = true
}

// Get returns the underlying value wrapped by the optional type. If IsSet returns
// false, then Get's return value will be the zero value for the underlying type.
func (p Uint16) Get() uint16 {
	return p.val
}

// --- Uint32 ------------------------------------------------------------------

// Uint32 is an optional type that wraps a uint32. The zero value is unset.
type Uint32 struct {
	isSet bool
	val   uint32
}

// MakeUint32 creates a new Uint32 with the specified value.
func MakeUint32(v uint32) Uint32 {
	return Uint32{isSet: true, val: v}
}

// IsSet returns a value indicating whether the optional type's value is set.
func (p Uint32) IsSet() bool {
	return p.isSet
}

// Set sets the optional type's value.
func (p *Uint32) Set(v uint32) {
	p.val = v
	p.isSet = true
}

// Get returns the underlying value wrapped by the optional type. If IsSet returns
// false, then Get's return value will be the zero value for the underlying type.
func (p Uint32) Get() uint32 {
	return p.val
}
// --- Uint64 ------------------------------------------------------------------

// Uint64 is an optional type that wraps a uint64.
type Uint64 struct {
	isSet bool
	val   uint64
}

// MakeUint64 creates a new Uint64 with the specified value.
func MakeUint64(v uint64) Uint64 {
	return Uint64{isSet: true, val: v}
}

// IsSet returns a value indicating whether the optional type's value is set.
func (p Uint64) IsSet() bool {
	return p.isSet
}

// Set sets the optional type's value.
func (p *Uint64) Set(v uint64) {
	p.val = v
	p.isSet = true
}

// Get returns the underlying value wrapped by the optional type. If IsSet returns
// false, then Get's return value will be the zero value for the underlying type.
func (p Uint64) Get() uint64 {
	return p.val
}
package polygon
import (
"github.com/adamcolton/geom/d2"
"github.com/adamcolton/geom/d2/curve/line"
"github.com/adamcolton/geom/d2/shape/triangle"
)
// ConcavePolygon represents a Polygon with at least one concave angle.
// It keeps the caller's polygon alongside a regular reference polygon with
// the same number of sides; matching triangulations of the two let points
// in the regular polygon be mapped into the concave one.
type ConcavePolygon struct {
	concave Polygon // the polygon as provided by the caller
	regular Polygon // unit regular polygon with the same vertex count
	// triangles pairs each triangle of the regular polygon ([0]) with the
	// corresponding triangle of the concave polygon ([1]).
	triangles [][2]*triangle.Triangle
}
// GetTriangles takes triangle indexes from FindTriangles and returns a slice
// of triangles. This can be used to map one polygon to another with the same
// number of sides.
func GetTriangles(triangles [][3]uint32, p Polygon) []*triangle.Triangle {
	out := make([]*triangle.Triangle, 0, len(triangles))
	for _, idx := range triangles {
		out = append(out, &triangle.Triangle{p[idx[0]], p[idx[1]], p[idx[2]]})
	}
	return out
}
// NewConcavePolygon converts a Polygon to a ConcavePolygon.
func NewConcavePolygon(concave Polygon) ConcavePolygon {
	regular := RegularPolygonRadius(d2.Pt{}, 1, 0, len(concave))
	idxs := concave.FindTriangles()
	concaveTs := GetTriangles(idxs, concave)
	regularTs := GetTriangles(idxs, regular)
	// Pair each regular-polygon triangle with its concave counterpart.
	pairs := make([][2]*triangle.Triangle, len(idxs))
	for i, rt := range regularTs {
		pairs[i] = [2]*triangle.Triangle{rt, concaveTs[i]}
	}
	return ConcavePolygon{
		concave:   concave,
		regular:   regular,
		triangles: pairs,
	}
}
// Pt2 returns a point in the ConcavePolygon adhering to the shape rules.
// The point is first located in the regular reference polygon, then mapped
// into the concave polygon through the matching triangle pair.
func (c ConcavePolygon) Pt2(t0, t1 float64) d2.Pt {
	pt := c.regular.Pt2(t0, t1)
	// Find the regular triangle containing pt and map pt through the affine
	// transform onto the corresponding concave triangle.
	for _, ts := range c.triangles {
		if !ts[0].Contains(pt) {
			continue
		}
		tfrm, _ := triangle.Transform(ts[0], ts[1])
		return tfrm.Pt(pt)
	}
	// point is on perimeter: no triangle strictly contains pt, so accept any
	// triangle whose edge passes within a small tolerance of pt.
	for _, ts := range c.triangles {
		if line.New(ts[0][0], ts[0][1]).Closest(pt).Distance(pt) < 1E-5 ||
			line.New(ts[0][1], ts[0][2]).Closest(pt).Distance(pt) < 1E-5 ||
			line.New(ts[0][2], ts[0][0]).Closest(pt).Distance(pt) < 1E-5 {
			tfrm, _ := triangle.Transform(ts[0], ts[1])
			return tfrm.Pt(pt)
		}
	}
	return d2.Pt{}
}
// Pt1 returns a point on the perimeter
func (c ConcavePolygon) Pt1(t0 float64) d2.Pt { return c.concave.Pt1(t0) }
// Area of the polygon
func (c ConcavePolygon) Area() float64 { return c.concave.Area() }
// SignedArea returns the Area and may be negative depending on the polarity.
func (c ConcavePolygon) SignedArea() float64 { return c.concave.SignedArea() }
// Perimeter returns the total length of the perimeter
func (c ConcavePolygon) Perimeter() float64 { return c.concave.Perimeter() }
// Contains returns true of the point f is inside of the polygon
func (c ConcavePolygon) Contains(f d2.Pt) bool { return c.concave.Contains(f) }
// Centroid returns the center of mass of the polygon
func (c ConcavePolygon) Centroid() d2.Pt { return c.concave.Centroid() } | d2/shape/polygon/concave.go | 0.892761 | 0.775095 | concave.go | starcoder |
package middleware
import (
"errors"
"fmt"
"reflect"
)
// ClearFieldByType clears all fields and nested fields in an object
// that have a specified type. obj must be a pointer.
func ClearFieldByType(obj interface{}, t reflect.Type) error {
	v := reflect.ValueOf(obj)
	if v.Kind() == reflect.Ptr {
		clearValueFieldByType(v, t)
		return nil
	}
	return fmt.Errorf("non-pointer %v", v.Type())
}
func clearValueFieldByType(value reflect.Value, t reflect.Type) {
switch value.Kind() {
case reflect.Ptr:
clearValueFieldByType(value.Elem(), t)
case reflect.Interface:
if value.Elem().IsValid() && value.Elem().Type() == t {
value.Set(reflect.Zero(t))
} else {
clearValueFieldByType(value.Elem(), t)
}
case reflect.Struct:
if value.Type() == t && value.IsValid() && value.CanSet() {
value.Set(reflect.Zero(t))
} else {
for i := 0; i < value.NumField(); i++ {
field := value.Field(i)
if field.Type() == t && field.IsValid() && field.CanSet() {
field.Set(reflect.Zero(t))
} else {
clearValueFieldByType(field, t)
}
}
}
case reflect.Slice:
sliceCopy := reflect.New(value.Type()).Elem()
sliceCopy.Set(reflect.MakeSlice(value.Type(), 0, 0))
for i := 0; i < value.Len(); i++ {
sliceValue := value.Index(i)
if sliceValue.Type() == t || (sliceValue.Kind() == reflect.Interface && sliceValue.Elem().Type() == t) {
continue
}
var sliceValueCopy reflect.Value
if sliceValue.Kind() == reflect.Interface || sliceValue.Kind() == reflect.Ptr {
sliceValueCopy = reflect.New(sliceValue.Elem().Type())
sliceValueCopy.Elem().Set(sliceValue.Elem())
} else {
sliceValueCopy = reflect.New(sliceValue.Type())
sliceValueCopy.Elem().Set(reflect.Indirect(sliceValue))
}
clearValueFieldByType(sliceValueCopy, t)
if sliceValue.Type().Kind() == reflect.Ptr {
sliceCopy.Set(reflect.Append(sliceCopy, sliceValueCopy))
} else {
sliceCopy.Set(reflect.Append(sliceCopy, sliceValueCopy.Elem()))
}
}
value.Set(sliceCopy)
case reflect.Map:
for _, key := range value.MapKeys() {
mapValue := value.MapIndex(key)
if mapValue.Type() == t {
mapValue.Set(reflect.Zero(t))
} else {
var copyValue reflect.Value
if mapValue.Kind() == reflect.Interface || mapValue.Kind() == reflect.Ptr {
copyValue = reflect.New(mapValue.Elem().Type())
copyValue.Elem().Set(mapValue.Elem())
} else {
copyValue = reflect.New(mapValue.Type())
copyValue.Elem().Set(mapValue.Elem())
}
clearValueFieldByType(copyValue, t)
mapValue = copyValue.Elem()
}
value.SetMapIndex(key, mapValue)
}
default:
if value.IsValid() && value.CanSet() && value.Type() == t {
value.Set(reflect.Zero(t))
}
}
}
// ClearFieldByName clears all fields and nested fields in an object
// that have a specified name. obj must be a pointer.
func ClearFieldByName(obj interface{}, fieldName string) error {
	if fieldName == "" {
		return errors.New("field name is required")
	}
	v := reflect.ValueOf(obj)
	if v.Kind() == reflect.Ptr {
		clearValueFieldByName(v, fieldName)
		return nil
	}
	return fmt.Errorf("non-pointer %v", v.Type())
}
// clearValueFieldByName recursively walks value and zeroes every struct
// field (or map entry) whose name/key equals fieldName. Slices and maps are
// rebuilt so their (unaddressable) elements can be modified via deep copies.
func clearValueFieldByName(value reflect.Value, fieldName string) {
	switch value.Kind() {
	case reflect.Ptr:
		clearValueFieldByName(reflect.Indirect(value), fieldName)
	case reflect.Interface:
		clearValueFieldByName(value.Elem(), fieldName)
	case reflect.Struct:
		for i := 0; i < value.NumField(); i++ {
			field := value.Field(i)
			fieldType := value.Type().Field(i)
			if fieldType.Name == fieldName && field.IsValid() && field.CanSet() {
				field.Set(reflect.Zero(fieldType.Type))
			} else {
				clearValueFieldByName(field, fieldName)
			}
		}
	case reflect.Slice:
		sliceCopy := reflect.New(value.Type()).Elem()
		sliceCopy.Set(reflect.MakeSlice(value.Type(), 0, 0))
		for i := 0; i < value.Len(); i++ {
			sliceValue := value.Index(i)
			var sliceValueCopy reflect.Value
			if sliceValue.Kind() == reflect.Interface || sliceValue.Kind() == reflect.Ptr {
				sliceValueCopy = reflect.New(sliceValue.Elem().Type())
				sliceValueCopy.Elem().Set(sliceValue.Elem())
			} else {
				sliceValueCopy = reflect.New(sliceValue.Type())
				sliceValueCopy.Elem().Set(reflect.Indirect(sliceValue))
			}
			clearValueFieldByName(sliceValueCopy, fieldName)
			if sliceValue.Type().Kind() == reflect.Ptr {
				sliceCopy.Set(reflect.Append(sliceCopy, sliceValueCopy))
			} else {
				sliceCopy.Set(reflect.Append(sliceCopy, sliceValueCopy.Elem()))
			}
		}
		value.Set(sliceCopy)
	case reflect.Map:
		mapCopy := reflect.New(value.Type()).Elem()
		mapCopy.Set(reflect.MakeMap(value.Type()))
		for _, key := range value.MapKeys() {
			originalVal := value.MapIndex(key)
			originalValType := originalVal.Type()
			copyVal := reflect.New(originalValType).Elem()
			if key.String() == fieldName {
				// copyVal already holds the zero value for concrete value
				// types. For interface values, store the zero value of the
				// dynamic type instead. Fix: Elem() panics on non-interface
				// kinds, so the call must be guarded by a kind check.
				if originalValType.Kind() == reflect.Interface && originalVal.Elem().IsValid() {
					copyVal.Set(reflect.Zero(originalVal.Elem().Type()))
				}
			} else if originalValType.Kind() != reflect.Interface || originalVal.Elem().IsValid() {
				if originalVal.Kind() == reflect.Interface {
					originalVal = originalVal.Elem()
					copyVal = reflect.New(originalVal.Type()).Elem()
				}
				deepCopy(originalVal, copyVal)
				clearValueFieldByName(copyVal, fieldName)
			}
			mapCopy.SetMapIndex(key, copyVal)
		}
		value.Set(mapCopy)
	}
}
func deepCopy(original, copy reflect.Value) {
switch original.Kind() {
case reflect.Ptr:
originalVal := original.Elem()
if !originalVal.IsValid() {
return
}
copy.Set(reflect.New(originalVal.Type()))
deepCopy(originalVal, copy.Elem())
case reflect.Interface:
if !original.IsNil() {
originalVal := original.Elem()
copyVal := reflect.New(originalVal.Type()).Elem()
deepCopy(originalVal, copyVal)
copy.Set(copyVal)
}
case reflect.Struct:
for i := 0; i < original.NumField(); i++ {
deepCopy(original.Field(i), copy.Field(i))
}
case reflect.Slice:
copy.Set(reflect.MakeSlice(original.Type(), original.Len(), original.Cap()))
for i := 0; i < original.Len(); i++ {
deepCopy(original.Index(i), copy.Index(i))
}
case reflect.Map:
copy.Set(reflect.MakeMap(original.Type()))
for _, key := range original.MapKeys() {
originalVal := original.MapIndex(key)
copyVal := reflect.New(originalVal.Type()).Elem()
deepCopy(originalVal, copyVal)
copy.SetMapIndex(key, copyVal)
}
default:
copy.Set(original)
}
} | middleware/reflect.go | 0.674265 | 0.501709 | reflect.go | starcoder |
package solve
import (
"sync"
gs "github.com/deanveloper/gridspech-go"
)
// SolveGoals will return a channel of solutions for all the goal tiles in g.
func (g GridSolver) SolveGoals() <-chan gs.TileSet {
	solutions := make(chan gs.TileSet, 4)
	go func() {
		defer close(solutions)
		g.solveGoals(solutions)
	}()
	return solutions
}
// solveGoals streams every complete goal solution onto ch. Solutions are
// built by pairing up the grid's goal tiles, finding colored paths for each
// pair, and merging compatible path sets across a full pairing.
func (g GridSolver) solveGoals(ch chan<- gs.TileSet) {
	goalTiles := g.Grid.TilesWith(func(o gs.Tile) bool {
		return o.Data.Type == gs.TypeGoal
	}).Slice()
	// With no goal tiles, the empty tile set is trivially the only solution.
	if len(goalTiles) == 0 {
		ch <- gs.NewTileSet()
		return
	}
	goalTileCoords := make([]gs.TileCoord, len(goalTiles))
	for i := range goalTiles {
		goalTileCoords[i] = goalTiles[i].Coord
	}
	// First pass: for every unordered pair of goal tiles and every color,
	// collect all decorated paths connecting the pair. One goroutine per
	// pair; the shared map is guarded by pairsToSolutionMx.
	var pairsToSolutionMx sync.Mutex
	pairsToSolutions := make(map[[2]gs.TileCoord][]gs.TileSet)
	var wg sync.WaitGroup
	for i1 := 0; i1 < len(goalTiles)-1; i1++ {
		for i2 := i1 + 1; i2 < len(goalTiles); i2++ {
			// goalPairCoords is declared per iteration, so the goroutine
			// below captures its own copy.
			goalPairCoords := [2]gs.TileCoord{goalTiles[i1].Coord, goalTiles[i2].Coord}
			wg.Add(1)
			go func() {
				for c := 0; c < g.Grid.MaxColors; c++ {
					for path := range g.PathsIter(goalPairCoords[0], goalPairCoords[1], gs.TileColor(c)) {
						pairsToSolutionMx.Lock()
						for decorated := range decorateSetBorder(g, gs.TileColor(c), path) {
							pairsToSolutions[goalPairCoords] = append(pairsToSolutions[goalPairCoords], decorated)
						}
						pairsToSolutionMx.Unlock()
					}
				}
				wg.Done()
			}()
		}
	}
	wg.Wait()
	// Second pass: enumerate every way to partition the goal tiles into
	// pairs, then merge the per-pair solutions one pair at a time, pruning
	// merged sets that conflict or are invalid for the tiles seen so far.
	allGoalPairings := allTilePairingSets(goalTileCoords)
	for _, pairing := range allGoalPairings {
		pairingSolutions := pairsToSolutions[pairing[0]]
		for pairIndex := 1; pairIndex < len(pairing); pairIndex++ {
			pair := pairing[pairIndex]
			// Validate against every tile covered by the pairs merged so far.
			var tilesToValidate []gs.TileCoord
			for i := 0; i <= pairIndex; i++ {
				tilesToValidate = append(tilesToValidate, pairing[i][0], pairing[i][1])
			}
			result := mergeSolutionsSlices(pairingSolutions, pairsToSolutions[pair])
			result = removeIfNonUnique(result)
			result = removeIfInvalid(g, tilesToValidate, result)
			pairingSolutions = result
		}
		for _, solution := range pairingSolutions {
			ch <- solution
		}
	}
}
func allTilePairingSets(tiles []gs.TileCoord) [][][2]gs.TileCoord {
pairingSets := AllPairingSets(len(tiles))
tilePairingSets := make([][][2]gs.TileCoord, len(pairingSets))
for i, pairing := range pairingSets {
tilePairings := make([][2]gs.TileCoord, len(pairing))
for p, pair := range pairing {
tilePairings[p] = [2]gs.TileCoord{tiles[pair[0]], tiles[pair[1]]}
}
tilePairingSets[i] = tilePairings
}
return tilePairingSets
} | solve/goals.go | 0.548432 | 0.426799 | goals.go | starcoder |
package spt
// http://iquilezles.org/www/articles/distfunctions2d/distfunctions2d.htm
import (
"encoding/gob"
"math"
)
// init registers all concrete SDF2 implementations with encoding/gob so
// they can be (de)serialized through the SDF2 interface.
func init() {
	gob.Register(SDFCircle{})
	gob.Register(SDFRectangle{})
	gob.Register(SDFTriangle{})
	gob.Register(SDFPolygon{})
	gob.Register(SDFStadium{})
	gob.Register(SDFParabola{})
	gob.Register(SDFHexagram{})
}

// SDF2 is a 2-D signed distance field. SDF returns the distance function;
// Circle returns a bounding circle (center, radius) for the shape.
type SDF2 interface {
	SDF() func(Vec2) float64
	Circle() (Vec2, float64)
}
// SDFCircle is a circle centered at the origin.
type SDFCircle struct {
	Radius float64
}

// SDF returns the signed distance function for the circle.
func (s SDFCircle) SDF() func(Vec2) float64 {
	r := s.Radius
	return func(pos Vec2) float64 {
		return len2(pos) - r
	}
}

// Circle returns the bounding circle, which is the circle itself.
func (s SDFCircle) Circle() (Vec2, float64) {
	return Zero2, s.Radius
}

// Circle constructs a circle SDF with the given radius.
func Circle(radius float64) SDF2 {
	return SDFCircle{Radius: radius}
}
// SDFRectangle is an axis-aligned rectangle centered at the origin with
// half-extents X and Y.
type SDFRectangle struct {
	X, Y float64
}

// SDF returns the signed distance function for the rectangle.
func (s SDFRectangle) SDF() func(Vec2) float64 {
	half := V2(s.X, s.Y)
	return func(pos Vec2) float64 {
		d := sub2(abs2(pos), half)
		outside := len2(max2(d, Zero2))
		inside := min(max(d.X, d.Y), 0.0)
		return outside + inside
	}
}

// Circle returns a bounding circle for the rectangle.
func (s SDFRectangle) Circle() (Vec2, float64) {
	return Zero2, sqrt(s.X*s.X + s.Y*s.Y)
}

// Rectangle constructs a rectangle SDF from its full width x and height y.
func Rectangle(x, y float64) SDF2 {
	return SDFRectangle{X: x / 2, Y: y / 2}
}
// SDFTriangle is a triangle given by its three corner points.
type SDFTriangle struct {
	P0, P1, P2 Vec2
}

// SDF returns the signed distance function for the triangle.
func (s SDFTriangle) SDF() func(Vec2) float64 {
	return func(pos Vec2) float64 {
		e0 := sub2(s.P1, s.P0)
		e1 := sub2(s.P2, s.P1)
		e2 := sub2(s.P0, s.P2)
		v0 := sub2(pos, s.P0)
		v1 := sub2(pos, s.P1)
		v2 := sub2(pos, s.P2)
		// Closest-point vectors from pos to each edge segment.
		pq0 := sub2(v0, scale2(e0, clamp(dot2(v0, e0)/dot2(e0, e0), 0.0, 1.0)))
		pq1 := sub2(v1, scale2(e1, clamp(dot2(v1, e1)/dot2(e1, e1), 0.0, 1.0)))
		pq2 := sub2(v2, scale2(e2, clamp(dot2(v2, e2)/dot2(e2, e2), 0.0, 1.0)))
		// Winding orientation. Previously this local was named "s",
		// shadowing the method receiver.
		orient := sign(e0.X*e2.Y - e0.Y*e2.X)
		d := min2(min2(
			V2(dot2(pq0, pq0), orient*(v0.X*e0.Y-v0.Y*e0.X)),
			V2(dot2(pq1, pq1), orient*(v1.X*e1.Y-v1.Y*e1.X))),
			V2(dot2(pq2, pq2), orient*(v2.X*e2.Y-v2.Y*e2.X)))
		return -sqrt(d.X) * sign(d.Y)
	}
}

// Circle returns a bounding circle for the triangle.
func (s SDFTriangle) Circle() (Vec2, float64) {
	return Zero2, max(max(len2(s.P0), len2(s.P1)), len2(s.P2))
}

// Triangle constructs a triangle SDF from three corner points.
func Triangle(p0, p1, p2 Vec2) SDF2 {
	return SDFTriangle{p0, p1, p2}
}
// SDFPolygon is a regular polygon with N sides and circumradius R,
// centered at the origin.
type SDFPolygon struct {
	N int
	R float64
}

// SDF returns the signed distance function for the regular polygon.
func (s SDFPolygon) SDF() func(Vec2) float64 {
	return func(p Vec2) float64 {
		pi := math.Pi
		n := float64(s.N) / 2.0
		o := pi / 2.0 / n
		// Angle of p, unwrapped to the correct half-plane.
		a := math.Atan(p.Y / p.X)
		a = tif(p.X < 0, a+pi, a)
		// t: angle of the nearest face normal; d: angle of the nearest vertex.
		t := math.Round(a/pi*n) / n * pi
		d := math.Round((a+o)/pi*n)/n*pi - o
		f := V2(math.Cos(t), math.Sin(t))
		// Inside the face's slab: distance to the face line; otherwise
		// distance to the nearest vertex.
		if abs(dot2(V2(p.X, -p.Y), V2(f.Y, f.X))) < math.Sin(o)*s.R {
			return dot2(p, f) - math.Cos(o)*s.R
		}
		return len2(sub2(p, scale2(V2(math.Cos(d), math.Sin(d)), s.R)))
	}
}

// Circle returns a bounding circle for the polygon.
func (s SDFPolygon) Circle() (Vec2, float64) {
	return Zero2, s.R * 2
}

// Polygon constructs a regular polygon SDF with n sides and radius r.
func Polygon(n int, r float64) SDF2 {
	return SDFPolygon{n, r}
}

// SDFStadium is an "uneven capsule": two circles of radius R1 (at the
// origin) and R2 (at height H on the y-axis) joined by their tangents.
type SDFStadium struct {
	H, R1, R2 float64
}

// SDF returns the signed distance function for the stadium.
func (s SDFStadium) SDF() func(Vec2) float64 {
	return func(p Vec2) float64 {
		p.X = abs(p.X)
		b := (s.R1 - s.R2) / s.H
		a := sqrt(1.0 - b*b)
		k := dot2(p, V2(-b, a))
		// Below the lower tangent point: distance to the bottom circle.
		if k < 0.0 {
			return len2(p) - s.R1
		}
		// Above the upper tangent point: distance to the top circle.
		if k > a*s.H {
			return len2(sub2(p, V2(0.0, s.H))) - s.R2
		}
		// Between: distance to the tangent line.
		return dot2(p, V2(a, b)) - s.R1
	}
}

// Circle returns a bounding circle for the stadium.
func (s SDFStadium) Circle() (Vec2, float64) {
	return Zero2, s.H + s.R1 + s.R2
}

// Stadium constructs a stadium SDF from height h and end radii r1, r2.
func Stadium(h, r1, r2 float64) SDF2 {
	return SDFStadium{h, r1, r2}
}

// SDFParabola is the region under y = M*x*x capped at height H.
type SDFParabola struct {
	M, H float64
}

// SDF returns the signed distance function for the capped parabola.
func (s SDFParabola) SDF() func(Vec2) float64 {
	return func(pos Vec2) float64 {
		pos.X = abs(pos.X)
		m := s.M
		// capped at height
		if pos.Y > s.H {
			l := sqrt(s.H / m)
			a := Vec2{-l, s.H}
			b := Vec2{l, s.H}
			pa := sub2(pos, a)
			ba := sub2(b, a)
			h := clamp(dot2(pa, ba)/dot2(ba, ba), 0.0, 1.0)
			return len2(sub2(pa, scale2(ba, h)))
		}
		// Solve the cubic for the x of the closest point on the parabola
		// (Cardano's method; h > 0 is the one-real-root case).
		p := (2.0*m*pos.Y - 1.0) / (6.0 * m * m)
		q := abs(pos.X) / (4.0 * m * m)
		h := q*q - p*p*p
		r := sqrt(abs(h))
		var x float64
		if h > 0 {
			x = pow(q+r, 1.0/3.0) - pow(abs(q-r), 1.0/3.0)*sign(r-q)
		} else {
			x = 2.0 * math.Cos(math.Atan2(r, q)/3.0) * sqrt(p)
		}
		y := m * x * x
		return len2(sub2(pos, Vec2{x, y})) * sign(pos.X-x)
	}
}

// Circle returns a bounding circle for the parabola segment.
func (s SDFParabola) Circle() (Vec2, float64) {
	x := sqrt(s.H / s.M)
	r := sqrt(x*x + s.H*s.H)
	return Zero2, r
}

// Parabola constructs a parabola SDF from its full width w on the x-axis
// at height h on the y-axis.
// width on x-axis at height on y-axis
func Parabola(w, h float64) SDF2 {
	w = w / 2
	return SDFParabola{h / (w * w), h}
}
type SDFHexagram struct {
R float64
}
func (s SDFHexagram) SDF() func(Vec2) float64 {
kx := -0.5
ky := 0.8660254038
kz := 0.5773502692
kw := 1.7320508076
return func(p Vec2) float64 {
p = abs2(p)
p = sub2(p, scale2(scale2(V2(kx, ky), min(dot2(V2(kx, ky), p), 0.0)), 2.0))
p = sub2(p, scale2(scale2(V2(ky, kx), min(dot2(V2(ky, kx), p), 0.0)), 2.0))
p = sub2(p, V2(clamp(p.X, s.R*kz, s.R*kw), s.R))
return len2(p) * sign(p.Y)
}
}
func (s SDFHexagram) Circle() (Vec2, float64) {
return Zero2, s.R * 2
}
func Hexagram(r float64) SDF2 {
return SDFHexagram{r}
} | sdf2.go | 0.638497 | 0.452596 | sdf2.go | starcoder |
package ts
import (
"math"
"time"
"github.com/m3db/m3/src/query/graphite/stats"
)
// A Datapoint is a single data value reported at a given time
type Datapoint struct {
Timestamp time.Time
Value float64
}
// ValueIsNaN returns true iff underlying value is NaN
func (d Datapoint) ValueIsNaN() bool { return math.IsNaN(d.Value) }
// DatapointsByTimestamp is a sortable interface for datapoints
type DatapointsByTimestamp []Datapoint
// Len is the length of the datapoints
func (p DatapointsByTimestamp) Len() int { return len(p) }
// Less compares two datapoints by timestamp
func (p DatapointsByTimestamp) Less(i, j int) bool { return p[i].Timestamp.Before(p[j].Timestamp) }
// Swap swaps two datapoints
func (p DatapointsByTimestamp) Swap(i, j int) { p[i], p[j] = p[j], p[i] }
// ConsolidatedValue represents a time window of consolidated data
type ConsolidatedValue struct {
	// StartTime is the start time of the time window covered by this
	// consolidation
	StartTime time.Time
	// EndTime is the end time of the time window covered by this
	// consolidation
	EndTime time.Time
	// Values is the statistics for that consolidated time window
	Values stats.Statistics
}

// ConsolidatedValuesByStartTime is a sortable interface for consolidated
// values, ordering them by ascending window start time.
type ConsolidatedValuesByStartTime []ConsolidatedValue

// Len is the length of the values
func (p ConsolidatedValuesByStartTime) Len() int { return len(p) }

// Less compares two values by start time
func (p ConsolidatedValuesByStartTime) Less(i, j int) bool {
	return p[i].StartTime.Before(p[j].StartTime)
}

// Swap swaps two values
func (p ConsolidatedValuesByStartTime) Swap(i, j int) { p[i], p[j] = p[j], p[i] }
// Datapoints is a list of datapoints that implement the stats.Values interface.
type Datapoints []Datapoint
// Len is the length of the array.
func (d Datapoints) Len() int { return len(d) }
// ValueAt returns the value at the nth element.
func (d Datapoints) ValueAt(n int) float64 { return d[n].Value }
// AllNaN returns true if all the values are NaN
func (d Datapoints) AllNaN() bool {
for _, dp := range d {
if !dp.ValueIsNaN() {
return false
}
}
return true
} | src/query/graphite/ts/datapoint.go | 0.751739 | 0.427755 | datapoint.go | starcoder |
package md3
// Frame is a single animation frame of an MD3 model: its bounding box,
// origin, and bounding-sphere radius.
type Frame struct {
	name   string
	min    Vec3 // bounding-box minimum corner
	max    Vec3 // bounding-box maximum corner
	origin Vec3
	radius float32 // bounding-sphere radius
}

// Name returns the frame's name.
func (f *Frame) Name() string {
	return f.name
}

// Min returns the minimum corner of the frame's bounding box.
func (f *Frame) Min() Vec3 {
	return f.min
}

// Max returns the maximum corner of the frame's bounding box.
func (f *Frame) Max() Vec3 {
	return f.max
}

// Origin returns the frame's origin.
func (f *Frame) Origin() Vec3 {
	return f.origin
}

// Radius returns the frame's bounding-sphere radius.
func (f *Frame) Radius() float32 {
	return f.radius
}
// TagFrame is a tag's placement for one frame: an origin plus the three
// axes of its orientation.
type TagFrame struct {
	Origin       Vec3
	XOrientation Vec3
	YOrientation Vec3
	ZOrientation Vec3
}

// Tag is a named attachment point with one TagFrame per model frame.
type Tag struct {
	name   string
	frames []TagFrame
}

// Name returns the tag's name.
func (t *Tag) Name() string {
	return t.name
}

// NumFrames returns the number of frames of tag data held by the tag.
func (t *Tag) NumFrames() int {
	return len(t.frames)
}

// Frame returns the tag's placement for the given frame index.
func (t *Tag) Frame(frame int) TagFrame {
	return t.frames[frame]
}

// Frames returns a channel that yields each of the tag's frames in order;
// the channel is closed once all frames have been sent.
func (t *Tag) Frames() <-chan TagFrame {
	output := make(chan TagFrame)
	go func(t *Tag, output chan<- TagFrame) {
		for _, frame := range t.frames {
			output <- frame
		}
		close(output)
	}(t, output)
	return output
}
// Triangle indexes the three vertices of one triangle in a surface.
type Triangle struct {
	A, B, C int32
}

// Vertex is a surface vertex: its position and normal.
type Vertex struct {
	Origin Vec3
	Normal Vec3
}

// TexCoord is a texture coordinate (S, T) for a vertex.
type TexCoord struct {
	S, T float32
}

// Shader names a shader used by a surface along with its index.
type Shader struct {
	Name  string
	Index int32
}
// Surface is one mesh of an MD3 model: its triangles, texture coordinates,
// shaders, and per-frame vertex data.
type Surface struct {
	name      string
	numFrames int
	shaders   []Shader
	triangles []Triangle
	texcoords []TexCoord // one texture coordinate per vertex
	vertices  [][]Vertex // vertices[frame][vertex]
}

// Name returns the surface's name.
func (s *Surface) Name() string {
	return s.name
}

// NumFrames returns the number of frames of vertex data held by the surface.
// This should be equal to its parent model's NumFrames result.
func (s *Surface) NumFrames() int {
	return s.numFrames
}

// NumTriangles returns the number of triangles in the surface.
func (s *Surface) NumTriangles() int {
	return len(s.triangles)
}

// NumVertices returns the number of vertices per frame; this equals the
// texture-coordinate count, as there is one texcoord per vertex.
func (s *Surface) NumVertices() int {
	return len(s.texcoords)
}

// NumShaders returns the number of shaders referenced by the surface.
func (s *Surface) NumShaders() int {
	return len(s.shaders)
}

// Triangle returns the triangle at the given index.
func (s *Surface) Triangle(index int) Triangle {
	return s.triangles[index]
}

// Triangles returns a channel that yields each triangle in order; the
// channel is closed once all triangles have been sent.
func (s *Surface) Triangles() <-chan Triangle {
	output := make(chan Triangle)
	go func(s *Surface, output chan<- Triangle) {
		for _, tri := range s.triangles {
			output <- tri
		}
		close(output)
	}(s, output)
	return output
}

// Vertex returns the vertex at the given index within the given frame.
func (s *Surface) Vertex(frame, index int) Vertex {
	return s.vertices[frame][index]
}

// Vertices returns a channel that yields each vertex of the given frame in
// order; the channel is closed once all vertices have been sent.
func (s *Surface) Vertices(frame int) <-chan Vertex {
	output := make(chan Vertex)
	go func(s *Surface, output chan<- Vertex) {
		for _, vert := range s.vertices[frame] {
			output <- vert
		}
		close(output)
	}(s, output)
	return output
}

// TexCoord returns the texture coordinate for the vertex at index.
func (s *Surface) TexCoord(index int) TexCoord {
	return s.texcoords[index]
}

// TexCoords returns a channel that yields each texture coordinate in order;
// the channel is closed once all have been sent.
func (s *Surface) TexCoords() <-chan TexCoord {
	output := make(chan TexCoord)
	go func(s *Surface, output chan<- TexCoord) {
		for _, texcoord := range s.texcoords {
			output <- texcoord
		}
		close(output)
	}(s, output)
	return output
}

// Shader returns the shader at the given index.
func (s *Surface) Shader(index int) Shader {
	return s.shaders[index]
}

// Shaders returns a channel that yields each shader in order; the channel
// is closed once all shaders have been sent.
func (s *Surface) Shaders() <-chan Shader {
	output := make(chan Shader)
	go func(s *Surface, output chan<- Shader) {
		for _, shader := range s.shaders {
			output <- shader
		}
		close(output)
	}(s, output)
	return output
}
type Model struct {
name string
frames []*Frame
tags []*Tag
surfaces []*Surface
}
func (m *Model) Name() string {
return m.name
}
func (m *Model) NumSurfaces() int {
return len(m.surfaces)
}
func (m *Model) NumFrames() int {
return len(m.frames)
}
func (m *Model) NumTags() int {
return len(m.tags)
}
func (m *Model) Surface(index int) *Surface {
return m.surfaces[index]
}
func (m *Model) Frame(index int) *Frame {
return m.frames[index]
}
func (m *Model) Tag(index int) *Tag {
return m.tags[index]
}
func (m *Model) Surfaces() <-chan *Surface {
output := make(chan *Surface)
go func(m *Model, output chan<- *Surface) {
for _, surface := range m.surfaces {
output <- surface
}
close(output)
}(m, output)
return output
}
func (m *Model) Frames() <-chan *Frame {
output := make(chan *Frame)
go func(m *Model, output chan<- *Frame) {
for _, frame := range m.frames {
output <- frame
}
close(output)
}(m, output)
return output
}
func (m *Model) Tags() <-chan *Tag {
output := make(chan *Tag)
go func(m *Model, output chan<- *Tag) {
for _, tag := range m.tags {
output <- tag
}
close(output)
}(m, output)
return output
} | md3/model.go | 0.78789 | 0.419053 | model.go | starcoder |
package detest
import (
"fmt"
"reflect"
"sort"
)
// MapComparer implements comparison of map values.
type MapComparer struct {
	// with is the user-supplied function that performs the per-key tests.
	with func(*MapTester)
}

// Map takes a function which will be called to do further comparisons of the
// map's contents.
func (d *D) Map(with func(*MapTester)) MapComparer {
	return MapComparer{with}
}

// MapTester is the struct that will be passed to the test function passed to
// detest.Map. This struct implements the map-specific testing methods such as
// Idx() and AllValues().
type MapTester struct {
	d *D
	// ending records whether the test called Etc() or End(); it is Unset
	// until one of them is called.
	ending CollectionEnding
	// seen tracks the map keys already checked via Key().
	seen map[interface{}]bool
}

// Compare compares the map value in d.Actual() by calling the function passed
// to `Map()`, which is in turn expected to run further tests of the map's
// content.
func (mc MapComparer) Compare(d *D) {
	v := reflect.ValueOf(d.Actual())
	d.PushPath(d.NewPath(describeTypeOfReflectValue(v), 1, "detest.(*D).Map"))
	defer d.PopPath()
	// A non-map value is an immediate failure; none of the per-key tests run.
	if v.Kind() != reflect.Map {
		d.AddResult(result{
			actual: newValue(d.Actual()),
			pass:   false,
			where:  inDataStructure,
			op:     "[]",
			description: fmt.Sprintf(
				"Called detest.Map() but the value being tested isn't a map, it's %s",
				articleize(describeTypeOfReflectValue(v)),
			),
		})
		return
	}
	mt := &MapTester{d: d, seen: map[interface{}]bool{}}
	// enforceEnding reports unchecked keys (or a missing Etc()/End() call)
	// after the user's test function has run.
	defer mt.enforceEnding()
	mc.with(mt)
}
// Key takes a key and an expected value for that key. If the key does not
// exist, this is considered a failure. The expected value may be a Comparer
// for nested checks; any other value is compared for equality.
func (mt *MapTester) Key(key interface{}, expect interface{}) {
	v := reflect.ValueOf(mt.d.Actual())
	mt.d.PushPath(mt.d.NewPath(fmt.Sprintf("[%v]", key), 0, ""))
	defer mt.d.PopPath()
	kv := reflect.ValueOf(key)
	// The key must be valid and exactly match the map's key type; otherwise
	// the MapIndex lookup below would panic.
	if !kv.IsValid() || kv.Type() != v.Type().Key() {
		mt.d.AddResult(result{
			actual: newValue(mt.d.Actual()),
			pass:   false,
			where:  inDataStructure,
			op:     fmt.Sprintf("[%v]", key),
			description: fmt.Sprintf(
				"Attempted to look up a map using a key that is %s but this map uses %s as a key",
				articleize(describeTypeOfReflectValue(kv)),
				articleize(describeType(v.Type().Key())),
			),
		})
		return
	}
	found := v.MapIndex(kv)
	// A zero Value means the key is absent from the map.
	if !found.IsValid() {
		mt.d.AddResult(result{
			actual:      newValue(mt.d.Actual()),
			pass:        false,
			where:       inDataStructure,
			op:          fmt.Sprintf("[%v]", key),
			description: "Attempted to get a map key that does not exist",
		})
		return
	}
	mt.d.PushActual(found.Interface())
	defer mt.d.PopActual()
	// Record the key so enforceEnding can detect unchecked keys under End().
	mt.seen[key] = true
	if c, ok := expect.(Comparer); ok {
		c.Compare(mt.d)
	} else {
		mt.d.Equal(expect).Compare(mt.d)
	}
}
// AllValues takes a function and turns it into a `FuncComparer`. It then
// passes every map value to that comparer in turn. The function must take
// exactly one value matching the map key's type and return a single boolean
// value.
func (mt *MapTester) AllValues(check interface{}) {
	mt.d.PushPath(mt.d.NewPath("range", 0, ""))
	defer mt.d.PopPath()
	comparer, err := mt.d.FuncFor(check, "AllValues")
	// A check that is not a usable function is a usage error, not a value
	// failure.
	if err != nil {
		mt.d.AddResult(result{
			actual:      newValue(mt.d.Actual()),
			pass:        false,
			where:       inUsage,
			description: err.Error(),
		})
		return
	}
	// Delegate to Key so each value check shares the per-key bookkeeping.
	mapVal := reflect.ValueOf(mt.d.Actual())
	for _, k := range mapVal.MapKeys() {
		mt.Key(k.Interface(), comparer)
	}
}

// Etc means that not all elements of the map will be tested.
func (mt *MapTester) Etc() {
	mt.ending = Etc
}

// End means that all elements of the map must be tested or else the test will
// fail.
func (mt *MapTester) End() {
	mt.ending = End
}
func (mt *MapTester) enforceEnding() {
// If we got an error in anything but a value check that means the test
// aborted. This could mean attempting to get an index past the end of the
// map, passing an incorrect type to AllValues, etc.
if mt.d.lastResultIsNonValueError() {
return
}
if mt.ending == Etc {
return
}
if mt.ending == Unset {
mt.d.AddWarning("The function passed to Map() did not call Etc() or End()")
return
}
results := []result{}
for _, k := range reflect.ValueOf(mt.d.Actual()).MapKeys() {
if !mt.seen[k.Interface()] {
results = append(results, result{
pass: false,
where: inUsage,
description: fmt.Sprintf("Your map test did not check the key %v", k),
})
}
}
// We sort the results for the benefit of our tests. This makes it easier
// to check for specific descriptions in the result without having to
// search through the list of results to find that message.
sort.SliceStable(results, func(i, j int) bool {
return results[i].description < results[j].description
})
for _, r := range results {
mt.d.AddResult(r)
}
} | pkg/detest/map.go | 0.746878 | 0.561395 | map.go | starcoder |
package trees
import (
"fmt"
)
// TreeNode is a node in a binary tree.
type TreeNode struct {
	Left  *TreeNode // left child, nil if absent
	Right *TreeNode // right child, nil if absent
	Val   int       // payload value
}
// Algos is a demo driver: it builds a series of progressively larger binary
// trees and prints the min/max depth, symmetry, and path-sum results for
// each. Because root holds &left and &right, reassigning left/right mutates
// the tree in place for subsequent scenarios.
func Algos() {
	// Empty tree.
	fmt.Println("Minimum depth of BTree: ", findMinDepthOfBTree(nil))
	fmt.Println("Maximum depth of BTree: ", findMaxDepthOfBTree(nil))
	fmt.Println("Is Tree symmetrical? ", isTreeSymmetrical(nil))
	fmt.Println("Does Path to sum 5 exists?", hasPathSum(nil, 5))
	// Single node.
	root := TreeNode{Left: nil, Right: nil, Val: 1}
	fmt.Println("Minimum depth of BTree: ", findMinDepthOfBTree(&root))
	fmt.Println("Maximum depth of BTree: ", findMaxDepthOfBTree(&root))
	fmt.Println("Is Tree symmetrical? ", isTreeSymmetrical(&root))
	fmt.Println("Does Path to sum 1 exists?", hasPathSum(&root, 1))
	// Root with a left child only.
	left := TreeNode{Left: nil, Right: nil, Val: 2}
	root = TreeNode{Left: &left, Right: nil, Val: 1}
	fmt.Println("Minimum depth of BTree: ", findMinDepthOfBTree(&root))
	fmt.Println("Maximum depth of BTree: ", findMaxDepthOfBTree(&root))
	fmt.Println("Is Tree symmetrical? ", isTreeSymmetrical(&root))
	fmt.Println("Does Path to sum 5 exists?", hasPathSum(&root, 5))
	// Root with a right child only.
	right := TreeNode{Left: nil, Right: nil, Val: 3}
	root = TreeNode{Left: nil, Right: &right, Val: 1}
	fmt.Println("Minimum depth of BTree: ", findMinDepthOfBTree(&root))
	fmt.Println("Maximum depth of BTree: ", findMaxDepthOfBTree(&root))
	fmt.Println("Is Tree symmetrical? ", isTreeSymmetrical(&root))
	fmt.Println("Does Path to sum 5 exists?", hasPathSum(&root, 5))
	// Both children, with equal values (symmetric).
	root = TreeNode{Left: &left, Right: &right, Val: 1}
	root.Right.Val = 2
	fmt.Println("Minimum depth of BTree: ", findMinDepthOfBTree(&root))
	fmt.Println("Maximum depth of BTree: ", findMaxDepthOfBTree(&root))
	fmt.Println("Is Tree symmetrical? ", isTreeSymmetrical(&root))
	fmt.Println("Does Path to sum 5 exists?", hasPathSum(&root, 5))
	// Add a grandchild on the left.
	leftOfleft := TreeNode{Left: nil, Right: nil, Val: 4}
	left = TreeNode{Left: &leftOfleft, Right: nil, Val: 2}
	root = TreeNode{Left: &left, Right: &right, Val: 1}
	fmt.Println("Minimum depth of BTree: ", findMinDepthOfBTree(&root))
	fmt.Println("Maximum depth of BTree: ", findMaxDepthOfBTree(&root))
	fmt.Println("Is Tree symmetrical? ", isTreeSymmetrical(&root))
	fmt.Println("Does Path to sum 5 exists?", hasPathSum(&root, 5))
	// Add a grandchild on the right.
	rightOfright := TreeNode{Left: nil, Right: nil, Val: 5}
	right = TreeNode{Left: nil, Right: &rightOfright, Val: 3}
	root = TreeNode{Left: &left, Right: &right, Val: 1}
	fmt.Println("Minimum depth of BTree: ", findMinDepthOfBTree(&root))
	fmt.Println("Maximum depth of BTree: ", findMaxDepthOfBTree(&root))
	fmt.Println("Is Tree symmetrical? ", isTreeSymmetrical(&root))
	fmt.Println("Does Path to sum 5 exists?", hasPathSum(&root, 5))
	// Right node with two children.
	leftOfright := TreeNode{Left: nil, Right: nil, Val: 6}
	right = TreeNode{Left: &leftOfright, Right: &rightOfright, Val: 3}
	root = TreeNode{Left: &left, Right: &right, Val: 1}
	fmt.Println("Minimum depth of BTree: ", findMinDepthOfBTree(&root))
	fmt.Println("Maximum depth of BTree: ", findMaxDepthOfBTree(&root))
	fmt.Println("Is Tree symmetrical? ", isTreeSymmetrical(&root))
	fmt.Println("Does Path to sum 5 exists?", hasPathSum(&root, 5))
	// Mirror-shaped subtrees (note: root still points at &left/&right, so
	// these assignments restructure the existing tree).
	rightOfleft := TreeNode{Left: nil, Right: nil, Val: 6}
	left = TreeNode{Left: nil, Right: &rightOfleft, Val: 2}
	right = TreeNode{Left: &leftOfright, Right: nil, Val: 2}
	fmt.Println("Minimum depth of BTree: ", findMinDepthOfBTree(&root))
	fmt.Println("Maximum depth of BTree: ", findMaxDepthOfBTree(&root))
	fmt.Println("Is Tree symmetrical? ", isTreeSymmetrical(&root))
	fmt.Println("Does Path to sum 5 exists?", hasPathSum(&root, 5))
	// Asymmetric variant: left is a bare leaf, right keeps a child.
	left = TreeNode{Left: nil, Right: nil, Val: 2}
	right = TreeNode{Left: &leftOfright, Right: nil, Val: 2}
	fmt.Println("Minimum depth of BTree: ", findMinDepthOfBTree(&root))
	fmt.Println("Maximum depth of BTree: ", findMaxDepthOfBTree(&root))
	fmt.Println("Is Tree symmetrical? ", isTreeSymmetrical(&root))
	fmt.Println("Does Path to sum 5 exists?", hasPathSum(&root, 5))
}
// findMinDepthOfBTree returns the number of nodes on the shortest path from
// the root down to the nearest leaf (0 for an empty tree).
//
// Bug fixed: when a node had exactly one child whose subtree was deeper
// than one level, the old code recursed into the *missing* child, counting
// the nil side as a valid path and returning a depth that was too small.
func findMinDepthOfBTree(root *TreeNode) int {
	if root == nil {
		return 0
	}
	// A node with one missing child must continue through its only subtree;
	// the nil side is not a path to a leaf.
	if root.Left == nil {
		return 1 + findMinDepthOfBTree(root.Right)
	}
	if root.Right == nil {
		return 1 + findMinDepthOfBTree(root.Left)
	}
	leftDepth := findMinDepthOfBTree(root.Left)
	rightDepth := findMinDepthOfBTree(root.Right)
	if leftDepth < rightDepth {
		return 1 + leftDepth
	}
	return 1 + rightDepth
}
// findMaxDepthOfBTree returns the number of nodes on the longest
// root-to-leaf path (0 for an empty tree).
func findMaxDepthOfBTree(node *TreeNode) int {
	if node == nil {
		return 0
	}
	leftDepth := findMaxDepthOfBTree(node.Left)
	rightDepth := findMaxDepthOfBTree(node.Right)
	deeper := leftDepth
	if rightDepth > deeper {
		deeper = rightDepth
	}
	return 1 + deeper
}
// isTreeNodeLeaf reports whether node is a non-nil node with no children.
func isTreeNodeLeaf(node *TreeNode) bool {
	return node != nil && node.Left == nil && node.Right == nil
}
// isTreeSymmetrical reports whether the tree is a mirror image of itself
// around its root. Empty trees and single leaves are symmetrical.
func isTreeSymmetrical(root *TreeNode) bool {
	switch {
	case root == nil:
		return true
	case isTreeNodeLeaf(root):
		return true
	default:
		return areTwoTreesMirrorTrees(root.Left, root.Right)
	}
}
// areTwoTreesMirrorTrees reports whether the two trees are mirror images of
// each other: equal values at mirrored positions, with left's right subtree
// matched against right's left subtree and vice versa.
func areTwoTreesMirrorTrees(left *TreeNode, right *TreeNode) bool {
	switch {
	case left == nil && right == nil:
		// Two empty trees mirror each other trivially.
		return true
	case left == nil || right == nil:
		// One side missing breaks the mirror.
		return false
	case isTreeNodeLeaf(left) && isTreeNodeLeaf(right):
		return left.Val == right.Val
	case isTreeNodeLeaf(left) != isTreeNodeLeaf(right):
		return false
	}
	// Both are non-leaf nodes: compare values and cross-compare subtrees.
	return left.Val == right.Val &&
		areTwoTreesMirrorTrees(left.Right, right.Left) &&
		areTwoTreesMirrorTrees(left.Left, right.Right)
}
/**
* Definition for a binary tree node.
* type TreeNode struct {
* Val int
* Left *TreeNode
* Right *TreeNode
* }
*/
// hasPathSum reports whether some root-to-leaf path of the tree adds up
// to exactly sum.
func hasPathSum(root *TreeNode, sum int) bool {
	return doesMatchingSumExists(sum, 0, root)
}
func doesMatchingSumExists(eSum int, aSum int, node *TreeNode) bool {
if node == nil {
return false
} else if isTreeNodeLeaf(node) {
return eSum == aSum+node.Val
} else {
aSum = aSum + node.Val
if node.Left != nil && node.Right == nil && isTreeNodeLeaf(node.Left) {
return eSum == aSum+node.Left.Val
} else if node.Right != nil && node.Left == nil && isTreeNodeLeaf(node.Right) {
return eSum == aSum+node.Right.Val
} else if isTreeNodeLeaf(node.Left) && isTreeNodeLeaf(node.Right) {
return eSum == aSum+node.Right.Val || eSum == aSum+node.Left.Val
} else {
if node.Right == nil {
return doesMatchingSumExists(eSum, aSum, node.Left)
} else if node.Left == nil {
return doesMatchingSumExists(eSum, aSum, node.Right)
} else {
return doesMatchingSumExists(eSum, aSum, node.Left) || doesMatchingSumExists(eSum, aSum, node.Right)
}
}
}
} | trees/algos.go | 0.558568 | 0.591841 | algos.go | starcoder |
package core
// ===========================================================================
// Fmap establishes Head as Endo-Functor F<Head>.
// The returned Head evaluates to nil when a == nil - f is not evaluated for nil.
// If f == nil the identity function is applied.
func (a Head) Fmap(f func(Head) Head) Head {
	if f == nil { f = func(a Head) Head { return a } }
	return func() Pair {
		if a == nil { return nil }
		// BUG FIX: f(a) yields a Head (a thunk), which must itself be
		// evaluated to produce the Pair this closure returns. The original
		// returned the un-evaluated Head, which does not match the
		// func() Pair signature (compare FmapHead below, which uses
		// f(aHead)()).
		return f(a)()
	}
}
// FmapPair establishes Head as a Functor: the returned Head evaluates the
// receiver and passes the resulting Pair through f. A nil receiver
// evaluates to a nil Pair without invoking f; a nil f acts as identity.
func (a Head) FmapPair(f func(Pair) Pair) Head {
	apply := f
	if apply == nil {
		apply = func(p Pair) Pair { return p }
	}
	return func() Pair {
		if a == nil {
			return nil
		}
		return apply(a())
	}
}
// ===========================================================================
// Fmap establishes Tail as Endo-Functor F<Tail>.
// If f == nil the identity functions is applied.
// The mapping is lazy: f is applied to the whole current (sub-)tail only
// when the returned Tail is evaluated, and then re-applied recursively to
// each remaining suffix.
func (a Tail) Fmap(f func(Tail) Tail) Tail {
	// Normalize a nil receiver to the canonical empty tail.
	if a == nil { return NilTail() }
	if f == nil { f = func(a Tail) Tail { return a } }
	return func() (head Head, tail Tail) {
		// Apply f to the current tail, then force one evaluation step.
		head, tail = f(a)()
		// A nil head terminates the mapped sequence as well.
		if head == nil { return NilTail()() }
		// Keep mapping f over the remaining suffix.
		tail = tail.Fmap(f)
		return
	}
}
// FmapHead returns a Tail with f applied to each of its heads.
// If f == nil the identity functions is applied.
// Application is lazy: f runs only when a produced head is actually forced.
func (a Tail) FmapHead(f func(Head) Head) Tail {
	// Normalize a nil receiver to the canonical empty tail.
	if a == nil { return NilTail() }
	if f == nil { f = func(a Head) Head { return a } }
	return func() (head Head, tail Tail) {
		aHead, tail := a()
		// A nil head marks the end of the sequence.
		if aHead == nil { return NilTail()() }
		// Wrap the head so that f is deferred until evaluation time.
		head = func() Pair { return f(aHead)() }
		tail = tail.FmapHead(f)
		return
	}
}
// FmapPair returns a Tail with f applied to each pair its heads evaluate to.
// If f == nil the identity functions is applied.
// If some head evaluates to a Pair == nil this is returned directly - f is not evaluated for nil.
func (a Tail) FmapPair(f func(Pair) Pair) Tail {
	// Normalize a nil receiver to the canonical empty tail.
	if a == nil { return NilTail() }
	if f == nil { f = func(a Pair) Pair { return a } }
	return func() (head Head, tail Tail) {
		aHead, tail := a()
		// A nil head marks the end of the sequence.
		if aHead == nil { return NilTail()() }
		// Wrap the head so f runs lazily, and only on non-nil pairs.
		head = func() (pair Pair) {
			pair = aHead()
			if pair != nil { pair = f(pair) }
			return
		}
		// Recurse lazily over the remaining tail.
		tail = tail.FmapPair(f)
		return
	}
}
// ===========================================================================
package entity
import "github.com/lquesada/cavernal/lib/g3n/engine/math32"
// RelativeCylinder is a cylinder relative to a bigger entity, e.g. a sword that's carried.
type RelativeCylinder struct{
	Ahead float32 // distance ahead of the owner, along its look direction
	Y float32 // vertical offset relative to the owner
	SimpleCylinder *SimpleCylinder // the cylinder's dimensions
}
// SimpleCylinder is a bare abstract cylinder with a certain volume.
type SimpleCylinder struct{
	Radius float32 // horizontal radius
	Height float32 // vertical extent, measured upwards from the base
}
// Cylinder is a simple cylinder somewhere in the 3D space.
type Cylinder struct{
	X float32 // axis position on the X axis
	Y float32 // base position on the Y axis
	Z float32 // axis position on the Z axis
	SimpleCylinder *SimpleCylinder // the cylinder's dimensions
}
// GenerateCylinders lays out a chain of cylinders of the given height at
// vertical offset y, covering the segment from startAhead out to reach.
// When reach is shorter than maxWidth a single cylinder spanning the whole
// reach is returned; otherwise cylinders of radius maxWidth/2 are placed
// every maxWidth/2 units, with a final one flush against the tip.
func GenerateCylinders(maxWidth, reach, height, y, startAhead float32) []*RelativeCylinder {
	if reach < maxWidth {
		return []*RelativeCylinder{
			{Ahead: startAhead, Y: y, SimpleCylinder: &SimpleCylinder{Radius: reach / 2, Height: height}},
		}
	}
	result := []*RelativeCylinder{}
	// Step along the reach, one half-width at a time.
	for ahead := startAhead; ahead < reach-maxWidth/2; ahead += maxWidth / 2 {
		result = append(result, &RelativeCylinder{Ahead: ahead, Y: y, SimpleCylinder: &SimpleCylinder{Radius: maxWidth / 2, Height: height}})
	}
	// Always finish with a cylinder whose far edge touches the reach limit.
	result = append(result, &RelativeCylinder{Ahead: reach - maxWidth/2, Y: y, SimpleCylinder: &SimpleCylinder{Radius: maxWidth / 2, Height: height}})
	return result
}
// SimpleToAbsolute places s at the origin of the 3D space.
func SimpleToAbsolute(s *SimpleCylinder) *Cylinder {
	// X, Y, Z default to their zero values (the origin).
	return &Cylinder{SimpleCylinder: s}
}
// SimpleToAbsoluteList places every cylinder of s at the origin.
func SimpleToAbsoluteList(s []*SimpleCylinder) []*Cylinder {
	out := make([]*Cylinder, 0, len(s))
	for _, c := range s {
		out = append(out, SimpleToAbsolute(c))
	}
	return out
}
// RelativeToAbsolute converts r into owner-centred absolute coordinates for
// an owner facing lookAngle radians (angle 0 points along +Z, rotating
// toward +X).
func RelativeToAbsolute(r *RelativeCylinder, lookAngle float32) *Cylinder {
	sin := math32.Sin(lookAngle)
	cos := math32.Cos(lookAngle)
	return &Cylinder{
		X:              r.Ahead * sin,
		Y:              r.Y,
		Z:              r.Ahead * cos,
		SimpleCylinder: r.SimpleCylinder,
	}
}
// RelativeToAbsoluteList converts every cylinder of r for an owner facing
// lookAngle.
func RelativeToAbsoluteList(r []*RelativeCylinder, lookAngle float32) []*Cylinder {
	out := make([]*Cylinder, 0, len(r))
	for _, c := range r {
		out = append(out, RelativeToAbsolute(c, lookAngle))
	}
	return out
}
// CheckColisionBetweenFrames reports whether any cylinder of entity 1
// overlaps any cylinder of entity 2 while both entities move linearly from
// their Old to their New positions within one frame. Vertical overlap is
// tested with a conservative swept interval; horizontal overlap compares
// the shortest distance between the two XZ motion segments against the sum
// of the cylinder radii.
func CheckColisionBetweenFrames(x1Old, y1Old, z1Old, x1New, y1New, z1New float32, cylinder1 []*Cylinder, x2Old, y2Old, z2Old, x2New, y2New, z2New float32, cylinder2 []*Cylinder) bool {
	for _, c1 := range cylinder1 {
		for _, c2 := range cylinder2 {
			// Swept vertical extent of each cylinder base over the frame.
			y1Min := math32.Min(y1Old, y1New)+c1.Y
			y1Max := math32.Max(y1Old, y1New)+c1.Y
			y2Min := math32.Min(y2Old, y2New)+c2.Y // NOTE c2.Y was missing, added it. may have introduced a bug
			y2Max := math32.Max(y2Old, y2New)+c2.Y // NOTE c2.Y was missing, added it. may have introduced a bug
			// Intervals [yMin, yMax+Height] must intersect for a vertical hit.
			if math32.Max(y1Min, y2Min) <= math32.Min(y1Max + c1.SimpleCylinder.Height, y2Max + c2.SimpleCylinder.Height) {
				// Closest approach of the two axis paths in the XZ plane.
				distance := ShortestDistanceBetweenLines(x1Old+c1.X, z1Old+c1.Z, x1New+c1.X, z1New+c1.Z, x2Old+c2.X, z2Old+c2.Z, x2New+c2.X, z2New+c2.Z)
				if distance < c1.SimpleCylinder.Radius+c2.SimpleCylinder.Radius {
					return true
				}
			}
		}
	}
	return false
}
// ShortestDistanceBetweenLines returns the minimum distance between the 2D
// segments (x1Old,z1Old)-(x1New,z1New) and (x2Old,z2Old)-(x2New,z2New).
// It is the classic segment/segment closest-point-of-approach computation:
// the closest points are parameterized as sc and tc along each segment and
// clamped into [0,1]. epsilon both guards the divisions and is folded into
// the direction/offset vectors to avoid degenerate zero-length segments, so
// results are approximate to within that tolerance.
func ShortestDistanceBetweenLines(x1Old, z1Old, x1New, z1New, x2Old, z2Old, x2New, z2New float32) float32 {
	var epsilon float32 = 0.001
	// u: direction of segment 1, v: direction of segment 2,
	// w: offset between the two segment start points.
	uX := x1New-x1Old+epsilon
	uY := z1New-z1Old+epsilon
	vX := x2New-x2Old+epsilon
	vY := z2New-z2Old+epsilon
	wX := x1Old-x2Old+epsilon
	wY := z1Old-z2Old+epsilon
	// Dot products feeding the closed-form solution.
	a := uX*uX+uY*uY
	b := uX*vX+uY*vY
	c := vX*vX+vY*vY
	d := uX*wX+uY*wY
	e := vX*wX+vY*wY
	nD := a*c - b*b // ~0 means the segments are (nearly) parallel
	var sc float32
	var sN float32
	sD := nD
	var tc float32
	var tN float32
	tD := nD
	if math32.Abs(nD) < epsilon {
		// Nearly parallel: fix sc = 0 and choose tc on segment 2.
		sN = 0.0
		sD = 1.0
		tN = e
		tD = c
	} else {
		// Closest points on the two infinite lines.
		sN = (b*e - c*d)
		tN = (a*e - b*d)
		if sN < 0.0 {
			// sc < 0: clamp to the s=0 edge.
			sN = 0.0
			tN = e
			tD = c
		} else if sN > sD {
			// sc > 1: clamp to the s=1 edge.
			sN = sD
			tN = e + b
			tD = c
		}
	}
	if tN < 0.0 {
		// tc < 0: clamp to the t=0 edge and recompute sc.
		tN = 0.0
		if -d < 0.0 {
			sN = 0.0
		} else if -d > a {
			sN = sD
		} else {
			sN = -d
			sD = a
		}
	} else if tN > tD {
		// tc > 1: clamp to the t=1 edge and recompute sc.
		tN = tD
		if (-d + b) < 0.0 {
			sN = 0
		} else if (-d + b) > a {
			sN = sD
		} else {
			sN = (-d + b)
			sD = a
		}
	}
	// Final parameters, guarding each division against tiny denominators.
	if math32.Abs(sD) < epsilon {
		sc = 0.0
	} else {
		sc = sN / sD
	}
	if math32.Abs(tD) < epsilon {
		tc = 0.0
	} else {
		tc = tN / tD
	}
	// Vector between the two closest points; its length is the distance.
	dpx := wX+uX*sc-vX*tc
	dpz := wY+uY*sc-vY*tc
	return math32.Sqrt(dpx*dpx+dpz*dpz)
}
package parser
import (
"github.com/serulian/compiler/compilercommon"
)
// Parse parses the given WebIDL source into a parse tree rooted under
// moduleNode, using builder to construct AST nodes. The file's root node
// is returned.
func Parse(moduleNode AstNode, builder NodeBuilder, source compilercommon.InputSource, input string) AstNode {
	lexer := lex(source, input)
	// Wire the generic parser framework to the WebIDL-specific token kinds
	// and node predicates.
	config := parserConfig{
		ignoredTokenTypes: map[tokenType]bool{
			tokenTypeWhitespace: true,
			tokenTypeComment: true,
		},
		childPredicate: NodePredicateChild,
		sourcePredicate: NodePredicateSource,
		startRunePredicate: NodePredicateStartRune,
		endRunePredicate: NodePredicateEndRune,
		errorNodeType: NodeTypeError,
		errorMessagePredicate: NodePredicateErrorMessage,
		commentNodeType: NodeTypeComment,
		commentNodeValuePredicate: NodePredicateCommentValue,
		isCommentToken: func(kind tokenType) bool {
			return kind == tokenTypeComment
		},
		keywordTokenType: tokenTypeKeyword,
		errorTokenType: tokenTypeError,
		eofTokenType: tokenTypeEOF,
	}
	parser := buildParser(lexer, builder, config, source, bytePosition(0), input)
	return parser.consumeTopLevel(moduleNode)
}
// consumeTopLevel attempts to consume the top-level constructs of a WebIDL file.
// It creates the file's root node under moduleNode, then loops consuming
// declarations and 'implements' statements until EOF or an unexpected token.
func (p *sourceParser) consumeTopLevel(moduleNode AstNode) AstNode {
	rootNode := p.startNode(NodeTypeFile)
	defer p.finishNode()
	moduleNode.Connect(NodePredicateChild, rootNode)
	// Start at the first token.
	p.consumeToken()
	// A lexer error aborts parsing immediately.
	if p.currentToken.kind == tokenTypeError {
		p.emitError("%s", p.currentToken.value)
		return rootNode
	}
Loop:
	for {
		switch {
		// '[' (annotations) or 'interface' begins a declaration.
		case p.isToken(tokenTypeLeftBracket) || p.isKeyword("interface"):
			rootNode.Connect(NodePredicateChild, p.consumeDeclaration())
		// A bare identifier begins an 'X implements Y' statement.
		case p.isToken(tokenTypeIdentifier):
			rootNode.Connect(NodePredicateChild, p.consumeImplementation())
		default:
			p.emitError("Unexpected token at root level: %v", p.currentToken.kind)
			break Loop
		}
		if p.isToken(tokenTypeEOF) {
			break Loop
		}
	}
	return rootNode
}
// consumeDeclaration attempts to consume a declaration, with optional attributes.
// Grammar: [Annotations] 'interface' Name [':' ParentType] '{' Members '}' ';'
func (p *sourceParser) consumeDeclaration() AstNode {
	declNode := p.startNode(NodeTypeDeclaration)
	defer p.finishNode()
	// Consume any annotations.
	p.tryConsumeAnnotations(declNode, NodePredicateDeclarationAnnotation)
	// Consume the type of declaration.
	if !p.consumeKeyword("interface") {
		return declNode
	}
	declNode.Decorate(NodePredicateDeclarationKind, "interface")
	// Consume the name of the declaration.
	declNode.Decorate(NodePredicateDeclarationName, p.consumeIdentifier())
	// Check for (optional) inheritance.
	if _, ok := p.tryConsume(tokenTypeColon); ok {
		declNode.Decorate(NodePredicateDeclarationParentType, p.consumeIdentifier())
	}
	// {
	p.consume(tokenTypeLeftBrace)
	// Members and custom operations (if any).
loop:
	for {
		if p.isToken(tokenTypeRightBrace) {
			break
		}
		// 'serializer;' / 'jsonifier;' are custom operations, not members.
		if p.isKeyword("serializer") || p.isKeyword("jsonifier") {
			customOpNode := p.startNode(NodeTypeCustomOp)
			customOpNode.Decorate(NodePredicateCustomOpName, p.currentToken.value)
			p.consume(tokenTypeKeyword)
			_, ok := p.consume(tokenTypeSemicolon)
			p.finishNode()
			declNode.Connect(NodePredicateDeclarationCustomOperation, customOpNode)
			if !ok {
				break loop
			}
			continue
		}
		declNode.Connect(NodePredicateDeclarationMember, p.consumeMember())
		// Each member must be terminated by a semicolon.
		if _, ok := p.consume(tokenTypeSemicolon); !ok {
			break
		}
	}
	// };
	p.consume(tokenTypeRightBrace)
	p.consume(tokenTypeSemicolon)
	return declNode
}
// consumeMember attempts to consume a member definition in a declaration.
// Members are either attributes ([static] [readonly] attribute Type Name)
// or operations ([getter|setter] [static] Type [Name] '(' Params ')').
func (p *sourceParser) consumeMember() AstNode {
	memberNode := p.startNode(NodeTypeMember)
	defer p.finishNode()
	var isAttribute = false
	// annotations
	p.tryConsumeAnnotations(memberNode, NodePredicateMemberAnnotation)
	// getter/setter
	var specialization = ""
	if p.isKeyword("getter") || p.isKeyword("setter") {
		consumed, _ := p.consume(tokenTypeKeyword)
		specialization = consumed.value
		memberNode.Decorate(NodePredicateMemberSpecialization, specialization)
	}
	// static readonly attribute
	if p.tryConsumeKeyword("static") {
		memberNode.Decorate(NodePredicateMemberStatic, "true")
	}
	if p.tryConsumeKeyword("readonly") {
		memberNode.Decorate(NodePredicateMemberReadonly, "true")
	}
	if p.tryConsumeKeyword("attribute") {
		isAttribute = true
		memberNode.Decorate(NodePredicateMemberAttribute, "true")
	}
	// Consume the type of the member.
	memberNode.Decorate(NodePredicateMemberType, p.consumeType())
	// Consume the member's name.
	// Specialized members (getter/setter) are anonymous.
	if specialization == "" {
		memberNode.Decorate(NodePredicateMemberName, p.consumeIdentifier())
	}
	// If not an attribute, consume the parameters of the member.
	if !isAttribute {
		p.consumeParameters(memberNode, NodePredicateMemberParameter)
	}
	return memberNode
}
// tryConsumeAnnotations consumes any annotations found on the parent node.
// Annotations come in bracketed, comma-separated groups: [Foo, Bar(baz)].
// All consecutive groups are consumed.
func (p *sourceParser) tryConsumeAnnotations(parentNode AstNode, predicate string) {
	for {
		// [
		if _, ok := p.tryConsume(tokenTypeLeftBracket); !ok {
			return
		}
		for {
			// Foo()
			parentNode.Connect(predicate, p.consumeAnnotationPart())
			// ,
			if _, ok := p.tryConsume(tokenTypeComma); !ok {
				break
			}
		}
		// ]
		if _, ok := p.consume(tokenTypeRightBracket); !ok {
			return
		}
	}
}
// consumeAnnotationPart consumes an annotation, as found within a set of brackets `[]`.
// Form: Name [= Value] [(Parameters)]
func (p *sourceParser) consumeAnnotationPart() AstNode {
	annotationNode := p.startNode(NodeTypeAnnotation)
	defer p.finishNode()
	// Consume the name of the annotation.
	annotationNode.Decorate(NodePredicateAnnotationName, p.consumeIdentifier())
	// Consume (optional) value.
	if _, ok := p.tryConsume(tokenTypeEquals); ok {
		annotationNode.Decorate(NodePredicateAnnotationDefinedValue, p.consumeIdentifier())
	}
	// Consume (optional) parameters.
	if p.isToken(tokenTypeLeftParen) {
		p.consumeParameters(annotationNode, NodePredicateAnnotationParameter)
	}
	return annotationNode
}
// expandedTypeKeywords defines the keywords that form the prefixes for expanded types:
// two-identifier type names such as "unsigned long", "long long" and
// "unrestricted float". The map's key is the first word; the values are the
// second words it may be followed by.
var expandedTypeKeywords = map[string][]string{
	"unsigned": []string{"short", "long"},
	"long": []string{"long"},
	"unrestricted": []string{"float", "double"},
}
// consumeType attempts to consume a type: 'any', or an identifier
// (possibly a two-word expanded type such as "unsigned long") with an
// optional trailing '?' marking nullability. The textual type name is
// returned.
func (p *sourceParser) consumeType() string {
	if p.tryConsumeKeyword("any") {
		return "any"
	}
	typeName := p.consumeIdentifier()
	// Multi-word primitive types are completed by consuming the matching
	// second identifier, if present.
	if secondaries, ok := expandedTypeKeywords[typeName]; ok {
		for _, secondary := range secondaries {
			if p.isToken(tokenTypeIdentifier) && p.currentToken.value == secondary {
				typeName += " " + secondary
				p.consume(tokenTypeIdentifier)
				break
			}
		}
	}
	// A trailing '?' marks the type nullable.
	if _, ok := p.tryConsume(tokenTypeQuestionMark); ok {
		typeName += "?"
	}
	return typeName
}
// consumeParameter attempts to consume a parameter.
// Form: [optional] Type Name
func (p *sourceParser) consumeParameter() AstNode {
	paramNode := p.startNode(NodeTypeParameter)
	defer p.finishNode()
	// optional
	if p.tryConsumeKeyword("optional") {
		paramNode.Decorate(NodePredicateParameterOptional, "true")
	}
	// Consume the parameter's type.
	paramNode.Decorate(NodePredicateParameterType, p.consumeType())
	// Consume the parameter's name.
	paramNode.Decorate(NodePredicateParameterName, p.consumeIdentifier())
	return paramNode
}
// consumeParameters attempts to consume a set of parameters.
// Form: '(' [Parameter {',' Parameter}] ')'. Each parsed parameter is
// connected to parentNode under the given predicate.
func (p *sourceParser) consumeParameters(parentNode AstNode, predicate string) {
	p.consume(tokenTypeLeftParen)
	// Empty parameter list.
	if _, ok := p.tryConsume(tokenTypeRightParen); ok {
		return
	}
	for {
		parentNode.Connect(predicate, p.consumeParameter())
		if _, ok := p.tryConsume(tokenTypeRightParen); ok {
			return
		}
		if _, ok := p.consume(tokenTypeComma); !ok {
			return
		}
	}
}
// consumeImplementation attempts to consume an implementation definition.
// Form: Identifier 'implements' Identifier ';'
func (p *sourceParser) consumeImplementation() AstNode {
	implNode := p.startNode(NodeTypeImplementation)
	defer p.finishNode()
	// identifier
	implNode.Decorate(NodePredicateImplementationName, p.consumeIdentifier())
	// implements
	if !p.consumeKeyword("implements") {
		return implNode
	}
	// identifier
	implNode.Decorate(NodePredicateImplementationSource, p.consumeIdentifier())
	// semicolon
	p.consume(tokenTypeSemicolon)
	return implNode
}
package geomap
import "math"
// SECTION: Internal
// toDegrees converts an angle expressed in radians to degrees.
func toDegrees(x float64) float64 {
	return x * 180.0 / math.Pi
}
// toRadians converts an angle expressed in degrees to radians.
func toRadians(x float64) float64 {
	return x * math.Pi / 180.0
}
// min returns the smaller of a and b.
func min(a int, b int) int {
	if a > b {
		return b
	}
	return a
}
// euclidDistance returns the SQUARED planar distance between two lat/lon
// points, approximating degrees as a flat grid: latitude degrees have the
// constant length latDegreeLength, while the longitude degree length is
// taken at the average latitude of the two points. Note the missing sqrt:
// callers receive distance squared, suitable for comparisons only.
func euclidDistance(disl DegreeDistance, lat1 float64, lon1 float64, lat2 float64, lon2 float64) float64 {
	var (
		latLen float64 = math.Abs(lat1-lat2) * latDegreeLength
		lonLen float64 = math.Abs(lon1-lon2) * disl.avgLatitudeLength(lat1, lat2)
	)
	return (latLen*latLen + lonLen*lonLen)
}
// pCoordinate maps a lat/lon pair onto integer grid coordinates with cell
// size res, shifting by the map's minimum corner (package constants minLat
// and minLon) so coordinates start at zero.
func pCoordinate(lat float64, lon float64, res float64) (x int, y int) {
	x = int((-minLat + lat) * latDegreeLength / res)
	y = int((-minLon + lon) * lonDegreeLength / res)
	return x, y
}
// directionTo buckets a compass bearing (degrees, 0..360) into one of
// eight 45-degree sectors: bearings within 22.5 degrees of 45 map to 0, of
// 90 to 1, and so on clockwise; bearings within 22.5 degrees of north
// (0/360) map to 7.
func directionTo(bearing float64) int {
	sector := bearing - 22.5
	if sector < 0 {
		// Wrap bearings just below the first sector boundary.
		sector += 360
	}
	return int(sector / 45.0)
}
// bearingTo computes the initial great-circle bearing, in degrees, from
// (lat1, lon1) toward (lat2, lon2). Inputs are in degrees; the result is
// in (-180, 180], measured clockwise from north. The radian/degree
// conversions are inlined with arithmetic identical to toRadians/toDegrees.
func bearingTo(lat1, lon1, lat2, lon2 float64) float64 {
	dLon := (lon2 - lon1) * math.Pi / 180.0
	phi1 := lat1 * math.Pi / 180.0
	phi2 := lat2 * math.Pi / 180.0
	y := math.Sin(dLon) * math.Cos(phi2)
	x := math.Cos(phi1)*math.Sin(phi2) - math.Sin(phi1)*math.Cos(phi2)*math.Cos(dLon)
	return math.Atan2(y, x) * 180.0 / math.Pi
}
// distance returns the great-circle distance between two points given in
// degrees, using the haversine formula scaled by cEarthRadius.
func distance(lat1, lon1, lat2, lon2 float64) float64 {
	var (
		sLat float64 = math.Sin(toRadians(lat2-lat1) / 2)
		sLon float64 = math.Sin(toRadians(lon2-lon1) / 2)
		a float64
		c float64
	)
	// haversine: a = sin²(Δφ/2) + cos(φ1)·cos(φ2)·sin²(Δλ/2)
	// BUG FIX: the cosine factors were previously nested as
	// cos(toRadians(lat1) * cos(toRadians(lat2))), i.e. an angle multiplied
	// by a cosine before taking the cosine; both factors must be cosines of
	// the radian latitudes.
	a = math.Pow(sLat, 2) + math.Pow(sLon, 2)*math.Cos(toRadians(lat1))*math.Cos(toRadians(lat2))
	c = 2 * math.Atan2(math.Sqrt(a), math.Sqrt(1-a))
	return cEarthRadius * c
}
// isBetween reports whether value lies in the closed interval [min, max].
func isBetween(value float64, min float64, max float64) bool {
	return value >= min && value <= max
}
// CalcCell returns the grid cell that contains p at resolution res.
func CalcCell(p PointInterface, res float64) cell {
	x, y := pCoordinate(p.Lat(), p.Lon(), res)
	return cell{x, y}
}
// - MARK: DegreeDistance section.
// get returns the length of one degree of longitude at the given latitude,
// computed as the great-circle distance between longitudes 0 and 1 at that
// latitude. Results are memoized in dd, keyed by the latitude rounded
// toward zero to 0.1 degree.
func (dd DegreeDistance) get(lat float64) (dist float64) {
	var (
		latIndex int = int(lat * 10)
		latRnd float64 = float64(latIndex) / 10
	)
	// Cache hit: this 0.1-degree band was computed before.
	if v, ok := dd[latIndex]; ok {
		return v
	}
	// One degree of longitude measured along the rounded latitude circle.
	dist = distance(latRnd, 0.0, latRnd, 1.0)
	dd[latIndex] = dist
	return dist
}
// avgLatitudeLength returns the cached longitude-degree length at the
// midpoint of lat1 and lat2.
func (dd DegreeDistance) avgLatitudeLength(lat1, lat2 float64) float64 {
	mid := (lat1 + lat2) / 2.0
	return dd.get(mid)
}
// SECTION: Public
// Km converts a kilometre count into the package's base length unit
// (x1000; presumably metres - matches the scale of cEarthRadius).
func Km(km float64) float64 {
	return km * 1000
}
// EuclidDistance returns the squared flat-grid distance between p1 and p2
// (see euclidDistance for details and caveats).
func EuclidDistance(disl DegreeDistance, p1 PointInterface, p2 PointInterface) float64 {
	return euclidDistance(disl, p1.Lat(), p1.Lon(), p2.Lat(), p2.Lon())
}
// DirectionTo returns the eight-way sector index of the bearing from p1
// toward p2 (see directionTo for the sector layout).
func DirectionTo(p1 PointInterface, p2 PointInterface) int {
	return directionTo(BearingTo(p1, p2))
}
// BearingTo returns the initial bearing, in degrees, from p1 toward p2.
func BearingTo(p1 PointInterface, p2 PointInterface) float64 {
	return bearingTo(p1.Lat(), p1.Lon(), p2.Lat(), p2.Lon())
}
func Distance(p1 PointInterface, p2 PointInterface) float64 {
return distance(p1.Lat(), p1.Lon(), p2.Lat(), p2.Lon())
} | geomap.go | 0.861057 | 0.708364 | geomap.go | starcoder |
package codes
// Code describes an HTTP status code: its canonical reason phrase and a
// human-readable description. YAML tags allow the table to be serialized.
type Code struct {
	Message string `yaml:"message"` // canonical reason phrase, e.g. "Not Found"
	Description string `yaml:"description"` // longer human-readable explanation
}
// Codes maps HTTP status codes (plus the "1xx".."5xx" class shorthands) to
// their reason phrase and description.
// BUG FIX: a global find/replace had capitalized "message" to "Message"
// inside several description strings (103, 207, 500); those runtime strings
// are restored to normal prose, and the broken grammar of 500 is repaired.
var Codes = map[string]Code{
	"100": {
		Message: "Continue",
		Description: "The server has received the request headers, and the client should proceed to send the request body.",
	},
	"101": {
		Message: "Switching Protocols",
		Description: "The requester has asked the server to switch protocols.",
	},
	"102": {
		Message: "Processing",
		Description: "This code indicates that the server has received and is processing the request, but no response is available yet. This prevents the client from timing out and assuming the request was lost.",
	},
	"103": {
		Message: "Early Hints",
		Description: "Used to return some response headers before final HTTP message.",
	},
	"200": {
		Message: "OK",
		Description: "The request is OK (this is the standard response for successful HTTP requests).",
	},
	"201": {
		Message: "Created",
		Description: "The request has been fulfilled, and a new resource is created.",
	},
	"202": {
		Message: "Accepted",
		Description: "The request has been accepted for processing, but the processing has not been completed.",
	},
	"203": {
		Message: "Non-Authoritative Information",
		Description: "The request has been successfully processed, but is returning information that may be from another source.",
	},
	"204": {
		Message: "No Content",
		Description: "The request has been successfully processed, but is not returning any content.",
	},
	"205": {
		Message: "Reset Content",
		Description: "The request has been successfully processed, but is not returning any content, and requires that the requester reset the document view.",
	},
	"206": {
		Message: "Partial Content",
		Description: "The server is delivering only part of the resource due to a range header sent by the client.",
	},
	"207": {
		Message: "Multi-Status",
		Description: "The message body that follows is by default an XML message and can contain a number of separate response codes, depending on how many sub-requests were made.",
	},
	"208": {
		Message: "Already Reported",
		Description: "The members of a DAV binding have already been enumerated in a preceding part of the (multistatus) response, and are not being included again.",
	},
	"218": {
		Message: "This is fine (Apache Web Server)",
		Description: "Used as a catch-all error condition for allowing response bodies to flow through Apache when ProxyErrorOverride is enabled.",
	},
	"226": {
		Message: "IM Used",
		Description: "The server has fulfilled a request for the resource, and the response is a representation of the result of one or more instance-manipulations applied to the current instance.",
	},
	"300": {
		Message: "Multiple Choices",
		Description: "A link list. The user can select a link and go to that location. Maximum five addresses.",
	},
	"301": {
		Message: "Moved Permanently",
		Description: "The requested page has moved to a new URL.",
	},
	"302": {
		Message: "Found",
		Description: "The requested page has moved temporarily to a new URL.",
	},
	"303": {
		Message: "See Other",
		Description: "The requested page can be found under a different URL.",
	},
	"304": {
		Message: "Not Modified",
		Description: "Indicates the requested page has not been modified since last requested.",
	},
	"306": {
		Message: "Switch Proxy",
		Description: "No longer used. Originally meant 'Subsequent requests should use the specified proxy.'",
	},
	"307": {
		Message: "Temporary Redirect",
		Description: "The requested page has moved temporarily to a new URL.",
	},
	"308": {
		Message: "Resume Incomplete",
		Description: "Used in the resumable requests proposal to resume aborted PUT or POST requests.",
	},
	"400": {
		Message: "Bad Request",
		Description: "The request cannot be fulfilled due to bad syntax.",
	},
	"401": {
		Message: "Unauthorized",
		Description: "The request was a legal request, but the server is refusing to respond to it. For use when authentication is possible but has failed or not yet been provided.",
	},
	"402": {
		Message: "Payment Required",
		Description: "Not yet implemented by RFC standards, but reserved for future use.",
	},
	"403": {
		Message: "Forbidden",
		Description: "The request was a legal request, but the server is refusing to respond to it.",
	},
	"404": {
		Message: "Not Found",
		Description: "The requested page could not be found but may be available again in the future.",
	},
	"405": {
		Message: "Method Not Allowed",
		Description: "A request was made of a page using a request method not supported by that page.",
	},
	"406": {
		Message: "Not Acceptable",
		Description: "The server can only generate a response that is not accepted by the client.",
	},
	"407": {
		Message: "Proxy Authentication Required",
		Description: "The client must first authenticate itself with the proxy.",
	},
	"408": {
		Message: "Request Timeout",
		Description: "The server timed out waiting for the request.",
	},
	"409": {
		Message: "Conflict",
		Description: "The request could not be completed because of a conflict in the request.",
	},
	"410": {
		Message: "Gone",
		Description: "The requested page is no longer available.",
	},
	"411": {
		Message: "Length Required",
		Description: "The 'Content-Length' is not defined. The server will not accept the request without it.",
	},
	"412": {
		Message: "Precondition Failed",
		Description: "The precondition given in the request evaluated to false by the server.",
	},
	"413": {
		Message: "Request Entity Too Large",
		Description: "The server will not accept the request, because the request entity is too large.",
	},
	"414": {
		Message: "Request-URI Too Long",
		Description: "The server will not accept the request, because the URL is too long. Occurs when you convert a POST request to a GET request with a long query information.",
	},
	"415": {
		Message: "Unsupported Media Type",
		Description: "The server will not accept the request, because the media type is not supported.",
	},
	"416": {
		Message: "Requested Range Not Satisfiable",
		Description: "The client has asked for a portion of the file, but the server cannot supply that portion.",
	},
	"417": {
		Message: "Expectation Failed",
		Description: "The server cannot meet the requirements of the Expect request-header field.",
	},
	"418": {
		Message: "I'm a teapot",
		Description: `Any attempt to brew coffee with a teapot should result in the error code '418 I'm a teapot'. The resulting entity body MAY be short and stout.
 ________ __
| \ | \
 \▓▓▓▓▓▓▓▓ ______ ______ ______ ______ _| ▓▓_
 | ▓▓ / \ | \ / \ / \| ▓▓ \
 | ▓▓ | ▓▓▓▓▓▓\ \▓▓▓▓▓▓\ ▓▓▓▓▓▓\ ▓▓▓▓▓▓\\▓▓▓▓▓▓
 | ▓▓ | ▓▓ ▓▓/ ▓▓ ▓▓ | ▓▓ ▓▓ | ▓▓ | ▓▓ __
 | ▓▓ | ▓▓▓▓▓▓▓▓ ▓▓▓▓▓▓▓ ▓▓__/ ▓▓ ▓▓__/ ▓▓ | ▓▓| \
 | ▓▓ \▓▓ \\▓▓ ▓▓ ▓▓ ▓▓\▓▓ ▓▓ \▓▓ ▓▓
 \▓▓ \▓▓▓▓▓▓▓ \▓▓▓▓▓▓▓ ▓▓▓▓▓▓▓ \▓▓▓▓▓▓ \▓▓▓▓
 | ▓▓
 | ▓▓
 \▓▓
 `,
	},
	"419": {
		Message: "Page Expired (Laravel Framework)",
		Description: "Used by the Laravel Framework when a CSRF Token is missing or expired.",
	},
	"420": {
		Message: "Method Failure (Spring Framework)",
		Description: "A deprecated response used by the Spring Framework when a method has failed.",
	},
	"421": {
		Message: "Misdirected Request",
		Description: "The request was directed at a server that is not able to produce a response (for example because a connection reuse).",
	},
	"422": {
		Message: "Unprocessable Entity",
		Description: "The request was well-formed but was unable to be followed due to semantic errors.",
	},
	"423": {
		Message: "Locked",
		Description: "The resource that is being accessed is locked.",
	},
	"424": {
		Message: "Failed Dependency",
		Description: "The request failed due to failure of a previous request (e.g., a PROPPATCH).",
	},
	"426": {
		Message: "Upgrade Required",
		Description: "The client should switch to a different protocol such as TLS/1.0, given in the Upgrade header field.",
	},
	"428": {
		Message: "Precondition Required",
		Description: "The origin server requires the request to be conditional.",
	},
	"429": {
		Message: "Too Many Requests",
		Description: "The user has sent too many requests in a given amount of time. Intended for use with rate limiting schemes.",
	},
	"431": {
		Message: "Request Header Fields Too Large",
		Description: "The server is unwilling to process the request because either an individual header field, or all the header fields collectively, are too large.",
	},
	"440": {
		Message: "Login Time-out",
		Description: "The client's session has expired and must log in again. (IIS)",
	},
	"444": {
		Message: "Connection Closed Without Response",
		Description: "A non-standard status code used to instruct nginx to close the connection without sending a response to the client, most commonly used to deny malicious or malformed requests.",
	},
	"449": {
		Message: "Retry With",
		Description: "The server cannot honour the request because the user has not provided the required information. (IIS)",
	},
	"450": {
		Message: "Blocked by Windows Parental Controls",
		Description: "The Microsoft extension code indicated when Windows Parental Controls are turned on and are blocking access to the requested webpage.",
	},
	"451": {
		Message: "Unavailable For Legal Reasons",
		Description: "A server operator has received a legal demand to deny access to a resource or to a set of resources that includes the requested resource.",
	},
	"494": {
		Message: "Request Header Too Large",
		Description: "Used by nginx to indicate the client sent too large of a request or header line that is too long.",
	},
	"495": {
		Message: "SSL Certificate Error",
		Description: "An expansion of the 400 Bad Request response code, used when the client has provided an invalid client certificate.",
	},
	"496": {
		Message: "SSL Certificate Required",
		Description: "An expansion of the 400 Bad Request response code, used when a client certificate is required but not provided.",
	},
	"497": {
		Message: "HTTP Request Sent to HTTPS Port",
		Description: "An expansion of the 400 Bad Request response code, used when the client has made a HTTP request to a port listening for HTTPS requests.",
	},
	"498": {
		Message: "Invalid Token (Esri)",
		Description: "Returned by ArcGIS for Server. Code 498 indicates an expired or otherwise invalid token.",
	},
	"499": {
		Message: "Client Closed Request",
		Description: "A non-standard status code introduced by nginx for the case when a client closes the connection while nginx is processing the request.",
	},
	"500": {
		Message: "Internal Server Error",
		Description: "An error has occurred in a server side script, and no more specific message is suitable.",
	},
	"501": {
		Message: "Not Implemented",
		Description: "The server either does not recognize the request method, or it lacks the ability to fulfill the request.",
	},
	"502": {
		Message: "Bad Gateway",
		Description: "The server was acting as a gateway or proxy and received an invalid response from the upstream server.",
	},
	"503": {
		Message: "Service Unavailable",
		Description: "The server is currently unavailable (overloaded or down).",
	},
	"504": {
		Message: "Gateway Timeout",
		Description: "The server was acting as a gateway or proxy and did not receive a timely response from the upstream server.",
	},
	"505": {
		Message: "HTTP Version Not Supported",
		Description: "The server does not support the HTTP protocol version used in the request.",
	},
	"506": {
		Message: "Variant Also Negotiates",
		Description: "Transparent content negotiation for the request results in a circular reference.",
	},
	"507": {
		Message: "Insufficient Storage",
		Description: "The server is unable to store the representation needed to complete the request.",
	},
	"508": {
		Message: "Loop Detected",
		Description: "The server detected an infinite loop while processing the request (sent instead of 208 Already Reported).",
	},
	"509": {
		Message: "Bandwidth Limit Exceeded",
		Description: "The server has exceeded the bandwidth specified by the server administrator; this is often used by shared hosting providers to limit the bandwidth of customers.",
	},
	"510": {
		Message: "Not Extended",
		Description: "Further extensions to the request are required for the server to fulfil it.",
	},
	"511": {
		Message: "Network Authentication Required",
		Description: "The client needs to authenticate to gain network access.",
	},
	"520": {
		Message: "Unknown Error",
		Description: "The 520 error is used as a 'catch-all response for when the origin server returns something unexpected', listing connection resets, large headers, and empty or invalid responses as common triggers.",
	},
	"521": {
		Message: "Web Server Is Down",
		Description: "The origin server has refused the connection from Cloudflare.",
	},
	"522": {
		Message: "Connection Timed Out",
		Description: "Cloudflare could not negotiate a TCP handshake with the origin server.",
	},
	"523": {
		Message: "Origin Is Unreachable",
		Description: "Cloudflare could not reach the origin server; for example, if the DNS records for the origin server are incorrect.",
	},
	"524": {
		Message: "A Timeout Occurred",
		Description: "Cloudflare was able to complete a TCP connection to the origin server, but did not receive a timely HTTP response.",
	},
	"525": {
		Message: "SSL Handshake Failed",
		Description: "Cloudflare could not negotiate a SSL/TLS handshake with the origin server.",
	},
	"526": {
		Message: "Invalid SSL Certificate",
		Description: "Used by Cloudflare and Cloud Foundry's gorouter to indicate failure to validate the SSL/TLS certificate that the origin server presented.",
	},
	"527": {
		Message: "Railgun Listener to Origin Error",
		Description: "Error 527 indicates that the request timed out or failed after the WAN connection had been established.",
	},
	"530": {
		Message: "Origin DNS Error",
		Description: "Error 530 indicates that the requested host name could not be resolved on the Cloudflare network to an origin server.",
	},
	"598": {
		Message: "Network Read Timeout Error",
		Description: "Used by some HTTP proxies to signal a network read timeout behind the proxy to a client in front of the proxy.",
	},
	"1xx": {
		Message: "Information",
		Description: "1xx codes are often interim responses for sharing connection status information. Not intended for final request or response action.",
	},
	"2xx": {
		Message: "Successful",
		Description: "2xx codes indicate successful responses usually this means the action requested by the client was received, understood and accepted successfully.",
	},
	"3xx": {
		Message: "Redirection",
		Description: "3xx codes are a class of responses that suggest the User-Agent must follow another course of action to obtain the complete requested resource.",
	},
	"4xx": {
		Message: "Client Error",
		Description: "4xx codes generally are error responses specifying an issue at the client’s end. Potentially a network issue.",
	},
	"5xx": {
		Message: "Server Error",
		Description: "5xx error codes indicate that an error or unresolvable request occurred on the server side, whether that is a proxy or the origin host.",
	},
}
package fpc
import "time"
// Parameters define the parameters of an FPC instance.
type Parameters struct {
// The lower bound liked percentage threshold at the first round. Also called 'a'.
FirstRoundLowerBoundThreshold float64
// The upper bound liked percentage threshold at the first round. Also called 'b'.
FirstRoundUpperBoundThreshold float64
// The lower bound liked percentage threshold used after the first round.
SubsequentRoundsLowerBoundThreshold float64
// The upper bound liked percentage threshold used after the first round.
SubsequentRoundsUpperBoundThreshold float64
// The fixed liked percentage threshold used in last 'l2' rounds.
EndingRoundsFixedThreshold float64
// The amount of opinions to query on each round for a given vote context. Also called 'k'.
QuerySampleSize int
// The maximum amount of votes to collect on each round for a given vote context. Naive implementation of 'k_diff' from the paper.
MaxQuerySampleSize int
// The amount of rounds a vote context's opinion needs to stay the same to be considered final. Also called 'l'.
TotalRoundsFinalization int
// The amount of last rounds for the fixed threshold.
TotalRoundsFixedThreshold int
// The amount of rounds for which to ignore any finalization checks for. Also called 'm'.
TotalRoundsCoolingOffPeriod int
// The max amount of rounds to execute per vote context before aborting them.
MaxRoundsPerVoteContext int
// The max amount of time a query is allowed to take.
QueryTimeout time.Duration
// MinOpinionsReceived defines the minimum amount of opinions to receive in order to consider an FPC round valid.
MinOpinionsReceived int
}
// DefaultParameters returns the default parameters used in FPC.
func DefaultParameters() *Parameters {
	return &Parameters{
		FirstRoundLowerBoundThreshold:       0.67,
		FirstRoundUpperBoundThreshold:       0.67,
		SubsequentRoundsLowerBoundThreshold: 0.50,
		SubsequentRoundsUpperBoundThreshold: 0.67,
		EndingRoundsFixedThreshold:          0.50,
		QuerySampleSize:                     21,
		MaxQuerySampleSize:                  100,
		MinOpinionsReceived:                 1,
		TotalRoundsFinalization:             10,
		TotalRoundsFixedThreshold:           3,
		TotalRoundsCoolingOffPeriod:         0,
		MaxRoundsPerVoteContext:             100,
		QueryTimeout:                        1500 * time.Millisecond,
	}
}
// RandUniformThreshold maps rand (expected in [0, 1]) linearly onto the
// interval [thresholdLowerBound, thresholdUpperBound] and returns the
// resulting threshold.
func RandUniformThreshold(rand float64, thresholdLowerBound float64, thresholdUpperBound float64) float64 {
	span := thresholdUpperBound - thresholdLowerBound
	return thresholdLowerBound + rand*span
}
package main
import (
"errors"
"fmt"
"strconv"
"strings"
)
var (
	// ErrVertexExists is returned when adding a vertex that is already in the graph.
	ErrVertexExists = errors.New("vertex already exists, try a different label")
	// ErrEdgeExists is returned when adding an edge that is already in the graph.
	ErrEdgeExists = errors.New("edge already exists, try a different label")
)
// NewGraph returns an empty, ready-to-use graph with initialized
// vertex and edge sets.
func NewGraph() *Graph {
	g := &Graph{}
	g.Vertices = make(map[*Vertex]struct{})
	g.Edges = make(map[*Edge]struct{})
	return g
}
// Link adds both vertices to the graph (if not already present) and
// connects them with two weighted edges, one in each direction, so the
// graph behaves as undirected.
//
// The previous implementation built wrapped errors with fmt.Errorf but
// discarded every one of them, so all failures were silently ignored
// and Link unconditionally returned nil; edge errors are now returned.
func (g *Graph) Link(from, to *Vertex, weight int) error {
	// AddVertex only fails when the vertex is already present, which is
	// legal here: linking two existing vertices must succeed.
	_ = g.AddVertex(from)
	_ = g.AddVertex(to)
	// Add edge from source to destination.
	if err := g.AddEdge(from, to, weight); err != nil {
		return fmt.Errorf("link between %v and %v failed: %w", from, to, err)
	}
	// Add edge from destination to source (undirected graph).
	if err := g.AddEdge(to, from, weight); err != nil {
		return fmt.Errorf("link between %v and %v failed: %w", to, from, err)
	}
	return nil
}
// AddVertex inserts v into the graph's vertex set. It returns an error
// wrapping ErrVertexExists when v is already present.
func (g *Graph) AddVertex(v *Vertex) error {
	// Check if vertex exists before proceeding.
	if _, ok := g.Vertices[v]; ok {
		// %w (not %s) so callers can match the sentinel with errors.Is.
		return fmt.Errorf("failed to add vertex: %w", ErrVertexExists)
	}
	g.Vertices[v] = struct{}{}
	return nil
}
// AddEdge connects from -> to with the given weight. It returns an
// error wrapping ErrEdgeExists when the edge is already present.
//
// NOTE(review): the duplicate check compares *Edge pointers, and a
// fresh Edge is allocated on every call, so two logically identical
// edges are never detected as duplicates — confirm whether value-based
// deduplication was intended.
func (g *Graph) AddEdge(from, to *Vertex, weight int) error {
	// Create new edge.
	edge := &Edge{
		From:   from,
		To:     to,
		Weight: weight,
	}
	// Check if edge exists before proceeding.
	if _, ok := g.Edges[edge]; ok {
		// %w (not %s) so callers can match the sentinel with errors.Is.
		return fmt.Errorf("failed to add edge: %w", ErrEdgeExists)
	}
	g.Edges[edge] = struct{}{}
	return nil
}
// PrintVertices prints all vertex labels, comma-separated, on a single
// indented line. Ordering is unspecified because Go map iteration order
// is random.
//
// This replaces the previous Sprintf-of-a-constant plus TrimRight
// approach (staticcheck S1039); TrimRight's cutset ", " could also eat
// trailing ',' or ' ' characters belonging to the last label itself.
func (g *Graph) PrintVertices() {
	labels := make([]string, 0, len(g.Vertices))
	for v := range g.Vertices {
		labels = append(labels, v.Label)
	}
	fmt.Println("\nVertices:\n\t" + strings.Join(labels, ", "))
}
// PrintEdges prints every edge as "from -> to (weight)", one per
// indented line, in unspecified (map-iteration) order.
func (g *Graph) PrintEdges() {
	var sb strings.Builder
	sb.WriteString("\nEdges:")
	for e := range g.Edges {
		sb.WriteString("\n\t" + e.From.Label + " -> " + e.To.Label + " (" + strconv.Itoa(e.Weight) + ")")
	}
	fmt.Println(sb.String())
}
// PrintLinks prints all links from the graph.
func (g *Graph) PrintLinks() {
links := fmt.Sprintf("\nLinks:")
for v := range g.Vertices {
links += fmt.Sprintf("\n\t%s: ", v.Label)
for e := range g.Edges {
if e.From.Label == v.Label {
links += fmt.Sprintf("%s -> ", e.To.Label)
}
}
links = strings.TrimRight(links, " -> ")
}
fmt.Println(links)
} | go/graph/graph.go | 0.608245 | 0.418043 | graph.go | starcoder |
package custom
import (
"sort"
"strings"
"github.com/grafana-tools/sdk"
)
// Option represents an option that can be used to configure a custom variable.
type Option func(constant *Custom)

// ValuesMap represents a "label" to "value" map of options for a custom variable.
type ValuesMap map[string]string
// asQuery flattens the map's values into a sorted, comma-separated
// string suitable for the template variable's query field.
func (values ValuesMap) asQuery() string {
	all := make([]string, 0, len(values))
	for _, v := range values {
		all = append(all, v)
	}
	sort.Strings(all)
	return strings.Join(all, ",")
}
// labelFor returns the label mapped to the given value, falling back
// to the value itself when no label matches.
func (values ValuesMap) labelFor(value string) string {
	for label, candidate := range values {
		if candidate == value {
			return label
		}
	}
	return value
}
// Custom represents a "custom" templated variable.
type Custom struct {
	// Builder is the underlying Grafana SDK representation of the variable.
	Builder sdk.TemplateVar
	// values keeps the label-to-value map so Default can resolve a value's label.
	values ValuesMap
}
// New creates a new "custom" templated variable named (and labelled)
// name, then applies the given configuration options in order.
func New(name string, options ...Option) *Custom {
	c := &Custom{
		Builder: sdk.TemplateVar{
			Name:  name,
			Label: name,
			Type:  "custom",
		},
	}
	for _, applyOption := range options {
		applyOption(c)
	}
	return c
}
// Values sets the possible values for the variable.
//
// Options are appended in sorted label order so the generated dashboard
// JSON is deterministic; the previous implementation ranged over the
// map directly, which yields a random order on every run.
func Values(values ValuesMap) Option {
	return func(custom *Custom) {
		labels := make([]string, 0, len(values))
		for label := range values {
			labels = append(labels, label)
		}
		sort.Strings(labels)
		for _, label := range labels {
			custom.Builder.Options = append(custom.Builder.Options, sdk.Option{
				Text:  label,
				Value: values[label],
			})
		}
		custom.values = values
		custom.Builder.Query = values.asQuery()
	}
}
// Default sets the default value of the variable; the displayed text is
// the label registered for that value (or the value itself if none).
func Default(value string) Option {
	return func(c *Custom) {
		c.Builder.Current = sdk.Current{
			Text:  c.values.labelFor(value),
			Value: value,
		}
	}
}
// Label sets the label of the variable.
func Label(label string) Option {
	return func(c *Custom) {
		c.Builder.Label = label
	}
}
// HideLabel ensures that this variable's label will not be displayed
// (hide mode 1: label only).
func HideLabel() Option {
	return func(c *Custom) {
		c.Builder.Hide = 1
	}
}
// Hide ensures that the variable will not be displayed at all
// (hide mode 2: variable).
func Hide() Option {
	return func(c *Custom) {
		c.Builder.Hide = 2
	}
}
// Multi allows several values to be selected at the same time.
func Multi() Option {
	return func(c *Custom) {
		c.Builder.Multi = true
	}
}
// IncludeAll adds an "All" option (Grafana's special "$__all" value)
// allowing every value to be selected at once.
func IncludeAll() Option {
	return func(c *Custom) {
		c.Builder.IncludeAll = true
		allOption := sdk.Option{
			Text:  "All",
			Value: "$__all",
		}
		c.Builder.Options = append(c.Builder.Options, allOption)
	}
}
// AllValue define the value used when selecting the "All" option.
func AllValue(value string) Option {
return func(custom *Custom) {
custom.Builder.AllValue = value
}
} | variable/custom/custom.go | 0.779238 | 0.408808 | custom.go | starcoder |
package utils
import (
"fmt"
"time"
)
// GetCurrentTimeFullFormat returns the current time formatted with
// millisecond precision ("2006-01-02 15:04:05.999").
func GetCurrentTimeFullFormat() string {
	now := time.Now()
	return TimeFullFormat(now)
}
// GetCurrentTimeFormatToSecond returns the current time formatted to
// second precision ("2006-01-02 15:04:05").
func GetCurrentTimeFormatToSecond() string {
	now := time.Now()
	return TimeFormatToSecond(now)
}
// GetCurrentTimeFormatToDay returns the current date formatted as
// "2006-01-02".
func GetCurrentTimeFormatToDay() string {
	now := time.Now()
	return TimeFormatToDay(now)
}
// get full format for [t]
func TimeFullFormat(t time.Time) string {
return t.Format("2006-01-02 15:04:05.999")
}
// get format to second for [t]
func TimeFormatToSecond(t time.Time) string {
return t.Format("2006-01-02 15:04:05")
}
// get format to day for [t]
func TimeFormatToDay(t time.Time) string {
return t.Format("2006-01-02")
}
// get a Time next second of [t]
func TimeNextSecond(t time.Time) time.Time {
return TimeNextUnit(t , time.Second)
}
// get a Time next minute of [t]
func TimeNextMinute(t time.Time) time.Time {
return TimeNextUnit(t , time.Minute)
}
// get a Time next hour of [t]
func TimeNextHour(t time.Time) time.Time {
return TimeNextUnit(t , time.Hour)
}
// get a Time next day of [t]
func TimeNextDay(t time.Time) time.Time {
return TimeNextUnit(t , time.Hour*24)
}
// get a Time next month of [t]
func TimeNextMonth(t time.Time) time.Time {
month := t.Month()
month++
format := fmt.Sprintf("%d-%d-%d %d:%d:%d.%d",t.Year(),month,t.Day(),t.Hour(),t.Minute(),t.Second(),t.Nanosecond())
loc , _ := time.LoadLocation("Local")
afterTime , _ := time.ParseInLocation("2006-01-02 15:04:05.999" , format , loc)
return afterTime
}
// get a Time next year of [t]
func TimeNextYear(t time.Time) time.Time {
year := t.Year()
year++
format := fmt.Sprintf("%d-%d-%d %d:%d:%d.%d",year,t.Month(),t.Day(),t.Hour(),t.Minute(),t.Second(),t.Nanosecond())
loc , _ := time.LoadLocation("Local")
afterTime , _ := time.ParseInLocation("2006-01-02 15:04:05.999" , format , loc)
return afterTime
}
// get a Time next unit of [t]
func TimeNextUnit(t time.Time , unit time.Duration) time.Time {
return TimeAfterDate(t , unit , 1)
}
// get a Time after [num] count unit of [t]
func TimeAfterDate(t time.Time, unit time.Duration, num int64) time.Time {
return t.Add(unit*time.Duration(num))
} | time_utils.go | 0.716715 | 0.497192 | time_utils.go | starcoder |
package expr
import (
"fmt"
"github.com/lqiz/expr/node"
"go/ast"
"go/token"
"strings"
)
// BinaryBoolExpr evaluates logical operators (&&, ||) over two boolean nodes.
type BinaryBoolExpr struct{}

// BinaryStrExpr evaluates comparison operators over two string nodes.
type BinaryStrExpr struct{}

// BinaryIntExpr evaluates comparison operators over two integer nodes.
type BinaryIntExpr struct{}

// CallExpr represents a call to one of the built-in expression functions.
type CallExpr struct {
	fn string // one of "in_array", "ver_compare"
	args []ast.Expr
}
// Invoke applies the logical operator op (token.LAND or token.LOR) to
// the two boolean operands. A bad node is returned when either operand
// is not a BoolNode or when the operator is unsupported.
func (b BinaryBoolExpr) Invoke(x, y node.ValueNode, op token.Token) node.ValueNode {
	left, leftOK := x.(node.BoolNode)
	right, rightOK := y.(node.BoolNode)
	if !leftOK || !rightOK {
		return node.NewBadNode(x.GetTextValue() + y.GetTextValue())
	}
	switch op {
	case token.LAND:
		return node.NewBoolNode(left.True && right.True)
	case token.LOR:
		return node.NewBoolNode(left.True || right.True)
	default:
		return node.NewBadNode(fmt.Sprintf("unsupported binary operator: %s", op.String()))
	}
}
// Invoke applies a comparison operator (==, <, >, >=, <=) to two string
// operands. A bad node is returned when either operand is not a StrNode
// or when the operator is unsupported.
func (b BinaryStrExpr) Invoke(x, y node.ValueNode, op token.Token) node.ValueNode {
	left, leftOK := x.(node.StrNode)
	right, rightOK := y.(node.StrNode)
	if !leftOK || !rightOK {
		return node.NewBadNode("x: " + x.GetTextValue() + "y: " + y.GetTextValue())
	}
	// Compare once; the result is -1, 0 or +1.
	cmp := strings.Compare(left.GetValue(), right.GetValue())
	switch op {
	case token.EQL: // ==
		return node.NewBoolNode(cmp == 0)
	case token.LSS: // <
		return node.NewBoolNode(cmp == -1)
	case token.GTR: // >
		return node.NewBoolNode(cmp == +1)
	case token.GEQ: // >=
		return node.NewBoolNode(cmp >= 0)
	case token.LEQ: // <=
		return node.NewBoolNode(cmp <= 0)
	}
	return node.NewBadNode(fmt.Sprintf("unsupported binary operator: %s", op.String()))
}
// Invoke applies a comparison operator (==, <, >, >=, <=) to two
// integer operands. A bad node is returned when either operand is not
// an IntNode or when the operator is unsupported.
//
// Uses node.NewBoolNode consistently; the original mixed raw
// node.BoolNode{} literals with the constructor.
func (b BinaryIntExpr) Invoke(x, y node.ValueNode, op token.Token) node.ValueNode {
	xs, xok := x.(node.IntNode)
	ys, yok := y.(node.IntNode)
	if !xok || !yok {
		return node.NewBadNode(x.GetTextValue() + y.GetTextValue())
	}
	switch op {
	case token.EQL: // ==
		return node.NewBoolNode(xs.Value == ys.Value)
	case token.LSS: // <
		return node.NewBoolNode(xs.Value < ys.Value)
	case token.GTR: // >
		return node.NewBoolNode(xs.Value > ys.Value)
	case token.GEQ: // >=
		return node.NewBoolNode(xs.Value >= ys.Value)
	case token.LEQ: // <=
		return node.NewBoolNode(xs.Value <= ys.Value)
	}
	return node.NewBadNode(fmt.Sprintf("unsupported binary operator: %s", op.String()))
}
func (c CallExpr) Invoke(mem map[string]node.ValueNode) node.ValueNode {
switch c.fn {
case "in_array":
parm := eval(mem, c.args[0])
if parm.GetType() == node.TypeBad {
return parm
}
vRange, ok := c.args[1].(*ast.CompositeLit)
if !ok {
return node.NewBadNode("func in_array 2ed params is not a composite lit")
}
eltNodes := make([]node.ValueNode, 0, len(vRange.Elts))
for _, p := range vRange.Elts {
elt := eval(mem, p)
eltNodes = append(eltNodes, elt)
}
has := false
for _, v := range eltNodes {
if v.GetType() == parm.GetType() && v.GetTextValue() == parm.GetTextValue() {
has = true
}
}
return node.NewBoolNode(has)
case "ver_compare":
if len(c.args) != 3 {
return node.NewBadNode("func ver_compare doesn't have enough params")
}
args := make([]string, 0, 3)
for _, v := range c.args {
arg := eval(mem, v)
if arg.GetType() != node.TypeStr {
return node.NewBadNode("func ver_compare params type error")
}
args = append(args, arg.GetTextValue())
}
ret := VersionCompare(args[0], args[1], args[2])
return node.NewBoolNode(ret)
}
panic(fmt.Sprintf("unsupported function call: %s", c.fn))
} | function.go | 0.560253 | 0.456107 | function.go | starcoder |
package util
import (
"github.com/foxcapades/go-bytify/v0/bytify"
"github.com/foxcapades/tally-go/v1/tally"
)
// ClampF32 clamps value to the closed interval [0, 1]. NaN passes
// through unchanged because it fails both comparisons.
func ClampF32(value float32) float32 {
	switch {
	case value < 0:
		return 0
	case value > 1:
		return 1
	default:
		return value
	}
}
// TruncateF32 drops all digits of value beyond the given number of
// decimal places (truncation, not rounding).
func TruncateF32(value float32, precision int) float32 {
	scale := float32(IPow(10, precision))
	truncated := int64(value * scale)
	return float32(truncated) / scale
}
// ParseFloat32 is a simplified float parser suitable for the specific needs of
// this library.
//
// Only unsigned decimal forms such as "12" or "12.5" are accepted: no
// sign, exponent, or surrounding whitespace. On failure a typed error
// carrying the offending input is returned along with a zero value.
func ParseFloat32(v []byte) (float32, error) {
	ln := len(v)
	la := ln - 1
	if ln == 0 {
		return 0, ErrEmptyNumberVal.WithValue(string(v), 0)
	}
	count := uint8(0)  // digits consumed so far
	stage := uint32(0) // digits accumulated as an integer, dot ignored
	dpPos := -1        // index of the decimal point, -1 when absent
	// Scan right-to-left so each digit's magnitude is 10^count.
	for ; la >= 0; la-- {
		// We've hit the decimal marker, record its position and move on.
		if v[la] == '.' {
			// A second '.' means the input is malformed.
			if dpPos > -1 {
				return 0, ErrInvalidFloatFmt.WithValue(string(v), la)
			}
			dpPos = la
			continue
		}
		d, err := DigitToU8(v[la])
		if err != nil {
			return 0, ErrInvalidUint8Fmt.WithValue(string(v), int(count))
		}
		stage += uint32(d) * uint32(U8Pow(10, count))
		count++
	}
	// Scale down by 10^(digits right of the dot) to restore the fraction.
	// NOTE(review): a leading dot (e.g. ".5") leaves div at 1 because of
	// the dpPos > 0 test, so ".5" parses as 5 — confirm whether
	// leading-dot inputs are expected here.
	div := float32(1.0)
	if dpPos > 0 {
		div = float32(IPow(10, ln-1-dpPos))
	}
	return float32(stage) / div, nil
}
// PrecisionPercentToBytes renders the fraction per (e.g. 0.125) as a
// percentage string ("12.5") into buf and returns the number of bytes
// written. Negative inputs are treated as their absolute value; values
// at or below minPer render as "0" and values >= 1 render as "100".
// At most one decimal digit is kept.
// NOTE(review): no bounds checks are performed — buf must hold at least
// 4 bytes for the longest output ("99.9"); confirm caller sizing.
func PrecisionPercentToBytes(per float32, buf []byte) (written uint8) {
	if per < 0 {
		per = -per
	}
	if per <= minPer {
		buf[0] = '0'
		return 1
	} else if per >= 1 {
		buf[0] = '1'
		buf[1] = '0'
		buf[2] = '0'
		return 3
	}
	// Scale to a percentage and round via BluntRound before splitting
	// into integer and tenths parts.
	big := BluntRound(per * 100)
	tmp := uint8(big)
	written = bytify.Uint8ToBytes(tmp, buf)
	// Emit ".d" only when there is a non-zero fractional remainder.
	if t := big - float32(tmp); t == 0 {
		return
	} else {
		buf[written] = '.'
		written++
		buf[written] = uint8(t*10) + '0'
		written++
	}
	return
}
// AppendF32 writes the absolute value of val into buf at the position
// tracked by off, advancing off by one for each byte written. Values
// render as "0", "1" (anything >= 1), or "0.d"/"0.dd" with at most two
// decimal digits and a trailing zero omitted.
// NOTE(review): every value >= 1 is rendered as "1" — this appears to
// assume inputs lie in [0, 1]; confirm with callers.
func AppendF32(val float32, buf []byte, off *tally.UTally8) {
	val = AbsF32(val)
	if val == 0 {
		buf[off.Inc()] = '0'
	} else if val >= 1 {
		buf[off.Inc()] = '1'
	} else {
		buf[off.Inc()] = '0'
		buf[off.Inc()] = '.'
		// Two decimal digits: floor/10 is the tenths, floor%10 the hundredths.
		floor := int32(val * 100)
		buf[off.Inc()] = byte(floor/10) + '0'
		if mod := floor % 10; mod > 0 {
			buf[off.Inc()] = byte(mod) + '0'
		}
	}
}
// F32StringSize returns the number of characters AppendF32 would emit
// for val: 1 for zero or any magnitude >= 1, otherwise the length of
// the "0.d" / "0.dd" rendering.
func F32StringSize(val float32) uint8 {
	v := AbsF32(val)
	if v == 0 || v >= 1 {
		return 1
	}
	// "0." plus the tenths digit, plus one more character when the
	// hundredths digit is non-zero.
	if int(v*100)%10 != 0 {
		return 4
	}
	return 3
}
// PercentStringSize returns the number of characters that rendering per
// as a percentage (see PrecisionPercentToBytes) plus a trailing '%'
// would occupy: 2 for "0%", 4 for "100%", otherwise the digit count of
// the rounded percentage plus '%' and, when present, ".d".
func PercentStringSize(per float32) (size uint8) {
	if per < 0 {
		per = -per
	}
	if per <= minPer {
		return 2
	} else if per >= 1 {
		return 4
	}
	rounded := BluntRound(per * 100)
	floor := uint8(rounded)
	size = bytify.Uint8StringSize(floor) + 1 // (+1 for '%' character)
	// If rounded - floor == 0 then we have no decimal places.
	if t := rounded - float32(floor); t == 0 {
		return size
	}
	// If we do have decimal places, add 2 to account for the leading `.d`.
	return size + 2
}
package toms
import (
"github.com/dreading/gospecfunc/machine"
"github.com/dreading/gospecfunc/utils"
"math"
)
// TRAN02 calculates the transport integral of order 2:
//   ∫ 0 to x { t^2 exp(t)/[exp(t)-1]^2 } dt
// using Chebyshev expansions with coefficients given to 20 decimal
// places. Negative arguments return 0; for large x the result tends to
// the limit VALINF.
//
// Cleanups vs. the original: the dead store `T = XK / ONEHUN` (and the
// then-unused ONEHUN and THREE constants) are removed, and the O(n)
// loop that merely counted up to NUMEXP is replaced by a direct
// conversion.
func TRAN02(XVALUE float64) float64 {
	const (
		ZERO   = 0.0e0
		HALF   = 0.5e0
		ONE    = 1.0e0
		FOUR   = 4.0e0
		EIGHT  = 8.0e0
		NUMJN  = 2
		RNUMJN = 2.0e0
		// Value of the integral at x = +infinity.
		VALINF = 0.32898681336964528729e1
	)
	var K1, K2, NTERMS, NUMEXP int
	var RET, RK, SUMEXP, SUM2, T, X, XLOW1, XHIGH1, XHIGH2, XHIGH3, XK, XK1 float64
	// Chebyshev coefficients for the small-argument expansion.
	var ATRAN = []float64{
		1.67176044643453850301e0,
		-0.14773535994679448986e0,
		0.1482138199469363384e-1,
		-0.141953303263056126e-2,
		0.13065413244157083e-3,
		-0.1171557958675790e-4,
		0.103334984457557e-5,
		-0.9019113042227e-7,
		0.781771698331e-8,
		-0.67445656840e-9,
		0.5799463945e-10,
		-0.497476185e-11,
		0.42596097e-12,
		-0.3642189e-13,
		0.311086e-14,
		-0.26547e-15,
		0.2264e-16,
		-0.193e-17,
		0.16e-18,
		-0.1e-19}
	X = XVALUE
	// The integral is defined for non-negative arguments only.
	if X < ZERO {
		return ZERO
	}
	// Machine-dependent cutoffs.
	XK = machine.D1MACH[3]
	if X <= FOUR {
		NTERMS = 19
		XLOW1 = math.Sqrt(EIGHT * XK)
	} else {
		XHIGH1 = -math.Log(machine.D1MACH[4])
		XHIGH2 = ONE / (HALF * XK)
		XHIGH3 = math.Log(XK)
	}
	// Code for x <= 4.0: leading term below XLOW1, Chebyshev sum otherwise.
	if X <= FOUR {
		if X < XLOW1 {
			RET = math.Pow(X, NUMJN-1) / (RNUMJN - ONE)
		} else {
			T = (((X * X) / EIGHT) - HALF) - HALF
			RET = math.Pow(X, NUMJN-1) * utils.Cheval(NTERMS, ATRAN, T)
		}
	} else {
		// Code for x > 4.0: VALINF minus an asymptotic exponential tail.
		if X > XHIGH2 {
			SUMEXP = ONE
		} else {
			if X <= XHIGH1 {
				NUMEXP = int(XHIGH1/X) + 1
				T = math.Exp(-X)
			} else {
				NUMEXP = 1
				T = ONE
			}
			// RK starts at the number of tail terms (the original derived
			// this with a pointless O(NUMEXP) counting loop).
			RK = float64(NUMEXP)
			SUMEXP = ZERO
			for K1 = 1; K1 <= NUMEXP; K1++ {
				SUM2 = ONE
				XK = ONE / (RK * X)
				XK1 = ONE
				// Horner evaluation of the order-NUMJN polynomial in 1/(RK*X).
				for K2 = 1; K2 <= NUMJN; K2++ {
					SUM2 = SUM2*XK1*XK + ONE
					XK1 = XK1 + ONE
				}
				SUMEXP = SUMEXP*T + SUM2
				RK = RK - ONE
			}
		}
		// Work in logs to avoid overflow before subtracting from the limit.
		T = RNUMJN*math.Log(X) - X + math.Log(SUMEXP)
		if T < XHIGH3 {
			RET = VALINF
		} else {
			RET = VALINF - math.Exp(T)
		}
	}
	return RET
}
// TRAN03 calculates the transport integral of order 3:
//   ∫ 0 to x { t^3 exp(t)/[exp(t)-1]^2 } dt
// using Chebyshev expansions with coefficients given to 20 decimal
// places. Negative arguments return 0; for large x the result tends to
// the limit VALINF.
//
// Cleanups vs. the original: the dead store `T = XK / ONEHUN` (and the
// then-unused ONEHUN constant) are removed, and the O(n) loop that
// merely counted up to NUMEXP is replaced by a direct conversion.
func TRAN03(XVALUE float64) float64 {
	const (
		ZERO   = 0.0e0
		HALF   = 0.5e0
		ONE    = 1.0e0
		FOUR   = 4.0e0
		EIGHT  = 8.0e0
		NUMJN  = 3
		RNUMJN = 3.0e0
		// Value of the integral at x = +infinity.
		VALINF = 0.72123414189575657124e1
	)
	var K1, K2, NTERMS, NUMEXP int
	var RET, RK, SUMEXP, SUM2, T, X, XHIGH1, XHIGH2, XHIGH3, XK, XK1, XLOW1, XLOW2 float64
	// Chebyshev coefficients for the small-argument expansion.
	var ATRAN = []float64{
		0.76201254324387200657e0,
		-0.10567438770505853250e0,
		0.1197780848196578097e-1,
		-0.121440152036983073e-2,
		0.11550997693928547e-3,
		-0.1058159921244229e-4,
		0.94746633853018e-6,
		-0.8362212128581e-7,
		0.731090992775e-8,
		-0.63505947788e-9,
		0.5491182819e-10,
		-0.473213954e-11,
		0.40676948e-12,
		-0.3489706e-13,
		0.298923e-14,
		-0.25574e-15,
		0.2186e-16,
		-0.187e-17,
		0.16e-18,
		-0.1e-19}
	X = XVALUE
	// The integral is defined for non-negative arguments only.
	if X < ZERO {
		return ZERO
	}
	// Machine-dependent cutoffs.
	XK = machine.D1MACH[3]
	if X <= FOUR {
		NTERMS = 19
		XLOW1 = math.Sqrt(EIGHT * XK)
		XLOW2 = math.Sqrt(machine.D1MACH[1] / HALF)
	} else {
		XHIGH1 = -math.Log(machine.D1MACH[4])
		XHIGH2 = RNUMJN / XK
		XHIGH3 = math.Log(XK)
	}
	// Code for x <= 4.0: underflow guard, then leading term or Chebyshev sum.
	if X <= FOUR {
		if X < XLOW2 {
			RET = ZERO
		} else {
			if X < XLOW1 {
				RET = math.Pow(X, NUMJN-1) / (RNUMJN - ONE)
			} else {
				T = (((X * X) / EIGHT) - HALF) - HALF
				RET = math.Pow(X, NUMJN-1) * utils.Cheval(NTERMS, ATRAN, T)
			}
		}
	} else {
		// Code for x > 4.0: VALINF minus an asymptotic exponential tail.
		if X > XHIGH2 {
			SUMEXP = ONE
		} else {
			if X <= XHIGH1 {
				NUMEXP = int(XHIGH1/X) + 1
				T = math.Exp(-X)
			} else {
				NUMEXP = 1
				T = ONE
			}
			// RK starts at the number of tail terms (the original derived
			// this with a pointless O(NUMEXP) counting loop).
			RK = float64(NUMEXP)
			SUMEXP = ZERO
			for K1 = 1; K1 <= NUMEXP; K1++ {
				SUM2 = ONE
				XK = ONE / (RK * X)
				XK1 = ONE
				// Horner evaluation of the order-NUMJN polynomial in 1/(RK*X).
				for K2 = 1; K2 <= NUMJN; K2++ {
					SUM2 = SUM2*XK1*XK + ONE
					XK1 = XK1 + ONE
				}
				SUMEXP = SUMEXP*T + SUM2
				RK = RK - ONE
			}
		}
		// Work in logs to avoid overflow before subtracting from the limit.
		T = RNUMJN*math.Log(X) - X + math.Log(SUMEXP)
		if T < XHIGH3 {
			RET = VALINF
		} else {
			RET = VALINF - math.Exp(T)
		}
	}
	return RET
}
// TRAN04 calculates the transport integral of order 4:
//   ∫ 0 to x { t^4 exp(t)/[exp(t)-1]^2 } dt
// using Chebyshev expansions with coefficients given to 20 decimal
// places. Negative arguments return 0; for large x the result tends to
// the limit VALINF.
//
// Cleanups vs. the original: the dead store `T = XK / ONEHUN` (and the
// then-unused ONEHUN constant) are removed, and the O(n) loop that
// merely counted up to NUMEXP is replaced by a direct conversion.
func TRAN04(XVALUE float64) float64 {
	const (
		ZERO   = 0.0e0
		HALF   = 0.5e0
		ONE    = 1.0e0
		FOUR   = 4.0e0
		EIGHT  = 8.0e0
		NUMJN  = 4
		RNUMJN = 4.0e0
		// Value of the integral at x = +infinity.
		VALINF = 0.25975757609067316596e2
	)
	var K1, K2, NTERMS, NUMEXP int
	var RET, RK, SUMEXP, SUM2, T, X, XHIGH1, XHIGH2, XHIGH3, XK, XK1, XLOW1, XLOW2 float64
	// Chebyshev coefficients for the small-argument expansion.
	var ATRAN = []float64{
		0.48075709946151105786e0,
		-0.8175378810321083956e-1,
		0.1002700665975162973e-1,
		-0.105993393598201507e-2,
		0.10345062450304053e-3,
		-0.964427054858991e-5,
		0.87455444085147e-6,
		-0.7793212079811e-7,
		0.686498861410e-8,
		-0.59995710764e-9,
		0.5213662413e-10,
		-0.451183819e-11,
		0.38921592e-12,
		-0.3349360e-13,
		0.287667e-14,
		-0.24668e-15,
		0.2113e-16,
		-0.181e-17,
		0.15e-18,
		-0.1e-19}
	X = XVALUE
	// The integral is defined for non-negative arguments only.
	if X < ZERO {
		return ZERO
	}
	// Machine-dependent cutoffs.
	XK = machine.D1MACH[3]
	if X <= FOUR {
		NTERMS = 19
		XLOW1 = math.Sqrt(EIGHT * XK)
		XK1 = RNUMJN - ONE
		XLOW2 = math.Pow(XK1*machine.D1MACH[1], ONE/XK1)
	} else {
		XHIGH1 = -math.Log(machine.D1MACH[4])
		XHIGH2 = RNUMJN / XK
		XHIGH3 = math.Log(XK)
	}
	// Code for x <= 4.0: underflow guard, then leading term or Chebyshev sum.
	if X <= FOUR {
		if X < XLOW2 {
			RET = ZERO
		} else {
			if X < XLOW1 {
				RET = math.Pow(X, NUMJN-1) / (RNUMJN - ONE)
			} else {
				T = (((X * X) / EIGHT) - HALF) - HALF
				RET = math.Pow(X, NUMJN-1) * utils.Cheval(NTERMS, ATRAN, T)
			}
		}
	} else {
		// Code for x > 4.0: VALINF minus an asymptotic exponential tail.
		if X > XHIGH2 {
			SUMEXP = ONE
		} else {
			if X <= XHIGH1 {
				NUMEXP = int(XHIGH1/X) + 1
				T = math.Exp(-X)
			} else {
				NUMEXP = 1
				T = ONE
			}
			// RK starts at the number of tail terms (the original derived
			// this with a pointless O(NUMEXP) counting loop).
			RK = float64(NUMEXP)
			SUMEXP = ZERO
			for K1 = 1; K1 <= NUMEXP; K1++ {
				SUM2 = ONE
				XK = ONE / (RK * X)
				XK1 = ONE
				// Horner evaluation of the order-NUMJN polynomial in 1/(RK*X).
				for K2 = 1; K2 <= NUMJN; K2++ {
					SUM2 = SUM2*XK1*XK + ONE
					XK1 = XK1 + ONE
				}
				SUMEXP = SUMEXP*T + SUM2
				RK = RK - ONE
			}
		}
		// Work in logs to avoid overflow before subtracting from the limit.
		T = RNUMJN*math.Log(X) - X + math.Log(SUMEXP)
		if T < XHIGH3 {
			RET = VALINF
		} else {
			RET = VALINF - math.Exp(T)
		}
	}
	return RET
}
// TRAN05 calculates the transport integral of order 5:
//   ∫ 0 to x { t^5 exp(t)/[exp(t)-1]^2 } dt
// using Chebyshev expansions with coefficients given to 20 decimal
// places. Negative arguments return 0; for large x the result tends to
// the limit VALINF.
//
// Cleanups vs. the original: the dead store `T = XK / ONEHUN` (and the
// then-unused ONEHUN constant) are removed, and the O(n) loop that
// merely counted up to NUMEXP is replaced by a direct conversion.
func TRAN05(XVALUE float64) float64 {
	const (
		ZERO   = 0.0e0
		HALF   = 0.5e0
		ONE    = 1.0e0
		FOUR   = 4.0e0
		EIGHT  = 8.0e0
		NUMJN  = 5
		RNUMJN = 5.0e0
		// Value of the integral at x = +infinity.
		VALINF = 0.12443133061720439116e3
	)
	var K1, K2, NTERMS, NUMEXP int
	var RET, RK, SUMEXP, SUM2, T, X, XHIGH1, XHIGH2, XHIGH3, XK, XK1, XLOW1, XLOW2 float64
	// Chebyshev coefficients for the small-argument expansion.
	var ATRAN = []float64{
		0.34777777713391078928e0,
		-0.6645698897605042801e-1,
		0.861107265688330882e-2,
		-0.93966822237555384e-3,
		0.9363248060815134e-4,
		-0.885713193408328e-5,
		0.81191498914503e-6,
		-0.7295765423277e-7,
		0.646971455045e-8,
		-0.56849028255e-9,
		0.4962559787e-10,
		-0.431093996e-11,
		0.37310094e-12,
		-0.3219769e-13,
		0.277220e-14,
		-0.23824e-15,
		0.2044e-16,
		-0.175e-17,
		0.15e-18,
		-0.1e-19}
	X = XVALUE
	// The integral is defined for non-negative arguments only.
	if X < ZERO {
		return ZERO
	}
	// Machine-dependent cutoffs.
	XK = machine.D1MACH[3]
	if X <= FOUR {
		NTERMS = 19
		XLOW1 = math.Sqrt(EIGHT * XK)
		XK1 = RNUMJN - ONE
		XLOW2 = math.Pow(XK1*machine.D1MACH[1], ONE/XK1)
	} else {
		XHIGH1 = -math.Log(machine.D1MACH[4])
		XHIGH2 = RNUMJN / XK
		XHIGH3 = math.Log(XK)
	}
	// Code for x <= 4.0: underflow guard, then leading term or Chebyshev sum.
	if X <= FOUR {
		if X < XLOW2 {
			RET = ZERO
		} else {
			if X < XLOW1 {
				RET = math.Pow(X, NUMJN-1) / (RNUMJN - ONE)
			} else {
				T = (((X * X) / EIGHT) - HALF) - HALF
				RET = math.Pow(X, NUMJN-1) * utils.Cheval(NTERMS, ATRAN, T)
			}
		}
	} else {
		// Code for x > 4.0: VALINF minus an asymptotic exponential tail.
		if X > XHIGH2 {
			SUMEXP = ONE
		} else {
			if X <= XHIGH1 {
				NUMEXP = int(XHIGH1/X) + 1
				T = math.Exp(-X)
			} else {
				NUMEXP = 1
				T = ONE
			}
			// RK starts at the number of tail terms (the original derived
			// this with a pointless O(NUMEXP) counting loop).
			RK = float64(NUMEXP)
			SUMEXP = ZERO
			for K1 = 1; K1 <= NUMEXP; K1++ {
				SUM2 = ONE
				XK = ONE / (RK * X)
				XK1 = ONE
				// Horner evaluation of the order-NUMJN polynomial in 1/(RK*X).
				for K2 = 1; K2 <= NUMJN; K2++ {
					SUM2 = SUM2*XK1*XK + ONE
					XK1 = XK1 + ONE
				}
				SUMEXP = SUMEXP*T + SUM2
				RK = RK - ONE
			}
		}
		// Work in logs to avoid overflow before subtracting from the limit.
		T = RNUMJN*math.Log(X) - X + math.Log(SUMEXP)
		if T < XHIGH3 {
			RET = VALINF
		} else {
			RET = VALINF - math.Exp(T)
		}
	}
	return RET
}
// TRAN06 calculates the transport integral of order 6:
//   ∫ 0 to x { t^6 exp(t)/[exp(t)-1]^2 } dt
// using Chebyshev expansions with coefficients given to 20 decimal
// places. Negative arguments return 0; for large x the result tends to
// the limit VALINF.
//
// Cleanups vs. the original: the dead store `T = XK / ONEHUN` (and the
// then-unused ONEHUN constant) are removed, and the O(n) loop that
// merely counted up to NUMEXP is replaced by a direct conversion.
func TRAN06(XVALUE float64) float64 {
	const (
		ZERO   = 0.0e0
		HALF   = 0.5e0
		ONE    = 1.0e0
		FOUR   = 4.0e0
		EIGHT  = 8.0e0
		NUMJN  = 6
		RNUMJN = 6.0e0
		// Value of the integral at x = +infinity.
		VALINF = 0.73248700462880338059e3
	)
	var K1, K2, NTERMS, NUMEXP int
	var RET, RK, SUMEXP, SUM2, T, X, XHIGH1, XHIGH2, XHIGH3, XK, XK1, XLOW1, XLOW2 float64
	// Chebyshev coefficients for the small-argument expansion.
	var ATRAN = []float64{
		0.27127335397840008227e0,
		-0.5588610553191453393e-1,
		0.753919513290083056e-2,
		-0.84351138579211219e-3,
		0.8549098079676702e-4,
		-0.818715493293098e-5,
		0.75754240427986e-6,
		-0.6857306541831e-7,
		0.611700376031e-8,
		-0.54012707024e-9,
		0.4734306435e-10,
		-0.412701055e-11,
		0.35825603e-12,
		-0.3099752e-13,
		0.267501e-14,
		-0.23036e-15,
		0.1980e-16,
		-0.170e-17,
		0.15e-18,
		-0.1e-19}
	X = XVALUE
	// The integral is defined for non-negative arguments only.
	if X < ZERO {
		return ZERO
	}
	// Machine-dependent cutoffs.
	XK = machine.D1MACH[3]
	if X <= FOUR {
		NTERMS = 19
		XLOW1 = math.Sqrt(EIGHT * XK)
		XK1 = RNUMJN - ONE
		XLOW2 = math.Pow(XK1*machine.D1MACH[1], ONE/XK1)
	} else {
		XHIGH1 = -math.Log(machine.D1MACH[4])
		XHIGH2 = RNUMJN / XK
		XHIGH3 = math.Log(XK)
	}
	// Code for x <= 4.0: underflow guard, then leading term or Chebyshev sum.
	if X <= FOUR {
		if X < XLOW2 {
			RET = ZERO
		} else {
			if X < XLOW1 {
				RET = math.Pow(X, NUMJN-1) / (RNUMJN - ONE)
			} else {
				T = (((X * X) / EIGHT) - HALF) - HALF
				RET = math.Pow(X, NUMJN-1) * utils.Cheval(NTERMS, ATRAN, T)
			}
		}
	} else {
		// Code for x > 4.0: VALINF minus an asymptotic exponential tail.
		if X > XHIGH2 {
			SUMEXP = ONE
		} else {
			if X <= XHIGH1 {
				NUMEXP = int(XHIGH1/X) + 1
				T = math.Exp(-X)
			} else {
				NUMEXP = 1
				T = ONE
			}
			// RK starts at the number of tail terms (the original derived
			// this with a pointless O(NUMEXP) counting loop).
			RK = float64(NUMEXP)
			SUMEXP = ZERO
			for K1 = 1; K1 <= NUMEXP; K1++ {
				SUM2 = ONE
				XK = ONE / (RK * X)
				XK1 = ONE
				// Horner evaluation of the order-NUMJN polynomial in 1/(RK*X).
				for K2 = 1; K2 <= NUMJN; K2++ {
					SUM2 = SUM2*XK1*XK + ONE
					XK1 = XK1 + ONE
				}
				SUMEXP = SUMEXP*T + SUM2
				RK = RK - ONE
			}
		}
		// Work in logs to avoid overflow before subtracting from the limit.
		T = RNUMJN*math.Log(X) - X + math.Log(SUMEXP)
		if T < XHIGH3 {
			RET = VALINF
		} else {
			RET = VALINF - math.Exp(T)
		}
	}
	return RET
}
// TRAN07 calculates the transport integral of order 7:
//   ∫ 0 to x { t^7 exp(t)/[exp(t)-1]^2 } dt
// using Chebyshev expansions with coefficients given to 20 decimal
// places. Negative arguments return 0; for large x the result tends to
// the limit VALINF.
//
// Cleanups vs. the original: the dead store `T = XK / ONEHUN` (and the
// then-unused ONEHUN constant) are removed, and the O(n) loop that
// merely counted up to NUMEXP is replaced by a direct conversion.
func TRAN07(XVALUE float64) float64 {
	const (
		ZERO   = 0.0e0
		HALF   = 0.5e0
		ONE    = 1.0e0
		FOUR   = 4.0e0
		EIGHT  = 8.0e0
		NUMJN  = 7
		RNUMJN = 7.0e0
		// Value of the integral at x = +infinity.
		VALINF = 0.50820803580048910473e4
	)
	var K1, K2, NTERMS, NUMEXP int
	var RET, RK, SUMEXP, SUM2, T, X, XHIGH1, XHIGH2, XHIGH3, XK, XK1, XLOW1, XLOW2 float64
	// Chebyshev coefficients for the small-argument expansion.
	var ATRAN = []float64{
		0.22189250734010404423e0,
		-0.4816751061177993694e-1,
		0.670092448103153629e-2,
		-0.76495183443082557e-3,
		0.7863485592348690e-4,
		-0.761025180887504e-5,
		0.70991696299917e-6,
		-0.6468025624903e-7,
		0.580039233960e-8,
		-0.51443370149e-9,
		0.4525944183e-10,
		-0.395800363e-11,
		0.34453785e-12,
		-0.2988292e-13,
		0.258434e-14,
		-0.22297e-15,
		0.1920e-16,
		-0.165e-17,
		0.14e-18,
		-0.1e-19}
	X = XVALUE
	// The integral is defined for non-negative arguments only.
	if X < ZERO {
		return ZERO
	}
	// Machine-dependent cutoffs.
	XK = machine.D1MACH[3]
	if X <= FOUR {
		NTERMS = 19
		XLOW1 = math.Sqrt(EIGHT * XK)
		XK1 = RNUMJN - ONE
		XLOW2 = math.Pow(XK1*machine.D1MACH[1], ONE/XK1)
	} else {
		XHIGH1 = -math.Log(machine.D1MACH[4])
		XHIGH2 = RNUMJN / XK
		XHIGH3 = math.Log(XK)
	}
	// Code for x <= 4.0: underflow guard, then leading term or Chebyshev sum.
	if X <= FOUR {
		if X < XLOW2 {
			RET = ZERO
		} else {
			if X < XLOW1 {
				RET = math.Pow(X, NUMJN-1) / (RNUMJN - ONE)
			} else {
				T = (((X * X) / EIGHT) - HALF) - HALF
				RET = math.Pow(X, NUMJN-1) * utils.Cheval(NTERMS, ATRAN, T)
			}
		}
	} else {
		// Code for x > 4.0: VALINF minus an asymptotic exponential tail.
		if X > XHIGH2 {
			SUMEXP = ONE
		} else {
			if X <= XHIGH1 {
				NUMEXP = int(XHIGH1/X) + 1
				T = math.Exp(-X)
			} else {
				NUMEXP = 1
				T = ONE
			}
			// RK starts at the number of tail terms (the original derived
			// this with a pointless O(NUMEXP) counting loop).
			RK = float64(NUMEXP)
			SUMEXP = ZERO
			for K1 = 1; K1 <= NUMEXP; K1++ {
				SUM2 = ONE
				XK = ONE / (RK * X)
				XK1 = ONE
				// Horner evaluation of the order-NUMJN polynomial in 1/(RK*X).
				for K2 = 1; K2 <= NUMJN; K2++ {
					SUM2 = SUM2*XK1*XK + ONE
					XK1 = XK1 + ONE
				}
				SUMEXP = SUMEXP*T + SUM2
				RK = RK - ONE
			}
		}
		// Work in logs to avoid overflow before subtracting from the limit.
		T = RNUMJN*math.Log(X) - X + math.Log(SUMEXP)
		if T < XHIGH3 {
			RET = VALINF
		} else {
			RET = VALINF - math.Exp(T)
		}
	}
	return RET
}
// TRAN08 calculates the transport integral of order 8:
//   ∫ 0 to x { t^8 exp(t)/[exp(t)-1]^2 } dt
// using Chebyshev expansions with coefficients given to 20 decimal
// places. Negative arguments return 0; for large x the result tends to
// the limit VALINF.
//
// Cleanups vs. the original: the dead store `T = XK / ONEHUN` (and the
// then-unused ONEHUN constant) are removed, and the O(n) loop that
// merely counted up to NUMEXP is replaced by a direct conversion.
func TRAN08(XVALUE float64) float64 {
	const (
		ZERO   = 0.0e0
		HALF   = 0.5e0
		ONE    = 1.0e0
		FOUR   = 4.0e0
		EIGHT  = 8.0e0
		NUMJN  = 8
		RNUMJN = 8.0e0
		// Value of the integral at x = +infinity.
		VALINF = 0.40484399001901115764e5
	)
	var K1, K2, NTERMS, NUMEXP int
	var RET, RK, SUMEXP, SUM2, T, X, XHIGH1, XHIGH2, XHIGH3, XK, XK1, XLOW1, XLOW2 float64
	// Chebyshev coefficients for the small-argument expansion.
	var ATRAN = []float64{
		0.18750695774043719233e0,
		-0.4229527646093673337e-1,
		0.602814856929065592e-2,
		-0.69961054811814776e-3,
		0.7278482421298789e-4,
		-0.710846250050067e-5,
		0.66786706890115e-6,
		-0.6120157501844e-7,
		0.551465264474e-8,
		-0.49105307052e-9,
		0.4335000869e-10,
		-0.380218700e-11,
		0.33182369e-12,
		-0.2884512e-13,
		0.249958e-14,
		-0.21605e-15,
		0.1863e-16,
		-0.160e-17,
		0.14e-18,
		-0.1e-19}
	X = XVALUE
	// The integral is defined for non-negative arguments only.
	if X < ZERO {
		return ZERO
	}
	// Machine-dependent cutoffs.
	XK = machine.D1MACH[3]
	if X <= FOUR {
		NTERMS = 19
		XLOW1 = math.Sqrt(EIGHT * XK)
		XK1 = RNUMJN - ONE
		XLOW2 = math.Pow(XK1*machine.D1MACH[1], ONE/XK1)
	} else {
		XHIGH1 = -math.Log(machine.D1MACH[4])
		XHIGH2 = RNUMJN / XK
		XHIGH3 = math.Log(XK)
	}
	// Code for x <= 4.0: underflow guard, then leading term or Chebyshev sum.
	if X <= FOUR {
		if X < XLOW2 {
			RET = ZERO
		} else {
			if X < XLOW1 {
				RET = math.Pow(X, NUMJN-1) / (RNUMJN - ONE)
			} else {
				T = (((X * X) / EIGHT) - HALF) - HALF
				RET = math.Pow(X, NUMJN-1) * utils.Cheval(NTERMS, ATRAN, T)
			}
		}
	} else {
		// Code for x > 4.0: VALINF minus an asymptotic exponential tail.
		if X > XHIGH2 {
			SUMEXP = ONE
		} else {
			if X <= XHIGH1 {
				NUMEXP = int(XHIGH1/X) + 1
				T = math.Exp(-X)
			} else {
				NUMEXP = 1
				T = ONE
			}
			// RK starts at the number of tail terms (the original derived
			// this with a pointless O(NUMEXP) counting loop).
			RK = float64(NUMEXP)
			SUMEXP = ZERO
			for K1 = 1; K1 <= NUMEXP; K1++ {
				SUM2 = ONE
				XK = ONE / (RK * X)
				XK1 = ONE
				// Horner evaluation of the order-NUMJN polynomial in 1/(RK*X).
				for K2 = 1; K2 <= NUMJN; K2++ {
					SUM2 = SUM2*XK1*XK + ONE
					XK1 = XK1 + ONE
				}
				SUMEXP = SUMEXP*T + SUM2
				RK = RK - ONE
			}
		}
		// Work in logs to avoid overflow before subtracting from the limit.
		T = RNUMJN*math.Log(X) - X + math.Log(SUMEXP)
		if T < XHIGH3 {
			RET = VALINF
		} else {
			RET = VALINF - math.Exp(T)
		}
	}
	return RET
}
// TRAN09 calculates the transport integral of order 9
// ∫ 0 to x {t^9 exp(t)/[exp(t)-1]^2 } dt
// The code uses Chebyshev expansions with the coefficients
// given to 20 decimal places.
//
// A negative argument returns ZERO. For 0 <= x <= 4 the integral is
// evaluated from the Chebyshev series ATRAN; for x > 4 it is computed as
// VALINF minus an exponentially small tail, saturating at VALINF once
// that tail underflows.
func TRAN09(XVALUE float64) float64 {
	const (
		ZERO   = 0.0e0
		HALF   = 0.5e0
		ONE    = 1.0e0
		FOUR   = 4.0e0
		EIGHT  = 8.0e0
		ONEHUN = 100.0e0
		// NUMJN is the order n = 9 of the integral; RNUMJN is the same
		// value as a float64.
		NUMJN  = 9
		RNUMJN = 9.0e0
		// VALINF is the value of the integral as x → ∞ (presumably
		// Γ(10)·ζ(9) — TODO confirm against the MISCFUN reference).
		VALINF = 0.36360880558872871397e6
	)
	var K1, K2, NTERMS, NUMEXP int
	var RET, RK, SUMEXP, SUM2, T, X, XHIGH1, XHIGH2, XHIGH3, XK, XK1, XLOW1, XLOW2 float64
	// ATRAN: Chebyshev expansion coefficients used on 0 <= x <= 4.
	var ATRAN = []float64{
		0.16224049991949846835e0,
		-0.3768351452195937773e-1,
		0.547669715917719770e-2,
		-0.64443945009449521e-3,
		0.6773645285280983e-4,
		-0.666813497582042e-5,
		0.63047560019047e-6,
		-0.5807478663611e-7,
		0.525551305123e-8,
		-0.46968861761e-9,
		0.4159395065e-10,
		-0.365808491e-11,
		0.32000794e-12,
		-0.2787651e-13,
		0.242017e-14,
		-0.20953e-15,
		0.1810e-16,
		-0.156e-17,
		0.13e-18,
		-0.1e-19}
	X = XVALUE
	// Error test: the integral is only defined for x >= 0.
	if X < ZERO {
		return ZERO
	}
	// Compute the machine-dependent constants.
	// NOTE(review): D1MACH appears to follow the SLATEC convention —
	// D1MACH[1] smallest positive magnitude, D1MACH[3]/D1MACH[4] relative
	// machine spacings — confirm against the machine package.
	XK = machine.D1MACH[3]
	T = XK / ONEHUN // vestige of the Fortran original's series-truncation test; T is overwritten before use
	if X <= FOUR {
		NTERMS = 19                    // use the full 20-term series
		XLOW1 = math.Sqrt(EIGHT * XK) // below this, the series reduces to its leading term
		XK1 = RNUMJN - ONE
		XLOW2 = math.Pow(XK1*machine.D1MACH[1], ONE/XK1) // below this, x^(n-1)/(n-1) underflows to 0
	} else {
		XHIGH1 = -math.Log(machine.D1MACH[4]) // exp(-x) negligible beyond this
		XHIGH2 = RNUMJN / XK                  // tail sum indistinguishable from 1 beyond this
		XHIGH3 = math.Log(XK)                 // tail contribution underflows below this (in log scale)
	}
	// Code for x < = 4.0
	if X <= FOUR {
		if X < XLOW2 {
			RET = ZERO
		} else {
			if X < XLOW1 {
				// Leading-order small-x approximation x^(n-1)/(n-1).
				RET = math.Pow(X, NUMJN-1) / (RNUMJN - ONE)
			} else {
				// Chebyshev series in t = x²/8 - 1 ∈ [-1, 1].
				T = (((X * X) / EIGHT) - HALF) - HALF
				RET = math.Pow(X, NUMJN-1) * utils.Cheval(NTERMS, ATRAN, T)
			}
		}
	} else {
		// Code for x > 4.0
		if X > XHIGH2 {
			SUMEXP = ONE
		} else {
			// Number of tail terms needed so the truncated part is below
			// machine precision.
			if X <= XHIGH1 {
				NUMEXP = int(XHIGH1/X) + 1
				T = math.Exp(-X)
			} else {
				NUMEXP = 1
				T = ONE
			}
			// Count RK up to NUMEXP as a float64 (kept as in the Fortran
			// original).
			RK = ZERO
			for K1 = 1; K1 <= NUMEXP; K1++ {
				RK = RK + ONE
			}
			// Accumulate the tail from the largest index downwards,
			// Horner-style in powers of exp(-x).
			SUMEXP = ZERO
			for K1 = 1; K1 <= NUMEXP; K1++ {
				SUM2 = ONE
				XK = ONE / (RK * X)
				XK1 = ONE
				for K2 = 1; K2 <= NUMJN; K2++ {
					SUM2 = SUM2*XK1*XK + ONE
					XK1 = XK1 + ONE
				}
				SUMEXP = SUMEXP*T + SUM2
				RK = RK - ONE
			}
		}
		// log of the tail: n·log(x) - x + log(sum of exponentials).
		T = RNUMJN*math.Log(X) - X + math.Log(SUMEXP)
		if T < XHIGH3 {
			// Tail underflows: result is the limit value.
			RET = VALINF
		} else {
			RET = VALINF - math.Exp(T)
		}
	}
	return RET
}
package dosa
import (
"bytes"
"sort"
"strings"
"time"
"github.com/pkg/errors"
)
// Condition holds an operator and a value for a condition on a field.
// The Value's dynamic Go type must correspond to the column's DOSA type
// (enforced by ensureTypeMatch); e.g. an int64 for an Int64 column.
type Condition struct {
	Op    Operator
	Value FieldValue
}
// ColumnCondition represents the condition of each column: a column name
// paired with one operator/value condition applied to it. A column with
// several conditions is represented by several ColumnCondition entries.
type ColumnCondition struct {
	Name      string
	Condition *Condition
}
// sortedColumnCondition adapts a slice of ColumnCondition to sort.Interface,
// ordering primarily by column name and secondarily by operator.
type sortedColumnCondition []*ColumnCondition

// Len reports the number of conditions in the slice.
func (s sortedColumnCondition) Len() int { return len(s) }

// Swap exchanges the conditions at positions i and j.
func (s sortedColumnCondition) Swap(i, j int) { s[i], s[j] = s[j], s[i] }

// Less orders by column name first; ties are broken by operator value.
func (s sortedColumnCondition) Less(i, j int) bool {
	a, b := s[i], s[j]
	if a.Name == b.Name {
		return a.Condition.Op < b.Condition.Op
	}
	return a.Name < b.Name
}
// NormalizeConditions takes a set of conditions for columns and returns a sorted,
// denormalized view of the conditions: one ColumnCondition per (column, condition)
// pair, ordered by column name and then operator.
func NormalizeConditions(columnConditions map[string][]*Condition) []*ColumnCondition {
	var flattened []*ColumnCondition
	for name, conditions := range columnConditions {
		for _, c := range conditions {
			flattened = append(flattened, &ColumnCondition{
				Name:      name,
				Condition: c,
			})
		}
	}
	// Map iteration order is random; sort for a deterministic result.
	sort.Sort(sortedColumnCondition(flattened))
	return flattened
}
// EnsureValidRangeConditions checks the conditions for a PK Range(). "transform" is a name-prettifying function.
//
// It returns nil only when:
//   - every partition-key column carries exactly one Eq condition,
//   - every conditioned column belongs to the primary key, and
//   - the clustering-key conditions form a consecutive prefix, all Eq
//     except possibly the last constrained key (checked by
//     ensureClusteringKeyConditions).
func EnsureValidRangeConditions(ed *EntityDefinition, pk *PrimaryKey, columnConditions map[string][]*Condition, transform func(string) string) error {
	// The requirements for range conditions on the PK being valid:
	// partition key: each field must be present, with a single Eq constraint on each
	// clustering key: conditions must be applied to consecutive fields and must all be Eq except for the last one
	if transform == nil {
		// Default to the identity so error messages still show raw names.
		transform = func(s string) string { return s }
	}
	// Get the partition key. Fields will be removed from missingPKs as we find them in columnConditions.
	partitionKeys := pk.PartitionKeySet()
	missingPKs := pk.PartitionKeySet()
	// For the clustering key the order matters; remember the position of each one.
	clusteringKeys := make(map[string]int)
	for i, k := range pk.ClusteringKeys {
		clusteringKeys[k.Name] = i
	}
	// clusteringConds[i] collects the conditions seen for clustering key i.
	clusteringConds := make([][]*Condition, len(pk.ClusteringKeys))
	columnTypes := ed.ColumnTypes()
	for column, conds := range columnConditions {
		// column in the partition key?
		if _, ok := partitionKeys[column]; ok {
			if err := ensureExactOneEqCondition(columnTypes[column], conds); err != nil {
				return errors.Wrapf(err, "invalid conditions for partition key: %s", transform(column))
			}
			delete(missingPKs, column)
			continue
		}
		// column in the clustering key?
		if i, ok := clusteringKeys[column]; ok {
			// Save the condition, check after we've collected them all.
			clusteringConds[i] = conds
			continue
		}
		// Conditions on non-key columns are not supported for Range().
		return errors.Errorf("column %s is not in the primary key", transform(column))
	}
	// Were all the partition key fields OK?
	if len(missingPKs) > 0 {
		names := []string{}
		for k := range missingPKs {
			names = append(names, transform(k))
		}
		return errors.Errorf("missing Eq condition on partition keys: %v", names)
	}
	// Finally, ensure the clustering key conditions are OK.
	if err := ensureClusteringKeyConditions(pk.ClusteringKeys, columnTypes, clusteringConds, transform); err != nil {
		return errors.Wrap(err, "conditions for clustering keys are invalid")
	}
	return nil
}
// ensureExactOneEqCondition verifies that exactly one condition is present,
// that it uses the Eq operator, and that its value matches type t.
func ensureExactOneEqCondition(t Type, conditions []*Condition) error {
	if len(conditions) != 1 {
		return errors.Errorf("expected exactly one Eq condition, found: %v", conditions)
	}
	cond := conditions[0]
	if cond.Op != Eq {
		return errors.Errorf("only Eq is allowed on this column for this query, found: %s", cond.Op.String())
	}
	if err := ensureTypeMatch(t, cond.Value); err != nil {
		return errors.Wrapf(err, "the value %v in the condition does not have expected type %v", cond.Value, t)
	}
	return nil
}
// ensureClusteringKeyConditions validates the per-clustering-key conditions:
// constrained keys must form a consecutive prefix, every constrained key
// except the last must carry exactly one Eq, and the last constrained key
// may carry a (validated) range.
func ensureClusteringKeyConditions(cks []*ClusteringKey, columnTypes map[string]Type,
	clusteringKeyConditions [][]*Condition, transform func(string) string) error {
	// Locate the last constrained key while verifying consecutiveness.
	lastIdx := -1
	for i, conds := range clusteringKeyConditions {
		if len(conds) == 0 {
			continue
		}
		if lastIdx != i-1 {
			return errors.Errorf("conditions must be applied consecutively on clustering keys, "+
				"but at least one clustering key is unconstrained before: %s", transform(cks[i].Name))
		}
		lastIdx = i
	}
	// Every constrained key before the last must have exactly one Eq.
	for i := 0; i < lastIdx; i++ {
		name := cks[i].Name
		if err := ensureExactOneEqCondition(columnTypes[name], clusteringKeyConditions[i]); err != nil {
			return errors.Wrapf(err, "exact one Eq condition can be applied except for the last "+
				"constrained clustering key, found invalid condition for key: %s", transform(name))
		}
	}
	// The last constrained key may carry a range; validate it separately.
	if lastIdx < 0 {
		return nil
	}
	name := cks[lastIdx].Name
	if err := ensureValidConditions(columnTypes[name], clusteringKeyConditions[lastIdx]); err != nil {
		return errors.Wrapf(err, "invalid or unsupported conditions for clustering key: %s", transform(name))
	}
	return nil
}
// conditionsRule is user-facing text describing which operator combinations
// are accepted on a single column; it is embedded in the error messages
// produced by ensureValidConditions.
const conditionsRule = `
If you have a Lt or LtOrEq operator on a column, you can also have a Gt or GtOrEq on the same column.
No other combinations of operators are permitted.
`
// ensureValidConditions validates the conditions applied to a single column
// according to conditionsRule: a lone condition is always valid; a pair is
// valid only if it combines an upper bound (Lt/LtOrEq) with a lower bound
// (Gt/GtOrEq) describing a non-empty range. It is never called with an
// empty slice, and more than two conditions are rejected outright.
func ensureValidConditions(t Type, conditions []*Condition) error {
	// Every value must match the column type before anything else.
	for _, c := range conditions {
		if err := ensureTypeMatch(t, c.Value); err != nil {
			return errors.Wrap(err, "invalid condition")
		}
	}
	if len(conditions) == 1 {
		return nil // single condition is always valid
	}
	if len(conditions) > 2 {
		return errors.Errorf("conditions: %v, rules: %s", conditions, conditionsRule)
	}
	// Order the pair by operator so only a few combinations remain.
	first, second := conditions[0], conditions[1]
	if first.Op >= second.Op {
		first, second = second, first
	}
	switch {
	// second < fv < first, second <= fv < first, second < fv <= first
	// all require first > second.
	case first.Op == Lt && second.Op == Gt,
		first.Op == Lt && second.Op == GtOrEq,
		first.Op == LtOrEq && second.Op == Gt:
		if compare(t, first.Value, second.Value) <= 0 {
			return errors.Errorf("invalid range: %v", conditions)
		}
	// second <= fv <= first requires first >= second.
	case first.Op == LtOrEq && second.Op == GtOrEq:
		if compare(t, first.Value, second.Value) < 0 {
			return errors.Errorf("invalid range: %v", conditions)
		}
	default:
		// Any other operator combination is unsupported.
		return errors.Errorf("unsupported conditions: %v, rules: %s", conditions, conditionsRule)
	}
	return nil
}
// compare compares two values; return 0 if equal, -1 if <, 1 if >.
// Assumes args are valid: both must already hold the Go type that
// corresponds to t (see ensureTypeMatch), otherwise the type assertions
// panic.
func compare(t Type, a, b interface{}) int {
	switch t {
	case TUUID:
		// TODO: make sure if comparison for UUID like below makes sense.
		return strings.Compare(string(a.(UUID)), string(b.(UUID)))
	case Int64:
		// Compare explicitly instead of returning int(a-b): the
		// subtraction can overflow int64 (e.g. math.MinInt64 - 1 wraps
		// positive) and the int conversion truncates on 32-bit
		// platforms — both yield the wrong sign.
		av, bv := a.(int64), b.(int64)
		switch {
		case av < bv:
			return -1
		case av > bv:
			return 1
		}
		return 0
	case Int32:
		// Same overflow hazard as Int64: math.MinInt32 - 1 wraps positive.
		av, bv := a.(int32), b.(int32)
		switch {
		case av < bv:
			return -1
		case av > bv:
			return 1
		}
		return 0
	case String:
		return strings.Compare(a.(string), b.(string))
	case Blob:
		return bytes.Compare(a.([]byte), b.([]byte))
	case Bool:
		// TODO: we don't need to order bools for range query and should report error if people do dumb things
		var ia, ib int
		if a.(bool) {
			ia = 1
		}
		if b.(bool) {
			ib = 1
		}
		return ia - ib
	case Double:
		fa := a.(float64)
		fb := b.(float64)
		if fa < fb {
			return -1
		}
		if fa > fb {
			return 1
		}
		return 0
	case Timestamp:
		ta := a.(time.Time)
		tb := b.(time.Time)
		if ta.Before(tb) {
			return -1
		}
		if ta.After(tb) {
			return 1
		}
		return 0
	}
	panic("invalid type") // shouldn't reach here
}
func ensureTypeMatch(t Type, v FieldValue) error {
switch t {
case TUUID:
if _, ok := v.(UUID); !ok {
return errors.Errorf("invalid value for UUID type: %v", v)
}
case Int64:
if _, ok := v.(int64); !ok {
return errors.Errorf("invalid value for int64 type: %v", v)
}
case Int32:
if _, ok := v.(int32); !ok {
return errors.Errorf("invalid value for int32 type: %v", v)
}
case String:
if _, ok := v.(string); !ok {
return errors.Errorf("invalid value for string type: %v", v)
}
case Blob:
if _, ok := v.([]byte); !ok {
return errors.Errorf("invalid value for blob type: %v", v)
}
case Bool:
if _, ok := v.(bool); !ok {
return errors.Errorf("invalid value for bool type: %v", v)
}
case Double:
if _, ok := v.(float64); !ok {
return errors.Errorf("invalid value for double/float64 type: %v", v)
}
case Timestamp:
if _, ok := v.(time.Time); !ok {
return errors.Errorf("invalid value for timestamp type: %v", v)
}
default:
// will not happen unless we have a bug
panic("invalid type")
}
return nil
} | range_conditions.go | 0.747339 | 0.414484 | range_conditions.go | starcoder |
package tipWifi
import (
"fmt"
)
// The Radio object represents a subset of the Device configuration related
// to the Radio, whether that is Wi-Fi or Other.
type Radio struct {
	Band string `json:"band"` // "Specifies the wireless band to configure the radio for. Available radio device phys on the target system are matched by the wireless band given here. If multiple radio phys support the same band, the settings specified here will be applied to all of them." ["2G","5G","5G-lower","5G-upper","6G"]
	Bandwidth int `json:"bandwidth,omitempty"` // "Specifies a narrow channel width in MHz, possible values are 5, 10, 20."
	Channel interface{} `json:"channel"` // "Specifies the wireless channel to use. A value of 'auto' starts the ACS algorithm."
	Country string `json:"country,omitempty"` // min: 2, max: 2, "Specifies the country code, affects the available channels and transmission powers."
	ChannelMode string `json:"channel-mode,omitempty"` // "Define the ideal channel mode that the radio shall use. This can be 802.11n, 802.11ac or 802.11ax. This is just a hint for the AP. If the requested value is not supported then the AP will use the highest common denominator." ["HT","VHT","HE"],"default": "HE"
	ChannelWidth int `json:"channel-width,omitempty"` // "The channel width that the radio shall use. This is just a hint for the AP. If the requested value is not supported then the AP will use the highest common denominator." [20,40,80,160,8080], "default": 80
	RequireMode string `json:"require-mode,omitempty"` // "Stations that do no fulfill these HT modes will be rejected." ["HT","VHT","HE"]
	Mimo string `json:"mimo,omitempty"` // "This option allows configuring the antenna pairs that shall be used. This is just a hint for the AP. If the requested value is not supported then the AP will use the highest common denominator." ["1x1","2x2","3x3","4x4","5x5","6x6","7x7","8x8"]
	TxPower int `json:"tx-power,omitempty"` // min: 0, max: 30, "This option specifies the transmission power in dBm"
	Rates struct {
		Beacon int `json:"beacon"`
		Multicast int `json:"multicast"`
	} `json:"rates,omitempty"`
	LegacyRates int `json:"legacy-rates,omitempty"` // "Allow legacy 802.11b data rates." def: false
	BeaconInterval int `json:"beacon-interval,omitempty"` // min: 15, max: 65535, def: 100, "Beacon interval in kus (1.024 ms)."
	DtimPeriod int `json:"dtim-period,omitempty"` // min: 1, max: 255, def: 2, "Set the DTIM (delivery traffic information message) period. There will be one DTIM per this many beacon frames. This may be set between 1 and 255. This option only has an effect on ap wifi-ifaces."
	MaximumClients int `json:"maximum-clients,omitempty"` // "Set the maximum number of clients that may connect to this radio. This value is accumulative for all attached VAP interfaces."
	HeSettings struct {
		MultipleBssid int `json:"multiple-bssid"` // "Enabling this option will make the PHY broadcast its BSSs using the multiple BSSID beacon IE." def: false
		Ema int `json:"ema"` // "Enableing this option will make the PHY broadcast its multiple BSSID beacons using EMA." def: false
		BssColor int `json:"bss-color"` // def: 64, "This enables BSS Coloring on the PHY. setting it to 0 disables the feature 1-63 sets the color and 64 will make hostapd pick a random color."
	} `json:"he-settings,omitempty"`
	HostapdIfaceRaw []string `json:"hostapd-iface-raw,omitempty"` // "This array allows passing raw hostapd.conf lines." ["ap_table_expiration_time=3600","device_type=6-0050F204-1","ieee80211h=1","rssi_ignore_probe_request=-75","time_zone=EST5","uuid=12345678-9abc-def0-1234-56789abcdef0","venue_url=1:http://www.example.com/info-eng","wpa_deny_ptk0_rekey=0"]
}

// GenerateDescription returns a short human-readable summary of the radio
// (band, channel, channel width and channel mode), each rendered as
// "Key: value, " and concatenated in that order.
func (r *Radio) GenerateDescription() string {
	return fmt.Sprintf("Band: %s, Channel: %v, Width: %d, Mode: %s, ",
		r.Band, r.Channel, r.ChannelWidth, r.ChannelMode)
}
package node
// Node Definition for a Node.
type Node struct {
	Val int
	Left *Node
	Right *Node
	Next *Node
}

// BuildTreePreIn reconstructs a binary tree from its preorder and inorder
// traversals and returns the root (nil for empty input).
func BuildTreePreIn(preorder []int, inorder []int) *Node {
	return buildTreePreIn(preorder, inorder)
}

// buildTreePreIn does the recursive reconstruction: the first preorder
// element is the subtree root, and its position in inorder splits the
// remaining elements into the left and right subtrees.
func buildTreePreIn(preorder []int, inorder []int) *Node {
	if len(inorder) == 0 {
		return nil
	}
	root := &Node{Val: preorder[0]}
	for i, v := range inorder {
		if v != preorder[0] {
			continue
		}
		// inorder[:i] is the left subtree, inorder[i+1:] the right;
		// the preorder slices are sized to match.
		root.Left = buildTreePreIn(preorder[1:i+1], inorder[:i])
		root.Right = buildTreePreIn(preorder[i+1:], inorder[i+1:])
		break
	}
	return root
}
/*
# https://leetcode.com/explore/learn/card/data-structure-tree/133/conclusion/994/
You are given a perfect binary tree where all leaves are on the same level, and every parent has two children. The binary tree has the following definition:
struct Node {
int val;
Node *left;
Node *right;
Node *next;
}
Populate each next pointer to point to its next right node. If there is no next right node, the next pointer should be set to NULL.
Initially, all next pointers are set to NULL.
Follow up:
You may only use constant extra space.
Recursive approach is fine, you may assume implicit stack space does not count as extra space for this problem.
Example 1:
1 1 -> null
/ \ / \
2 3 2 ->3 -> null
/ \ / \ / \ / \
4 5 6 7 4->5->6->7 -> null
Figure A Figure B
Input: root = [1,2,3,4,5,6,7]
Output: [1,#,2,3,#,4,5,6,7,#]
Explanation: Given the above perfect binary tree (Figure A), your function should populate each next pointer to point to its next right node, just like in Figure B. The serialized output is in level order as connected by the next pointers, with '#' signifying the end of each level.
Constraints:
The number of nodes in the given tree is less than 4096.
-1000 <= node.val <= 1000
*/
// Connect populates every Next pointer in a perfect binary tree so that each
// node points to the node immediately to its right on the same level (the
// rightmost node of each level keeps Next == nil). Returns the same root.
func Connect(root *Node) *Node {
	return connect(root)
}

// connect starts the pairwise linking from the root's two children; the
// root itself has no right neighbor, so its Next stays nil.
func connect(root *Node) *Node {
	if root != nil {
		connectPerfectTree(root.Left, root.Right)
	}
	return root
}

// connectPerfectTree links two adjacent sibling subtrees of a perfect tree:
// it points l at r, then recurses within each subtree and across the gap
// between them. Assumes the perfect-tree invariant (both children present
// or both absent).
func connectPerfectTree(l, r *Node) {
	if l == nil && r == nil {
		return
	}
	if l != nil && r != nil {
		l.Next = r
	}
	connectPerfectTree(l.Left, l.Right)
	connectPerfectTree(r.Left, r.Right)
	connectPerfectTree(l.Right, r.Left)
}
/*
# https://leetcode.com/explore/learn/card/data-structure-tree/133/conclusion/1016/
Given a binary tree
struct Node {
int val;
Node *left;
Node *right;
Node *next;
}
Populate each next pointer to point to its next right node. If there is no next right node, the next pointer should be set to NULL.
Initially, all next pointers are set to NULL.
Follow up:
You may only use constant extra space.
Recursive approach is fine, you may assume implicit stack space does not count as extra space for this problem.
Example 1:
1 1 -> null
/ \ / \
2 3 2 ->3 -> null
/ \ \ / \ \
4 5 7 4-> 5 ->7 -> null
Figure A Figure B
Input: root = [1,2,3,4,5,null,7]
Output: [1,#,2,3,#,4,5,7,#]
Explanation: Given the above binary tree (Figure A), your function should populate each next pointer to point to its next right node, just like in Figure B. The serialized output is in level order as connected by the next pointers, with '#' signifying the end of each level.
Constraints:
The number of nodes in the given tree is less than 6000.
-100 <= node.val <= 100
*/
func Connect2(root *Node) *Node {
return connect2(root)
}
func connect2(root *Node) *Node {
if root == nil {
return root
}
src := connectNotPerfectTree(root, nil, 0)
for _, level := range src {
if len(level) > 1 {
for i := 0; i < len(level)-1; i++ {
level[i].Next = level[i+1]
}
}
}
return root
}
func connectNotPerfectTree(root *Node, src [][]*Node, level int) [][]*Node {
if root == nil {
return src
}
if len(src) > level {
src[level] = append(src[level], root)
} else {
src = append(src, []*Node{root})
}
src = connectNotPerfectTree(root.Left, src, level+1)
src = connectNotPerfectTree(root.Right, src, level+1)
return src
} | node/node.go | 0.566978 | 0.49408 | node.go | starcoder |
package methodchain
import (
"strings"
)
// MethodChainMapTest returns the Go source template for the method-chain
// Map/Filter/Remove/DropWhile/Reverse/Distinct/TakeWhile unit tests. The
// generator substitutes the <FTYPE> and <TYPE> placeholders with concrete
// type names to emit one test per data type; the string body is therefore
// generated code, not code compiled in this package.
//
// NOTE(review): the template asserts Filter(nil)/FilterPtr(nil) with
// opposite expectations (empty vs. non-empty) — confirm that asymmetry is
// intended before regenerating.
func MethodChainMapTest() string {
	return `
func TestMap<FTYPE>MethodChain(t *testing.T) {
	expectedSquareList := []<TYPE>{1, 4, 9}
	squareList := Make<FTYPE>Slice([]<TYPE>{1, 2, 3}...).Map(square<FTYPE>)
	if squareList[0] != expectedSquareList[0] || squareList[1] != expectedSquareList[1] || squareList[2] != expectedSquareList[2] {
		t.Errorf("Map<FTYPE>MthodChain failed")
	}
	if len(Make<FTYPE>Slice().Map(square<FTYPE>)) > 0 {
		t.Errorf("Map<FTYPE> failed.")
		t.Errorf(reflect.String.String())
	}
}
// TestMap2<FTYPE>MethodChain -
func TestMap2<FTYPE>MethodChain(t *testing.T) {
	if len(Make<FTYPE>Slice().Map(nil)) > 0 {
		t.Errorf("Map<FTYPE> failed.")
		t.Errorf(reflect.String.String())
	}
}
// TestMap<FTYPE>MethodChainPtr -
func TestMap<FTYPE>MethodChainPtr(t *testing.T) {
	var v1 <TYPE> = 1
	var v2 <TYPE> = 2
	var v3 <TYPE> = 3
	var v4 <TYPE> = 4
	var v9 <TYPE> = 9
	expectedSquareList := []*<TYPE>{&v1, &v4, &v9}
	squareList := Make<FTYPE>SlicePtr([]*<TYPE>{&v1, &v2, &v3}...).MapPtr(square<FTYPE>Ptr)
	if *squareList[0] != *expectedSquareList[0] || *squareList[1] != *expectedSquareList[1] || *squareList[2] != *expectedSquareList[2] {
		t.Errorf("Map<FTYPE>PtrMthodChain failed")
	}
	if len(Make<FTYPE>SlicePtr().MapPtr(square<FTYPE>Ptr)) > 0 {
		t.Errorf("Map<FTYPE>Ptr failed.")
	}
}
// TestMapPtr2<FTYPE>MethodChain -
func TestMapPtr2<FTYPE>MethodChain(t *testing.T) {
	if len(Make<FTYPE>SlicePtr().MapPtr(nil)) > 0 {
		t.Errorf("Map<FTYPE>Ptr failed.")
	}
}
// TestFilter<FTYPE>MethodChain -
func TestFilter<FTYPE>MethodChain(t *testing.T) {
	var v1 <TYPE> = 1
	var v2 <TYPE> = 2
	var v3 <TYPE> = 3
	greaterThan1<FTYPE>MethodChain := func(num <TYPE>) bool {
		return num > 1
	}
	// Test : even number in the list
	expectedFilteredList := []<TYPE>{v2, v3}
	filteredList := Make<FTYPE>Slice([]<TYPE>{v1, v2, v3}...).Filter(greaterThan1<FTYPE>MethodChain)
	if filteredList[0] != expectedFilteredList[0] || filteredList[1] != expectedFilteredList[1] {
		t.Errorf("Filter<FTYPE> failed. Expected filtered list=%v, actual list=%v", expectedFilteredList, filteredList)
	}
}
// TestFilter2<FTYPE>MethodChain -
func TestFilter2<FTYPE>MethodChain(t *testing.T) {
	if len(Make<FTYPE>Slice().Filter(nil)) > 0 {
		t.Errorf("Filter<FTYPE>Ptr failed.")
	}
}
// TestFilter<FTYPE>PtrMethodChain -
func TestFilter<FTYPE>PtrMethodChain(t *testing.T) {
	var v1 <TYPE> = 1
	var v2 <TYPE> = 2
	var v3 <TYPE> = 3
	greaterThan1<FTYPE>MethodChain := func(num *<TYPE>) bool {
		return *num > 1
	}
	// Test : even number in the list
	expectedFilteredList := []*<TYPE>{&v2, &v3}
	filteredList := Make<FTYPE>SlicePtr([]*<TYPE>{&v1, &v2, &v3}...).FilterPtr(greaterThan1<FTYPE>MethodChain)
	if *filteredList[0] != *expectedFilteredList[0] || *filteredList[1] != *expectedFilteredList[1] {
		t.Errorf("Filter<FTYPE>Ptr failed. Expected filtered list=%v, actual list=%v", expectedFilteredList, filteredList)
	}
}
// TestFilter2<FTYPE>PtrMethodChain -
func TestFilter2<FTYPE>PtrMethodChain(t *testing.T) {
	if len(Make<FTYPE>SlicePtr().FilterPtr(nil)) > 0 {
		t.Errorf("Filter<FTYPE>Ptr failed.")
	}
}
// TestRemove<FTYPE>MethodChain -
func TestRemove<FTYPE>MethodChain(t *testing.T) {
	// Test : even number in the list
	var v2 <TYPE> = 2
	var v3 <TYPE> = 3
	var v4 <TYPE> = 4
	isGreaterThanThree<FTYPE> := func (num <TYPE>) bool {
		return num > 3
	}
	expectedNewList := []<TYPE>{v2, v3}
	NewList := Make<FTYPE>Slice([]<TYPE>{v2, v3, v4}...).Remove(isGreaterThanThree<FTYPE>)
	if NewList[0] != expectedNewList[0] || NewList[1] != expectedNewList[1] {
		t.Errorf("Remove<FTYPE> failed. Expected New list=%v, actual list=%v", expectedNewList, NewList)
	}
}
// TestRemove2<FTYPE>MethodChain -
func TestRemove2<FTYPE>MethodChain(t *testing.T) {
	if len(Make<FTYPE>Slice().Remove(nil)) > 0 {
		t.Errorf("Remove<FTYPE> failed.")
	}
}
// TestRemove<FTYPE>PtrMethodChain -
func TestRemove<FTYPE>PtrMethodChain(t *testing.T) {
	// Test : even number in the list
	var v2 <TYPE> = 2
	var v3 <TYPE> = 3
	var v4 <TYPE> = 4
	isGreaterThanThree<FTYPE> := func (num *<TYPE>) bool {
		return *num > 3
	}
	expectedNewList := []*<TYPE>{&v2, &v3}
	NewList := Make<FTYPE>SlicePtr([]*<TYPE>{&v2, &v3, &v4}...).RemovePtr(isGreaterThanThree<FTYPE>)
	if *NewList[0] != *expectedNewList[0] || *NewList[1] != *expectedNewList[1] {
		t.Errorf("Remove<FTYPE>Ptr failed. Expected New list=%v, actual list=%v", expectedNewList, NewList)
	}
}
// TestRemove2Ptr<FTYPE>MethodChain -
func TestRemove2Ptr<FTYPE>MethodChain(t *testing.T) {
	if len(Make<FTYPE>SlicePtr().RemovePtr(nil)) > 0 {
		t.Errorf("Remove<FTYPE>Ptr failed.")
	}
}
func TestDropWhile<FTYPE>MethodChain(t *testing.T) {
	// Test : drop the numbers as long as condition match and returns remaining number in the list once condition fails
	var v2 <TYPE> = 2
	var v3 <TYPE> = 3
	var v4 <TYPE> = 4
	var v5 <TYPE> = 5
	isEven<FTYPE> := func(num <TYPE>) bool {
		return num%2 == 0
	}
	expectedNewList := []<TYPE>{v3, v4, v5}
	NewList := Make<FTYPE>Slice([]<TYPE>{v4, v2, v3, v4, v5}...).DropWhile(isEven<FTYPE>)
	if NewList[0] != expectedNewList[0] || NewList[1] != expectedNewList[1] || NewList[2] != expectedNewList[2] {
		t.Errorf("DropWhile>MethodChain failed. Expected New list=%v, actual list=%v", expectedNewList, NewList)
	}
}
// TestDropWhile2<FTYPE>MethodChain -
func TestDropWhile2<FTYPE>MethodChain(t *testing.T) {
	if len(Make<FTYPE>Slice().DropWhile(nil)) > 0 {
		t.Errorf("DropWhile<FTYPE> failed.")
	}
}
func TestDropWhile<FTYPE>PtrMethodChain(t *testing.T) {
	// Test : drop the numbers as long as condition match and returns remaining number in the list once condition fails
	var v2 <TYPE> = 2
	var v3 <TYPE> = 3
	var v4 <TYPE> = 4
	var v5 <TYPE> = 5
	isEven<FTYPE>Ptr := func(num *<TYPE>) bool {
		return *num%2 == 0
	}
	expectedNewList := []*<TYPE>{&v3, &v4, &v5}
	NewList := Make<FTYPE>SlicePtr([]*<TYPE>{&v4, &v2, &v3, &v4, &v5}...).DropWhilePtr(isEven<FTYPE>Ptr)
	if *NewList[0] != *expectedNewList[0] || *NewList[1] != *expectedNewList[1] || *NewList[2] != *expectedNewList[2] {
		t.Errorf("DropWhile>PtrMethodChain failed. Expected New list=%v, actual list=%v", expectedNewList, NewList)
	}
}
// TestDropWhile2Ptr<FTYPE>MethodChain -
func TestDropWhile2Ptr<FTYPE>MethodChain(t *testing.T) {
	if len(Make<FTYPE>SlicePtr().DropWhilePtr(nil)) > 0 {
		t.Errorf("DropWhile<FTYPE>Ptr failed.")
	}
}
// TestReverse<FTYPE>methodchain
func TestReverse<FTYPE>methodchain(t *testing.T) {
	var v1 <TYPE> = 1
	var v2 <TYPE> = 2
	var v3 <TYPE> = 3
	expected := []<TYPE>{v3, v2, v1}
	reversed := Make<FTYPE>Slice([]<TYPE>{v1, v2, v3}...).Reverse()
	if expected[0] != reversed[0] || expected[1] != reversed[1] || expected[2] != reversed[2] {
		t.Errorf("Reverse<Type>s failed")
	}
}
// TestReverse<FTYPE>Ptrmethodchain
func TestReverse<FTYPE>Ptrmethodchain(t *testing.T) {
	var v1 <TYPE> = 1
	var v2 <TYPE> = 2
	var v3 <TYPE> = 3
	expected := []*<TYPE>{&v3, &v2, &v1}
	reversed := Make<FTYPE>SlicePtr([]*<TYPE>{&v1, &v2, &v3}...).ReversePtr()
	if *expected[0] != *reversed[0] || *expected[1] != *reversed[1] || *expected[2] != *reversed[2] {
		t.Errorf("Reverse<Type>sMethodChain failed")
	}
}
// TestDistinct<FTYPE>MethodChain -
func TestDistinct<FTYPE>MethodChain(t *testing.T) {
	// Test : Get distinct values
	expected := []<TYPE>{8, 2, 0}
	list := []<TYPE>{8, 2, 8, 0, 2, 0}
	distinct := Make<FTYPE>Slice(list...).Distinct()
	if len(distinct) != 3 || distinct[0] != 8 || distinct[1] != 2 || distinct[2] != 0 {
		t.Errorf("DistinctInt failed. Expected=%v, actual=%v", expected, distinct)
	}
	expected = []<TYPE>{8, 2, 0}
	list = []<TYPE>{8, 2, 0}
	distinct = Make<FTYPE>Slice(list...).Distinct()
	if len(distinct) != 3 || distinct[0] != 8 || distinct[1] != 2 || distinct[2] != 0 {
		t.Errorf("DistinctInt failed. Expected=%v, actual=%v", expected, distinct)
	}
	expected = []<TYPE>{}
	list = []<TYPE>{}
	distinct = Make<FTYPE>Slice(list...).Distinct()
	if len(distinct) != 0 {
		t.Errorf("DistinctInt failed. Expected=%v, actual=%v", expected, distinct)
	}
	distinct = Make<FTYPE>Slice().Distinct()
	if len(distinct) != 0 {
		t.Errorf("DistinctInt failed. Expected=%v, actual=%v", expected, distinct)
	}
}
func TestDistinct<FTYPE>PtrMethodChain(t *testing.T) {
	var v8 <TYPE> = 8
	var v2 <TYPE> = 2
	var v0 <TYPE>
	// Test : Get distinct values
	expected := []*<TYPE>{&v8, &v2, &v0}
	list := []*<TYPE>{&v8, &v2, &v8, &v0, &v2, &v0}
	distinct := Make<FTYPE>SlicePtr(list...).DistinctPtr()
	if len(distinct) != 3 || *distinct[0] != v8 || *distinct[1] != v2 || *distinct[2] != v0 {
		t.Errorf("Distinct<FTYPE>Ptr failed. Expected=%v, actual=%v", expected, distinct)
	}
	expected = []*<TYPE>{&v8, &v2, &v0}
	list = []*<TYPE>{&v8, &v2, &v0}
	distinct = Make<FTYPE>SlicePtr(list...).DistinctPtr()
	if len(distinct) != 3 || *distinct[0] != v8 || *distinct[1] != v2 || *distinct[2] != v0 {
		t.Errorf("Distinct<TYPE>PtrMethodChain failed. Expected=%v, actual=%v", expected, distinct)
	}
	expected = []*<TYPE>{}
	list = []*<TYPE>{}
	distinct = Make<FTYPE>SlicePtr(list...).DistinctPtr()
	if len(distinct) != 0 {
		t.Errorf("Distinct<FTYPE>PtrMethodChain failed. Expected=%v, actual=%v", expected, distinct)
	}
	distinct = Make<FTYPE>SlicePtr().DistinctPtr()
	if len(distinct) != 0 {
		t.Errorf("Distinct<FTYPE>MethodChain failed. Expected=%v, actual=%v", expected, distinct)
	}
}
// TestTakeWhile<FTYPE>MethodChain -
func TestTakeWhile<FTYPE>MethodChain(t *testing.T) {
	isEvenMethodChain := func(num <TYPE>) bool {
		return num%2 == 0
	}
	// Test : Take the numbers as long as condition match
	expectedNewList := []<TYPE>{4, 2, 4}
	NewList := Make<FTYPE>Slice([]<TYPE>{4, 2, 4, 7, 5}...).TakeWhile(isEvenMethodChain)
	if NewList[0] != expectedNewList[0] || NewList[1] != expectedNewList[1] || NewList[2] != expectedNewList[2] {
		t.Errorf("TakeWhileInt failed. Expected New list=%v, actual list=%v", expectedNewList, NewList)
	}
	if len(Make<FTYPE>Slice().TakeWhile(nil)) > 0 {
		t.Errorf("TakeWhileInt failed.")
	}
	if len(Make<FTYPE>Slice([]<TYPE>{}...).TakeWhile(nil)) > 0 {
		t.Errorf("TakeWhileInt failed.")
	}
}
// TestTakeWhile<FTYPE>MethodChainPtr -
func TestTakeWhile<FTYPE>MethodChainPtr(t *testing.T) {
	isEvenMethodChain := func(num *<TYPE>) bool {
		return *num%2 == 0
	}
	var v2 <TYPE> = 2
	var v4 <TYPE> = 4
	var v5 <TYPE> = 5
	var v7 <TYPE> = 7
	// Test : Take the numbers as long as condition match
	expectedNewList := []*<TYPE>{&v4, &v2, &v4}
	NewList := Make<FTYPE>SlicePtr([]*<TYPE>{&v4, &v2, &v4, &v7, &v5}...).TakeWhilePtr(isEvenMethodChain)
	if *NewList[0] != *expectedNewList[0] || *NewList[1] != *expectedNewList[1] || *NewList[2] != *expectedNewList[2] {
		t.Errorf("TakeWhile<FTYPE>methodchain failed. Expected New list=%v, actual list=%v", expectedNewList, NewList)
	}
	if len(Make<FTYPE>SlicePtr().TakeWhilePtr(nil)) > 0 {
		t.Errorf("TakeWhile<FTYPE> failed.")
	}
	if len(Make<FTYPE>SlicePtr([]*<TYPE>{}...).TakeWhilePtr(nil)) > 0 {
		t.Errorf("TakeWhile<FTYPE> failed.")
	}
}
`
}
// MethodChainSortTest returns the Go source template for the method-chain
// Sort/SortDesc (value and pointer) unit tests. The generator substitutes
// the <FTYPE> and <TYPE> placeholders with concrete type names; the string
// body is generated code, not code compiled in this package.
func MethodChainSortTest() string {
	return `
// TestSort<FTYPE>methodchain
func TestSort<FTYPE>methodchain(t *testing.T) {
	var v1 <TYPE> = 1
	var v2 <TYPE> = 2
	var v3 <TYPE> = 3
	expected := []<TYPE>{v1, v2, v3}
	sorted := Make<FTYPE>Slice([]<TYPE>{v3, v2, v1}...).Sort()
	if expected[0] != sorted[0] || expected[1] != sorted[1] || expected[2] != sorted[2] {
		t.Errorf("Sort<Type>sMethodChain failed")
		t.Errorf(reflect.String.String())
	}
}
// TestSort<FTYPE>methodchainDesc
func TestSort<FTYPE>methodchainDesc(t *testing.T) {
	var v1 <TYPE> = 1
	var v2 <TYPE> = 2
	var v3 <TYPE> = 3
	expected := []<TYPE>{v3, v2, v1}
	sorted := Make<FTYPE>Slice([]<TYPE>{v3, v2, v1}...).SortDesc()
	if expected[0] != sorted[0] || expected[1] != sorted[1] || expected[2] != sorted[2] {
		t.Errorf("Sort<Type>sMethodChain failed")
		t.Errorf(reflect.String.String())
	}
}
// TestSort<FTYPE>methodchainPtr
func TestSort<FTYPE>methodchainPtr(t *testing.T) {
	var v1 <TYPE> = 1
	var v2 <TYPE> = 2
	var v3 <TYPE> = 3
	expected := []*<TYPE>{&v1, &v2, &v3}
	sorted := Make<FTYPE>SlicePtr([]*<TYPE>{&v3, &v2, &v1}...).SortPtr()
	if *expected[0] != *sorted[0] || *expected[1] != *sorted[1] || *expected[2] != *sorted[2] {
		t.Errorf("Sort<Type>sMethodChain failed")
	}
}
// TestSort<FTYPE>methodchainDescPtr
func TestSort<FTYPE>methodchainDescPtr(t *testing.T) {
	var v1 <TYPE> = 1
	var v2 <TYPE> = 2
	var v3 <TYPE> = 3
	expected := []*<TYPE>{&v3, &v2, &v1}
	sorted := Make<FTYPE>SlicePtr([]*<TYPE>{&v3, &v2, &v1}...).SortDescPtr()
	if *expected[0] != *sorted[0] || *expected[1] != *sorted[1] || *expected[2] != *sorted[2] {
		t.Errorf("Sort<Type>sMethodChain failed")
	}
}
`
}
// MethodChainMapBoolTest is template to generate itself for different combination of data type.
//
// The returned text is Go test source in which the generator later substitutes
// <TYPE> with the concrete element type (e.g. "bool") and <FTYPE> with its
// exported name fragment (e.g. "Bool"). The template must be returned
// verbatim: every byte of it ends up in the generated test files.
func MethodChainMapBoolTest() string {
	return `
// TestMap<FTYPE>MethodChain -
func TestMap<FTYPE>MethodChain(t *testing.T) {
expectedSquareList := []<TYPE>{false, true, false}
squareList := Make<FTYPE>Slice([]<TYPE>{true, false, true}...).Map(inverseBool)
if squareList[0] != expectedSquareList[0] || squareList[1] != expectedSquareList[1] || squareList[2] != expectedSquareList[2] {
t.Errorf("Map<FTYPE>MthodChain failed")
}
if len(Make<FTYPE>Slice().Map(inverseBool)) > 0 {
t.Errorf("Map<FTYPE> failed.")
t.Errorf(reflect.String.String())
}
if len(Make<FTYPE>Slice().Map(nil)) > 0 {
t.Errorf("Map<FTYPE> failed.")
}
}
func inverseBool(v bool) bool {
if v == true {
return false
}
return true
}
// TestMapPtrMethodChainBool -
func TestMapPtrMethodChainBool<FTYPE>MethodChain(t *testing.T) {
tr := true
f := false
expectedSquareList := []*<TYPE>{&f, &tr, &f}
squareList := Make<FTYPE>SlicePtr([]*<TYPE>{&tr, &f, &tr}...).MapPtr(inverseBoolPtr)
if *squareList[0] != *expectedSquareList[0] || *squareList[1] != *expectedSquareList[1] || *squareList[2] != *expectedSquareList[2] {
t.Errorf("Map<FTYPE>PtrMthodChain failed")
}
if len(Make<FTYPE>SlicePtr().MapPtr(inverseBoolPtr)) > 0 {
t.Errorf("Map<FTYPE>PtrFilterChain failed.")
}
if len(Make<FTYPE>SlicePtr().MapPtr(nil)) > 0 {
t.Errorf("Map<FTYPE>PtrFilterChain failed.")
}
}
// TestFilterBoolMethodChain -
func TestFilterBoolMethodChain(t *testing.T) {
var vt bool = true
expectedSumList := []bool{vt}
newList := Make<FTYPE>Slice([]bool{vt}...).Filter(trueBool)
if newList[0] != expectedSumList[0] {
t.Errorf("FilterBoolPtr failed")
}
if len(Make<FTYPE>Slice().Filter(nil)) > 0 {
t.Errorf("MapBoolPtr failed.")
}
}
// TestFilterBoolPtrMethodChain -
func TestFilterBoolPtrMethodChain(t *testing.T) {
var vt bool = true
expectedSumList := []*bool{&vt}
newList := Make<FTYPE>SlicePtr([]*bool{&vt}...).FilterPtr(trueBoolPtr)
if *newList[0] != *expectedSumList[0] {
t.Errorf("FilterBoolPtr failed")
}
if len(Make<FTYPE>SlicePtr(&vt).FilterPtr(nil)) == 0 {
t.Errorf("MapBoolPtr failed.")
}
}
// TestRemoveBoolMethodChain -
func TestRemoveBoolMethodChain(t *testing.T) {
var vt bool = true
r := func(vt bool) bool {
if vt == true {
return true
}
return false
}
if len(Make<FTYPE>Slice(vt).Remove(r)) > 0 {
t.Errorf("RemoveBool failed.")
}
if len(Make<FTYPE>Slice(vt).Remove(nil)) == 0 {
t.Errorf("RemoveBool failed.")
}
}
// TestRemoveBoolPtrMethodChain -
func TestRemoveBoolPtrMethodChain(t *testing.T) {
var vt bool = true
r := func(vt *bool) bool {
if *vt == true {
return true
}
return false
}
if len(Make<FTYPE>SlicePtr(&vt).RemovePtr(r)) > 0 {
t.Errorf("RemoveBool failed.")
}
if len(Make<FTYPE>SlicePtr(&vt).RemovePtr(nil)) == 0 {
t.Errorf("RemoveBoolPtr failed.")
}
}
// TestDropWhileBoolMethodChain -
func TestDropWhileBoolMethodChain(t *testing.T) {
var vt bool = true
var vf bool = false
isTrueBool := func(num bool) bool {
return num == true
}
expectedNewList := []bool{vf, vt}
NewList := Make<FTYPE>Slice([]bool{vt, vf, vt}...).DropWhile(isTrueBool)
if NewList[0] != expectedNewList[0] || NewList[1] != expectedNewList[1] {
t.Errorf("DropWhileMethodChain failed. Expected New list=%v, actual list=%v", expectedNewList, NewList)
}
if len(Make<FTYPE>Slice().DropWhile(nil)) > 0 {
t.Errorf("DropWhile failed.")
}
}
// TestDropWhileBoolPtrMethodChain -
func TestDropWhileBoolPtrMethodChain(t *testing.T) {
var vt bool = true
var vf bool = false
isTrueBoolPtr := func(num *bool) bool {
return *num == true
}
expectedNewList := []*bool{&vf, &vt}
NewList := Make<FTYPE>SlicePtr([]*bool{&vt, &vf, &vt}...).DropWhilePtr(isTrueBoolPtr)
if *NewList[0] != *expectedNewList[0] || *NewList[1] != *expectedNewList[1] {
t.Errorf("DropWhilePtrMethodChain failed. Expected New list=%v, actual list=%v", expectedNewList, NewList)
}
if len(Make<FTYPE>SlicePtr().DropWhilePtr(nil)) > 0 {
t.Errorf("DropWhilePtr failed.")
}
}
// TestReverse<FTYPE>methodchain
func TestReverse<FTYPE>methodchain(t *testing.T) {
var v1 <TYPE> = true
var v2 <TYPE> = false
var v3 <TYPE> = false
expected := []<TYPE>{v3, v2, v1}
reversed := Make<FTYPE>Slice([]<TYPE>{v1, v2, v3}...).Reverse()
if expected[0] != reversed[0] || expected[1] != reversed[1] || expected[2] != reversed[2] {
t.Errorf("Reverse<Type>s failed")
}
}
// TestReverse<FTYPE>Ptrmethodchain
func TestReverse<FTYPE>Ptrmethodchain(t *testing.T) {
var v1 <TYPE> = true
var v2 <TYPE> = false
var v3 <TYPE> = false
expected := []*<TYPE>{&v3, &v2, &v1}
reversed := Make<FTYPE>SlicePtr([]*<TYPE>{&v1, &v2, &v3}...).ReversePtr()
if *expected[0] != *reversed[0] || *expected[1] != *reversed[1] || *expected[2] != *reversed[2] {
t.Errorf("Reverse<Type>sMethodChain failed")
}
}
// TestDistinctBoolMethodChain
func TestDistinctBoolMethodChain(t *testing.T) {
var vt bool = true
newList := Make<FTYPE>Slice([]bool{vt, vt}...).Distinct()
if newList[0] != vt {
t.Errorf("DistinctBool failed")
}
if len(Make<FTYPE>Slice().Distinct()) > 0 {
t.Errorf("DistinctBool failed.")
}
}
// TestDistinctBoolPtrMethodChain
func TestDistinctBoolPtrMethodChain(t *testing.T) {
var vt bool = true
newList := Make<FTYPE>SlicePtr([]*bool{&vt, &vt}...).DistinctPtr()
if *newList[0] != vt {
t.Errorf("DistinctPtrBool failed")
}
if len(Make<FTYPE>SlicePtr().DistinctPtr()) > 0 {
t.Errorf("DistinctPtrBool failed.")
}
}
// TestTakeWhileBoolMethodChain -
func TestTakeWhileBoolMethodChain(t *testing.T) {
// Test : Take the numbers as long as condition match
var vt bool = true
var vf bool = false
expectedNewList := []bool{vt, vt, vf}
NewList := Make<FTYPE>Slice([]bool{vt, vt, vf, vf, vf}...).TakeWhile(func(v bool) bool { return v == true })
if NewList[0] != expectedNewList[0] || NewList[1] != expectedNewList[1] {
t.Errorf("TakeWhileBool failed. Expected New list=%v, actual list=%v", expectedNewList, NewList)
}
expectedNewList = []bool{vt}
NewList = Make<FTYPE>Slice([]bool{vt}...).TakeWhile(func(v bool) bool { return v == true })
if NewList[0] != expectedNewList[0] {
t.Errorf("TakeWhileBoolPtr failed. Expected New list=%v, actual list=%v", expectedNewList, NewList)
}
if len(Make<FTYPE>Slice().TakeWhile(nil)) > 0 {
t.Errorf("TakeWhileBoolPtr failed.")
}
if len(Make<FTYPE>Slice([]bool{}...).TakeWhile(nil)) > 0 {
t.Errorf("TakeWhileBoolPtr failed.")
}
}
// TestTakeWhileBoolPtrMethodChain -
func TestTakeWhileBoolPtrMethodChain(t *testing.T) {
// Test : Take the numbers as long as condition match
var vt bool = true
var vf bool = false
expectedNewList := []*bool{&vt, &vt, &vf}
NewList := Make<FTYPE>SlicePtr([]*bool{&vt, &vt, &vf, &vf, &vf}...).TakeWhilePtr(func(v *bool) bool { return *v == true } )
if *NewList[0] != *expectedNewList[0] || *NewList[1] != *expectedNewList[1] {
t.Errorf("TakeWhileBoolPtr failed. Expected New list=%v, actual list=%v", expectedNewList, NewList)
}
expectedNewList = []*bool{&vt}
NewList = Make<FTYPE>SlicePtr([]*bool{&vt}...).TakeWhilePtr(func(v *bool) bool { return *v == true })
if *NewList[0] != *expectedNewList[0] {
t.Errorf("TakeWhileBoolPtr failed. Expected New list=%v, actual list=%v", expectedNewList, NewList)
}
if len(Make<FTYPE>SlicePtr().TakeWhilePtr(nil)) > 0 {
t.Errorf("TakeWhileBoolPtr failed.")
}
if len(Make<FTYPE>SlicePtr([]*bool{}...).TakeWhilePtr(nil)) > 0 {
t.Errorf("TakeWhileBoolPtr failed.")
t.Errorf(reflect.String.String())
}
}
`
}
// ReplaceActivityMethodChainMap - Replace activity for string type
//
// The generic templates are written for numeric element types; when the
// element type is string (or float), some generated snippets are invalid Go
// (unquoted literals, comparisons against ints, %-operator on floats). This
// function patches the generated code by replacing each broken snippet (t1)
// with a type-correct equivalent (t2). Every t1/t2 pair must match the
// generated text byte-for-byte or the replacement silently does nothing.
func ReplaceActivityMethodChainMap(code string) string {
	// Map over strings: quote the literals and define a string "square".
	t1 := `func TestMapStrMethodChain(t *testing.T) {
expectedSquareList := []string{1, 4, 9}
squareList := MakeStrSlice([]string{1, 2, 3}...).Map(squareStr)
if squareList[0] != expectedSquareList[0] || squareList[1] != expectedSquareList[1] || squareList[2] != expectedSquareList[2] {
t.Errorf("MapStrMthodChain failed")
}
if len(MakeStrSlice().Map(squareStr)) > 0 {
t.Errorf("MapStr failed.")
t.Errorf(reflect.String.String())
}
}`
	t2 := `func TestMapStrMethodChain(t *testing.T) {
expectedSquareList := []string{"11", "22", "33"}
squareList := MakeStrSlice([]string{"1", "2", "3"}...).Map(squareStr)
if squareList[0] != expectedSquareList[0] || squareList[1] != expectedSquareList[1] || squareList[2] != expectedSquareList[2] {
t.Errorf("MapStrMthodChain failed")
}
if len(MakeStrSlice().Map(squareStr)) > 0 {
t.Errorf("MapStr failed.")
t.Errorf(reflect.String.String())
}
}
func squareStr(s string) string {
return s+s
}`
	code = strings.ReplaceAll(code, t1, t2)
	// Compare strings against a string literal, not an int.
	t1 = `greaterThan1StrMethodChain := func(num string) bool {
return num > 1
}`
	t2 = `greaterThan1StrMethodChain := func(num string) bool {
return num > "1"
}`
	code = strings.ReplaceAll(code, t1, t2)
	// Pointer Map over strings: expected values become doubled strings.
	t1 = `func TestMapStrMethodChainPtr(t *testing.T) {
var v1 string = "1"
var v2 string = "2"
var v3 string = "3"
var v4 string = "4"
var v9 string = "9"
expectedSquareList := []*string{&v1, &v4, &v9}
squareList := MakeStrSlicePtr([]*string{&v1, &v2, &v3}...).MapPtr(squareStrPtr)
if *squareList[0] != *expectedSquareList[0] || *squareList[1] != *expectedSquareList[1] || *squareList[2] != *expectedSquareList[2] {
t.Errorf("MapStrPtrMthodChain failed")
}
if len(MakeStrSlicePtr().MapPtr(squareStrPtr)) > 0 {
t.Errorf("MapStrPtr failed.")
}
}`
	t2 = `func TestMapStrMethodChainPtr(t *testing.T) {
var v1 string = "1"
var v2 string = "2"
var v3 string = "3"
var v11 string = "11"
var v22 string = "22"
var v33 string = "33"
expectedSquareList := []*string{&v11, &v22, &v33}
squareList := MakeStrSlicePtr([]*string{&v1, &v2, &v3}...).MapPtr(squareStrPtr)
if *squareList[0] != *expectedSquareList[0] || *squareList[1] != *expectedSquareList[1] || *squareList[2] != *expectedSquareList[2] {
t.Errorf("MapStrPtrMthodChain failed")
}
if len(MakeStrSlicePtr().MapPtr(squareStrPtr)) > 0 {
t.Errorf("MapStrPtr failed.")
}
}`
	code = strings.ReplaceAll(code, t1, t2)
	// FilterPtr predicate: string comparison against a quoted literal.
	t1 = `func TestFilterStrPtrMethodChain(t *testing.T) {
var v1 string = "1"
var v2 string = "2"
var v3 string = "3"
greaterThan1StrMethodChain := func(num *string) bool {
return *num > 1
}
// Test : even number in the list
expectedFilteredList := []*string{&v2, &v3}
filteredList := MakeStrSlicePtr([]*string{&v1, &v2, &v3}...).FilterPtr(greaterThan1StrMethodChain)
if *filteredList[0] != *expectedFilteredList[0] || *filteredList[1] != *expectedFilteredList[1] {
t.Errorf("FilterStrPtr failed. Expected filtered list=%v, actual list=%v", expectedFilteredList, filteredList)
}
}`
	t2 = `func TestFilterStrPtrMethodChain(t *testing.T) {
var v1 string = "1"
var v2 string = "2"
var v3 string = "3"
greaterThan1StrMethodChain := func(num *string) bool {
return *num > "1"
}
// Test : even number in the list
expectedFilteredList := []*string{&v2, &v3}
filteredList := MakeStrSlicePtr([]*string{&v1, &v2, &v3}...).FilterPtr(greaterThan1StrMethodChain)
if *filteredList[0] != *expectedFilteredList[0] || *filteredList[1] != *expectedFilteredList[1] {
t.Errorf("FilterStrPtr failed. Expected filtered list=%v, actual list=%v", expectedFilteredList, filteredList)
}
}`
	code = strings.ReplaceAll(code, t1, t2)
	// Remove predicate: string comparison against a quoted literal.
	t1 = `func TestRemoveStrMethodChain(t *testing.T) {
// Test : even number in the list
var v2 string = "2"
var v3 string = "3"
var v4 string = "4"
isGreaterThanThreeStr := func (num string) bool {
return num > 3
}
expectedNewList := []string{v2, v3}
NewList := MakeStrSlice([]string{v2, v3, v4}...).Remove(isGreaterThanThreeStr)
if NewList[0] != expectedNewList[0] || NewList[1] != expectedNewList[1] {
t.Errorf("RemoveStr failed. Expected New list=%v, actual list=%v", expectedNewList, NewList)
}
}`
	t2 = `func TestRemoveStrMethodChain(t *testing.T) {
// Test : even number in the list
var v2 string = "2"
var v3 string = "3"
var v4 string = "4"
isGreaterThanThreeStr := func (num string) bool {
return num > "3"
}
expectedNewList := []string{v2, v3}
NewList := MakeStrSlice([]string{v2, v3, v4}...).Remove(isGreaterThanThreeStr)
if NewList[0] != expectedNewList[0] || NewList[1] != expectedNewList[1] {
t.Errorf("RemoveStr failed. Expected New list=%v, actual list=%v", expectedNewList, NewList)
}
}`
	code = strings.ReplaceAll(code, t1, t2)
	// RemovePtr predicate: string comparison against a quoted literal.
	t1 = `func TestRemoveStrPtrMethodChain(t *testing.T) {
// Test : even number in the list
var v2 string = "2"
var v3 string = "3"
var v4 string = "4"
isGreaterThanThreeStr := func (num *string) bool {
return *num > 3
}
expectedNewList := []*string{&v2, &v3}
NewList := MakeStrSlicePtr([]*string{&v2, &v3, &v4}...).RemovePtr(isGreaterThanThreeStr)
if *NewList[0] != *expectedNewList[0] || *NewList[1] != *expectedNewList[1] {
t.Errorf("RemoveStrPtr failed. Expected New list=%v, actual list=%v", expectedNewList, NewList)
}
}`
	t2 = `func TestRemoveStrPtrMethodChain(t *testing.T) {
// Test : even number in the list
var v2 string = "2"
var v3 string = "3"
var v4 string = "4"
isGreaterThanThreeStr := func (num *string) bool {
return *num > "3"
}
expectedNewList := []*string{&v2, &v3}
NewList := MakeStrSlicePtr([]*string{&v2, &v3, &v4}...).RemovePtr(isGreaterThanThreeStr)
if *NewList[0] != *expectedNewList[0] || *NewList[1] != *expectedNewList[1] {
t.Errorf("RemoveStrPtr failed. Expected New list=%v, actual list=%v", expectedNewList, NewList)
}
}`
	code = strings.ReplaceAll(code, t1, t2)
	// Floats do not support %: convert to int before the parity check.
	t1 = `isEvenFloat32 := func(num float32) bool {
return num%2 == 0
}`
	t2 = `isEvenFloat32 := func(num float32) bool {
return int(num)%2 == 0
}`
	code = strings.ReplaceAll(code, t1, t2)
	t1 = `isEvenFloat64 := func(num float64) bool {
return num%2 == 0
}`
	t2 = `isEvenFloat64 := func(num float64) bool {
return int(num)%2 == 0
}`
	code = strings.ReplaceAll(code, t1, t2)
	// DropWhile predicate for strings: no %, compare against literals.
	t1 = `func TestDropWhileStrMethodChain(t *testing.T) {
// Test : drop the numbers as long as condition match and returns remaining number in the list once condition fails
var v2 string = "2"
var v3 string = "3"
var v4 string = "4"
var v5 string = "5"
isEvenStr := func(num string) bool {
return num%2 == 0
}
expectedNewList := []string{v3, v4, v5}
NewList := MakeStrSlice([]string{v4, v2, v3, v4, v5}...).DropWhile(isEvenStr)
if NewList[0] != expectedNewList[0] || NewList[1] != expectedNewList[1] || NewList[2] != expectedNewList[2] {
t.Errorf("DropWhile>MethodChain failed. Expected New list=%v, actual list=%v", expectedNewList, NewList)
}
}`
	t2 = `func TestDropWhileStrMethodChain(t *testing.T) {
// Test : drop the numbers as long as condition match and returns remaining number in the list once condition fails
var v2 string = "2"
var v3 string = "3"
var v4 string = "4"
var v5 string = "5"
isEvenStr := func(num string) bool {
return num == "2" || num == "4"
}
expectedNewList := []string{v3, v4, v5}
NewList := MakeStrSlice([]string{v4, v2, v3, v4, v5}...).DropWhile(isEvenStr)
if NewList[0] != expectedNewList[0] || NewList[1] != expectedNewList[1] || NewList[2] != expectedNewList[2] {
t.Errorf("DropWhile>MethodChain failed. Expected New list=%v, actual list=%v", expectedNewList, NewList)
}
}`
	code = strings.ReplaceAll(code, t1, t2)
	t1 = `isEvenFloat32Ptr := func(num *float32) bool {
return *num%2 == 0
}`
	t2 = `isEvenFloat32Ptr := func(num *float32) bool {
return int(*num)%2 == 0
}`
	code = strings.ReplaceAll(code, t1, t2)
	t1 = `isEvenFloat64Ptr := func(num *float64) bool {
return *num%2 == 0
}`
	t2 = `isEvenFloat64Ptr := func(num *float64) bool {
return int(*num)%2 == 0
}`
	code = strings.ReplaceAll(code, t1, t2)
	// DropWhilePtr predicate for strings.
	t1 = `func TestDropWhileStrPtrMethodChain(t *testing.T) {
// Test : drop the numbers as long as condition match and returns remaining number in the list once condition fails
var v2 string = "2"
var v3 string = "3"
var v4 string = "4"
var v5 string = "5"
isEvenStrPtr := func(num *string) bool {
return *num%2 == 0
}
expectedNewList := []*string{&v3, &v4, &v5}
NewList := MakeStrSlicePtr([]*string{&v4, &v2, &v3, &v4, &v5}...).DropWhilePtr(isEvenStrPtr)
if *NewList[0] != *expectedNewList[0] || *NewList[1] != *expectedNewList[1] || *NewList[2] != *expectedNewList[2] {
t.Errorf("DropWhile>PtrMethodChain failed. Expected New list=%v, actual list=%v", expectedNewList, NewList)
}
}`
	t2 = `func TestDropWhileStrPtrMethodChain(t *testing.T) {
// Test : drop the numbers as long as condition match and returns remaining number in the list once condition fails
var v2 string = "2"
var v3 string = "3"
var v4 string = "4"
var v5 string = "5"
isEvenStrPtr := func(num *string) bool {
return *num == "2" || *num == "4"
}
expectedNewList := []*string{&v3, &v4, &v5}
NewList := MakeStrSlicePtr([]*string{&v4, &v2, &v3, &v4, &v5}...).DropWhilePtr(isEvenStrPtr)
if *NewList[0] != *expectedNewList[0] || *NewList[1] != *expectedNewList[1] || *NewList[2] != *expectedNewList[2] {
t.Errorf("DropWhile>PtrMethodChain failed. Expected New list=%v, actual list=%v", expectedNewList, NewList)
}
}`
	code = strings.ReplaceAll(code, t1, t2)
	// Distinct over strings: quote every literal.
	t1 = `// TestDistinctStrMethodChain -
func TestDistinctStrMethodChain(t *testing.T) {
// Test : Get distinct values
expected := []string{8, 2, 0}
list := []string{8, 2, 8, 0, 2, 0}
distinct := MakeStrSlice(list...).Distinct()
if len(distinct) != 3 || distinct[0] != 8 || distinct[1] != 2 || distinct[2] != 0 {
t.Errorf("DistinctInt failed. Expected=%v, actual=%v", expected, distinct)
}
expected = []string{8, 2, 0}
list = []string{8, 2, 0}
distinct = MakeStrSlice(list...).Distinct()
if len(distinct) != 3 || distinct[0] != 8 || distinct[1] != 2 || distinct[2] != 0 {
t.Errorf("DistinctInt failed. Expected=%v, actual=%v", expected, distinct)
}
expected = []string{}
list = []string{}
distinct = MakeStrSlice(list...).Distinct()
if len(distinct) != 0 {
t.Errorf("DistinctInt failed. Expected=%v, actual=%v", expected, distinct)
}
distinct = MakeStrSlice().Distinct()
if len(distinct) != 0 {
t.Errorf("DistinctInt failed. Expected=%v, actual=%v", expected, distinct)
}
}`
	t2 = `// TestDistinctStrMethodChain -
func TestDistinctStrMethodChain(t *testing.T) {
// Test : Get distinct values
expected := []string{"8", "2", "0"}
list := []string{"8", "2", "8", "0", "2", "0"}
distinct := MakeStrSlice(list...).Distinct()
if len(distinct) != 3 || distinct[0] != "8" || distinct[1] != "2" || distinct[2] != "0" {
t.Errorf("DistinctInt failed. Expected=%v, actual=%v", expected, distinct)
}
expected = []string{"8", "2", "0"}
list = []string{"8", "2", "0"}
distinct = MakeStrSlice(list...).Distinct()
if len(distinct) != 3 || distinct[0] != "8" || distinct[1] != "2" || distinct[2] != "0" {
t.Errorf("DistinctInt failed. Expected=%v, actual=%v", expected, distinct)
}
expected = []string{}
list = []string{}
distinct = MakeStrSlice(list...).Distinct()
if len(distinct) != 0 {
t.Errorf("DistinctInt failed. Expected=%v, actual=%v", expected, distinct)
}
distinct = MakeStrSlice().Distinct()
if len(distinct) != 0 {
t.Errorf("DistinctInt failed. Expected=%v, actual=%v", expected, distinct)
}
}`
	code = strings.ReplaceAll(code, t1, t2)
	// TakeWhile over strings: quote literals and fix the predicate.
	t1 = `func TestTakeWhileStrMethodChain(t *testing.T) {
isEvenMethodChain := func(num string) bool {
return num%2 == 0
}
// Test : Take the numbers as long as condition match
expectedNewList := []string{4, 2, 4}
NewList := MakeStrSlice([]string{4, 2, 4, 7, 5}...).TakeWhile(isEvenMethodChain)
if NewList[0] != expectedNewList[0] || NewList[1] != expectedNewList[1] || NewList[2] != expectedNewList[2] {
t.Errorf("TakeWhileInt failed. Expected New list=%v, actual list=%v", expectedNewList, NewList)
}
if len(MakeStrSlice().TakeWhile(nil)) > 0 {
t.Errorf("TakeWhileInt failed.")
}
if len(MakeStrSlice([]string{}...).TakeWhile(nil)) > 0 {
t.Errorf("TakeWhileInt failed.")
}
}`
	t2 = `func TestTakeWhileStrMethodChain(t *testing.T) {
isEvenMethodChain := func(num string) bool {
return num == "2" || num == "4"
}
// Test : Take the numbers as long as condition match
expectedNewList := []string{"4", "2", "4"}
NewList := MakeStrSlice([]string{"4", "2", "4", "7", "5"}...).TakeWhile(isEvenMethodChain)
if NewList[0] != expectedNewList[0] || NewList[1] != expectedNewList[1] || NewList[2] != expectedNewList[2] {
t.Errorf("TakeWhileInt failed. Expected New list=%v, actual list=%v", expectedNewList, NewList)
}
if len(MakeStrSlice().TakeWhile(nil)) > 0 {
t.Errorf("TakeWhileInt failed.")
}
if len(MakeStrSlice([]string{}...).TakeWhile(nil)) > 0 {
t.Errorf("TakeWhileInt failed.")
}
}`
	code = strings.ReplaceAll(code, t1, t2)
	t1 = `isEvenMethodChain := func(num float32) bool {
return num%2 == 0
}`
	t2 = `isEvenMethodChain := func(num float32) bool {
return int(num)%2 == 0
}`
	code = strings.ReplaceAll(code, t1, t2)
	t1 = `isEvenMethodChain := func(num float64) bool {
return num%2 == 0
}`
	t2 = `isEvenMethodChain := func(num float64) bool {
return int(num)%2 == 0
}`
	code = strings.ReplaceAll(code, t1, t2)
	// TakeWhilePtr over strings (note: t1 starts with a newline on purpose).
	t1 = `
// TestTakeWhileStrMethodChainPtr -
func TestTakeWhileStrMethodChainPtr(t *testing.T) {
isEvenMethodChain := func(num *string) bool {
return *num%2 == 0
}
var v2 string = "2"
var v4 string = "4"
var v5 string = "5"
var v7 string = "7"
// Test : Take the numbers as long as condition match
expectedNewList := []*string{&v4, &v2, &v4}
NewList := MakeStrSlicePtr([]*string{&v4, &v2, &v4, &v7, &v5}...).TakeWhilePtr(isEvenMethodChain)
if *NewList[0] != *expectedNewList[0] || *NewList[1] != *expectedNewList[1] || *NewList[2] != *expectedNewList[2] {
t.Errorf("TakeWhileStrmethodchain failed. Expected New list=%v, actual list=%v", expectedNewList, NewList)
}
if len(MakeStrSlicePtr().TakeWhilePtr(nil)) > 0 {
t.Errorf("TakeWhileStr failed.")
}
if len(MakeStrSlicePtr([]*string{}...).TakeWhilePtr(nil)) > 0 {
t.Errorf("TakeWhileStr failed.")
}
}`
	t2 = `// TestTakeWhileStrMethodChainPtr -
func TestTakeWhileStrMethodChainPtr(t *testing.T) {
isEvenMethodChain := func(num *string) bool {
return *num == "2" || *num == "4"
}
var v2 string = "2"
var v4 string = "4"
var v5 string = "5"
var v7 string = "7"
// Test : Take the numbers as long as condition match
expectedNewList := []*string{&v4, &v2, &v4}
NewList := MakeStrSlicePtr([]*string{&v4, &v2, &v4, &v7, &v5}...).TakeWhilePtr(isEvenMethodChain)
if *NewList[0] != *expectedNewList[0] || *NewList[1] != *expectedNewList[1] || *NewList[2] != *expectedNewList[2] {
t.Errorf("TakeWhileStrmethodchain failed. Expected New list=%v, actual list=%v", expectedNewList, NewList)
}
if len(MakeStrSlicePtr().TakeWhilePtr(nil)) > 0 {
t.Errorf("TakeWhileStr failed.")
}
if len(MakeStrSlicePtr([]*string{}...).TakeWhilePtr(nil)) > 0 {
t.Errorf("TakeWhileStr failed.")
}
}`
	code = strings.ReplaceAll(code, t1, t2)
	t1 = `isEvenMethodChain := func(num *float32) bool {
return *num%2 == 0
}`
	t2 = `isEvenMethodChain := func(num *float32) bool {
return int(*num)%2 == 0
}`
	code = strings.ReplaceAll(code, t1, t2)
	t1 = `isEvenMethodChain := func(num *float64) bool {
return *num%2 == 0
}`
	t2 = `isEvenMethodChain := func(num *float64) bool {
return int(*num)%2 == 0
}`
	code = strings.ReplaceAll(code, t1, t2)
	return code
}
package draw2dbase
import (
"github.com/elcamino/draw2d"
)
// Liner receive segment definition
// Implementations consume individual straight-line segments.
type Liner interface {
	// LineTo Draw a line from the current position to the point (x, y)
	LineTo(x, y float64)
}

// Flattener receive segment definition
// Implementations receive the fully flattened form of a path: straight
// segments plus the join/close/end events needed to render caps and joins.
type Flattener interface {
	// MoveTo Start a New line from the point (x, y)
	MoveTo(x, y float64)
	// LineTo Draw a line from the current position to the point (x, y)
	LineTo(x, y float64)
	// LineJoin use Round, Bevel or miter to join points
	LineJoin()
	// Close add the most recent starting point to close the path to create a polygon
	Close()
	// End mark the current line as finished so we can draw caps
	End()
}
// Flatten convert curves into straight segments keeping join segments info
// It walks path.Components and dispatches each command to the flattener;
// 'scale' controls the arc-tracing resolution. Curve commands are handed
// the point slice starting two coordinates back so the current position is
// included as the curve's start point.
func Flatten(path *draw2d.Path, flattener Flattener, scale float64) {
	// First Point
	var startX, startY float64 = 0, 0
	// Current Point
	var x, y float64 = 0, 0
	// i indexes into path.Points; each command consumes a fixed number of coordinates.
	i := 0
	for _, cmp := range path.Components {
		switch cmp {
		case draw2d.MoveToCmp:
			x, y = path.Points[i], path.Points[i+1]
			startX, startY = x, y
			// Finish the previous sub-path before starting a new one.
			if i != 0 {
				flattener.End()
			}
			flattener.MoveTo(x, y)
			i += 2
		case draw2d.LineToCmp:
			x, y = path.Points[i], path.Points[i+1]
			flattener.LineTo(x, y)
			flattener.LineJoin()
			i += 2
		case draw2d.QuadCurveToCmp:
			// Points[i-2:] = current point, control point, end point.
			TraceQuad(flattener, path.Points[i-2:], 0.5)
			x, y = path.Points[i+2], path.Points[i+3]
			flattener.LineTo(x, y)
			i += 4
		case draw2d.CubicCurveToCmp:
			// Points[i-2:] = current point, two control points, end point.
			TraceCubic(flattener, path.Points[i-2:], 0.5)
			x, y = path.Points[i+4], path.Points[i+5]
			flattener.LineTo(x, y)
			i += 6
		case draw2d.ArcToCmp:
			// TraceArc returns the last traced point so the current
			// position stays in sync with what was emitted.
			x, y = TraceArc(flattener, path.Points[i], path.Points[i+1], path.Points[i+2], path.Points[i+3], path.Points[i+4], path.Points[i+5], scale)
			flattener.LineTo(x, y)
			i += 6
		case draw2d.CloseCmp:
			// Close the polygon by returning to the sub-path start.
			flattener.LineTo(startX, startY)
			flattener.Close()
		}
	}
	flattener.End()
}
// Transformer apply the Matrix transformation tr
// It wraps another Flattener and maps every incoming point through the
// affine matrix Tr before forwarding the call.
type Transformer struct {
	Tr        draw2d.Matrix
	Flattener Flattener
}
// MoveTo transforms (x, y) by the affine matrix Tr and forwards the result
// to the wrapped Flattener as the start of a new sub-path.
func (t Transformer) MoveTo(x, y float64) {
	tx := t.Tr[0]*x + t.Tr[2]*y + t.Tr[4]
	ty := t.Tr[1]*x + t.Tr[3]*y + t.Tr[5]
	t.Flattener.MoveTo(tx, ty)
}
// LineTo transforms (x, y) by the affine matrix Tr and forwards the result
// to the wrapped Flattener as a line segment endpoint.
func (t Transformer) LineTo(x, y float64) {
	tx := t.Tr[0]*x + t.Tr[2]*y + t.Tr[4]
	ty := t.Tr[1]*x + t.Tr[3]*y + t.Tr[5]
	t.Flattener.LineTo(tx, ty)
}
// LineJoin forwards the join event to the wrapped Flattener; joins carry no
// coordinates, so no transformation is needed.
func (t Transformer) LineJoin() {
	t.Flattener.LineJoin()
}

// Close forwards the close-path event to the wrapped Flattener.
func (t Transformer) Close() {
	t.Flattener.Close()
}

// End forwards the end-of-line event to the wrapped Flattener.
func (t Transformer) End() {
	t.Flattener.End()
}
// SegmentedPath collects the flattened form of a path as a flat coordinate
// list (x0, y0, x1, y1, ...). It implements the Flattener interface; the
// structural events (join/close/end) are currently dropped.
type SegmentedPath struct {
	Points []float64
}

// MoveTo records (x, y) as the start of a new sub-path.
func (p *SegmentedPath) MoveTo(x, y float64) {
	p.Points = append(p.Points, x, y)
	// TODO need to mark this point as moveto
}

// LineTo records (x, y) as the next segment endpoint.
func (p *SegmentedPath) LineTo(x, y float64) {
	p.Points = append(p.Points, x, y)
}

// LineJoin is a no-op; join information is not recorded yet.
func (p *SegmentedPath) LineJoin() {
	// TODO need to mark the current point as linejoin
}

// Close is a no-op; close events are not recorded yet.
func (p *SegmentedPath) Close() {
	// TODO Close
}

// End is a no-op; the collected point list needs no finalization.
func (p *SegmentedPath) End() {
	// Nothing to do
}
package demod
import (
"log"
"math"
"math/cmplx"
"github.com/ktye/fft"
)
// iqToComplex128 converts interleaved unsigned 8-bit I/Q samples into
// complex values centered on zero: each byte is scaled by 1/127.5 and
// shifted by -1, mapping 0..255 onto roughly [-1, 1] per component.
func iqToComplex128(input []byte, output []complex128) {
	for k := range output {
		re := float64(input[2*k])/127.5 - 1
		im := float64(input[2*k+1])/127.5 - 1
		output[k] = complex(re, im)
	}
}
// mult calculates p = f1 * f2 element-wise. The slices must be
// preallocated and have len(f1) <= len(f2) and len(f1) <= len(p)
func mult(f1, f2, p []complex128) {
	for i, v := range f1 {
		p[i] = v * f2[i]
	}
}
// abs sets real(out) to the magnitude of the corresponding input sample and
// imag(out) to zero, i.e. it performs AM envelope detection on the buffer.
func abs(in []complex128, out []complex128) {
	for i, v := range in {
		out[i] = complex(cmplx.Abs(v), 0)
	}
}
// newNCO returns n unit-magnitude phasors rotating counter-clockwise at the
// normalized frequency w (cycles per sample). Multiplying a sampled signal
// by the result mixes it down by w.
func newNCO(w float64, n int) []complex128 {
	nco := make([]complex128, n)
	omega := 2 * math.Pi * w // radians per sample, hoisted out of the loop
	for i := range nco {
		// cmplx.Exp already returns complex128; the previous code wrapped
		// it in a redundant complex128(...) conversion.
		nco[i] = cmplx.Exp(complex(0, omega*float64(i)))
	}
	return nco
}
// history is the number of samples carried over between successive calls;
// it must be >= the order of the IIR filter used in lowpass.
const history = 3

// lowpass runs a 3rd-order Chebyshev Type II IIR low-pass filter over 'in',
// writing the result to 'out'. Both slices are 'history' samples longer than
// the payload: the leading entries hold state from the previous call so the
// filter response is continuous across buffer boundaries.
func lowpass(in, out []complex128) {
	// 3th order Chebychev Type II (IIR) filter, coeffs normalized (a0 = 1)
	// Filter design with scipy: b, a = signal.cheby2(order=3, rs=60, 25e3/(1310720/2), 'lowpass')
	// y[n] = b[0]*x[n] + b[1]*x[n-1] + b[2]*x[n-2] + b[3]*x[n-3] - a[1]*y[n-1] - a[2]*y[n-2] - a[3]*y[n-3]
	b0, b1, b2, b3 := 0.00017744+0i, -0.00017405+0i, -0.00017405+0i, 0.00017744+0i
	a1, a2, a3 := -2.96202704+0i, 2.92477167+0i, -0.96273785+0i
	// Direct-form difference equation, starting just after the carried state.
	for n := history; n < len(in); n = n + 1 {
		out[n] = b0*in[n] + b1*in[n-1] + b2*in[n-2] + b3*in[n-3] - a1*out[n-1] - a2*out[n-2] - a3*out[n-3]
	}
	// Copy the last 'history' samples from the end to the beginning of the 'in' and 'out' buffers.
	// This makes the previous input and output values available in the for loop above.
	src := len(out) - history
	dst := 0
	for src < len(out) {
		out[dst] = out[src]
		in[dst] = in[src]
		src = src + 1
		dst = dst + 1
	}
}
// Demodulator contains preallocated buffers and cached data for the demodulator
type Demodulator struct {
	n             int          // number of IQ samples processed per call
	iqData        []complex128 // input bytes converted to complex samples
	lpfIn, lpfOut []complex128 // low-pass filter buffers, 'history' samples longer than n
	fftData       []complex128 // AM-demodulated magnitudes fed to the FFT
	nco           []complex128 // mixing oscillator at -w (down-conversion)
	fft           fft.FFT
}
// NewDemodulator creates a Demodulator that mixes the input down by the
// normalized frequency w (cycles per sample) and processes numSamples
// samples per call.
//
// NOTE(review): log.Fatal terminates the whole process if fft.New rejects
// numSamples (e.g. an unsupported transform size); consider returning the
// error instead — that would change the signature, so callers must be
// migrated first.
func NewDemodulator(w float64, numSamples int) *Demodulator {
	f, err := fft.New(numSamples)
	if err != nil {
		log.Fatal("Error init FFT:", err)
	}
	return &Demodulator{
		iqData:  make([]complex128, numSamples),
		lpfIn:   make([]complex128, numSamples+history), // extra room for IIR state carry-over
		lpfOut:  make([]complex128, numSamples+history),
		nco:     newNCO(-w, numSamples), // negative frequency => down-conversion
		fftData: make([]complex128, numSamples),
		fft:     f,
		n:       numSamples,
	}
}
// Process input samples and calculate ILS measurements.
// The 'input' time period must be equal to 0.1 seconds.
// Returns the carrier power (dBFS), the difference and sum of the 90/150 Hz
// modulation depths (percent), and the ident-tone modulation depth.
func (d *Demodulator) Process(input []byte) (power, ddm, sdm, ident float64) {
	iqToComplex128(input, d.iqData)
	// Mix down to baseband with the NCO.
	mult(d.iqData, d.nco, d.lpfIn[history:history+d.n])
	//lowpass(d.lpfIn, d.lpfOut)
	// NOTE(review): with the low-pass stage above disabled, lpfOut is filled
	// directly from the mixer and the lpfIn fill is redundant — confirm
	// whether the filter is meant to be re-enabled.
	mult(d.iqData, d.nco, d.lpfOut[history:history+d.n])
	// TODO: subsample lpfOut here?
	abs(d.lpfOut[history:history+d.n], d.fftData) // Demodulate the AM signal
	s := d.fft.Transform(d.fftData)
	carrier := cmplx.Abs(s[0])
	// A 0.1 s window gives 10 Hz bin spacing: bin 15 = 150 Hz, bin 9 = 90 Hz,
	// bin 102 = 1020 Hz (ident tone). Each depth sums the positive- and
	// negative-frequency bins, normalized by the carrier, in percent.
	mod150 := (cmplx.Abs(s[15]) + cmplx.Abs(s[len(s)-15])) / carrier * 100
	mod90 := (cmplx.Abs(s[9]) + cmplx.Abs(s[len(s)-9])) / carrier * 100
	ddm = (mod150 - mod90) // 150 Hz dominance (DDM > 0): Fly UP/LEFT
	sdm = (mod150 + mod90)
	ident = (cmplx.Abs(s[102]) + cmplx.Abs(s[len(s)-102])) / carrier * 100
	carrier = carrier / float64(len(s))
	power = 20 * math.Log10(carrier) // Carrier power in dBFS
	return
}
// Process200ms computes the same ILS measurements as Process, but for an
// input window of 0.2 seconds: the bin spacing halves to 5 Hz, so every
// tone-bin index is doubled (30 -> 150 Hz, 18 -> 90 Hz, 204 -> 1020 Hz).
func (d *Demodulator) Process200ms(input []byte) (power, ddm, sdm, ident float64) {
	iqToComplex128(input, d.iqData)
	// Mix down to baseband with the NCO.
	mult(d.iqData, d.nco, d.lpfIn[history:history+d.n])
	//lowpass(d.lpfIn, d.lpfOut)
	// NOTE(review): the low-pass stage is disabled here too; see Process.
	mult(d.iqData, d.nco, d.lpfOut[history:history+d.n])
	// TODO: subsample lpfOut here?
	abs(d.lpfOut[history:history+d.n], d.fftData) // Demodulate the AM signal
	s := d.fft.Transform(d.fftData)
	carrier := cmplx.Abs(s[0])
	mod150 := (cmplx.Abs(s[15*2]) + cmplx.Abs(s[len(s)-15*2])) / carrier * 100
	mod90 := (cmplx.Abs(s[9*2]) + cmplx.Abs(s[len(s)-9*2])) / carrier * 100
	ddm = (mod150 - mod90) // 150 Hz dominance (DDM > 0): Fly UP/LEFT
	sdm = (mod150 + mod90)
	ident = (cmplx.Abs(s[102*2]) + cmplx.Abs(s[len(s)-102*2])) / carrier * 100
	carrier = carrier / float64(len(s))
	power = 20 * math.Log10(carrier) // Carrier power in dBFS
	return
}
// Process30ms of data
// Unlike Process/Process200ms, the low-pass filter stage is active here.
// NOTE(review): bins 5/3 mapping to 150/90 Hz implies ~30 Hz bin spacing,
// i.e. a window of ~33 ms rather than exactly 30 ms — confirm the intended
// sample count.
func (d *Demodulator) Process30ms(input []byte) (power, ddm, sdm, ident float64) {
	iqToComplex128(input, d.iqData)
	// Mix down to baseband, filter, then AM-demodulate.
	mult(d.iqData, d.nco, d.lpfIn[history:history+d.n])
	lowpass(d.lpfIn, d.lpfOut)
	abs(d.lpfOut[history:history+d.n], d.fftData) // Demodulate the AM signal
	s := d.fft.Transform(d.fftData)
	carrier := cmplx.Abs(s[0])
	mod150 := (cmplx.Abs(s[5]) + cmplx.Abs(s[len(s)-5])) / carrier * 100
	mod90 := (cmplx.Abs(s[3]) + cmplx.Abs(s[len(s)-3])) / carrier * 100
	ddm = (mod150 - mod90) // 150 Hz dominance (DDM > 0): Fly UP/LEFT
	sdm = (mod150 + mod90)
	ident = (cmplx.Abs(s[34]) + cmplx.Abs(s[len(s)-34])) / carrier * 100
	carrier = carrier / float64(len(s))
	power = 20 * math.Log10(carrier) // Carrier power in dBFS
	return
}
Package p256 Encapsulates secP256k1 elliptic curve.
*/
package p256
import (
"bytes"
"crypto/sha256"
"errors"
"math/big"
"strconv"
"github.com/tap-group/tdsvc/util"
)
var (
	// CURVE is the secp256k1 curve instance shared by all operations in
	// this package.
	CURVE = S256()
)

/*
Elliptic Curve Point struct.
X and Y are the affine coordinates of the point; the point at infinity is
represented by nil (or zero-valued) coordinates — see IsZero/SetInfinity.
*/
type P256 struct {
	X, Y *big.Int
}
// CommitG1 returns the Pedersen commitment x*G + r*h, where G is the curve
// base point and h is an independent generator. The error is always nil.
func CommitG1(x, r *big.Int, h *P256) (*P256, error) {
	commitment := new(P256).ScalarBaseMult(x)
	blinding := new(P256).ScalarMult(h, r)
	commitment.Add(commitment, blinding)
	return commitment, nil
}
/*
IsZero returns true if and only if the elliptic curve point is the point at infinity,
i.e. when either coordinate is nil or both coordinates equal zero.
*/
func (p *P256) IsZero() bool {
	if p.X == nil || p.Y == nil {
		return true
	}
	zero := new(big.Int)
	return p.X.Cmp(zero) == 0 && p.Y.Cmp(zero) == 0
}
// Equals reports whether p and q have identical affine coordinates.
func (p *P256) Equals(q *P256) bool {
	if p.X.Cmp(q.X) != 0 {
		return false
	}
	return p.Y.Cmp(q.Y) == 0
}
// Copy returns a new point holding deep copies of the coordinates of e.
func (e *P256) Copy() *P256 {
	x := new(big.Int).Set(e.X)
	y := new(big.Int).Set(e.Y)
	return &P256{x, y}
}
/*
Neg sets p to the additive inverse -a of the given elliptic curve point and
returns p. The inverse is computed as (N-1)*a, where N is the curve order;
the inverse of the point at infinity is the point at infinity.
*/
func (p *P256) Neg(a *P256) *P256 {
	if a.IsZero() {
		return p.SetInfinity()
	}
	one := new(big.Int).SetInt64(1)
	mone := new(big.Int).Sub(CURVE.N, one)
	// Fix: negate the argument a, not the receiver. The previous code called
	// p.ScalarMult(p, mone), which ignored a and returned the point at
	// infinity whenever the receiver was uninitialized.
	p.ScalarMult(a, mone)
	return p
}
/*
Add sets p to the sum a + b and returns p. If either operand is the point at
infinity, the result is the other operand. Input points must be distinct
(use Double for a == b).
*/
func (p *P256) Add(a, b *P256) *P256 {
	if a.IsZero() {
		p.X = b.X
		p.Y = b.Y
		return p
	} else if b.IsZero() {
		// Fix: when b is the identity the sum is a. The previous code copied
		// b here, which collapsed a + 0 to the point at infinity.
		p.X = a.X
		p.Y = a.Y
		return p
	}
	resx, resy := CURVE.Add(a.X, a.Y, b.X, b.Y)
	p.X = resx
	p.Y = resy
	return p
}
/*
Double returns 2*P, where P is the given elliptic curve point. Doubling the
point at infinity yields the point at infinity.
*/
func (p *P256) Double(a *P256) *P256 {
	if a.IsZero() {
		return p.SetInfinity()
	}
	p.X, p.Y = CURVE.Double(a.X, a.Y)
	return p
}
/*
ScalarMult encapsulates the scalar Multiplication Algorithm from secP256k1,
setting p = n*a and returning p. Multiplying the identity, or multiplying by
zero, yields the point at infinity; the scalar is reduced modulo the curve
order N before the multiplication.
*/
func (p *P256) ScalarMult(a *P256, n *big.Int) *P256 {
	if a.IsZero() {
		return p.SetInfinity()
	}
	cmp := n.Cmp(big.NewInt(0))
	if cmp == 0 {
		// n == 0 => identity element.
		return p.SetInfinity()
	}
	n = util.Mod(n, CURVE.N)
	bns := n.Bytes()
	resx, resy := CURVE.ScalarMult(a.X, a.Y, bns)
	p.X = resx
	p.Y = resy
	return p
}
/*
ScalarBaseMult returns the Scalar Multiplication by the base generator,
setting p = n*G and returning p. Multiplication by zero yields the point at
infinity; the scalar is reduced modulo the curve order N first.
*/
func (p *P256) ScalarBaseMult(n *big.Int) *P256 {
	cmp := n.Cmp(big.NewInt(0))
	if cmp == 0 {
		// n == 0 => identity element.
		return p.SetInfinity()
	}
	n = util.Mod(n, CURVE.N)
	bns := n.Bytes()
	resx, resy := CURVE.ScalarBaseMult(bns)
	p.X = resx
	p.Y = resy
	return p
}
/*
Multiply actually is reponsible for the addition of elliptic curve points.
The name here is to maintain compatibility with bn256 interface.
This algorithm verifies if the given elliptic curve points are equal, in which case it
returns the result of Double function, otherwise it returns the result of Add function.
Unlike Add, this method is therefore safe to call with a == b.
*/
func (p *P256) Multiply(a, b *P256) *P256 {
	// Identity handling: a + 0 = a and 0 + b = b.
	if a.IsZero() {
		p.X = b.X
		p.Y = b.Y
		return p
	} else if b.IsZero() {
		p.X = a.X
		p.Y = a.Y
		return p
	}
	// Equal points must be doubled; the chord-based addition formula
	// degenerates when both operands coincide.
	if a.X.Cmp(b.X) == 0 && a.Y.Cmp(b.Y) == 0 {
		resx, resy := CURVE.Double(a.X, a.Y)
		p.X = resx
		p.Y = resy
		return p
	}
	resx, resy := CURVE.Add(a.X, a.Y, b.X, b.Y)
	p.X = resx
	p.Y = resy
	return p
}
/*
SetInfinity sets the given elliptic curve point to the point at infinity.
The identity is represented by nil coordinates (see IsZero).
*/
func (p *P256) SetInfinity() *P256 {
	p.X = nil
	p.Y = nil
	return p
}
/*
String returns the readable representation of the given elliptic curve point, i.e.
the tuple formed by X and Y coordinates.
For the point at infinity both coordinates are nil; math/big documents that a
nil *Int is formatted as "<nil>", so this still yields a printable string.
*/
func (p *P256) String() string {
	return "P256(" + p.X.String() + "," + p.Y.String() + ")"
}
/*
MapToGroup is a hash function that returns a valid elliptic curve point given as
input a string. It is also known as hash-to-point and is used to obtain a generator
that has no known discrete-logarithm relation, addressing the concept of
NUMS (nothing up my sleeve).
This implementation is based on the paper:
Short signatures from the Weil pairing
Boneh, Lynn and Shacham
Journal of Cryptology, September 2004, Volume 17, Issue 4, pp 297–319

It hashes "<counter><m>" for counter = 0..255, interprets the digest as a
candidate x coordinate, and returns the first (x, sqrt(x^3+7)) that lies on
the curve. An error is returned if all 256 attempts fail.
*/
func MapToGroup(m string) (*P256, error) {
	var buf bytes.Buffer
	for i := 0; i < 256; i++ {
		buf.Reset()
		buf.WriteString(strconv.Itoa(i))
		buf.WriteString(m)
		x, _ := HashToInt(buf)
		x = util.Mod(x, CURVE.P)
		fx, _ := F(x)
		fx = util.Mod(fx, CURVE.P)
		// ModSqrt returns nil when fx is not a quadratic residue mod P.
		if y := fx.ModSqrt(fx, CURVE.P); y != nil {
			candidate := &P256{X: x, Y: y}
			if candidate.IsOnCurve() && !candidate.IsZero() {
				return candidate, nil
			}
		}
	}
	return nil, errors.New("failed to Hash-to-point")
}
/*
F receives a big integer x as input and returns x^3 + 7 mod CURVE.P (the
secp256k1 field prime) — the right-hand side of the curve equation
y^2 = x^3 + 7. Note: the body reduces modulo CURVE.P, not the group order.
The error result is always nil; it exists only to keep a uniform signature.
*/
func F(x *big.Int) (*big.Int, error) {
	// Compute x^2 mod P
	x3p7 := util.Multiply(x, x)
	x3p7 = util.Mod(x3p7, CURVE.P)
	// Compute x^3 mod P
	x3p7 = util.Multiply(x3p7, x)
	x3p7 = util.Mod(x3p7, CURVE.P)
	// Compute x^3 + 7 mod P
	x3p7 = util.Add(x3p7, new(big.Int).SetInt64(7))
	x3p7 = util.Mod(x3p7, CURVE.P)
	return x3p7, nil
}
/*
HashToInt computes the SHA-256 digest of the buffer contents and converts
it to a Zp element via util.FromByteArray.
*/
func HashToInt(b bytes.Buffer) (*big.Int, error) {
	// sha256.Sum256 replaces the previous New/Write/Sum sequence and the
	// redundant full-slice reslice (tmp := output[0:]) of the old code.
	digest := sha256.Sum256(b.Bytes())
	return util.FromByteArray(digest[:])
}
/*
IsOnCurve returns TRUE if and only if p has coordinates X and Y that satisfy the
Elliptic Curve equation: y^2 = x^3 + 7.
*/
func (p *P256) IsOnCurve() bool {
// y² = x³ + 7
y2 := new(big.Int).Mul(p.Y, p.Y)
y2.Mod(y2, CURVE.P)
x3 := new(big.Int).Mul(p.X, p.X)
x3.Mul(x3, p.X)
x3.Add(x3, new(big.Int).SetInt64(7))
x3.Mod(x3, CURVE.P)
return x3.Cmp(y2) == 0
} | crypto/p256/p256.go | 0.805211 | 0.462594 | p256.go | starcoder |
package iso20022
// SecuritiesFinancingTransactionDetails30 holds the details of the closing
// (second) leg of a securities financing transaction. Every field is an
// optional pointer so that absent values are omitted from the marshalled XML
// (all tags carry ",omitempty"). Generated ISO 20022 message component.
type SecuritiesFinancingTransactionDetails30 struct {

	// Unambiguous identification of the underlying securities financing trade as assigned by the instructing party. The identification is common to all collateral pieces (one or many).
	SecuritiesFinancingTradeIdentification *RestrictedFINXMax16Text `xml:"SctiesFincgTradId,omitempty"`

	// Unambiguous identification of the second leg of the transaction as known by the account owner (or the instructing party acting on its behalf).
	ClosingLegIdentification *RestrictedFINXMax16Text `xml:"ClsgLegId,omitempty"`

	// Closing date/time or maturity date/time of the transaction.
	TerminationDate *TerminationDate5Choice `xml:"TermntnDt,omitempty"`

	// Date/Time at which rate change has taken place.
	RateChangeDate *DateAndDateTimeChoice `xml:"RateChngDt,omitempty"`

	// Earliest date/time at which the call back can take place.
	EarliestCallBackDate *DateAndDateTimeChoice `xml:"EarlstCallBckDt,omitempty"`

	// Date/time at which the commission is calculated.
	CommissionCalculationDate *DateAndDateTimeChoice `xml:"ComssnClctnDt,omitempty"`

	// Specifies whether the rate is fixed or variable.
	RateType *RateType67Choice `xml:"RateTp,omitempty"`

	// Specifies whether the collateral position should be subject to automatic revaluation by the account servicer.
	Revaluation *RevaluationIndicator4Choice `xml:"Rvaltn,omitempty"`

	// Legal framework of the transaction.
	LegalFramework *LegalFramework4Choice `xml:"LglFrmwk,omitempty"`

	// Identifies the computation method of accrued interest of the related financial instrument.
	InterestComputationMethod *InterestComputationMethodFormat5Choice `xml:"IntrstCmptnMtd,omitempty"`

	// Specifies whether the maturity date of the securities financing transaction may be modified.
	MaturityDateModification *YesNoIndicator `xml:"MtrtyDtMod,omitempty"`

	// Specifies whether the interest is to be paid to the collateral taker. If set to no, the interest is paid to the collateral giver.
	InterestPayment *YesNoIndicator `xml:"IntrstPmt,omitempty"`

	// Index or support rate used together with the spread to calculate the
	// repurchase rate.
	VariableRateSupport *RateName2 `xml:"VarblRateSpprt,omitempty"`

	// Rate to be used to recalculate the repurchase amount.
	RepurchaseRate *Rate2 `xml:"RpRate,omitempty"`

	// Percentage mark-up on a loan consideration used to reflect the lender's risk.
	StockLoanMargin *Rate2 `xml:"StockLnMrgn,omitempty"`

	// Haircut or valuation factor on the security expressed as a percentage.
	SecuritiesHaircut *Rate2 `xml:"SctiesHrcut,omitempty"`

	// Interest rate paid in the context of a securities financing transaction.
	ChargesRate *Rate2 `xml:"ChrgsRate,omitempty"`

	// Interest rate to be paid on the transaction amount, as agreed between the counterparties.
	PricingRate *RateOrName2Choice `xml:"PricgRate,omitempty"`

	// Repurchase spread expressed as a rate; margin over or under an index that determines the repurchase rate.
	Spread *Rate2 `xml:"Sprd,omitempty"`

	// Minimum number of days' notice a counterparty needs for terminating the transaction.
	TransactionCallDelay *Exact3NumericText `xml:"TxCallDely,omitempty"`

	// Total number of collateral instructions involved in the transaction.
	TotalNumberOfCollateralInstructions *Exact3NumericText `xml:"TtlNbOfCollInstrs,omitempty"`

	// Principal amount of a trade (for second leg).
	DealAmount *AmountAndDirection59 `xml:"DealAmt,omitempty"`

	// Interest amount that has accrued in between coupon payment periods.
	AccruedInterestAmount *AmountAndDirection59 `xml:"AcrdIntrstAmt,omitempty"`

	// Fixed amount of money that has to be paid (instead of interest) in the case of a recall or at the closing date.
	ForfeitAmount *AmountAndDirection59 `xml:"FrftAmt,omitempty"`

	// Difference between the amount of money of the first leg and the amount of the second leg of the transaction.
	PremiumAmount *AmountAndDirection59 `xml:"PrmAmt,omitempty"`

	// Amount of money to be settled per piece of collateral to terminate the transaction.
	TerminationAmountPerPieceOfCollateral *AmountAndDirection59 `xml:"TermntnAmtPerPcOfColl,omitempty"`

	// Total amount of money to be settled to terminate the transaction.
	TerminationTransactionAmount *AmountAndDirection59 `xml:"TermntnTxAmt,omitempty"`

	// Provides additional information about the second leg in narrative form.
	SecondLegNarrative *RestrictedFINXMax140Text `xml:"ScndLegNrrtv,omitempty"`
}
// SetSecuritiesFinancingTradeIdentification stores value as the trade identification text.
func (s *SecuritiesFinancingTransactionDetails30) SetSecuritiesFinancingTradeIdentification(value string) {
	s.SecuritiesFinancingTradeIdentification = (*RestrictedFINXMax16Text)(&value)
}

// SetClosingLegIdentification stores value as the closing leg identification text.
func (s *SecuritiesFinancingTransactionDetails30) SetClosingLegIdentification(value string) {
	s.ClosingLegIdentification = (*RestrictedFINXMax16Text)(&value)
}

// AddTerminationDate allocates the TerminationDate choice and returns it for the caller to populate.
func (s *SecuritiesFinancingTransactionDetails30) AddTerminationDate() *TerminationDate5Choice {
	s.TerminationDate = new(TerminationDate5Choice)
	return s.TerminationDate
}

// AddRateChangeDate allocates the RateChangeDate choice and returns it for the caller to populate.
func (s *SecuritiesFinancingTransactionDetails30) AddRateChangeDate() *DateAndDateTimeChoice {
	s.RateChangeDate = new(DateAndDateTimeChoice)
	return s.RateChangeDate
}

// AddEarliestCallBackDate allocates the EarliestCallBackDate choice and returns it for the caller to populate.
func (s *SecuritiesFinancingTransactionDetails30) AddEarliestCallBackDate() *DateAndDateTimeChoice {
	s.EarliestCallBackDate = new(DateAndDateTimeChoice)
	return s.EarliestCallBackDate
}

// AddCommissionCalculationDate allocates the CommissionCalculationDate choice and returns it for the caller to populate.
func (s *SecuritiesFinancingTransactionDetails30) AddCommissionCalculationDate() *DateAndDateTimeChoice {
	s.CommissionCalculationDate = new(DateAndDateTimeChoice)
	return s.CommissionCalculationDate
}

// AddRateType allocates the RateType choice and returns it for the caller to populate.
func (s *SecuritiesFinancingTransactionDetails30) AddRateType() *RateType67Choice {
	s.RateType = new(RateType67Choice)
	return s.RateType
}

// AddRevaluation allocates the Revaluation indicator choice and returns it for the caller to populate.
func (s *SecuritiesFinancingTransactionDetails30) AddRevaluation() *RevaluationIndicator4Choice {
	s.Revaluation = new(RevaluationIndicator4Choice)
	return s.Revaluation
}

// AddLegalFramework allocates the LegalFramework choice and returns it for the caller to populate.
func (s *SecuritiesFinancingTransactionDetails30) AddLegalFramework() *LegalFramework4Choice {
	s.LegalFramework = new(LegalFramework4Choice)
	return s.LegalFramework
}

// AddInterestComputationMethod allocates the InterestComputationMethod choice and returns it for the caller to populate.
func (s *SecuritiesFinancingTransactionDetails30) AddInterestComputationMethod() *InterestComputationMethodFormat5Choice {
	s.InterestComputationMethod = new(InterestComputationMethodFormat5Choice)
	return s.InterestComputationMethod
}

// SetMaturityDateModification stores value as the maturity-date-modification yes/no indicator.
func (s *SecuritiesFinancingTransactionDetails30) SetMaturityDateModification(value string) {
	s.MaturityDateModification = (*YesNoIndicator)(&value)
}

// SetInterestPayment stores value as the interest-payment yes/no indicator.
func (s *SecuritiesFinancingTransactionDetails30) SetInterestPayment(value string) {
	s.InterestPayment = (*YesNoIndicator)(&value)
}
// AddVariableRateSupport allocates the VariableRateSupport component and returns it for the caller to populate.
func (s *SecuritiesFinancingTransactionDetails30) AddVariableRateSupport() *RateName2 {
	s.VariableRateSupport = new(RateName2)
	return s.VariableRateSupport
}

// AddRepurchaseRate allocates the RepurchaseRate component and returns it for the caller to populate.
func (s *SecuritiesFinancingTransactionDetails30) AddRepurchaseRate() *Rate2 {
	s.RepurchaseRate = new(Rate2)
	return s.RepurchaseRate
}

// AddStockLoanMargin allocates the StockLoanMargin component and returns it for the caller to populate.
func (s *SecuritiesFinancingTransactionDetails30) AddStockLoanMargin() *Rate2 {
	s.StockLoanMargin = new(Rate2)
	return s.StockLoanMargin
}

// AddSecuritiesHaircut allocates the SecuritiesHaircut component and returns it for the caller to populate.
func (s *SecuritiesFinancingTransactionDetails30) AddSecuritiesHaircut() *Rate2 {
	s.SecuritiesHaircut = new(Rate2)
	return s.SecuritiesHaircut
}

// AddChargesRate allocates the ChargesRate component and returns it for the caller to populate.
func (s *SecuritiesFinancingTransactionDetails30) AddChargesRate() *Rate2 {
	s.ChargesRate = new(Rate2)
	return s.ChargesRate
}

// AddPricingRate allocates the PricingRate choice and returns it for the caller to populate.
func (s *SecuritiesFinancingTransactionDetails30) AddPricingRate() *RateOrName2Choice {
	s.PricingRate = new(RateOrName2Choice)
	return s.PricingRate
}

// AddSpread allocates the Spread component and returns it for the caller to populate.
func (s *SecuritiesFinancingTransactionDetails30) AddSpread() *Rate2 {
	s.Spread = new(Rate2)
	return s.Spread
}

// SetTransactionCallDelay stores value as the transaction call delay (3-digit numeric text).
func (s *SecuritiesFinancingTransactionDetails30) SetTransactionCallDelay(value string) {
	s.TransactionCallDelay = (*Exact3NumericText)(&value)
}

// SetTotalNumberOfCollateralInstructions stores value as the collateral instruction count (3-digit numeric text).
func (s *SecuritiesFinancingTransactionDetails30) SetTotalNumberOfCollateralInstructions(value string) {
	s.TotalNumberOfCollateralInstructions = (*Exact3NumericText)(&value)
}

// AddDealAmount allocates the DealAmount component and returns it for the caller to populate.
func (s *SecuritiesFinancingTransactionDetails30) AddDealAmount() *AmountAndDirection59 {
	s.DealAmount = new(AmountAndDirection59)
	return s.DealAmount
}

// AddAccruedInterestAmount allocates the AccruedInterestAmount component and returns it for the caller to populate.
func (s *SecuritiesFinancingTransactionDetails30) AddAccruedInterestAmount() *AmountAndDirection59 {
	s.AccruedInterestAmount = new(AmountAndDirection59)
	return s.AccruedInterestAmount
}

// AddForfeitAmount allocates the ForfeitAmount component and returns it for the caller to populate.
func (s *SecuritiesFinancingTransactionDetails30) AddForfeitAmount() *AmountAndDirection59 {
	s.ForfeitAmount = new(AmountAndDirection59)
	return s.ForfeitAmount
}

// AddPremiumAmount allocates the PremiumAmount component and returns it for the caller to populate.
func (s *SecuritiesFinancingTransactionDetails30) AddPremiumAmount() *AmountAndDirection59 {
	s.PremiumAmount = new(AmountAndDirection59)
	return s.PremiumAmount
}

// AddTerminationAmountPerPieceOfCollateral allocates the per-piece termination amount and returns it for the caller to populate.
func (s *SecuritiesFinancingTransactionDetails30) AddTerminationAmountPerPieceOfCollateral() *AmountAndDirection59 {
	s.TerminationAmountPerPieceOfCollateral = new(AmountAndDirection59)
	return s.TerminationAmountPerPieceOfCollateral
}

// AddTerminationTransactionAmount allocates the total termination amount and returns it for the caller to populate.
func (s *SecuritiesFinancingTransactionDetails30) AddTerminationTransactionAmount() *AmountAndDirection59 {
	s.TerminationTransactionAmount = new(AmountAndDirection59)
	return s.TerminationTransactionAmount
}

// SetSecondLegNarrative stores value as the free-form second-leg narrative text.
func (s *SecuritiesFinancingTransactionDetails30) SetSecondLegNarrative(value string) {
	s.SecondLegNarrative = (*RestrictedFINXMax140Text)(&value)
}
package index
import (
"fmt"
"sync"
"github.com/phoreproject/synapse/chainhash"
"github.com/phoreproject/synapse/primitives"
"github.com/prysmaticlabs/go-ssz"
)
// ShardBlockNode is a block node in the shard chain.
type ShardBlockNode struct {
	// Parent is the previous block's node; nil for the genesis block.
	Parent *ShardBlockNode
	// BlockHash is the SSZ hash-tree-root of the block.
	BlockHash chainhash.Hash
	// StateRoot is the state root recorded in the block header.
	StateRoot chainhash.Hash
	// Slot is the slot the block was proposed in.
	Slot uint64
	// Height is the chain height (parent height + 1; genesis is 0).
	Height uint64
}
// GetAncestorAtSlot gets the first block node that occurred at or before a
// certain slot, or nil when the node itself is already older than the slot.
func (node *ShardBlockNode) GetAncestorAtSlot(slot uint64) *ShardBlockNode {
	if node.Slot < slot {
		return nil
	}
	for cur := node; ; cur = cur.Parent {
		if cur == nil || cur.Slot <= slot {
			return cur
		}
	}
}
// GetClosestAncestorAtSlot gets the closest ancestor at or before a certain
// slot, falling back to the node itself when no such ancestor exists.
func (node *ShardBlockNode) GetClosestAncestorAtSlot(slot uint64) *ShardBlockNode {
	if ancestor := node.GetAncestorAtSlot(slot); ancestor != nil {
		return ancestor
	}
	return node
}
// GetAncestorAtHeight gets the first block node that occurred at a certain
// height, or nil when the node is below that height.
func (node *ShardBlockNode) GetAncestorAtHeight(height uint64) *ShardBlockNode {
	if node.Height < height {
		return nil
	}
	for cur := node; ; cur = cur.Parent {
		if cur == nil || cur.Height <= height {
			return cur
		}
	}
}
// ShardBlockIndex keeps a map of block hash to block.
type ShardBlockIndex struct {
	// Lock guards Index; all accessors below take it.
	Lock *sync.RWMutex
	// Index maps a block's hash-tree-root to its node.
	Index map[chainhash.Hash]*ShardBlockNode
}
// AddToIndex adds a block to the block index and returns its new node.
// It fails when the block's parent is not already indexed or when the
// block cannot be hashed.
func (i *ShardBlockIndex) AddToIndex(block primitives.ShardBlock) (*ShardBlockNode, error) {
	i.Lock.Lock()
	defer i.Lock.Unlock()
	parent, ok := i.Index[block.Header.PreviousBlockHash]
	if !ok {
		return nil, fmt.Errorf("missing parent block %s", block.Header.PreviousBlockHash)
	}
	hash, err := ssz.HashTreeRoot(block)
	if err != nil {
		return nil, err
	}
	n := &ShardBlockNode{
		Parent:    parent,
		BlockHash: hash,
		StateRoot: block.Header.StateRoot,
		Slot:      block.Header.Slot,
		Height:    parent.Height + 1,
	}
	i.Index[hash] = n
	return n, nil
}
// HasBlock returns true if the block index contains a block with this hash.
func (i *ShardBlockIndex) HasBlock(hash chainhash.Hash) bool {
	i.Lock.RLock()
	defer i.Lock.RUnlock()
	_, ok := i.Index[hash]
	return ok
}
// GetNodeByHash gets a block node by hash, or an error when the hash is
// not indexed.
func (i *ShardBlockIndex) GetNodeByHash(h *chainhash.Hash) (*ShardBlockNode, error) {
	i.Lock.RLock()
	defer i.Lock.RUnlock()
	if node, ok := i.Index[*h]; ok {
		return node, nil
	}
	return nil, fmt.Errorf("do not have block with hash %s", h)
}
// NewShardBlockIndex creates a new block index.
func NewShardBlockIndex(genesisBlock primitives.ShardBlock) *ShardBlockIndex {
index := &ShardBlockIndex{
Index: make(map[chainhash.Hash]*ShardBlockNode),
Lock: new(sync.RWMutex),
}
genesisHash, err := ssz.HashTreeRoot(genesisBlock)
if err != nil {
panic(err)
}
index.Index[genesisHash] = &ShardBlockNode{
Parent: nil,
BlockHash: genesisHash,
StateRoot: genesisBlock.Header.StateRoot,
Slot: 0,
Height: 0,
}
return index
} | shard/chain/index/index.go | 0.674479 | 0.40645 | index.go | starcoder |
package util
import (
"errors"
"fmt"
"log"
"math"
"time"
"github.com/cmoscofian/meliponto/src/context"
"github.com/cmoscofian/meliponto/src/util/constants"
)
// holidays caches the holiday dates configured in the application context;
// today holds the current date truncated to a day boundary. Both are
// populated exactly once by init below.
var holidays []time.Time
var today time.Time

// init loads the configured holidays and computes today's date.
// NOTE(review): time.Truncate(24h) truncates relative to the zero time in
// UTC, so near midnight in a non-UTC zone "today" may not match the local
// calendar day — confirm this is intended.
func init() {
	ctx := context.Create()
	for _, d := range ctx.Holidays {
		day, err := time.Parse(constants.DateInputLayout, d)
		if err != nil {
			// A malformed holiday entry is fatal: holiday checks would
			// silently be wrong without it.
			log.Fatalln(err)
		}
		holidays = append(holidays, day)
	}
	d := 24 * time.Hour
	today = time.Now().Truncate(d)
}
// IsSameDay reports whether day1 and day2 have same
// day, month and year. It does not check for hours, minutes
// and seconds.
func IsSameDay(day1, day2 time.Time) bool {
y1, m1, d1 := day1.Date()
y2, m2, d2 := day2.Date()
return y1 == y2 && m1 == m2 && d1 == d2
}
// IsWeekday reports whether date is a valid weekday
// neither saturday nor sunday.
func IsWeekday(date time.Time) bool {
return !IsSaturday(date) && !IsSunday(date)
}
// IsSaturday reports whether date is a saturday
func IsSaturday(date time.Time) bool {
date.IsZero()
return date.Weekday() == time.Saturday
}
// IsSunday reports whether date is a saturday
func IsSunday(date time.Time) bool {
return date.Weekday() == time.Sunday
}
// IsHoliday reports whether date matches one of the holidays loaded from
// the config file (compared by calendar day only).
func IsHoliday(date time.Time) bool {
	for _, h := range holidays {
		hy, hm, hd := h.Date()
		dy, dm, dd := date.Date()
		if dy == hy && dm == hm && dd == hd {
			return true
		}
	}
	return false
}
// IsRegularDay reports whether date is an ordinary working day: a weekday
// (neither Saturday nor Sunday) that is not a configured holiday.
func IsRegularDay(date time.Time) bool {
	if !IsWeekday(date) {
		return false
	}
	return !IsHoliday(date)
}
// IsWeekHoliday reports whether date is a configured holiday that falls on
// a weekday (neither Saturday nor Sunday).
func IsWeekHoliday(date time.Time) bool {
	if !IsWeekday(date) {
		return false
	}
	return IsHoliday(date)
}
// ParseFlagDate parses date in the configured input layout and returns it
// as a time.Time. It returns an error when the string does not parse, or
// when the parsed date is not strictly in the past (before today).
func ParseFlagDate(date string) (time.Time, error) {
	parsed, err := time.Parse(constants.DateInputLayout, date)
	if err != nil {
		return parsed, errors.New(constants.InvalidDateError)
	}
	// Equivalent to After(today) || Equal(today): the date must be
	// strictly before today.
	if !parsed.Before(today) {
		return parsed, errors.New(constants.PastDateError)
	}
	return parsed, nil
}
// ParseFlagDatetime parses date and hour (in the configured date and time
// layouts) into a single time.Time. It returns the parse error when the
// strings are invalid, and an error when the resulting instant lies in
// the future.
func ParseFlagDatetime(date, hour string) (time.Time, error) {
	layout := fmt.Sprintf("%s %s", constants.DateInputLayout, constants.TimeLayout)
	value, err := time.Parse(layout, fmt.Sprintf("%s %s", date, hour))
	// Check the parse error before validating the value; the previous
	// version ran the future-date check on a possibly zero time first.
	if err != nil {
		return value, err
	}
	if value.After(time.Now()) {
		return value, errors.New(constants.PastDateTimeError)
	}
	return value, nil
}
// RangeBetweenDatesInDays returns start and end as time.Time formats an int
// representing the range size in days and an error.
// It will attempt to parse start and end date given that they are in a valid format
// ('d-m-yy') to time.Time format and calculate the range in days between them.
func RangeBetweenDatesInDays(start, end string) (time.Time, time.Time, int, error) {
d1, err := ParseFlagDate(start)
if err != nil {
return d1, time.Now(), -1, err
}
d2, err := ParseFlagDate(end)
if err != nil {
return d1, d2, -1, err
}
if d1.After(d2) {
return d1, d2, -1, errors.New(constants.EndAfterBeginDateError)
}
duration := d2.Sub(d1)
return d1, d2, int(math.Ceil(duration.Hours()/24)) + 1, nil
} | src/util/datetime.go | 0.678327 | 0.427277 | datetime.go | starcoder |
package quadtree
import (
"fmt"
)
// Box defines an axis-aligned bounding box using the (x,y) value of the
// bottom, left corner along with the width and height of the box.
type Box struct {
	x      int
	y      int
	width  int
	height int
}

// String renders the box as its bottom-left and top-right corners.
func (b *Box) String() string {
	return fmt.Sprintf("(%d, %d), (%d, %d)", b.Left(), b.Bottom(), b.Right(), b.Top())
}

// Left returns the x-value of the left side of the box.
func (b *Box) Left() int { return b.x }

// Right returns the x-value of the right side of the box.
func (b *Box) Right() int { return b.x + b.width }

// Top returns the y-value of the top side of the box.
func (b *Box) Top() int { return b.y + b.height }

// Bottom returns the y-value of the bottom side of the box.
func (b *Box) Bottom() int { return b.y }

// CenterX returns the x value of the center of the box (integer division).
func (b *Box) CenterX() int { return b.x + b.width/2 }

// CenterY returns the y value of the center of the box (integer division).
func (b *Box) CenterY() int { return b.y + b.height/2 }

// Quarter splits a box into its four quadrants starting at the top right
// quadrant and going counter-clockwise. Note that with odd dimensions the
// half-sizes truncate, so the quadrants may not tile the box exactly.
func (b *Box) Quarter() [4]*Box {
	halfW, halfH := b.width/2, b.height/2
	left, bottom := b.Left(), b.Bottom()
	return [4]*Box{
		NewBox(left+halfW, bottom+halfH, halfW, halfH), // top right
		NewBox(left, bottom+halfH, halfW, halfH),       // top left
		NewBox(left, bottom, halfW, halfH),             // bottom left
		NewBox(left+halfW, bottom, halfW, halfH),       // bottom right
	}
}

// Contains reports whether c lies entirely inside b (edges may touch).
func (b *Box) Contains(c *Box) bool {
	return b.Left() <= c.Left() && c.Right() <= b.Right() &&
		b.Bottom() <= c.Bottom() && c.Top() <= b.Top()
}

// ContainsCenter reports whether the center point of c lies inside b.
func (b *Box) ContainsCenter(c *Box) bool {
	cx, cy := c.CenterX(), c.CenterY()
	return b.Left() <= cx && cx <= b.Right() &&
		b.Bottom() <= cy && cy <= b.Top()
}

// Intersects reports whether b and c strictly overlap; boxes that merely
// share an edge do not intersect.
func (b *Box) Intersects(c *Box) bool {
	return b.Left() < c.Right() && c.Left() < b.Right() &&
		b.Bottom() < c.Top() && c.Bottom() < b.Top()
}

// NewBox creates a new Box from its bottom-left corner and size.
func NewBox(x, y, width, height int) *Box {
	return &Box{x: x, y: y, width: width, height: height}
}
package vcl
import (
. "github.com/ying32/govcl/vcl/api"
. "github.com/ying32/govcl/vcl/types"
"unsafe"
)
// TBevel wraps the native LCL TBevel control. instance holds the pointer
// to the underlying native object.
type TBevel struct {
	IControl
	instance uintptr
	// Used in special situations, mainly to cope with Go's GC; it has little to do with the LCL itself.
	ptr unsafe.Pointer
}

// CN: 创建一个新的对象。
// EN: Create a new object.
func NewBevel(owner IComponent) *TBevel {
	b := new(TBevel)
	b.instance = Bevel_Create(CheckPtr(owner))
	b.ptr = unsafe.Pointer(b.instance)
	// Not enabled, because it is unknown what would happen...
	// runtime.SetFinalizer(b, (*TBevel).Free)
	return b
}

// CN: 动态转换一个已存在的对象实例。或者使用Obj.As().<目标对象>。
// EN: Dynamically convert an existing object instance. Or use Obj.As().<Target object>.
func AsBevel(obj interface{}) *TBevel {
	instance, ptr := getInstance(obj)
	if instance == 0 { return nil }
	return &TBevel{instance: instance, ptr: ptr}
}

// -------------------------- Deprecated begin --------------------------
// CN: 新建一个对象来自已经存在的对象实例指针。
// EN: Create a new object from an existing object instance pointer.
// Deprecated: use AsBevel.
func BevelFromInst(inst uintptr) *TBevel {
	return AsBevel(inst)
}

// CN: 新建一个对象来自已经存在的对象实例。
// EN: Create a new object from an existing object instance.
// Deprecated: use AsBevel.
func BevelFromObj(obj IObject) *TBevel {
	return AsBevel(obj)
}

// CN: 新建一个对象来自不安全的地址。注意:使用此函数可能造成一些不明情况,慎用。
// EN: Create a new object from an unsecured address. Note: Using this function may cause some unclear situations and be used with caution..
// Deprecated: use AsBevel.
func BevelFromUnsafePointer(ptr unsafe.Pointer) *TBevel {
	return AsBevel(ptr)
}

// -------------------------- Deprecated end --------------------------
// CN: 释放对象。
// EN: Free object.
// Guarded by the instance check, so calling Free more than once is harmless.
func (b *TBevel) Free() {
	if b.instance != 0 {
		Bevel_Free(b.instance)
		b.instance, b.ptr = 0, nullptr
	}
}

// CN: 返回对象实例指针。
// EN: Return object instance pointer.
func (b *TBevel) Instance() uintptr {
	return b.instance
}

// CN: 获取一个不安全的地址。
// EN: Get an unsafe address.
func (b *TBevel) UnsafeAddr() unsafe.Pointer {
	return b.ptr
}

// CN: 检测地址是否为空。
// EN: Check if the address is empty.
func (b *TBevel) IsValid() bool {
	return b.instance != 0
}

// CN: 检测当前对象是否继承自目标对象。
// EN: Checks whether the current object is inherited from the target object.
func (b *TBevel) Is() TIs {
	return TIs(b.instance)
}

// CN: 动态转换当前对象为目标对象。
// EN: Dynamically convert the current object to the target object.
//func (b *TBevel) As() TAs {
// return TAs(b.instance)
//}

// CN: 获取类信息指针。
// EN: Get class information pointer.
func TBevelClass() TClass {
	return Bevel_StaticClassType()
}

// CN: 将控件置于最前。
// EN: Bring the control to the front.
func (b *TBevel) BringToFront() {
	Bevel_BringToFront(b.instance)
}

// CN: 将客户端坐标转为绝对的屏幕坐标。
// EN: Convert client coordinates to absolute screen coordinates.
func (b *TBevel) ClientToScreen(Point TPoint) TPoint {
	return Bevel_ClientToScreen(b.instance, Point)
}

// CN: 将客户端坐标转为父容器坐标。
// EN: Convert client coordinates to parent container coordinates.
func (b *TBevel) ClientToParent(Point TPoint, AParent IWinControl) TPoint {
	return Bevel_ClientToParent(b.instance, Point , CheckPtr(AParent))
}

// CN: 是否在拖拽中。
// EN: Is it in the middle of dragging.
func (b *TBevel) Dragging() bool {
	return Bevel_Dragging(b.instance)
}

// CN: 是否有父容器。
// EN: Is there a parent container.
func (b *TBevel) HasParent() bool {
	return Bevel_HasParent(b.instance)
}

// CN: 隐藏控件。
// EN: Hidden control.
func (b *TBevel) Hide() {
	Bevel_Hide(b.instance)
}

// CN: 要求重绘。
// EN: Redraw.
func (b *TBevel) Invalidate() {
	Bevel_Invalidate(b.instance)
}

// CN: 发送一个消息。
// EN: Send a message.
func (b *TBevel) Perform(Msg uint32, WParam uintptr, LParam int) int {
	return Bevel_Perform(b.instance, Msg , WParam , LParam)
}

// CN: 刷新控件。
// EN: Refresh control.
func (b *TBevel) Refresh() {
	Bevel_Refresh(b.instance)
}

// CN: 重绘。
// EN: Repaint.
func (b *TBevel) Repaint() {
	Bevel_Repaint(b.instance)
}

// CN: 将屏幕坐标转为客户端坐标。
// EN: Convert screen coordinates to client coordinates.
func (b *TBevel) ScreenToClient(Point TPoint) TPoint {
	return Bevel_ScreenToClient(b.instance, Point)
}

// CN: 将父容器坐标转为客户端坐标。
// EN: Convert parent container coordinates to client coordinates.
func (b *TBevel) ParentToClient(Point TPoint, AParent IWinControl) TPoint {
	return Bevel_ParentToClient(b.instance, Point , CheckPtr(AParent))
}

// CN: 控件至于最后面。
// EN: The control is placed at the end.
func (b *TBevel) SendToBack() {
	Bevel_SendToBack(b.instance)
}

// CN: 设置组件边界。
// EN: Set component boundaries.
func (b *TBevel) SetBounds(ALeft int32, ATop int32, AWidth int32, AHeight int32) {
	Bevel_SetBounds(b.instance, ALeft , ATop , AWidth , AHeight)
}

// CN: 显示控件。
// EN: Show control.
func (b *TBevel) Show() {
	Bevel_Show(b.instance)
}

// CN: 控件更新。
// EN: Update.
func (b *TBevel) Update() {
	Bevel_Update(b.instance)
}

// CN: 获取控件的字符,如果有。
// EN: Get the characters of the control, if any.
func (b *TBevel) GetTextBuf(Buffer *string, BufSize int32) int32 {
	return Bevel_GetTextBuf(b.instance, Buffer , BufSize)
}

// CN: 获取控件的字符长,如果有。
// EN: Get the character length of the control, if any.
func (b *TBevel) GetTextLen() int32 {
	return Bevel_GetTextLen(b.instance)
}

// CN: 设置控件字符,如果有。
// EN: Set control characters, if any.
func (b *TBevel) SetTextBuf(Buffer string) {
	Bevel_SetTextBuf(b.instance, Buffer)
}
// CN: 查找指定名称的组件。
// EN: Find the component with the specified name.
func (b *TBevel) FindComponent(AName string) *TComponent {
	return AsComponent(Bevel_FindComponent(b.instance, AName))
}

// CN: 获取类名路径。
// EN: Get the class name path.
func (b *TBevel) GetNamePath() string {
	return Bevel_GetNamePath(b.instance)
}

// CN: 复制一个对象,如果对象实现了此方法的话。
// EN: Copy an object, if the object implements this method.
func (b *TBevel) Assign(Source IObject) {
	Bevel_Assign(b.instance, CheckPtr(Source))
}

// CN: 获取类的类型信息。
// EN: Get class type information.
func (b *TBevel) ClassType() TClass {
	return Bevel_ClassType(b.instance)
}

// CN: 获取当前对象类名称。
// EN: Get the current object class name.
func (b *TBevel) ClassName() string {
	return Bevel_ClassName(b.instance)
}

// CN: 获取当前对象实例大小。
// EN: Get the current object instance size.
func (b *TBevel) InstanceSize() int32 {
	return Bevel_InstanceSize(b.instance)
}

// CN: 判断当前类是否继承自指定类。
// EN: Determine whether the current class inherits from the specified class.
func (b *TBevel) InheritsFrom(AClass TClass) bool {
	return Bevel_InheritsFrom(b.instance, AClass)
}

// CN: 与一个对象进行比较。
// EN: Compare with an object.
func (b *TBevel) Equals(Obj IObject) bool {
	return Bevel_Equals(b.instance, CheckPtr(Obj))
}

// CN: 获取类的哈希值。
// EN: Get the hash value of the class.
func (b *TBevel) GetHashCode() int32 {
	return Bevel_GetHashCode(b.instance)
}

// CN: 文本类信息。
// EN: Text information.
func (b *TBevel) ToString() string {
	return Bevel_ToString(b.instance)
}

// AnchorToNeighbour anchors the given side to a sibling control with the given spacing.
func (b *TBevel) AnchorToNeighbour(ASide TAnchorKind, ASpace int32, ASibling IControl) {
	Bevel_AnchorToNeighbour(b.instance, ASide , ASpace , CheckPtr(ASibling))
}

// AnchorParallel anchors the given side parallel to the same side of a sibling control.
func (b *TBevel) AnchorParallel(ASide TAnchorKind, ASpace int32, ASibling IControl) {
	Bevel_AnchorParallel(b.instance, ASide , ASpace , CheckPtr(ASibling))
}

// CN: 置于指定控件的横向中心。
// EN: Anchor to the horizontal center of the specified control.
func (b *TBevel) AnchorHorizontalCenterTo(ASibling IControl) {
	Bevel_AnchorHorizontalCenterTo(b.instance, CheckPtr(ASibling))
}

// CN: 置于指定控件的纵向中心。
// EN: Anchor to the vertical center of the specified control.
func (b *TBevel) AnchorVerticalCenterTo(ASibling IControl) {
	Bevel_AnchorVerticalCenterTo(b.instance, CheckPtr(ASibling))
}

// AnchorAsAlign applies anchoring equivalent to the given alignment with the given spacing.
func (b *TBevel) AnchorAsAlign(ATheAlign TAlign, ASpace int32) {
	Bevel_AnchorAsAlign(b.instance, ATheAlign , ASpace)
}

// AnchorClient anchors all four sides to the parent's client area with the given spacing.
func (b *TBevel) AnchorClient(ASpace int32) {
	Bevel_AnchorClient(b.instance, ASpace)
}
// CN: 获取控件自动调整。
// EN: Get Control automatically adjusts.
func (b *TBevel) Align() TAlign {
	return Bevel_GetAlign(b.instance)
}

// CN: 设置控件自动调整。
// EN: Set Control automatically adjusts.
func (b *TBevel) SetAlign(value TAlign) {
	Bevel_SetAlign(b.instance, value)
}

// CN: 获取四个角位置的锚点。
// EN: Get the anchors for the four corner positions.
func (b *TBevel) Anchors() TAnchors {
	return Bevel_GetAnchors(b.instance)
}

// CN: 设置四个角位置的锚点。
// EN: Set the anchors for the four corner positions.
func (b *TBevel) SetAnchors(value TAnchors) {
	Bevel_SetAnchors(b.instance, value)
}

// CN: 获取约束控件大小。
// EN: Get the size constraints of the control.
func (b *TBevel) Constraints() *TSizeConstraints {
	return AsSizeConstraints(Bevel_GetConstraints(b.instance))
}

// CN: 设置约束控件大小。
// EN: Set the size constraints of the control.
func (b *TBevel) SetConstraints(value *TSizeConstraints) {
	Bevel_SetConstraints(b.instance, CheckPtr(value))
}

// CN: 获取以父容器的ShowHint属性为准。
// EN: Get whether the parent container's ShowHint property takes precedence.
func (b *TBevel) ParentShowHint() bool {
	return Bevel_GetParentShowHint(b.instance)
}

// CN: 设置以父容器的ShowHint属性为准。
// EN: Set whether the parent container's ShowHint property takes precedence.
func (b *TBevel) SetParentShowHint(value bool) {
	Bevel_SetParentShowHint(b.instance, value)
}

// Shape returns the bevel's shape (wraps Bevel_GetShape).
func (b *TBevel) Shape() TBevelShape {
	return Bevel_GetShape(b.instance)
}

// SetShape sets the bevel's shape (wraps Bevel_SetShape).
func (b *TBevel) SetShape(value TBevelShape) {
	Bevel_SetShape(b.instance, value)
}

// CN: 获取显示鼠标悬停提示。
// EN: Get Show mouseover tips.
func (b *TBevel) ShowHint() bool {
	return Bevel_GetShowHint(b.instance)
}

// CN: 设置显示鼠标悬停提示。
// EN: Set Show mouseover tips.
func (b *TBevel) SetShowHint(value bool) {
	Bevel_SetShowHint(b.instance, value)
}

// Style returns the bevel's style (wraps Bevel_GetStyle).
func (b *TBevel) Style() TBevelStyle {
	return Bevel_GetStyle(b.instance)
}

// SetStyle sets the bevel's style (wraps Bevel_SetStyle).
func (b *TBevel) SetStyle(value TBevelStyle) {
	Bevel_SetStyle(b.instance, value)
}

// CN: 获取控件可视。
// EN: Get the control visible.
func (b *TBevel) Visible() bool {
	return Bevel_GetVisible(b.instance)
}

// CN: 设置控件可视。
// EN: Set the control visible.
func (b *TBevel) SetVisible(value bool) {
	Bevel_SetVisible(b.instance, value)
}

// CN: 获取控件启用。
// EN: Get the control enabled.
func (b *TBevel) Enabled() bool {
	return Bevel_GetEnabled(b.instance)
}

// CN: 设置控件启用。
// EN: Set the control enabled.
func (b *TBevel) SetEnabled(value bool) {
	Bevel_SetEnabled(b.instance, value)
}
// Action returns the action associated with the control (wraps Bevel_GetAction).
func (b *TBevel) Action() *TAction {
	return AsAction(Bevel_GetAction(b.instance))
}

// SetAction associates an action component with the control (wraps Bevel_SetAction).
func (b *TBevel) SetAction(value IComponent) {
	Bevel_SetAction(b.instance, CheckPtr(value))
}

// BiDiMode returns the bidirectional text mode (wraps Bevel_GetBiDiMode).
func (b *TBevel) BiDiMode() TBiDiMode {
	return Bevel_GetBiDiMode(b.instance)
}

// SetBiDiMode sets the bidirectional text mode (wraps Bevel_SetBiDiMode).
func (b *TBevel) SetBiDiMode(value TBiDiMode) {
	Bevel_SetBiDiMode(b.instance, value)
}

// BoundsRect returns the control's bounding rectangle (wraps Bevel_GetBoundsRect).
func (b *TBevel) BoundsRect() TRect {
	return Bevel_GetBoundsRect(b.instance)
}

// SetBoundsRect sets the control's bounding rectangle (wraps Bevel_SetBoundsRect).
func (b *TBevel) SetBoundsRect(value TRect) {
	Bevel_SetBoundsRect(b.instance, value)
}

// CN: 获取客户区高度。
// EN: Get client height.
func (b *TBevel) ClientHeight() int32 {
	return Bevel_GetClientHeight(b.instance)
}

// CN: 设置客户区高度。
// EN: Set client height.
func (b *TBevel) SetClientHeight(value int32) {
	Bevel_SetClientHeight(b.instance, value)
}

// ClientOrigin returns the screen origin of the client area (wraps Bevel_GetClientOrigin).
func (b *TBevel) ClientOrigin() TPoint {
	return Bevel_GetClientOrigin(b.instance)
}

// CN: 获取客户区矩形。
// EN: Get client rectangle.
func (b *TBevel) ClientRect() TRect {
	return Bevel_GetClientRect(b.instance)
}

// CN: 获取客户区宽度。
// EN: Get client width.
func (b *TBevel) ClientWidth() int32 {
	return Bevel_GetClientWidth(b.instance)
}

// CN: 设置客户区宽度。
// EN: Set client width.
func (b *TBevel) SetClientWidth(value int32) {
	Bevel_SetClientWidth(b.instance, value)
}

// CN: 获取控件状态。
// EN: Get control state.
func (b *TBevel) ControlState() TControlState {
	return Bevel_GetControlState(b.instance)
}

// CN: 设置控件状态。
// EN: Set control state.
func (b *TBevel) SetControlState(value TControlState) {
	Bevel_SetControlState(b.instance, value)
}

// CN: 获取控件样式。
// EN: Get control style.
func (b *TBevel) ControlStyle() TControlStyle {
	return Bevel_GetControlStyle(b.instance)
}

// CN: 设置控件样式。
// EN: Set control style.
func (b *TBevel) SetControlStyle(value TControlStyle) {
	Bevel_SetControlStyle(b.instance, value)
}

// Floating reports whether the control is floating (wraps Bevel_GetFloating).
func (b *TBevel) Floating() bool {
	return Bevel_GetFloating(b.instance)
}

// CN: 获取控件父容器。
// EN: Get control parent container.
func (b *TBevel) Parent() *TWinControl {
	return AsWinControl(Bevel_GetParent(b.instance))
}

// CN: 设置控件父容器。
// EN: Set control parent container.
func (b *TBevel) SetParent(value IWinControl) {
	Bevel_SetParent(b.instance, CheckPtr(value))
}

// CN: 获取左边位置。
// EN: Get Left position.
func (b *TBevel) Left() int32 {
	return Bevel_GetLeft(b.instance)
}

// CN: 设置左边位置。
// EN: Set Left position.
func (b *TBevel) SetLeft(value int32) {
	Bevel_SetLeft(b.instance, value)
}

// CN: 获取顶边位置。
// EN: Get Top position.
func (b *TBevel) Top() int32 {
	return Bevel_GetTop(b.instance)
}

// CN: 设置顶边位置。
// EN: Set Top position.
func (b *TBevel) SetTop(value int32) {
	Bevel_SetTop(b.instance, value)
}

// CN: 获取宽度。
// EN: Get width.
func (b *TBevel) Width() int32 {
	return Bevel_GetWidth(b.instance)
}

// CN: 设置宽度。
// EN: Set width.
func (b *TBevel) SetWidth(value int32) {
	Bevel_SetWidth(b.instance, value)
}

// CN: 获取高度。
// EN: Get height.
func (b *TBevel) Height() int32 {
	return Bevel_GetHeight(b.instance)
}

// CN: 设置高度。
// EN: Set height.
func (b *TBevel) SetHeight(value int32) {
	Bevel_SetHeight(b.instance, value)
}

// CN: 获取控件光标。
// EN: Get control cursor.
func (b *TBevel) Cursor() TCursor {
	return Bevel_GetCursor(b.instance)
}

// CN: 设置控件光标。
// EN: Set control cursor.
func (b *TBevel) SetCursor(value TCursor) {
	Bevel_SetCursor(b.instance, value)
}

// CN: 获取组件鼠标悬停提示。
// EN: Get component mouse hints.
func (b *TBevel) Hint() string {
	return Bevel_GetHint(b.instance)
}

// CN: 设置组件鼠标悬停提示。
// EN: Set component mouse hints.
func (b *TBevel) SetHint(value string) {
	Bevel_SetHint(b.instance, value)
}

// CN: 获取组件总数。
// EN: Get the total number of components.
func (b *TBevel) ComponentCount() int32 {
	return Bevel_GetComponentCount(b.instance)
}

// CN: 获取组件索引。
// EN: Get component index.
func (b *TBevel) ComponentIndex() int32 {
	return Bevel_GetComponentIndex(b.instance)
}
// CN: 设置组件索引。
// EN: Set component index.
func (b *TBevel) SetComponentIndex(value int32) {
Bevel_SetComponentIndex(b.instance, value)
}
// CN: 获取组件所有者。
// EN: Get component owner.
func (b *TBevel) Owner() *TComponent {
return AsComponent(Bevel_GetOwner(b.instance))
}
// CN: 获取组件名称。
// EN: Get the component name.
func (b *TBevel) Name() string {
return Bevel_GetName(b.instance)
}
// CN: 设置组件名称。
// EN: Set the component name.
func (b *TBevel) SetName(value string) {
Bevel_SetName(b.instance, value)
}
// CN: 获取对象标记。
// EN: Get the control tag.
func (b *TBevel) Tag() int {
return Bevel_GetTag(b.instance)
}
// CN: 设置对象标记。
// EN: Set the control tag.
func (b *TBevel) SetTag(value int) {
Bevel_SetTag(b.instance, value)
}
// CN: 获取左边锚点。
// EN: .
func (b *TBevel) AnchorSideLeft() *TAnchorSide {
return AsAnchorSide(Bevel_GetAnchorSideLeft(b.instance))
}
// CN: 设置左边锚点。
// EN: .
func (b *TBevel) SetAnchorSideLeft(value *TAnchorSide) {
Bevel_SetAnchorSideLeft(b.instance, CheckPtr(value))
}
// CN: 获取顶边锚点。
// EN: .
func (b *TBevel) AnchorSideTop() *TAnchorSide {
return AsAnchorSide(Bevel_GetAnchorSideTop(b.instance))
}
// CN: 设置顶边锚点。
// EN: .
func (b *TBevel) SetAnchorSideTop(value *TAnchorSide) {
Bevel_SetAnchorSideTop(b.instance, CheckPtr(value))
}
// CN: 获取右边锚点。
// EN: .
func (b *TBevel) AnchorSideRight() *TAnchorSide {
return AsAnchorSide(Bevel_GetAnchorSideRight(b.instance))
}
// CN: 设置右边锚点。
// EN: .
func (b *TBevel) SetAnchorSideRight(value *TAnchorSide) {
Bevel_SetAnchorSideRight(b.instance, CheckPtr(value))
}
// CN: 获取底边锚点。
// EN: .
func (b *TBevel) AnchorSideBottom() *TAnchorSide {
return AsAnchorSide(Bevel_GetAnchorSideBottom(b.instance))
}
// CN: 设置底边锚点。
// EN: .
func (b *TBevel) SetAnchorSideBottom(value *TAnchorSide) {
Bevel_SetAnchorSideBottom(b.instance, CheckPtr(value))
}
// CN: 获取边框间距。
// EN: .
func (b *TBevel) BorderSpacing() *TControlBorderSpacing {
return AsControlBorderSpacing(Bevel_GetBorderSpacing(b.instance))
}
// CN: 设置边框间距。
// EN: .
func (b *TBevel) SetBorderSpacing(value *TControlBorderSpacing) {
Bevel_SetBorderSpacing(b.instance, CheckPtr(value))
}
// CN: 获取指定索引组件。
// EN: Get the specified index component.
func (b *TBevel) Components(AIndex int32) *TComponent {
return AsComponent(Bevel_GetComponents(b.instance, AIndex))
}
// CN: 获取锚侧面。
// EN: .
func (b *TBevel) AnchorSide(AKind TAnchorKind) *TAnchorSide {
return AsAnchorSide(Bevel_GetAnchorSide(b.instance, AKind))
} | vcl/bevel.go | 0.562177 | 0.41401 | bevel.go | starcoder |
package qrad
import (
"fmt"
"math/cmplx"
)
type Matrix struct {
Elements []Complex
Width, Height int
}
func NewMatrix() *Matrix {
return &Matrix{Elements: make([]Complex, 0), Width: 0, Height: 0}
}
func NewMatrixFromElements(elements [][]Complex) *Matrix {
c := &Matrix{}
c.Height = len(elements)
c.Width = len(elements[0])
c.Elements = make([]Complex, c.Width*c.Height)
for w := 0; w < c.Width; w++ {
for h := 0; h < c.Height; h++ {
c.Set(w, h, elements[h][w])
}
}
return c
}
func (c *Matrix) Resize(x, y int) *Matrix {
c.Elements = make([]Complex, x*y)
c.Width = x
c.Height = y
return c
}
func (c Matrix) At(x, y int) Complex {
// 0, 1, 2, 3
// 4, 5, 6, 7
if x >= c.Width || y >= c.Height {
fmt.Println(x, c.Width, y, c.Height)
panic("Invalid dimensions")
}
return c.Elements[x+c.Width*y]
}
func (c *Matrix) Set(x, y int, e Complex) {
if x >= c.Width || y >= c.Height {
panic("Invalid dimensions")
}
c.Elements[x+c.Width*y] = e
}
func (c *Matrix) Add(a, b Matrix) *Matrix {
if a.Width != b.Width || a.Height != b.Height {
panic("Invalid dimensions")
}
c.Resize(a.Width, a.Height)
for x := 0; x < a.Width; x++ {
for y := 0; y < a.Height; y++ {
c.Set(x, y, a.At(x, y)+b.At(x, y))
}
}
return c
}
func (c *Matrix) Sub(a, b Matrix) *Matrix {
if a.Width != b.Width || a.Height != b.Height {
panic("Invalid dimensions")
}
c.Resize(a.Width, a.Height)
for x := 0; x < a.Width; x++ {
for y := 0; y < a.Height; y++ {
c.Set(x, y, a.At(x, y)-b.At(x, y))
}
}
return c
}
func (c *Matrix) MulScalar(a Matrix, e Complex) *Matrix {
c.Resize(a.Width, a.Height)
for x := 0; x < a.Width; x++ {
for y := 0; y < a.Height; y++ {
c.Set(x, y, e*a.At(x, y))
}
}
return c
}
func (c *Matrix) Transpose(a Matrix) *Matrix {
c.Resize(a.Height, a.Width)
for x := 0; x < a.Width; x++ {
for y := 0; y < a.Height; y++ {
c.Set(y, x, a.At(x, y))
}
}
return c
}
func (c *Matrix) Conjugate(a Matrix) *Matrix {
c.Resize(a.Width, a.Height)
for x := 0; x < a.Width; x++ {
for y := 0; y < a.Height; y++ {
c.Set(x, y, Complex(cmplx.Conj(complex128(a.At(x, y)))))
}
}
return c
}
func (c *Matrix) Adjoint(a Matrix) *Matrix {
c.Transpose(a)
c.Conjugate(*c)
return c
}
func (c *Matrix) MulMatrix(a, b Matrix) *Matrix {
if a.Width != b.Height {
fmt.Println(a.Width, b.Height)
panic("Invalid dimensions")
}
c.Resize(b.Width, a.Height)
for w := 0; w < b.Width; w++ {
for h := 0; h < a.Height; h++ {
sum := NewComplex(0, 0)
for i := 0; i < a.Width; i++ {
sum += a.At(i, h) * b.At(w, i)
}
c.Set(w, h, sum)
}
}
return c
}
func (c *Matrix) Clone(a Matrix) *Matrix {
c.Elements = a.Elements[:]
c.Height = a.Height
c.Width = a.Width
return c
}
func (c *Matrix) TensorProduct(a, b Matrix) *Matrix {
if a.Width == 0 || a.Height == 0 {
c.Clone(b)
return c
} else if b.Width == 0 || b.Height == 0 {
c.Clone(a)
return c
}
c.Resize(a.Width*b.Width, a.Height*b.Height)
for aw := 0; aw < a.Width; aw++ {
for ah := 0; ah < a.Height; ah++ {
for bw := 0; bw < b.Width; bw++ {
for bh := 0; bh < b.Height; bh++ {
cw := aw*b.Width + bw
ch := ah*b.Height + bh
c.Set(cw, ch, a.At(aw, ah)*b.At(bw, bh))
}
}
}
}
return c
}
func (c *Matrix) TensorProducts(a ...Matrix) *Matrix {
for _, m := range a {
c.TensorProduct(*c, m)
}
return c
}
func (c Matrix) Equals(a Matrix) bool {
for h := 0; h < a.Height; h++ {
for w := 0; w < a.Width; w++ {
if !c.At(w, h).Equals(a.At(w, h)) {
return false
}
}
}
return true
}
func (a Matrix) PPrint() {
fmt.Print("[")
for h := 0; h < a.Height; h++ {
fmt.Print("[")
if h != 0 {
fmt.Print(" ")
}
for w := 0; w < a.Width; w++ {
fmt.Printf("%.01f + %.01fj, ", real(a.At(w, h)), imag(a.At(w, h)))
}
fmt.Print("]")
if h != a.Height-1 {
fmt.Println("")
}
}
fmt.Print("]\n")
}
func (m Matrix) IsSquare() bool {
return m.Height == m.Width
}
func (m Matrix) EnsureDimensions() {
if len(m.Elements) != m.Width*m.Height {
panic("invalid dimensions")
}
}
func (m Matrix) IsIdentity() bool {
m.EnsureDimensions()
if !m.IsSquare() {
return false
}
for h := 0; h < m.Height; h++ {
for w := 0; w < m.Width; w++ {
if w == h {
if !m.At(w, h).Equals(NewComplex(1, 0)) {
return false
}
} else {
if !m.At(w, h).Equals(NewComplex(0, 0)) {
return false
}
}
}
}
return true
}
func (m Matrix) IsUnitary() bool {
a := NewMatrix().Adjoint(m)
a.MulMatrix(*a, m)
return a.IsIdentity()
} | matrix.go | 0.735547 | 0.541469 | matrix.go | starcoder |
package pricing
import (
"fmt"
"strconv"
"go.uber.org/zap"
"github.com/transcom/mymove/pkg/models"
)
// parseDomesticLinehaulPrices: parser for 2a) Domestic Linehaul Prices
var parseDomesticLinehaulPrices processXlsxSheet = func(params ParamConfig, sheetIndex int, logger Logger) (interface{}, error) {
// XLSX Sheet consts
const xlsxDataSheetNum int = 6 // 2a) Domestic Linehaul Prices
const feeColIndexStart int = 6 // start at column 6 to get the rates
const feeRowIndexStart int = 14 // start at row 14 to get the rates
const serviceAreaNumberColumn int = 2
const originServiceAreaColumn int = 3
const serviceScheduleColumn int = 4
const numEscalationYearsToProcess = sharedNumEscalationYearsToProcess
if xlsxDataSheetNum != sheetIndex {
return nil, fmt.Errorf("parseDomesticLinehaulPrices expected to process sheet %d, but received sheetIndex %d", xlsxDataSheetNum, sheetIndex)
}
logger.Info("Parsing domestic linehaul prices")
var domPrices []models.StageDomesticLinehaulPrice
dataRows := params.XlsxFile.Sheets[xlsxDataSheetNum].Rows[feeRowIndexStart:]
for _, row := range dataRows {
colIndex := feeColIndexStart
// For number of baseline + Escalation years
for escalation := 0; escalation < numEscalationYearsToProcess; escalation++ {
// For each Rate Season
for _, r := range rateSeasons {
// For each weight band
for _, w := range dlhWeightBands {
// For each mileage range
for _, m := range dlhMilesRanges {
domPrice := models.StageDomesticLinehaulPrice{
ServiceAreaNumber: getCell(row.Cells, serviceAreaNumberColumn),
OriginServiceArea: getCell(row.Cells, originServiceAreaColumn),
ServicesSchedule: getCell(row.Cells, serviceScheduleColumn),
Season: r,
WeightLower: strconv.Itoa(w.lowerLbs),
WeightUpper: strconv.Itoa(w.upperLbs),
MilesLower: strconv.Itoa(m.lower),
MilesUpper: strconv.Itoa(m.upper),
EscalationNumber: strconv.Itoa(escalation),
Rate: getCell(row.Cells, colIndex),
}
colIndex++
if params.ShowOutput == true {
logger.Info("", zap.Any("StageDomesticLinehaulPrice", domPrice))
}
domPrices = append(domPrices, domPrice)
}
}
colIndex++ // skip 1 column (empty column) before starting next Rate type
}
}
}
return domPrices, nil
}
// verifyDomesticLinehaulPrices: verification for 2a) Domestic Linehaul Prices
var verifyDomesticLinehaulPrices verifyXlsxSheet = func(params ParamConfig, sheetIndex int) error {
if dlhWeightBandNumCells != dlhWeightBandNumCellsExpected {
return fmt.Errorf("parseDomesticLinehaulPrices(): Exepected %d columns per weight band, found %d defined in golang parser", dlhWeightBandNumCellsExpected, dlhWeightBandNumCells)
}
if len(dlhWeightBands) != dlhWeightBandCountExpected {
return fmt.Errorf("parseDomesticLinehaulPrices(): Exepected %d weight bands, found %d defined in golang parser", dlhWeightBandCountExpected, len(dlhWeightBands))
}
// XLSX Sheet consts
const xlsxDataSheetNum int = 6 // 2a) Domestic Linehaul Prices
const feeColIndexStart int = 6 // start at column 6 to get the rates
const feeRowIndexStart int = 14 // start at row 14 to get the rates
const serviceAreaNumberColumn int = 2
const originServiceAreaColumn int = 3
const serviceScheduleColumn int = 4
const numEscalationYearsToProcess int = 2
// Check headers
const feeRowMilageHeaderIndexStart = feeRowIndexStart - 3
const verifyHeaderIndexEnd = feeRowMilageHeaderIndexStart + 2
if xlsxDataSheetNum != sheetIndex {
return fmt.Errorf("verifyDomesticLinehaulPrices expected to process sheet %d, but received sheetIndex %d", xlsxDataSheetNum, sheetIndex)
}
dataRows := params.XlsxFile.Sheets[xlsxDataSheetNum].Rows[feeRowMilageHeaderIndexStart:verifyHeaderIndexEnd]
for dataRowsIndex, row := range dataRows {
colIndex := feeColIndexStart
// For number of baseline + Escalation years
for escalation := 0; escalation < numEscalationYearsToProcess; escalation++ {
// For each Rate Season
for _, r := range rateSeasons {
// For each weight band
for _, w := range dlhWeightBands {
// For each milage range
for dlhMilesRangesIndex, m := range dlhMilesRanges {
// skip the last index because the text is not easily checked
if dlhMilesRangesIndex == len(dlhMilesRanges)-1 {
colIndex++
continue
}
verificationLog := fmt.Sprintf(" , verfication for row index: %d, colIndex: %d, Escalation: %d, rateSeasons %v, dlhWeightBands %v",
dataRowsIndex, colIndex, escalation, r, w)
if dataRowsIndex == 0 {
fromMilesCell := getCell(row.Cells, colIndex)
fromMiles, err := getInt(fromMilesCell)
if err != nil {
return fmt.Errorf("could not convert %s to int: %w", fromMilesCell, err)
}
if m.lower != fromMiles {
return fmt.Errorf("format error: From Miles --> does not match expected number expected %d got %s\n%s", m.lower, getCell(row.Cells, colIndex), verificationLog)
}
if "ServiceAreaNumber" != removeWhiteSpace(getCell(row.Cells, serviceAreaNumberColumn)) {
return fmt.Errorf("format error: Header <ServiceAreaNumber> is missing got <%s> instead\n%s", removeWhiteSpace(getCell(row.Cells, serviceAreaNumberColumn)), verificationLog)
}
if "OriginServiceArea" != removeWhiteSpace(getCell(row.Cells, originServiceAreaColumn)) {
return fmt.Errorf("format error: Header <OriginServiceArea> is missing got <%s> instead\n%s", removeWhiteSpace(getCell(row.Cells, originServiceAreaColumn)), verificationLog)
}
if "ServicesSchedule" != removeWhiteSpace(getCell(row.Cells, serviceScheduleColumn)) {
return fmt.Errorf("format error: Header <SServicesSchedule> is missing got <%s> instead\n%s", removeWhiteSpace(getCell(row.Cells, serviceScheduleColumn)), verificationLog)
}
} else if dataRowsIndex == 1 {
toMilesCell := getCell(row.Cells, colIndex)
toMiles, err := getInt(toMilesCell)
if err != nil {
return fmt.Errorf("could not convert %s to int: %w", toMilesCell, err)
}
if m.upper != toMiles {
return fmt.Errorf("format error: To Miles --> does not match expected number expected %d got %s\n%s", m.upper, getCell(row.Cells, colIndex), verificationLog)
}
} else if dataRowsIndex == 2 {
if "EXAMPLE" != getCell(row.Cells, originServiceAreaColumn) {
return fmt.Errorf("format error: Filler text <EXAMPLE> is missing got <%s> instead\n%s", getCell(row.Cells, originServiceAreaColumn), verificationLog)
}
}
colIndex++
}
}
colIndex++ // skip 1 column (empty column) before starting next Rate type
}
}
}
return nil
} | pkg/parser/pricing/parse_domestic_linehaul_prices.go | 0.516352 | 0.544559 | parse_domestic_linehaul_prices.go | starcoder |
// Package windowsiana provides support in converting date timezones from
// the non-standard windows timezone format into UTC. It also provides a function
// to return a timezone-aware date given a date string and an IANA timezone
// the date can then be used as a UTC date via the .UTC() function
package windowsiana
import (
"errors"
"time"
)
// WinIANA is a pseudo constant that provides a mapping between the Windows timezones and the IANA zones
// CAUTION : I can't vouch for the ongoing reliability/accuracy of the mapping, which is partly why
// I've made this an accessible variable so that you can update it at runtime if needs be
var WinIANA = map[string]string{
"(UTC-12:00) International Date Line West": "Etc/GMT+12",
"(UTC-11:00) Co-ordinated Universal Time-11": "Etc/GMT+11",
"(UTC-11:00) Coordinated Universal Time-11": "Etc/GMT+11",
"(UTC-10:00) Aleutian Islands": "US/Aleutian",
"(UTC-10:00) Hawaii": "Pacific/Honolulu",
"(UTC-09:30) Marquesas Islands": "Pacific/Marquesas",
"(UTC-09:00) Alaska": "America/Anchorage",
"(UTC-09:00) Co-ordinated Universal Time-09": "Etc/GMT+9",
"(UTC-09:00) Coordinated Universal Time-09": "Etc/GMT+9",
"(UTC-08:00) Baja California": "America/Tijuana",
"(UTC-08:00) Co-ordinated Universal Time-08": "Etc/GMT+8",
"(UTC-08:00) Coordinated Universal Time-08": "Etc/GMT+8",
"(UTC-08:00) Pacific Time (US & Canada)": "America/Los_Angeles",
"(UTC-07:00) Arizona": "America/Phoenix",
"(UTC-07:00) Chihuahua, La Paz, Mazatlan": "America/Chihuahua",
"(UTC-07:00) Mountain Time (US & Canada)": "America/Denver",
"(UTC-06:00) Central America": "America/Guatemala",
"(UTC-06:00) Central Time (US & Canada)": "America/Chicago",
"(UTC-06:00) Easter Island": "Pacific/Easter",
"(UTC-06:00) Guadalajara, Mexico City, Monterrey": "America/Mexico_City",
"(UTC-06:00) Saskatchewan": "America/Regina",
"(UTC-05:00) Bogota, Lima, Quito, Rio Branco": "America/Bogota",
"(UTC-05:00) Chetumal": "America/Cancun",
"(UTC-05:00) Eastern Time (US & Canada)": "America/New_York",
"(UTC-05:00) Haiti": "America/Port-au-Prince",
"(UTC-05:00) Havana": "America/Havana",
"(UTC-05:00) Indiana (East)": "America/Indianapolis",
"(UTC-05:00) Turks and Caicos": "Etc/GMT+5",
"(UTC-04:00) Asuncion": "America/Asuncion",
"(UTC-04:00) Atlantic Time (Canada)": "America/Halifax",
"(UTC-04:00) Caracas": "America/Caracas",
"(UTC-04:00) Cuiaba": "America/Cuiaba",
"(UTC-04:00) Georgetown, La Paz, Manaus, San Juan": "America/La_Paz",
"(UTC-04:00) Santiago": "America/Santiago",
"(UTC-04:00) Turks and Caicos": "America/Grand_Turk",
"(UTC-03:30) Newfoundland": "America/St_Johns",
"(UTC-03:00) Araguaina": "America/Araguaina",
"(UTC-03:00) Brasilia": "America/Sao_Paulo",
"(UTC-03:00) Cayenne, Fortaleza": "America/Cayenne",
"(UTC-03:00) City of Buenos Aires": "America/Buenos_Aires",
"(UTC-03:00) Greenland": "America/Godthab",
"(UTC-03:00) Montevideo": "America/Montevideo",
"(UTC-03:00) Punta Arenas": "America/Punta_Arenas",
"(UTC-03:00) Saint Pierre and Miquelon": "America/Miquelon",
"(UTC-03:00) Salvador": "America/Bahia",
"(UTC-02:00) Co-ordinated Universal Time-02": "Etc/GMT+2",
"(UTC-02:00) Coordinated Universal Time-02": "Etc/GMT+2",
"(UTC-02:00) Mid-Atlantic - Old": "Etc/GMT+2",
"(UTC-01:00) Azores": "Atlantic/Azores",
"(UTC-01:00) Cabo Verde Is.": "Atlantic/Cape_Verde",
"(UTC) Co-ordinated Universal Time": "Etc/GMT",
"(UTC) Coordinated Universal Time": "Etc/GMT",
"(UTC+00:00) Casablanca": "Africa/Casablanca",
"(UTC+00:00) Dublin, Edinburgh, Lisbon, London": "Europe/London",
"(UTC+00:00) <NAME>": "Atlantic/Reykjavik",
"(UTC+01:00) Amsterdam, Berlin, Bern, Rome, Stockholm, Vienna": "Europe/Berlin",
"(UTC+01:00) Belgrade, Bratislava, Budapest, Ljubljana, Prague": "Europe/Budapest",
"(UTC+01:00) Brussels, Copenhagen, Madrid, Paris": "Europe/Paris",
"(UTC+01:00) Casablanca": "Africa/Casablanca",
"(UTC+01:00) Sarajevo, Skopje, Warsaw, Zagreb": "Europe/Warsaw",
"(UTC+01:00) West Central Africa": "Africa/Lagos",
"(UTC+01:00) Windhoek": "Africa/Windhoek",
"(UTC+02:00) Amman": "Asia/Amman",
"(UTC+02:00) A<NAME>": "Europe/Bucharest",
"(UTC+02:00) Beirut": "Asia/Beirut",
"(UTC+02:00) Cairo": "Africa/Cairo",
"(UTC+02:00) Chisinau": "Europe/Chisinau",
"(UTC+02:00) Damascus": "Asia/Damascus",
"(UTC+02:00) <NAME>": "Asia/Gaza",
"(UTC+02:00) <NAME>": "Africa/Johannesburg",
"(UTC+02:00) Helsinki, Kyiv, Riga, Sofia, Tallinn, Vilnius": "Europe/Kiev",
"(UTC+02:00) Istanbul": "Europe/Istanbul",
"(UTC+03:00) Istanbul": "Europe/Istanbul",
"(UTC+02:00) Jerusalem": "Asia/Jerusalem",
"(UTC+02:00) Kaliningrad": "Europe/Kaliningrad",
"(UTC+02:00) Windhoek": "Africa/Windhoek",
"(UTC+02:00) Khartoum": "Africa/Khartoum",
"(UTC+02:00) Tripoli": "Africa/Tripoli",
"(UTC+03:00) Baghdad": "Asia/Baghdad",
"(UTC+03:00) <NAME>": "Asia/Riyadh",
"(UTC+03:00) Minsk": "Europe/Minsk",
"(UTC+03:00) Moscow, St. Petersburg": "Europe/Moscow",
"(UTC+03:00) Moscow, St. Petersburg, Volgograd": "Europe/Moscow",
"(UTC+03:00) Nairobi": "Africa/Nairobi",
"(UTC+03:30) Tehran": "Asia/Tehran",
"(UTC+04:00) Abu Dhabi, Muscat": "Asia/Dubai",
"(UTC+04:00) Astrakhan, Ulyanovsk": "Europe/Samara",
"(UTC+04:00) Baku": "Asia/Baku",
"(UTC+04:00) Izhevsk, Samara": "Europe/Samara",
"(UTC+04:00) Port Louis": "Indian/Mauritius",
"(UTC+04:00) Saratov": "Europe/Saratov",
"(UTC+04:00) Tbilisi": "Asia/Tbilisi",
"(UTC+04:00) Volgograd": "Europe/Volgograd",
"(UTC+04:00) Yerevan": "Asia/Yerevan",
"(UTC+04:30) Kabul": "Asia/Kabul",
"(UTC+05:00) <NAME>": "Asia/Tashkent",
"(UTC+05:00) Ekaterinburg": "Asia/Yekaterinburg",
"(UTC+05:00) <NAME>i": "Asia/Karachi",
"(UTC+05:00) Qyzylorda": "Asia/Qyzylorda",
"(UTC+05:30) Chennai, Kolkata, Mumbai, New Delhi": "Asia/Calcutta",
"(UTC+05:30) Sri Jayawardenepura": "Asia/Colombo",
"(UTC+05:45) Kathmandu": "Asia/Kathmandu",
"(UTC+06:00) Astana": "Asia/Almaty",
"(UTC+06:00) Dhaka": "Asia/Dhaka",
"(UTC+06:00) Omsk": "Asia/Omsk",
"(UTC+06:00) Novosibirsk": "Asia/Novosibirsk",
"(UTC+06:30) Yangon (Rangoon)": "Asia/Rangoon",
"(UTC+07:00) Bangkok, Hanoi, Jakarta": "Asia/Bangkok",
"(UTC+07:00) Barnaul, Gorno-Altaysk": "Asia/Krasnoyarsk",
"(UTC+07:00) Hovd": "Asia/Hovd",
"(UTC+07:00) Krasnoyarsk": "Asia/Krasnoyarsk",
"(UTC+07:00) Novosibirsk": "Asia/Novosibirsk",
"(UTC+07:00) Tomsk": "Asia/Tomsk",
"(UTC+08:00) Beijing, Chongqing, Hong Kong SAR, Urumqi": "Asia/Shanghai",
"(UTC+08:00) Beijing, Chongqing, Hong Kong, Urumqi": "Asia/Shanghai",
"(UTC+08:00) Irkutsk": "Asia/Irkutsk",
"(UTC+08:00) Kuala Lumpur, Singapore": "Asia/Singapore",
"(UTC+08:00) Perth": "Australia/Perth",
"(UTC+08:00) Taipei": "Asia/Taipei",
"(UTC+08:00) Ulaanbaatar": "Asia/Ulaanbaatar",
"(UTC+08:30) Pyongyang": "Asia/Pyongyang",
"(UTC+09:00) Pyongyang": "Asia/Pyongyang",
"(UTC+08:45) Eucla": "Australia/Eucla",
"(UTC+09:00) Chita": "Asia/Chita",
"(UTC+09:00) Osaka, Sapporo, Tokyo": "Asia/Tokyo",
"(UTC+09:00) Seoul": "Asia/Seoul",
"(UTC+09:00) Yakutsk": "Asia/Yakutsk",
"(UTC+09:30) Adelaide": "Australia/Adelaide",
"(UTC+09:30) Darwin": "Australia/Darwin",
"(UTC+10:00) Brisbane": "Australia/Brisbane",
"(UTC+10:00) Canberra, Melbourne, Sydney": "Australia/Sydney",
"(UTC+10:00) Guam, Port Moresby": "Pacific/Port_Moresby",
"(UTC+10:00) Hobart": "Australia/Hobart",
"(UTC+10:00) Vladivostok": "Asia/Vladivostok",
"(UTC+10:30) Lord Howe Island": "Australia/Lord_Howe",
"(UTC+11:00) Bougainville Island": "Pacific/Bougainville",
"(UTC+11:00) Chokurdakh": "Asia/Srednekolymsk",
"(UTC+11:00) Magadan": "Asia/Magadan",
"(UTC+11:00) Norfolk Island": "Pacific/Norfolk",
"(UTC+11:00) Sakhalin": "Asia/Sakhalin",
"(UTC+11:00) Solomon Is., New Caledonia": "Pacific/Guadalcanal",
"(UTC+12:00) Anadyr, Petropavlovsk-Kamchatsky": "Asia/Kamchatka",
"(UTC+12:00) Auckland, Wellington": "Pacific/Auckland",
"(UTC+12:00) Co-ordinated Universal Time+12": "Etc/GMT-12",
"(UTC+12:00) Coordinated Universal Time+12": "Etc/GMT-12",
"(UTC+12:00) Petropavlovsk-Kamchatsky - Old": "Etc/GMT-12",
"(UTC+12:00) Fiji": "Pacific/Fiji",
"(UTC+12:45) Chatham Islands": "Pacific/Chatham",
"(UTC+13:00) Nuku'alofa": "Pacific/Tongatapu",
"(UTC+13:00) Co-ordinated Universal Time+13": "Etc/GMT-13",
"(UTC+13:00) Coordinated Universal Time+13": "Etc/GMT-13",
"(UTC+13:00) Samoa": "Pacific/Apia",
"(UTC+14:00) Kiritimati Island": "Pacific/Kiritimati",
}
// TimezoneParseWindows accepts a timestring in the format "2006-01-02T15:04:05" as the tstring
// parameter and a windows time zone (eg "(UTC+12:00) Fiji") as the timezone. It will return
// a timezoned date, which will correctly handle daylight savings time if it's in force at the given date
func TimezoneParseWindows(tstring string, tzone string) (time.Time, error) {
ianazone := WinIANA[tzone]
if ianazone == "" {
var t time.Time
return t, errors.New("Could not match windows timezone to IANA timezone")
}
return TimezoneParseIANA(tstring, WinIANA[tzone])
}
// TimezoneParseIANA accepts a timestring in the format "2006-01-02T15:04:05" as the tstring
// parameter and am IANA time zone (eg "Pacific/Fiji") as the timezone. It will return
// a timezoned date, which will correctly handle daylight savings time if it's in force at the given date
func TimezoneParseIANA(tstring string, tzone string) (time.Time, error) {
var zulutime time.Time
it, err := time.Parse("2006-01-02T15:04:05", tstring)
if err != nil {
return zulutime, err
}
loc, err := time.LoadLocation(tzone)
if err != nil {
return zulutime, err
}
zulutime = time.Date(it.Year(), it.Month(), it.Day(), it.Hour(), it.Minute(), it.Second(), 0, loc)
return zulutime, nil
}
// StripTimezoneFromDate removes the same point in time and date, but strips apart the timezone
func StripTimezoneFromDate(indate time.Time) time.Time {
return time.Unix(indate.Unix(), 0)
} | windowsiana.go | 0.510985 | 0.634345 | windowsiana.go | starcoder |
package wiremock
// Types of params matching.
const (
ParamEqualTo ParamMatchingStrategy = "equalTo"
ParamMatches ParamMatchingStrategy = "matches"
ParamContains ParamMatchingStrategy = "contains"
ParamEqualToXml ParamMatchingStrategy = "equalToXml"
ParamEqualToJson ParamMatchingStrategy = "equalToJson"
ParamMatchesXPath ParamMatchingStrategy = "matchesXPath"
ParamMatchesJsonPath ParamMatchingStrategy = "matchesJsonPath"
ParamAbsent ParamMatchingStrategy = "absent"
ParamDoesNotMatch ParamMatchingStrategy = "doesNotMatch"
)
// Types of url matching.
const (
URLEqualToRule URLMatchingStrategy = "url"
URLPathEqualToRule URLMatchingStrategy = "urlPath"
URLPathMatchingRule URLMatchingStrategy = "urlPathPattern"
URLMatchingRule URLMatchingStrategy = "urlPattern"
)
// URLMatchingStrategy is enum url matching type.
type URLMatchingStrategy string
// ParamMatchingStrategy is enum params matching type.
type ParamMatchingStrategy string
// URLMatcher is structure for defining the type of url matching.
type URLMatcher struct {
strategy URLMatchingStrategy
value string
}
// Strategy returns URLMatchingStrategy of URLMatcher.
func (m URLMatcher) Strategy() URLMatchingStrategy {
return m.strategy
}
// Value returns value of URLMatcher.
func (m URLMatcher) Value() string {
return m.value
}
// URLEqualTo returns URLMatcher with URLEqualToRule matching strategy.
func URLEqualTo(url string) URLMatcher {
return URLMatcher{
strategy: URLEqualToRule,
value: url,
}
}
// URLPathEqualTo returns URLMatcher with URLPathEqualToRule matching strategy.
func URLPathEqualTo(url string) URLMatcher {
return URLMatcher{
strategy: URLPathEqualToRule,
value: url,
}
}
// URLPathMatching returns URLMatcher with URLPathMatchingRule matching strategy.
func URLPathMatching(url string) URLMatcher {
return URLMatcher{
strategy: URLPathMatchingRule,
value: url,
}
}
// URLMatching returns URLMatcher with URLMatchingRule matching strategy.
func URLMatching(url string) URLMatcher {
return URLMatcher{
strategy: URLMatchingRule,
value: url,
}
}
// ParamMatcher is structure for defining the type of params.
type ParamMatcher struct {
strategy ParamMatchingStrategy
value string
}
// Strategy returns ParamMatchingStrategy of ParamMatcher.
func (m ParamMatcher) Strategy() ParamMatchingStrategy {
return m.strategy
}
// Value returns value of ParamMatcher.
func (m ParamMatcher) Value() string {
return m.value
}
// EqualTo returns ParamMatcher with ParamEqualTo matching strategy.
func EqualTo(param string) ParamMatcher {
return ParamMatcher{
strategy: ParamEqualTo,
value: param,
}
}
// Matching returns ParamMatcher with ParamMatches matching strategy.
func Matching(param string) ParamMatcher {
return ParamMatcher{
strategy: ParamMatches,
value: param,
}
}
// Contains returns ParamMatcher with ParamContains matching strategy.
func Contains(param string) ParamMatcher {
return ParamMatcher{
strategy: ParamContains,
value: param,
}
}
// EqualToXml returns ParamMatcher with ParamEqualToXml matching strategy.
func EqualToXml(param string) ParamMatcher {
return ParamMatcher{
strategy: ParamEqualToXml,
value: param,
}
}
// EqualToJson returns ParamMatcher with ParamEqualToJson matching strategy.
func EqualToJson(param string) ParamMatcher {
return ParamMatcher{
strategy: ParamEqualToJson,
value: param,
}
}
// MatchingXPath returns ParamMatcher with ParamMatchesXPath matching strategy.
func MatchingXPath(param string) ParamMatcher {
return ParamMatcher{
strategy: ParamMatchesXPath,
value: param,
}
}
// MatchingJsonPath returns ParamMatcher with ParamMatchesJsonPath matching strategy.
func MatchingJsonPath(param string) ParamMatcher {
return ParamMatcher{
strategy: ParamMatchesJsonPath,
value: param,
}
}
// NotMatching returns ParamMatcher with ParamDoesNotMatch matching strategy.
func NotMatching(param string) ParamMatcher {
return ParamMatcher{
strategy: ParamDoesNotMatch,
value: param,
}
} | matching.go | 0.810104 | 0.402686 | matching.go | starcoder |
package text
//diff proposes simple and naive functions to visualize differences between
//strings. It is probably only useful for adding some eye candy when looking
//at test results or for supporting some command-line interactions.
import (
"encoding/json"
"fmt"
"strings"
"unicode"
)
//diffType classifies how a pair of compared items (lines, words or runes)
//relate to each other: identical, modified, or present on one side only.
type diffType int

const (
	isSame    diffType = iota //both sides are identical
	isDiff                    //both sides exist but their content differs
	onlyLeft                  //item is present on the left side only
	onlyRight                 //item is present on the right side only

	//minSimilarity is a similarity threshold used elsewhere in the package
	//(presumably a percentage below which items are treated as unrelated
	//rather than modified — TODO confirm against its caller).
	minSimilarity int = 20
)
var (
	//Diff does not highlight differences, only indicates where differences
	//are (through the Symbols column: "=", "<>", "-", "+").
	Diff = DiffHighlighter{
		SameL:   func(s string) string { return s },
		DiffL:   func(s string) string { return s },
		LackL:   func(s string) string { return "" },
		ExcessL: func(s string) string { return s },
		SameR:   func(s string) string { return s },
		DiffR:   func(s string) string { return s },
		LackR:   func(s string) string { return "" },
		ExcessR: func(s string) string { return s },
		Symbols: [...]string{"=", "<>", "-", "+"},
	}
	//DiffShowNonPrintable does not highlight differences but shows spaces,
	//tabs and non printable runes on the fragments flagged as different.
	//Note that Excess fragments are rendered verbatim (no rune expansion) —
	//presumably intentional, confirm against showNonPrintableRune's callers.
	DiffShowNonPrintable = DiffHighlighter{
		SameL:   func(s string) string { return s },
		DiffL:   func(s string) string { return showNonPrintableRune(s) },
		LackL:   func(s string) string { return "" },
		ExcessL: func(s string) string { return s },
		SameR:   func(s string) string { return s },
		DiffR:   func(s string) string { return showNonPrintableRune(s) },
		LackR:   func(s string) string { return "" },
		ExcessR: func(s string) string { return s },
		Symbols: [...]string{"=", "<>", "-", "+"},
	}
	//LightMkdDiff highlights differences in pseudo Markdown:
	//**bold** for changed fragments, ~~strikethrough~~ for excess ones.
	LightMkdDiff = DiffHighlighter{
		SameL:   func(s string) string { return s },
		DiffL:   func(s string) string { return fmt.Sprintf("**%s**", s) },
		LackL:   func(s string) string { return "" },
		ExcessL: func(s string) string { return fmt.Sprintf("~~%s~~", s) },
		SameR:   func(s string) string { return s },
		DiffR:   func(s string) string { return fmt.Sprintf("**%s**", s) },
		LackR:   func(s string) string { return "" },
		ExcessR: func(s string) string { return fmt.Sprintf("~~%s~~", s) },
		Symbols: [...]string{"=", "<>", "-", "+"},
	}
	//ColorDiff highlights differences in colors using ANSI SGR escape
	//sequences (\x1b[4;31m = red underline, \x1b[0m = reset).
	ColorDiff = DiffHighlighter{
		SameL:   func(s string) string { return s },
		DiffL:   func(s string) string { return fmt.Sprintf("\x1b[4;31m%s\x1b[0m", s) }, //Red underline (can detect space)
		LackL:   func(s string) string { return "" },
		ExcessL: func(s string) string { return fmt.Sprintf("\x1b[4;31m%s\x1b[0m", s) }, //Red underline (can detect space)
		SameR:   func(s string) string { return s },
		DiffR:   func(s string) string { return fmt.Sprintf("\x1b[4;31m%s\x1b[0m", s) }, //Red underline (can detect space)
		LackR:   func(s string) string { return "" },
		ExcessR: func(s string) string { return fmt.Sprintf("\x1b[4;31m%s\x1b[0m", s) }, //Red underline (can detect space)
		Symbols: [...]string{"=", "<>", "-", "+"},
	}
	//DeviationDiff highlights differences in colors showing differences as
	//deviations: the left side is rendered in green, the right side in red,
	//with strikethrough for excess fragments and underlined padding where a
	//fragment is missing.
	DeviationDiff = DiffHighlighter{
		SameL:   func(s string) string { return s },
		DiffL:   func(s string) string { return fmt.Sprintf("\x1b[32m%s\x1b[0m", s) }, //Green
		LackL:   func(s string) string { return "" },
		ExcessL: func(s string) string { return fmt.Sprintf("\x1b[32;1m%s\x1b[0m", s) }, //Green + Bold
		SameR:   func(s string) string { return s },
		DiffR:   func(s string) string { return fmt.Sprintf("\x1b[31m%s\x1b[0m", s) }, //Red
		LackR: func(s string) string { //Underlined space
			//NOTE(review): pads with len(s)-1 underlined spaces, one short
			//of the missing fragment's length — looks deliberate (alignment
			//with the "+"/"-" symbol column?) but worth confirming.
			if len(s) > 0 {
				return fmt.Sprintf("\x1b[31;4m%s\x1b[0m", strings.Repeat(" ", len(s)-1))
			}
			return ""
		},
		ExcessR: func(s string) string { return fmt.Sprintf("\x1b[31;9m%s\x1b[0m", s) }, //Red + Strikeout
		Symbols: [...]string{"=", "<>", "-", "+"},
	}
)
//DiffHighlighter gathers styling functions to highlight differences between
//strings. All functions have to be defined otherwise diff will panic.
//The *L functions style the left-hand side and the *R functions the right-hand
//side, for segments that are identical (Same*), different (Diff*), missing
//(Lack*) or extra (Excess*) on that side.
type DiffHighlighter struct {
	SameL, DiffL, LackL, ExcessL func(s string) string
	SameR, DiffR, LackR, ExcessR func(s string) string
	Symbols                      [4]string //Same order than diffType constants
}
//Slices returns the differences between two slices of strings. Differences are
//looked at line by line, then word by word and finally rune by rune.
//It returns three parallel slices: the per-entry diff symbol, the highlighted
//left lines and the highlighted right lines.
func (h *DiffHighlighter) Slices(left, right []string) (dT []string, dL []string, dR []string) {
	d := findDiffAndInsertion(left, right)
	for i := range d.T {
		switch d.T[i] {
		case isDiff:
			//lines that differ are refined with a word-level diff
			_, lwords, rwords := h.Bywords(d.L[i], d.R[i])
			dT, dL, dR = append(dT, h.Symbols[isDiff]), append(dL, strings.Join(lwords, "")), append(dR, strings.Join(rwords, ""))
		default:
			ht, hl, hr := h.highlights(d.T[i], d.L[i], d.R[i])
			dT, dL, dR = append(dT, ht), append(dL, hl), append(dR, hr)
		}
	}
	return
}
//Anything returns a visualisation of the differences between two objects of
//unknown types. It 'stringifies' these interface{} using a human friendly
//Json representation (or, if not possible, golang's internal Go-syntax string
//representation) and then proceeds to a string diff.
func (h *DiffHighlighter) Anything(l, r interface{}) (diffType []string, diffLeft []string, diffRight []string) {
	return h.Bylines(stringify(l), stringify(r))
}
//Bylines returns the differences between 'left' and 'right' strings. Both
//strings are split on newlines and the resulting line slices are diffed
//line by line, then word by word and finally rune by rune.
func (h *DiffHighlighter) Bylines(left, right string) (dT []string, dL []string, dR []string) {
	leftLines := strings.Split(left, "\n")
	rightLines := strings.Split(right, "\n")
	return h.Slices(leftLines, rightLines)
}
//Bywords returns the differences between 'left' and 'right' strings.
//Differences are looked at word by word, then rune by rune. When fewer than
//minSimilarity percent of the words match, the whole strings are highlighted
//as a single difference instead.
func (h *DiffHighlighter) Bywords(left, right string) (dT []string, dL []string, dR []string) {
	Lwords, Rwords := splitInWords(left), splitInWords(right)
	d := findDiffAndInsertion(Lwords, Rwords).Compact()
	if d.SimilarityLevel() < minSimilarity {
		ht, hl, hr := h.highlights(isDiff, left, right)
		return append(dT, ht), append(dL, hl), append(dR, hr)
	}
	for i := range d.T {
		switch d.T[i] {
		case isDiff:
			//differing words are refined with a rune-level diff
			_, lrunes, rrunes := h.Byrunes(d.L[i], d.R[i])
			dT, dL, dR = append(dT, h.Symbols[isDiff]), append(dL, strings.Join(lrunes, "")), append(dR, strings.Join(rrunes, ""))
		default:
			ht, hl, hr := h.highlights(d.T[i], d.L[i], d.R[i])
			dT, dL, dR = append(dT, ht), append(dL, hl), append(dR, hr)
		}
	}
	return
}
//Byrunes returns the differences between 'left' and 'right'. Differences are
//looked at rune by rune. When fewer than minSimilarity percent of the runes
//match, the whole strings are highlighted as a single difference instead.
func (h *DiffHighlighter) Byrunes(left, right string) (dT []string, dL []string, dR []string) {
	Lrunes, Rrunes := strings.Split(left, ""), strings.Split(right, "")
	d := findDiff(Lrunes, Rrunes).Compact()
	if d.SimilarityLevel() < minSimilarity {
		ht, hl, hr := h.highlights(isDiff, left, right)
		return append(dT, ht), append(dL, hl), append(dR, hr)
	}
	for i := range d.T {
		ht, hl, hr := h.highlights(d.T[i], d.L[i], d.R[i])
		dT, dL, dR = append(dT, ht), append(dL, hl), append(dR, hr)
	}
	return
}
//highlights applies the highlighter's styling functions to one diff entry and
//returns the diff symbol plus the styled left and right fragments.
func (h *DiffHighlighter) highlights(dT diffType, dL, dR string) (t string, l string, r string) {
	switch dT {
	case isSame:
		t, l, r = h.Symbols[dT], h.SameL(dL), h.SameR(dR)
	case isDiff:
		t, l, r = h.Symbols[dT], h.DiffL(dL), h.DiffR(dR)
	case onlyLeft:
		//present only on the left: excess on the left, lacking on the right
		t, l, r = h.Symbols[dT], h.ExcessL(dL), h.LackR(dR)
	case onlyRight:
		t, l, r = h.Symbols[dT], h.LackL(dL), h.ExcessR(dR)
	}
	return
}
//findDiff compares left and right element by element. Positions present in
//both slices are recorded as same/different pairs; whatever remains in the
//longer slice is recorded as present on that side only.
func findDiff(left, right []string) *diff {
	d := &diff{}

	common := len(left)
	if len(right) < common {
		common = len(right)
	}

	for i := 0; i < common; i++ {
		if left[i] == right[i] {
			d.IsSame(left[i])
		} else {
			d.IsDifferent(left[i], right[i])
		}
	}

	//at most one of these two tails is non-empty
	for _, s := range left[common:] {
		d.OnlyLeft(s)
	}
	for _, s := range right[common:] {
		d.OnlyRight(s)
	}

	return d
}
//findDiffAndInsertion compares left and right while also detecting
//insertions: before declaring two elements different, it scans ahead on each
//side for a resynchronisation point (an element equal to the other side's
//current element) and records the skipped elements as present on one side
//only.
func findDiffAndInsertion(left, right []string) *diff {
	d := &diff{}
	iL, iR := 0, 0
	lenL, lenR := len(left), len(right)
Loop:
	for {
		//one side exhausted: the remainder of the other side is one-sided
		if iL == lenL {
			for n := iR; n < lenR; n++ {
				d.OnlyRight(right[n])
			}
			break
		}
		if iR == lenR {
			for n := iL; n < lenL; n++ {
				d.OnlyLeft(left[n])
			}
			break
		}
		//look ahead on the right for the current left element; the scan
		//stops early if the two sides realign on their own
		for n := iR; n < lenR; n++ {
			if n != iR && n < lenL && left[n] == right[n] {
				break
			}
			if left[iL] == right[n] {
				for p := iR; p < n; p++ {
					d.OnlyRight(right[p])
				}
				d.IsSame(left[iL])
				iL++
				iR = n + 1
				goto Loop
			}
		}
		//symmetric look-ahead on the left for the current right element
		for n := iL; n < lenL; n++ {
			if n != iL && n < lenR && left[n] == right[n] {
				break
			}
			if left[n] == right[iR] {
				for p := iL; p < n; p++ {
					d.OnlyLeft(left[p])
				}
				d.IsSame(right[iR])
				iR++
				iL = n + 1
				goto Loop
			}
		}
		//no resynchronisation point found: record a plain difference
		d.IsDifferent(left[iL], right[iR])
		iL++
		iR++
	}
	return d
}
//diff captures differences between two sequences of strings as three
//parallel slices: T holds the kind of each entry, L and R hold the text
//attributed to the left and right side respectively.
type diff struct {
	T    []diffType
	L, R []string
}

//IsSame records an element present and identical on both sides.
func (d *diff) IsSame(left string) {
	d.L = append(d.L, left)
	d.R = append(d.R, left)
	d.T = append(d.T, isSame)
}

//OnlyLeft records an element present on the left side only. The text is
//stored in both columns so that the highlighters (e.g. LackR) can derive the
//width of the missing region.
func (d *diff) OnlyLeft(left string) {
	d.L = append(d.L, left)
	d.R = append(d.R, left)
	d.T = append(d.T, onlyLeft)
}

//OnlyRight records an element present on the right side only. As in
//OnlyLeft, the text is stored in both columns on purpose.
func (d *diff) OnlyRight(right string) {
	d.L = append(d.L, right)
	d.R = append(d.R, right)
	d.T = append(d.T, onlyRight)
}

//IsDifferent records a pair of elements that differ between the two sides.
func (d *diff) IsDifferent(left, right string) {
	d.L = append(d.L, left)
	d.R = append(d.R, right)
	d.T = append(d.T, isDiff)
}
//SimilarityLevel returns the percentage (0-100) of entries recorded as
//identical on both sides. An empty diff is reported as fully similar (100)
//instead of panicking with an integer division by zero.
func (d *diff) SimilarityLevel() int {
	if len(d.T) == 0 {
		//two empty sequences are vacuously identical
		return 100
	}
	same := 0
	for _, t := range d.T {
		if t == isSame {
			same++
		}
	}
	return 100 * same / len(d.T)
}
//Compact merges consecutive entries of the same kind into a single entry,
//concatenating their left and right texts.
//NOTE(review): the trailing append runs unconditionally, so Compact on an
//empty diff still emits one zero-valued entry; callers (SimilarityLevel,
//Bywords, Byrunes) currently rely on the result being non-empty.
func (d *diff) Compact() *diff {
	var t []diffType
	var r, l []string
	var curT diffType
	var curL, curR string
	for i := range d.T {
		//flush the accumulated run when the entry kind changes
		if i > 0 && d.T[i] != curT {
			t, l, r = append(t, curT), append(l, curL), append(r, curR)
			curL, curR = "", ""
		}
		curT, curL, curR = d.T[i], curL+d.L[i], curR+d.R[i]
	}
	t, l, r = append(t, curT), append(l, curL), append(r, curR)
	return &diff{t, l, r}
}
//splitInWords splits s into words, keeping each space or punctuation rune as
//its own element so that the pieces can be re-joined losslessly.
//Consecutive separators produce empty "word" elements, which is harmless
//because callers only join the pieces back together.
func splitInWords(s string) (split []string) {
	var lasti int
	for i, r := range s {
		if unicode.IsSpace(r) || unicode.IsPunct(r) {
			split = append(split, s[lasti:i])
			split = append(split, string(r))
			//advance past the full UTF-8 encoding of r: the previous code
			//used i+1, which split multi-byte separators (e.g. U+00A0) in
			//half and corrupted the following word
			lasti = i + len(string(r))
		}
	}
	if lasti < len(s) {
		split = append(split, s[lasti:])
	}
	return
}
//stringify renders an arbitrary value as text: strings pass through
//unchanged, everything else is pretty-printed as JSON, falling back to
//golang's Go-syntax representation when the value cannot be marshalled.
func stringify(v interface{}) string {
	if s, ok := v.(string); ok {
		return s
	}
	marshalled, err := json.MarshalIndent(v, "", "  ")
	if err != nil {
		return fmt.Sprintf("%#v", v)
	}
	return string(marshalled)
}
//showNonPrintableRune replaces invisible characters with visible markers:
//tabs become '⇥' (U+21E5), other whitespace a middle dot (U+00B7) and
//non printable runes the Unicode replacement character (U+FFFD).
func showNonPrintableRune(s string) string {
	return strings.Map(func(r rune) rune {
		if r == '\t' {
			return '\u21e5'
		}
		if unicode.IsSpace(r) {
			return '\u00B7'
		}
		if !unicode.IsPrint(r) {
			return '\ufffd'
		}
		return r
	}, s)
} | style/text/diff.go | 0.560493 | 0.45538 | diff.go | starcoder |
package data
import (
"context"
"net/url"
"strings"
errs "github.com/ONSdigital/dp-frontend-search-controller/apperrors"
"github.com/ONSdigital/log.go/log"
)
// Filter represents information of filters selected by user: the validated
// filter type values and their localisation keys, kept as parallel slices.
type Filter struct {
	Query           []string `json:"query,omitempty"`
	LocaliseKeyName []string `json:"localise_key,omitempty"`
}

// Category represents all the search categories in search page
type Category struct {
	LocaliseKeyName string        `json:"localise_key"`
	Count           int           `json:"count"`
	ContentTypes    []ContentType `json:"content_types"`
}

// ContentType represents the type of the search results and the number of results for each type.
// Type is the public filter value; SubTypes are the underlying API content types it expands to.
type ContentType struct {
	LocaliseKeyName string   `json:"localise_key"`
	Count           int      `json:"count"`
	Type            string   `json:"type"`
	SubTypes        []string `json:"sub_types"`
}
var (
	// Categories represent the list of all search categories
	Categories = []Category{Publication, Data, Other}

	// Publication - search information on publication category
	Publication = Category{
		LocaliseKeyName: "Publication",
		ContentTypes:    []ContentType{Bulletin, Article, Compendium},
	}

	// Data - search information on data category
	Data = Category{
		LocaliseKeyName: "Data",
		ContentTypes:    []ContentType{TimeSeries, Datasets, UserRequestedData},
	}

	// Other - search information on other categories
	Other = Category{
		LocaliseKeyName: "Other",
		ContentTypes:    []ContentType{Methodology, CorporateInformation},
	}

	// Bulletin - Search information specific for statistical bulletins
	Bulletin = ContentType{
		LocaliseKeyName: "StatisticalBulletin",
		Type:            "bulletin",
		SubTypes:        []string{"bulletin"},
	}

	// Article - Search information specific for articles
	Article = ContentType{
		LocaliseKeyName: "Article",
		Type:            "article",
		SubTypes:        []string{"article", "article_download"},
	}

	// Compendium - Search information specific for compendium
	Compendium = ContentType{
		LocaliseKeyName: "Compendium",
		Type:            "compendia",
		SubTypes:        []string{"compendium_landing_page"},
	}

	// TimeSeries - Search information specific for time series
	TimeSeries = ContentType{
		LocaliseKeyName: "TimeSeries",
		Type:            "time_series",
		SubTypes:        []string{"timeseries"},
	}

	// Datasets - Search information specific for datasets
	Datasets = ContentType{
		LocaliseKeyName: "Datasets",
		Type:            "datasets",
		SubTypes:        []string{"dataset_landing_page", "reference_tables"},
	}

	// UserRequestedData - Search information specific for user requested data
	UserRequestedData = ContentType{
		LocaliseKeyName: "UserRequestedData",
		Type:            "user_requested_data",
		SubTypes:        []string{"static_adhoc"},
	}

	// Methodology - Search information specific for methodologies
	Methodology = ContentType{
		LocaliseKeyName: "Methodology",
		Type:            "methodology",
		SubTypes:        []string{"static_methodology", "static_methodology_download", "static_qmi"},
	}

	// CorporateInformation - Search information specific for corporate information
	CorporateInformation = ContentType{
		LocaliseKeyName: "CorporateInformation",
		Type:            "corporate_information",
		SubTypes:        []string{"static_foi", "static_page", "static_landing_page", "static_article"},
	}

	// filterOptions maps each valid filter query value (ContentType.Type) to
	// its ContentType; it contains all the filter options available on the
	// search page
	filterOptions = map[string]ContentType{
		Article.Type:              Article,
		Bulletin.Type:             Bulletin,
		Compendium.Type:           Compendium,
		CorporateInformation.Type: CorporateInformation,
		Datasets.Type:             Datasets,
		Methodology.Type:          Methodology,
		TimeSeries.Type:           TimeSeries,
		UserRequestedData.Type:    UserRequestedData,
	}
)
// reviewFilters retrieves filters from the query, checks that each one is a
// known filter option, and appends the validated filter type and its
// localisation key to validatedQueryParams. It returns ErrFilterNotFound for
// the first unrecognised filter; empty filter values are skipped.
func reviewFilters(ctx context.Context, urlQuery url.Values, validatedQueryParams *SearchURLParams) error {
	filtersQuery := urlQuery["filter"]

	for _, filterQuery := range filtersQuery {
		filterQuery = strings.ToLower(filterQuery)

		if filterQuery == "" {
			continue
		}

		filter, found := filterOptions[filterQuery]
		if !found {
			err := errs.ErrFilterNotFound
			// Log the requested value: the previous code logged the
			// zero-valued lookup result, which is empty exactly when the
			// filter is unknown.
			logData := log.Data{"filter not found": filterQuery}
			log.Event(ctx, "failed to find filter", log.Error(err), log.ERROR, logData)
			return err
		}

		validatedQueryParams.Filter.Query = append(validatedQueryParams.Filter.Query, filter.Type)
		validatedQueryParams.Filter.LocaliseKeyName = append(validatedQueryParams.Filter.LocaliseKeyName, filter.LocaliseKeyName)
	}

	return nil
}
// GetCategories returns a deep copy of all the categories and their content
// types, with every count at its zero value, so callers can mutate the result
// without touching the package-level data.
func GetCategories() []Category {
	categories := make([]Category, len(Categories))
	copy(categories, Categories)

	for i := range categories {
		// Detach the ContentTypes slice from the shared package-level data.
		contentTypes := make([]ContentType, len(Categories[i].ContentTypes))
		copy(contentTypes, Categories[i].ContentTypes)

		// Detach each SubTypes slice as well.
		for j := range contentTypes {
			subTypes := make([]string, len(Categories[i].ContentTypes[j].SubTypes))
			copy(subTypes, Categories[i].ContentTypes[j].SubTypes)
			contentTypes[j].SubTypes = subTypes
		}

		categories[i].ContentTypes = contentTypes
	}

	return categories
}
// updateQueryWithAPIFilters rewrites the "content_type" values of apiQuery,
// replacing each user-facing filter with the comma-joined list of API
// sub-types it covers. The query is left untouched when no filter is set.
func updateQueryWithAPIFilters(apiQuery url.Values) {
	filters := apiQuery["content_type"]
	if len(filters) == 0 {
		return
	}
	apiQuery.Set("content_type", strings.Join(getSubFilters(filters), ","))
}
// getSubFilters gets all available sub filters which are related to the
// search filters given by the user. An unknown filter maps to the zero
// ContentType, whose nil SubTypes contributes nothing to the result.
func getSubFilters(filters []string) []string {
	var subFilters = make([]string, 0)
	for _, filter := range filters {
		subFilter := filterOptions[filter]
		subFilters = append(subFilters, subFilter.SubTypes...)
	}
	return subFilters
} | data/filter.go | 0.634656 | 0.403655 | filter.go | starcoder |
package camera
import (
"github.com/go-gl/mathgl/mgl32"
"github.com/wieku/danser-go/app/settings"
"github.com/wieku/danser-go/framework/math/vector"
)
// OsuWidth and OsuHeight are the dimensions of the osu! playfield in
// osu!pixels; they define the logical coordinate space used by SetOsuViewport.
const OsuWidth = 512.0
const OsuHeight = 384.0

// Rectangle describes an axis-aligned region by its corner coordinates.
// MinY may be greater than MaxY when the Y axis points down (see SetViewport).
type Rectangle struct {
	MinX, MinY, MaxX, MaxY float32
}

// Camera combines an orthographic projection with a 2D view transform
// (translate/rotate/scale around an origin) and caches the combined matrices
// until one of the inputs changes.
type Camera struct {
	screenRect        Rectangle
	projection        mgl32.Mat4
	view              mgl32.Mat4
	projectionView    mgl32.Mat4 // projection * view, rebuilt by Update
	invProjectionView mgl32.Mat4 // inverse of projectionView, used by Unproject
	viewDirty         bool       // set by the setters, cleared by Update

	origin   vector.Vector2d // negated rotation/scale origin (see SetOrigin)
	position vector.Vector2d
	rotation float64 // radians
	scale    vector.Vector2d

	rebuildCache bool
	cache        []mgl32.Mat4 // rotated projection-view matrices for GenRotated
}
// NewCamera creates a camera with unit scale and zero-valued transforms.
func NewCamera() *Camera {
	return &Camera{scale: vector.NewVec2d(1, 1)}
}
// SetViewport sets up an orthographic projection for a window of the given
// pixel size, centered on the origin. When yDown is true the Y axis (and the
// near/far planes) are flipped so that Y grows downwards on screen.
func (camera *Camera) SetViewport(width, height int, yDown bool) {
	halfW := float32(width) / 2
	halfH := float32(height) / 2

	camera.screenRect.MinX = -halfW
	camera.screenRect.MaxX = halfW

	near, far := float32(-1), float32(1)
	if yDown {
		camera.screenRect.MinY = halfH
		camera.screenRect.MaxY = -halfH
		near, far = 1, -1
	} else {
		camera.screenRect.MinY = -halfH
		camera.screenRect.MaxY = halfH
	}

	camera.projection = mgl32.Ortho(camera.screenRect.MinX, camera.screenRect.MaxX, camera.screenRect.MinY, camera.screenRect.MaxY, near, far)

	camera.rebuildCache = true
	camera.viewDirty = true
}
// SetOsuViewport configures the camera to display the osu! playfield: it
// fits the 512x384 osu!pixel space into the window while keeping the aspect
// ratio, shrinks it to 80% (times the caller-provided scale) to leave a
// margin, and applies the configured playfield shift.
func (camera *Camera) SetOsuViewport(width, height int, scale float64, offset bool) {
	// scale that fits the playfield into the window on the tighter axis
	baseScale := float64(height) / OsuHeight
	if OsuWidth/OsuHeight > float64(width)/float64(height) {
		baseScale = float64(width) / OsuWidth
	}

	scl := baseScale * 0.8 * scale

	// offset forces a fixed vertical shift of 8 instead of the user setting
	shift := settings.Playfield.ShiftY
	if offset {
		shift = 8
	}

	camera.SetViewport(width, height, true)
	camera.SetOrigin(vector.NewVec2d(OsuWidth/2, OsuHeight/2))
	camera.SetPosition(vector.NewVec2d(settings.Playfield.ShiftX, shift).Scl(scl))
	camera.SetScale(vector.NewVec2d(scl, scl))
	camera.Update()

	camera.rebuildCache = true
	camera.viewDirty = true
}
// SetViewportF sets the viewport rectangle from explicit bounds and rebuilds
// the projection with flipped near/far planes.
// NOTE(review): the parameters are stored as Min=(x, y) and Max=(width,
// height), i.e. 'width' and 'height' are treated as the far-corner
// coordinates rather than as sizes — confirm callers pass absolute bounds.
func (camera *Camera) SetViewportF(x, y, width, height int) {
	camera.screenRect.MinX = float32(x)
	camera.screenRect.MaxX = float32(width)
	camera.screenRect.MinY = float32(y)
	camera.screenRect.MaxY = float32(height)

	camera.projection = mgl32.Ortho(camera.screenRect.MinX, camera.screenRect.MaxX, camera.screenRect.MinY, camera.screenRect.MaxY, 1, -1)

	camera.rebuildCache = true
	camera.viewDirty = true
}
// calculateView rebuilds the view matrix as translate(position) * rotate *
// scale * translate(origin); rotation and scaling therefore happen around
// the configured origin.
func (camera *Camera) calculateView() {
	camera.view = mgl32.Translate3D(camera.position.X32(), camera.position.Y32(), 0).Mul4(mgl32.HomogRotate3DZ(float32(camera.rotation))).Mul4(mgl32.Scale3D(camera.scale.X32(), camera.scale.Y32(), 1)).Mul4(mgl32.Translate3D(camera.origin.X32(), camera.origin.Y32(), 0))
}

// SetPosition moves the camera to the given position.
func (camera *Camera) SetPosition(pos vector.Vector2d) {
	camera.position = pos
	camera.viewDirty = true
}

// SetOrigin sets the point around which rotation and scaling are applied.
// The value is stored negated so calculateView can use it directly as a
// translation.
func (camera *Camera) SetOrigin(pos vector.Vector2d) {
	camera.origin = pos.Scl(-1)
	camera.viewDirty = true
}

// SetScale sets the camera zoom per axis.
func (camera *Camera) SetScale(scale vector.Vector2d) {
	camera.scale = scale
	camera.viewDirty = true
}

// SetRotation sets the camera rotation in radians.
func (camera *Camera) SetRotation(rad float64) {
	camera.rotation = rad
	camera.viewDirty = true
}

// Rotate adds rad radians to the current rotation.
func (camera *Camera) Rotate(rad float64) {
	camera.rotation += rad
	camera.viewDirty = true
}

// Translate moves the camera by the given offset.
func (camera *Camera) Translate(pos vector.Vector2d) {
	camera.position = camera.position.Add(pos)
	camera.viewDirty = true
}

// Scale multiplies the current scale by the given factors.
func (camera *Camera) Scale(scale vector.Vector2d) {
	camera.scale = camera.scale.Mult(scale)
	camera.viewDirty = true
}
// Update rebuilds the cached projection-view matrices if any transform
// changed since the last call; it is a no-op otherwise.
func (camera *Camera) Update() {
	if !camera.viewDirty {
		return
	}

	camera.calculateView()
	camera.projectionView = camera.projection.Mul4(camera.view)
	camera.invProjectionView = camera.projectionView.Inv()
	camera.rebuildCache = true
	camera.viewDirty = false
}
// GenRotated returns 'rotations' projection-view matrices, the i-th rotated
// by i*rotOffset radians around the camera position. The result is cached
// and only rebuilt when the camera changed or a different rotation count is
// requested.
func (camera *Camera) GenRotated(rotations int, rotOffset float64) []mgl32.Mat4 {
	if len(camera.cache) != rotations || camera.rebuildCache {
		if len(camera.cache) != rotations {
			camera.cache = make([]mgl32.Mat4, rotations)
		}

		// split the view so the extra rotation is inserted between the
		// position translation and the rest of the transform
		pos := mgl32.Translate3D(camera.position.X32(), camera.position.Y32(), 0)
		view := mgl32.HomogRotate3DZ(float32(camera.rotation)).Mul4(mgl32.Scale3D(camera.scale.X32(), camera.scale.Y32(), 1)).Mul4(mgl32.Translate3D(camera.origin.X32(), camera.origin.Y32(), 0))

		for i := 0; i < rotations; i++ {
			camera.cache[i] = camera.projection.Mul4(pos).Mul4(mgl32.HomogRotate3DZ(float32(i) * float32(rotOffset))).Mul4(view)
		}

		camera.rebuildCache = false
	}

	return camera.cache
}
// GetProjectionView returns the combined projection*view matrix computed by
// the last Update call. The value receiver copies the camera, which is safe
// here because the method only reads.
func (camera Camera) GetProjectionView() mgl32.Mat4 {
	return camera.projectionView
}

// Unproject converts a screen-space position into world coordinates using
// the inverse projection-view matrix.
func (camera Camera) Unproject(screenPos vector.Vector2d) vector.Vector2d {
	res := camera.invProjectionView.Mul4x1(mgl32.Vec4{(screenPos.X32() + camera.screenRect.MinX) / camera.screenRect.MaxX, -(screenPos.Y32() + camera.screenRect.MaxY) / camera.screenRect.MinY, 0.0, 1.0})
	return vector.NewVec2d(float64(res[0]), float64(res[1]))
}

// Project converts a world-space position into screen coordinates: the
// clip-space result is remapped from [-1,1] to [0,1] and then scaled into
// the screen rectangle.
func (camera Camera) Project(worldPos vector.Vector2d) vector.Vector2d {
	res := camera.projectionView.Mul4x1(mgl32.Vec4{worldPos.X32(), worldPos.Y32(), 0.0, 1.0}).Add(mgl32.Vec4{1, 1, 0, 0}).Mul(0.5)
	return vector.NewVec2f(camera.screenRect.MinX+res[0]*(camera.screenRect.MaxX-camera.screenRect.MinX), camera.screenRect.MinY+res[1]*(camera.screenRect.MaxY-camera.screenRect.MinY)).Copy64()
}
// GetWorldRect returns the world-space rectangle currently visible through
// the camera, obtained by unprojecting the two opposite clip-space corners.
// MinY/MaxY are normalised so that MinY <= MaxY regardless of Y direction.
func (camera Camera) GetWorldRect() Rectangle {
	res := camera.invProjectionView.Mul4x1(mgl32.Vec4{-1.0, 1.0, 0.0, 1.0})

	var rectangle Rectangle
	rectangle.MinX = res[0]
	rectangle.MinY = res[1]

	res = camera.invProjectionView.Mul4x1(mgl32.Vec4{1.0, -1.0, 0.0, 1.0})

	rectangle.MaxX = res[0]
	rectangle.MaxY = res[1]

	if rectangle.MinY > rectangle.MaxY {
		a := rectangle.MinY
		rectangle.MinY, rectangle.MaxY = rectangle.MaxY, a
	}

	return rectangle
} | app/bmath/camera/camera.go | 0.793186 | 0.53777 | camera.go | starcoder |
package expression
import (
"fmt"
"regexp"
errors "gopkg.in/src-d/go-errors.v1"
"gopkg.in/src-d/go-mysql-server.v0/sql"
)
// Comparer implements a comparison expression: it can evaluate both of its
// operands against a row and report their relative order.
type Comparer interface {
	sql.Expression
	Compare(ctx *sql.Context, row sql.Row) (int, error)
	Left() sql.Expression
	Right() sql.Expression
}

// ErrNilOperand is returned if some or both of the comparison's operands is nil.
var ErrNilOperand = errors.NewKind("nil operand found in comparison")

// comparison is the shared base of all binary comparison expressions.
// compareType caches the type both operands were coerced to when their
// static types differ (see castLeftAndRight).
type comparison struct {
	BinaryExpression
	compareType sql.Type
}

// newComparison builds the shared comparison base for two operands.
func newComparison(left, right sql.Expression) comparison {
	return comparison{BinaryExpression{left, right}, nil}
}
// Compare the two given values using the types of the expressions in the comparison.
// Since both types should be equal, it does not matter which type is used, but for
// reference, the left type is always used. When the static types differ, both
// values are first coerced to a common type (see castLeftAndRight).
// It returns -1, 0 or 1, or ErrNilOperand when either value is nil.
func (c *comparison) Compare(ctx *sql.Context, row sql.Row) (int, error) {
	left, right, err := c.evalLeftAndRight(ctx, row)
	if err != nil {
		return 0, err
	}

	if left == nil || right == nil {
		return 0, ErrNilOperand.New()
	}

	if c.Left().Type() == c.Right().Type() {
		return c.Left().Type().Compare(left, right)
	}

	left, right, err = c.castLeftAndRight(left, right)
	if err != nil {
		return 0, err
	}

	return c.compareType.Compare(left, right)
}

// evalLeftAndRight evaluates both operands against the given row.
func (c *comparison) evalLeftAndRight(ctx *sql.Context, row sql.Row) (interface{}, interface{}, error) {
	left, err := c.Left().Eval(ctx, row)
	if err != nil {
		return nil, nil, err
	}

	right, err := c.Right().Eval(ctx, row)
	if err != nil {
		return nil, nil, err
	}

	return left, right, nil
}
// castLeftAndRight converts both values to a common comparable type and
// records that type in c.compareType: decimals compare as Float64, signed
// integers as Int64, unsigned integers as Uint64 and everything else as Text.
func (c *comparison) castLeftAndRight(left, right interface{}) (interface{}, interface{}, error) {
	if sql.IsNumber(c.Left().Type()) || sql.IsNumber(c.Right().Type()) {
		if sql.IsDecimal(c.Left().Type()) || sql.IsDecimal(c.Right().Type()) {
			left, right, err := convertLeftAndRight(left, right, ConvertToDecimal)
			if err != nil {
				return nil, nil, err
			}

			c.compareType = sql.Float64
			return left, right, nil
		}

		if sql.IsSigned(c.Left().Type()) || sql.IsSigned(c.Right().Type()) {
			left, right, err := convertLeftAndRight(left, right, ConvertToSigned)
			if err != nil {
				return nil, nil, err
			}

			c.compareType = sql.Int64
			return left, right, nil
		}

		left, right, err := convertLeftAndRight(left, right, ConvertToUnsigned)
		if err != nil {
			return nil, nil, err
		}

		c.compareType = sql.Uint64
		return left, right, nil
	}

	left, right, err := convertLeftAndRight(left, right, ConvertToChar)
	if err != nil {
		return nil, nil, err
	}

	c.compareType = sql.Text
	return left, right, nil
}

// convertLeftAndRight converts both values using the given conversion kind.
func convertLeftAndRight(left, right interface{}, convertTo string) (interface{}, interface{}, error) {
	l, err := convertValue(left, convertTo)
	if err != nil {
		return nil, nil, err
	}

	r, err := convertValue(right, convertTo)
	if err != nil {
		return nil, nil, err
	}

	return l, r, nil
}
// Type implements the Expression interface; a comparison always yields a boolean.
func (*comparison) Type() sql.Type {
	return sql.Boolean
}

// Left implements Comparer interface
func (c *comparison) Left() sql.Expression { return c.BinaryExpression.Left }

// Right implements Comparer interface
func (c *comparison) Right() sql.Expression { return c.BinaryExpression.Right }
// Equals is a comparison that checks an expression is equal to another.
type Equals struct {
	comparison
}

// NewEquals returns a new Equals expression.
func NewEquals(left sql.Expression, right sql.Expression) *Equals {
	return &Equals{newComparison(left, right)}
}

// Eval implements the Expression interface. A nil operand yields nil
// (SQL NULL semantics) rather than an error.
func (e *Equals) Eval(ctx *sql.Context, row sql.Row) (interface{}, error) {
	span, ctx := ctx.Span("expression.Equals")
	defer span.Finish()

	result, err := e.Compare(ctx, row)
	if err != nil {
		if ErrNilOperand.Is(err) {
			return nil, nil
		}

		return nil, err
	}

	return result == 0, nil
}

// TransformUp implements the Expression interface: it transforms both
// operands and then applies f to a rebuilt Equals node.
func (e *Equals) TransformUp(f sql.TransformExprFunc) (sql.Expression, error) {
	left, err := e.Left().TransformUp(f)
	if err != nil {
		return nil, err
	}

	right, err := e.Right().TransformUp(f)
	if err != nil {
		return nil, err
	}

	return f(NewEquals(left, right))
}

// String renders the expression in SQL-like syntax.
func (e *Equals) String() string {
	return fmt.Sprintf("%s = %s", e.Left(), e.Right())
}
// Regexp is a comparison that checks an expression matches a regexp.
type Regexp struct {
	comparison
}

// NewRegexp creates a new Regexp expression.
func NewRegexp(left sql.Expression, right sql.Expression) *Regexp {
	return &Regexp{newComparison(left, right)}
}

// Eval implements the Expression interface. When both operands are text the
// right-hand side is used as a regular expression pattern; any other operand
// types fall back to a plain equality comparison.
func (re *Regexp) Eval(ctx *sql.Context, row sql.Row) (interface{}, error) {
	span, ctx := ctx.Span("expression.Regexp")
	defer span.Finish()

	if sql.IsText(re.Left().Type()) && sql.IsText(re.Right().Type()) {
		return re.compareRegexp(ctx, row)
	}

	result, err := re.Compare(ctx, row)
	if err != nil {
		if ErrNilOperand.Is(err) {
			return nil, nil
		}

		return nil, err
	}

	return result == 0, nil
}

// compareRegexp matches the left text against the right text used as a
// regexp pattern. A nil operand yields nil.
// NOTE(review): the pattern is recompiled for every row; consider caching
// the compiled regexp when the right operand is constant.
func (re *Regexp) compareRegexp(ctx *sql.Context, row sql.Row) (interface{}, error) {
	left, right, err := re.evalLeftAndRight(ctx, row)
	if err != nil {
		return nil, err
	}

	if left == nil || right == nil {
		return nil, nil
	}

	left, err = sql.Text.Convert(left)
	if err != nil {
		return nil, err
	}

	right, err = sql.Text.Convert(right)
	if err != nil {
		return nil, err
	}

	reg, err := regexp.Compile(right.(string))
	if err != nil {
		return false, err
	}

	return reg.MatchString(left.(string)), nil
}

// TransformUp implements the Expression interface.
func (re *Regexp) TransformUp(f sql.TransformExprFunc) (sql.Expression, error) {
	left, err := re.Left().TransformUp(f)
	if err != nil {
		return nil, err
	}

	right, err := re.Right().TransformUp(f)
	if err != nil {
		return nil, err
	}

	return f(NewRegexp(left, right))
}

// String renders the expression in SQL-like syntax.
func (re *Regexp) String() string {
	return fmt.Sprintf("%s REGEXP %s", re.Left(), re.Right())
}
// GreaterThan is a comparison that checks an expression is greater than another.
type GreaterThan struct {
	comparison
}

// NewGreaterThan creates a new GreaterThan expression.
func NewGreaterThan(left sql.Expression, right sql.Expression) *GreaterThan {
	return &GreaterThan{newComparison(left, right)}
}

// Eval implements the Expression interface. Compare yields -1/0/1, so only
// 1 means strictly greater. A nil operand yields nil (SQL NULL semantics).
func (gt *GreaterThan) Eval(ctx *sql.Context, row sql.Row) (interface{}, error) {
	span, ctx := ctx.Span("expression.GreaterThan")
	defer span.Finish()

	result, err := gt.Compare(ctx, row)
	if err != nil {
		if ErrNilOperand.Is(err) {
			return nil, nil
		}

		return nil, err
	}

	return result == 1, nil
}

// TransformUp implements the Expression interface.
func (gt *GreaterThan) TransformUp(f sql.TransformExprFunc) (sql.Expression, error) {
	left, err := gt.Left().TransformUp(f)
	if err != nil {
		return nil, err
	}

	right, err := gt.Right().TransformUp(f)
	if err != nil {
		return nil, err
	}

	return f(NewGreaterThan(left, right))
}

// String renders the expression in SQL-like syntax.
func (gt *GreaterThan) String() string {
	return fmt.Sprintf("%s > %s", gt.Left(), gt.Right())
}
// LessThan is a comparison that checks an expression is less than another.
type LessThan struct {
	comparison
}

// NewLessThan creates a new LessThan expression.
func NewLessThan(left sql.Expression, right sql.Expression) *LessThan {
	return &LessThan{newComparison(left, right)}
}

// Eval implements the expression interface. Compare yields -1/0/1, so only
// -1 means strictly less. A nil operand yields nil (SQL NULL semantics).
func (lt *LessThan) Eval(ctx *sql.Context, row sql.Row) (interface{}, error) {
	span, ctx := ctx.Span("expression.LessThan")
	defer span.Finish()

	result, err := lt.Compare(ctx, row)
	if err != nil {
		if ErrNilOperand.Is(err) {
			return nil, nil
		}

		return nil, err
	}

	return result == -1, nil
}

// TransformUp implements the Expression interface.
func (lt *LessThan) TransformUp(f sql.TransformExprFunc) (sql.Expression, error) {
	left, err := lt.Left().TransformUp(f)
	if err != nil {
		return nil, err
	}

	right, err := lt.Right().TransformUp(f)
	if err != nil {
		return nil, err
	}

	return f(NewLessThan(left, right))
}

// String renders the expression in SQL-like syntax.
func (lt *LessThan) String() string {
	return fmt.Sprintf("%s < %s", lt.Left(), lt.Right())
}
// GreaterThanOrEqual is a comparison that checks an expression is greater or equal to
// another.
type GreaterThanOrEqual struct {
	comparison
}

// NewGreaterThanOrEqual creates a new GreaterThanOrEqual
func NewGreaterThanOrEqual(left sql.Expression, right sql.Expression) *GreaterThanOrEqual {
	return &GreaterThanOrEqual{newComparison(left, right)}
}

// Eval implements the Expression interface. Compare yields -1/0/1, so
// result > -1 accepts both 0 (equal) and 1 (greater). A nil operand yields
// nil (SQL NULL semantics).
func (gte *GreaterThanOrEqual) Eval(ctx *sql.Context, row sql.Row) (interface{}, error) {
	span, ctx := ctx.Span("expression.GreaterThanOrEqual")
	defer span.Finish()

	result, err := gte.Compare(ctx, row)
	if err != nil {
		if ErrNilOperand.Is(err) {
			return nil, nil
		}

		return nil, err
	}

	return result > -1, nil
}

// TransformUp implements the Expression interface.
func (gte *GreaterThanOrEqual) TransformUp(f sql.TransformExprFunc) (sql.Expression, error) {
	left, err := gte.Left().TransformUp(f)
	if err != nil {
		return nil, err
	}

	right, err := gte.Right().TransformUp(f)
	if err != nil {
		return nil, err
	}

	return f(NewGreaterThanOrEqual(left, right))
}

// String renders the expression in SQL-like syntax.
func (gte *GreaterThanOrEqual) String() string {
	return fmt.Sprintf("%s >= %s", gte.Left(), gte.Right())
}
// LessThanOrEqual is a comparison that checks an expression is equal or lower than
// another.
type LessThanOrEqual struct {
	comparison
}

// NewLessThanOrEqual creates a LessThanOrEqual expression.
func NewLessThanOrEqual(left sql.Expression, right sql.Expression) *LessThanOrEqual {
	return &LessThanOrEqual{newComparison(left, right)}
}

// Eval implements the Expression interface. Compare yields -1/0/1, so
// result < 1 accepts both -1 (less) and 0 (equal). A nil operand yields nil
// (SQL NULL semantics).
func (lte *LessThanOrEqual) Eval(ctx *sql.Context, row sql.Row) (interface{}, error) {
	span, ctx := ctx.Span("expression.LessThanOrEqual")
	defer span.Finish()

	result, err := lte.Compare(ctx, row)
	if err != nil {
		if ErrNilOperand.Is(err) {
			return nil, nil
		}

		return nil, err
	}

	return result < 1, nil
}

// TransformUp implements the Expression interface.
func (lte *LessThanOrEqual) TransformUp(f sql.TransformExprFunc) (sql.Expression, error) {
	left, err := lte.Left().TransformUp(f)
	if err != nil {
		return nil, err
	}

	right, err := lte.Right().TransformUp(f)
	if err != nil {
		return nil, err
	}

	return f(NewLessThanOrEqual(left, right))
}

// String renders the expression in SQL-like syntax.
func (lte *LessThanOrEqual) String() string {
	return fmt.Sprintf("%s <= %s", lte.Left(), lte.Right())
}
var (
	// ErrUnsupportedInOperand is returned when there is an invalid righthand
	// operand in an IN operator.
	ErrUnsupportedInOperand = errors.NewKind("right operand in IN operation must be tuple, but is %T")
	// ErrInvalidOperandColumns is returned when the columns in the left operand
	// and the elements of the right operand don't match.
	ErrInvalidOperandColumns = errors.NewKind("operand should have %d columns, but has %d")
)

// In is a comparison that checks an expression is inside a list of expressions.
type In struct {
	comparison
}

// NewIn creates an In expression.
func NewIn(left sql.Expression, right sql.Expression) *In {
	return &In{newComparison(left, right)}
}
// Eval implements the Expression interface. It evaluates the left operand
// and reports whether it equals any element of the right-hand tuple.
// A nil left operand yields nil (SQL NULL semantics); a non-tuple right
// operand yields ErrUnsupportedInOperand.
func (in *In) Eval(ctx *sql.Context, row sql.Row) (interface{}, error) {
	span, ctx := ctx.Span("expression.In")
	defer span.Finish()

	typ := in.Left().Type()
	leftElems := sql.NumColumns(typ)
	left, err := in.Left().Eval(ctx, row)
	if err != nil {
		return nil, err
	}

	if left == nil {
		// NULL IN (...) is NULL, not an error. The previous code returned
		// the stale (provably nil) err variable here, which obscured that
		// intent.
		return nil, nil
	}

	left, err = typ.Convert(left)
	if err != nil {
		return nil, err
	}

	// TODO: support subqueries
	switch right := in.Right().(type) {
	case Tuple:
		// Validate the arity of every element before evaluating any of
		// them, so a malformed tuple fails fast.
		for _, el := range right {
			if sql.NumColumns(el.Type()) != leftElems {
				return nil, ErrInvalidOperandColumns.New(leftElems, sql.NumColumns(el.Type()))
			}
		}

		for _, el := range right {
			right, err := el.Eval(ctx, row)
			if err != nil {
				return nil, err
			}

			right, err = typ.Convert(right)
			if err != nil {
				return nil, err
			}

			cmp, err := typ.Compare(left, right)
			if err != nil {
				return nil, err
			}

			if cmp == 0 {
				return true, nil
			}
		}

		return false, nil
	default:
		return nil, ErrUnsupportedInOperand.New(right)
	}
}
// TransformUp implements the Expression interface.
func (in *In) TransformUp(f sql.TransformExprFunc) (sql.Expression, error) {
	left, err := in.Left().TransformUp(f)
	if err != nil {
		return nil, err
	}

	right, err := in.Right().TransformUp(f)
	if err != nil {
		return nil, err
	}

	return f(NewIn(left, right))
}

// String renders the expression in SQL-like syntax.
func (in *In) String() string {
	return fmt.Sprintf("%s IN %s", in.Left(), in.Right())
}

// Children implements the Expression interface.
func (in *In) Children() []sql.Expression {
	return []sql.Expression{in.Left(), in.Right()}
}

// NotIn is a comparison that checks an expression is not inside a list of expressions.
type NotIn struct {
	comparison
}

// NewNotIn creates a NotIn expression.
func NewNotIn(left sql.Expression, right sql.Expression) *NotIn {
	return &NotIn{newComparison(left, right)}
}
// Eval implements the Expression interface. It evaluates the left operand
// and reports whether it differs from every element of the right-hand tuple.
// A nil left operand yields nil (SQL NULL semantics); a non-tuple right
// operand yields ErrUnsupportedInOperand.
func (in *NotIn) Eval(ctx *sql.Context, row sql.Row) (interface{}, error) {
	span, ctx := ctx.Span("expression.NotIn")
	defer span.Finish()

	typ := in.Left().Type()
	leftElems := sql.NumColumns(typ)
	left, err := in.Left().Eval(ctx, row)
	if err != nil {
		return nil, err
	}

	if left == nil {
		// NULL NOT IN (...) is NULL, not an error. The previous code
		// returned the stale (provably nil) err variable here, which
		// obscured that intent.
		return nil, nil
	}

	left, err = typ.Convert(left)
	if err != nil {
		return nil, err
	}

	// TODO: support subqueries
	switch right := in.Right().(type) {
	case Tuple:
		// Validate the arity of every element before evaluating any of
		// them, so a malformed tuple fails fast.
		for _, el := range right {
			if sql.NumColumns(el.Type()) != leftElems {
				return nil, ErrInvalidOperandColumns.New(leftElems, sql.NumColumns(el.Type()))
			}
		}

		for _, el := range right {
			right, err := el.Eval(ctx, row)
			if err != nil {
				return nil, err
			}

			right, err = typ.Convert(right)
			if err != nil {
				return nil, err
			}

			cmp, err := typ.Compare(left, right)
			if err != nil {
				return nil, err
			}

			if cmp == 0 {
				return false, nil
			}
		}

		return true, nil
	default:
		return nil, ErrUnsupportedInOperand.New(right)
	}
}
// TransformUp implements the Expression interface.
func (in *NotIn) TransformUp(f sql.TransformExprFunc) (sql.Expression, error) {
left, err := in.Left().TransformUp(f)
if err != nil {
return nil, err
}
right, err := in.Right().TransformUp(f)
if err != nil {
return nil, err
}
return f(NewNotIn(left, right))
}
func (in *NotIn) String() string {
return fmt.Sprintf("%s NOT IN %s", in.Left(), in.Right())
}
// Children implements the Expression interface.
func (in *NotIn) Children() []sql.Expression {
return []sql.Expression{in.Left(), in.Right()}
} | vendor/gopkg.in/src-d/go-mysql-server.v0/sql/expression/comparison.go | 0.825906 | 0.468851 | comparison.go | starcoder |
package common
import "math"
type Matrix3 struct {
elements [9]float32 // COLUMN-MAJOR (just like WebGL)
}
func NewMatrix3() *Matrix3 {
mtx := Matrix3{elements: [9]float32{1, 0, 0, 0, 1, 0, 0, 0, 1}} // identity matrix
return &mtx
}
func (self *Matrix3) GetElements() *[9]float32 {
return &self.elements // reference
}
// ----------------------------------------------------------------------------
// Setting element values
// ----------------------------------------------------------------------------
func (self *Matrix3) Set(v00 float32, v01 float32, v02 float32, v10 float32, v11 float32, v12 float32, v20 float32, v21 float32, v22 float32) *Matrix3 {
self.elements = [9]float32{ // COLUMN-MAJOR (just like WebGL)
v00, v10, v20,
v01, v11, v21,
v02, v12, v22}
return self
}
func (self *Matrix3) SetIdentity() *Matrix3 {
self.Set(1, 0, 0, 0, 1, 0, 0, 0, 1)
return self
}
func (self *Matrix3) SetCopy(m *Matrix3) *Matrix3 {
self.elements = m.elements // copy values
return self
}
func (self *Matrix3) SetTranspose() *Matrix3 {
e := &self.elements // reference
e[1], e[3] = e[3], e[1] // [0], [1], [2]
e[2], e[6] = e[6], e[2] // [3], [4], [5]
e[5], e[7] = e[7], e[5] // [6], [7], [8]
return self
}
func (self *Matrix3) SetTranslation(tx float32, ty float32) *Matrix3 {
self.Set(
1.0, 0.0, tx,
0.0, 1.0, ty,
0.0, 0.0, 1.0)
return self
}
func (self *Matrix3) SetScaling(sx float32, sy float32) *Matrix3 {
self.Set(
sx, 0.0, 0,
0.0, sy, 0,
0.0, 0.0, 1.0)
return self
}
func (self *Matrix3) SetRotation(angle_in_degree float32) *Matrix3 {
// Based on http://www.gamedev.net/reference/articles/article1199.asp
cos := float32(math.Cos(float64(angle_in_degree) * (math.Pi / 180.0)))
sin := float32(math.Sin(float64(angle_in_degree) * (math.Pi / 180.0)))
self.Set(
cos, -sin, 0.0,
+sin, cos, 0.0,
0.0, 0.0, 1.0)
return self
}
func (self *Matrix3) SetMultiplyMatrices(matrices ...*Matrix3) *Matrix3 {
if len(matrices) > 0 {
m := matrices[0] // multiply all the matrices first,
for i := 1; i < len(matrices); i++ {
m = m.MultiplyToTheRight(matrices[i])
}
self.SetCopy(m) // and then copy (overwriting old values)
}
return self
}
// ----------------------------------------------------------------------------
// Creating new matrix
// ----------------------------------------------------------------------------
func (self *Matrix3) Copy() *Matrix3 {
return &Matrix3{elements: self.elements}
}
func (self *Matrix3) Transpose() *Matrix3 {
o := &self.elements // reference
return &Matrix3{elements: [9]float32{o[0], o[1], o[2], o[3], o[4], o[5], o[6], o[7], o[8]}}
}
func (self *Matrix3) MultiplyToTheLeft(matrix *Matrix3) *Matrix3 {
o := &self.elements // reference
m := &matrix.elements // reference
return &Matrix3{elements: [9]float32{
o[0]*m[0] + o[1]*m[3] + o[2]*m[6],
o[0]*m[1] + o[1]*m[4] + o[2]*m[7],
o[0]*m[2] + o[1]*m[5] + o[2]*m[8],
o[3]*m[0] + o[4]*m[3] + o[5]*m[6],
o[3]*m[1] + o[4]*m[4] + o[5]*m[7],
o[3]*m[2] + o[4]*m[5] + o[5]*m[8],
o[6]*m[0] + o[7]*m[3] + o[8]*m[6],
o[6]*m[1] + o[7]*m[4] + o[8]*m[7],
o[6]*m[2] + o[7]*m[5] + o[8]*m[8]}}
}
func (self *Matrix3) MultiplyToTheRight(matrix *Matrix3) *Matrix3 {
o := &self.elements // reference
m := &matrix.elements // reference
return &Matrix3{elements: [9]float32{
m[0]*o[0] + m[1]*o[3] + m[2]*o[6],
m[0]*o[1] + m[1]*o[4] + m[2]*o[7],
m[0]*o[2] + m[1]*o[5] + m[2]*o[8],
m[3]*o[0] + m[4]*o[3] + m[5]*o[6],
m[3]*o[1] + m[4]*o[4] + m[5]*o[7],
m[3]*o[2] + m[4]*o[5] + m[5]*o[8],
m[6]*o[0] + m[7]*o[3] + m[8]*o[6],
m[6]*o[1] + m[7]*o[4] + m[8]*o[7],
m[6]*o[2] + m[7]*o[5] + m[8]*o[8]}}
}
// ----------------------------------------------------------------------------
// Handling Vector
// ----------------------------------------------------------------------------
func (self *Matrix3) MultiplyVector2(v [2]float32) [2]float32 {
e := &self.elements // reference
return [2]float32{
e[0]*v[0] + e[3]*v[1] + e[6], // COLUMN-MAJOR
e[1]*v[0] + e[4]*v[1] + e[7]}
} | common/matrix3.go | 0.739328 | 0.582283 | matrix3.go | starcoder |
package ns
/**
* Configuration for encryption key resource.
*/
type Nsencryptionkey struct {
/**
* Key name. This follows the same syntax rules as other expression entity names:
It must begin with an alpha character (A-Z or a-z) or an underscore (_).
The rest of the characters must be alpha, numeric (0-9) or underscores.
It cannot be re or xp (reserved for regular and XPath expressions).
It cannot be an expression reserved word (e.g. SYS or HTTP).
It cannot be used for an existing expression object (HTTP callout, patset, dataset, stringmap, or named expression).
*/
Name string `json:"name,omitempty"`
/**
* Cipher method to be used to encrypt and decrypt content.
NONE - no encryption or decryption is performed The output of ENCRYPT() and DECRYPT() is the same as the input.
RC4 - the RC4 stream cipher with a 128 bit (16 byte) key; RC4 is now considered insecure and should only be used if required by existing applciations.
DES[-<mode>] - the Data Encryption Standard (DES) block cipher with a 64-bit (8 byte) key, with 56 data bits and 8 parity bits. DES is considered less secure than DES3 or AES so it should only be used if required by an existing applicastion. The optional mode is described below; DES without a mode is equivalent to DES-CBC.
DES3[-<mode>] - the Triple Data Encryption Standard (DES) block cipher with a 192-bit (24 byte) key. The optional mode is described below; DES3 without a mode is equivalent to DES3-CBC.
AES<keysize>[-<mode>] - the Advanced Encryption Standard block cipher, available with 128 bit (16 byte), 192 bit (24 byte), and 256 bit (32 byte) keys. The optional mode is described below; AES<keysize> without a mode is equivalent to AES<keysize>-CBC.
For a block cipher, the <mode> specifies how multiple blocks of plaintext are encrypted and how the Initialization Vector (IV) is used. Choices are
CBC (Cipher Block Chaining) - Each block of plaintext is XORed with the previous ciphertext block, or IV for the first block, before being encrypted. Padding is required if the plaintext is not a multiple of the cipher block size.
CFB (Cipher Feedback) - The previous ciphertext block, or the IV for the first block, is encrypted and the output is XORed with the current plaintext block to create the current ciphertext block. The 128-bit version of CFB is provided. Padding is not required.
OFB (Output Feedback) - A keystream is generated by applying the cipher successfully to the IV and XORing the keystream blocks with the plaintext. Padding is not required.
ECB (Electronic Codebook) - Each block of plaintext is independently encrypted. An IV is not used. Padding is required. This mode is considered less secure than the other modes because the same plaintext always produces the same encrypted text and should only be used if required by an existing application.
*/
Method string `json:"method,omitempty"`
/**
* The hex-encoded key value. The length is determined by the cipher method:
RC4 - 16 bytes
DES - 8 bytes (all modes)
DES3 - 24 bytes (all modes)
AES128 - 16 bytes (all modes)
AES192 - 24 bytes (all modes)
AES256 - 32 bytes (all modes)
Note that the keyValue will be encrypted when it it is saved.
There is a special key value AUTO which generates a new random key for the specified method. This kind of key is
intended for use cases where the NetScaler both encrypts and decrypts the same data, such an HTTP header.
*/
Keyvalue string `json:"keyvalue,omitempty"`
/**
* Enables or disables the padding of plaintext to meet the block size requirements of block ciphers:
ON - For encryption, PKCS5/7 padding is used, which appends n bytes of value n on the end of the plaintext to bring it to the cipher block lnegth. If the plaintext length is alraady a multiple of the block length, an additional block with bytes of value block_length will be added. For decryption, ISO 10126 padding is accepted, which expects the last byte of the block to be the number of added pad bytes. Note that this accepts PKCS5/7 padding, as well as ANSI_X923 padding. Padding ON is the default for the ECB and CBD modes.
OFF - No padding. An Undef error will occur with the ECB or CBC modes if the plaintext length is not a multitple of the cipher block size. This can be used with the CFB and OFB modes, and with the ECB and CBC modes if the plaintext will always be an integral number of blocks, or if custom padding is implemented using a policy extension function. Padding OFf is the default for CFB and OFB modes.
*/
Padding string `json:"padding,omitempty"`
/**
* The initalization voector (IV) for a block cipher, one block of data used to initialize the encryption. The best practice is to not specify an IV, in which case a new random IV will be generated for each encryption. The format must be iv_data or keyid_iv_data to include the generated IV in the encrypted data. The IV should only be specified if it cannot be included in the encrypted data. The IV length is the cipher block size:
RC4 - not used (error if IV is specified)
DES - 8 bytes (all modes)
DES3 - 8 bytes (all modes)
AES128 - 16 bytes (all modes)
AES192 - 16 bytes (all modes)
AES256 - 16 bytes (all modes)
*/
Iv string `json:"iv,omitempty"`
/**
* Comments associated with this encryption key.
*/
Comment string `json:"comment,omitempty"`
} | resource/config/ns/nsencryptionkey.go | 0.701406 | 0.609146 | nsencryptionkey.go | starcoder |
package main
import (
"fmt"
"io/ioutil"
"regexp"
)
var digitRegexp = regexp.MustCompile("[0-9]+")
// FindDigits find digits in file in a consistent way
func FindDigits(filename string) []byte {
b, _ := ioutil.ReadFile(filename)
b = digitRegexp.Find(b)
c := make([]byte, len(b))
c = append(c, b...)
return digitRegexp.Find(b)
}
// Slices describe slices type for golang
func Slices() {
// Unlike arrays, slices are typed only by the elements they contain (not the number of elements).
// To create an empty slice with non-zero length, use the builtin make.
// Here we make a slice of strings of length 3 (initially zero-valued).
s := make([]string, 3)
fmt.Println("emp:", s)
// We can set and get just like with arrays.
s[0] = "a"
s[1] = "b"
s[2] = "c"
fmt.Println("set: ", s)
fmt.Println("get: ", s[2])
// len returns the length of the slice as expected.
fmt.Println("len: ", len(s))
// In addition to these basic operations, slices support several more that make them richer than arrays.
// One is the builtin append, which returns a slice containing one or more new values.
// Note that we need to accept a return value from append as we may get a new slice value.
s = append(s, "d")
s = append(s, "e", "f")
fmt.Println("apd: ", s)
// Slices can also be copy’d.
// Here we create an empty slice c of the same length as s and copy into c from s.
c := make([]string, len(s))
copy(c, s)
fmt.Println("cpy: ", c)
// Slices support a “slice” operator with the syntax slice[low:high].
// For example, this gets a slice of the elements s[2], s[3], and s[4].
l := s[2:5]
fmt.Println("sl1: ", l)
// This slices up to (but excluding) s[5].
l = s[:5]
fmt.Println("sl2: ", l)
// And this slices up from (and including) s[2].
l = s[2:]
fmt.Println("sl3: ", l)
// We can declare and initialize a variable for slice in a single line as well.
t := []string{"g", "h", "i"}
fmt.Println("dcl: ", t)
// Slices can be composed into multi-dimensional data structures.
// The length of the inner slices can vary, unlike with multi-dimensional arrays.
twoD := make([][]int, 3)
for i := 0; i < 3; i++ {
innerLen := i + 1
twoD[i] = make([]int, innerLen)
for j := 0; j < innerLen; j++ {
twoD[i][j] = i + j
}
}
fmt.Println("2d: ", twoD)
// To append one slice to another, use ... to expand the second argument to a list of arguments.
a := []string{"Ismail", "BahaEddine"}
b := []string{"Pogba", "Mane"}
a = append(a, b...) // equivalent to "append(a, b[0], b[1], b[2])"
fmt.Println("append 2 slices: ", a)
// Some gotcha
// http://blog.golang.org/2011/01/go-slices-usage-and-internals.html
fmt.Println("found digits: ", FindDigits("./file.txt"))
} | slices.go | 0.53437 | 0.406597 | slices.go | starcoder |
package slice
import (
"reflect"
"sort"
"time"
"github.com/jgbaldwinbrown/go-gg/generic"
)
// CanSort returns whether the value v can be sorted.
func CanSort(v interface{}) bool {
switch v.(type) {
case sort.Interface, []time.Time:
return true
}
return generic.CanOrderR(reflect.TypeOf(v).Elem().Kind())
}
// Sort sorts v in increasing order. v must implement sort.Interface,
// be a slice whose elements are orderable, or be a []time.Time.
func Sort(v interface{}) {
sort.Sort(Sorter(v))
}
// Sorter returns a sort.Interface for sorting v. v must implement
// sort.Interface, be a slice whose elements are orderable, or be a
// []time.Time.
func Sorter(v interface{}) sort.Interface {
switch v := v.(type) {
case []int:
return sort.IntSlice(v)
case []float64:
return sort.Float64Slice(v)
case []string:
return sort.StringSlice(v)
case []time.Time:
return sortTimeSlice(v)
case sort.Interface:
return v
}
rv := reflectSlice(v)
switch rv.Type().Elem().Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return sortIntSlice{rv}
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
return sortUintSlice{rv}
case reflect.Float32, reflect.Float64:
return sortFloatSlice{rv}
case reflect.String:
return sortStringSlice{rv}
}
panic(&generic.TypeError{rv.Type().Elem(), nil, "is not orderable"})
}
type sortIntSlice struct {
reflect.Value
}
func (s sortIntSlice) Len() int {
return s.Value.Len()
}
func (s sortIntSlice) Less(i, j int) bool {
return s.Index(i).Int() < s.Index(j).Int()
}
func (s sortIntSlice) Swap(i, j int) {
a, b := s.Index(i).Int(), s.Index(j).Int()
s.Index(i).SetInt(b)
s.Index(j).SetInt(a)
}
type sortUintSlice struct {
reflect.Value
}
func (s sortUintSlice) Len() int {
return s.Value.Len()
}
func (s sortUintSlice) Less(i, j int) bool {
return s.Index(i).Uint() < s.Index(j).Uint()
}
func (s sortUintSlice) Swap(i, j int) {
a, b := s.Index(i).Uint(), s.Index(j).Uint()
s.Index(i).SetUint(b)
s.Index(j).SetUint(a)
}
type sortFloatSlice struct {
reflect.Value
}
func (s sortFloatSlice) Len() int {
return s.Value.Len()
}
func (s sortFloatSlice) Less(i, j int) bool {
return s.Index(i).Float() < s.Index(j).Float()
}
func (s sortFloatSlice) Swap(i, j int) {
a, b := s.Index(i).Float(), s.Index(j).Float()
s.Index(i).SetFloat(b)
s.Index(j).SetFloat(a)
}
type sortStringSlice struct {
reflect.Value
}
func (s sortStringSlice) Len() int {
return s.Value.Len()
}
func (s sortStringSlice) Less(i, j int) bool {
return s.Index(i).String() < s.Index(j).String()
}
func (s sortStringSlice) Swap(i, j int) {
a, b := s.Index(i).String(), s.Index(j).String()
s.Index(i).SetString(b)
s.Index(j).SetString(a)
}
type sortTimeSlice []time.Time
func (s sortTimeSlice) Len() int { return len(s) }
func (s sortTimeSlice) Less(i, j int) bool { return s[i].Before(s[j]) }
func (s sortTimeSlice) Swap(i, j int) { s[i], s[j] = s[j], s[i] } | generic/slice/sort.go | 0.832032 | 0.490846 | sort.go | starcoder |
package questions
import (
"fmt"
"strings"
track "github.com/OscarZhou/gotrack"
)
type ValidNumber struct {
Question
Track *track.Track
}
func (e *ValidNumber) Init() {
e.No = 65
e.Title = "Valid Number"
e.FullTitle = "Valid Number"
e.URL = "https://leetcode.com/problems/jewels-and-stones"
e.Level = LevelHard
e.FuncName = "validNumber"
NumberMap[e.No] = e
TitleMap[e.Title] = e
}
func (e ValidNumber) Print() {
fmt.Println(`
func isNumber(s string) bool {
state := 0
s = strings.TrimSpace(s)
chars := []rune(s)
i := 0
for i < len(chars) {
switch chars[i] {
case '+', '-':
if state == 0 {
state = 1
i++
} else if state == 5 {
state = 7
i++
} else {
return false
}
case 'e':
if state == 2 || state == 3 || state == 4 || state == 9 {
state = 5
i++
} else {
return false
}
case '.':
if state == 0 || state == 1 {
state = 8
i++
} else if state == 2 {
state = 3
i++
} else {
return false
}
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
switch state {
case 0, 1, 2:
state = 2
i++
case 3, 4:
state = 4
i++
case 5, 6, 7:
state = 6
i++
case 8, 9:
state = 9
i++
default:
return false
}
default:
return false
}
}
if state == 2 || state == 3 || state == 4 || state == 6 || state == 9 {
return true
}
return false
}
`)
}
func (e ValidNumber) PrintTitle() {
fmt.Printf("%d, %s\n", e.No, e.Title)
}
func (e ValidNumber) PrintDetail() {
fmt.Printf("%d, %s\n", e.No, e.Title)
}
func (e ValidNumber) Run() error {
e.Track.Start()
defer e.Track.End()
sArray := []string{
"0",
" 0.1",
"abc",
"1 a",
"2e10",
" -90e3",
" 1e",
"e3",
" 6e-1",
" 99e2.5 ",
"53.5e93",
" --6",
"-+3",
"95a54e53",
"1.",
".3",
"40.81",
"46.e3",
}
expectedArray := []bool{
true,
true,
false,
false,
true,
true,
false,
false,
true,
false,
true,
false,
false,
false,
true,
true,
true,
true,
}
for i, v := range sArray {
actual := validNumber(v)
if expectedArray[i] == actual {
fmt.Printf("index=%d, expected=%v, actual=%v\n", i, expectedArray[i], actual)
}
}
return nil
}
func validNumber(s string) bool {
return isNumber(s)
}
func isNumber(s string) bool {
state := 0
s = strings.TrimSpace(s)
chars := []rune(s)
i := 0
for i < len(chars) {
switch chars[i] {
case '+', '-':
if state == 0 {
state = 1
i++
} else if state == 5 {
state = 7
i++
} else {
return false
}
case 'e':
if state == 2 || state == 3 || state == 4 || state == 9 {
state = 5
i++
} else {
return false
}
case '.':
if state == 0 || state == 1 {
state = 8
i++
} else if state == 2 {
state = 3
i++
} else {
return false
}
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
switch state {
case 0, 1, 2:
state = 2
i++
case 3, 4:
state = 4
i++
case 5, 6, 7:
state = 6
i++
case 8, 9:
state = 9
i++
default:
return false
}
default:
return false
}
}
if state == 2 || state == 3 || state == 4 || state == 6 || state == 9 {
return true
}
return false
} | questions/valid_number.go | 0.569972 | 0.405596 | valid_number.go | starcoder |
package gtasa
// See https://gtasa-savegame-editor.github.io/docs/#/block16
type Block16 struct {
ProgressMade float32 `gta:"index:0"`
MaxProgress float32 `gta:"index:4"`
DistanceTravelledByFoot float32 `gta:"index:12"`
DistanceTravelledByCar float32 `gta:"index:16"`
DistanceTravelledByMotorbike float32 `gta:"index:20"`
DistanceTravelledByBoat float32 `gta:"index:24"`
DistanceTravelledByGolfCart float32 `gta:"index:28"`
DistanceTravelledByHelicopter float32 `gta:"index:32"`
DistanceTravelledByPlane float32 `gta:"index:36"`
LongestWheelieDistance float32 `gta:"index:40"`
LongestStoppieDistance float32 `gta:"index:44"`
LongestTwoWheelsDistance float32 `gta:"index:48"`
BudgetWeapons float32 `gta:"index:52"`
BudgetFashion float32 `gta:"index:56"`
BudgetProperty float32 `gta:"index:60"`
BudgetAutoRepair float32 `gta:"index:64"`
LongestWheelieTime float32 `gta:"index:68"`
LongestStoppieTime float32 `gta:"index:72"`
LongestTwoWheelsTime float32 `gta:"index:76"`
BudgetFood float32 `gta:"index:80"`
Fat float32 `gta:"index:84"`
Stamina float32 `gta:"index:88"`
Muscle float32 `gta:"index:92"`
MaxHealth float32 `gta:"index:100"`
// 104: unknown
DistanceTravelledBySwimming float32 `gta:"index:108"`
DistanceTravelledByBicycle float32 `gta:"index:112"`
DistanceTravelledByTreadmill float32 `gta:"index:116"`
DistanceTravelledByExerciseBike float32 `gta:"index:120"`
BudgetTattoo float32 `gta:"index:124"`
BudgetHairdressing float32 `gta:"index:128"`
// 132: unknown
BudgetProstitute float32 `gta:"index:136"`
// 140: unknown
GamblingExpenditure float32 `gta:"index:144"`
PimpRevenue float32 `gta:"index:148"`
GamblingRevenue float32 `gta:"index:152"`
GamblingLargestWin float32 `gta:"index:156"`
GamblingLargestLoss float32 `gta:"index:160"`
BurglaryLargestSwag float32 `gta:"index:164"`
BurglaryRevenue float32 `gta:"index:168"`
WastedByOthers int `gta:"index:328"`
Wasted int `gta:"index:332"`
LandVehiclesDestroyed int `gta:"index:336"`
SeaVehiclesDestroyed int `gta:"index:340"`
AirVehiclesDestroyed int `gta:"index:344"`
PropertyDamage int `gta:"index:348"`
BulletsFired int `gta:"index:352"`
// TODO: explosives used?
BulletsHit int `gta:"index:360"`
TiresPoppedByGunfire int `gta:"index:364"`
Headshots int `gta:"index:368"`
TotalWantedStars int `gta:"index:372"`
TotalWantedStarsEvaded int `gta:"index:376"`
Busted int `gta:"index:380"`
DaysPassed int `gta:"index:384"`
MissionsAttempted int `gta:"index:432"`
MissionsPassed int `gta:"index:436"`
TaxiRevenue int `gta:"index:444"`
TaxiPassengers int `gta:"index:448"`
ParamedicPeopleSaved int `gta:"index:452"`
VigilanteCriminalsKilled int `gta:"index:456"`
FirefighterFiresExtinguished int `gta:"index:460"`
VigilanteHighestLevel int `gta:"index:476"`
ParamedicHighestLevel int `gta:"index:480"`
FirefighterHighestLevel int `gta:"index:484"`
SkillDriving int `gta:"index:488"`
PimpGirls int `gta:"index:532"`
PimpHighestLevel int `gta:"index:688"`
SkillFlying int `gta:"index:740"`
SkillMotorbike int `gta:"index:764"`
SkillCycling int `gta:"index:768"`
}
// Gets the progress made, as a percentage value
func (b *Block16) GetProgress() float32 {
return b.ProgressMade / b.MaxProgress * 100
} | gtasa/block16.go | 0.712532 | 0.499207 | block16.go | starcoder |
package trace
import (
"math"
"github.com/peterstace/grayt/xmath"
)
type grid struct {
minBound xmath.Vector
maxBound xmath.Vector
stride xmath.Vector
data []*link
resolution xmath.Triple
}
func newGrid(lambda float64, objs []object) *grid {
minBound, maxBound := bounds(objs)
boundDiff := maxBound.Sub(minBound)
volume := boundDiff.X * boundDiff.Y * boundDiff.Z
resolutionFactor := math.Pow(lambda*float64(len(objs))/volume, 1.0/3.0)
resolution := xmath.Truncate(boundDiff.Scale(resolutionFactor)).Max(xmath.Triple{1, 1, 1})
stride := boundDiff.Div(resolution.AsVector())
data := make([]*link, resolution.X*resolution.Y*resolution.Z)
grid := &grid{
minBound,
maxBound,
stride,
data,
resolution,
}
grid.populate(objs)
return grid
}
func bounds(objs []object) (xmath.Vector, xmath.Vector) {
inf := math.Inf(+1)
minBound, maxBound := xmath.Vect(+inf, +inf, +inf), xmath.Vect(-inf, -inf, -inf)
for _, obj := range objs {
min, max := obj.Surface.bound()
minBound = minBound.Min(min)
maxBound = maxBound.Max(max)
}
return minBound, maxBound
}
func (g *grid) populate(objs []object) {
for _, obj := range objs {
min, max := obj.Surface.bound()
minCoord := xmath.Truncate(min.Sub(g.minBound).Div(g.stride)).Min(g.resolution.Sub(xmath.Triple{1, 1, 1}))
maxCoord := xmath.Truncate(max.Sub(g.minBound).Div(g.stride)).Min(g.resolution.Sub(xmath.Triple{1, 1, 1}))
var pos xmath.Triple
for pos.X = minCoord.X; pos.X <= maxCoord.X; pos.X++ {
for pos.Y = minCoord.Y; pos.Y <= maxCoord.Y; pos.Y++ {
for pos.Z = minCoord.Z; pos.Z <= maxCoord.Z; pos.Z++ {
idx := g.dataIndex(pos)
g.data[idx] = &link{g.data[idx], obj}
}
}
}
}
}
func (g *grid) closestHit(r xmath.Ray) (intersection, material, bool) {
var distance float64
if !g.insideBoundingBox(r.Start) {
var hit bool
distance, hit = g.hitBoundingBox(r)
if !hit {
return intersection{}, material{}, false
}
}
cellCoordsFloat := g.cellCoordsFloat(r.At(distance))
initialPos := g.cellCoordsInt(cellCoordsFloat)
delta := g.delta(r)
inc := g.inc(r)
initialNextHitDistance := g.next(cellCoordsFloat, r)
var pos = initialPos
for true {
nextHitDistance := pos.Sub(initialPos).AsVector().Abs().Mul(delta).Add(initialNextHitDistance)
if intersection, material, hit := g.findHitInCell(pos, nextHitDistance, r); hit {
return intersection, material, true
}
var exitGrid bool
pos, exitGrid = g.nextCell(nextHitDistance, initialPos, pos, inc)
if exitGrid {
break
}
}
return intersection{}, material{}, false
}
func (g *grid) insideBoundingBox(v xmath.Vector) bool {
return true &&
v.X >= g.minBound.X && v.X <= g.maxBound.X &&
v.Y >= g.minBound.Y && v.Y <= g.maxBound.Y &&
v.Z >= g.minBound.Z && v.Z <= g.maxBound.Z
}
func (g *grid) hitBoundingBox(r xmath.Ray) (float64, bool) {
tx1 := (g.minBound.X - r.Start.X) / r.Dir.X
tx2 := (g.maxBound.X - r.Start.X) / r.Dir.X
ty1 := (g.minBound.Y - r.Start.Y) / r.Dir.Y
ty2 := (g.maxBound.Y - r.Start.Y) / r.Dir.Y
tz1 := (g.minBound.Z - r.Start.Z) / r.Dir.Z
tz2 := (g.maxBound.Z - r.Start.Z) / r.Dir.Z
tmin, tmax := math.Inf(-1), math.Inf(+1)
tmin = math.Max(tmin, math.Min(tx1, tx2))
tmax = math.Min(tmax, math.Max(tx1, tx2))
tmin = math.Max(tmin, math.Min(ty1, ty2))
tmax = math.Min(tmax, math.Max(ty1, ty2))
tmin = math.Max(tmin, math.Min(tz1, tz2))
tmax = math.Min(tmax, math.Max(tz1, tz2))
return tmin, tmin <= tmax && tmin >= 0
}
func (g *grid) cellCoordsFloat(v xmath.Vector) xmath.Vector {
return v.
Sub(g.minBound).
Div(g.stride)
}
func (g *grid) cellCoordsInt(cellCoordsFloat xmath.Vector) xmath.Triple {
return xmath.Truncate(cellCoordsFloat).
Min(g.resolution.Sub(xmath.Triple{1, 1, 1})).
Max(xmath.Triple{})
}
func (g *grid) delta(r xmath.Ray) xmath.Vector {
return g.stride.
Div(r.Dir).
Abs()
}
func (g *grid) inc(r xmath.Ray) xmath.Triple {
return xmath.Truncate(
r.Dir.Sign(),
)
}
func (g *grid) next(cellCoordsFloat xmath.Vector, r xmath.Ray) xmath.Vector {
return g.cellCoordsInt(cellCoordsFloat).AsVector().
Add(r.Dir.
Sign().
Scale(0.5).
Add(xmath.Vect(0.5, 0.5, 0.5)),
).
Mul(g.stride).
Sub(r.Start.Sub(g.minBound)).
Div(r.Dir)
}
func (g *grid) nextCell(next xmath.Vector, initialPos, pos, inc xmath.Triple) (xmath.Triple, bool) {
var exitGrid bool
switch {
case next.X < math.Min(next.Y, next.Z):
pos.X += inc.X
exitGrid = pos.X < 0 && inc.X < 0 || pos.X >= g.resolution.X && inc.X > 0
case next.Y < next.Z:
pos.Y += inc.Y
exitGrid = pos.Y < 0 && inc.Y < 0 || pos.Y >= g.resolution.Y && inc.Y > 0
default:
pos.Z += inc.Z
exitGrid = pos.Z < 0 && inc.Z < 0 || pos.Z >= g.resolution.Z && inc.Z > 0
}
return pos, exitGrid
}
func (g *grid) dataIndex(pos xmath.Triple) int {
return pos.X + g.resolution.X*pos.Y + g.resolution.X*g.resolution.Y*pos.Z
}
func (g *grid) findHitInCell(pos xmath.Triple, next xmath.Vector, r xmath.Ray) (intersection, material, bool) {
var closest struct {
intersection intersection
material material
hit bool
}
for link := g.data[g.dataIndex(pos)]; link != nil; link = link.next {
intersection, hit := link.obj.Surface.intersect(r)
if !hit {
continue
}
nextCell := xmath.AddULPs(math.Min(next.X, math.Min(next.Y, next.Z)), ulpFudgeFactor)
if intersection.distance > nextCell {
continue
}
if !closest.hit || intersection.distance < closest.intersection.distance {
closest.intersection = intersection
closest.material = link.obj.Material
closest.hit = true
}
}
return closest.intersection, closest.material, closest.hit
}
type link struct {
next *link
obj object
} | trace/grid.go | 0.748904 | 0.559049 | grid.go | starcoder |
package types
type MetricDefinition struct {
// Name of the metric returning the timeseries.
Metric string `json:"metric,omitempty"`
// Metric dimensions / metadata related to each timeseries.
Dimensions map[string]string `json:"dimensions,omitempty"`
}
type MetricsQueryRequest struct {
// A list of metrics queries.
Queries []MetricsQueryRow `json:"queries"`
TimeRange *ResolvableTimeRange `json:"timeRange"`
}
type MetricsQueryResponse struct {
// A list of the time series returned by metric query.
QueryResult []TimeSeriesRow `json:"queryResult,omitempty"`
Errors *ErrorResponse `json:"errors"`
}
type MetricsQueryRow struct {
// Row id for the query row, A to Z letter.
RowId string `json:"rowId"`
// A metric query consists of a metric, one or more filters and optionally, one or more [Metrics Operators](https://help.sumologic.com/?cid=10144). Strictly speaking, both filters and operators are optional. Most of the [Metrics Operators](https://help.sumologic.com/?cid=10144) are allowed in the query string except `fillmissing`, `outlier`, `quantize` and `timeshift`. * `fillmissing`: Not supported in API. * `outlier`: Not supported in API. * `quantize`: Only supported through `quantization` param. * `timeshift`: Only supported through `timeshift` param. In practice, your metric queries will almost always contain filters that narrow the scope of your query. For more information about the query language see [Metrics Queries](http://help.sumologic.com/?cid=1079).
Query string `json:"query"`
// Segregates time series data by time period. This allows you to create aggregated results in buckets of fixed intervals (for example, 5-minute intervals). The value is in milliseconds.
Quantization int64 `json:"quantization,omitempty"`
// We use the term rollup to refer to the aggregation function Sumo Logic uses when quantizing metrics. Can be `Avg`, `Sum`, `Min`, `Max`, `Count` or `None`.
Rollup string `json:"rollup,omitempty"`
// Shifts the time series from your metrics query by the specified amount of time. This can help when comparing a time series across multiple time periods. Specified as a signed duration in milliseconds.
Timeshift int64 `json:"timeshift,omitempty"`
// Determines if the row should be returned in the response. Can be used in conjunction with a join, if only the result of the join is needed, and not the intermediate rows.
Transient bool `json:"transient,omitempty"`
}
type Points struct {
// Array of timestamps of datapoints in milliseconds.
Timestamps []int64 `json:"timestamps"`
// Array of values of datapoints corresponding to timestamp array.
Values []float64 `json:"values"`
}
type TimeSeries struct {
MetricDefinition *MetricDefinition `json:"metricDefinition"`
Points *Points `json:"points"`
}
type TimeSeriesList struct {
// A list of timeseries returned by corresponding query.
TimeSeries []TimeSeries `json:"timeSeries"`
// Unit of the query.
Unit string `json:"unit,omitempty"`
// Time shift value if specified in request in human readable format.
TimeShiftLabel string `json:"timeShiftLabel,omitempty"`
}
type TimeSeriesRow struct {
// Row id for the query row as specified in the request.
RowId string `json:"rowId"`
TimeSeriesList *TimeSeriesList `json:"timeSeriesList"`
} | service/cip/types/metrics_query.go | 0.910406 | 0.488588 | metrics_query.go | starcoder |
// Package fleetspeak provides functionality for network sensors to communicate with the Emitto
// service via Fleetspeak.
package fleetspeak
import (
"math/rand"
"time"
"github.com/golang/protobuf/ptypes"
"github.com/google/fleetspeak/fleetspeak/src/client/channel"
"github.com/google/fleetspeak/fleetspeak/src/client/service"
"github.com/google/fleetspeak/fleetspeak/src/client/socketservice/client"
log "github.com/golang/glog"
pb "github.com/google/emitto/source/sensor/proto"
fspb "github.com/google/fleetspeak/fleetspeak/src/common/proto/fleetspeak"
)
const (
	// serviceName is the Fleetspeak service name stamped on every outgoing
	// message's Destination address.
	serviceName = "Emitto"
	// maxMessages bounds the buffered messages channel that queues messages
	// received from the Fleetspeak client for sensor-side consumption.
	maxMessages = 1
)
// Client contains functionality to send and receive messages to/from a
// Fleetspeak client over its local socket.
type Client struct {
	// fsChan is the bidirectional channel pair used to exchange messages
	// with the Fleetspeak client process.
	fsChan *channel.RelentlessChannel
	// callbackChan receives the sensor message ID from each AckMessage's
	// Ack callback once Fleetspeak acknowledges the send.
	callbackChan chan string
	// messages queues Fleetspeak client messages (capacity maxMessages)
	// for the sensor client to consume.
	messages chan *fspb.Message
}
// New initializes a Client talking to the Fleetspeak socket service at the
// given socket path. The current time in RFC1123Z format is reported as the
// channel version string.
func New(socket string) *Client {
	return &Client{
		fsChan:       client.OpenChannel(socket, time.Now().Format(time.RFC1123Z)),
		callbackChan: make(chan string, 5), // Buffered to prevent potential locking.
		messages:     make(chan *fspb.Message, maxMessages),
	}
}
// SendMessage sends m to the Fleetspeak client. This call blocks until
// Fleetspeak has acknowledged the message, and returns the acknowledged
// request ID.
func (c *Client) SendMessage(m *pb.SensorMessage) (string, error) {
	msg, err := c.createRequest(m)
	if err != nil {
		return "", err
	}
	return c.sendAndWait(msg), nil
}
// sendAndWait sends a message to the Fleetspeak client and waits indefinitely. Only one sendAndWait
// should be called by the Client at a time to avoid non-chronological request ID logging from the
// Fleetspeak client callback channel.
//
// The returned string is the sensor message ID pushed by the Ack closure
// installed in createRequest.
func (c *Client) sendAndWait(msg *service.AckMessage) string {
	c.fsChan.Out <- *msg
	log.Infof("Sent message (%X) to Fleetspeak; awaiting acknowledgement...", msg.M.GetSourceMessageId())
	// Blocks until the Ack callback fires.
	ack := <-c.callbackChan
	log.Infof("Received ack %q from Fleetspeak", ack)
	return ack
}

// createRequest composes a Fleetspeak AckMessage.
// Fleetspeak is optimized to handle messages sizes < 2MB.
//
// The SensorMessage is packed into an Any proto, tagged with a random 16-byte
// source message ID, and addressed to serviceName. The Ack closure forwards
// the sensor message ID to callbackChan, unblocking sendAndWait.
//
// NOTE(review): the ID comes from math/rand, which is deterministic unless
// seeded — consider crypto/rand for collision-resistant IDs. The error from
// rand.Read is ignored (math/rand documents it as always nil).
func (c *Client) createRequest(m *pb.SensorMessage) (*service.AckMessage, error) {
	data, err := ptypes.MarshalAny(m)
	if err != nil {
		return nil, err
	}
	id := make([]byte, 16)
	rand.Read(id)
	return &service.AckMessage{
		M: &fspb.Message{
			SourceMessageId: id,
			Destination: &fspb.Address{
				ServiceName: serviceName,
			},
			Data:       data,
			Background: true,
		},
		Ack: func() {
			c.callbackChan <- m.Id
		},
	}, nil
}

// Receive continuously receives new messages from the Fleetspeak client's In channel. Once it
// receives a message, it will send it to the Messages channel for the sensor client to process.
//
// Receive blocks until done is signalled, then closes the messages channel to
// tell consumers no more messages will arrive. It is intended to run in its
// own goroutine. Note the send to c.messages blocks until the consumer reads;
// with maxMessages == 1 a slow consumer backpressures Fleetspeak reads.
func (c *Client) Receive(done <-chan struct{}) {
	for {
		select {
		case m := <-c.fsChan.In:
			log.Infof("Received message (%X) from Fleetspeak", m.GetSourceMessageId())
			c.messages <- m
		case <-done:
			log.Warning("Stopped receiving messages from Fleetspeak")
			close(c.messages)
			return
		}
	}
}

// Messages returns the channel containing incoming Fleetspeak messages.
// The channel is closed when Receive observes the done signal.
func (c *Client) Messages() chan *fspb.Message {
	return c.messages
} | source/sensor/fleetspeak/fleetspeak.go | 0.720073 | 0.400339 | fleetspeak.go | starcoder
package typedesc
import (
"fmt"
"go/types"
"reflect"
)
// TypeDesc describes types for generating code.
// TypeString and Underlying hold string forms of the type; IsType matches
// against either.
type TypeDesc struct {
	TypeString string
	Underlying string
	KindTuple
}

// IsType reports whether t equals either the type string or its underlying form.
func (d *TypeDesc) IsType(t string) bool {
	return d.TypeString == t || d.Underlying == t
}

// IsTime reports whether the type is time.Time or *time.Time.
func (d *TypeDesc) IsTime() bool {
	return d.IsBareTime() || d.IsPtrTime()
}

// IsBareTime reports whether the type is a non-pointer time.Time.
func (d *TypeDesc) IsBareTime() bool {
	return d.IsType("time.Time")
}

// IsPtrTime reports whether the type is *time.Time.
func (d *TypeDesc) IsPtrTime() bool {
	return d.IsType("*time.Time")
}

// IsJSON reports whether the type is json.RawMessage.
func (d *TypeDesc) IsJSON() bool {
	return d.IsType("json.RawMessage")
}
// IsSliceOfBasicOrTime reports whether the type is a slice of a basic kind,
// or a slice of time.Time / *time.Time (matched by either the written or
// underlying type string).
func (d *TypeDesc) IsSliceOfBasicOrTime() bool {
	if d.Container == reflect.Slice && isBasic(d.Elem) {
		return true
	}
	return d.IsType("[]time.Time") || d.IsType("[]*time.Time")
}
// KindTuple represents an underlying type. We do not support double pointer.
type KindTuple struct {
	Ptr       bool         // outermost type is a pointer
	Container reflect.Kind // Only slice or array
	PtrElem   bool         // element type is a pointer
	Elem      reflect.Kind // int32, int64, string, struct, map
}

// IsPtr reports whether either the outer type or the element is a pointer.
func (k KindTuple) IsPtr() bool {
	return k.Ptr || k.PtrElem
}

// IsNillable reports whether the type's zero value is nil:
// any pointer, a slice container, or a map/slice element kind.
func (k KindTuple) IsNillable() bool {
	return k.IsPtr() ||
		k.Container == reflect.Slice ||
		k.Elem == reflect.Map ||
		k.Elem == reflect.Slice
}

// IsPtrNumber reports whether the type is a pointer to a plain numeric type.
func (k KindTuple) IsPtrNumber() bool {
	return k.Ptr && k.IsNumber()
}

// IsNumber reports whether the type is a non-container numeric type.
func (k KindTuple) IsNumber() bool {
	return isNumber(k.Elem) && k.Container == 0
}
// isNumber reports whether k is a signed/unsigned integer kind (excluding
// Uintptr) or a floating-point kind. Complex kinds are not numbers here.
func isNumber(k reflect.Kind) bool {
	switch {
	case k >= reflect.Int && k <= reflect.Uint64:
		return true
	case k == reflect.Float32, k == reflect.Float64:
		return true
	default:
		return false
	}
}
// IsPtrBasic reports whether the type is a pointer to a basic
// (string/bool/numeric) type.
func (k KindTuple) IsPtrBasic() bool {
	return k.Ptr && k.IsBasic()
}

// IsBasic reports whether the type is a non-container basic type.
func (k KindTuple) IsBasic() bool {
	return isBasic(k.Elem) && k.Container == 0
}
// isBasic reports whether k is a string, bool, or numeric kind.
func isBasic(k reflect.Kind) bool {
	if k == reflect.String || k == reflect.Bool {
		return true
	}
	return isNumber(k)
}
// IsSimple reports whether the type has no slice/array container.
func (k KindTuple) IsSimple() bool {
	return k.Container == 0
}

// IsSlice reports whether the container is a slice.
func (k KindTuple) IsSlice() bool {
	return k.Container == reflect.Slice
}

// IsKind reports whether kind matches the container, or (for non-container
// types) the element kind.
func (k KindTuple) IsKind(kind reflect.Kind) bool {
	return k.Container == kind || (k.Elem == kind && k.Container == 0)
}

// IsSimpleKind reports whether the type is exactly the non-container
// (ptr, kind) combination.
func (k KindTuple) IsSimpleKind(ptr bool, kind reflect.Kind) bool {
	return k == SimpleKind(ptr, kind) && k.Container == 0
}

// IsKindTuple reports whether the type equals kind exactly.
func (k KindTuple) IsKindTuple(kind KindTuple) bool {
	return k == kind
}

// SimpleKind builds a KindTuple for a non-container type with the given
// pointer-ness and element kind.
func SimpleKind(ptr bool, elem reflect.Kind) KindTuple {
	return KindTuple{Ptr: ptr, Elem: elem}
}

// NewKindTuple decomposes a go/types type into a KindTuple by peeling, in
// order: an outer pointer, a slice/array container, and an element pointer,
// resolving named types at each step. Double pointers (pointer to pointer
// without a container in between) are rejected with an error.
func NewKindTuple(typ types.Type) (kt KindTuple, err error) {
	t := UnderlyingOf(typ)
	// Outer pointer, e.g. *T or *[]T.
	if pt, ok := t.(*types.Pointer); ok {
		kt.Ptr = true
		t = UnderlyingOf(pt.Elem())
	}
	// Slice/array container, e.g. []T or [N]T.
	switch pt := t.(type) {
	case *types.Slice:
		kt.Container = reflect.Slice
		t = UnderlyingOf(pt.Elem())
	case *types.Array:
		kt.Container = reflect.Array
		t = UnderlyingOf(pt.Elem())
	}
	// Element pointer, e.g. []*T.
	if pt, ok := t.(*types.Pointer); ok {
		kt.PtrElem = true
		t = UnderlyingOf(pt.Elem())
	}
	// Both pointer flags without a container means **T, which is unsupported.
	if kt.Container == 0 && kt.Ptr && kt.PtrElem {
		err = fmt.Errorf("unsupported double pointer for type: %v", typ)
		return
	}
	switch pt := t.(type) {
	case *types.Basic:
		kt.Elem = convertBasicKindToReflectKind(pt.Kind())
	case *types.Map:
		kt.Elem = reflect.Map
	case *types.Struct:
		kt.Elem = reflect.Struct
	case *types.Interface:
		kt.Elem = reflect.Interface
	case *types.Pointer:
		// A pointer remaining after the peeling above is a deeper pointer,
		// e.g. []**T.
		err = fmt.Errorf("unsupported double pointer for type: %v", typ)
	default:
		err = fmt.Errorf("unsupported type: %v", typ)
	}
	return
}
// convertBasicKindToReflectKind maps a go/types.BasicKind onto the matching
// reflect.Kind. The two enums share identical ordering from Invalid/Bool
// through Complex128, so that range is a direct cast; String and
// UnsafePointer are remapped explicitly. Any other kind (e.g. the untyped
// kinds) panics.
func convertBasicKindToReflectKind(k types.BasicKind) reflect.Kind {
	switch {
	case k <= types.Complex128:
		return reflect.Kind(k)
	case k == types.String:
		return reflect.String
	case k == types.UnsafePointer:
		return reflect.UnsafePointer
	}
	panic(fmt.Sprintf("unexpected kind: %v", k))
}
// UnderlyingOf repeatedly resolves named types until it reaches a fixed
// point, i.e. a type that is its own underlying type.
func UnderlyingOf(typ types.Type) types.Type {
	for typ != typ.Underlying() {
		typ = typ.Underlying()
	}
	return typ
} | gen/typedesc/typedesc.go | 0.604049 | 0.444203 | typedesc.go | starcoder
package crm_extensions
import (
"encoding/json"
)
// CardObjectTypeBody struct for CardObjectTypeBody (generated model).
type CardObjectTypeBody struct {
	// A CRM object type where this card should be displayed.
	Name string `json:"name"`
	// An array of properties that should be sent to this card's target URL when the data fetch request is made. Must be valid properties for the corresponding CRM object type.
	PropertiesToSend []string `json:"propertiesToSend"`
}

// NewCardObjectTypeBody instantiates a new CardObjectTypeBody object
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed
func NewCardObjectTypeBody(name string, propertiesToSend []string) *CardObjectTypeBody {
	this := CardObjectTypeBody{}
	this.Name = name
	this.PropertiesToSend = propertiesToSend
	return &this
}

// NewCardObjectTypeBodyWithDefaults instantiates a new CardObjectTypeBody object
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set
func NewCardObjectTypeBodyWithDefaults() *CardObjectTypeBody {
	this := CardObjectTypeBody{}
	return &this
}

// GetName returns the Name field value.
// A nil receiver yields the zero value rather than panicking (generated convention).
func (o *CardObjectTypeBody) GetName() string {
	if o == nil {
		var ret string
		return ret
	}
	return o.Name
}

// GetNameOk returns a tuple with the Name field value
// and a boolean to check if the value has been set.
func (o *CardObjectTypeBody) GetNameOk() (*string, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Name, true
}

// SetName sets field value
func (o *CardObjectTypeBody) SetName(v string) {
	o.Name = v
}

// GetPropertiesToSend returns the PropertiesToSend field value.
// A nil receiver yields a nil slice rather than panicking (generated convention).
func (o *CardObjectTypeBody) GetPropertiesToSend() []string {
	if o == nil {
		var ret []string
		return ret
	}
	return o.PropertiesToSend
}

// GetPropertiesToSendOk returns a tuple with the PropertiesToSend field value
// and a boolean to check if the value has been set.
func (o *CardObjectTypeBody) GetPropertiesToSendOk() (*[]string, bool) {
	if o == nil {
		return nil, false
	}
	return &o.PropertiesToSend, true
}

// SetPropertiesToSend sets field value
func (o *CardObjectTypeBody) SetPropertiesToSend(v []string) {
	o.PropertiesToSend = v
}
// MarshalJSON serializes the model into a JSON object. Both fields are
// required, so they are always emitted; the generator's dead always-true
// `if true` guards have been removed.
func (o CardObjectTypeBody) MarshalJSON() ([]byte, error) {
	toSerialize := map[string]interface{}{
		"name":             o.Name,
		"propertiesToSend": o.PropertiesToSend,
	}
	return json.Marshal(toSerialize)
}
// NullableCardObjectTypeBody wraps a CardObjectTypeBody pointer together
// with an explicit "has been set" flag, distinguishing unset from null.
type NullableCardObjectTypeBody struct {
	value *CardObjectTypeBody
	isSet bool
}

// Get returns the wrapped value (may be nil).
func (v NullableCardObjectTypeBody) Get() *CardObjectTypeBody {
	return v.value
}

// Set stores val and marks the wrapper as set.
func (v *NullableCardObjectTypeBody) Set(val *CardObjectTypeBody) {
	v.value = val
	v.isSet = true
}

// IsSet reports whether Set (or UnmarshalJSON) has been called.
func (v NullableCardObjectTypeBody) IsSet() bool {
	return v.isSet
}

// Unset clears the value and the set flag.
func (v *NullableCardObjectTypeBody) Unset() {
	v.value = nil
	v.isSet = false
}

// NewNullableCardObjectTypeBody returns a wrapper already marked as set.
func NewNullableCardObjectTypeBody(val *CardObjectTypeBody) *NullableCardObjectTypeBody {
	return &NullableCardObjectTypeBody{value: val, isSet: true}
}

// MarshalJSON serializes the wrapped value (null when the pointer is nil).
func (v NullableCardObjectTypeBody) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}

// UnmarshalJSON decodes into the wrapped value and marks the wrapper as set.
// NOTE(review): isSet becomes true even when Unmarshal returns an error —
// generated convention; confirm callers check the returned error.
func (v *NullableCardObjectTypeBody) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
} | generated/crm_extensions/model_card_object_type_body.go | 0.617628 | 0.412294 | model_card_object_type_body.go | starcoder
package streamstats
import (
"fmt"
"math"
)
// MomentStats is a datastructure for computing the first four moments of a stream
// (mean, variance, skewness, excess kurtosis) in a single pass using
// numerically stable central-moment updates.
type MomentStats struct {
	n  uint64  // number of observations seen
	m1 float64 // running mean
	m2 float64 // sum of squared deviations from the mean
	m3 float64 // third central-moment accumulator
	m4 float64 // fourth central-moment accumulator
}

// NewMomentStats returns an empty MomentStats structure with no values
func NewMomentStats() *MomentStats {
	return &MomentStats{}
}

// Add updates the moment stats with a new observation x.
// The update order is significant: m4 and m3 are computed from the
// pre-update values of m2 and m3, so m2 must be updated last.
func (m *MomentStats) Add(x float64) {
	m.n++
	fN := float64(m.n) // explicitly cast the number of observations to float64 for arithmetic operations
	delta := x - m.m1
	deltaN := delta / fN
	deltaN2 := deltaN * deltaN
	term1 := delta * deltaN * (fN - 1)
	m.m1 += deltaN
	m.m4 += term1*deltaN2*(fN*fN-3*fN+3) + 6*deltaN2*m.m2 - 4*deltaN*m.m3
	m.m3 += term1*deltaN*(fN-2) - 3*deltaN*m.m2
	m.m2 += term1
}

// N returns the observations stored so far
func (m *MomentStats) N() uint64 {
	return m.n
}

// Mean returns the mean of the observations seen so far
func (m *MomentStats) Mean() float64 {
	return m.m1
}

// Variance returns the sample variance (n-1 denominator) of the observations
// seen so far; it is 0.0 with fewer than two observations.
func (m *MomentStats) Variance() float64 {
	if m.n < 2 {
		return 0.0
	}
	return m.m2 / (float64(m.n) - 1.0)
}

// StdDev returns the standard deviation of the samples seen so far
func (m *MomentStats) StdDev() float64 {
	return math.Sqrt(m.Variance())
}

// Skewness returns the skewness of the samples seen so far;
// 0.0 when there is no spread (m2 <= 0).
func (m *MomentStats) Skewness() float64 {
	if m.m2 <= 0.0 {
		return 0.0
	}
	return math.Sqrt(float64(m.n)) * m.m3 / math.Pow(m.m2, 1.5)
}

// Kurtosis returns the excess kurtosis of the samples seen so far
// (normal distribution -> 0); 0.0 when there is no spread (m2 <= 0).
func (m *MomentStats) Kurtosis() float64 {
	if m.m2 <= 0.0 {
		return 0.0
	}
	return float64(m.n)*m.m4/(m.m2*m.m2) - 3.0
}

// Combine combines the stats from two MomentStats structures into a new
// value, as if every observation from both streams had been Added to one
// accumulator. Neither input is modified.
func (m *MomentStats) Combine(b *MomentStats) MomentStats {
	var combined MomentStats
	combined.n = m.n + b.n
	mN := float64(m.n) // convert to floats for arithmetic operations
	bN := float64(b.n)
	cN := float64(combined.n)
	delta := b.m1 - m.m1
	delta2 := delta * delta
	delta3 := delta * delta2
	delta4 := delta2 * delta2
	combined.m1 = (mN*m.m1 + bN*b.m1) / cN
	combined.m2 = m.m2 + b.m2 + delta2*mN*bN/cN
	combined.m3 = m.m3 + b.m3 + delta3*mN*bN*(mN-bN)/(cN*cN)
	combined.m3 += 3.0 * delta * (mN*b.m2 - bN*m.m2) / cN
	combined.m4 = m.m4 + b.m4 + delta4*mN*bN*(mN*mN-mN*bN+bN*bN)/(cN*cN*cN)
	combined.m4 += 6.0*delta2*(mN*mN*b.m2+bN*bN*m.m2)/(cN*cN) + 4.0*delta*(mN*b.m3-bN*m.m3)/cN
	return combined
}

// String returns the standard string representation of the samples seen so far
func (m *MomentStats) String() string {
	return fmt.Sprintf("Mean: %0.3f Variance: %0.3f Skewness: %0.3f Kurtosis: %0.3f N: %d", m.Mean(), m.Variance(), m.Skewness(), m.Kurtosis(), m.N())
} | momentstats.go | 0.901476 | 0.621168 | momentstats.go | starcoder
package renderer
import (
"image"
"math"
"github.com/nightmarlin/murum/layout"
"github.com/nightmarlin/murum/provider"
)
// Renderer allows for murum image rendering.
type Renderer interface {
	// Render draws an image with the bounding rectangle rect filled with the pattern defined in the
	// layout.L. Each L section will be filled with an image based on an entry from the
	// provider.AlbumInfo slice. If len(l) > len(ai) then some regions may be left blank. If len(ai) >
	// len(l) then not all Album Infos need to be used.
	Render(l *layout.L, ai []provider.AlbumInfo) (image.Image, error)
}
// GetBounds finds the smallest image.Rectangle containing every point in ps.
// An empty slice yields the zero rectangle. Because image.Rectangle treats
// Max as exclusive, {1, 1} is added to the largest point found.
func GetBounds(ps []image.Point) image.Rectangle {
	if len(ps) == 0 {
		return image.Rectangle{}
	}
	lo, hi := ps[0], ps[0]
	for _, p := range ps[1:] {
		if p.X < lo.X {
			lo.X = p.X
		}
		if p.Y < lo.Y {
			lo.Y = p.Y
		}
		if p.X > hi.X {
			hi.X = p.X
		}
		if p.Y > hi.Y {
			hi.Y = p.Y
		}
	}
	return image.Rectangle{Min: lo, Max: hi.Add(image.Point{X: 1, Y: 1})}
}
// ScaleRect scales a rectangle to fill at least the area of some target rectangle while maintaining
// its original aspect ratio. If any dimension of either passed rectangle is 0 the bounding
// rectangle is returned. The rectangle will be scaled about the origin (image.Point{0, 0}).
func ScaleRect(toScale, bounding image.Rectangle) image.Rectangle {
	if toScale.Dx() == 0 || bounding.Dx() == 0 || toScale.Dy() == 0 || bounding.Dy() == 0 {
		return bounding
	}
	var (
		xSF = float64(bounding.Dx()) / float64(toScale.Dx())
		ySF = float64(bounding.Dy()) / float64(toScale.Dy())
		// Taking the larger per-axis factor guarantees both dimensions
		// cover the bounding rectangle ("cover", not "contain").
		scaleFactor = math.Max(xSF, ySF)
		// Ceil so integer rounding never undershoots the bounding size.
		doScale = func(i int) int { return int(math.Ceil(float64(i) * scaleFactor)) }
	)
	return image.Rectangle{
		Min: image.Point{X: doScale(toScale.Min.X), Y: doScale(toScale.Min.Y)},
		Max: image.Point{X: doScale(toScale.Max.X), Y: doScale(toScale.Max.Y)},
	}
} | renderer/renderer.go | 0.790975 | 0.511961 | renderer.go | starcoder
package main
import (
"errors"
"os"
"time"
)
// Pin represents a single pin, which can be used either for reading or writing.
type Pin struct {
	Number    uint      // kernel GPIO number
	direction direction // in/out, fixed at construction
	f         *os.File  // sysfs value file backing read/write operations
}
// NewInput opens the given pin number for reading. The number provided should
// be the pin number known by the kernel. The pin is exported via sysfs, given
// a short settle delay, configured as an input, and opened read-only.
func NewInput(p uint) Pin {
	pin := Pin{Number: p}
	exportGPIO(pin)
	// Give the kernel a moment to create the sysfs entries after export.
	time.Sleep(10 * time.Millisecond)
	pin.direction = inDirection
	setDirection(pin, inDirection, 0)
	return openPin(pin, false)
}
// NewOutput opens the given pin number for writing. The number provided should
// be the pin number known by the kernel. NewOutput also needs to know whether
// the pin should be initialized high (true) or low (false).
func NewOutput(p uint, initHigh bool) Pin {
	pin := Pin{Number: p}
	exportGPIO(pin)
	// Give the kernel a moment to create the sysfs entries after export.
	time.Sleep(10 * time.Millisecond)
	var initVal uint
	if initHigh {
		initVal = 1
	}
	pin.direction = outDirection
	setDirection(pin, outDirection, initVal)
	return openPin(pin, true)
}
// Close releases the resources related to Pin.
// NOTE(review): the file Close error is discarded — confirm callers do not
// need it.
func (p Pin) Close() {
	p.f.Close()
}

// Read returns the value read at the pin as reported by the kernel. This should only be used for input pins.
func (p Pin) Read() (value uint, err error) {
	if p.direction != inDirection {
		return 0, errors.New("pin is not configured for input")
	}
	return readPin(p)
}

// SetLogicLevel sets the logic level for the Pin. This can be
// either "active high" or "active low"
func (p Pin) SetLogicLevel(logicLevel LogicLevel) error {
	return setLogicLevel(p, logicLevel)
}

// High sets the value of an output pin to logic high.
// Returns an error if the pin was not opened for output.
func (p Pin) High() error {
	if p.direction != outDirection {
		return errors.New("pin is not configured for output")
	}
	return writePin(p, 1)
}

// Low sets the value of an output pin to logic low.
// Returns an error if the pin was not opened for output.
func (p Pin) Low() error {
	if p.direction != outDirection {
		return errors.New("pin is not configured for output")
	}
	return writePin(p, 0)
} | watchdog/io.go | 0.812942 | 0.402979 | io.go | starcoder
package aoc2020
import (
"fmt"
"strconv"
goutils "github.com/simonski/goutils"
)
/*
--- Part Two ---
For some reason, the sea port's computer system still can't communicate with your ferry's docking program. It must be using version 2 of the decoder chip!
A version 2 decoder chip doesn't modify the values being written at all. Instead, it acts as a memory address decoder. Immediately before a value is written to memory, each bit in the bitmask modifies the corresponding bit of the destination memory address in the following way:
If the bitmask bit is 0, the corresponding memory address bit is unchanged.
If the bitmask bit is 1, the corresponding memory address bit is overwritten with 1.
If the bitmask bit is X, the corresponding memory address bit is floating.
A floating bit is not connected to anything and instead fluctuates unpredictably. In practice, this means the floating bits will take on all possible values, potentially causing many memory addresses to be written all at once!
For example, consider the following program:
mask = 000000000000000000000000000000X1001X
mem[42] = 100
mask = 00000000000000000000000000000000X0XX
mem[26] = 1
When this program goes to write to memory address 42, it first applies the bitmask:
address: 000000000000000000000000000000101010 (decimal 42)
mask: 000000000000000000000000000000X1001X
result: 000000000000000000000000000000X1101X
After applying the mask, four bits are overwritten, three of which are different, and two of which are floating. Floating bits take on every possible combination of values; with two floating bits, four actual memory addresses are written:
000000000000000000000000000000011010 (decimal 26)
000000000000000000000000000000011011 (decimal 27)
000000000000000000000000000000111010 (decimal 58)
000000000000000000000000000000111011 (decimal 59)
Next, the program is about to write to memory address 26 with a different bitmask:
address: 000000000000000000000000000000011010 (decimal 26)
mask: 00000000000000000000000000000000X0XX
result: 00000000000000000000000000000001X0XX
This results in an address with three floating bits, causing writes to eight memory addresses:
000000000000000000000000000000010000 (decimal 16)
000000000000000000000000000000010001 (decimal 17)
000000000000000000000000000000010010 (decimal 18)
000000000000000000000000000000010011 (decimal 19)
000000000000000000000000000000011000 (decimal 24)
000000000000000000000000000000011001 (decimal 25)
000000000000000000000000000000011010 (decimal 26)
000000000000000000000000000000011011 (decimal 27)
The entire 36-bit address space still begins initialized to the value 0 at every address, and you still need the sum of all values left in memory at the end of the program. In this example, the sum is 208.
Execute the initialization program using an emulator for a version 2 decoder chip. What is the sum of all values left in memory after it completes?
*/
// AOC_2020_14_part2_attempt1 solves AoC 2020 day 14 part 2: it loads the
// program from the file given by -input, runs it with the version-2
// (memory-address decoder) semantics, and prints the sum of all memory values.
func AOC_2020_14_part2_attempt1(cli *goutils.CLI) {
	filename := cli.GetFileExistsOrDie("-input")
	p := NewDay14ProgramFromFilename(filename)
	p.RunV2()
	p.Debug()
	fmt.Printf("Total is %v\n", p.Sum())
}
// SetV2 writes value using the version-2 (memory address) decoder. The
// current mask is applied to the binary form of index; floating (X) bits are
// expanded into every combination, and value is stored at each resulting
// address.
func (m *Memory) SetV2(index int, value int64) {
	binaryMemoryAddress := goutils.Decimal_to_binary(int64(index))
	floatingMask := m.GetMask().DeriveNewMask(binaryMemoryAddress)
	for _, binaryAddress := range floatingMask.GetVariations() {
		decimalAddress := goutils.Binary_to_decimal(binaryAddress)
		m.Get(int(decimalAddress)).SetValue(value)
	}
}
// ExecuteV2 dispatches a single program line under version-2 semantics:
// "mask = ..." lines update the current mask, "mem[p] = v" lines write v via
// the address-decoding SetV2.
func (p *Day14Program) ExecuteV2(instruction string) {
	fmt.Printf("ExecuteV2('%v')\n", instruction)
	if p.IsMask(instruction) {
		mask := p.ParseMask(instruction)
		p.Memory.SetMask(mask)
	} else if p.IsMem(instruction) {
		position, value := p.ParseMem(instruction)
		// NOTE(review): strconv.Atoi errors are discarded; malformed input
		// silently becomes 0 — confirm input is always well-formed.
		iposition, _ := strconv.Atoi(position)
		ivalue, _ := strconv.Atoi(value)
		p.Memory.SetV2(iposition, int64(ivalue))
	}
	fmt.Printf("\n")
} | app/aoc2020/aoc2020_14_part2.go | 0.607197 | 0.626895 | aoc2020_14_part2.go | starcoder
package textile
import (
"image"
)
// Textile represents every cell in a display as a string that ideally renders
// as a single glyph. Like images and slices, the textile is a thin header
// that can share allocated memory with other textiles.
type Textile struct {
Strings []string
Stride int
Rect image.Rectangle
}
// New returns a Textile with the given rectangle.
// As with images, the rectangle need not rest at the origin; every cell
// starts as the empty string.
func New(r image.Rectangle) *Textile {
	w := r.Dx()
	return &Textile{
		Strings: make([]string, w*r.Dy()),
		Stride:  w,
		Rect:    r,
	}
}
// Bounds returns the bounding box of the textile.
func (t *Textile) Bounds() image.Rectangle {
	return t.Rect
}

// Draw writes the text from a source textile onto a destination textile,
// within a rectangle of the destination textile, offset by a position within
// the source textile. Empty-string source cells are treated as transparent
// and leave the destination unchanged.
func Draw(dst *Textile, r image.Rectangle, src *Textile, sp image.Point) {
	// internal.Clip(dst.Bounds(), &r, src.Bounds(), &sp, nil, nil)
	// r is clipped to dst only; out-of-range src reads are handled by At
	// returning "".
	r = r.Intersect(dst.Bounds())
	if r.Empty() {
		return
	}
	w, h := r.Dx(), r.Dy()
	for y := 0; y < h; y++ {
		for x := 0; x < w; x++ {
			t := src.At(sp.X+x, sp.Y+y)
			if t != "" {
				dst.Set(r.Min.X+x, r.Min.Y+y, t)
			}
		}
	}
}

// Fill overwrites every cell in the textile with the given string.
func (t *Textile) Fill(str string) {
	area := t.Rect
	for y := area.Min.Y; y < area.Max.Y; y++ {
		for x := area.Min.X; x < area.Max.X; x++ {
			t.Set(x, y, str)
		}
	}
}

// At returns the string at a given point, or "" when the point lies
// outside the textile's bounds.
func (t *Textile) At(x, y int) string {
	if !(image.Point{x, y}.In(t.Rect)) {
		return ""
	}
	i := t.StringsOffset(x, y)
	return t.Strings[i]
}

// Set overwrites the string at a point; out-of-bounds points are ignored.
func (t *Textile) Set(x, y int, str string) {
	if !(image.Point{x, y}.In(t.Rect)) {
		return
	}
	i := t.StringsOffset(x, y)
	t.Strings[i] = str
}

// SubText returns a region of text within the textile.
// The returned view shares backing storage with t; an empty intersection
// yields an empty Textile.
func (t *Textile) SubText(r image.Rectangle) *Textile {
	r = r.Intersect(t.Rect)
	if r.Empty() {
		return &Textile{}
	}
	i := t.StringsOffset(r.Min.X, r.Min.Y)
	return &Textile{
		Strings: t.Strings[i:],
		Stride:  t.Stride,
		Rect:    r,
	}
}

// StringsOffset is a utility for seeking a slice of the underlying strings
// starting at the given position within the allocation.
func (t *Textile) StringsOffset(x, y int) int {
	return (y-t.Rect.Min.Y)*t.Stride + (x - t.Rect.Min.X)
} | textile/textile.go | 0.892557 | 0.648341 | textile.go | starcoder
package graphql
import "github.com/vektah/gqlparser/v2/ast"
var __Schema = &ast.Definition{
Kind: ast.Object,
Description: "A GraphQL Schema defines the capabilities of a GraphQL server. It exposes all available types and directives on the server, as well as the entry points for query, mutation, and subscription operations.",
Name: "__Schema",
Fields: ast.FieldList{
{
Description: "A GraphQL Schema defines the capabilities of a GraphQL server. It exposes all available types and directives on the server, as well as the entry points for query, mutation, and subscription operations.",
Name: "description",
Type: &ast.Type{
NamedType: GraphQLString.Name,
Position: blankBuiltInPos,
},
Position: blankBuiltInPos,
},
{
Description: "A list of all types supported by this server.",
Name: "types",
Type: &ast.Type{
Elem: &ast.Type{
NamedType: __Type.Name,
NonNull: true,
Position: blankBuiltInPos,
},
NonNull: true,
Position: blankBuiltInPos,
},
Position: blankBuiltInPos,
},
{
Description: "The type that query operations will be rooted at.",
Name: "queryType",
Type: &ast.Type{
NamedType: __Type.Name,
NonNull: true,
Position: blankBuiltInPos,
},
Position: blankBuiltInPos,
},
{
Description: "If this server supports mutation, the type that mutation operations will be rooted at.",
Name: "mutationType",
Type: &ast.Type{
NamedType: __Type.Name,
Position: blankBuiltInPos,
},
Position: blankBuiltInPos,
},
{
Description: "If this server support subscription, the type that subscription operations will be rooted at.",
Name: "subscriptionType",
Type: &ast.Type{
NamedType: __Type.Name,
Position: blankBuiltInPos,
},
Position: blankBuiltInPos,
},
{
Description: "A list of all directives supported by this server.",
Name: "directives",
Type: &ast.Type{
Elem: &ast.Type{
NamedType: __Directive.Name,
NonNull: true,
Position: blankBuiltInPos,
},
NonNull: true,
Position: blankBuiltInPos,
},
Position: blankBuiltInPos,
},
},
Position: blankBuiltInPos,
BuiltIn: true,
}
var __Directive = &ast.Definition{
Kind: ast.Object,
Description: "A Directive provides a way to describe alternate runtime execution and type validation behavior in a GraphQL document.\\n\\nIn some cases, you need to provide options to alter GraphQL's execution behavior in ways field arguments will not suffice, such as conditionally including or skipping a field. Directives provide this by describing additional information to the executor.",
Name: "__Directive",
Fields: ast.FieldList{
{
Name: "name",
Type: &ast.Type{
NamedType: GraphQLString.Name,
NonNull: true,
Position: blankBuiltInPos,
},
Position: blankBuiltInPos,
},
{
Description: "A Directive provides a way to describe alternate runtime execution and type validation behavior in a GraphQL document.\\n\\nIn some cases, you need to provide options to alter GraphQL's execution behavior in ways field arguments will not suffice, such as conditionally including or skipping a field. Directives provide this by describing additional information to the executor.",
Name: "description",
Type: &ast.Type{
NamedType: GraphQLString.Name,
Position: blankBuiltInPos,
},
Position: blankBuiltInPos,
},
{
Name: "isRepeatable",
Type: &ast.Type{
NamedType: GraphQLBoolean.Name,
NonNull: true,
Position: blankBuiltInPos,
},
Position: blankBuiltInPos,
},
{
Name: "locations",
Type: &ast.Type{
Elem: &ast.Type{
NamedType: __DirectiveLocation.Name,
NonNull: true,
Position: blankBuiltInPos,
},
NonNull: true,
Position: blankBuiltInPos,
},
Position: blankBuiltInPos,
},
{
Name: "args",
Type: &ast.Type{
Elem: &ast.Type{
NamedType: __InputValue.Name,
NonNull: true,
Position: blankBuiltInPos,
},
NonNull: true,
Position: blankBuiltInPos,
},
Position: blankBuiltInPos,
},
},
Position: blankBuiltInPos,
BuiltIn: true,
}
var __DirectiveLocation = &ast.Definition{
Kind: ast.Enum,
Description: "A Directive can be adjacent to many parts of the GraphQL language, a __DirectiveLocation describes one such possible adjacencies.",
Name: "__DirectiveLocation",
EnumValues: ast.EnumValueList{
{
Description: "Location adjacent to a query operation.",
Name: "QUERY",
Position: blankBuiltInPos,
},
{
Description: "Location adjacent to a mutation operation.",
Name: "MUTATION",
Position: blankBuiltInPos,
},
{
Description: "Location adjacent to a subscription operation.",
Name: "SUBSCRIPTION",
Position: blankBuiltInPos,
},
{
Description: "Location adjacent to a field.",
Name: "FIELD",
Position: blankBuiltInPos,
},
{
Description: "Location adjacent to a fragment definition.",
Name: "FRAGMENT_DEFINITION",
Position: blankBuiltInPos,
},
{
Description: "Location adjacent to a fragment spread.",
Name: "FRAGMENT_SPREAD",
Position: blankBuiltInPos,
},
{
Description: "Location adjacent to an inline fragment.",
Name: "INLINE_FRAGMENT",
Position: blankBuiltInPos,
},
{
Description: "Location adjacent to a variable definition.",
Name: "VARIABLE_DEFINITION",
Position: blankBuiltInPos,
},
{
Description: "Location adjacent to a schema definition.",
Name: "SCHEMA",
Position: blankBuiltInPos,
},
{
Description: "",
Name: "",
Position: blankBuiltInPos,
},
{
Description: "Location adjacent to a scalar definition.",
Name: "SCALAR",
Position: blankBuiltInPos,
},
{
Description: "Location adjacent to an object type definition.",
Name: "OBJECT",
Position: blankBuiltInPos,
},
{
Description: "Location adjacent to a field definition.",
Name: "FIELD_DEFINITION",
Position: blankBuiltInPos,
},
{
Description: "Location adjacent to an argument definition.",
Name: "ARGUMENT_DEFINITION",
Position: blankBuiltInPos,
},
{
Description: "Location adjacent to an interface definition.",
Name: "INTERFACE",
Position: blankBuiltInPos,
},
{
Description: "Location adjacent to a union definition.",
Name: "UNION",
Position: blankBuiltInPos,
},
{
Description: "Location adjacent to an enum definition.",
Name: "ENUM",
Position: blankBuiltInPos,
},
{
Description: "Location adjacent to an enum value definition.",
Name: "ENUM_VALUE",
Position: blankBuiltInPos,
},
{
Description: "Location adjacent to an input object type definition.",
Name: "INPUT_OBJECT",
Position: blankBuiltInPos,
},
{
Description: "Location adjacent to an input object field definition.",
Name: "INPUT_FIELD_DEFINITION",
Position: blankBuiltInPos,
},
},
Position: blankBuiltInPos,
BuiltIn: true,
}
var __Type = &ast.Definition{
Kind: ast.Object,
Description: "The fundamental unit of any GraphQL Schema is the type. There are many kinds of types in GraphQL as represented by the `__TypeKind` enum.\\n\\nDepending on the kind of a type, certain fields describe information about that type. Scalar types provide no information beyond a name, description and optional `specifiedByURL`, while Enum types provide their values. Object and Interface types provide the fields they describe. Abstract types, Union and Interface, provide the Object types possible at runtime. List and NonNull types compose other types.",
Name: "__Type",
Fields: ast.FieldList{
{
Name: "kind",
Type: &ast.Type{
NamedType: __TypeKind.Name,
NonNull: true,
Position: blankBuiltInPos,
},
Position: blankBuiltInPos,
},
{
Name: "name",
Type: &ast.Type{
NamedType: GraphQLString.Name,
Position: blankBuiltInPos,
},
Position: blankBuiltInPos,
},
{
Name: "description",
Type: &ast.Type{
NamedType: GraphQLString.Name,
Position: blankBuiltInPos,
},
Position: blankBuiltInPos,
},
{
Name: "specifiedByURL",
Type: &ast.Type{
NamedType: GraphQLString.Name,
Position: blankBuiltInPos,
},
Position: blankBuiltInPos,
},
{
Name: "fields",
Type: &ast.Type{
NamedType: __Field.Name,
NonNull: true,
Position: blankBuiltInPos,
},
Arguments: ast.ArgumentDefinitionList{
{
Name: "includeDeprecated",
Type: &ast.Type{
NamedType: GraphQLBoolean.Name,
Position: blankBuiltInPos,
},
DefaultValue: &ast.Value{
Raw: "false",
Kind: ast.BooleanValue,
Position: blankBuiltInPos,
},
Position: blankBuiltInPos,
},
},
Position: blankBuiltInPos,
},
{
Name: "interfaces",
Type: &ast.Type{
Elem: &ast.Type{
NamedType: "__Type", // __Type.Name,
NonNull: true,
Position: blankBuiltInPos,
},
Position: blankBuiltInPos,
},
Position: blankBuiltInPos,
},
{
Name: "possibleTypes",
Type: &ast.Type{
Elem: &ast.Type{
NamedType: "__Type", // __Type.Name,
NonNull: true,
Position: blankBuiltInPos,
},
Position: blankBuiltInPos,
},
Position: blankBuiltInPos,
},
{
Name: "enumValues",
Type: &ast.Type{
Elem: &ast.Type{
NamedType: __EnumValue.Name,
NonNull: true,
Position: blankBuiltInPos,
},
Position: blankBuiltInPos,
},
Arguments: ast.ArgumentDefinitionList{
{
Name: "includeDeprecated",
Type: &ast.Type{
NamedType: GraphQLBoolean.Name,
Position: blankBuiltInPos,
},
DefaultValue: &ast.Value{
Raw: "false",
Kind: ast.BooleanValue,
Position: blankBuiltInPos,
},
Position: blankBuiltInPos,
},
},
Position: blankBuiltInPos,
},
{
Name: "inputFields",
Type: &ast.Type{
Elem: &ast.Type{
NamedType: __InputValue.Name,
NonNull: true,
Position: blankBuiltInPos,
},
Position: blankBuiltInPos,
},
Arguments: ast.ArgumentDefinitionList{
{
Name: "includeDeprecated",
Type: &ast.Type{
NamedType: GraphQLBoolean.Name,
Position: blankBuiltInPos,
},
DefaultValue: &ast.Value{
Raw: "false",
Kind: ast.BooleanValue,
Position: blankBuiltInPos,
},
Position: blankBuiltInPos,
},
},
Position: blankBuiltInPos,
},
{
Name: "ofType",
Type: &ast.Type{
NamedType: "__Type", // __Type.Name,
Position: blankBuiltInPos,
},
Position: blankBuiltInPos,
},
},
Position: blankBuiltInPos,
BuiltIn: true,
}
// __Field is the built-in introspection type describing one field of an
// Object or Interface type: its name, description, arguments, return type,
// and deprecation status (per the GraphQL spec's "__Field").
var __Field = &ast.Definition{
	Kind:        ast.Object,
	Description: "Object and Interface types are described by a list of Fields, each of which has a name, potentially a list of arguments, and a return type.",
	Name:        "__Field",
	Fields: ast.FieldList{
		{
			Name: "name",
			Type: &ast.Type{
				NamedType: GraphQLString.Name,
				NonNull:   true,
				Position:  blankBuiltInPos,
			},
			Position: blankBuiltInPos,
		},
		{
			Name: "description",
			Type: &ast.Type{
				NamedType: GraphQLString.Name,
				Position:  blankBuiltInPos,
			},
			Position: blankBuiltInPos,
		},
		{
			// args: non-null list of non-null __InputValue.
			Name: "args",
			Type: &ast.Type{
				Elem: &ast.Type{
					NamedType: __InputValue.Name,
					NonNull:   true,
					Position:  blankBuiltInPos,
				},
				NonNull:  true,
				Position: blankBuiltInPos,
			},
			Arguments: ast.ArgumentDefinitionList{
				{
					// Deprecated arguments are omitted unless requested.
					Name: "includeDeprecated",
					Type: &ast.Type{
						NamedType: GraphQLBoolean.Name,
						Position:  blankBuiltInPos,
					},
					DefaultValue: &ast.Value{
						Raw:      "false",
						Kind:     ast.BooleanValue,
						Position: blankBuiltInPos,
					},
					Position: blankBuiltInPos,
				},
			},
			Position: blankBuiltInPos,
		},
		{
			Name: "type",
			Type: &ast.Type{
				NamedType: "__Type", // __Type.Name,
				NonNull:   true,
				Position:  blankBuiltInPos,
			},
			Position: blankBuiltInPos,
		},
		{
			Name: "isDeprecated",
			Type: &ast.Type{
				NamedType: GraphQLBoolean.Name,
				NonNull:   true,
				Position:  blankBuiltInPos,
			},
			Position: blankBuiltInPos,
		},
		{
			Name: "deprecationReason",
			Type: &ast.Type{
				NamedType: GraphQLString.Name,
				Position:  blankBuiltInPos,
			},
			Position: blankBuiltInPos,
		},
	},
	Position: blankBuiltInPos,
	BuiltIn:  true,
}
// __InputValue is the built-in introspection type describing an argument of a
// field/directive or an input-object field: its name, type, optional default
// value, and deprecation status (per the GraphQL spec's "__InputValue").
var __InputValue = &ast.Definition{
	Kind:        ast.Object,
	Description: "Arguments provided to Fields or Directives and the input fields of an InputObject are represented as Input Values which describe their type and optionally a default value.",
	Name:        "__InputValue",
	Fields: ast.FieldList{
		{
			Name: "name",
			Type: &ast.Type{
				NamedType: GraphQLString.Name,
				NonNull:   true,
				Position:  blankBuiltInPos,
			},
			Position: blankBuiltInPos,
		},
		{
			Name: "description",
			Type: &ast.Type{
				NamedType: GraphQLString.Name,
				Position:  blankBuiltInPos,
			},
			Position: blankBuiltInPos,
		},
		{
			Name: "type",
			Type: &ast.Type{
				NamedType: "__Type", // __Type.Name,
				NonNull:   true,
				Position:  blankBuiltInPos,
			},
			Position: blankBuiltInPos,
		},
		{
			Description: "A GraphQL-formatted string representing the default value for this input value.",
			Name:        "defaultValue",
			Type: &ast.Type{
				NamedType: GraphQLString.Name,
				Position:  blankBuiltInPos,
			},
			Position: blankBuiltInPos,
		},
		{
			Name: "isDeprecated",
			Type: &ast.Type{
				NamedType: GraphQLBoolean.Name,
				NonNull:   true,
				Position:  blankBuiltInPos,
			},
			Position: blankBuiltInPos,
		},
		{
			Name: "deprecationReason",
			Type: &ast.Type{
				NamedType: GraphQLString.Name,
				Position:  blankBuiltInPos,
			},
			Position: blankBuiltInPos,
		},
	},
	Position: blankBuiltInPos,
	BuiltIn:  true,
}
// __EnumValue is the built-in introspection type describing one possible value
// of an Enum type, including its deprecation status (per the GraphQL spec's
// "__EnumValue").
var __EnumValue = &ast.Definition{
	Kind:        ast.Object,
	Description: "One possible value for a given Enum. Enum values are unique values, not a placeholder for a string or numeric value. However an Enum value is returned in a JSON response as a string.",
	Name:        "__EnumValue",
	Fields: ast.FieldList{
		{
			Name: "name",
			Type: &ast.Type{
				NamedType: GraphQLString.Name,
				NonNull:   true,
				Position:  blankBuiltInPos,
			},
			Position: blankBuiltInPos,
		},
		{
			Name: "description",
			Type: &ast.Type{
				NamedType: GraphQLString.Name,
				Position:  blankBuiltInPos,
			},
			Position: blankBuiltInPos,
		},
		{
			Name: "isDeprecated",
			Type: &ast.Type{
				NamedType: GraphQLBoolean.Name,
				NonNull:   true,
				Position:  blankBuiltInPos,
			},
			Position: blankBuiltInPos,
		},
		{
			Name: "deprecationReason",
			Type: &ast.Type{
				NamedType: GraphQLString.Name,
				Position:  blankBuiltInPos,
			},
			Position: blankBuiltInPos,
		},
	},
	Position: blankBuiltInPos,
	BuiltIn:  true,
}
// __TypeKind is the built-in introspection enum distinguishing the eight kinds
// of GraphQL type (per the GraphQL spec's "__TypeKind").
var __TypeKind = &ast.Definition{
	Kind:        ast.Enum,
	Description: "An enum describing what kind of type a given `__Type` is.",
	Name:        "__TypeKind",
	EnumValues: ast.EnumValueList{
		{
			Description: "Indicates this type is a scalar.",
			Name:        "SCALAR",
			Position:    blankBuiltInPos,
		},
		{
			Description: "Indicates this type is an object. `fields` and `interfaces` are valid fields.",
			Name:        "OBJECT",
			Position:    blankBuiltInPos,
		},
		{
			Description: "Indicates this type is an interface. `fields`, `interfaces`, and `possibleTypes` are valid fields.",
			Name:        "INTERFACE",
			Position:    blankBuiltInPos,
		},
		{
			Description: "Indicates this type is a union. `possibleTypes` is a valid field.",
			Name:        "UNION",
			Position:    blankBuiltInPos,
		},
		{
			Description: "Indicates this type is an enum. `enumValues` is a valid field.",
			Name:        "ENUM",
			Position:    blankBuiltInPos,
		},
		{
			Description: "Indicates this type is an input object. `inputFields` is a valid field.",
			Name:        "INPUT_OBJECT",
			Position:    blankBuiltInPos,
		},
		{
			Description: "Indicates this type is a list. `ofType` is a valid field.",
			Name:        "LIST",
			Position:    blankBuiltInPos,
		},
		{
			Description: "Indicates this type is a non-null. `ofType` is a valid field.",
			Name:        "NON_NULL",
			Position:    blankBuiltInPos,
		},
	},
	Position: blankBuiltInPos,
	BuiltIn:  true,
}
// IntrospectionTypes lists every built-in introspection type definition so it
// can be merged into a schema and looked up by name (see IsIntrospectionType).
var IntrospectionTypes = ast.DefinitionList{
	__Schema,
	__Directive,
	__DirectiveLocation,
	__Type,
	__Field,
	__InputValue,
	__EnumValue,
	__TypeKind,
}
// SchemaMetaFieldDef is the implicit "__schema" meta-field available on the
// query root type; it returns the server's full type schema.
var SchemaMetaFieldDef = &ast.FieldDefinition{
	Name:        "__schema",
	Description: "Access the current type schema of this server.",
	Type:        ast.NamedType("__Schema", nil),
	// Consistency: every other built-in definition in this file records the
	// blank built-in position; this one previously left Position unset (nil).
	Position: blankBuiltInPos,
}
// TypeMetaFieldDef is the implicit "__type(name: String!)" meta-field
// available on the query root type; it looks up a single named type.
var TypeMetaFieldDef = &ast.FieldDefinition{
	Name:        "__type",
	Description: "Request the type information of a single type.",
	Type:        ast.NamedType("__Type", nil),
	Arguments: []*ast.ArgumentDefinition{
		{
			Name:     "name",
			Type:     ast.NonNullNamedType("String", nil),
			Position: blankBuiltInPos,
		},
	},
	Position: blankBuiltInPos,
}
// TypeNameMetaFieldDef is the implicit "__typename" meta-field available on
// every object, interface, and union selection set.
var TypeNameMetaFieldDef = &ast.FieldDefinition{
	Name:     "__typename",
	Type:     ast.NamedType("String", nil),
	Position: blankBuiltInPos,
}
// IsIntrospectionType reports whether typeName names one of the built-in
// introspection types listed in IntrospectionTypes (e.g. "__Schema",
// "__Type", "__Field").
func IsIntrospectionType(typeName string) bool {
	for _, def := range IntrospectionTypes {
		if def.Name == typeName {
			return true
		}
	}
	return false
} | internal/graphql/introspection.go | 0.568416 | 0.643357 | introspection.go | starcoder |
package types
import (
"bytes"
"encoding/binary"
"encoding/hex"
"fmt"
"io"
"strings"
"github.com/apex/log"
)
// B-tree layout constants from the Apple File System Reference.
const (
	/** B-Tree Table of Contents Constants **/
	BTREE_TOC_ENTRY_INCREMENT  = 8 // TOC grows/shrinks in steps of this many entries
	BTREE_TOC_ENTRY_MAX_UNUSED = (2 * BTREE_TOC_ENTRY_INCREMENT)
	/** B-Tree Node Constants **/
	BTREE_NODE_SIZE_DEFAULT    = 4096 // = 4 Ki
	BTREE_NODE_MIN_ENTRY_COUNT = 4
)
// btreeInfoFixedFlags is the bt_flags bit-field of btree_info_fixed_t,
// describing fixed properties of a B-tree.
type btreeInfoFixedFlags uint32

const (
	/** B-Tree Flags **/
	BTREE_UINT64_KEYS       btreeInfoFixedFlags = 0x00000001 // Code that works with the B-tree should enable optimizations to make comparison of keys fast.
	BTREE_SEQUENTIAL_INSERT btreeInfoFixedFlags = 0x00000002 // Code that works with the B-tree should enable optimizations to keep the B-tree compact during sequential insertion of entries.
	BTREE_ALLOW_GHOSTS      btreeInfoFixedFlags = 0x00000004 // The table of contents is allowed to contain keys that have no corresponding value.
	BTREE_EPHEMERAL         btreeInfoFixedFlags = 0x00000008 // The nodes in the B-tree use ephemeral object identifiers to link to child nodes.
	BTREE_PHYSICAL          btreeInfoFixedFlags = 0x00000010 // The nodes in the B-tree use physical object identifiers to link to child nodes.
	BTREE_NONPERSISTENT     btreeInfoFixedFlags = 0x00000020 // The B-tree isnʼt persisted across unmounting.
	BTREE_KV_NONALIGNED     btreeInfoFixedFlags = 0x00000040 // The keys and values in the B-tree arenʼt required to be aligned to eight-byte boundaries.
	BTREE_HASHED            btreeInfoFixedFlags = 0x00000080 // The nonleaf nodes of this B-tree store a hash of their child nodes.
	BTREE_NOHEADER          btreeInfoFixedFlags = 0x00000100 // The nodes of this B-tree are stored without object headers.
)
// btreeNodeFlag is the btn_flags bit-field of btree_node_phys_t, describing
// properties of an individual B-tree node.
type btreeNodeFlag uint16

const (
	/** B-Tree Node Flags **/
	BTNODE_ROOT            btreeNodeFlag = 0x0001 // node is the tree's root
	BTNODE_LEAF            btreeNodeFlag = 0x0002 // node is a leaf (level 0)
	BTNODE_FIXED_KV_SIZE   btreeNodeFlag = 0x0004 // TOC stores kvoff_t (offsets only) instead of kvloc_t
	BTNODE_HASHED          btreeNodeFlag = 0x0008 // child pointers carry a hash of the child node
	BTNODE_NOHEADER        btreeNodeFlag = 0x0010 // node stored without an object header
	BTNODE_CHECK_KOFF_INVAL btreeNodeFlag = 0x8000 // in-memory only: key offsets need validation
)
// nloc_t is a location within a B-tree node: a byte offset and a length.
type nloc_t struct {
	Off uint16 // offset in bytes
	Len uint16 // length in bytes
}
// KVLocT is a kvloc_t struct: the table-of-contents entry used when keys and
// values are variable-size, recording the offset and length of both halves of
// one key/value pair.
type KVLocT struct {
	Key nloc_t
	Val nloc_t
}
// KVOffT is a kvoff_t struct: the table-of-contents entry used when keys and
// values are fixed-size (BTNODE_FIXED_KV_SIZE), recording only their offsets.
type KVOffT struct {
	Key uint16
	Val uint16
}
// BTreeInfoFixedT is a btree_info_fixed_t struct: static information about a
// B-tree that does not change as entries are added or removed.
type BTreeInfoFixedT struct {
	Flags    btreeInfoFixedFlags
	NodeSize uint32 // on-disk size of a node, in bytes
	KeySize  uint32 // key size in bytes, or 0 if keys are variable-size
	ValSize  uint32 // value size in bytes, or 0 if values are variable-size
}
// BTreeInfoT is a btree_info_t struct: information about a B-tree, stored in
// the trailing 40 bytes of the root node.
type BTreeInfoT struct {
	Fixed      BTreeInfoFixedT
	LongestKey uint32 // length in bytes of the longest key ever stored
	LongestVal uint32 // length in bytes of the longest value ever stored
	KeyCount   uint64 // number of keys in the tree
	NodeCount  uint64 // number of nodes in the tree
}
// BTREE_NODE_HASH_SIZE_MAX is the largest child-node hash size allowed by the
// APFS reference.
const BTREE_NODE_HASH_SIZE_MAX = 64

// BTreeNodeIndexNodeValT is a btn_index_node_val_t: the value stored in
// nonleaf nodes of hashed B-trees, pointing at a child node plus its hash.
type BTreeNodeIndexNodeValT struct {
	ChildOid  OidT
	ChildHash [32]byte //BTREE_NODE_HASH_SIZE_MAX=64 acc to spec, but in reality appears to be max size of hash type used! 32 seen // FIXME: what?
	// ChildHash [BTREE_NODE_HASH_SIZE_MAX]byte
}
// String renders the index-node value as "child_oid=<hex>, child_hash=<hex>".
func (v BTreeNodeIndexNodeValT) String() string {
	hash := hex.EncodeToString(v.ChildHash[:])
	return fmt.Sprintf("child_oid=%#x, child_hash=%s", v.ChildOid, hash)
}
// OMapEntry is a omap_entry_t struct.
// Custom data structure used to store the key and value of an object map entry
// together.
type OMapEntry struct {
	Key OMapKey
	Val OMapVal
}
/**
 * Custom data structure used to store a full file-system record (i.e. a single
 * key–value pair from a file-system root tree) alongside each other for easier
 * data access and manipulation.
 *
 * One can make use of an instance of this datatype by determining the structures
 * contained within its `data` field by appealing to the `obj_id_and_type` field
 * of the `j_key_t` structure for the record, which is guaranteed to exist and
 * start at `data[0]`. That is, a pointer to this instance of `j_key_t` can be
 * obtained with `j_key_t* record_header = record->data`, where `record` is an
 * instance of this type, `j_rec_t`.
 *
 * key_len: Length of the file-system record's key-part, in bytes.
 *
 * val_len: Length of the file-system record's value-part, in bytes.
 *
 * data: Array of `key_len + val_len` bytes of data, of which,
 * index `0` through `key_len - 1` (inclusive) contain the
 * key-part data, and index `key_len` through `key_len + val_len - 1`
 * (inclusive) contain the value-part data.
 */
type JRecT struct {
	KeyLen uint16
	ValLen uint16
	Data   []byte
}
// BTreeNodePhysT is a btree_node_phys_t struct: the fixed on-disk header of a
// B-tree node (the object header and trailing data array are handled
// separately).
type BTreeNodePhysT struct {
	// Obj ObjPhysT
	Flags       btreeNodeFlag
	Level       uint16 // depth above the leaves; 0 means leaf node
	Nkeys       uint32 // number of keys stored in this node
	TableSpace  nloc_t // location of the table of contents
	FreeSpace   nloc_t // location of the shared free space
	KeyFreeList nloc_t // head of the freed-key-space linked list
	ValFreeList nloc_t // head of the freed-value-space linked list
	// Data []uint64
}
// block is a raw on-disk block: its address, size, raw contents, and a reader
// positioned over those contents.
type block struct {
	Addr uint64
	Size uint64
	Data []byte
	r    *bytes.Reader
}
// BTreeNodePhys is a btree_node_phys_t struct with data array: the fixed node
// header plus the decoded entries, a link to the parent node, the tree's info
// block, and the raw backing block.
type BTreeNodePhys struct {
	BTreeNodePhysT
	Entries []interface{} // decoded key/value entries (NodeEntry, OMapNodeEntry, ...)
	Parent  *BTreeNodePhys
	Info    *BTreeInfoT
	block
}
// IsRoot reports whether the BTNODE_ROOT flag is set on this node.
func (n *BTreeNodePhys) IsRoot() bool {
	return n.Flags&BTNODE_ROOT == BTNODE_ROOT
}
// IsLeaf reports whether the BTNODE_LEAF flag is set on this node.
func (n *BTreeNodePhys) IsLeaf() bool {
	return n.Flags&BTNODE_LEAF == BTNODE_LEAF
}
// FixedKvSize reports whether the node's table of contents stores fixed-size
// key/value offsets (BTNODE_FIXED_KV_SIZE).
func (n *BTreeNodePhys) FixedKvSize() bool {
	return n.Flags&BTNODE_FIXED_KV_SIZE == BTNODE_FIXED_KV_SIZE
}
// Hashed reports whether the BTNODE_HASHED flag is set on this node.
func (n *BTreeNodePhys) Hashed() bool {
	return n.Flags&BTNODE_HASHED == BTNODE_HASHED
}
// ReadFextNodeEntry reads a single file-extent-tree entry (key/value pair)
// from the node and appends it to n.Entries. r must be positioned at the
// entry's table-of-contents slot; on return r is restored to just past that
// slot so the caller can read the next entry.
func (n *BTreeNodePhys) ReadFextNodeEntry(r *bytes.Reader) error {
	var fent FextNodeEntry
	var keyOffset uint16
	var valOffset uint16
	if n.FixedKvSize() {
		// Fixed-size keys/values: TOC stores kvoff_t (offsets only).
		var off KVOffT
		if err := binary.Read(r, binary.LittleEndian, &off); err != nil {
			return fmt.Errorf("failed to read offsets: %v", err)
		}
		keyOffset = off.Key
		valOffset = off.Val
		fent.Offset = off
	} else {
		// Variable-size keys/values: TOC stores kvloc_t (offset + length).
		var off KVLocT
		if err := binary.Read(r, binary.LittleEndian, &off); err != nil {
			return fmt.Errorf("failed to read offsets: %v", err)
		}
		keyOffset = off.Key.Off
		valOffset = off.Val.Off
		fent.Offset = off
	}
	pos, _ := r.Seek(0, io.SeekCurrent) // remember TOC position to restore below
	// Key area starts at 56 bytes (32-byte object header + 24-byte fixed node
	// header) plus the table space.
	r.Seek(int64(keyOffset+n.TableSpace.Len+56), io.SeekStart) // key
	if err := binary.Read(r, binary.LittleEndian, &fent.Key); err != nil {
		return fmt.Errorf("failed to read %T: %v", fent.Key, err)
	}
	if valOffset != 0xFFFF { // 0xFFFF marks a ghost: key with no value
		// Values grow down from the end of the block; a root node (Flags&1 ==
		// BTNODE_ROOT) reserves a trailing 40-byte btree_info_t.
		r.Seek(int64(BLOCK_SIZE-uint64(valOffset)-40*uint64(n.Flags&1)), io.SeekStart) // val
		if err := binary.Read(r, binary.LittleEndian, &fent.Val); err != nil {
			return fmt.Errorf("failed to read %T: %v", fent.Val, err)
		}
	}
	n.Entries = append(n.Entries, fent)
	r.Seek(pos, io.SeekStart) // reset reader to right after we read the offsets
	return nil
}
// ReadSpacemanFreeQueueNodeEntry reads a single space-manager free-queue
// entry (key/value pair) from the node and appends it to n.Entries. r must be
// positioned at the entry's table-of-contents slot; on return r is restored
// to just past that slot.
func (n *BTreeNodePhys) ReadSpacemanFreeQueueNodeEntry(r *bytes.Reader) error {
	var sent SpacemanFreeQueueNodeEntry
	var keyOffset uint16
	var valOffset uint16
	if n.FixedKvSize() {
		// Fixed-size keys/values: TOC stores kvoff_t (offsets only).
		var off KVOffT
		if err := binary.Read(r, binary.LittleEndian, &off); err != nil {
			return fmt.Errorf("failed to read offsets: %v", err)
		}
		keyOffset = off.Key
		valOffset = off.Val
		sent.Offset = off
	} else {
		// Variable-size keys/values: TOC stores kvloc_t (offset + length).
		var off KVLocT
		if err := binary.Read(r, binary.LittleEndian, &off); err != nil {
			return fmt.Errorf("failed to read offsets: %v", err)
		}
		keyOffset = off.Key.Off
		valOffset = off.Val.Off
		sent.Offset = off
	}
	pos, _ := r.Seek(0, io.SeekCurrent) // remember TOC position to restore below
	// Key area starts at 56 bytes (32-byte object header + 24-byte fixed node
	// header) plus the table space.
	r.Seek(int64(keyOffset+n.TableSpace.Len+56), io.SeekStart) // key
	if err := binary.Read(r, binary.LittleEndian, &sent.Key); err != nil {
		return fmt.Errorf("failed to read %T: %v", sent.Key, err)
	}
	if valOffset != 0xFFFF { // 0xFFFF marks a ghost: key with no value
		// Values grow down from the end of the block; a root node (Flags&1 ==
		// BTNODE_ROOT) reserves a trailing 40-byte btree_info_t.
		r.Seek(int64(BLOCK_SIZE-uint64(valOffset)-40*uint64(n.Flags&1)), io.SeekStart) // val
		if err := binary.Read(r, binary.LittleEndian, &sent.Val); err != nil {
			return fmt.Errorf("failed to read %T: %v", sent.Val, err)
		}
	}
	n.Entries = append(n.Entries, sent)
	r.Seek(pos, io.SeekStart) // reset reader to right after we read the offsets
	return nil
}
// ReadOMapNodeEntry reads a single object-map entry (key/value pair) from the
// node and appends it to n.Entries. For nonleaf nodes (Level > 0) the value
// is the child node's physical address; for leaf nodes it is an omap value.
// r must be positioned at the entry's table-of-contents slot; on return r is
// restored to just past that slot.
func (n *BTreeNodePhys) ReadOMapNodeEntry(r *bytes.Reader) error {
	var oent OMapNodeEntry
	var keyOffset uint16
	var valOffset uint16
	if n.FixedKvSize() {
		// Fixed-size keys/values: TOC stores kvoff_t (offsets only).
		var off KVOffT
		if err := binary.Read(r, binary.LittleEndian, &off); err != nil {
			return fmt.Errorf("failed to read offsets: %v", err)
		}
		keyOffset = off.Key
		valOffset = off.Val
		oent.Offset = off
	} else {
		// Variable-size keys/values: TOC stores kvloc_t (offset + length).
		var off KVLocT
		if err := binary.Read(r, binary.LittleEndian, &off); err != nil {
			return fmt.Errorf("failed to read offsets: %v", err)
		}
		keyOffset = off.Key.Off
		valOffset = off.Val.Off
		oent.Offset = off
	}
	pos, _ := r.Seek(0, io.SeekCurrent) // remember TOC position to restore below
	// Key area starts at 56 bytes (32-byte object header + 24-byte fixed node
	// header) plus the table space.
	r.Seek(int64(keyOffset+n.TableSpace.Len+56), io.SeekStart) // key
	if err := binary.Read(r, binary.LittleEndian, &oent.Key); err != nil {
		return fmt.Errorf("failed to read omap_key_t: %v", err)
	}
	// Values grow down from the end of the block; a root node (Flags&1 ==
	// BTNODE_ROOT) reserves a trailing 40-byte btree_info_t.
	// NOTE(review): unlike the fext/spaceman readers, no 0xFFFF ghost check
	// is performed here — confirm omap trees never contain ghosts.
	r.Seek(int64(BLOCK_SIZE-uint64(valOffset)-40*uint64(n.Flags&1)), io.SeekStart) // val
	if n.Level > 0 {
		// Nonleaf node: value is the child node's physical address.
		if err := binary.Read(r, binary.LittleEndian, &oent.PAddr); err != nil {
			return fmt.Errorf("failed to read paddr_t: %v", err)
		}
	} else {
		// Leaf node: value is the omap value itself.
		if err := binary.Read(r, binary.LittleEndian, &oent.Val); err != nil {
			return fmt.Errorf("failed to read omap_key_t: %v", err)
		}
	}
	n.Entries = append(n.Entries, oent)
	r.Seek(pos, io.SeekStart) // reset reader to right after we read the offsets
	return nil
}
// ReadNodeEntry reads a node entry from reader
func (n *BTreeNodePhys) ReadNodeEntry(r *bytes.Reader) error {
var nent NodeEntry
var keyOffset uint16
var valOffset uint16
if n.FixedKvSize() {
var off KVOffT
if err := binary.Read(r, binary.LittleEndian, &off); err != nil {
return fmt.Errorf("failed to read offsets: %v", err)
}
keyOffset = off.Key
valOffset = off.Val
nent.Offset = off
} else {
var off KVLocT
if err := binary.Read(r, binary.LittleEndian, &off); err != nil {
return fmt.Errorf("failed to read offsets: %v", err)
}
keyOffset = off.Key.Off
valOffset = off.Val.Off
nent.Offset = off
}
pos, _ := r.Seek(0, io.SeekCurrent)
r.Seek(int64(keyOffset+n.TableSpace.Len+56), io.SeekStart) // key
if err := binary.Read(r, binary.LittleEndian, &nent.Hdr); err != nil {
return fmt.Errorf("failed to read j_key_t: %v", err)
}
switch nent.Hdr.GetType() {
case APFS_TYPE_SNAP_METADATA:
case APFS_TYPE_EXTENT:
case APFS_TYPE_INODE:
case APFS_TYPE_XATTR:
var k JXattrKeyT
if err := binary.Read(r, binary.LittleEndian, &k.NameLen); err != nil {
return fmt.Errorf("failed to read %T: %v", k, err)
}
n := make([]byte, k.NameLen)
if err := binary.Read(r, binary.LittleEndian, &n); err != nil {
return fmt.Errorf("failed to read %T: %v", k, err)
}
k.Name = strings.Trim(string(n[:]), "\x00")
nent.Key = k
case APFS_TYPE_SIBLING_LINK:
var k SiblingKeyT
if err := binary.Read(r, binary.LittleEndian, &k); err != nil {
return fmt.Errorf("failed to read %T: %v", k, err)
}
nent.Key = k
case APFS_TYPE_DSTREAM_ID:
case APFS_TYPE_CRYPTO_STATE:
case APFS_TYPE_FILE_EXTENT:
var k j_file_extent_key_t
if err := binary.Read(r, binary.LittleEndian, &k); err != nil {
return fmt.Errorf("failed to read %T: %v", k, err)
}
nent.Key = k
case APFS_TYPE_DIR_REC:
var k JDrecHashedKeyT
if err := binary.Read(r, binary.LittleEndian, &k.NameLenAndHash); err != nil {
return fmt.Errorf("failed to read %T: %v", k, err)
}
n := make([]byte, k.Length())
if err := binary.Read(r, binary.LittleEndian, &n); err != nil {
return fmt.Errorf("failed to read %T: %v", k, err)
}
k.Name = strings.Trim(string(n[:]), "\x00")
nent.Key = k
case APFS_TYPE_DIR_STATS:
case APFS_TYPE_SNAP_NAME:
var k j_snap_name_key_t
if err := binary.Read(r, binary.LittleEndian, &k.NameLen); err != nil {
return fmt.Errorf("failed to read %T: %v", k, err)
}
n := make([]byte, k.NameLen)
if err := binary.Read(r, binary.LittleEndian, &n); err != nil {
return fmt.Errorf("failed to read %T: %v", k, err)
}
k.Name = strings.Trim(string(n[:]), "\x00")
nent.Key = k
case APFS_TYPE_SIBLING_MAP:
case APFS_TYPE_FILE_INFO:
var k j_file_info_key_t
if err := binary.Read(r, binary.LittleEndian, &k); err != nil {
return fmt.Errorf("failed to read %T: %v", k, err)
}
nent.Key = k
default:
return fmt.Errorf("got unsupported APFS type %s", nent.Hdr.GetType())
}
r.Seek(int64(BLOCK_SIZE-uint64(valOffset)-40*uint64(n.Flags&1)), io.SeekStart) // val
if n.Level > 0 {
switch nent.Hdr.GetType() {
case APFS_TYPE_SNAP_METADATA:
case APFS_TYPE_SNAP_NAME:
case APFS_TYPE_EXTENT:
if err := binary.Read(r, binary.LittleEndian, &nent.PAddr); err != nil {
return fmt.Errorf("failed to read paddr_t: %v", err)
}
// TODO: make sure to read Obj for paddr later
case APFS_TYPE_INODE:
fallthrough
case APFS_TYPE_XATTR:
fallthrough
case APFS_TYPE_SIBLING_LINK:
fallthrough
case APFS_TYPE_DSTREAM_ID:
fallthrough
case APFS_TYPE_CRYPTO_STATE:
fallthrough
case APFS_TYPE_FILE_EXTENT:
fallthrough
case APFS_TYPE_DIR_REC:
fallthrough
case APFS_TYPE_DIR_STATS:
fallthrough
case APFS_TYPE_SIBLING_MAP:
fallthrough
case APFS_TYPE_FILE_INFO:
if n.Hashed() {
var v BTreeNodeIndexNodeValT
if err := binary.Read(r, binary.LittleEndian, &v); err != nil {
return fmt.Errorf("failed to read paddr_t: %v", err)
}
nent.Val = v
} else {
var v uint64
if err := binary.Read(r, binary.LittleEndian, &v); err != nil {
return fmt.Errorf("failed to read uint64: %v", err)
}
nent.Val = v
}
default:
return fmt.Errorf("got unsupported APFS type %s", nent.Hdr.GetType())
}
} else {
switch nent.Hdr.GetType() {
case APFS_TYPE_SNAP_METADATA:
var v j_snap_metadata_val
if err := binary.Read(r, binary.LittleEndian, &v.j_snap_metadata_val_t); err != nil {
return fmt.Errorf("failed to read %T: %v", v, err)
}
n := make([]byte, v.NameLen)
if err := binary.Read(r, binary.LittleEndian, &n); err != nil {
return fmt.Errorf("failed to read %T: %v", v, err)
}
v.Name = strings.Trim(string(n[:]), "\x00")
nent.Val = v
case APFS_TYPE_EXTENT:
var v j_phys_ext_val_t
if err := binary.Read(r, binary.LittleEndian, &v); err != nil {
return fmt.Errorf("failed to read %T: %v", v, err)
}
nent.Val = v
case APFS_TYPE_INODE:
var v JInodeVal
if err := binary.Read(r, binary.LittleEndian, &v.j_inode_val_t); err != nil {
return fmt.Errorf("failed to read %T: %v", v, err)
}
if nent.Offset.(KVLocT).Val.Len != uint16(binary.Size(j_inode_val_t{})) {
if err := binary.Read(r, binary.LittleEndian, &v.blob.xf_blob_t); err != nil {
return fmt.Errorf("failed to read %T: %v", v.blob, err)
}
hdrs := make([]x_field_t, v.blob.XfNumExts)
if err := binary.Read(r, binary.LittleEndian, &hdrs); err != nil {
return fmt.Errorf("failed to read %T: %v", hdrs, err)
}
for _, hdr := range hdrs {
switch hdr.XType {
case INO_EXT_TYPE_SNAP_XID:
var snapXID XidT
if err := binary.Read(r, binary.LittleEndian, &snapXID); err != nil {
return fmt.Errorf("failed to read xfield %T: %v", snapXID, err)
}
v.Xfields = append(v.Xfields, Xfield{
x_field_t: hdr,
Field: snapXID,
})
case INO_EXT_TYPE_DELTA_TREE_OID:
var dtreeOID OidT
if err := binary.Read(r, binary.LittleEndian, &dtreeOID); err != nil {
return fmt.Errorf("failed to read xfield %T: %v", dtreeOID, err)
}
v.Xfields = append(v.Xfields, Xfield{
x_field_t: hdr,
Field: dtreeOID,
})
case INO_EXT_TYPE_DOCUMENT_ID:
var docID uint32
if err := binary.Read(r, binary.LittleEndian, &docID); err != nil {
return fmt.Errorf("failed to read xfield %T: %v", docID, err)
}
v.Xfields = append(v.Xfields, Xfield{
x_field_t: hdr,
Field: docID,
})
case INO_EXT_TYPE_NAME:
n := make([]byte, hdr.XSize)
if err := binary.Read(r, binary.LittleEndian, &n); err != nil {
return fmt.Errorf("failed to read xfield %T: %v", n, err)
}
v.Xfields = append(v.Xfields, Xfield{
x_field_t: hdr,
Field: strings.Trim(string(n[:]), "\x00"),
})
case INO_EXT_TYPE_PREV_FSIZE:
var size uint64
if err := binary.Read(r, binary.LittleEndian, &size); err != nil {
return fmt.Errorf("failed to read xfield %T: %v", size, err)
}
v.Xfields = append(v.Xfields, Xfield{
x_field_t: hdr,
Field: size,
})
case INO_EXT_TYPE_DSTREAM:
var dstream JDstreamT
if err := binary.Read(r, binary.LittleEndian, &dstream); err != nil {
return fmt.Errorf("failed to read xfield %T: %v", dstream, err)
}
v.Xfields = append(v.Xfields, Xfield{
x_field_t: hdr,
Field: dstream,
})
case INO_EXT_TYPE_DIR_STATS_KEY:
var dirStats j_dir_stats_val_t
if err := binary.Read(r, binary.LittleEndian, &dirStats); err != nil {
return fmt.Errorf("failed to read xfield %T: %v", dirStats, err)
}
v.Xfields = append(v.Xfields, Xfield{
x_field_t: hdr,
Field: dirStats,
})
case INO_EXT_TYPE_FS_UUID:
var uuid UUID
if err := binary.Read(r, binary.LittleEndian, &uuid); err != nil {
return fmt.Errorf("failed to read xfield %T: %v", uuid, err)
}
v.Xfields = append(v.Xfields, Xfield{
x_field_t: hdr,
Field: uuid,
})
case INO_EXT_TYPE_SPARSE_BYTES:
var bs uint64
if err := binary.Read(r, binary.LittleEndian, &bs); err != nil {
return fmt.Errorf("failed to read xfield %T: %v", bs, err)
}
v.Xfields = append(v.Xfields, Xfield{
x_field_t: hdr,
Field: bs,
})
case INO_EXT_TYPE_RDEV:
var rdev uint32
if err := binary.Read(r, binary.LittleEndian, &rdev); err != nil {
return fmt.Errorf("failed to read xfield %T: %v", rdev, err)
}
v.Xfields = append(v.Xfields, Xfield{
x_field_t: hdr,
Field: rdev,
})
case INO_EXT_TYPE_ORIG_SYNC_ROOT_ID:
var inodeNum uint64
if err := binary.Read(r, binary.LittleEndian, &inodeNum); err != nil {
return fmt.Errorf("failed to read xfield %T: %v", inodeNum, err)
}
v.Xfields = append(v.Xfields, Xfield{
x_field_t: hdr,
Field: inodeNum,
})
case INO_EXT_TYPE_RESERVED_6:
fallthrough
case INO_EXT_TYPE_RESERVED_9:
fallthrough
case INO_EXT_TYPE_RESERVED_12:
fallthrough
case INO_EXT_TYPE_FINDER_INFO:
fallthrough
case INO_EXT_TYPE_PURGEABLE_FLAGS:
fallthrough
default:
dat := make([]byte, hdr.XSize)
if err := binary.Read(r, binary.LittleEndian, &dat); err != nil {
return fmt.Errorf("failed to read xfield %T: %v", n, err)
}
v.Xfields = append(v.Xfields, Xfield{
x_field_t: hdr,
Field: dat,
})
}
if ((8 - hdr.XSize) % 8) > 0 {
r.Seek(int64((8-hdr.XSize)%8), io.SeekCurrent) // 8 byte align
}
}
}
nent.Val = v
case APFS_TYPE_XATTR:
var v JXattrValT
if err := binary.Read(r, binary.LittleEndian, &v.Flags); err != nil {
return fmt.Errorf("failed to read %T: %v", v, err)
}
if err := binary.Read(r, binary.LittleEndian, &v.DataLen); err != nil {
return fmt.Errorf("failed to read %T: %v", v, err)
}
if v.Flags.DataEmbedded() {
dat := make([]byte, v.DataLen)
if err := binary.Read(r, binary.LittleEndian, &dat); err != nil {
return fmt.Errorf("failed to read %T: %v", v, err)
}
v.Data = dat
} else {
var val uint64
if err := binary.Read(r, binary.LittleEndian, &val); err != nil {
return fmt.Errorf("failed to read %T: %v", v, err)
}
v.Data = val
}
nent.Val = v
case APFS_TYPE_SIBLING_LINK:
var v SiblingValT
if err := binary.Read(r, binary.LittleEndian, &v.ParentID); err != nil {
return fmt.Errorf("failed to read %T: %v", v, err)
}
if err := binary.Read(r, binary.LittleEndian, &v.NameLen); err != nil {
return fmt.Errorf("failed to read %T: %v", v, err)
}
n := make([]byte, v.NameLen)
if err := binary.Read(r, binary.LittleEndian, &n); err != nil {
return fmt.Errorf("failed to read %T: %v", v, err)
}
v.Name = strings.Trim(string(n[:]), "\x00")
nent.Val = v
case APFS_TYPE_DSTREAM_ID:
var v j_dstream_id_val_t
if err := binary.Read(r, binary.LittleEndian, &v); err != nil {
return fmt.Errorf("failed to read %T: %v", v, err)
}
nent.Val = v
case APFS_TYPE_CRYPTO_STATE:
var v j_crypto_val_t
if err := binary.Read(r, binary.LittleEndian, &v.RefCount); err != nil {
return fmt.Errorf("failed to read %T RefCount: %v", v, err)
}
if err := binary.Read(r, binary.LittleEndian, &v.State.wrapped_crypto_state_t); err != nil {
return fmt.Errorf("failed to read %T wrapped_crypto_state_t: %v", v, err)
}
v.State.PersistentKey = make([]byte, v.State.KeyLen)
if err := binary.Read(r, binary.LittleEndian, &v.State.PersistentKey); err != nil {
return fmt.Errorf("failed to read %T PersistentKey: %v", v, err)
}
nent.Val = v
case APFS_TYPE_FILE_EXTENT:
var v JFileExtentValT
if err := binary.Read(r, binary.LittleEndian, &v); err != nil {
return fmt.Errorf("failed to read %T: %v", v, err)
}
nent.Val = v
case APFS_TYPE_DIR_REC:
var v JDrecVal
if err := binary.Read(r, binary.LittleEndian, &v.j_drec_val_t); err != nil {
return fmt.Errorf("failed to read %T: %v", v, err)
}
if nent.Offset.(KVLocT).Val.Len != uint16(binary.Size(j_drec_val_t{})) {
if err := binary.Read(r, binary.LittleEndian, &v.blob.xf_blob_t); err != nil {
return fmt.Errorf("failed to read %T: %v", v.blob, err)
}
hdrs := make([]x_field_t, v.blob.XfNumExts)
if err := binary.Read(r, binary.LittleEndian, &hdrs); err != nil {
return fmt.Errorf("failed to read %T: %v", hdrs, err)
}
for _, hdr := range hdrs {
switch hdr.XType {
case INO_EXT_TYPE_SNAP_XID:
var snapXID XidT
if err := binary.Read(r, binary.LittleEndian, &snapXID); err != nil {
return fmt.Errorf("failed to read xfield %T: %v", snapXID, err)
}
v.Xfields = append(v.Xfields, Xfield{
x_field_t: hdr,
Field: snapXID,
})
case INO_EXT_TYPE_DELTA_TREE_OID:
var dtreeOID OidT
if err := binary.Read(r, binary.LittleEndian, &dtreeOID); err != nil {
return fmt.Errorf("failed to read xfield %T: %v", dtreeOID, err)
}
v.Xfields = append(v.Xfields, Xfield{
x_field_t: hdr,
Field: dtreeOID,
})
case INO_EXT_TYPE_DOCUMENT_ID:
var docID uint32
if err := binary.Read(r, binary.LittleEndian, &docID); err != nil {
return fmt.Errorf("failed to read xfield %T: %v", docID, err)
}
v.Xfields = append(v.Xfields, Xfield{
x_field_t: hdr,
Field: docID,
})
case INO_EXT_TYPE_NAME:
n := make([]byte, hdr.XSize)
if err := binary.Read(r, binary.LittleEndian, &n); err != nil {
return fmt.Errorf("failed to read xfield %T: %v", n, err)
}
v.Xfields = append(v.Xfields, Xfield{
x_field_t: hdr,
Field: strings.Trim(string(n[:]), "\x00"),
})
case INO_EXT_TYPE_PREV_FSIZE:
var size uint64
if err := binary.Read(r, binary.LittleEndian, &size); err != nil {
return fmt.Errorf("failed to read xfield %T: %v", size, err)
}
v.Xfields = append(v.Xfields, Xfield{
x_field_t: hdr,
Field: size,
})
case INO_EXT_TYPE_DSTREAM:
var dstream JDstreamT
if err := binary.Read(r, binary.LittleEndian, &dstream); err != nil {
return fmt.Errorf("failed to read xfield %T: %v", dstream, err)
}
v.Xfields = append(v.Xfields, Xfield{
x_field_t: hdr,
Field: dstream,
})
case INO_EXT_TYPE_DIR_STATS_KEY:
var dirStats j_dir_stats_val_t
if err := binary.Read(r, binary.LittleEndian, &dirStats); err != nil {
return fmt.Errorf("failed to read xfield %T: %v", dirStats, err)
}
v.Xfields = append(v.Xfields, Xfield{
x_field_t: hdr,
Field: dirStats,
})
case INO_EXT_TYPE_FS_UUID:
var uuid UUID
if err := binary.Read(r, binary.LittleEndian, &uuid); err != nil {
return fmt.Errorf("failed to read xfield %T: %v", uuid, err)
}
v.Xfields = append(v.Xfields, Xfield{
x_field_t: hdr,
Field: uuid,
})
case INO_EXT_TYPE_SPARSE_BYTES:
var bs uint64
if err := binary.Read(r, binary.LittleEndian, &bs); err != nil {
return fmt.Errorf("failed to read xfield %T: %v", bs, err)
}
v.Xfields = append(v.Xfields, Xfield{
x_field_t: hdr,
Field: bs,
})
case INO_EXT_TYPE_RDEV:
var rdev uint32
if err := binary.Read(r, binary.LittleEndian, &rdev); err != nil {
return fmt.Errorf("failed to read xfield %T: %v", rdev, err)
}
v.Xfields = append(v.Xfields, Xfield{
x_field_t: hdr,
Field: rdev,
})
case INO_EXT_TYPE_ORIG_SYNC_ROOT_ID:
var inodeNum uint64
if err := binary.Read(r, binary.LittleEndian, &inodeNum); err != nil {
return fmt.Errorf("failed to read xfield %T: %v", inodeNum, err)
}
v.Xfields = append(v.Xfields, Xfield{
x_field_t: hdr,
Field: inodeNum,
})
case INO_EXT_TYPE_RESERVED_6:
fallthrough
case INO_EXT_TYPE_RESERVED_9:
fallthrough
case INO_EXT_TYPE_RESERVED_12:
fallthrough
case INO_EXT_TYPE_FINDER_INFO:
fallthrough
case INO_EXT_TYPE_PURGEABLE_FLAGS:
fallthrough
default:
dat := make([]byte, hdr.XSize)
if err := binary.Read(r, binary.LittleEndian, &dat); err != nil {
return fmt.Errorf("failed to read xfield %T: %v", n, err)
}
v.Xfields = append(v.Xfields, Xfield{
x_field_t: hdr,
Field: dat,
})
}
if ((8 - hdr.XSize) % 8) > 0 {
r.Seek(int64((8-hdr.XSize)%8), io.SeekCurrent) // 8 byte align
}
}
}
nent.Val = v
case APFS_TYPE_DIR_STATS:
var v j_dir_stats_val_t
if err := binary.Read(r, binary.LittleEndian, &v); err != nil {
return fmt.Errorf("failed to read %T: %v", v, err)
}
nent.Val = v
case APFS_TYPE_SNAP_NAME:
var v j_snap_name_val_t
if err := binary.Read(r, binary.LittleEndian, &v); err != nil {
return fmt.Errorf("failed to read %T: %v", v, err)
}
nent.Val = v
case APFS_TYPE_SIBLING_MAP:
var v SiblingMapValT
if err := binary.Read(r, binary.LittleEndian, &v); err != nil {
return fmt.Errorf("failed to read %T: %v", v, err)
}
nent.Val = v
case APFS_TYPE_FILE_INFO:
var v j_file_info_val_t
if err := binary.Read(r, binary.LittleEndian, &v); err != nil {
return fmt.Errorf("failed to read %T: %v", v, err)
}
nent.Val = v
default:
return fmt.Errorf("got unsupported APFS type %s", nent.Hdr.GetType())
}
}
n.Entries = append(n.Entries, nent)
r.Seek(pos, io.SeekStart) // reset reader to right after we read the offsets
return nil
}
// GetOMapEntry walks the object-map B-tree rooted at n and returns the entry
// for oid whose transaction ID is the newest one not exceeding maxXid.
// Entries are assumed sorted by (oid, xid).
//
// Fixes: a child object that fails to decode as a B-tree node now returns an
// error instead of looping forever with an unchanged node, and errors
// identify the child by its physical address (the old "entry %d" index was
// never assigned and always printed 0).
func (n *BTreeNodePhys) GetOMapEntry(r io.ReaderAt, oid OidT, maxXid XidT) (*OMapNodeEntry, error) {
	var tocEntry OMapNodeEntry
	node := n
	for {
		// Find the last entry with Key.Oid <= oid and Key.Xid <= maxXid.
		for idx, entry := range node.Entries {
			tocEntry = entry.(OMapNodeEntry)
			if tocEntry.Key.Oid > oid || (tocEntry.Key.Oid == oid && tocEntry.Key.Xid > maxXid) {
				// Overshot: go back one entry.
				idx--
				if idx < 0 {
					return nil, fmt.Errorf("no matching records exist in this B-tree")
				}
				tocEntry = node.Entries[idx].(OMapNodeEntry)
				break
			}
		}
		// Leaf node: the candidate entry must match exactly.
		if node.IsLeaf() {
			if tocEntry.Key.Oid != oid || tocEntry.Key.Xid > maxXid {
				return nil, fmt.Errorf("no matching records exist in this B-tree")
			}
			return &tocEntry, nil
		}
		// Interior node: descend into the child at the entry's physical address.
		o, err := ReadObj(r, uint64(tocEntry.PAddr))
		if err != nil {
			return nil, fmt.Errorf("failed to read child node at paddr %#x: %v", tocEntry.PAddr, err)
		}
		child, ok := o.Body.(BTreeNodePhys)
		if !ok {
			// Previously a failed assertion silently left `node` unchanged,
			// causing an infinite loop.
			return nil, fmt.Errorf("child at paddr %#x is not a B-tree node (got %T)", tocEntry.PAddr, o.Body)
		}
		node = &child
	}
}
// GetFSRecordsForOid returns an array of all the file-system records with a given Virtual OID from a given file-system root tree.
//
// The algorithm runs in two phases:
//  1. a descent loop that walks from the root to the leaf level, recording in
//     descPath (one entry index per level, root first) the path to the first
//     record with the desired OID;
//  2. a walk loop that repeatedly re-descends from the root following
//     descPath, collecting matching leaf records and advancing descPath until
//     a non-matching record (or the end of the tree) is reached.
func (n *BTreeNodePhys) GetFSRecordsForOid(r io.ReaderAt, volFsRootNode BTreeNodePhys, oid OidT, maxXid XidT) (FSRecords, error) {
	var records FSRecords
	var tocEntry NodeEntry

	// descPath[i] is the index of the entry chosen at tree level i.
	treeHeight := volFsRootNode.Level + 1
	descPath := make([]uint32, treeHeight)

	node := volFsRootNode
	// Descent loop: find the path to the first record with the desired OID.
	for i := uint16(0); i < treeHeight; i++ {
		for idx, entry := range node.Entries {
			tocEntry = entry.(NodeEntry)
			log.Debugf("%2d) %s", idx, tocEntry)
			if node.IsLeaf() {
				if tocEntry.Hdr.GetID() == uint64(oid) {
					/**
					 * This is the first matching record, and `desc_path`
					 * now describes the path to it in the tree.
					 */
					break
				}
				if tocEntry.Hdr.GetID() > uint64(oid) {
					/**
					 * If a record with the desired OID existed, we would've
					 * encountered it by now, so no such records exist.
					 */
					return nil, fmt.Errorf("no records exist for oid=%#x", oid)
				}
				descPath[i]++
				continue
			}
			if tocEntry.Hdr.GetID() >= uint64(oid) {
				if descPath[i] != 0 {
					/**
					 * We've encountered the first entry in this non-leaf node
					 * whose key states an OID that is greater than or equal to the
					 * desired OID. Thus, if this *isn't* the first entry in this
					 * node, we descend the previous entry, as a record with the
					 * desired OID may exist in that sub-tree.
					 */
					descPath[i]--
					idx--
					tocEntry = node.Entries[idx].(NodeEntry)
					break
				}
				if tocEntry.Hdr.GetID() == uint64(oid) {
					/**
					 * However, if this *is* the first entry in this node, we only
					 * descend it if its key's stated OID matches the desired OID;
					 * else it exceeds the desired OID, and thus no records with the
					 * desired OID exist *in the whole tree*.
					 */
					break
				}
				return nil, fmt.Errorf("no such records exist for oid=%#x", oid)
			}
			descPath[i]++
		}
		/**
		 * One of the following is now true about `toc_entry`:
		 *
		 * (a) it points directly after the last TOC entry, in which case:
		 *     (i)  if this is a leaf node, we're looking at it because the
		 *          first record in the *next* leaf node has the desired
		 *          OID, or no records with the desired OID exist in the
		 *          whole tree. We just break from the descent loop, and the
		 *          walk loop will handle the current value of `desc_path`
		 *          correctly.
		 *     (ii) if this is a non-leaf node, we should descend the last
		 *          entry.
		 * (b) it points to the correct entry to descend.
		 */
		/**
		 * If this is a leaf node, then we have finished descending the tree,
		 * and `desc_path` describes the path to the first record with the
		 * desired OID. We break from this while-loop (the descent loop) and
		 * enter the next while-loop (the walk loop), which should behave
		 * correctly based on the value of `desc_path`.
		 *
		 * This handles case (a)(i) above, and also case (b) when we're looking
		 * at a leaf node.
		 */
		if node.IsLeaf() {
			break
		}
		/** Convert case (a)(ii) to case (b) */
		if descPath[i] >= node.Nkeys {
			descPath[i] = node.Nkeys - 1
		}
		// get child node: the entry's value is the child's virtual OID, which is
		// resolved to a physical address through the object map.
		childNodeOmapEntry, err := n.GetOMapEntry(r, OidT(tocEntry.Val.(uint64)), maxXid)
		if err != nil {
			return nil, fmt.Errorf("failed to get omap entry for oid %#x: %v", tocEntry.Val.(uint64), err)
		}
		nodeObj, err := ReadObj(r, childNodeOmapEntry.Val.Paddr)
		if err != nil {
			return nil, fmt.Errorf("failed to read child node: %v", err)
		}
		node = nodeObj.Body.(BTreeNodePhys)
	}
	// Walk loop: re-descend from the root using descPath and collect records.
	for {
		node = volFsRootNode
		for i := uint16(0); i < treeHeight; i++ {
			/**
			 * If `desc_path[i]` isn't a valid entry index in this node, that
			 * means we've already looked at all the entries in this node, and
			 * should look at the next node on this level.
			 */
			if descPath[i] >= node.Nkeys {
				/**
				 * If this is a root node, then there are no other nodes on this
				 * level; we've gone through the whole tree, return the results.
				 */
				if node.IsRoot() {
					return records, nil
				}
				/**
				 * Else, we adjust the value of `desc_path` so that it refers
				 * to the leftmost descendant of the next node on this level.
				 * We then break from this for-loop so that we loop inside the
				 * while-loop (the walk loop), which will result in us making
				 * a new descent from the root based on the new value of
				 * `desc_path`.
				 *
				 * Note: i > 0 here, since at i == 0 the node is the root and the
				 * IsRoot() branch above has already returned.
				 */
				descPath[i-1]++
				for j := uint16(i); j < treeHeight; j++ {
					descPath[j] = 0
				}
				break
			}
			/**
			 * Handle leaf nodes:
			 * The entry we're looking at is the next record, so add it to the
			 * records array, then adjust `desc_path` and loop.
			 */
			if node.IsLeaf() {
				for idx := descPath[i]; idx < node.Nkeys; idx++ {
					tocEntry = node.Entries[idx].(NodeEntry)
					log.Debugf("%2d) %s", idx, tocEntry)
					if tocEntry.Hdr.GetID() != uint64(oid) {
						// This record doesn't have the right OID, so we must have
						// found all of the relevant records; return the results
						return records, nil
					}
					records = append(records, tocEntry)
					descPath[i]++
				}
				/**
				 * We've run off the end of this leaf node, and `desc_path` now
				 * refers to the first record of the next leaf node.
				 * Loop so that we correctly make a new descent to that record
				 * from the root node.
				 */
				break
			}
			tocEntry = node.Entries[descPath[i]].(NodeEntry)
			// get child node (virtual OID resolved through the object map)
			childNodeOmapEntry, err := n.GetOMapEntry(r, OidT(tocEntry.Val.(uint64)), maxXid)
			if err != nil {
				return nil, fmt.Errorf("failed to get omap entry for oid %#x: %v", tocEntry.Val.(uint64), err)
			}
			nodeObj, err := ReadObj(r, childNodeOmapEntry.Val.Paddr)
			if err != nil {
				return nil, fmt.Errorf("failed to read child node: %v", err)
			}
			node = nodeObj.Body.(BTreeNodePhys)
		}
	}
}
package gorgonia
import (
"fmt"
"hash"
"log"
"math"
"github.com/chewxy/hm"
"github.com/chewxy/math32"
"gorgonia.org/tensor"
)
// Reduction selects how the per-sample CTC losses are combined into a single
// scalar.
type Reduction uint

const (
	// ReductionMean averages the per-sample losses (each divided by its
	// target length, clamped to at least 1) over the batch.
	ReductionMean Reduction = iota
	// ReductionSum sums the per-sample losses.
	ReductionSum
)
// CTCLoss computes the Connectionist Temporal Classification loss over
// logProbs for the given targets.
// This is the implementation of the following paper: http://www.cs.toronto.edu/~graves/icml_2006.pdf
func CTCLoss(logProbs, targets, inputLengths, targetLengths *Node, reduction Reduction) (*Node, error) {
	lossOp := newCTCLossOp(logProbs.Dtype(), targets.Shape().Dims(), reduction)

	out, err := ApplyOp(lossOp, logProbs, targets, inputLengths, targetLengths)
	if err != nil {
		return nil, err
	}
	return out, nil
}
// ctcLossOp implements the forward pass of the CTC loss.
type ctcLossOp struct {
	dtype      tensor.Dtype // element type of the log-probabilities input
	targetDims int          // dimensionality of the targets tensor
	reduction  Reduction    // how per-sample losses are combined

	// Intermediate results cached by the forward pass (f64s/f32s) for reuse
	// in the backward pass (ctcLossDiffOp).
	logAlpha         *tensor.Dense
	negLogLikelihood *tensor.Dense
}
// newCTCLossOp builds a ctcLossOp for the given dtype, target dimensionality
// and reduction mode.
func newCTCLossOp(dtype tensor.Dtype, targetDims int, reduction Reduction) *ctcLossOp {
	return &ctcLossOp{
		dtype:      dtype,
		targetDims: targetDims,
		reduction:  reduction,
	}
}
// Arity returns the number of inputs the op expects: logProbs, targets,
// inputLengths and targetLengths.
func (op *ctcLossOp) Arity() int { return 4 }

// ReturnsPtr indicates whether the op returns a pointer to its result.
func (op *ctcLossOp) ReturnsPtr() bool { return false }

// CallsExtern indicates whether the op calls out to external code.
func (op *ctcLossOp) CallsExtern() bool { return false }

// WriteHash writes a stable identifier of the op into h.
func (op *ctcLossOp) WriteHash(h hash.Hash) {
	fmt.Fprintf(h, "CTCLoss{}()")
}

// Hashcode returns a hash of the op, derived from WriteHash.
func (op *ctcLossOp) Hashcode() uint32 { return simpleHash(op) }
// String implements fmt.Stringer.
func (op *ctcLossOp) String() string {
	// fmt.Sprintf with a constant format string and no arguments is
	// unnecessary (flagged by staticcheck S1039); return the constant.
	return "CTCLoss{}()"
}
// InferShape returns the shape of the op's output: a scalar (empty shape).
func (op *ctcLossOp) InferShape(inputs ...DimSizer) (tensor.Shape, error) {
	return tensor.Shape{}, nil
}
// Type returns the op's function type:
// (logProbs, targets, inputLengths, targetLengths) -> scalar of op.dtype.
func (op *ctcLossOp) Type() hm.Type {
	a := hm.TypeVariable('a')                      // log-probabilities tensor
	b := makeTensorType(op.targetDims, tensor.Int) // targets
	c := makeTensorType(1, tensor.Int)             // input and target lengths
	d := op.dtype                                  // scalar result
	return hm.NewFnType(a, b, c, c, d)
}
// OverwritesInput returns -1: the op does not overwrite any of its inputs.
func (op *ctcLossOp) OverwritesInput() int { return -1 }
// getPrimeTarget maps an index in the "primed" target sequence
// (blank, t1, blank, t2, ..., blank) to the corresponding label: even indices
// are the blank label (0), odd indices pull the label from the flattened
// targets slice at the given offset/stride.
func (op *ctcLossOp) getPrimeTarget(targets []int, offset, stride, idx int) int {
	q, r := divmod(idx, 2)
	if r != 0 {
		return targets[offset+stride*q]
	}
	return 0 // blank label
}
// UsePreallocDo performs the CTC loss calculation, writing the scalar result
// into prealloc.
//
// inputs are, in order: logProbs (float tensor), targets (Int tensor),
// inputLengths (Int tensor) and targetLengths (Int tensor).
func (op *ctcLossOp) UsePreallocDo(prealloc Value, inputs ...Value) (Value, error) {
	if err := checkArity(op, len(inputs)); err != nil {
		return nil, err
	}
	logProbsT := inputs[0].(*tensor.Dense)
	targetsT := inputs[1].(*tensor.Dense)

	if targetsT.Dtype() != tensor.Int {
		return nil, fmt.Errorf("invalid type %v for targets. it should be Int", targetsT.Dtype())
	}

	inputLengthsT := inputs[2].(*tensor.Dense)
	if inputLengthsT.Dtype() != tensor.Int {
		return nil, fmt.Errorf("invalid type %v for inputLengths. it should be Int", inputLengthsT.Dtype())
	}

	targetLengthsT := inputs[3].(*tensor.Dense)
	if targetLengthsT.Dtype() != tensor.Int {
		// BUG FIX: this error message previously said "inputLengths" even
		// though it reports a bad targetLengths tensor.
		return nil, fmt.Errorf("invalid type %v for targetLengths. it should be Int", targetLengthsT.Dtype())
	}

	// Dispatch on the element type of the log-probabilities.
	var err error
	switch logProbsT.Dtype() {
	case Float64:
		err = op.f64s(logProbsT, prealloc.(*tensor.Dense), targetsT, inputLengthsT, targetLengthsT)
	case Float32:
		err = op.f32s(logProbsT, prealloc.(*tensor.Dense), targetsT, inputLengthsT, targetLengthsT)
	default:
		return nil, nyi("CTCLoss Do", logProbsT.Dtype())
	}

	return prealloc, err
}
// f64s computes the float64 CTC loss for the whole batch, storing the reduced
// scalar loss into prealloc and caching the forward lattice (logAlpha) and
// the per-sample negative log-likelihoods on the op for the backward pass.
//
// logProbsT has shape (inputSize, batchSize, numLabels) — assumed to hold
// log-probabilities with label 0 as the CTC blank (TODO confirm against
// callers). targetsT may be 1-D (concatenated targets) or 2-D (batch-major).
func (op *ctcLossOp) f64s(logProbsT, prealloc, targetsT, inputLengthsT, targetLengthsT *tensor.Dense) error {
	targets := targetsT.Ints()
	targetLengths := targetLengthsT.Ints()
	inputLengths := inputLengthsT.Ints()

	inputSize := logProbsT.Shape()[0]  // rows
	batchSize := logProbsT.Shape()[1]  // blocks
	numLabels := logProbsT.Shape()[2]  // columns
	spatialDim := inputSize * numLabels

	maxTargetLength := 0
	targetStride := 0

	// targetBatchOffsets[b] is the offset of sample b's targets in the
	// flattened targets slice.
	targetBatchOffsets := make([]int, batchSize)

	if targetsT.Dims() == 1 {
		// 1-D layout: all targets concatenated, lengths given per sample.
		pos := 0
		for i := 0; i < batchSize; i++ {
			targetBatchOffsets[i] = pos
			pos += targetLengths[i]
			if maxTargetLength < targetLengths[i] {
				maxTargetLength = targetLengths[i]
			}
		}
		targetStride = targetsT.Strides()[0]
	} else {
		// 2-D layout: one row of targets per sample.
		batchStride := targetsT.Strides()[0]
		for i := 0; i < batchSize; i++ {
			targetBatchOffsets[i] = i * batchStride
			if maxTargetLength < targetLengths[i] {
				maxTargetLength = targetLengths[i]
			}
		}
		targetStride = targetsT.Strides()[1]
	}

	// Every per-sample input length must fit within the time dimension.
	maxInputLength := logProbsT.Shape()[0]
	for i := 0; i < batchSize; i++ {
		if inputLengths[i] > maxInputLength {
			return fmt.Errorf("expected inputLengths to have value at most %v, but got %v", maxInputLength, inputLengths[i])
		}
	}

	negInf := math.Inf(-1)

	// The alpha lattice has one column per entry of the "primed" target
	// sequence (blank, t1, blank, ..., blank): width 2*maxTargetLength+1.
	logAlphaWidth := 2*maxTargetLength + 1
	logAlpha := tensor.New(
		tensor.Of(logProbsT.Dtype()),
		tensor.WithShape(batchSize, logProbsT.Shape()[0], logAlphaWidth),
	)
	logAlphaSpatialDim := tensor.Shape(logAlpha.Shape()[1:]).TotalSize()

	// Initialize the first time step of the lattice to -Inf (log 0).
	logAlphaView, err := logAlpha.Narrow(1, 0, 1)
	if err != nil {
		return err
	}
	if err := logAlphaView.Memset(negInf); err != nil {
		return err
	}

	negLogLikelihood := tensor.New(
		tensor.Of(logProbsT.Dtype()),
		tensor.WithShape(batchSize),
	)

	// Reorder to (batch, time, label) so each sample is contiguous.
	lpp, err := tensor.Transpose(logProbsT, 1, 0, 2)
	if err != nil {
		return err
	}

	logAlphaA := logAlpha.Float64s()
	lppA := lpp.(*tensor.Dense).Float64s()
	negLogLikelihoodA := negLogLikelihood.Float64s()

	// Forward (alpha) recursion, one unit of work per batch element.
	runInParallel(0, batchSize, func(b int) {
		inputLength := inputLengths[b]
		targetLength := targetLengths[b]
		targetWidth := 2*targetLength + 1
		targetsOffset := targetBatchOffsets[b]
		initialIndex := b * spatialDim
		finalIndex := (b + 1) * spatialDim

		lppSection := lppA[initialIndex:finalIndex]

		initialLogAlphaIndex := b * logAlphaSpatialDim
		finalLogAlphaIndex := (b + 1) * logAlphaSpatialDim

		logAlphaSection := logAlphaA[initialLogAlphaIndex:finalLogAlphaIndex]

		// t = 0: paths may start at the leading blank or the first label.
		logAlphaSection[0] = lppSection[0]

		if targetLength > 0 {
			logAlphaSection[1] = lppSection[op.getPrimeTarget(targets, targetsOffset, targetStride, 1)]
		}

		for t := 1; t < inputLength; t++ {
			for s := 0; s < targetWidth; s++ {
				currentTargetPrime := op.getPrimeTarget(targets, targetsOffset, targetStride, s)

				// Log-sum-exp over the up-to-three allowed predecessors
				// (s, s-1, and s-2 when labels differ), stabilized by lamax.
				i := (t-1)*(targetWidth) + s
				la1 := logAlphaSection[i]

				lamax := la1

				var la2, la3 float64

				if s > 0 {
					la2 = logAlphaSection[i-1]
					if la2 > lamax {
						lamax = la2
					}
				} else {
					la2 = negInf
				}

				if s > 1 && op.getPrimeTarget(targets, targetsOffset, targetStride, s-2) != currentTargetPrime {
					la3 = logAlphaSection[i-2]
					if la3 > lamax {
						lamax = la3
					}
				} else {
					la3 = negInf
				}

				if lamax == negInf { // all predecessors impossible
					lamax = 0
				}

				logAlphaSection[t*targetWidth+s] = math.Log(math.Exp(la1-lamax)+math.Exp(la2-lamax)+math.Exp(la3-lamax)) + lamax + lppSection[t*numLabels+currentTargetPrime]
			}
		}

		if targetLength == 0 {
			// Empty target: the only valid path is all blanks.
			negLogLikelihoodA[b] = logAlphaSection[(inputLength-1)*targetWidth]
		} else {
			// Valid paths end either at the last label or the trailing blank;
			// combine the two with a stabilized log-sum-exp.
			l1 := logAlphaSection[(inputLength-1)*targetWidth+targetLength*2]
			l2 := logAlphaSection[(inputLength-1)*targetWidth+targetLength*2-1]
			max := l1
			if l2 > max {
				max = l2
			}

			if max == negInf {
				max = 0
			}

			logLikelihood := math.Log(math.Exp(l1-max)+math.Exp(l2-max)) + max
			negLogLikelihoodA[b] = -logLikelihood
		}
	})

	// Reduce the per-sample losses into a scalar.
	loss := 0.0

	for i, v := range targetLengths {
		if op.reduction == ReductionSum {
			loss += negLogLikelihoodA[i]
		} else {
			if v < 1 {
				v = 1 // avoid division by zero for empty targets
			}

			loss += negLogLikelihoodA[i] / float64(v)
		}
	}

	if op.reduction == ReductionMean {
		loss /= float64(len(targetLengths))
	}

	prealloc.Set(0, loss)

	// Cache intermediates for the backward pass.
	op.logAlpha = logAlpha
	op.negLogLikelihood = negLogLikelihood

	return nil
}
// f32s is the float32 counterpart of f64s: it computes the CTC loss for the
// batch, stores the reduced scalar loss into prealloc, and caches logAlpha
// and negLogLikelihood on the op for the backward pass. See f64s for the
// detailed commentary; the two must be kept in sync.
func (op *ctcLossOp) f32s(logProbsT, prealloc, targetsT, inputLengthsT, targetLengthsT *tensor.Dense) error {
	targets := targetsT.Ints()
	targetLengths := targetLengthsT.Ints()
	inputLengths := inputLengthsT.Ints()

	inputSize := logProbsT.Shape()[0]  // rows
	batchSize := logProbsT.Shape()[1]  // blocks
	numLabels := logProbsT.Shape()[2]  // columns
	spatialDim := inputSize * numLabels

	maxTargetLength := 0
	targetStride := 0

	// targetBatchOffsets[b] is the offset of sample b's targets in the
	// flattened targets slice.
	targetBatchOffsets := make([]int, batchSize)

	if targetsT.Dims() == 1 {
		// 1-D layout: all targets concatenated, lengths given per sample.
		pos := 0
		for i := 0; i < batchSize; i++ {
			targetBatchOffsets[i] = pos
			pos += targetLengths[i]
			if maxTargetLength < targetLengths[i] {
				maxTargetLength = targetLengths[i]
			}
		}
		targetStride = targetsT.Strides()[0]
	} else {
		// 2-D layout: one row of targets per sample.
		batchStride := targetsT.Strides()[0]
		for i := 0; i < batchSize; i++ {
			targetBatchOffsets[i] = i * batchStride
			if maxTargetLength < targetLengths[i] {
				maxTargetLength = targetLengths[i]
			}
		}
		targetStride = targetsT.Strides()[1]
	}

	// Every per-sample input length must fit within the time dimension.
	maxInputLength := logProbsT.Shape()[0]
	for i := 0; i < batchSize; i++ {
		if inputLengths[i] > maxInputLength {
			return fmt.Errorf("expected inputLengths to have value at most %v, but got %v", maxInputLength, inputLengths[i])
		}
	}

	negInf := math32.Inf(-1)

	// Alpha lattice width: one column per entry of the primed target sequence.
	logAlphaWidth := 2*maxTargetLength + 1
	logAlpha := tensor.New(
		tensor.Of(logProbsT.Dtype()),
		tensor.WithShape(batchSize, logProbsT.Shape()[0], logAlphaWidth),
	)
	logAlphaSpatialDim := tensor.Shape(logAlpha.Shape()[1:]).TotalSize()

	// Initialize the first time step of the lattice to -Inf (log 0).
	logAlphaView, err := logAlpha.Narrow(1, 0, 1)
	if err != nil {
		return err
	}
	if err := logAlphaView.Memset(negInf); err != nil {
		return err
	}

	negLogLikelihood := tensor.New(
		tensor.Of(logProbsT.Dtype()),
		tensor.WithShape(batchSize),
	)

	// Reorder to (batch, time, label) so each sample is contiguous.
	lpp, err := tensor.Transpose(logProbsT, 1, 0, 2)
	if err != nil {
		return err
	}

	logAlphaA := logAlpha.Float32s()
	lppA := lpp.(*tensor.Dense).Float32s()
	negLogLikelihoodA := negLogLikelihood.Float32s()

	// Forward (alpha) recursion, one unit of work per batch element.
	runInParallel(0, batchSize, func(b int) {
		inputLength := inputLengths[b]
		targetLength := targetLengths[b]
		targetWidth := 2*targetLength + 1
		targetsOffset := targetBatchOffsets[b]
		initialIndex := b * spatialDim
		finalIndex := (b + 1) * spatialDim

		lppSection := lppA[initialIndex:finalIndex]

		initialLogAlphaIndex := b * logAlphaSpatialDim
		finalLogAlphaIndex := (b + 1) * logAlphaSpatialDim

		logAlphaSection := logAlphaA[initialLogAlphaIndex:finalLogAlphaIndex]

		// t = 0: paths may start at the leading blank or the first label.
		logAlphaSection[0] = lppSection[0]

		if targetLength > 0 {
			logAlphaSection[1] = lppSection[op.getPrimeTarget(targets, targetsOffset, targetStride, 1)]
		}

		for t := 1; t < inputLength; t++ {
			for s := 0; s < targetWidth; s++ {
				currentTargetPrime := op.getPrimeTarget(targets, targetsOffset, targetStride, s)

				// Log-sum-exp over the up-to-three allowed predecessors,
				// stabilized by lamax.
				i := (t-1)*(targetWidth) + s
				la1 := logAlphaSection[i]

				lamax := la1

				var la2, la3 float32

				if s > 0 {
					la2 = logAlphaSection[i-1]
					if la2 > lamax {
						lamax = la2
					}
				} else {
					la2 = negInf
				}

				if s > 1 && op.getPrimeTarget(targets, targetsOffset, targetStride, s-2) != currentTargetPrime {
					la3 = logAlphaSection[i-2]
					if la3 > lamax {
						lamax = la3
					}
				} else {
					la3 = negInf
				}

				if lamax == negInf { // all predecessors impossible
					lamax = 0
				}

				logAlphaSection[t*targetWidth+s] = math32.Log(math32.Exp(la1-lamax)+math32.Exp(la2-lamax)+math32.Exp(la3-lamax)) + lamax + lppSection[t*numLabels+currentTargetPrime]
			}
		}

		if targetLength == 0 {
			// Empty target: the only valid path is all blanks.
			negLogLikelihoodA[b] = logAlphaSection[(inputLength-1)*targetWidth]
		} else {
			// Valid paths end at the last label or the trailing blank.
			l1 := logAlphaSection[(inputLength-1)*targetWidth+targetLength*2]
			l2 := logAlphaSection[(inputLength-1)*targetWidth+targetLength*2-1]
			max := l1
			if l2 > max {
				max = l2
			}

			if max == negInf {
				max = 0
			}

			logLikelihood := math32.Log(math32.Exp(l1-max)+math32.Exp(l2-max)) + max
			negLogLikelihoodA[b] = -logLikelihood
		}
	})

	// Reduce the per-sample losses into a scalar.
	loss := float32(0.0)

	for i, v := range targetLengths {
		if op.reduction == ReductionSum {
			loss += negLogLikelihoodA[i]
		} else {
			if v < 1 {
				v = 1 // avoid division by zero for empty targets
			}

			loss += negLogLikelihoodA[i] / float32(v)
		}
	}

	if op.reduction == ReductionMean {
		loss /= float32(len(targetLengths))
	}

	prealloc.Set(0, loss)

	// Cache intermediates for the backward pass.
	op.logAlpha = logAlpha
	op.negLogLikelihood = negLogLikelihood

	return nil
}
// Do allocates a scalar output tensor of the input's dtype and delegates the
// actual computation to UsePreallocDo.
func (op *ctcLossOp) Do(inputs ...Value) (retVal Value, err error) {
	lp := inputs[0].(*tensor.Dense)

	out := tensor.New(
		tensor.Of(lp.Dtype()),
		tensor.WithShape(),
	)

	return op.UsePreallocDo(out, inputs...)
}
// SymDiff applies the diff op. Implementation for SDOp interface.
// Only the first input (logProbs) receives a gradient node; the remaining
// inputs are integer tensors and get nil gradients.
func (op *ctcLossOp) SymDiff(inputs Nodes, output, grad *Node) (Nodes, error) {
	err := checkArity(op, len(inputs))
	if err != nil {
		return nil, err
	}

	logProbs := inputs[0]
	targets := inputs[1]
	inputLengths := inputs[2]
	targetLengths := inputs[3]

	diffOp := &ctcLossDiffOp{op}

	// NOTE(review): five gradient slots are returned for four inputs, matching
	// the five-element DiffWRT below — confirm against the SDOp contract.
	ret, err := ApplyOp(diffOp, logProbs, targets, inputLengths, targetLengths, grad)

	return Nodes{ret, nil, nil, nil, nil}, err
}
// DiffWRT is an implementation for the SDOp interface.
// Only the first argument (logProbs) is differentiable.
func (op *ctcLossOp) DiffWRT(inputs int) []bool {
	return []bool{true, false, false, false, false}
}
// ctcLossDiffOp computes the gradient of the CTC loss with respect to the
// log-probabilities, reusing the logAlpha/negLogLikelihood tensors cached on
// the embedded forward op by f64s/f32s.
type ctcLossDiffOp struct {
	*ctcLossOp
}
// Arity returns the number of inputs: the four forward inputs plus the
// incoming gradient.
func (op *ctcLossDiffOp) Arity() int { return 5 }
// WriteHash writes a stable identifier of the op into h.
func (op *ctcLossDiffOp) WriteHash(h hash.Hash) {
	// Use Fprint rather than Fprintf: passing a non-constant string as a
	// format string is flagged by `go vet` and would misbehave if the string
	// ever contained '%' verbs. Fprint writes it verbatim.
	fmt.Fprint(h, op.String())
}
// Hashcode returns a hash of the op, derived from WriteHash.
func (op *ctcLossDiffOp) Hashcode() uint32 { return simpleHash(op) }
// String implements fmt.Stringer.
func (op *ctcLossDiffOp) String() string {
	// fmt.Sprintf with a constant format string and no arguments is
	// unnecessary (flagged by staticcheck S1039); return the constant.
	return "ctcLossDiff{}()"
}
// InferShape returns the shape of the gradient, which matches the shape of
// the first input (logProbs).
func (op *ctcLossDiffOp) InferShape(inputs ...DimSizer) (tensor.Shape, error) {
	return inputs[0].(tensor.Shape).Clone(), nil
}
// Type returns the diff op's function type: the four forward inputs plus the
// incoming gradient, returning a tensor of the same type as the first input.
func (op *ctcLossDiffOp) Type() hm.Type {
	a := hm.TypeVariable('a')                      // logProbs; the result shares this type
	b := makeTensorType(op.targetDims, tensor.Int) // targets
	c := makeTensorType(1, tensor.Int)             // input and target lengths
	d := hm.TypeVariable('d')                      // incoming gradient
	return hm.NewFnType(a, b, c, c, d, a)
}
// OverwritesInput returns -1: the op does not overwrite any of its inputs.
func (op *ctcLossDiffOp) OverwritesInput() int { return -1 }
// Do allocates the gradient tensor (same shape and dtype as the first input)
// and delegates to UsePreallocDo.
func (op *ctcLossDiffOp) Do(inputs ...Value) (Value, error) {
	in := inputs[0]
	grad := tensor.New(tensor.WithShape(in.Shape().Clone()...), tensor.Of(in.Dtype()))
	return op.UsePreallocDo(grad, inputs...)
}
// UsePreallocDo computes the gradient of the CTC loss w.r.t. logProbs,
// writing it into prealloc.
//
// inputs are, in order: logProbs, targets, inputLengths, targetLengths and
// the incoming gradient (a scalar F64/F32).
func (op *ctcLossDiffOp) UsePreallocDo(prealloc Value, inputs ...Value) (Value, error) {
	if err := checkArity(op, len(inputs)); err != nil {
		return nil, err
	}
	logProbsT := inputs[0].(*tensor.Dense)
	targetsT := inputs[1].(*tensor.Dense)
	inputLengthsT := inputs[2].(*tensor.Dense)
	targetLengthsT := inputs[3].(*tensor.Dense)
	gradOutT := inputs[4]

	// BUG FIX: the errors returned by f64s/f32s were previously discarded,
	// so a failed backward pass silently returned a bogus gradient.
	var err error
	switch logProbsT.Dtype() {
	case Float64:
		err = op.f64s(logProbsT, targetsT, inputLengthsT, targetLengthsT, prealloc.(*tensor.Dense), gradOutT.(*F64))
	case Float32:
		err = op.f32s(logProbsT, targetsT, inputLengthsT, targetLengthsT, prealloc.(*tensor.Dense), gradOutT.(*F32))
	default:
		log.Panicf("%T type is not supported for CTCLoss op", logProbsT.Dtype())
	}

	return prealloc, err
}
// f64s computes the float64 gradient of the CTC loss with respect to the
// log-probabilities, writing it into gradT (same shape as logProbsT) scaled
// by the incoming gradient gradOutT. It runs the backward (beta) recursion
// and combines it with the alpha lattice and negative log-likelihoods cached
// on the op by the forward pass (which must have run first).
func (op *ctcLossDiffOp) f64s(logProbsT, targetsT, inputLengthsT, targetLengthsT, gradT *tensor.Dense, gradOutT *F64) error {
	targets := targetsT.Ints()
	targetLengths := targetLengthsT.Ints()
	inputLengths := inputLengthsT.Ints()

	inputSize := logProbsT.Shape()[0]  // rows
	batchSize := logProbsT.Shape()[1]  // blocks
	numLabels := logProbsT.Shape()[2]  // columns
	spatialDim := inputSize * numLabels

	logAlphaSpatialDim := tensor.Shape(op.logAlpha.Shape()[1:]).TotalSize()

	maxTargetLength := 0
	targetStride := 0

	// Per-sample offsets into the flattened targets slice (see forward pass).
	targetBatchOffsets := make([]int, batchSize)

	if targetsT.Dims() == 1 {
		pos := 0
		for i := 0; i < batchSize; i++ {
			targetBatchOffsets[i] = pos
			pos += targetLengths[i]
			if maxTargetLength < targetLengths[i] {
				maxTargetLength = targetLengths[i]
			}
		}
		targetStride = targetsT.Strides()[0]
	} else {
		batchStride := targetsT.Strides()[0]
		for i := 0; i < batchSize; i++ {
			targetBatchOffsets[i] = i * batchStride
		}
		targetStride = targetsT.Strides()[1]
		maxTargetLength = targetsT.Shape()[1]
	}

	negInf := math.Inf(-1)

	// Start the gradient at log 0; entries are filled in via log-sum-exp.
	if err := gradT.Memset(negInf); err != nil {
		return err
	}

	logBetaT := tensor.New(tensor.WithShape(op.logAlpha.Shape()...), tensor.Of(op.logAlpha.Dtype()))
	if err := logBetaT.Memset(negInf); err != nil {
		return err
	}

	// Reorder to (batch, time, label) so each sample is contiguous.
	lppT, err := tensor.Transpose(logProbsT, 1, 0, 2) // NOTE: I think we can optimize memory usage here
	if err != nil {
		return err
	}

	// View gradT batch-major too; restored with UT() at the end.
	err = gradT.T(1, 0, 2)
	if err != nil {
		return err
	}

	negLogLikelihood := op.negLogLikelihood.Float64s()
	logBeta := logBetaT.Float64s()
	logAlpha := op.logAlpha.Float64s()
	lpp := lppT.(*tensor.Dense).Float64s()

	// Backward (beta) recursion, one unit of work per batch element.
	runInParallel(0, batchSize, func(b int) {
		inputLength := inputLengths[b]
		targetLength := targetLengths[b]
		targetsOffset := targetBatchOffsets[b]
		targetWidth := 2*targetLength + 1

		initialIndex := b * spatialDim
		finalIndex := (b + 1) * spatialDim

		lppSection := lpp[initialIndex:finalIndex]

		gradSlice, err := gradT.Slice(S(b))
		if err != nil {
			panic(err)
		}

		nll := negLogLikelihood[b]

		initialLogAlphaIndex := b * logAlphaSpatialDim
		finalLogAlphaIndex := (b + 1) * logAlphaSpatialDim

		logAlphaSection := logAlpha[initialLogAlphaIndex:finalLogAlphaIndex]
		logBetaSection := logBeta[initialLogAlphaIndex:finalLogAlphaIndex]

		if inputLength > 0 {
			// Initialize beta at the last time step: paths end at the trailing
			// blank or (when a target exists) the last label.
			logBetaSection[(inputLength-1)*targetWidth+2*targetLength] = lppSection[(inputLength-1)*numLabels]

			gradSlice.SetAt(logAlphaSection[(inputLength-1)*targetWidth+2*targetLength]+logBetaSection[(inputLength-1)*targetWidth+2*targetLength], inputLength-1, 0)

			if targetLength > 0 {
				currentPrime := op.getPrimeTarget(targets, targetsOffset, targetStride, 2*targetLength-1)
				logBetaSection[(inputLength-1)*targetWidth+(2*targetLength-1)] = lppSection[(inputLength-1)*numLabels+currentPrime]

				gradSlice.SetAt(logAlphaSection[(inputLength-1)*targetWidth+(2*targetLength-1)]+logBetaSection[(inputLength-1)*targetWidth+(2*targetLength-1)], (inputLength - 1), currentPrime)
			}

			for t := inputLength - 2; t >= 0; t-- {
				for s := 2 * targetLength; s >= 0; s-- {
					// Log-sum-exp over the up-to-three allowed successors
					// (s, s+1, and s+2 when labels differ), stabilized by lbmax.
					baseIndex := (t+1)*targetWidth + s
					lb1 := logBetaSection[baseIndex]
					lbmax := lb1

					var lb2, lb3 float64

					currentTargetPrime := op.getPrimeTarget(targets, targetsOffset, targetStride, s)

					if s < 2*targetLength {
						lb2 = logBetaSection[baseIndex+1]
						if lb2 > lbmax {
							lbmax = lb2
						}
					} else {
						lb2 = negInf
					}

					if s < 2*targetLength-1 && op.getPrimeTarget(targets, targetsOffset, targetStride, s+2) != currentTargetPrime {
						lb3 = logBetaSection[baseIndex+2]
						if lb3 > lbmax {
							lbmax = lb3
						}
					} else {
						lb3 = negInf
					}

					if lbmax == negInf { // all successors impossible
						lbmax = 0
					}

					logBetaSection[t*targetWidth+s] = math.Log(
						math.Exp(lb1-lbmax)+math.Exp(lb2-lbmax)+math.Exp(lb3-lbmax)) + lbmax + lppSection[t*numLabels+currentTargetPrime]

					// Accumulate alpha*beta (in log space) into the gradient
					// cell for this label at time t.
					logAlphaBeta := logAlphaSection[t*targetWidth+s] + logBetaSection[t*targetWidth+s]

					lcab := op.getOrPanicF64(gradSlice, t, currentTargetPrime)

					if lcab == negInf {
						gradSlice.SetAt(logAlphaBeta, t, currentTargetPrime)
					} else {
						max := math.Max(lcab, logAlphaBeta)
						v := math.Log(math.Exp(lcab-max)+math.Exp(logAlphaBeta-max)) + max
						gradSlice.SetAt(
							v,
							t, currentTargetPrime,
						)
					}
				}
			}

			// Convert the accumulated log values into the actual gradient,
			// scaled by the incoming scalar gradient.
			for t := 0; t < inputLength; t++ {
				for c := 0; c < numLabels; c++ {
					res := op.getOrPanicF64(gradSlice, t, c)
					lp := lppSection[t*numLabels+c]

					v := (math.Exp(lp) - math.Exp(res+nll-lp)) * float64(*gradOutT)

					gradSlice.SetAt(v, t, c)
				}
			}
		}
	})

	// Undo the earlier transpose view of gradT.
	gradT.UT()

	return nil
}
// f32s is the float32 counterpart of the diff op's f64s: it runs the backward
// (beta) recursion against the cached alpha lattice and writes the gradient
// of the CTC loss w.r.t. logProbs into gradT, scaled by gradOutT. See f64s
// for the detailed commentary; the two must be kept in sync.
func (op *ctcLossDiffOp) f32s(logProbsT, targetsT, inputLengthsT, targetLengthsT, gradT *tensor.Dense, gradOutT *F32) error {
	targets := targetsT.Ints()
	targetLengths := targetLengthsT.Ints()
	inputLengths := inputLengthsT.Ints()

	inputSize := logProbsT.Shape()[0]  // rows
	batchSize := logProbsT.Shape()[1]  // blocks
	numLabels := logProbsT.Shape()[2]  // columns
	spatialDim := inputSize * numLabels

	logAlphaSpatialDim := tensor.Shape(op.logAlpha.Shape()[1:]).TotalSize()

	maxTargetLength := 0
	targetStride := 0

	// Per-sample offsets into the flattened targets slice (see forward pass).
	targetBatchOffsets := make([]int, batchSize)

	if targetsT.Dims() == 1 {
		pos := 0
		for i := 0; i < batchSize; i++ {
			targetBatchOffsets[i] = pos
			pos += targetLengths[i]
			if maxTargetLength < targetLengths[i] {
				maxTargetLength = targetLengths[i]
			}
		}
		targetStride = targetsT.Strides()[0]
	} else {
		batchStride := targetsT.Strides()[0]
		for i := 0; i < batchSize; i++ {
			targetBatchOffsets[i] = i * batchStride
		}
		targetStride = targetsT.Strides()[1]
		maxTargetLength = targetsT.Shape()[1]
	}

	negInf := math32.Inf(-1)

	// Start the gradient at log 0; entries are filled in via log-sum-exp.
	if err := gradT.Memset(negInf); err != nil {
		return err
	}

	logBetaT := tensor.New(tensor.WithShape(op.logAlpha.Shape()...), tensor.Of(op.logAlpha.Dtype()))
	if err := logBetaT.Memset(negInf); err != nil {
		return err
	}

	// Reorder to (batch, time, label) so each sample is contiguous.
	lppT, err := tensor.Transpose(logProbsT, 1, 0, 2) // NOTE: I think we can optimize memory usage here
	if err != nil {
		return err
	}

	// View gradT batch-major too; restored with UT() at the end.
	err = gradT.T(1, 0, 2)
	if err != nil {
		return err
	}

	negLogLikelihood := op.negLogLikelihood.Float32s()
	logBeta := logBetaT.Float32s()
	logAlpha := op.logAlpha.Float32s()
	lpp := lppT.(*tensor.Dense).Float32s()

	// Backward (beta) recursion, one unit of work per batch element.
	runInParallel(0, batchSize, func(b int) {
		inputLength := inputLengths[b]
		targetLength := targetLengths[b]
		targetsOffset := targetBatchOffsets[b]
		targetWidth := 2*targetLength + 1

		initialIndex := b * spatialDim
		finalIndex := (b + 1) * spatialDim

		lppSection := lpp[initialIndex:finalIndex]

		gradSlice, err := gradT.Slice(S(b))
		if err != nil {
			panic(err)
		}

		nll := negLogLikelihood[b]

		initialLogAlphaIndex := b * logAlphaSpatialDim
		finalLogAlphaIndex := (b + 1) * logAlphaSpatialDim

		logAlphaSection := logAlpha[initialLogAlphaIndex:finalLogAlphaIndex]
		logBetaSection := logBeta[initialLogAlphaIndex:finalLogAlphaIndex]

		if inputLength > 0 {
			// Initialize beta at the last time step: paths end at the trailing
			// blank or (when a target exists) the last label.
			logBetaSection[(inputLength-1)*targetWidth+2*targetLength] = lppSection[(inputLength-1)*numLabels]

			gradSlice.SetAt(logAlphaSection[(inputLength-1)*targetWidth+2*targetLength]+logBetaSection[(inputLength-1)*targetWidth+2*targetLength], inputLength-1, 0)

			if targetLength > 0 {
				currentPrime := op.getPrimeTarget(targets, targetsOffset, targetStride, 2*targetLength-1)
				logBetaSection[(inputLength-1)*targetWidth+(2*targetLength-1)] = lppSection[(inputLength-1)*numLabels+currentPrime]

				gradSlice.SetAt(logAlphaSection[(inputLength-1)*targetWidth+(2*targetLength-1)]+logBetaSection[(inputLength-1)*targetWidth+(2*targetLength-1)], (inputLength - 1), currentPrime)
			}

			for t := inputLength - 2; t >= 0; t-- {
				for s := 2 * targetLength; s >= 0; s-- {
					// Log-sum-exp over the up-to-three allowed successors,
					// stabilized by lbmax.
					baseIndex := (t+1)*targetWidth + s
					lb1 := logBetaSection[baseIndex]
					lbmax := lb1

					var lb2, lb3 float32

					currentTargetPrime := op.getPrimeTarget(targets, targetsOffset, targetStride, s)

					if s < 2*targetLength {
						lb2 = logBetaSection[baseIndex+1]
						if lb2 > lbmax {
							lbmax = lb2
						}
					} else {
						lb2 = negInf
					}

					if s < 2*targetLength-1 && op.getPrimeTarget(targets, targetsOffset, targetStride, s+2) != currentTargetPrime {
						lb3 = logBetaSection[baseIndex+2]
						if lb3 > lbmax {
							lbmax = lb3
						}
					} else {
						lb3 = negInf
					}

					if lbmax == negInf { // all successors impossible
						lbmax = 0
					}

					logBetaSection[t*targetWidth+s] = math32.Log(
						math32.Exp(lb1-lbmax)+math32.Exp(lb2-lbmax)+math32.Exp(lb3-lbmax)) + lbmax + lppSection[t*numLabels+currentTargetPrime]

					// Accumulate alpha*beta (in log space) into the gradient
					// cell for this label at time t.
					logAlphaBeta := logAlphaSection[t*targetWidth+s] + logBetaSection[t*targetWidth+s]

					lcab := op.getOrPanicF32(gradSlice, t, currentTargetPrime)

					if lcab == negInf {
						gradSlice.SetAt(logAlphaBeta, t, currentTargetPrime)
					} else {
						max := math32.Max(lcab, logAlphaBeta)
						v := math32.Log(math32.Exp(lcab-max)+math32.Exp(logAlphaBeta-max)) + max
						gradSlice.SetAt(
							v,
							t, currentTargetPrime,
						)
					}
				}
			}

			// Convert the accumulated log values into the actual gradient,
			// scaled by the incoming scalar gradient.
			for t := 0; t < inputLength; t++ {
				for c := 0; c < numLabels; c++ {
					res := op.getOrPanicF32(gradSlice, t, c)
					lp := lppSection[t*numLabels+c]

					v := (math32.Exp(lp) - math32.Exp(res+nll-lp)) * float32(*gradOutT)

					gradSlice.SetAt(v, t, c)
				}
			}
		}
	})

	// Undo the earlier transpose view of gradT.
	gradT.UT()

	return nil
}
// getOrPanic fetches the value at coords from view, panicking if the lookup
// fails.
func (op ctcLossDiffOp) getOrPanic(view tensor.View, coords ...int) interface{} {
	v, err := view.At(coords...)
	if err == nil {
		return v
	}
	panic(err)
}
// getOrPanicF64 fetches the float64 value at coords from view, panicking on
// lookup failure or if the value is not a float64.
func (op ctcLossDiffOp) getOrPanicF64(view tensor.View, coords ...int) float64 {
	return op.getOrPanic(view, coords...).(float64)
}

// getOrPanicF32 fetches the float32 value at coords from view, panicking on
// lookup failure or if the value is not a float32.
func (op ctcLossDiffOp) getOrPanicF32(view tensor.View, coords ...int) float32 {
	return op.getOrPanic(view, coords...).(float32)
}
// Compile-time check that ctcLossDiffOp satisfies the Op interface.
var (
	_ Op = &ctcLossDiffOp{}
)
package dht
import (
denet "github.com/hlandau/degoutils/net"
"github.com/hlandau/goutils/clock"
"time"
)
// DHT configuration.
// Zero-valued fields are replaced with defaults by setDefaults.
type Config struct {
	// IP address to listen on. If blank, a port is chosen randomly.
	Address string `usage:"Address to bind on"`

	// Number of peers that DHT will try to find for every infohash being searched.
	// Default: 50.
	NumTargetPeers int `usage:"Maximum number of peers to retrieve for an infohash"`

	// Minimum number of nodes. Default: 16.
	MinNodes int `usage:"Minimum number of nodes"`

	// Maximum nodes to store in routing table. Default: 500 (see setDefaults).
	MaxNodes int `usage:"Maximum number of nodes to store in the routing table"`

	// How often to ping nodes in the network to see if they are reachable. Default: 15 minutes.
	CleanupPeriod time.Duration `usage:"How often to ping nodes to see if they are reachable"`

	// How often to rotate announce_peer tokens. Default: 5 minutes (see setDefaults).
	TokenRotatePeriod time.Duration `usage:"How often to rotate announce_peer tokens"`

	// How long to wait before retrying a search. Default: 15 seconds.
	SearchRetryPeriod time.Duration `usage:"Search retry period"`

	// Maximum packets per second to be processed. If negative, no limit is imposed. Default: 100.
	RateLimit int64 `usage:"Maximum packets per second to be processed"`

	// The maximum number of infohashes for which a peer list should be
	// maintained. Default: 2048.
	MaxInfoHashes int `usage:"Maximum number of infohashes to maintain a peer list for"`

	// The maximum number of peers to track for each infohash. Default: 256.
	MaxInfoHashPeers int `usage:"Maximum number of values to store for a given infohash"`

	// The maximum number of pending queries before a node is considered unreachable.
	// Default: 5.
	MaxPendingQueries int `usage:"Maximum number of pending queries before a node is considered unreachable"`

	// Node ID. A random Node ID is generated if this is left blank.
	NodeID NodeID `usage:"Node ID"`

	// If not set, request peers only of the address family (IPv4 or IPv6) used to make
	// requests. If set, request peers of all supported address families (IPv4, IPv6).
	AnyPeerAF bool `usage:"Return peers of all address families"`

	// If set, this is used to get a listener instead of net.ListenUDP.
	ListenFunc func(cfg *Config) (denet.UDPConn, error)

	// If set, use this clock. Else use a realtime clock.
	Clock clock.Clock
}
// setDefaults replaces every unset (zero-valued) field of cfg with its
// default value, and generates a random NodeID when none was supplied.
// A negative RateLimit is left untouched, meaning "no limit".
func (cfg *Config) setDefaults() {
	if cfg.NumTargetPeers == 0 {
		cfg.NumTargetPeers = 50
	}
	if cfg.MinNodes == 0 {
		cfg.MinNodes = 16
	}
	if cfg.MaxNodes == 0 {
		cfg.MaxNodes = 500
	}
	if cfg.CleanupPeriod == 0 {
		cfg.CleanupPeriod = 15 * time.Minute
	}
	if cfg.TokenRotatePeriod == 0 {
		cfg.TokenRotatePeriod = 5 * time.Minute
	}
	if cfg.SearchRetryPeriod == 0 {
		cfg.SearchRetryPeriod = 15 * time.Second
	}
	if cfg.RateLimit == 0 {
		cfg.RateLimit = 100
	}
	if cfg.MaxInfoHashes == 0 {
		cfg.MaxInfoHashes = 2048
	}
	if cfg.MaxInfoHashPeers == 0 {
		cfg.MaxInfoHashPeers = 256
	}
	if cfg.MaxPendingQueries == 0 {
		cfg.MaxPendingQueries = 5
	}
	if !cfg.NodeID.Valid() {
		cfg.NodeID = GenerateNodeID()
	}
}
package live
import (
"encoding/binary"
"encoding/json"
"math"
"github.com/edwingeng/live/internal"
)
var (
	// Nil is the zero Data value; it carries no payload.
	Nil Data
)
// Data is a value holder. Its payload is stored as an *internal.Data, whose
// N field carries small numeric values and whose X field carries byte-backed
// values (strings, raw bytes, and varint-encoded numbers) — see the To*
// accessors below.
type Data struct {
	v interface{}
}
// ToBool reports the stored value as a bool (N == 1 means true).
// Panics if d holds no payload (e.g. Nil).
func (d Data) ToBool() bool {
	return d.v.(*internal.Data).N == 1
}

// ToInt returns the stored value as an int (truncated from N).
func (d Data) ToInt() int {
	return int(d.v.(*internal.Data).N)
}

// ToInt8 returns the stored value as an int8 (truncated from N).
func (d Data) ToInt8() int8 {
	return int8(d.v.(*internal.Data).N)
}

// ToInt16 returns the stored value as an int16 (truncated from N).
func (d Data) ToInt16() int16 {
	return int16(d.v.(*internal.Data).N)
}

// ToInt32 returns the stored value as an int32 (truncated from N).
func (d Data) ToInt32() int32 {
	return int32(d.v.(*internal.Data).N)
}

// ToInt64 returns the stored value as an int64 (N as-is).
func (d Data) ToInt64() int64 {
	return d.v.(*internal.Data).N
}

// ToUint decodes the stored value as a varint from X.
func (d Data) ToUint() uint {
	v, _ := binary.Uvarint(d.v.(*internal.Data).X)
	return uint(v)
}

// ToUint8 returns the stored value as a uint8 (truncated from N).
func (d Data) ToUint8() uint8 {
	return uint8(d.v.(*internal.Data).N)
}

// ToUint16 returns the stored value as a uint16 (truncated from N).
func (d Data) ToUint16() uint16 {
	return uint16(d.v.(*internal.Data).N)
}

// ToUint32 returns the stored value as a uint32 (truncated from N).
func (d Data) ToUint32() uint32 {
	return uint32(d.v.(*internal.Data).N)
}

// ToUint64 decodes the stored value as a varint from X.
func (d Data) ToUint64() uint64 {
	v, _ := binary.Uvarint(d.v.(*internal.Data).X)
	return v
}

// ToFloat32 reinterprets the low 32 bits of N as an IEEE-754 float32.
func (d Data) ToFloat32() float32 {
	return math.Float32frombits(uint32(d.v.(*internal.Data).N))
}

// ToFloat64 decodes a varint from X and reinterprets it as an IEEE-754
// float64.
func (d Data) ToFloat64() float64 {
	v, _ := binary.Uvarint(d.v.(*internal.Data).X)
	return math.Float64frombits(v)
}

// ToString returns the stored bytes (X) as a string.
func (d Data) ToString() string {
	return string(d.v.(*internal.Data).X)
}

// ToBytes returns the stored bytes (X), or nil when d holds no payload.
// Unlike the other accessors, it is safe to call on Nil.
func (d Data) ToBytes() []byte {
	if d.v != nil {
		return d.v.(*internal.Data).X
	} else {
		return nil
	}
}

// V returns the raw underlying value.
func (d Data) V() interface{} {
	return d.v
}
// ToProtobufObj unmarshals the byte payload into obj via its protobuf-style
// Unmarshal method. It is a no-op when the Data is empty; a failed
// unmarshal panics.
// Fix: guard against a nil wrapped value, mirroring ToJSONObj — previously
// calling this on Nil panicked on the type assertion instead of returning.
func (d Data) ToProtobufObj(obj interface {
	Unmarshal([]byte) error
}) {
	if d.v == nil {
		return
	}
	if len(d.v.(*internal.Data).X) == 0 {
		return
	}
	err := obj.Unmarshal(d.v.(*internal.Data).X)
	if err != nil {
		panic(err)
	}
}
// ToJSONObj decodes the byte payload into obj. If obj implements
// json.Unmarshaler its own UnmarshalJSON is used; otherwise encoding/json
// does the work. Nil or empty payloads are ignored; a decode error panics.
func (d Data) ToJSONObj(obj interface{}) {
	if d.v == nil {
		return
	}
	payload := d.v.(*internal.Data).X
	if len(payload) == 0 {
		return
	}
	var err error
	if u, ok := obj.(interface {
		UnmarshalJSON([]byte) error
	}); ok {
		err = u.UnmarshalJSON(payload)
	} else {
		err = json.Unmarshal(payload, obj)
	}
	if err != nil {
		panic(err)
	}
}
// Persistent converts d into a Persistent value. The boolean result is
// true when d wraps an *internal.Data (possibly a typed nil) or nothing
// at all; it is false when d wraps some other type.
func (d Data) Persistent() (Persistent, bool) {
	switch x := d.v.(type) {
	case *internal.Data:
		if x == nil {
			return Persistent{}, true
		}
		return Persistent{d: *x}, true
	default:
		return Persistent{}, d.v == nil
	}
} | data.go | 0.572006 | 0.502136 | data.go | starcoder |
package cmd
import "github.com/MakeNowJust/heredoc"
// mergeConflictHelp returns a human-readable explanation of how merge
// conflicts arise for the given model type name (the keys look like the
// fmt %T rendering of the model pointer, e.g. "*model.Bookmark").
// It returns the empty string for unknown names.
func mergeConflictHelp(name string) string {
	// One help text per conflict-capable model type.
	helpTexts := map[string]string{
		"*model.Bookmark": `Bookmarks are set for each publication (i.e. a Watchtower issue or a Bible translation)
and can be placed at ten different „slots“ (the colors you see in the app). A collision
happens, if two bookmarks are placed at the same slot of the same publication. You are
able to choose if the bookmark on the left or right should be added to the merged backup.
The title and snippet may help you identify the bookmark. Also have a look at the
„Related Location“ for a more detailed insight: „BookNumber“ and „ChapterNumber“
generally relate to a Bible book, while „DocumentID“ might be a Watchtower issue or a
different publication.`,
		"*model.UserMarkBlockRange": `Markings collide if they overlap at at least one point. To figure out where a marking
is located, look at the Identifier, Start- and EndToken. The Identifier generally
represents a paragraph in a publication or the verse in a Bible chapter. Start-, and
EndToken represent the beginning and the end of a marking, where words and punctuation
marks are counted as tokens (e.g. the sentence “You are my witnesses,” contains five
tokens, as four words and one comma are counted). Note that a
marking can span multiple Identifiers. ColorIndex represents the color of the marking.
After you located the collision, you can choose between the left and the right marking.
In future versions you will be able to merge them into one big marking.`,
		"*model.Note": `A note collides if it exists on both sides (so they must have been synced at least once)
and it differers in the title or content. It generally makes sense to choose the note
with the newest date.`,
		"*model.InputField": `InputFields are used in interactive publications where you can enter custom notes,
tick boxes, etc. An example would be the "Enjoy Life Forever!" brochure.`,
	}
	if text, ok := helpTexts[name]; ok {
		// heredoc.Doc strips the indentation common to all lines of the text.
		return heredoc.Doc(text)
	}
	return ""
} | cmd/help.go | 0.64646 | 0.564459 | help.go | starcoder |
package als
import (
"errors"
"fmt"
"math"
"math/rand"
"sort"
"strconv"
. "github.com/skelterjohn/go.matrix"
)
var (
NA = math.NaN()
)
// errcheck prints a non-nil error to stdout. It never aborts; callers
// continue regardless of the outcome.
func errcheck(err error) {
	if err == nil {
		return
	}
	fmt.Printf("Error occured: %v", err)
}
// makeWeightMatrix builds the binary mask matrix W used by ALS: each entry
// is 1 where mat holds an observed rating, and 0 where the entry is zero
// or NaN (i.e. missing).
func makeWeightMatrix(mat *DenseMatrix) *DenseMatrix {
	src := mat.Array()
	weights := make([]float64, len(src))
	for i, v := range src {
		if v != 0.0 && !math.IsNaN(v) {
			weights[i] = 1
		}
	}
	return MakeDenseMatrix(weights, mat.Rows(), mat.Cols())
}
// MakeRatingMatrix is a thin wrapper over MakeDenseMatrix: it builds a
// rows x cols rating matrix from the row-major ratings slice.
func MakeRatingMatrix(ratings []float64, rows, cols int) *DenseMatrix {
	return MakeDenseMatrix(ratings, rows, cols)
}
// makeCMatrix builds the confidence matrix for the implicit ALS algorithm:
// missing entries (zero or NaN) get the baseline confidence 1, observed
// entries get 1 + 40*r — the linear confidence scheme from the implicit-
// feedback ALS literature, with alpha = 40 here.
func makeCMatrix(mat *DenseMatrix) *DenseMatrix {
	values := mat.Array()
	newvalues := make([]float64, len(values))
	for i := 0; i < len(values); i++ {
		if values[i] == 0.0 || math.IsNaN(values[i]) {
			newvalues[i] = 1
		} else {
			newvalues[i] = 1 + 40*values[i] // confidence grows linearly with the rating (alpha = 40)
		} // note: an earlier comment here claimed alpha = 20; the code uses 40
	}
	return MakeDenseMatrix(newvalues, mat.Rows(), mat.Cols())
}
// makeXY seeds the two ALS factor matrices: X is rows x n_factors and Y is
// n_factors x cols, both filled with uniform random values in
// [0, max_rating). The RNG is seeded deterministically so training runs
// are reproducible. X is filled before Y, preserving the draw order.
func makeXY(mat *DenseMatrix, n_factors int, max_rating float64, seed int) (X, Y *DenseMatrix) {
	rand.Seed(int64(seed))
	xData := make([]float64, mat.Rows()*n_factors)
	for i := range xData {
		xData[i] = max_rating * rand.Float64()
	}
	yData := make([]float64, n_factors*mat.Cols())
	for i := range yData {
		yData[i] = max_rating * rand.Float64()
	}
	X = MakeDenseMatrix(xData, mat.Rows(), n_factors)
	Y = MakeDenseMatrix(yData, n_factors, mat.Cols())
	return
}
// sumMatrix returns the sum of every element of mat.
func sumMatrix(mat *DenseMatrix) (sum float64) {
	for _, v := range mat.Array() {
		sum += v
	}
	return
}
// swapCols returns a copy of mat with columns i and j exchanged. The swap
// is performed on the transpose, where the columns appear as rows.
func swapCols(mat *DenseMatrix, i, j int) *DenseMatrix {
	transposed := mat.Copy().Transpose()
	transposed.SwapRows(i, j)
	return transposed.Transpose()
}
// simpleTimes computes the element-wise (Hadamard) product of mat and
// weight, returning nil when their element counts differ.
// NOTE(review): mat.Array() appears to alias mat's backing storage, so mat
// itself is likely mutated here (as in the original) — confirm against the
// go.matrix implementation.
func simpleTimes(mat, weight *DenseMatrix) *DenseMatrix {
	if len(mat.Array()) != len(weight.Array()) {
		return nil
	}
	values := mat.Array()
	scale := weight.Array()
	for i := range values {
		values[i] *= scale[i]
	}
	return MakeDenseMatrix(values, mat.Rows(), mat.Cols())
}
// getErrorInline computes the weighted squared reconstruction error
// sum((W .* (q - X*Y))^2) used to monitor explicit-ALS convergence.
func getErrorInline(W, q, X, Y *DenseMatrix) float64 {
	Q := q.Copy()
	// dot = X*Y, the current low-rank reconstruction.
	dot, err := X.TimesDense(Y)
	errcheck(err)
	// Q becomes the residual q - X*Y, computed in place on the copy.
	err = Q.SubtractDense(dot)
	errcheck(err)
	// Mask out unobserved entries, then square element-wise and sum.
	Prod := simpleTimes(Q, W)
	tosum := simpleTimes(Prod, Prod)
	sum := sumMatrix(tosum)
	return sum
}
// setRow overwrites row `which` of mat with the given values and returns
// mat.
// Fix: on a dimension mismatch the original printed a warning but kept
// going, indexing past the end of a too-short row and panicking; it now
// returns the matrix unchanged instead.
func setRow(mat *DenseMatrix, which int, row []float64) *DenseMatrix {
	if mat.Cols() != len(row) {
		fmt.Println("The row to set needs to be the same dimension as the matrix")
		return mat
	}
	// iterate over columns to set the values for a selected row
	for i := 0; i < mat.Cols(); i++ {
		mat.Set(which, i, row[i])
	}
	return mat
}
// setCol overwrites column `which` of mat with the given values and
// returns mat.
// Fix: on a dimension mismatch the original printed a warning but kept
// going, indexing past the end of a too-short column and panicking; it
// now returns the matrix unchanged instead.
func setCol(mat *DenseMatrix, which int, col []float64) *DenseMatrix {
	if mat.Rows() != len(col) {
		fmt.Println("The column to set needs to be the same dimension as the matrix")
		return mat
	}
	// iterate over rows to set the values for a selected columns
	for i := 0; i < mat.Rows(); i++ {
		mat.Set(i, which, col[i])
	}
	return mat
}
// matrixMinMinus subtracts the global minimum from every element, shifting
// the matrix so its smallest entry becomes zero.
// Fix: the running minimum was seeded with the magic value 100, which gave
// a wrong result whenever every element exceeded 100; it is now seeded
// from the first element, and an empty matrix is returned unchanged.
// NOTE(review): mat.Array() appears to alias mat's storage, so mat itself
// is mutated here (as in the original) — confirm against go.matrix.
func matrixMinMinus(mat *DenseMatrix) *DenseMatrix {
	values := mat.Array()
	if len(values) == 0 {
		return mat
	}
	min := values[0]
	for _, v := range values {
		if v < min {
			min = v
		}
	}
	for i := range values {
		values[i] -= min
	}
	return MakeDenseMatrix(values, mat.Rows(), mat.Cols())
}
// matrixMax returns the largest element of mat (0 for an empty matrix,
// matching the original's behavior on no data).
// Fix: the running maximum was seeded with 0, which gave a wrong result
// for matrices whose elements are all negative; it is now seeded from the
// first element.
func matrixMax(mat *DenseMatrix) float64 {
	values := mat.Array()
	if len(values) == 0 {
		return 0
	}
	max := values[0]
	for _, v := range values {
		if v > max {
			max = v
		}
	}
	return max
}
// Train runs explicit-feedback alternating least squares.
// Params: the user/product matrix, number of factors for recommendation, iterations, and lambda value for ALS.
// Returns the trained matrix with predictions for 0 valued entries, and the final error calculation (float64)
// NOTE(review): iterations must be >= 1 — with 0 iterations the final
// errors[len(errors)-1] access panics on the empty slice.
func Train(Q *DenseMatrix, n_factors, iterations int, lambda float64) (*DenseMatrix, float64) {
	// W is the binary mask of observed entries; maxval scales the random init.
	W := makeWeightMatrix(Q)
	maxval := matrixMax(Q)
	// Deterministic seed (47) keeps training runs reproducible.
	X, Y := makeXY(Q, n_factors, maxval, 47)
	// to store error values
	errors := make([]float64, 0)
	for ii := 0; ii < iterations; ii++ {
		// scaled identity matrix
		I := Eye(n_factors)
		I.Scale(lambda)
		// solve for X
		for u := 0; u < Q.Rows(); u++ {
			// Ridge-regression solve of the per-user normal equations:
			// x_u = (Y W_u Y^T + lambda*I)^-1 * (Y W_u q_u)
			weightedRow := W.GetRowVector(u).Array()
			w_yt, _ := Diagonal(weightedRow).TimesDense(Y.Transpose())
			y_wt_yt, _ := Y.TimesDense(w_yt)
			y_wt_yt.AddDense(I)
			y_wt_ytInv, _ := y_wt_yt.Inverse()
			q_u := Q.GetRowVector(u).Transpose()
			wu_qu, _ := Diagonal(weightedRow).TimesDense(q_u)
			x_tosolve, _ := Y.TimesDense(wu_qu)
			new_row, _ := y_wt_ytInv.TimesDense(x_tosolve)
			X = setRow(X, u, new_row.Array())
		}
		// now alternate to solve for Y
		for i := 0; i < Q.Cols(); i++ {
			// Symmetric per-item solve using the freshly updated X.
			weightedCol := W.GetColVector(i).Transpose().Array()
			w_x, _ := Diagonal(weightedCol).TimesDense(X)
			x_t_w_x, _ := X.Transpose().TimesDense(w_x)
			x_t_w_x.AddDense(I)
			x_t_w_xInv, _ := x_t_w_x.Inverse()
			q_i := Q.GetColVector(i)
			wi_qi, _ := Diagonal(weightedCol).TimesDense(q_i)
			y_tosolve, _ := X.Transpose().TimesDense(wi_qi)
			new_col, _ := x_t_w_xInv.TimesDense(y_tosolve)
			Y = setCol(Y, i, new_col.Array())
		}
		// Calculate the error values at each iteration
		error_value := getErrorInline(W, Q, X, Y)
		errors = append(errors, error_value)
	}
	fmt.Printf("\nFinal Error value of: %v\n", errors[len(errors)-1])
	weighted_Qhat, _ := X.TimesDense(Y)
	return weighted_Qhat, errors[len(errors)-1]
}
// TrainImplicit runs implicit-feedback alternating least squares.
// Params: the rating matrix, number of factors, number of iterations, and lambda for building
// recommendation matrix.
// Returns the confidence matrix on a scale from 0 to 1.
func TrainImplicit(R *DenseMatrix, n_factors, iterations int, lambda float64) *DenseMatrix {
	// P is the binary preference mask; C is the per-entry confidence matrix.
	P := makeWeightMatrix(R)
	C := makeCMatrix(R)
	// Factors initialized with uniform values in [0, 5), deterministic seed 47.
	X, Y := makeXY(R, n_factors, 5, 47)
	for ii := 0; ii < iterations; ii++ {
		// scaled identity matrix
		I := Eye(n_factors)
		I.Scale(lambda)
		// solve for X
		for u := 0; u < C.Rows(); u++ {
			// Per-user solve: x_u = (Y^T C_u Y + lambda*I)^-1 * (Y^T C_u p_u)
			weightedRow := C.GetRowVector(u).Array()
			c_yt, _ := Diagonal(weightedRow).TimesDense(Y)
			y_ct_yt, _ := Y.Transpose().TimesDense(c_yt)
			y_ct_yt.AddDense(I)
			y_ct_ytInv, _ := y_ct_yt.Inverse()
			p_u := P.GetRowVector(u).Transpose()
			cu_pu, _ := Diagonal(weightedRow).TimesDense(p_u)
			x_tosolve, _ := Y.Transpose().TimesDense(cu_pu)
			new_row, _ := y_ct_ytInv.TimesDense(x_tosolve)
			X = setRow(X, u, new_row.Array())
		}
		// now alternate to solve for Y
		for i := 0; i < C.Cols(); i++ {
			// Symmetric per-item solve using the freshly updated X.
			weightedCol := C.GetColVector(i).Transpose().Array()
			c_x, _ := Diagonal(weightedCol).TimesDense(X)
			x_t_c_x, _ := X.Transpose().TimesDense(c_x)
			x_t_c_x.AddDense(I)
			x_t_c_xInv, _ := x_t_c_x.Inverse()
			p_i := P.GetColVector(i)
			ci_pi, _ := Diagonal(weightedCol).TimesDense(p_i)
			y_tosolve, _ := X.Transpose().TimesDense(ci_pi)
			new_col, _ := x_t_c_xInv.TimesDense(y_tosolve)
			Y = setCol(Y, i, new_col.Array())
		}
	}
	weighted_Qhat, _ := X.TimesDense(Y)
	return weighted_Qhat
}
// Predict returns the model's score Qhat[user][product], or an error when
// either index lies outside the matrix.
// Fix: the bounds were checked with ">", so user == Rows() (or product ==
// Cols()) slipped past the guard and panicked inside Get; negative indices
// were not rejected at all. Both are now caught.
func Predict(Qhat *DenseMatrix, user, product int) (float64, error) {
	if user < 0 || product < 0 || user >= Qhat.Rows() || product >= Qhat.Cols() {
		return 0.0, errors.New("User/Product index out of range")
	}
	return Qhat.Get(user, product), nil
}
// oppositeWeights builds the complement of the rating mask: cells holding
// a positive rating become 0 and all other cells become 1, so that only
// *unrated* products survive a later element-wise multiply.
// NOTE(review): Q.Array() appears to alias Q's storage, so Q itself is
// likely mutated here (as in the original) — confirm against go.matrix.
func oppositeWeights(Q *DenseMatrix) *DenseMatrix {
	mask := Q.Array()
	for i := range mask {
		if mask[i] > 0 {
			mask[i] = 0
		} else {
			mask[i] = 1
		}
	}
	return MakeDenseMatrix(mask, Q.Rows(), Q.Cols())
}
// GetTopNRecommendations ranks the products the user has NOT yet rated by
// their predicted score in Qhat and returns the best n. If products is
// nil the stringified product indices are returned; otherwise the product
// names at those indices.
// Fixes over the original:
//   - bounds were checked with ">", so user == Rows() panicked inside
//     GetRowVector; ">=" (plus a negative check) is used now, keeping the
//     original n > Cols() condition;
//   - scores were used as map keys, so products with equal predicted
//     scores collapsed onto one map entry and the same product could be
//     returned several times; ranking now sorts product indices instead.
func GetTopNRecommendations(Q, Qhat *DenseMatrix, user, n int, products []string) ([]string, error) {
	qhat := Qhat.Copy()
	// Zero out the cells the user already rated so they can never rank.
	inverseWeights := oppositeWeights(Q)
	qhat = simpleTimes(qhat, inverseWeights)
	if user < 0 || user >= qhat.Rows() || n > qhat.Cols() {
		return nil, errors.New("User/Product index out of range")
	}
	userRow := qhat.GetRowVector(user).Array()
	// Sort product indices by descending predicted score.
	order := make([]int, len(userRow))
	for i := range order {
		order[i] = i
	}
	sort.Slice(order, func(a, b int) bool { return userRow[order[a]] > userRow[order[b]] })
	var recommendations []string
	for i := 0; i < n && i < len(order); i++ {
		if products != nil {
			recommendations = append(recommendations, products[order[i]])
		} else {
			recommendations = append(recommendations, strconv.Itoa(order[i]))
		}
	}
	return recommendations, nil
} | plugins/data/learn/ml-filters-als/als.go | 0.661923 | 0.557905 | als.go | starcoder |
package main
import (
"github.com/kindermoumoute/adventofcode/pkg/execute"
)
// tests pairs each puzzle input with the expected answers; within each
// case the second field is the part-1 answer and the third the part-2
// answer (an empty string skips that part's check).
var tests = execute.TestCases{
	{
		`light red bags contain 1 bright white bag, 2 muted yellow bags.
dark orange bags contain 3 bright white bags, 4 muted yellow bags.
bright white bags contain 1 shiny gold bag.
muted yellow bags contain 2 shiny gold bags, 9 faded blue bags.
shiny gold bags contain 1 dark olive bag, 2 vibrant plum bags.
dark olive bags contain 3 faded blue bags, 4 dotted black bags.
vibrant plum bags contain 5 faded blue bags, 6 dotted black bags.
faded blue bags contain no other bags.
dotted black bags contain no other bags.`,
		`4`,
		``,
	},
	{
		`shiny gold bags contain 2 dark red bags.
dark red bags contain 2 dark orange bags.
dark orange bags contain 2 dark yellow bags.
dark yellow bags contain 2 dark green bags.
dark green bags contain 2 dark blue bags.
dark blue bags contain 2 dark violet bags.
dark violet bags contain no other bags.`,
		``,
		`126`,
	},
	{
		puzzle,
		`211`,
		`12414`,
	},
}
var puzzle = `shiny lime bags contain 3 muted magenta bags, 3 clear cyan bags.
shiny violet bags contain 1 faded brown bag, 1 dull red bag.
muted maroon bags contain 4 pale lime bags.
pale magenta bags contain 2 striped coral bags, 1 shiny orange bag, 3 vibrant white bags, 4 posh cyan bags.
vibrant crimson bags contain 4 bright white bags, 3 dark brown bags, 4 plaid crimson bags.
mirrored red bags contain 2 bright orange bags, 3 dull brown bags, 4 dotted brown bags.
muted red bags contain 2 bright green bags.
faded chartreuse bags contain 5 bright cyan bags.
wavy red bags contain 4 drab white bags, 1 plaid silver bag.
pale purple bags contain 4 muted yellow bags, 2 mirrored chartreuse bags, 5 mirrored purple bags, 2 mirrored red bags.
dull blue bags contain 4 dark brown bags, 2 faded blue bags, 4 dim aqua bags.
mirrored tomato bags contain 1 posh turquoise bag, 2 bright aqua bags.
clear lavender bags contain 3 plaid bronze bags, 4 faded plum bags, 2 muted coral bags, 1 posh chartreuse bag.
light cyan bags contain 1 plaid tan bag, 2 muted cyan bags, 3 dim cyan bags, 1 pale gray bag.
plaid lavender bags contain 4 bright cyan bags, 1 dim aqua bag, 3 muted orange bags.
dotted bronze bags contain 5 drab lime bags, 3 striped plum bags, 3 faded blue bags, 5 faded purple bags.
clear indigo bags contain 3 dotted purple bags.
vibrant cyan bags contain 4 dim tomato bags, 1 dull green bag, 5 light silver bags, 2 striped gold bags.
pale yellow bags contain no other bags.
drab gray bags contain 4 shiny maroon bags.
clear turquoise bags contain 3 dotted blue bags, 3 faded cyan bags.
plaid bronze bags contain 3 light tomato bags, 2 faded chartreuse bags.
mirrored turquoise bags contain 2 plaid purple bags, 5 mirrored tomato bags, 2 drab tan bags.
wavy turquoise bags contain 2 plaid salmon bags.
shiny yellow bags contain 2 striped aqua bags, 5 drab gray bags, 4 pale aqua bags, 5 dim purple bags.
clear magenta bags contain 1 striped brown bag, 1 dull black bag, 5 light lime bags.
plaid indigo bags contain 5 wavy purple bags, 2 pale blue bags.
plaid brown bags contain 4 striped salmon bags.
dull gold bags contain 2 mirrored green bags, 1 shiny coral bag, 4 shiny red bags.
dark white bags contain 3 muted black bags, 1 vibrant yellow bag, 4 dotted chartreuse bags, 3 mirrored white bags.
drab blue bags contain 3 wavy tomato bags.
dull teal bags contain 5 drab fuchsia bags, 4 dim black bags.
drab aqua bags contain 3 dark red bags.
faded gold bags contain 1 pale bronze bag, 4 dim gold bags, 1 vibrant aqua bag, 2 bright aqua bags.
drab black bags contain 3 light cyan bags.
dim purple bags contain 5 mirrored bronze bags, 3 shiny bronze bags, 3 shiny turquoise bags, 4 clear maroon bags.
dim lime bags contain 4 vibrant yellow bags.
dotted white bags contain 5 faded lime bags.
muted lavender bags contain 3 dotted bronze bags, 2 faded chartreuse bags, 5 drab gold bags, 5 dark white bags.
drab white bags contain 1 clear lavender bag, 3 posh maroon bags.
bright tan bags contain 5 mirrored gray bags, 4 posh plum bags, 4 dull brown bags.
pale beige bags contain 4 muted indigo bags.
bright turquoise bags contain 5 wavy violet bags, 4 wavy indigo bags, 2 faded beige bags, 2 dim yellow bags.
muted violet bags contain 4 drab lime bags.
posh chartreuse bags contain 5 wavy silver bags, 4 light aqua bags.
faded green bags contain 5 muted magenta bags, 4 dark fuchsia bags.
dull orange bags contain 2 mirrored tomato bags, 4 wavy orange bags.
posh red bags contain 5 dim chartreuse bags, 1 shiny aqua bag, 1 wavy black bag.
clear crimson bags contain 5 dotted tan bags, 2 wavy crimson bags, 4 dim orange bags, 5 drab turquoise bags.
striped lavender bags contain 5 plaid teal bags, 4 dull crimson bags, 4 posh lavender bags.
faded lavender bags contain 2 vibrant coral bags.
posh lavender bags contain 2 drab silver bags, 1 drab cyan bag.
plaid olive bags contain 5 vibrant yellow bags, 1 striped bronze bag.
light indigo bags contain 3 shiny cyan bags, 3 vibrant yellow bags.
light orange bags contain 2 dark indigo bags.
dim teal bags contain 3 bright lavender bags, 4 wavy tomato bags, 2 shiny gray bags, 5 bright blue bags.
dull tomato bags contain 5 drab lime bags, 3 dark olive bags, 4 drab turquoise bags.
muted indigo bags contain 5 posh chartreuse bags, 1 mirrored green bag, 3 dark brown bags, 1 dark orange bag.
wavy fuchsia bags contain 3 mirrored magenta bags, 5 drab fuchsia bags, 5 dull green bags.
dim violet bags contain 2 posh crimson bags.
faded lime bags contain no other bags.
wavy salmon bags contain 5 posh red bags.
drab cyan bags contain 3 dull indigo bags, 1 vibrant indigo bag.
striped purple bags contain 1 faded blue bag, 3 faded fuchsia bags, 3 pale maroon bags.
dim aqua bags contain no other bags.
pale olive bags contain 1 muted indigo bag.
striped tan bags contain 4 light coral bags, 4 dull violet bags, 3 dim purple bags, 5 dull yellow bags.
dull black bags contain 1 dotted beige bag.
bright lime bags contain 2 light yellow bags.
faded red bags contain 2 drab teal bags, 2 pale coral bags, 5 dotted black bags.
vibrant green bags contain 4 drab bronze bags.
posh gold bags contain 2 pale cyan bags, 3 clear lavender bags, 2 plaid bronze bags.
muted magenta bags contain 5 shiny blue bags, 1 faded olive bag, 4 drab brown bags, 3 dull violet bags.
muted lime bags contain 5 light blue bags, 1 vibrant lavender bag.
muted tan bags contain 3 vibrant coral bags, 5 muted coral bags, 3 light bronze bags.
drab teal bags contain 3 dotted fuchsia bags, 4 drab aqua bags, 3 dim aqua bags.
muted purple bags contain 1 drab tan bag.
dim brown bags contain 4 dotted tan bags.
drab lime bags contain 1 mirrored green bag, 5 clear lime bags, 3 posh yellow bags, 5 pale yellow bags.
wavy aqua bags contain 1 faded silver bag.
striped magenta bags contain 2 mirrored aqua bags, 1 dotted gold bag.
dull maroon bags contain 2 drab maroon bags, 3 shiny lavender bags.
dotted yellow bags contain 2 dark indigo bags, 3 shiny gold bags, 2 muted coral bags, 5 pale maroon bags.
shiny plum bags contain 5 vibrant coral bags, 3 dotted gray bags, 1 pale lime bag, 4 plaid green bags.
faded violet bags contain 1 dark crimson bag, 5 pale gray bags, 1 pale olive bag.
faded olive bags contain 4 wavy indigo bags.
drab green bags contain 1 vibrant yellow bag, 1 posh tomato bag, 1 dull yellow bag, 5 shiny bronze bags.
shiny black bags contain 3 clear gray bags, 4 dim tomato bags.
dull coral bags contain 1 pale cyan bag, 1 light brown bag.
mirrored coral bags contain 1 dark tomato bag, 3 vibrant coral bags, 3 posh lime bags, 3 pale fuchsia bags.
faded white bags contain 1 light silver bag, 3 striped turquoise bags, 3 dark green bags, 3 posh orange bags.
wavy orange bags contain 2 dotted indigo bags, 1 vibrant indigo bag, 4 dull teal bags, 3 striped gold bags.
plaid beige bags contain 4 pale blue bags.
plaid green bags contain 1 mirrored bronze bag, 3 mirrored purple bags, 5 shiny coral bags, 5 posh yellow bags.
mirrored tan bags contain 1 faded blue bag.
plaid violet bags contain 2 faded blue bags.
shiny coral bags contain 3 vibrant bronze bags, 5 dull salmon bags.
dim beige bags contain 3 plaid magenta bags, 2 light gold bags, 1 shiny blue bag, 5 bright teal bags.
pale coral bags contain 1 faded orange bag, 3 dark turquoise bags.
mirrored crimson bags contain 2 dull silver bags.
dim tan bags contain 3 light tomato bags, 4 dotted chartreuse bags.
striped cyan bags contain 1 faded violet bag, 3 dotted lavender bags, 4 light lavender bags, 1 drab fuchsia bag.
dull turquoise bags contain 3 bright teal bags, 1 faded blue bag, 5 bright magenta bags.
bright magenta bags contain 2 wavy indigo bags.
wavy magenta bags contain 2 dim cyan bags, 2 dim violet bags, 4 dark salmon bags, 2 vibrant bronze bags.
wavy lavender bags contain 4 pale cyan bags, 4 vibrant yellow bags, 1 vibrant white bag.
pale green bags contain 3 dark indigo bags, 3 shiny gray bags.
dark aqua bags contain 3 faded turquoise bags, 3 vibrant indigo bags, 3 dull salmon bags, 1 dotted gray bag.
dim gray bags contain 1 faded silver bag, 3 muted red bags, 2 wavy orange bags, 1 posh beige bag.
clear black bags contain 4 vibrant plum bags, 3 bright brown bags, 2 dark gray bags, 1 clear teal bag.
drab indigo bags contain 2 dim aqua bags, 5 vibrant blue bags, 5 dull salmon bags, 5 drab violet bags.
mirrored orange bags contain 5 posh olive bags, 5 dotted tan bags, 5 mirrored salmon bags, 4 posh red bags.
plaid fuchsia bags contain 4 drab green bags, 3 mirrored tomato bags, 5 light white bags, 5 muted cyan bags.
shiny teal bags contain 5 bright white bags, 5 mirrored red bags.
light maroon bags contain 1 drab cyan bag.
vibrant fuchsia bags contain 5 drab crimson bags.
faded gray bags contain 3 mirrored aqua bags.
wavy chartreuse bags contain 4 muted orange bags, 2 clear blue bags, 1 muted tan bag, 2 clear lime bags.
dark yellow bags contain 5 wavy orange bags, 4 mirrored beige bags.
wavy green bags contain 5 vibrant tan bags.
dotted magenta bags contain 2 shiny red bags.
faded crimson bags contain 4 muted gray bags, 3 dim black bags.
dull cyan bags contain 3 posh tomato bags, 3 drab indigo bags, 2 vibrant blue bags.
posh aqua bags contain 2 posh yellow bags.
muted white bags contain 1 faded tomato bag.
vibrant tomato bags contain 5 vibrant white bags.
dim black bags contain 2 drab indigo bags.
mirrored lime bags contain 4 dull brown bags.
drab plum bags contain 5 plaid green bags, 5 striped lime bags, 1 dotted teal bag.
dark olive bags contain 2 wavy plum bags.
pale lime bags contain 4 dark orange bags, 5 dim black bags, 1 dull white bag.
striped fuchsia bags contain 4 bright orange bags, 3 pale maroon bags.
plaid maroon bags contain 1 dull coral bag.
clear white bags contain 5 plaid lavender bags, 1 faded turquoise bag, 1 mirrored tomato bag, 5 faded silver bags.
dark tan bags contain 4 striped gold bags.
muted beige bags contain 3 dark gold bags.
faded cyan bags contain 5 vibrant yellow bags, 2 shiny plum bags.
drab beige bags contain 4 dotted orange bags, 2 dark black bags, 2 bright olive bags, 4 dark gold bags.
drab turquoise bags contain 2 clear blue bags, 2 muted cyan bags, 4 faded turquoise bags.
striped green bags contain 4 striped plum bags, 5 dark gold bags.
bright coral bags contain 4 posh teal bags, 3 shiny crimson bags, 5 dim magenta bags.
plaid plum bags contain 5 wavy teal bags, 3 mirrored beige bags, 3 faded silver bags.
drab red bags contain 3 light magenta bags, 1 drab salmon bag, 4 shiny tan bags.
dotted coral bags contain 1 faded black bag.
wavy coral bags contain 2 posh white bags, 1 shiny gold bag, 1 striped aqua bag.
bright aqua bags contain 1 dotted olive bag, 1 striped gold bag.
bright purple bags contain 1 vibrant purple bag, 2 clear orange bags.
vibrant lime bags contain 1 dotted turquoise bag, 5 dotted magenta bags, 5 light black bags.
pale indigo bags contain 5 vibrant chartreuse bags, 5 clear white bags, 1 light lime bag, 3 dull silver bags.
dull bronze bags contain 4 dark indigo bags, 3 plaid bronze bags, 2 pale yellow bags.
wavy plum bags contain 5 bright chartreuse bags, 5 pale maroon bags, 1 clear lime bag.
dull purple bags contain 4 muted gray bags.
drab brown bags contain 3 drab lime bags, 3 dull silver bags, 2 dark gold bags, 3 drab beige bags.
dotted crimson bags contain 1 dotted black bag.
bright violet bags contain 5 dull olive bags, 2 striped turquoise bags, 3 vibrant aqua bags, 4 clear maroon bags.
dark fuchsia bags contain 2 wavy purple bags, 4 pale tan bags, 2 vibrant coral bags, 5 dark brown bags.
wavy maroon bags contain 2 striped fuchsia bags.
light white bags contain 2 dull green bags.
dotted indigo bags contain 2 clear lavender bags, 4 shiny coral bags.
shiny turquoise bags contain 1 posh crimson bag, 1 posh salmon bag, 4 vibrant bronze bags.
dull fuchsia bags contain 1 mirrored red bag, 3 posh gray bags, 4 plaid maroon bags, 4 clear gold bags.
light magenta bags contain 3 plaid tan bags.
faded beige bags contain 5 bright indigo bags, 1 pale yellow bag, 2 vibrant yellow bags.
mirrored gold bags contain 1 mirrored blue bag, 2 posh fuchsia bags, 5 dark red bags.
light salmon bags contain 5 muted black bags, 5 dull blue bags, 2 light aqua bags, 1 pale tomato bag.
wavy gray bags contain 3 light red bags.
bright gray bags contain 4 dotted blue bags.
pale white bags contain 3 drab aqua bags, 3 wavy maroon bags, 4 shiny blue bags, 4 dotted lime bags.
dim turquoise bags contain 3 striped crimson bags, 2 faded silver bags.
clear silver bags contain 4 plaid gray bags, 1 dark black bag.
bright brown bags contain 5 faded chartreuse bags.
muted olive bags contain 1 dotted red bag, 1 posh green bag.
striped silver bags contain 3 dotted tomato bags, 1 plaid cyan bag, 4 clear white bags, 5 mirrored indigo bags.
drab yellow bags contain 5 wavy silver bags, 5 dark orange bags, 3 dark brown bags, 2 bright magenta bags.
vibrant red bags contain 5 vibrant bronze bags, 2 posh tomato bags, 3 dull lime bags, 2 striped violet bags.
dotted maroon bags contain 2 vibrant silver bags.
faded teal bags contain 4 clear lavender bags.
dim crimson bags contain 1 faded chartreuse bag.
faded orange bags contain 3 bright magenta bags, 4 mirrored brown bags.
dotted lime bags contain 4 light brown bags, 1 bright white bag, 5 dim lime bags.
posh turquoise bags contain 4 striped indigo bags, 2 dim white bags.
dark beige bags contain 1 dark olive bag.
dotted black bags contain 2 posh turquoise bags, 3 wavy indigo bags, 4 dotted violet bags.
dotted fuchsia bags contain 5 dark black bags, 1 clear lime bag.
plaid magenta bags contain 3 posh maroon bags, 4 drab green bags.
muted coral bags contain 5 light brown bags, 2 posh chartreuse bags, 1 vibrant tan bag.
striped yellow bags contain 1 light brown bag.
posh violet bags contain 3 bright magenta bags.
dark brown bags contain 2 faded blue bags, 2 dim aqua bags, 5 posh yellow bags, 5 drab violet bags.
drab fuchsia bags contain 3 shiny aqua bags, 2 plaid lavender bags, 1 muted cyan bag.
striped maroon bags contain 4 clear lime bags, 5 striped gold bags, 3 clear plum bags, 3 dull tan bags.
muted yellow bags contain 1 shiny turquoise bag.
bright silver bags contain 5 striped red bags, 4 dark aqua bags.
light beige bags contain 1 shiny gold bag, 4 dark orange bags.
light coral bags contain 3 clear beige bags, 4 shiny maroon bags.
plaid cyan bags contain 4 pale yellow bags, 1 drab beige bag, 1 bright chartreuse bag, 1 clear coral bag.
shiny tomato bags contain 3 muted indigo bags, 2 faded indigo bags.
dark maroon bags contain 1 posh white bag, 2 wavy aqua bags, 5 muted brown bags.
plaid teal bags contain 1 dull lime bag, 3 faded blue bags, 4 drab cyan bags, 3 clear cyan bags.
vibrant magenta bags contain 2 wavy tan bags.
pale tomato bags contain 2 bright silver bags, 1 dull teal bag, 5 dull lime bags, 5 muted aqua bags.
dim orange bags contain 1 vibrant white bag, 5 bright tomato bags.
plaid crimson bags contain 2 wavy brown bags, 2 striped maroon bags, 4 dark magenta bags.
posh beige bags contain 1 faded blue bag.
dotted gray bags contain 3 drab silver bags, 4 faded silver bags, 2 light coral bags.
shiny blue bags contain 1 bright magenta bag.
pale gold bags contain 1 light coral bag, 5 vibrant aqua bags, 2 wavy plum bags, 5 dim lavender bags.
striped chartreuse bags contain 3 wavy bronze bags.
vibrant lavender bags contain 3 clear bronze bags, 4 dull yellow bags.
drab tomato bags contain 1 mirrored aqua bag, 3 drab yellow bags, 2 muted white bags.
vibrant brown bags contain 3 pale white bags.
dark salmon bags contain 3 dull salmon bags, 5 drab violet bags, 4 striped indigo bags.
dotted tomato bags contain 5 muted fuchsia bags, 3 pale aqua bags, 1 dim aqua bag, 3 dull yellow bags.
wavy gold bags contain 2 light aqua bags, 3 dull white bags.
pale salmon bags contain 2 dotted lime bags, 2 shiny gold bags.
bright cyan bags contain no other bags.
clear maroon bags contain 3 mirrored white bags, 5 faded blue bags, 3 drab yellow bags, 4 light aqua bags.
dull indigo bags contain 3 vibrant indigo bags, 4 pale yellow bags.
plaid black bags contain 2 faded orange bags, 2 drab aqua bags.
striped salmon bags contain 5 drab violet bags, 3 dark brown bags, 4 dull white bags, 4 clear lime bags.
wavy bronze bags contain 4 wavy silver bags, 4 light bronze bags, 2 shiny coral bags.
dull lime bags contain 1 dull green bag, 3 dark orange bags, 4 shiny maroon bags.
drab chartreuse bags contain 2 clear lime bags, 3 dim cyan bags, 3 faded cyan bags.
dotted lavender bags contain 5 dull indigo bags.
striped aqua bags contain 4 dotted lime bags, 4 dotted brown bags.
plaid gold bags contain 2 dim lime bags, 1 dull bronze bag, 5 faded fuchsia bags, 2 drab yellow bags.
faded coral bags contain 1 vibrant green bag, 1 drab yellow bag, 4 wavy teal bags.
faded bronze bags contain 2 dim lime bags, 4 wavy magenta bags.
striped turquoise bags contain 5 mirrored magenta bags.
plaid yellow bags contain 4 mirrored maroon bags, 1 dim silver bag, 3 striped gold bags.
vibrant coral bags contain 2 mirrored white bags, 1 dull lime bag.
posh black bags contain 3 clear beige bags, 2 drab gold bags, 3 mirrored indigo bags, 1 dim black bag.
wavy beige bags contain 1 vibrant purple bag, 4 light green bags, 3 light red bags.
pale blue bags contain no other bags.
light silver bags contain 1 posh chartreuse bag, 5 vibrant tan bags.
mirrored teal bags contain 1 dark tomato bag, 1 wavy teal bag, 3 vibrant maroon bags, 5 pale red bags.
pale gray bags contain 3 mirrored bronze bags, 3 faded crimson bags, 3 dotted red bags, 2 striped yellow bags.
vibrant chartreuse bags contain 4 mirrored yellow bags.
clear gold bags contain 1 bright magenta bag, 3 dotted olive bags, 2 posh yellow bags, 2 dull blue bags.
wavy tomato bags contain 3 shiny coral bags, 4 posh chartreuse bags, 2 light aqua bags, 3 dark orange bags.
posh yellow bags contain 5 vibrant bronze bags, 5 faded lime bags, 4 posh chartreuse bags, 2 bright cyan bags.
dark lime bags contain 3 drab tomato bags.
pale crimson bags contain 2 pale maroon bags, 5 clear lime bags.
mirrored beige bags contain 5 clear maroon bags, 2 wavy maroon bags, 2 drab indigo bags.
light lime bags contain 1 light bronze bag.
vibrant silver bags contain 4 pale tan bags.
dull plum bags contain 3 vibrant yellow bags, 1 striped plum bag, 2 dim aqua bags.
mirrored chartreuse bags contain 3 wavy black bags.
plaid white bags contain 5 dark aqua bags, 1 muted orange bag.
bright fuchsia bags contain 5 vibrant tan bags.
drab gold bags contain 1 faded blue bag.
bright tomato bags contain 1 dim white bag, 1 drab yellow bag.
dotted chartreuse bags contain 4 dim aqua bags.
faded fuchsia bags contain 2 light tomato bags, 1 posh yellow bag, 4 faded chartreuse bags, 1 pale blue bag.
pale silver bags contain 1 drab orange bag, 5 clear salmon bags, 2 plaid violet bags.
mirrored purple bags contain 4 clear lime bags.
mirrored green bags contain 4 dim aqua bags.
vibrant violet bags contain 3 dark orange bags.
plaid blue bags contain 4 striped crimson bags, 2 vibrant lavender bags, 4 faded plum bags, 5 dull salmon bags.
muted orange bags contain 1 bright indigo bag, 4 posh yellow bags, 4 bright cyan bags.
vibrant blue bags contain no other bags.
light gray bags contain 2 striped green bags, 3 clear blue bags, 3 shiny coral bags.
dark plum bags contain 2 faded crimson bags, 2 dark salmon bags, 2 shiny gray bags.
drab violet bags contain no other bags.
dim chartreuse bags contain 3 dotted brown bags, 5 faded plum bags, 4 wavy tomato bags.
wavy cyan bags contain 5 faded aqua bags, 1 striped blue bag, 5 posh olive bags.
clear teal bags contain 1 mirrored olive bag, 5 dim tomato bags.
light violet bags contain 2 posh gray bags, 5 dotted cyan bags.
dark tomato bags contain 5 plaid white bags, 1 wavy indigo bag.
wavy purple bags contain 3 drab green bags.
pale black bags contain 3 drab indigo bags.
drab orange bags contain 5 striped turquoise bags.
mirrored white bags contain 4 dim aqua bags.
light purple bags contain 2 wavy green bags, 5 bright brown bags, 5 muted crimson bags, 3 dotted purple bags.
shiny orange bags contain 2 muted indigo bags.
bright green bags contain 3 mirrored tomato bags, 3 dim bronze bags.
light black bags contain 4 dotted yellow bags, 3 dark turquoise bags, 3 vibrant blue bags, 4 clear maroon bags.
shiny fuchsia bags contain 1 clear cyan bag, 1 striped silver bag, 2 bright black bags.
striped bronze bags contain 1 dotted lime bag, 4 dark chartreuse bags, 3 shiny bronze bags.
light tomato bags contain 5 dull salmon bags.
drab crimson bags contain 3 clear coral bags, 5 drab bronze bags, 3 clear black bags, 2 pale maroon bags.
vibrant aqua bags contain 2 dark black bags, 2 mirrored green bags.
clear bronze bags contain 2 mirrored aqua bags, 4 dark green bags, 5 dotted red bags.
bright chartreuse bags contain 1 dim lime bag.
mirrored fuchsia bags contain 1 faded chartreuse bag, 3 pale olive bags, 4 pale cyan bags, 2 muted beige bags.
dim plum bags contain 5 dotted bronze bags, 2 drab yellow bags, 5 shiny blue bags, 5 dotted cyan bags.
dark indigo bags contain 4 dark brown bags, 2 dull cyan bags, 4 faded chartreuse bags.
pale fuchsia bags contain 1 dim purple bag, 5 dark gray bags, 2 dim brown bags, 3 wavy tomato bags.
dull green bags contain 2 dark brown bags, 4 drab indigo bags, 1 mirrored green bag, 2 drab lime bags.
dotted aqua bags contain 2 plaid lavender bags.
striped white bags contain 2 drab cyan bags.
dim maroon bags contain 5 dull indigo bags, 3 pale aqua bags.
shiny olive bags contain 2 wavy aqua bags, 4 mirrored crimson bags.
dotted blue bags contain 4 bright brown bags, 5 dotted black bags, 2 faded teal bags, 3 pale yellow bags.
plaid salmon bags contain 5 dull purple bags, 4 faded lime bags, 2 striped aqua bags.
dotted gold bags contain 4 bright black bags.
dark black bags contain 2 dull blue bags.
posh cyan bags contain 5 shiny yellow bags.
posh green bags contain 4 plaid beige bags.
dim fuchsia bags contain 1 dull salmon bag, 1 dull olive bag, 5 shiny yellow bags.
wavy brown bags contain 1 clear plum bag.
plaid red bags contain 1 dim indigo bag, 4 muted blue bags.
light chartreuse bags contain 1 dotted yellow bag, 4 clear gold bags, 1 bright magenta bag, 5 dark tan bags.
vibrant white bags contain 1 muted gray bag.
light red bags contain 1 dark bronze bag, 3 shiny lavender bags.
light bronze bags contain 1 wavy silver bag, 3 plaid lavender bags, 4 drab violet bags, 5 mirrored green bags.
dark magenta bags contain 5 pale aqua bags.
striped gray bags contain 1 posh cyan bag.
dark cyan bags contain 2 drab fuchsia bags, 1 clear cyan bag, 2 plaid gold bags.
plaid silver bags contain 3 bright beige bags.
shiny gray bags contain 2 dull blue bags, 2 faded chartreuse bags.
shiny cyan bags contain 1 striped salmon bag, 4 vibrant tan bags.
plaid gray bags contain 5 wavy aqua bags, 5 vibrant yellow bags, 1 mirrored indigo bag, 1 faded silver bag.
drab salmon bags contain 2 muted aqua bags.
posh gray bags contain 3 posh tan bags, 4 wavy indigo bags, 5 dark gray bags.
shiny brown bags contain 3 muted gray bags, 2 muted tomato bags.
drab maroon bags contain 1 bright chartreuse bag, 2 dark aqua bags, 3 dim black bags, 5 wavy silver bags.
light yellow bags contain 5 striped purple bags, 1 faded fuchsia bag, 2 plaid gold bags, 2 dotted olive bags.
clear salmon bags contain 3 faded chartreuse bags, 5 posh salmon bags, 5 mirrored red bags.
bright orange bags contain 2 posh tomato bags.
clear purple bags contain 3 dim olive bags, 2 mirrored violet bags, 1 muted tomato bag.
dull brown bags contain 2 faded lime bags, 5 drab violet bags, 1 mirrored green bag.
vibrant salmon bags contain 5 muted green bags, 4 faded bronze bags, 1 vibrant indigo bag.
dull silver bags contain 2 light green bags.
vibrant beige bags contain 2 dull cyan bags.
muted salmon bags contain 3 bright aqua bags, 2 pale maroon bags, 1 light aqua bag.
clear tomato bags contain 5 clear lavender bags, 4 dull tan bags, 2 dotted turquoise bags.
striped brown bags contain 3 drab lavender bags, 5 clear lavender bags.
dotted turquoise bags contain 1 light tan bag, 2 dull tomato bags.
posh orange bags contain 1 mirrored cyan bag, 3 shiny cyan bags, 5 bright beige bags, 4 striped lime bags.
dull olive bags contain 5 light silver bags, 3 wavy olive bags, 3 bright magenta bags, 4 mirrored bronze bags.
posh maroon bags contain 2 striped violet bags, 3 plaid lavender bags, 2 clear beige bags.
pale chartreuse bags contain 1 faded lime bag, 2 light aqua bags, 1 muted coral bag.
dim magenta bags contain 1 faded chartreuse bag.
striped tomato bags contain 5 plaid lavender bags, 1 posh beige bag, 1 clear lavender bag, 4 muted indigo bags.
shiny aqua bags contain 1 wavy gold bag, 1 plaid gold bag.
posh magenta bags contain 3 shiny beige bags, 3 clear gold bags.
muted black bags contain 5 dull yellow bags, 4 faded beige bags.
plaid tomato bags contain 3 dotted crimson bags, 3 shiny tan bags.
dim red bags contain 4 light beige bags, 5 shiny gold bags, 5 posh blue bags, 4 dotted indigo bags.
drab lavender bags contain 4 striped indigo bags, 3 vibrant tan bags, 3 plaid aqua bags, 3 plaid bronze bags.
vibrant orange bags contain 3 dotted lavender bags, 1 posh purple bag, 5 dull cyan bags.
light lavender bags contain 4 drab cyan bags, 5 posh maroon bags, 3 dotted red bags.
dark red bags contain 4 faded plum bags.
plaid chartreuse bags contain 1 posh blue bag.
clear tan bags contain 2 dull purple bags, 2 plaid purple bags.
muted bronze bags contain 2 plaid salmon bags, 2 muted crimson bags, 4 dotted olive bags.
pale lavender bags contain 1 vibrant plum bag, 5 vibrant yellow bags.
posh crimson bags contain 2 light brown bags.
dark blue bags contain 5 mirrored tomato bags, 1 drab coral bag, 3 wavy purple bags.
dim silver bags contain 5 drab yellow bags, 5 posh purple bags, 3 light tomato bags, 3 wavy blue bags.
faded maroon bags contain 3 dark salmon bags, 3 faded aqua bags, 1 clear olive bag, 2 clear brown bags.
dark bronze bags contain 3 dim white bags, 3 bright cyan bags, 4 clear olive bags, 2 faded crimson bags.
pale orange bags contain 1 light yellow bag, 5 bright olive bags, 2 pale olive bags.
bright maroon bags contain 3 bright white bags, 1 dotted orange bag.
dull yellow bags contain 5 light bronze bags, 4 faded beige bags, 2 dark orange bags, 2 dull cyan bags.
wavy violet bags contain 2 dim aqua bags, 5 posh chartreuse bags.
dull beige bags contain 1 shiny bronze bag, 1 striped crimson bag, 5 plaid indigo bags.
clear yellow bags contain 3 dim brown bags, 4 dotted lavender bags.
clear red bags contain 3 shiny indigo bags, 1 vibrant plum bag, 1 dim fuchsia bag, 5 striped teal bags.
dotted silver bags contain 5 wavy bronze bags, 4 vibrant cyan bags, 2 dull blue bags, 2 posh yellow bags.
shiny maroon bags contain 3 muted indigo bags, 5 light white bags, 3 posh yellow bags, 4 posh tomato bags.
dark lavender bags contain 1 vibrant tan bag, 5 plaid lime bags.
vibrant plum bags contain 1 faded teal bag, 5 shiny plum bags, 3 bright fuchsia bags, 1 shiny coral bag.
bright beige bags contain 2 mirrored green bags.
clear beige bags contain 1 posh chartreuse bag, 4 drab cyan bags, 3 light beige bags.
mirrored yellow bags contain 3 faded beige bags, 2 shiny cyan bags, 2 wavy silver bags, 2 dull yellow bags.
shiny lavender bags contain 4 clear cyan bags.
muted cyan bags contain 5 posh tomato bags, 2 drab gray bags, 1 dull indigo bag, 3 pale blue bags.
light teal bags contain 3 dull yellow bags, 2 striped fuchsia bags.
vibrant black bags contain 3 bright white bags.
light turquoise bags contain 4 muted indigo bags, 3 mirrored silver bags, 5 dark tomato bags.
light gold bags contain 5 mirrored magenta bags.
dull lavender bags contain 2 wavy chartreuse bags, 3 dull crimson bags, 3 pale tomato bags.
light olive bags contain 4 dark orange bags, 4 clear olive bags.
pale red bags contain 3 plaid lavender bags, 4 plaid beige bags, 1 plaid coral bag, 1 shiny chartreuse bag.
wavy lime bags contain 2 muted coral bags, 2 clear teal bags, 3 dull maroon bags, 4 dim lime bags.
pale bronze bags contain 4 vibrant cyan bags.
wavy tan bags contain 3 drab lavender bags, 2 dotted aqua bags, 2 bright white bags.
faded aqua bags contain 4 wavy black bags.
mirrored maroon bags contain 2 bright white bags, 4 dotted maroon bags, 5 light coral bags, 5 striped turquoise bags.
striped teal bags contain 4 bright chartreuse bags, 3 striped indigo bags, 5 dark lavender bags, 4 posh white bags.
striped coral bags contain 2 striped lime bags, 2 wavy purple bags, 1 striped plum bag.
mirrored brown bags contain 1 faded lime bag, 5 drab indigo bags, 4 bright white bags.
faded tomato bags contain 1 dim indigo bag, 5 shiny plum bags, 1 drab yellow bag, 4 drab indigo bags.
shiny tan bags contain 3 dark turquoise bags, 3 muted aqua bags.
posh tomato bags contain no other bags.
clear chartreuse bags contain 4 shiny indigo bags, 2 dim cyan bags.
bright olive bags contain 2 faded blue bags.
dull tan bags contain 4 striped red bags.
muted aqua bags contain 3 dark black bags, 4 faded crimson bags, 2 plaid white bags, 3 bright black bags.
shiny red bags contain 1 dull blue bag, 2 bright white bags.
bright white bags contain 2 faded blue bags.
dotted salmon bags contain 1 dotted indigo bag.
faded turquoise bags contain 5 dark orange bags.
posh plum bags contain 5 faded white bags.
wavy olive bags contain 5 light aqua bags, 1 pale lavender bag, 5 pale green bags.
light crimson bags contain 1 muted indigo bag, 2 plaid beige bags.
mirrored gray bags contain 1 posh lavender bag, 5 wavy indigo bags, 4 dotted crimson bags.
striped crimson bags contain 2 striped salmon bags, 5 mirrored magenta bags, 4 drab fuchsia bags.
posh olive bags contain 4 posh fuchsia bags, 1 drab brown bag, 4 dotted red bags.
clear brown bags contain 1 striped plum bag.
dark coral bags contain 1 shiny gold bag, 2 faded turquoise bags.
mirrored violet bags contain 1 dotted gold bag, 3 striped salmon bags, 3 faded crimson bags.
mirrored black bags contain 5 dull cyan bags, 3 wavy silver bags, 1 posh fuchsia bag.
bright blue bags contain 3 shiny coral bags.
light blue bags contain 1 striped indigo bag, 4 dark aqua bags, 3 mirrored tomato bags, 2 vibrant blue bags.
dark gray bags contain 2 posh chartreuse bags, 2 mirrored tan bags.
wavy silver bags contain no other bags.
dark crimson bags contain 5 mirrored blue bags, 4 drab green bags, 5 plaid purple bags, 4 clear beige bags.
wavy indigo bags contain 4 faded lime bags, 4 mirrored green bags, 2 posh tomato bags.
shiny purple bags contain 1 pale gold bag, 1 dull tomato bag.
dotted olive bags contain 1 drab cyan bag, 4 shiny coral bags.
faded plum bags contain 5 dotted cyan bags.
striped orange bags contain 2 dotted purple bags, 2 dotted indigo bags.
dim tomato bags contain 5 mirrored green bags.
shiny gold bags contain 1 dull white bag, 4 dark orange bags.
posh coral bags contain 2 pale olive bags, 5 clear gold bags, 5 posh turquoise bags, 5 wavy olive bags.
muted crimson bags contain 3 dim black bags, 1 vibrant bronze bag, 3 light black bags.
mirrored magenta bags contain 5 wavy silver bags, 5 shiny red bags, 5 pale black bags.
dull white bags contain 5 bright indigo bags, 3 posh tomato bags, 2 clear lime bags, 5 drab lime bags.
pale tan bags contain 5 wavy purple bags, 5 muted orange bags, 5 dark red bags.
muted tomato bags contain 1 dim maroon bag, 2 dull yellow bags, 3 vibrant bronze bags, 5 dull blue bags.
pale cyan bags contain 2 wavy violet bags, 1 clear maroon bag, 2 bright orange bags.
posh white bags contain 4 dim aqua bags, 5 posh chartreuse bags, 4 drab gold bags.
wavy crimson bags contain 2 posh maroon bags, 5 clear cyan bags, 1 shiny maroon bag, 2 plaid bronze bags.
drab purple bags contain 1 shiny gold bag, 5 dotted bronze bags, 4 drab gray bags.
dark green bags contain 3 bright chartreuse bags, 3 pale green bags, 5 dotted crimson bags, 2 clear plum bags.
dim cyan bags contain 2 shiny gold bags, 3 light silver bags, 2 vibrant blue bags.
pale maroon bags contain 5 mirrored brown bags, 3 vibrant indigo bags.
vibrant turquoise bags contain 3 muted cyan bags, 3 dull white bags.
dim olive bags contain 5 drab salmon bags.
bright salmon bags contain 3 striped turquoise bags, 2 dark salmon bags.
faded black bags contain 2 striped plum bags, 3 muted indigo bags.
clear aqua bags contain 2 vibrant bronze bags, 1 vibrant indigo bag, 4 mirrored tomato bags.
muted turquoise bags contain 3 muted cyan bags, 2 light brown bags, 4 light violet bags, 1 posh salmon bag.
shiny green bags contain 2 posh tomato bags.
mirrored indigo bags contain 3 clear lavender bags, 3 muted magenta bags, 3 posh yellow bags.
dim white bags contain 1 clear gold bag.
dark teal bags contain 4 light teal bags, 3 mirrored aqua bags, 5 faded teal bags.
wavy yellow bags contain 2 dull chartreuse bags, 5 dull yellow bags, 3 vibrant gold bags.
light fuchsia bags contain 5 plaid white bags, 2 mirrored magenta bags, 5 striped turquoise bags, 5 light bronze bags.
dotted plum bags contain 1 muted orange bag.
dark purple bags contain 3 dark brown bags.
vibrant yellow bags contain 4 posh chartreuse bags, 1 vibrant bronze bag.
bright gold bags contain 4 dotted tomato bags.
faded brown bags contain 1 clear cyan bag, 4 faded purple bags.
dotted orange bags contain 5 vibrant tan bags.
light green bags contain 3 muted orange bags, 4 muted gray bags, 3 faded silver bags, 3 shiny blue bags.
dark silver bags contain 5 dark tan bags, 4 light silver bags.
vibrant teal bags contain 3 shiny bronze bags, 5 mirrored green bags, 3 plaid aqua bags, 1 bright olive bag.
mirrored plum bags contain 1 dark crimson bag, 2 striped gray bags, 3 posh white bags.
striped red bags contain 1 dotted black bag, 1 mirrored magenta bag, 3 shiny blue bags.
shiny beige bags contain 5 striped salmon bags, 2 bright cyan bags, 4 striped crimson bags.
clear orange bags contain 1 faded chartreuse bag.
striped beige bags contain 2 plaid green bags, 1 bright turquoise bag, 1 drab chartreuse bag.
muted fuchsia bags contain 4 dotted violet bags, 1 vibrant silver bag, 1 shiny cyan bag, 1 vibrant yellow bag.
dim lavender bags contain 4 muted cyan bags, 1 striped violet bag.
dull red bags contain 5 posh bronze bags, 3 clear turquoise bags, 1 bright green bag, 1 vibrant white bag.
mirrored blue bags contain 5 faded olive bags, 5 light aqua bags.
bright crimson bags contain 1 posh olive bag, 5 faded white bags.
drab silver bags contain 1 muted orange bag, 3 dull green bags, 3 dim aqua bags, 2 striped fuchsia bags.
clear fuchsia bags contain 4 drab gold bags.
vibrant olive bags contain 2 light brown bags, 1 vibrant red bag.
dim salmon bags contain 3 dark red bags, 5 dark lavender bags, 2 dotted turquoise bags, 2 light magenta bags.
dull crimson bags contain 5 vibrant silver bags.
wavy blue bags contain 5 faded turquoise bags, 4 clear beige bags, 4 light green bags, 5 dark gray bags.
mirrored aqua bags contain 4 plaid bronze bags, 2 light coral bags, 4 faded orange bags, 5 posh tomato bags.
dim blue bags contain 1 bright silver bag, 4 bright cyan bags.
muted green bags contain 4 dotted tomato bags.
pale violet bags contain 3 plaid fuchsia bags, 3 light coral bags, 4 dark gold bags.
vibrant purple bags contain 3 vibrant white bags, 1 posh beige bag.
light tan bags contain 4 pale maroon bags, 4 muted fuchsia bags, 3 mirrored aqua bags.
dotted green bags contain 4 faded green bags, 2 striped black bags, 4 dull brown bags, 3 faded aqua bags.
bright bronze bags contain 2 drab bronze bags, 2 pale beige bags.
dull salmon bags contain 1 faded blue bag, 2 wavy silver bags, 3 posh chartreuse bags.
pale teal bags contain 2 dotted beige bags.
vibrant indigo bags contain 3 pale yellow bags, 4 vibrant bronze bags, 4 bright cyan bags.
shiny silver bags contain 2 faded brown bags, 2 dotted lime bags, 5 faded chartreuse bags.
bright red bags contain 1 posh turquoise bag, 4 clear aqua bags.
posh teal bags contain 2 pale beige bags.
clear lime bags contain 4 vibrant blue bags, 2 wavy silver bags, 5 pale yellow bags.
faded purple bags contain 4 faded black bags.
plaid aqua bags contain 4 bright cyan bags, 2 pale black bags, 3 dull salmon bags.
dark violet bags contain 4 muted green bags, 2 dotted bronze bags.
dull aqua bags contain 5 posh aqua bags, 2 clear plum bags, 2 dim maroon bags.
shiny chartreuse bags contain 4 dim turquoise bags, 2 posh fuchsia bags, 3 dark blue bags, 4 shiny aqua bags.
pale plum bags contain 4 drab black bags, 4 vibrant purple bags, 1 muted turquoise bag.
shiny indigo bags contain 2 wavy magenta bags.
pale brown bags contain 3 posh purple bags, 5 light aqua bags, 3 striped maroon bags.
posh tan bags contain 4 striped salmon bags, 4 dark brown bags.
plaid lime bags contain 3 drab green bags.
clear olive bags contain 4 dark black bags, 4 drab indigo bags, 3 clear lime bags.
striped olive bags contain 3 bright plum bags, 5 dotted gray bags.
plaid turquoise bags contain 4 mirrored magenta bags, 3 clear lime bags, 5 dark turquoise bags, 4 dotted olive bags.
dotted violet bags contain 4 striped fuchsia bags, 4 wavy tomato bags, 3 dim white bags, 2 clear gold bags.
muted silver bags contain 5 plaid tan bags, 2 dim olive bags, 4 dull cyan bags, 5 posh violet bags.
striped blue bags contain 1 striped chartreuse bag.
muted plum bags contain 2 posh tomato bags, 4 faded lime bags, 5 dull bronze bags, 5 wavy lime bags.
faded yellow bags contain 2 dotted tomato bags, 3 muted yellow bags, 1 dim bronze bag.
dull chartreuse bags contain 2 striped red bags.
dotted tan bags contain 4 light aqua bags.
bright indigo bags contain 2 posh tomato bags, 5 vibrant indigo bags, 2 posh chartreuse bags, 1 wavy silver bag.
bright plum bags contain 3 light green bags, 4 wavy gray bags, 3 mirrored red bags.
wavy teal bags contain 4 vibrant blue bags, 4 posh turquoise bags.
striped indigo bags contain 4 shiny gold bags, 3 dim aqua bags, 5 pale black bags.
posh brown bags contain 4 muted crimson bags.
dotted teal bags contain 4 bright beige bags, 3 posh olive bags, 2 dull coral bags.
dim yellow bags contain 2 shiny gray bags, 3 faded fuchsia bags, 3 wavy tomato bags, 2 light brown bags.
faded magenta bags contain 5 pale silver bags.
dull magenta bags contain 5 dull fuchsia bags, 1 drab green bag, 1 wavy red bag, 1 wavy fuchsia bag.
faded salmon bags contain 2 faded gray bags.
dim bronze bags contain 4 dark gold bags, 3 bright orange bags, 2 striped indigo bags.
faded tan bags contain 2 striped salmon bags, 5 muted violet bags, 4 dotted violet bags, 4 light green bags.
posh salmon bags contain 1 dull green bag, 2 bright cyan bags, 1 mirrored bronze bag.
light brown bags contain 3 posh chartreuse bags, 5 mirrored bronze bags.
posh silver bags contain 1 dim yellow bag, 1 clear tan bag.
muted brown bags contain 5 dotted maroon bags, 4 shiny yellow bags, 5 dark orange bags.
drab coral bags contain 4 faded chartreuse bags, 1 vibrant bronze bag, 5 shiny bronze bags, 3 vibrant tan bags.
vibrant maroon bags contain 2 striped yellow bags, 2 muted indigo bags, 3 muted aqua bags.
dim gold bags contain 2 bright violet bags.
plaid tan bags contain 5 muted gray bags, 5 muted coral bags, 2 wavy green bags.
dim coral bags contain 1 clear bronze bag, 2 dark gold bags, 3 drab teal bags.
vibrant gold bags contain 3 dark chartreuse bags, 1 posh purple bag, 1 striped white bag, 2 dotted chartreuse bags.
mirrored olive bags contain 4 striped fuchsia bags, 2 wavy indigo bags, 3 drab gold bags.
pale aqua bags contain 5 faded chartreuse bags, 3 faded crimson bags, 5 dotted orange bags, 3 light brown bags.
mirrored cyan bags contain 4 dull blue bags, 4 striped gold bags, 2 plaid lavender bags, 4 light silver bags.
bright yellow bags contain 4 posh yellow bags, 5 mirrored tan bags, 2 posh tomato bags, 4 light indigo bags.
clear cyan bags contain 1 dark gray bag, 4 vibrant indigo bags.
bright teal bags contain 3 mirrored tan bags.
muted blue bags contain 2 clear white bags, 4 vibrant red bags, 2 faded orange bags, 2 clear plum bags.
dark turquoise bags contain 1 drab violet bag, 5 drab gold bags, 1 mirrored green bag.
vibrant bronze bags contain 3 dim aqua bags, 3 light aqua bags, 3 wavy silver bags, 2 posh tomato bags.
posh fuchsia bags contain 3 dark brown bags, 5 striped indigo bags, 1 muted indigo bag, 4 mirrored bronze bags.
posh blue bags contain 4 clear black bags.
mirrored salmon bags contain 1 dotted gray bag, 3 clear gold bags, 5 dark indigo bags, 2 striped gold bags.
dark gold bags contain 2 vibrant blue bags, 3 muted indigo bags.
dark chartreuse bags contain 4 shiny tan bags, 1 wavy lavender bag, 3 vibrant olive bags, 3 light green bags.
drab olive bags contain 4 faded salmon bags, 4 drab white bags.
posh purple bags contain 3 wavy gold bags.
plaid coral bags contain 3 pale bronze bags, 5 mirrored green bags, 2 muted tan bags, 2 wavy silver bags.
shiny magenta bags contain 4 striped green bags, 5 mirrored brown bags.
dull gray bags contain 5 vibrant indigo bags, 3 clear fuchsia bags, 4 dotted teal bags, 4 dim bronze bags.
clear blue bags contain 4 shiny cyan bags, 1 striped fuchsia bag.
shiny white bags contain 4 faded tan bags, 2 shiny gold bags, 1 shiny bronze bag, 1 dim coral bag.
bright black bags contain 5 shiny gray bags, 3 dull bronze bags, 4 striped gold bags.
faded silver bags contain 3 clear gold bags, 4 dotted cyan bags, 1 light white bag, 4 dull green bags.
clear plum bags contain 2 drab silver bags.
dark orange bags contain 4 dim aqua bags, 4 drab violet bags.
shiny salmon bags contain 5 faded violet bags, 3 muted fuchsia bags.
plaid orange bags contain 5 light beige bags, 1 dull salmon bag.
posh bronze bags contain 4 faded blue bags, 5 bright orange bags, 3 dark gold bags.
striped plum bags contain 2 vibrant bronze bags.
vibrant gray bags contain 1 clear magenta bag.
plaid purple bags contain 1 light aqua bag, 5 vibrant tan bags, 4 pale tan bags, 4 wavy bronze bags.
faded blue bags contain no other bags.
pale turquoise bags contain 1 vibrant gray bag, 3 plaid purple bags, 5 drab coral bags, 5 plaid indigo bags.
vibrant tan bags contain 4 pale yellow bags.
striped gold bags contain 1 dull salmon bag.
faded indigo bags contain 4 faded chartreuse bags, 2 wavy silver bags, 1 shiny green bag.
light plum bags contain 2 faded chartreuse bags, 1 plaid lime bag, 1 posh violet bag, 5 faded plum bags.
dim indigo bags contain 2 dim black bags, 1 wavy plum bag, 4 dark blue bags.
mirrored bronze bags contain 4 dark gold bags, 4 posh tomato bags, 2 plaid aqua bags.
posh indigo bags contain 2 dim maroon bags, 1 dotted gray bag, 4 dark brown bags, 3 wavy indigo bags.
mirrored silver bags contain 3 dull aqua bags, 5 mirrored purple bags.
drab magenta bags contain 5 pale beige bags, 4 dotted indigo bags.
drab bronze bags contain 1 dotted brown bag, 5 clear beige bags.
mirrored lavender bags contain 4 posh purple bags, 2 mirrored cyan bags, 3 drab gold bags.
dotted purple bags contain 2 striped teal bags, 5 clear plum bags, 2 striped lavender bags, 2 dull violet bags.
light aqua bags contain 2 faded blue bags, 4 drab violet bags, 5 dim aqua bags.
dim green bags contain 5 dark bronze bags, 3 light crimson bags, 2 bright yellow bags.
dotted cyan bags contain 3 light aqua bags.
muted gold bags contain 5 shiny maroon bags.
shiny crimson bags contain 5 pale aqua bags, 4 dull salmon bags, 4 dark turquoise bags.
dotted beige bags contain 2 drab cyan bags, 5 mirrored bronze bags, 4 vibrant bronze bags, 5 shiny blue bags.
muted chartreuse bags contain 3 plaid beige bags.
striped lime bags contain 2 muted orange bags.
dull violet bags contain 2 dark gold bags, 4 posh maroon bags, 2 vibrant teal bags, 4 drab teal bags.
shiny bronze bags contain 5 light tomato bags, 1 dull blue bag, 4 dark black bags, 1 posh chartreuse bag.
clear green bags contain 1 light green bag, 5 dim plum bags.
wavy black bags contain 3 clear fuchsia bags, 2 striped violet bags, 1 vibrant indigo bag.
posh lime bags contain 5 pale cyan bags, 3 clear fuchsia bags, 1 posh white bag, 4 dark turquoise bags.
striped violet bags contain 5 bright white bags, 5 dull blue bags, 3 light tomato bags, 3 mirrored green bags.
dotted brown bags contain 5 posh tomato bags.
dotted red bags contain 4 dim tomato bags, 1 drab beige bag.
wavy white bags contain 5 plaid crimson bags, 2 light magenta bags.
muted teal bags contain 5 dim crimson bags, 1 dim cyan bag.
bright lavender bags contain 2 dark lavender bags, 2 mirrored cyan bags, 1 dim yellow bag, 5 vibrant teal bags.
clear coral bags contain 2 drab beige bags, 1 drab yellow bag, 1 dotted tan bag.
clear gray bags contain 3 plaid lime bags, 1 dull beige bag, 5 light beige bags.
drab tan bags contain 3 dull salmon bags, 3 wavy tomato bags, 2 muted orange bags, 5 clear cyan bags.
muted gray bags contain 5 dull cyan bags, 4 clear olive bags.
striped black bags contain 4 dull plum bags, 3 faded gray bags, 3 faded cyan bags.
clear violet bags contain 4 posh bronze bags, 1 pale gold bag.` | 2020/day7/puzzle.go | 0.58676 | 0.70124 | puzzle.go | starcoder |
// Package lifecycle implements a simple helper object to make
// synchronization of worker goroutines a little less verbose.
package lifecycle
// Stdlib imports.
import (
"sync"
"time"
)
// Lifecycle provides a simple way to coordinate the lifecycle of
// worker goroutines that loop infinitely
type Lifecycle struct {
	run           bool          // running flag; true until Shutdown(); guarded by lock
	heartbeatChan chan bool     // carries periodic heartbeat signals to the client (buffer 1)
	heartbeatDur  time.Duration // heartbeat period; zero means the heart is stopped
	lock          sync.RWMutex  // guards run (heartbeatDur is only partially guarded — see heartbeat)
	shutdownChan  chan bool     // closed by Shutdown() to broadcast the stop signal
	waitChan      chan bool     // closed by ShutdownComplete() to release the blocked Shutdown()
}
// New allocates a Lifecycle in its running state, with buffered
// heartbeat/shutdown/wait channels, and returns a pointer to it.
func New() *Lifecycle {
	return &Lifecycle{
		run:           true,
		heartbeatChan: make(chan bool, 1),
		heartbeatDur:  time.Duration(0),
		shutdownChan:  make(chan bool, 1),
		waitChan:      make(chan bool, 1),
	}
}
// QueryHeartbeat exposes the read-only channel on which the periodic
// heartbeat signal (started via StartHeart) is delivered.
func (lc *Lifecycle) QueryHeartbeat() <-chan bool {
	return lc.heartbeatChan
}
// QueryRun reports whether the object is still in its running state.
// It stays true until Shutdown() is called; worker loops typically use
// it as their loop condition in a for-select pattern.
func (lc *Lifecycle) QueryRun() bool {
	lc.lock.RLock()
	running := lc.run
	lc.lock.RUnlock()
	return running
}
// QueryShutdown exposes the read-only channel that is closed when
// Shutdown() is called; receivers should treat a receive as the stop signal.
func (lc *Lifecycle) QueryShutdown() <-chan bool {
	return lc.shutdownChan
}
// Shutdown sets the run flag to false, sends the shutdown signal back
// to the client on the shutdown channel, and then blocks until the client
// calls ShutdownComplete().
func (this *Lifecycle) Shutdown() {
	this.lock.Lock()
	// Already shut down: bail out. This guard makes Shutdown idempotent
	// with respect to closing shutdownChan (a second close would panic).
	if !this.run {
		this.lock.Unlock()
		return
	}
	// StopHeart only zeroes heartbeatDur and takes no locks, so calling
	// it while holding the write lock cannot self-deadlock.
	this.StopHeart()
	this.run = false
	this.lock.Unlock()
	// Closing (rather than sending on) the channel wakes every listener
	// obtained via QueryShutdown().
	close(this.shutdownChan)
	// Block here until the client acknowledges via ShutdownComplete().
	<-this.waitChan
}
// ShutdownComplete releases the goroutine blocked inside Shutdown().
// Client code calls this once its own teardown work has finished.
func (lc *Lifecycle) ShutdownComplete() {
	close(lc.waitChan)
}
// StartHeart converts heartbeatMs (milliseconds) to a duration, stores it
// as the heartbeat period, and launches the timer chain in a new goroutine.
// NOTE(review): heartbeatDur is written here without holding lock while
// heartbeat() reads it under RLock — confirm callers never race Start/Stop.
func (lc *Lifecycle) StartHeart(heartbeatMs int) {
	period := time.Duration(heartbeatMs) * time.Millisecond
	lc.heartbeatDur = period
	go lc.heartbeat()
}
// StopHeart zeroes the heartbeat period. An already-scheduled timer may
// still fire, but heartbeat() sees the zero duration and stops signaling.
// Deliberately lock-free: Shutdown() calls this while holding the write
// lock, so taking lock here would self-deadlock.
func (lc *Lifecycle) StopHeart() {
	lc.heartbeatDur = 0
}
// heartbeat sends the heartbeat signal back to the client on the heartbeat
// channel and schedules the next heartbeat for heartbeatDur in the
// future. If the object has shut down or the duration is zero (StopHeart),
// the timer chain ends here and no signal is sent.
func (this *Lifecycle) heartbeat() {
	this.lock.RLock()
	if !this.run || this.heartbeatDur == 0 {
		this.lock.RUnlock()
		return
	}
	this.lock.RUnlock()
	// heartbeatChan has buffer 1: this send blocks if the client has not
	// drained the previous beat, which also delays scheduling the next one.
	this.heartbeatChan<- true
	// NOTE(review): heartbeatDur is read here outside the lock while
	// StartHeart/StopHeart write it without the lock — likely data race
	// under -race; confirm before relying on concurrent Start/Stop.
	time.AfterFunc(
		this.heartbeatDur,
		this.heartbeat,
	)
} | lib/lifecycle/lifecycle.go | 0.764979 | 0.401189 | lifecycle.go | starcoder |
package ast
import (
"fmt"
"strconv"
)
// Constants for different types
// of numbers
const (
	IntType   = iota // Number holds an integer payload (valueInt)
	FloatType        // Number holds a float payload (valueFloat)
)
// NumberValue is for any value that
// can stand in place of a number
// Current implementers:
//	Number
//	NumberSymbol
type NumberValue interface {
	GetDataType() int              // AST node type tag
	GetIntValue() int64            // value as int64 (float payloads truncated)
	GetFloatValue() float64        // value as float64
	SetValue(value interface{})    // replace the stored numeric value
	AsString(indent string) string // indented debug/AST-dump form
	ToString() string              // plain textual form
	Add(n2 NumberValue) Number
	Sub(n2 NumberValue) Number
	Mult(n2 NumberValue) Number
	Div(n2 NumberValue) Number // always performs float division
}
// Number represents both a float
// and a integer type
type Number struct {
	valueInt   int64   // payload used when numberType == IntType
	valueFloat float64 // payload used when numberType == FloatType
	dataType   int     // AST node tag; constructors set this to TypeNumber
	numberType int     // IntType or FloatType; selects the active payload
}
// NewIntNumber builds a Number whose payload is the given integer.
func NewIntNumber(v int64) *Number {
	n := &Number{
		valueInt:   v,
		dataType:   TypeNumber,
		numberType: IntType,
	}
	return n
}
// NewFloatNumber builds a Number whose payload is the given float.
func NewFloatNumber(v float64) *Number {
	n := &Number{
		valueFloat: v,
		dataType:   TypeNumber,
		numberType: FloatType,
	}
	return n
}
// GetIntValue reports the stored value as an int64 regardless of the
// underlying kind. A float payload is truncated toward zero by the
// int64 conversion.
func (n *Number) GetIntValue() int64 {
	if n.numberType != IntType {
		return int64(n.valueFloat)
	}
	return n.valueInt
}
// GetFloatValue reports the stored value as a float64 regardless of the
// underlying kind; an integer payload is widened exactly where it fits.
func (n *Number) GetFloatValue() float64 {
	switch n.numberType {
	case IntType:
		return float64(n.valueInt)
	default:
		return n.valueFloat
	}
}
// GetDataType returns the AST data-type tag stored in dataType (the
// constructors set it to TypeNumber). Note this is the node tag, not
// the int/float kind — that lives in the unexported numberType field.
func (n *Number) GetDataType() int {
	return n.dataType
}
// SetValue replaces the numeric value held by n. Accepted inputs are
// another Number (pointer or value), any signed Go integer type or byte,
// or float32/float64; anything else panics. Integer inputs mark n as
// IntType and float inputs as FloatType.
//
// Fix: the original float32/float64 cases never updated numberType, so a
// float assigned into an int-kinded Number was still rendered and
// converted through the stale integer path.
func (n *Number) SetValue(value interface{}) {
	switch v := value.(type) {
	case *Number:
		n.valueInt = v.valueInt
		n.valueFloat = v.valueFloat
		n.numberType = v.numberType
	case Number:
		n.valueInt = v.valueInt
		n.valueFloat = v.valueFloat
		n.numberType = v.numberType
	case int:
		n.valueInt = int64(v)
		n.numberType = IntType
	case byte:
		n.valueInt = int64(v)
		n.numberType = IntType
	case int8:
		n.valueInt = int64(v)
		n.numberType = IntType
	case int16:
		n.valueInt = int64(v)
		n.numberType = IntType
	case int32:
		n.valueInt = int64(v)
		n.numberType = IntType
	case int64:
		n.valueInt = v
		n.numberType = IntType
	case float32:
		n.valueFloat = float64(v)
		n.numberType = FloatType
	case float64:
		n.valueFloat = v
		n.numberType = FloatType
	default:
		panic("Invalid data for number")
	}
}
// ToString renders the number as decimal text: base-10 for integer
// payloads, shortest exact form ('f', precision -1) for float payloads.
func (n *Number) ToString() string {
	if n.numberType != IntType {
		return strconv.FormatFloat(n.valueFloat, 'f', -1, 64)
	}
	return strconv.FormatInt(n.valueInt, 10)
}
// Add adds two numbers, irrespective of type: float addition if either
// side is float-kinded, integer addition otherwise.
// NOTE(review): this compares GetDataType() (the AST node tag, which
// Number's constructors set to TypeNumber) against FloatType (a
// number-kind constant) — it likely should inspect the int/float kind
// (numberType) instead; confirm TypeNumber's value and callers before
// changing.
func (n Number) Add(n2 NumberValue) Number {
	if n.GetDataType() == FloatType || n2.GetDataType() == FloatType {
		return *NewFloatNumber(n.GetFloatValue() + n2.GetFloatValue())
	}
	return *NewIntNumber(n.GetIntValue() + n2.GetIntValue())
}
// Sub subtracts n2 from n: float subtraction if either side is
// float-kinded, integer subtraction otherwise.
// NOTE(review): compares GetDataType() (the AST node tag, TypeNumber)
// against FloatType (a number-kind constant) — likely should inspect
// numberType instead; confirm before changing.
func (n Number) Sub(n2 NumberValue) Number {
	if n.GetDataType() == FloatType || n2.GetDataType() == FloatType {
		return *NewFloatNumber(n.GetFloatValue() - n2.GetFloatValue())
	}
	return *NewIntNumber(n.GetIntValue() - n2.GetIntValue())
}
// Mult multiplies two numbers
func (n Number) Mult(n2 NumberValue) Number {
if n.GetDataType() == FloatType || n2.GetDataType() == FloatType {
return *NewFloatNumber(n.GetFloatValue() * n2.GetFloatValue())
}
return *NewIntNumber(n.GetIntValue() * n2.GetIntValue())
}
// Div divides two numbers
func (n Number) Div(n2 NumberValue) Number {
return *NewFloatNumber(n.GetFloatValue() / n2.GetFloatValue())
}
// AsString returns a string representation of the node
func (n *Number) AsString(indent string) string {
return indent + fmt.Sprintf("Signed number: '%s'", n.ToString())
} | ast/number.go | 0.756537 | 0.452294 | number.go | starcoder |
package url
import (
"unicode"
"github.com/bits-and-blooms/bitset"
)
// PercentEncodeSet describes which code points must be percent-encoded:
// every code point strictly below allBelow, plus each bit set in bs.
type PercentEncodeSet struct {
	bs *bitset.BitSet
	allBelow int32
}
// NewPercentEncodeSet builds a set that encodes every code point below
// allBelow plus each explicitly listed byte value.
func NewPercentEncodeSet(allBelow int32, bytes ...uint) *PercentEncodeSet {
	set := &PercentEncodeSet{allBelow: allBelow, bs: bitset.New(0x7f)}
	for _, v := range bytes {
		set.bs.Set(v)
	}
	return set
}

// Set returns a copy of p with the given byte values added; p itself is
// left unmodified.
func (p *PercentEncodeSet) Set(bytes ...uint) *PercentEncodeSet {
	clone := &PercentEncodeSet{allBelow: p.allBelow, bs: p.bs.Clone()}
	for _, v := range bytes {
		clone.bs.Set(v)
	}
	return clone
}

// Clear returns a copy of p with the given byte values removed; p itself
// is left unmodified.
func (p *PercentEncodeSet) Clear(bytes ...uint) *PercentEncodeSet {
	clone := &PercentEncodeSet{allBelow: p.allBelow, bs: p.bs.Clone()}
	for _, v := range bytes {
		clone.bs.Clear(v)
	}
	return clone
}
// RuneShouldBeEncoded reports whether rune r must be percent-encoded:
// anything below the allBelow threshold, above ASCII '~' (0x7E), or
// explicitly present in the set.
func (p *PercentEncodeSet) RuneShouldBeEncoded(r rune) bool {
	// Idiom fix: return the boolean expression directly instead of
	// `if cond { return true }; return false` (gosimple S1008).
	return r < p.allBelow || r > 0x007E || p.bs.Test(uint(r))
}

// ByteShouldBeEncoded reports whether byte b must be percent-encoded,
// using the same rules as RuneShouldBeEncoded.
func (p *PercentEncodeSet) ByteShouldBeEncoded(b byte) bool {
	return int32(b) < p.allBelow || b > 0x007E || p.bs.Test(uint(b))
}

// RuneNotInSet reports whether rune r is NOT covered by the set, i.e.
// it is at or above the threshold and not explicitly listed.
func (p *PercentEncodeSet) RuneNotInSet(r rune) bool {
	return r >= p.allBelow && !p.bs.Test(uint(r))
}
// isURLCodePoint reports whether r is a URL code point: ASCII
// alphanumerics, the punctuation in someURLCodePoints, or any code
// point in U+00A0..U+10FFFD that is neither a noncharacter nor a
// surrogate (unicode.Cs). Presumably mirrors the WHATWG URL Standard's
// "URL code point" definition — verify against the spec.
func isURLCodePoint(r rune) bool {
	if ASCIIAlphanumeric.Test(uint(r)) {
		return true
	}
	if someURLCodePoints.Test(uint(r)) {
		return true
	}
	if r >= 0xa0 && r <= 0x10fffd {
		// Noncharacters (U+FDD0..U+FDEF, U+xxFFFE/U+xxFFFF) are excluded.
		if unicode.Is(unicode.Noncharacter_Code_Point, r) {
			return false
		}
		// Surrogate code points are excluded.
		if unicode.Is(unicode.Cs, r) {
			return false
		}
		return true
	}
	return false
}
// Character-class bitsets. The argument to bitset.New is a size hint;
// ASCIIAlpha/Digit/HexDigit/Alphanumeric are populated in init below.
var ASCIITabOrNewline = bitset.New(0x0d).Set(0x09).Set(0x0a).Set(0x0d)
var ASCIIAlpha = bitset.New(0x7a)
var ASCIIDigit = bitset.New(0x39)
var ASCIIHexDigit = bitset.New(0x66)
var ASCIIAlphanumeric = bitset.New(0x7a)
// Code points never allowed in a host name.
var ForbiddenHostCodePoint = bitset.New(0x5d).Set(0x00).Set(0x09).Set(0x0a).Set(0x0d).Set(0x20).
	Set(0x23).Set(0x25).Set(0x2f).Set(0x3a).Set(0x3c).Set(0x3e).Set(0x3f).Set(0x40).Set(0x5b).
	Set(0x5c).Set(0x5d).Set(0x5e)
// Punctuation that counts as a URL code point (see isURLCodePoint).
var someURLCodePoints = bitset.New(0x7e).Set(0x24).Set(0x26).Set(0x27).Set(0x28).Set(0x29).
	Set(0x2a).Set(0x2b).Set(0x2c).Set(0x2d).Set(0x2e).Set(0x2f).Set(0x3a).Set(0x3b).Set(0x3d).
	Set(0x3f).Set(0x40).Set(0x5f).Set(0x7e)
// Percent-encode sets, built by extending one another; each Set call
// returns an independent copy, so the bases stay unmodified.
var C0PercentEncodeSet = NewPercentEncodeSet(0x20)
var C0OrSpacePercentEncodeSet = NewPercentEncodeSet(0x21)
var FragmentPercentEncodeSet = C0OrSpacePercentEncodeSet.Set(0x22, 0x3c, 0x3e, 0x60)
var QueryPercentEncodeSet = C0OrSpacePercentEncodeSet.Set(0x22, 0x23, 0x3C, 0x3E)
var SpecialQueryPercentEncodeSet = QueryPercentEncodeSet.Set(0x27)
var PathPercentEncodeSet = QueryPercentEncodeSet.Set(0x3f, 0x60, 0x7b, 0x7d)
var UserInfoPercentEncodeSet = PathPercentEncodeSet.Set(0x2f, 0x3a, 0x3b, 0x3d, 0x40, 0x5b, 0x5c, 0x5d, 0x5e, 0x7c)
var HostPercentEncodeSet = C0OrSpacePercentEncodeSet.Set(0x23)
// init fills in the ASCII letter/digit/hex-digit bitsets and derives
// ASCIIAlphanumeric and ASCIIHexDigit from them.
func init() {
	for i := 'a'; i <= 'z'; i++ {
		ASCIIAlpha.Set(uint(i))
	}
	for i := 'A'; i <= 'Z'; i++ {
		ASCIIAlpha.Set(uint(i))
	}
	for i := '0'; i <= '9'; i++ {
		ASCIIDigit.Set(uint(i))
	}
	ASCIIAlphanumeric.InPlaceUnion(ASCIIAlpha)
	ASCIIAlphanumeric.InPlaceUnion(ASCIIDigit)
	ASCIIHexDigit.InPlaceUnion(ASCIIDigit)
	for i := 'A'; i <= 'F'; i++ {
		ASCIIHexDigit.Set(uint(i))
	}
	for i := 'a'; i <= 'f'; i++ {
		ASCIIHexDigit.Set(uint(i))
	}
} | url/codesets.go | 0.553023 | 0.526525 | codesets.go | starcoder |
package shapes
import (
"fmt"
"image"
"image/color"
"image/draw"
"image/jpeg"
"image/png"
"math"
"os"
"path/filepath"
"runtime"
"strings"
)
// clamp returns x limited to the inclusive range [minimum, maximum].
func clamp(minimum, x, maximum int) int {
	if x < minimum {
		return minimum
	}
	if x > maximum {
		return maximum
	}
	return x
}
func validFillColor(fill color.Color) color.Color {
if fill == nil { // We silently treat a nil color as black
return color.Black
}
return fill
}
// Drawer is implemented by all drawable shapes.
type Drawer interface {
	Draw(img draw.Image, x, y int) error
}

// Circle renders a circle outline of the given Radius using its
// embedded fill Color (nil is treated as black at draw time).
type Circle struct {
	color.Color
	Radius int
}

// Draw renders the circle outline centred at (x, y) onto img.
func (circle Circle) Draw(img draw.Image, x, y int) error {
	// Algorithm taken from
	// http://en.wikipedia.org/wiki/Midpoint_circle_algorithm
	// No need to check the radius is in bounds because you can only
	// create circles using NewCircle() which guarantees it is within
	// bounds. But the x, y might be outside the image so we check.
	if err := checkBounds(img, x, y); err != nil {
		return err
	}
	fill := validFillColor(circle.Color)
	// Radius is still clamped defensively to [1, 1024].
	radius := clamp(1, circle.Radius, 1024)
	x0, y0 := x, y
	// f is the midpoint decision variable; ddF_x/ddF_y are its deltas.
	f := 1 - radius
	ddF_x, ddF_y := 1, -2*radius
	x, y = 0, radius
	// Plot the four axis-aligned extreme points first.
	img.Set(x0, y0+radius, fill)
	img.Set(x0, y0-radius, fill)
	img.Set(x0+radius, y0, fill)
	img.Set(x0-radius, y0, fill)
	for x < y {
		if f >= 0 {
			y--
			ddF_y += 2
			f += ddF_y
		}
		x++
		ddF_x += 2
		f += ddF_x
		// Plot all eight symmetric octant points for this step.
		img.Set(x0+x, y0+y, fill)
		img.Set(x0-x, y0+y, fill)
		img.Set(x0+x, y0-y, fill)
		img.Set(x0-x, y0-y, fill)
		img.Set(x0+y, y0+x, fill)
		img.Set(x0-y, y0+x, fill)
		img.Set(x0+y, y0-x, fill)
		img.Set(x0-y, y0-x, fill)
	}
	return nil
}
// String implements fmt.Stringer, e.g. "circle(fill=..., radius=5)".
func (circle Circle) String() string {
	return fmt.Sprintf("circle(fill=%v, radius=%d)", circle.Color, circle.Radius)
}
func checkBounds(img image.Image, x, y int) error {
if !image.Rect(x, y, x, y).In(img.Bounds()) {
return fmt.Errorf("%s(): point (%d, %d) is outside the image\n",
caller(1), x, y)
}
return nil
}
func caller(steps int) string {
name := "?"
if pc, _, _, ok := runtime.Caller(steps + 1); ok {
name = filepath.Base(runtime.FuncForPC(pc).Name())
}
return name
}
// RegularPolygon renders a regular polygon outline with the given
// circumradius and number of sides, using its embedded fill Color.
type RegularPolygon struct {
	color.Color
	Radius int
	Sides  int
}

// Draw renders the polygon outline centred at (x, y) onto img.
func (polygon RegularPolygon) Draw(img draw.Image, x, y int) error {
	// No need to check the radius or sides are in bounds because you can
	// only create polygons using NewRegularPolygon() which guarantees they
	// are within bounds. But the x, y might be outside the image so we
	// check. len(points) == sides + 1
	if err := checkBounds(img, x, y); err != nil {
		return err
	}
	fill := validFillColor(polygon.Color)
	// Defensive clamps: radius to [1, 1024], sides to [3, 60].
	radius := clamp(1, polygon.Radius, 1024)
	sides := clamp(3, polygon.Sides, 60)
	points := getPoints(x, y, sides, float64(radius))
	for i := 0; i < sides; i++ { // Draw lines between the apexes
		drawLine(img, points[i], points[i+1], fill)
	}
	return nil
}
func getPoints(x, y, sides int, radius float64) []image.Point {
points := make([]image.Point, sides+1)
// Compute the shape's apexes (thanks to <NAME>)
fullCircle := 2 * math.Pi
x0, y0 := float64(x), float64(y)
for i := 0; i < sides; i++ {
θ := float64(float64(i) * fullCircle / float64(sides))
x1 := x0 + (radius * math.Sin(θ))
y1 := y0 + (radius * math.Cos(θ))
points[i] = image.Pt(int(x1), int(y1))
}
points[sides] = points[0] // close the shape
return points
}
// Based on my Perl Image::Base.pm module's line() method
// drawLine draws a straight line from start to end in the given color,
// using an integer error-accumulation scan (Bresenham-style) split into
// a shallow-slope and a steep-slope case.
func drawLine(img draw.Image, start, end image.Point,
	fill color.Color) {
	x0, x1 := start.X, end.X
	y0, y1 := start.Y, end.Y
	Δx := math.Abs(float64(x1 - x0))
	Δy := math.Abs(float64(y1 - y0))
	if Δx >= Δy { // shallow slope
		// Normalize so we always scan left-to-right.
		if x0 > x1 {
			x0, y0, x1, y1 = x1, y1, x0, y0
		}
		y := y0
		yStep := 1
		if y0 > y1 {
			yStep = -1
		}
		// remainder accumulates the fractional y error per x step.
		remainder := float64(int(Δx/2)) - Δx
		for x := x0; x <= x1; x++ {
			img.Set(x, y, fill)
			remainder += Δy
			if remainder >= 0.0 {
				remainder -= Δx
				y += yStep
			}
		}
	} else { // steep slope
		// Normalize so we always scan top-to-bottom.
		if y0 > y1 {
			x0, y0, x1, y1 = x1, y1, x0, y0
		}
		x := x0
		xStep := 1
		if x0 > x1 {
			xStep = -1
		}
		remainder := float64(int(Δy/2)) - Δy
		for y := y0; y <= y1; y++ {
			img.Set(x, y, fill)
			remainder += Δx
			if remainder >= 0.0 {
				remainder -= Δy
				x += xStep
			}
		}
	}
}
// String implements fmt.Stringer, e.g. "polygon(fill=..., radius=5, sides=6)".
func (polygon RegularPolygon) String() string {
	return fmt.Sprintf("polygon(fill=%v, radius=%d, sides=%d)", polygon.Color, polygon.Radius, polygon.Sides)
}
// Option carries the optional attributes for New: the fill color
// (nil means black) and the circumradius in pixels.
type Option struct {
	Fill color.Color
	Radius int
}
// New constructs a Drawer for the named shape — "circle" or one of the
// supported regular polygons ("triangle" … "decagon") — or returns an
// error for an unknown name.
func New(shape string, option Option) (Drawer, error) {
	if shape == "circle" {
		return Circle{option.Fill, option.Radius}, nil
	}
	sidesForShape := map[string]int{"triangle": 3, "square": 4,
		"pentagon": 5, "hexagon": 6, "heptagon": 7, "octagon": 8,
		"enneagon": 9, "nonagon": 9, "decagon": 10}
	sides, found := sidesForShape[shape]
	if !found {
		return nil, fmt.Errorf("shapes.New(): invalid shape '%s'", shape)
	}
	return RegularPolygon{option.Fill, option.Radius, sides}, nil
}
// FilledImage returns a new width×height RGBA image uniformly filled
// with the given color (nil means black); dimensions are clamped to
// [1, 4096].
func FilledImage(width, height int, fill color.Color) draw.Image {
	fill = validFillColor(fill)
	width = clamp(1, width, 4096)
	height = clamp(1, height, 4096)
	img := image.NewRGBA(image.Rect(0, 0, width, height))
	// image.ZP is deprecated; image.Point{} is the zero point.
	draw.Draw(img, img.Bounds(), &image.Uniform{fill}, image.Point{}, draw.Src)
	return img
}
// DrawShapes draws each Drawer at (x, y), and again at (x+1, y) and
// (x, y+1), stopping at the first error.
func DrawShapes(img draw.Image, x, y int, drawers ...Drawer) error {
	for _, drawer := range drawers {
		if err := drawer.Draw(img, x, y); err != nil {
			return err
		}
		// Thicker so that it shows up better in screenshots
		if err := drawer.Draw(img, x+1, y); err != nil {
			return err
		}
		if err := drawer.Draw(img, x, y+1); err != nil {
			return err
		}
	}
	return nil
}

// SaveImage writes img to filename, choosing the encoder from the file
// suffix (.jpg/.jpeg or .png); any other suffix is an error.
// NOTE(review): on an unrecognized suffix the (empty) file has already
// been created and is left behind — confirm whether it should be removed.
func SaveImage(img image.Image, filename string) error {
	file, err := os.Create(filename)
	if err != nil {
		return err
	}
	defer file.Close()
	switch strings.ToLower(filepath.Ext(filename)) {
	case ".jpg", ".jpeg":
		return jpeg.Encode(file, img, nil)
	case ".png":
		return png.Encode(file, img)
	}
	return fmt.Errorf("shapes.SaveImage(): '%s' has an unrecognized "+
		"suffix", filename)
} | src/shaper3/shapes/shapes.go | 0.878835 | 0.538377 | shapes.go | starcoder |
package messages
/*
This go file centralizes error log messages so we have them all in one place.
Although having the names of the consts as the error code (i.e CSPFK014E) and not as a descriptive name (i.e InvalidStoreType)
can reduce readability of the code that raises the error, we decided to do so for the following reasons:
1. Improves supportability – when we get this code in the log we can find it directly in the code without going
through the “info_messages.go” file first
2. Validates we don’t have error code duplications – If the code is only in the string then 2 errors can have the
same code (which is something that a developer can easily miss). However, If they are in the object name
then the compiler will not allow it.
*/
// Access Token errors
const CSPFK001E string = "CSPFK001E Failed to create access token object"
const CSPFK002E string = "CSPFK002E Failed to retrieve access token"
const CSPFK003E string = "CSPFK003E AccessToken failed to delete access token data. Reason: %s"
// Environment variable errors
const CSPFK004E string = "CSPFK004E Environment variable '%s' must be provided"
const CSPFK005E string = "CSPFK005E Provided incorrect value for environment variable %s"
const CSPFK007E string = "CSPFK007E CONTAINER_MODE '%s' is not supported. Supported values are: %v"
// Authenticator errors
const CSPFK008E string = "CSPFK008E Failed to instantiate authenticator configuration"
const CSPFK009E string = "CSPFK009E Failed to instantiate authenticator object"
const CSPFK010E string = "CSPFK010E Failed to authenticate"
const CSPFK011E string = "CSPFK011E Failed to parse authentication response"
// ProvideConjurSecrets errors
const CSPFK014E string = "CSPFK014E Failed to instantiate ProvideConjurSecrets function. Reason: %s"
const CSPFK015E string = "CSPFK015E Failed to instantiate secrets config"
const CSPFK016E string = "CSPFK016E Failed to provide DAP/Conjur secrets"
// Kubernetes errors
const CSPFK018E string = "CSPFK018E Failed to create Kubernetes client"
const CSPFK019E string = "CSPFK019E Failed to load in-cluster Kubernetes client config"
const CSPFK020E string = "CSPFK020E Failed to retrieve Kubernetes Secret"
const CSPFK021E string = "CSPFK021E Failed to retrieve Kubernetes Secrets"
const CSPFK022E string = "CSPFK022E Failed to update Kubernetes Secret"
const CSPFK023E string = "CSPFK023E Failed to update Kubernetes Secrets"
const CSPFK025E string = "CSPFK025E PathMap cannot be empty"
const CSPFK027E string = "CSPFK027E Failed to update Kubernetes Secrets map with DAP/Conjur secrets"
const CSPFK028E string = "CSPFK028E Unable to update Kubernetes Secret '%s'"
// DAP/Conjur errors
const CSPFK031E string = "CSPFK031E Failed to load DAP/Conjur config. Reason: %s"
const CSPFK032E string = "CSPFK032E Failed to create DAP/Conjur client from token. Reason: %s"
const CSPFK033E string = "CSPFK033E Failed to create DAP/Conjur client"
const CSPFK034E string = "CSPFK034E Failed to retrieve DAP/Conjur secrets. Reason: %s"
const CSPFK035E string = "CSPFK035E Failed to parse DAP/Conjur variable ID"
const CSPFK036E string = "CSPFK036E Variable ID '%s' is not in the format '<account>:variable:<variable_id>'"
const CSPFK037E string = "CSPFK037E Failed to parse DAP/Conjur variable IDs"
// General errors
const CSPFK038E string = "CSPFK038E Retransmission backoff exhausted"
const CSPFK039E string = "CSPFK039E Secrets Provider for Kubernetes failed to update secrets in %s mode. Reason: %s"
// Annotation errors
const CSPFK041E string = "CSPFK041E Failed to open annotations file '%s'. Reason: %s"
const CSPFK042E string = "CSPFK042E Annotation '%s' does not accept value '%s': must be type %s"
const CSPFK043E string = "CSPFK043E Annotation '%s' does not accept value '%s': only accepts %v"
const CSPFK044E string = "CSPFK044E Annotation '%s' must be provided"
const CSPFK045E string = "CSPFK045E Annotation file line %d is malformed: expecting format \"<key>=<quoted value>\""
const CSPFK046E string = "CSPFK046E Secret Store Type needs to be configured, either with 'SECRETS_DESTINATION' environment variable or 'conjur.org/secrets-destination' Pod annotation"
const CSPFK047E string = "CSPFK047E Secrets Provider in Push-to-File mode can only be configured with Pod annotations"
const CSPFK048E string = "CSPFK048E Secrets Provider in K8s Secrets mode requires either the 'K8S_SECRETS' environment variable or 'conjur.org/k8s-secrets' Pod annotation"
const CSPFK049E string = "CSPFK049E Failed to validate Pod annotations"
// Push-to-File errors
const CSPFK053E string = "CSPFK053E Unable to initialize Secrets Provider: unable to create secret group collection"
const CSPFK054E string = "CSPFK054E Unable to initialize Secrets Provider: unrecognized Store Type '%s'" | pkg/log/messages/error_messages.go | 0.532425 | 0.529446 | error_messages.go | starcoder |
package main
import (
"fmt"
"reflect"
)
// Slicer abstracts an indexable sequence whose elements can be compared
// for equality against an arbitrary value.
type Slicer interface {
	EqualTo(i int, x interface{}) bool
	Len() int
}

// IntSlice adapts []int to the Slicer interface.
type IntSlice []int

// EqualTo reports whether element i equals x (which must be an int).
func (s IntSlice) EqualTo(i int, x interface{}) bool { return s[i] == x.(int) }

// Len returns the number of elements.
func (s IntSlice) Len() int { return len(s) }

// IntIndexSlicer returns the index of x in ints, or -1 if x is absent.
func IntIndexSlicer(ints []int, x int) int {
	return IndexSlicer(IntSlice(ints), x)
}

// FloatSlice adapts []float64 to the Slicer interface.
type FloatSlice []float64

// EqualTo reports whether element i equals x (which must be a float64).
func (s FloatSlice) EqualTo(i int, x interface{}) bool { return s[i] == x.(float64) }

// Len returns the number of elements.
func (s FloatSlice) Len() int { return len(s) }

// FloatIndexSlicer returns the index of x in floats, or -1 if x is absent.
func FloatIndexSlicer(floats []float64, x float64) int {
	return IndexSlicer(FloatSlice(floats), x)
}

// StringSlice adapts []string to the Slicer interface.
type StringSlice []string

// EqualTo reports whether element i equals x (which must be a string).
func (s StringSlice) EqualTo(i int, x interface{}) bool { return s[i] == x.(string) }

// Len returns the number of elements.
func (s StringSlice) Len() int { return len(s) }

// StringIndexSlicer returns the index of x in strs, or -1 if x is absent.
func StringIndexSlicer(strs []string, x string) int {
	return IndexSlicer(StringSlice(strs), x)
}

// IndexSlicer returns the first index of x in slice, or -1 if x isn't
// present. Uses a slow linear search suitable for small amounts of
// unsorted data.
func IndexSlicer(slice Slicer, x interface{}) int {
	for i, n := 0, slice.Len(); i < n; i++ {
		if slice.EqualTo(i, x) {
			return i
		}
	}
	return -1
}
// InSlice reports whether x occurs in xs, where xs is a []int or
// []string whose element type matches x. It relies on Index's linear
// search, which suits small amounts of unsorted data.
func InSlice(xs interface{}, x interface{}) bool {
	return Index(xs, x) > -1
}

// Index returns the position of x in xs (a []int or []string with a
// matching element type), or -1 if x isn't present or xs is any other
// type. Linear search, suitable for small amounts of unsorted data.
func Index(xs interface{}, x interface{}) int {
	switch slice := xs.(type) {
	case []int:
		for i := range slice {
			if slice[i] == x.(int) {
				return i
			}
		}
	case []string:
		for i := range slice {
			if slice[i] == x.(string) {
				return i
			}
		}
	}
	return -1
}
// InSliceReflect reports whether x occurs in the slice xs, using
// reflection so any DeepEqual-comparable element type works.
func InSliceReflect(xs interface{}, x interface{}) bool {
	return IndexReflect(xs, x) > -1
}

// IndexReflectX is the long-winded reflection variant: it type-switches
// on each element and only supports int and string element types,
// returning -1 otherwise.
func IndexReflectX(xs interface{}, x interface{}) int { // Long-winded way
	if slice := reflect.ValueOf(xs); slice.Kind() == reflect.Slice {
		for i := 0; i < slice.Len(); i++ {
			switch y := slice.Index(i).Interface().(type) {
			case int:
				if y == x.(int) {
					return i
				}
			case string:
				if y == x.(string) {
					return i
				}
			}
		}
	}
	return -1
}

// IndexReflect returns the position of x in the slice xs, or -1 if xs
// is not a slice or x isn't present.
func IndexReflect(xs interface{}, x interface{}) int {
	if slice := reflect.ValueOf(xs); slice.Kind() == reflect.Slice {
		for i := 0; i < slice.Len(); i++ {
			// Bug fix: the original compared x against the reflect.Value
			// wrapper itself (reflect.DeepEqual(x, slice.Index(i))), which
			// never equals a plain element, so the function always
			// returned -1. Unwrap with Interface() before comparing.
			if reflect.DeepEqual(x, slice.Index(i).Interface()) {
				return i
			}
		}
	}
	return -1
}
// IntSliceIndex returns the position of x in xs, or -1 if absent.
func IntSliceIndex(xs []int, x int) int {
	return SliceIndex(len(xs), func(i int) bool { return xs[i] == x })
}

// StringSliceIndex returns the position of s in xs, or -1 if absent.
func StringSliceIndex(xs []string, s string) int {
	return SliceIndex(len(xs), func(i int) bool { return xs[i] == s })
}

// SliceIndex returns the first i in [0, limit) for which predicate(i)
// reports true, or -1 if none does.
func SliceIndex(limit int, predicate func(i int) bool) int {
	for i := 0; i < limit; i++ {
		if predicate(i) {
			return i
		}
	}
	return -1
}
// main demonstrates each index-lookup variant on the same sample data,
// printing the position of a missing and a present element for each.
func main() {
	xs := []int{2, 4, 6, 8}
	fmt.Println("5 @", Index(xs, 5), " 6 @", Index(xs, 6))
	ys := []string{"C", "B", "K", "A"}
	fmt.Println("Z @", Index(ys, "Z"), " A @", Index(ys, "A"))
	fmt.Println("5 @", IndexReflectX(xs, 5), " 6 @", IndexReflectX(xs, 6))
	fmt.Println("Z @", IndexReflectX(ys, "Z"), " A @",
		IndexReflectX(ys, "A"))
	fmt.Println("5 @", IndexReflect(xs, 5), " 6 @", IndexReflect(xs, 6))
	fmt.Println("Z @", IndexReflect(ys, "Z"), " A @",
		IndexReflect(ys, "A"))
	fmt.Println("5 @", IntIndexSlicer(xs, 5),
		" 6 @", IntIndexSlicer(xs, 6))
	fmt.Println("Z @", StringIndexSlicer(ys, "Z"),
		" A @", StringIndexSlicer(ys, "A"))
	sliceIndex()
}
// sliceIndex demonstrates the closure-based SliceIndex helper on the
// same sample data used by main.
func sliceIndex() {
	xs := []int{2, 4, 6, 8}
	ys := []string{"C", "B", "K", "A"}
	fmt.Println(
		SliceIndex(len(xs), func(i int) bool { return xs[i] == 5 }),
		SliceIndex(len(xs), func(i int) bool { return xs[i] == 6 }),
		SliceIndex(len(ys), func(i int) bool { return ys[i] == "Z" }),
		SliceIndex(len(ys), func(i int) bool { return ys[i] == "A" }))
} | src/contains/contains.go | 0.828211 | 0.526586 | contains.go | starcoder |
package shp
import (
"github.com/cpmech/gosl/chk"
"github.com/cpmech/gosl/gm"
"github.com/cpmech/gosl/utl"
)
// GetShapeNurbs returns a shape structure based on NURBS
// Note: span are the local ids of control points in NURBS defining elements
// Note: FaceLocalVerts does not work for internal surfaces; only those @ boundaries
func GetShapeNurbs(nurbs *gm.Nurbs, nrbfaces []*gm.Nurbs, span []int) (o *Shape) {
	// basic data: the "basic type" is the linear/bilinear/trilinear
	// element matching the geometric dimension.
	o = new(Shape)
	o.Type = "nurbs"
	o.FaceType = "nurbs"
	o.Gndim = nurbs.Gnd()
	switch o.Gndim {
	case 1:
		o.BasicType = "lin2"
	case 2:
		o.BasicType = "qua4"
	case 3:
		o.BasicType = "hex8"
	}
	o.Nverts = nurbs.GetElemNumBasis()
	o.VtkCode = VTK_POLY_VERTEX
	o.Func = o.NurbsFunc
	o.FaceFunc = o.NurbsFaceFunc
	o.Nurbs = nurbs
	o.Span = span
	o.Ibasis = o.Nurbs.IndBasis(o.Span)
	o.U = make([]float64, o.Gndim) // scratch knot-space coordinates
	// faces basic data: 2 faces per geometric dimension.
	// NOTE(review): span layout assumes pairs [lo,hi] per dimension —
	// span[0:2] for u, span[2:4] for v, span[4:6] for w.
	nfaces := 2 * o.Gndim
	o.FaceLocalVerts = nurbs.ElemBryLocalInds()
	if o.Gndim == 3 {
		o.NurbsFaces = nrbfaces
		o.SpanFace = [][]int{
			span[0:2], span[0:2],
			span[2:4], span[2:4],
			span[4:6], span[4:6],
		}
		o.FaceFlip = []bool{false, true, false, true, false, true} // => point to the inside
	} else {
		o.NurbsFaces = []*gm.Nurbs{nrbfaces[2], nrbfaces[1], nrbfaces[3], nrbfaces[0]}
		o.SpanFace = [][]int{span[0:2], span[2:4], span[0:2], span[2:4]}
		o.FaceFlip = []bool{false, false, true, true} // => point to the inside
	}
	// per-face basis indices; FaceNvertsMax tracks the widest face.
	o.IbasisFace = make([][]int, nfaces)
	for idxface, face := range o.NurbsFaces {
		o.IbasisFace[idxface] = face.IndBasis(o.SpanFace[idxface])
		if idxface == 0 {
			o.FaceNvertsMax = len(o.IbasisFace[idxface])
		} else {
			o.FaceNvertsMax = utl.Imax(o.FaceNvertsMax, len(o.IbasisFace[idxface]))
		}
	}
	// allocate stracthpad variables
	o.init_scratchpad()
	return
}

// nurbs_func implements shape/deriv functions for NURBS.
// It maps natural coordinates r in [-1,1] to the knot span, evaluates
// the basis (and optionally derivatives), and writes into S and dSdR.
// NOTE(review): underscore name breaks Go naming convention (MixedCaps),
// but renaming would touch callers outside this view.
func nurbs_func(u, S []float64, dSdR [][]float64, r []float64, derivs bool, nurbs *gm.Nurbs, ibasis, span []int) {
	// compute mapping to knots space: u = ((umax-umin)*r + umax+umin)/2
	nd := nurbs.Gnd()
	var umin, umax float64
	for i := 0; i < nd; i++ {
		umin = nurbs.U(i, span[i*2])
		umax = nurbs.U(i, span[i*2+1])
		u[i] = ((umax-umin)*r[i] + (umax + umin)) / 2.0
		if u[i] < umin || u[i] > umax {
			chk.Panic("cannot compute NURBS shape function outide cell range:\nr[%d]=%v, u[%d]=%v, urange=[%v,%v]", i, r[i], i, u[i], umin, umax)
		}
	}
	// shape and/or derivatives in knots space
	if derivs {
		nurbs.CalcBasisAndDerivs(u)
	} else {
		nurbs.CalcBasis(u)
	}
	for k, l := range ibasis {
		S[k] = nurbs.GetBasisL(l)
	}
	// derivatives in natural space: chain rule with du/dr per dimension
	if derivs {
		for k, l := range ibasis {
			nurbs.GetDerivL(dSdR[k], l) // dSdR := dSdU
			for i := 0; i < nd; i++ {
				umin = nurbs.U(i, span[i*2])
				umax = nurbs.U(i, span[i*2+1])
				dSdR[k][i] *= (umax - umin) / 2.0 // dSdR[i] := dSdU[i] * du[i]/dr[i] (no sum on i)
			}
		}
	}
	return
}

// NurbsFunc implements shape/deriv functions for the NURBS element itself.
func (o *Shape) NurbsFunc(S []float64, dSdR [][]float64, r []float64, derivs bool, idxface int) {
	nurbs_func(o.U, S, dSdR, r, derivs, o.Nurbs, o.Ibasis, o.Span)
}

// NurbsFaceFunc implements shape/deriv functions for face idxface of the
// NURBS element.
func (o *Shape) NurbsFaceFunc(S []float64, dSdR [][]float64, r []float64, derivs bool, idxface int) {
	nurbs_func(o.U, S, dSdR, r, derivs, o.NurbsFaces[idxface], o.IbasisFace[idxface], o.SpanFace[idxface])
} | shp/nurbs.go | 0.558086 | 0.465934 | nurbs.go | starcoder |
package onshape
import (
"encoding/json"
)
// BTPStatementLoop277 is a loop statement node extending BTPStatement269
// with an optional loop body and formatting space. Generated-API style:
// optional fields are pointers, nil meaning "unset".
type BTPStatementLoop277 struct {
	BTPStatement269
	BtType *string `json:"btType,omitempty"`
	Body *BTPStatement269 `json:"body,omitempty"`
	SpaceAfterLoopType *BTPSpace10 `json:"spaceAfterLoopType,omitempty"`
}
// NewBTPStatementLoop277 instantiates a new BTPStatementLoop277 object.
// It assigns default values to properties that define them and ensures
// properties required by the API are set.
func NewBTPStatementLoop277() *BTPStatementLoop277 {
	return &BTPStatementLoop277{}
}

// NewBTPStatementLoop277WithDefaults instantiates a new
// BTPStatementLoop277 object, assigning defaults only to properties
// that define them; required properties are not guaranteed to be set.
func NewBTPStatementLoop277WithDefaults() *BTPStatementLoop277 {
	return &BTPStatementLoop277{}
}
// GetBtType returns the BtType field value if set, the zero value otherwise.
func (o *BTPStatementLoop277) GetBtType() string {
	if o != nil && o.BtType != nil {
		return *o.BtType
	}
	return ""
}

// GetBtTypeOk returns the BtType field pointer if set, plus an ok flag.
func (o *BTPStatementLoop277) GetBtTypeOk() (*string, bool) {
	if o != nil && o.BtType != nil {
		return o.BtType, true
	}
	return nil, false
}

// HasBtType reports whether the BtType field has been set.
func (o *BTPStatementLoop277) HasBtType() bool {
	return o != nil && o.BtType != nil
}

// SetBtType stores a reference to the given string in the BtType field.
func (o *BTPStatementLoop277) SetBtType(v string) {
	o.BtType = &v
}

// GetBody returns the Body field value if set, the zero value otherwise.
func (o *BTPStatementLoop277) GetBody() BTPStatement269 {
	if o != nil && o.Body != nil {
		return *o.Body
	}
	var zero BTPStatement269
	return zero
}

// GetBodyOk returns the Body field pointer if set, plus an ok flag.
func (o *BTPStatementLoop277) GetBodyOk() (*BTPStatement269, bool) {
	if o != nil && o.Body != nil {
		return o.Body, true
	}
	return nil, false
}

// HasBody reports whether the Body field has been set.
func (o *BTPStatementLoop277) HasBody() bool {
	return o != nil && o.Body != nil
}

// SetBody stores a reference to the given BTPStatement269 in the Body field.
func (o *BTPStatementLoop277) SetBody(v BTPStatement269) {
	o.Body = &v
}

// GetSpaceAfterLoopType returns the SpaceAfterLoopType field value if
// set, the zero value otherwise.
func (o *BTPStatementLoop277) GetSpaceAfterLoopType() BTPSpace10 {
	if o != nil && o.SpaceAfterLoopType != nil {
		return *o.SpaceAfterLoopType
	}
	var zero BTPSpace10
	return zero
}

// GetSpaceAfterLoopTypeOk returns the SpaceAfterLoopType field pointer
// if set, plus an ok flag.
func (o *BTPStatementLoop277) GetSpaceAfterLoopTypeOk() (*BTPSpace10, bool) {
	if o != nil && o.SpaceAfterLoopType != nil {
		return o.SpaceAfterLoopType, true
	}
	return nil, false
}

// HasSpaceAfterLoopType reports whether SpaceAfterLoopType has been set.
func (o *BTPStatementLoop277) HasSpaceAfterLoopType() bool {
	return o != nil && o.SpaceAfterLoopType != nil
}

// SetSpaceAfterLoopType stores a reference to the given BTPSpace10 in
// the SpaceAfterLoopType field.
func (o *BTPStatementLoop277) SetSpaceAfterLoopType(v BTPSpace10) {
	o.SpaceAfterLoopType = &v
}
// MarshalJSON serializes the embedded BTPStatement269 into a generic
// map first, then overlays this type's own optional fields.
func (o BTPStatementLoop277) MarshalJSON() ([]byte, error) {
	toSerialize := map[string]interface{}{}
	embedded, err := json.Marshal(o.BTPStatement269)
	if err != nil {
		return []byte{}, err
	}
	if err = json.Unmarshal(embedded, &toSerialize); err != nil {
		return []byte{}, err
	}
	if o.BtType != nil {
		toSerialize["btType"] = o.BtType
	}
	if o.Body != nil {
		toSerialize["body"] = o.Body
	}
	if o.SpaceAfterLoopType != nil {
		toSerialize["spaceAfterLoopType"] = o.SpaceAfterLoopType
	}
	return json.Marshal(toSerialize)
}
// NullableBTPStatementLoop277 wraps a BTPStatementLoop277 pointer with
// an explicit "has been set" flag, distinguishing unset from nil/null.
type NullableBTPStatementLoop277 struct {
	value *BTPStatementLoop277
	isSet bool
}

// Get returns the wrapped value (may be nil).
func (v NullableBTPStatementLoop277) Get() *BTPStatementLoop277 {
	return v.value
}

// Set stores val and marks the wrapper as set.
func (v *NullableBTPStatementLoop277) Set(val *BTPStatementLoop277) {
	v.value = val
	v.isSet = true
}

// IsSet reports whether Set (or UnmarshalJSON) has been called.
func (v NullableBTPStatementLoop277) IsSet() bool {
	return v.isSet
}

// Unset clears the value and the set flag.
func (v *NullableBTPStatementLoop277) Unset() {
	v.value = nil
	v.isSet = false
}

// NewNullableBTPStatementLoop277 returns a wrapper already marked as set.
func NewNullableBTPStatementLoop277(val *BTPStatementLoop277) *NullableBTPStatementLoop277 {
	return &NullableBTPStatementLoop277{value: val, isSet: true}
}

// MarshalJSON encodes the wrapped value (null when nil).
func (v NullableBTPStatementLoop277) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}

// UnmarshalJSON decodes into the wrapped value and marks it as set.
func (v *NullableBTPStatementLoop277) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
} | onshape/model_btp_statement_loop_277.go | 0.693369 | 0.441252 | model_btp_statement_loop_277.go | starcoder |
package colors
import (
"encoding/hex"
"github.com/jung-kurt/gofpdf"
)
// Named "#RRGGBB" hex color constants used throughout the diagrams/reports.
const Red, Amber, Green, Blue, DarkBlue, Black, Gray, LightGray, MiddleLightGray, MoreLightGray, VeryLightGray, ExtremeLightGray, Pink, LightPink = "#CC0000", "#AF780E", "#008000", "#000080", "#000060", "#000000", "#444444", "#666666", "#999999", "#D2D2D2", "#E5E5E5", "#F6F6F6", "#F987C5", "#FFE7EF"
const ExtremeLightBlue, OutOfScopeFancy, CustomDevelopedParts = "#DDFFFF", "#D5D7FF", "#FFFC97"
const LightBlue = "#77FFFF"
const Brown = "#8C4C17"
// DarkenHexColor returns hexString ("#RRGGBB", lowercase output) with
// each channel darkened by 0x20, floored at 0x00. Malformed input is
// returned unchanged.
func DarkenHexColor(hexString string) string {
	if len(hexString) == 0 {
		return hexString
	}
	colorBytes, err := hex.DecodeString(hexString[1:])
	// Robustness fix: the original ignored the decode error and then
	// indexed the first three bytes, panicking on short or non-hex input.
	if err != nil || len(colorBytes) < 3 {
		return hexString
	}
	adjusted := make([]byte, 3)
	for i := 0; i < 3; i++ {
		if colorBytes[i] > 0x22 {
			adjusted[i] = colorBytes[i] - 0x20
		} else {
			adjusted[i] = 0x00
		}
	}
	return "#" + hex.EncodeToString(adjusted)
}

// BrightenHexColor returns hexString ("#RRGGBB", lowercase output) with
// each channel brightened by 0x20, capped at 0xFF. Malformed input is
// returned unchanged.
func BrightenHexColor(hexString string) string {
	if len(hexString) == 0 {
		return hexString
	}
	colorBytes, err := hex.DecodeString(hexString[1:])
	if err != nil || len(colorBytes) < 3 {
		return hexString
	}
	adjusted := make([]byte, 3)
	for i := 0; i < 3; i++ {
		if colorBytes[i] < 0xDD {
			adjusted[i] = colorBytes[i] + 0x20
		} else {
			adjusted[i] = 0xFF
		}
	}
	return "#" + hex.EncodeToString(adjusted)
}
// Severity colors: each Color* setter mirrors the RgbHex* constant below it.

// ColorCriticalRisk sets the PDF text color for critical risks (#FF2600).
func ColorCriticalRisk(pdf *gofpdf.Fpdf) {
	pdf.SetTextColor(255, 38, 0)
}

// RgbHexColorCriticalRisk returns the critical-risk color as hex.
func RgbHexColorCriticalRisk() string {
	return "#FF2600"
}

// ColorHighRisk sets the PDF text color for high risks (#A0281E).
func ColorHighRisk(pdf *gofpdf.Fpdf) {
	pdf.SetTextColor(160, 40, 30)
}

// RgbHexColorHighRisk returns the high-risk color as hex.
func RgbHexColorHighRisk() string {
	return "#A0281E"
}

// ColorElevatedRisk sets the PDF text color for elevated risks (#FF8E00).
func ColorElevatedRisk(pdf *gofpdf.Fpdf) {
	pdf.SetTextColor(255, 142, 0)
}

// RgbHexColorElevatedRisk returns the elevated-risk color as hex.
func RgbHexColorElevatedRisk() string {
	return "#FF8E00"
}

// ColorMediumRisk sets the PDF text color for medium risks (#C87832).
func ColorMediumRisk(pdf *gofpdf.Fpdf) {
	pdf.SetTextColor(200, 120, 50)
}

// RgbHexColorMediumRisk returns the medium-risk color as hex.
func RgbHexColorMediumRisk() string {
	return "#C87832"
}

// ColorLowRisk sets the PDF text color for low risks (#23465F).
func ColorLowRisk(pdf *gofpdf.Fpdf) {
	pdf.SetTextColor(35, 70, 95)
}

// RgbHexColorLowRisk returns the low-risk color as hex.
func RgbHexColorLowRisk() string {
	return "#23465F"
}

// ColorOutOfScope sets the PDF text color for out-of-scope items (#7F7F7F).
func ColorOutOfScope(pdf *gofpdf.Fpdf) {
	pdf.SetTextColor(127, 127, 127)
}

// RgbHexColorOutOfScope returns the out-of-scope color as hex.
func RgbHexColorOutOfScope() string {
	return "#7F7F7F"
}
// Risk-status colors: each Color* setter mirrors the RgbHex* constant
// below it. Bug fix: three setters passed 256, which is outside the
// valid 0-255 RGB channel range; the paired hex constants (#FF0000,
// #0000FF, #FF9300) show 255 was intended.

// ColorRiskStatusUnchecked sets the PDF text color for unchecked risks (#FF0000).
func ColorRiskStatusUnchecked(pdf *gofpdf.Fpdf) {
	pdf.SetTextColor(255, 0, 0) // was 256, 0, 0
}

// RgbHexColorRiskStatusUnchecked returns the unchecked-status color as hex.
func RgbHexColorRiskStatusUnchecked() string {
	return "#FF0000"
}

// ColorRiskStatusMitigated sets the PDF text color for mitigated risks (#008F00).
func ColorRiskStatusMitigated(pdf *gofpdf.Fpdf) {
	pdf.SetTextColor(0, 143, 0)
}

// RgbHexColorRiskStatusMitigated returns the mitigated-status color as hex.
func RgbHexColorRiskStatusMitigated() string {
	return "#008F00"
}

// ColorRiskStatusInProgress sets the PDF text color for in-progress risks (#0000FF).
func ColorRiskStatusInProgress(pdf *gofpdf.Fpdf) {
	pdf.SetTextColor(0, 0, 255) // was 0, 0, 256
}

// RgbHexColorRiskStatusInProgress returns the in-progress-status color as hex.
func RgbHexColorRiskStatusInProgress() string {
	return "#0000FF"
}

// ColorRiskStatusAccepted sets the PDF text color for accepted risks (#FF40FF).
func ColorRiskStatusAccepted(pdf *gofpdf.Fpdf) {
	pdf.SetTextColor(255, 64, 255)
}

// RgbHexColorRiskStatusAccepted returns the accepted-status color as hex.
func RgbHexColorRiskStatusAccepted() string {
	return "#FF40FF"
}

// ColorRiskStatusInDiscussion sets the PDF text color for in-discussion risks (#FF9300).
func ColorRiskStatusInDiscussion(pdf *gofpdf.Fpdf) {
	pdf.SetTextColor(255, 147, 0) // was 256, 147, 0
}

// RgbHexColorRiskStatusInDiscussion returns the in-discussion-status color as hex.
func RgbHexColorRiskStatusInDiscussion() string {
	return "#FF9300"
}

// ColorRiskStatusFalsePositive sets the PDF text color for false positives (#666666).
func ColorRiskStatusFalsePositive(pdf *gofpdf.Fpdf) {
	pdf.SetTextColor(102, 102, 102)
}

// RgbHexColorRiskStatusFalsePositive returns the false-positive-status color as hex.
func RgbHexColorRiskStatusFalsePositive() string {
	return "#666666"
}
// Category colors: each Color* setter mirrors the RgbHex* constant below it.

// ColorTwilight sets the PDF text color for twilight items (#3A52C8).
func ColorTwilight(pdf *gofpdf.Fpdf) {
	pdf.SetTextColor(58, 82, 200)
}

// RgbHexColorTwilight returns the twilight color as hex.
func RgbHexColorTwilight() string {
	return "#3A52C8"
}

// ColorBusiness sets the PDF text color for business items (#531B93).
func ColorBusiness(pdf *gofpdf.Fpdf) {
	pdf.SetTextColor(83, 27, 147)
}

// RgbHexColorBusiness returns the business color as hex.
func RgbHexColorBusiness() string {
	return "#531B93"
}

// ColorArchitecture sets the PDF text color for architecture items (#005493).
func ColorArchitecture(pdf *gofpdf.Fpdf) {
	pdf.SetTextColor(0, 84, 147)
}

// RgbHexColorArchitecture returns the architecture color as hex.
func RgbHexColorArchitecture() string {
	return "#005493"
}

// ColorDevelopment sets the PDF text color for development items (#DE9223).
func ColorDevelopment(pdf *gofpdf.Fpdf) {
	pdf.SetTextColor(222, 146, 35)
}

// RgbHexColorDevelopment returns the development color as hex.
func RgbHexColorDevelopment() string {
	return "#DE9223"
}

// ColorOperation sets the PDF text color for operation items (#947F50).
func ColorOperation(pdf *gofpdf.Fpdf) {
	pdf.SetTextColor(148, 127, 80)
}

// RgbHexColorOperation returns the operation color as hex.
func RgbHexColorOperation() string {
	return "#947F50"
}

// ColorModelFailure sets the PDF text color for model failures (#945200).
func ColorModelFailure(pdf *gofpdf.Fpdf) {
	pdf.SetTextColor(148, 82, 0)
}

// RgbHexColorModelFailure returns the model-failure color as hex.
func RgbHexColorModelFailure() string {
	return "#945200"
} | colors/colors.go | 0.747432 | 0.401629 | colors.go | starcoder |
package mingru
import (
"database/sql"
)
// GetLastInsertIDWithError checks a given error before calling GetLastInsertID.
func GetLastInsertIDWithError(result sql.Result, err error) (int64, error) {
if err != nil {
return 0, err
}
return result.LastInsertId()
}
// GetLastInsertIDUint64WithError checks a given error before calling GetLastInsertIDUint64.
func GetLastInsertIDUint64WithError(result sql.Result, err error) (uint64, error) {
if err != nil {
return 0, err
}
return GetLastInsertIDUint64(result)
}
// GetLastInsertIDIntWithError checks a given error before calling GetLastInsertIDInt.
func GetLastInsertIDIntWithError(result sql.Result, err error) (int, error) {
if err != nil {
return 0, err
}
return GetLastInsertIDInt(result)
}
// GetLastInsertIDUintWithError checks a given error before calling GetLastInsertIDUint.
func GetLastInsertIDUintWithError(result sql.Result, err error) (uint, error) {
if err != nil {
return 0, err
}
return GetLastInsertIDUint(result)
}
// GetLastInsertIDInt64WithError checks a given error before calling GetLastInsertIDInt64.
func GetLastInsertIDInt64WithError(result sql.Result, err error) (int64, error) {
if err != nil {
return 0, err
}
return result.LastInsertId()
}
// GetRowsAffectedWithError checks a given error before calling GetRowsAffected.
func GetRowsAffectedWithError(result sql.Result, err error) (int64, error) {
if err != nil {
return 0, err
}
return result.RowsAffected()
}
// GetRowsAffectedUint64WithError returns err unchanged when it is non-nil;
// otherwise it delegates to GetRowsAffectedUint64.
func GetRowsAffectedUint64WithError(result sql.Result, err error) (uint64, error) {
	if err == nil {
		return GetRowsAffectedUint64(result)
	}
	return 0, err
}
// GetRowsAffectedIntWithError returns err unchanged when it is non-nil;
// otherwise it delegates to GetRowsAffectedInt.
func GetRowsAffectedIntWithError(result sql.Result, err error) (int, error) {
	if err == nil {
		return GetRowsAffectedInt(result)
	}
	return 0, err
}
// GetRowsAffectedUintWithError returns err unchanged when it is non-nil;
// otherwise it delegates to GetRowsAffectedUint.
func GetRowsAffectedUintWithError(result sql.Result, err error) (uint, error) {
	if err == nil {
		return GetRowsAffectedUint(result)
	}
	return 0, err
}
// GetRowsAffectedInt64WithError checks a given error before calling GetRowsAffectedInt64.
func GetRowsAffectedInt64WithError(result sql.Result, err error) (int64, error) {
if err != nil {
return 0, err
}
return result.RowsAffected()
}
// CheckOneRowAffectedWithError checks a given error before calling CheckOneRowAffected.
func CheckOneRowAffectedWithError(result sql.Result, err error) error {
if err != nil {
return err
}
return CheckOneRowAffected(result)
} | errors.go | 0.638835 | 0.416144 | errors.go | starcoder |
package setop
import (
"bytes"
"fmt"
"strings"
)
// RawSourceCreator is a function that takes the name (key) of a raw skippable,
// sortable set and returns a Skipper over that set.
type RawSourceCreator func(b []byte) Skipper
// SetOpResultIterator handles the results of a SetExpression, one SetOpResult
// per call, in key order.
type SetOpResultIterator func(res *SetOpResult)
const (
	// Append simply appends all elements in the input lists and builds an output list from them.
	Append = iota
	// ConCat appends all elements in the input lists into a single element, and builds an output list with only that element.
	ConCat
	// IntegerSum decodes all values as int64 using common.DecodeInt64 and sums them.
	IntegerSum
	// IntegerDiv decodes all values as int64 using common.DecodeInt64 and divides the first value in the input lists by all other values in turn.
	IntegerDiv
	// IntegerMul decodes all values as int64 using common.DecodeInt64 and multiplies them with each other.
	IntegerMul
	// FloatSum decodes all values as float64 using common.DecodeFloat64 and sums them.
	FloatSum
	// FloatDiv decodes all values as float64 using common.DecodeFloat64 and divides the first value in the input lists by all other values in turn.
	FloatDiv
	// FloatMul decodes all values as float64 using common.DecodeFloat64 and multiplies them with each other.
	FloatMul
	// BigIntAnd decodes all values as big.Ints using common.DecodeBigInt and logical ANDs them with each other.
	BigIntAnd
	// BigIntAdd decodes all values as big.Ints using common.DecodeBigInt and sums them.
	BigIntAdd
	// BigIntAndNot decodes all values as big.Ints using common.DecodeBigInt and logical AND NOTs them with each other.
	BigIntAndNot
	// BigIntDiv decodes all values as big.Ints using common.DecodeBigInt and divides the first value in the input lists by all other values in turn.
	BigIntDiv
	// BigIntMod decodes all values as big.Ints using common.DecodeBigInt and does a modulo operation on each one in turn, with the first one as source value.
	BigIntMod
	// BigIntMul decodes all values as big.Ints using common.DecodeBigInt and multiplies them with each other.
	BigIntMul
	// BigIntOr decodes all values as big.Ints using common.DecodeBigInt and logical ORs them with each other.
	BigIntOr
	// BigIntRem decodes all values as big.Ints using common.DecodeBigInt and does a remainder operation on each one in turn, with the first one as source value.
	BigIntRem
	// BigIntXor decodes all values as big.Ints using common.DecodeBigInt and logical XORs them with each other.
	BigIntXor
	// First will simply return the first slice from the inputs.
	First
	// Last will simply return the last slice from the inputs.
	Last
)
// SetOpMerge defines how a SetOp merges the values in the input sets.
// Its valid values are the constants above; see ParseSetOpMerge and String.
type SetOpMerge int
// ParseSetOpMerge parses s as the name of a SetOpMerge constant (e.g. "Append",
// "FloatSum"). It returns an error listing the legal values when s is not a
// recognized merge name.
func ParseSetOpMerge(s string) (result SetOpMerge, err error) {
	switch s {
	case "Append":
		result = Append
	case "ConCat":
		result = ConCat
	case "IntegerSum":
		result = IntegerSum
	case "IntegerDiv":
		result = IntegerDiv
	case "IntegerMul":
		result = IntegerMul
	case "FloatSum":
		result = FloatSum
	case "FloatDiv":
		result = FloatDiv
	case "FloatMul":
		result = FloatMul
	case "BigIntAnd":
		result = BigIntAnd
	case "BigIntAdd":
		result = BigIntAdd
	case "BigIntAndNot":
		result = BigIntAndNot
	case "BigIntDiv":
		result = BigIntDiv
	case "BigIntMod":
		result = BigIntMod
	case "BigIntMul":
		result = BigIntMul
	case "BigIntOr":
		result = BigIntOr
	case "BigIntRem":
		result = BigIntRem
	case "BigIntXor":
		result = BigIntXor
	case "First":
		result = First
	case "Last":
		result = Last
	default:
		// Bug fix: the message used to misname the parsed type as SetOpType.
		err = fmt.Errorf("Unknown SetOpMerge %v. Legal values: Append, ConCat, IntegerSum, IntegerDiv, IntegerMul, FloatSum, FloatDiv, FloatMul, BigIntAdd, BigIntAnd, BigIntAndNot, BigIntDiv, BigIntMod, BigIntMul, BigIntOr, BigIntRem, BigIntXor, First, Last.", s)
	}
	return
}
// String returns the canonical name of the merge (the same spelling accepted
// by ParseSetOpMerge). It panics on values outside the declared constants.
func (self SetOpMerge) String() string {
	switch self {
	case Append:
		return "Append"
	case ConCat:
		return "ConCat"
	case IntegerSum:
		return "IntegerSum"
	case IntegerDiv:
		return "IntegerDiv"
	case IntegerMul:
		return "IntegerMul"
	case FloatSum:
		return "FloatSum"
	case FloatDiv:
		return "FloatDiv"
	case FloatMul:
		return "FloatMul"
	case BigIntAnd:
		return "BigIntAnd"
	case BigIntAdd:
		return "BigIntAdd"
	case BigIntAndNot:
		return "BigIntAndNot"
	case BigIntDiv:
		return "BigIntDiv"
	case BigIntMod:
		return "BigIntMod"
	case BigIntMul:
		return "BigIntMul"
	case BigIntOr:
		return "BigIntOr"
	case BigIntRem:
		return "BigIntRem"
	case BigIntXor:
		return "BigIntXor"
	case First:
		return "First"
	case Last:
		return "Last"
	}
	// Bug fix: the panic used to misname the receiver type as SetOpType.
	panic(fmt.Errorf("Unknown SetOpMerge %v", int(self)))
}
const (
	// Union keeps keys present in at least one input set.
	Union = iota
	// Intersection keeps keys present in every input set.
	Intersection
	// Difference keeps keys present in the first input set but no other.
	Difference
	// Xor differs from the definition in http://en.wikipedia.org/wiki/Exclusive_or by only returning keys present in exactly ONE input set.
	Xor
)
// SetOpType is the set operation to perform in a SetExpression.
type SetOpType int
// String returns the one-letter mnemonic for the operation:
// "U" (Union), "I" (Intersection), "D" (Difference) or "X" (Xor).
// It panics on any other value.
func (self SetOpType) String() string {
	names := map[SetOpType]string{
		Union:        "U",
		Intersection: "I",
		Difference:   "D",
		Xor:          "X",
	}
	if name, found := names[self]; found {
		return name
	}
	panic(fmt.Errorf("Unknown SetOpType %v", int(self)))
}
// SetOpSource is either a key to a raw source producing input data, or another
// SetOp that calculates input data. Exactly one of Key and SetOp is expected
// to be set (Key is checked first by SetOp.String).
type SetOpSource struct {
	Key    []byte   // Name of a raw skippable sorted set, or nil.
	SetOp  *SetOp   // Nested operation producing the input, used when Key is nil.
	Weight *float64 // Optional weight of this source in the parent SetOp's Merge.
}
// SetOp is a set operation to perform on a slice of SetOpSources, using a
// SetOpMerge function to merge the calculated values.
type SetOp struct {
	Sources []SetOpSource // Inputs; each is a raw set key or a nested SetOp.
	Type    SetOpType     // Union, Intersection, Difference or Xor.
	Merge   SetOpMerge    // How matching values from the inputs are combined.
}
// String renders the operation as an s-expression, e.g. "(U a b*2.5 (I c d))":
// the type mnemonic followed by each source, with "*weight" appended when a
// source carries a weight.
func (self *SetOp) String() string {
	parts := make([]string, len(self.Sources))
	for i, src := range self.Sources {
		var repr string
		if src.Key != nil {
			repr = string(src.Key)
		} else {
			repr = fmt.Sprint(src.SetOp)
		}
		if src.Weight != nil {
			repr = fmt.Sprintf("%v*%v", repr, *src.Weight)
		}
		parts[i] = repr
	}
	return fmt.Sprintf("(%v %v)", self.Type, strings.Join(parts, " "))
}
// SetExpression is a set operation defined by the Op or Code fields, coupled
// with range parameters and a Dest key defining where to put the results.
// If Op is nil, Code is parsed lazily by Each (via MustParse).
type SetExpression struct {
	Op     *SetOp // Parsed operation; takes precedence over Code when non-nil.
	Code   string // Textual form of the operation, parsed when Op is nil.
	Min    []byte // Lower key bound for iteration, or nil for no bound.
	Max    []byte // Upper key bound for iteration, or nil for no bound.
	MinInc bool   // Whether Min itself is included in the range.
	MaxInc bool   // Whether Max itself is included in the range.
	Len    int    // Maximum number of results; <= 0 means unlimited.
	Dest   []byte // Destination key for the results.
}
// Each will execute the set expression, using the provided RawSourceCreator,
// and iterate over the results in key order using f. Iteration stops when the
// skipper is exhausted, an error occurs, Len results have been produced, or a
// key beyond Max is seen.
func (self *SetExpression) Each(r RawSourceCreator, f SetOpResultIterator) (err error) {
	// Lazily parse the textual form if no parsed op was supplied.
	if self.Op == nil {
		self.Op = MustParse(self.Code)
	}
	skipper := createSkipper(r, self.Op)
	min := self.Min
	mininc := self.MinInc
	count := 0
	// gt is the bytes.Compare threshold for "past Max": with MaxInc a key is
	// out of range only when strictly greater than Max (> 0), otherwise
	// already when equal (> -1).
	gt := -1
	if self.MaxInc {
		gt = 0
	}
	var res *SetOpResult
	for res, err = skipper.Skip(min, mininc); res != nil && err == nil; res, err = skipper.Skip(min, mininc) {
		if (self.Len > 0 && count >= self.Len) || (self.Max != nil && bytes.Compare(res.Key, self.Max) > gt) {
			return
		}
		count++
		// Advance the cursor past the key just produced (exclusive).
		min = res.Key
		mininc = false
		f(res)
	}
	return
}
// SetOpResult is a key and any values the Merge returned for it.
type SetOpResult struct {
	Key    []byte   // Key shared by the matching input entries.
	Values [][]byte // Merged values; layout depends on the SetOpMerge used.
}
// ShallowCopy returns a new SetOpResult sharing this one's Key and holding a
// copy of the Values slice (the value byte slices themselves are shared).
func (self *SetOpResult) ShallowCopy() (result *SetOpResult) {
	values := make([][]byte, len(self.Values))
	copy(values, self.Values)
	result = &SetOpResult{
		Key:    self.Key,
		Values: values,
	}
	return
}
func (self *SetOpResult) String() string {
return fmt.Sprintf("%+v", *self)
} | setop/set_op.go | 0.705379 | 0.72227 | set_op.go | starcoder |
package eff
// shape is the contract implemented by Shape: a Drawable that records
// primitive draw calls for later replay.
// NOTE(review): Shape also implements DrawText, which is not listed here —
// confirm whether that omission is intentional.
type shape interface {
	Drawable
	// Background color management.
	SetBackgroundColor(Color)
	BackgroundColor() Color
	// Clear drops all recorded draw calls.
	Clear()
	// Point primitives.
	DrawPoint(Point, Color)
	DrawPoints([]Point, Color)
	DrawColorPoints([]Point, []Color)
	// Line primitives.
	DrawLine(Point, Point, Color)
	DrawLines([]Point, Color)
	// Rect primitives, stroked and filled.
	StrokeRect(Rect, Color)
	StrokeRects([]Rect, Color)
	StrokeColorRects([]Rect, []Color)
	FillRect(Rect, Color)
	FillRects([]Rect, Color)
	FillColorRects([]Rect, []Color)
	// Image drawing.
	DrawImage(Image, Rect)
}
// Shape struct that can be added as a child to a canvas or another Shape.
// Draw* methods do not paint immediately; they queue closures in drawCalls,
// which Draw replays against the Graphics backend.
type Shape struct {
	drawable
	bgColor   Color            // Fill color painted behind the queued draw calls.
	drawCalls []func(Graphics) // Deferred draw operations, replayed in order by Draw.
}
// Draw renders the shape and all of its children to the canvas; typically
// called by the canvas the shape is added to. It holds the read lock for the
// whole pass, brackets the work with Graphics().Begin/End, and skips painting
// entirely when ShouldDraw is false.
func (shape *Shape) Draw(canvas Canvas) {
	shape.mu.RLock()
	defer shape.mu.RUnlock()
	// No graphics backend attached yet — nothing can be drawn.
	if shape.Graphics() == nil {
		return
	}
	shape.Graphics().Begin(shape)
	if shape.ShouldDraw() {
		// Paint the background, then replay every queued draw call in order.
		shape.Graphics().FillRect(Rect{X: 0, Y: 0, W: shape.Rect().W, H: shape.Rect().H}, shape.bgColor)
		for _, fn := range shape.drawCalls {
			fn(shape.Graphics())
		}
		shape.SetShouldDraw(false)
		// Recurse into children that lie fully inside this shape's rect.
		for _, child := range shape.children {
			rect := shape.Rect()
			if rect.LocalInside(child.Rect()) {
				child.Draw(canvas)
			}
		}
	}
	shape.Graphics().End(shape)
}
// SetBackgroundColor sets the color painted behind the shape's draw calls and
// schedules a redraw.
func (shape *Shape) SetBackgroundColor(color Color) {
	shape.bgColor = color
	shape.SetShouldDraw(true)
}
// BackgroundColor returns the current background color of the shape.
func (shape *Shape) BackgroundColor() Color {
	return shape.bgColor
}
// Clear discards every queued draw call and schedules a redraw, leaving only
// the background to be painted.
func (shape *Shape) Clear() {
	shape.drawCalls = nil
	shape.SetShouldDraw(true)
}
// DrawPoint queues a single point of color c at p and schedules a redraw.
func (shape *Shape) DrawPoint(p Point, c Color) {
	call := func(g Graphics) { g.DrawPoint(p, c) }
	shape.drawCalls = append(shape.drawCalls, call)
	shape.SetShouldDraw(true)
}
// DrawPoints queues a slice of points, all drawn in the same color.
func (shape *Shape) DrawPoints(p []Point, c Color) {
	call := func(g Graphics) { g.DrawPoints(p, c) }
	shape.drawCalls = append(shape.drawCalls, call)
	shape.SetShouldDraw(true)
}
// DrawColorPoints queues a slice of points drawn in per-point colors; the
// color slice is expected to be the same length as the point slice.
func (shape *Shape) DrawColorPoints(p []Point, c []Color) {
	call := func(g Graphics) { g.DrawColorPoints(p, c) }
	shape.drawCalls = append(shape.drawCalls, call)
	shape.SetShouldDraw(true)
}
// DrawLine queues a line from p1 to p2 in color c.
func (shape *Shape) DrawLine(p1 Point, p2 Point, c Color) {
	call := func(g Graphics) { g.DrawLine(p1, p2, c) }
	shape.drawCalls = append(shape.drawCalls, call)
	shape.SetShouldDraw(true)
}
// DrawLines queues a batch of lines in a single color; since each line is two
// points, the point slice length should be even.
func (shape *Shape) DrawLines(p []Point, c Color) {
	call := func(g Graphics) { g.DrawLines(p, c) }
	shape.drawCalls = append(shape.drawCalls, call)
	shape.SetShouldDraw(true)
}
// StrokeRect queues the outline of r in color c.
func (shape *Shape) StrokeRect(r Rect, c Color) {
	call := func(g Graphics) { g.StrokeRect(r, c) }
	shape.drawCalls = append(shape.drawCalls, call)
	shape.SetShouldDraw(true)
}
// StrokeRects queues the outlines of a slice of rects in a single color.
func (shape *Shape) StrokeRects(r []Rect, c Color) {
	call := func(g Graphics) { g.StrokeRects(r, c) }
	shape.drawCalls = append(shape.drawCalls, call)
	shape.SetShouldDraw(true)
}
// StrokeColorRects queues rect outlines with per-rect colors; the color slice
// is expected to be the same length as the rect slice.
func (shape *Shape) StrokeColorRects(r []Rect, c []Color) {
	call := func(g Graphics) { g.StrokeColorRects(r, c) }
	shape.drawCalls = append(shape.drawCalls, call)
	shape.SetShouldDraw(true)
}
// FillRect queues a filled rect r in color c.
func (shape *Shape) FillRect(r Rect, c Color) {
	call := func(g Graphics) { g.FillRect(r, c) }
	shape.drawCalls = append(shape.drawCalls, call)
	shape.SetShouldDraw(true)
}
// FillRects queues a batch of filled rects in a single color.
func (shape *Shape) FillRects(r []Rect, c Color) {
	call := func(g Graphics) { g.FillRects(r, c) }
	shape.drawCalls = append(shape.drawCalls, call)
	shape.SetShouldDraw(true)
}
// FillColorRects queues filled rects with per-rect colors; the color slice is
// expected to be the same length as the rect slice.
func (shape *Shape) FillColorRects(r []Rect, c []Color) {
	call := func(g Graphics) { g.FillColorRects(r, c) }
	shape.drawCalls = append(shape.drawCalls, call)
	shape.SetShouldDraw(true)
}
// DrawText queues the string text in font f and color c at position p.
func (shape *Shape) DrawText(f Font, text string, c Color, p Point) {
	call := func(g Graphics) { g.DrawText(f, text, c, p) }
	shape.drawCalls = append(shape.drawCalls, call)
	shape.SetShouldDraw(true)
}
// DrawImage draws an image to the canvas
func (shape *Shape) DrawImage(i Image, r Rect) {
shape.drawCalls = append(shape.drawCalls, func(g Graphics) {
g.DrawImage(i, r)
})
shape.SetShouldDraw(true)
} | shape.go | 0.88565 | 0.716429 | shape.go | starcoder |
package vals
import (
"fmt"
"math"
"math/big"
"strconv"
"strings"
)
// Num is a stand-in type for int, *big.Int, *big.Rat or float64. This type
// doesn't offer type safety, but is useful as a marker.
type Num interface{}
// NumSlice is a stand-in type for []int, []*big.Int, []*big.Rat or []float64.
// This type doesn't offer type safety, but is useful as a marker.
type NumSlice interface{}
// ParseNum parses s into the narrowest suitable number type: a *big.Rat for
// strings containing '/', otherwise a *big.Int (base auto-detected), falling
// back to float64. Exact results are normalized down (e.g. to int) where
// possible. It returns nil when s is not a valid number in any of these forms.
func ParseNum(s string) Num {
	// Only rational literals contain a slash.
	if strings.ContainsRune(s, '/') {
		z, ok := new(big.Rat).SetString(s)
		if !ok {
			return nil
		}
		return NormalizeBigRat(z)
	}
	// Integer (base inferred from prefix), then float.
	if z, ok := new(big.Int).SetString(s, 0); ok {
		return NormalizeBigInt(z)
	}
	f, err := strconv.ParseFloat(s, 64)
	if err != nil {
		return nil
	}
	return f
}
// NumType represents a number type.
type NumType uint8
// Possible values for NumType, sorted in the order of implicit conversion
// (lower types can be implicitly converted to higher types).
const (
	Int NumType = iota
	BigInt
	BigRat
	Float64
)
// UnifyNums unifies the given slice of numbers into the same type, converting
// those with lower NumType to the highest NumType present in the slice. The typ
// argument can be used to force the minimum NumType.
func UnifyNums(nums []Num, typ NumType) NumSlice {
	// Raise typ to the widest type actually present.
	for _, num := range nums {
		if t := getNumType(num); t > typ {
			typ = t
		}
	}
	switch typ {
	case Int:
		// Every element is already an int.
		unified := make([]int, len(nums))
		for i, num := range nums {
			unified[i] = num.(int)
		}
		return unified
	case BigInt:
		// Elements are int or *big.Int; widen ints.
		unified := make([]*big.Int, len(nums))
		for i, num := range nums {
			switch num := num.(type) {
			case int:
				unified[i] = big.NewInt(int64(num))
			case *big.Int:
				unified[i] = num
			default:
				panic("unreachable")
			}
		}
		return unified
	case BigRat:
		// Elements are int, *big.Int or *big.Rat; widen to rationals.
		unified := make([]*big.Rat, len(nums))
		for i, num := range nums {
			switch num := num.(type) {
			case int:
				unified[i] = big.NewRat(int64(num), 1)
			case *big.Int:
				var r big.Rat
				r.SetInt(num)
				unified[i] = &r
			case *big.Rat:
				unified[i] = num
			default:
				panic("unreachable")
			}
		}
		return unified
	case Float64:
		// Widen everything to float64, with possible loss of precision.
		unified := make([]float64, len(nums))
		for i, num := range nums {
			switch num := num.(type) {
			case int:
				unified[i] = float64(num)
			case *big.Int:
				if num.IsInt64() {
					// Might fit in float64
					unified[i] = float64(num.Int64())
				} else {
					// Definitely won't fit in float64
					unified[i] = math.Inf(num.Sign())
				}
			case *big.Rat:
				unified[i], _ = num.Float64()
			case float64:
				unified[i] = num
			default:
				panic("unreachable")
			}
		}
		return unified
	default:
		panic("unreachable")
	}
}
// getNumType reports the NumType of n, panicking on any type outside the Num
// stand-in set (int, *big.Int, *big.Rat, float64).
func getNumType(n Num) NumType {
	if _, ok := n.(int); ok {
		return Int
	}
	if _, ok := n.(*big.Int); ok {
		return BigInt
	}
	if _, ok := n.(*big.Rat); ok {
		return BigRat
	}
	if _, ok := n.(float64); ok {
		return Float64
	}
	panic("invalid num type " + fmt.Sprintf("%T", n))
}
// NormalizeBigInt converts z to an int when it fits in the int range;
// otherwise it returns z unchanged.
func NormalizeBigInt(z *big.Int) Num {
	i, fits := getInt(z)
	if !fits {
		return z
	}
	return i
}
// NormalizeBigRat converts z to a *big.Int (or an int when within range) if
// its denominator is 1; otherwise it returns z unchanged.
func NormalizeBigRat(z *big.Rat) Num {
	if !z.IsInt() {
		return z
	}
	n := z.Num()
	if i, fits := getInt(n); fits {
		return i
	}
	return n
}
func getInt(z *big.Int) (int, bool) {
// TODO: Use a more efficient implementation by examining z.Bits
if z.IsInt64() {
i64 := z.Int64()
i := int(i64)
if int64(i) == i64 {
return i, true
}
}
return -1, false
} | pkg/eval/vals/num.go | 0.594904 | 0.418994 | num.go | starcoder |
package atest
import (
"github.com/cadmean-ru/amphion/common/a"
"github.com/cadmean-ru/amphion/engine"
"testing"
)
// eng holds the engine instance started by RunEngineTest for the current test.
var eng *engine.AmphionEngine
// TestingDelegate is a callback invoked with the running engine.
type TestingDelegate func(e *engine.AmphionEngine)
// SceneTestingDelegate is a callback invoked with the running engine, the test
// scene root and the default test object (see MakeTestScene).
type SceneTestingDelegate func(e *engine.AmphionEngine, testScene, testObject *engine.SceneObject)
// RunEngineTest starts the Amphion engine on a testing frontend and then calls
// the specified delegate with it. The frontend/engine startup order
// (Init, Initialize, Run, Start) is significant; the started engine is also
// stored in the package-level eng variable.
func RunEngineTest(t *testing.T, delegate TestingDelegate) {
	t.Logf("Starting engine test")
	front := newTestingFrontend()
	front.Init()
	eng = engine.Initialize(front)
	front.Run()
	eng.Start()
	delegate(eng)
}
// RunEngineTestWithScene first starts the Amphion engine.
// It then creates the default testing scene (See MakeTestScene).
// The prepareDelegate is called before the scene is shown.
// Here you can perform some setup like adding new objects and components to the scene.
// The testingDelegate is where you actually call the tested code.
func RunEngineTestWithScene(t *testing.T, prepareDelegate, testingDelegate SceneTestingDelegate) {
	RunEngineTest(t, func(e *engine.AmphionEngine) {
		var scene, testObject *engine.SceneObject
		// The closure below captures scene/testObject BEFORE they are
		// assigned; this is safe because the TestingComponent only invokes it
		// after MakeTestScene has returned and both variables are set.
		scene, testObject = MakeTestScene(func(e *engine.AmphionEngine) {
			if testingDelegate != nil {
				testingDelegate(e, scene, testObject)
			}
		})
		if prepareDelegate != nil {
			prepareDelegate(e, scene, testObject)
		}
		err := e.ShowScene(scene)
		if err != nil {
			t.Fatal(err)
		}
	})
}
// MakeTestScene builds the default testing scene: a single centered
// (100; 100; 100) child object carrying a TestingComponent that invokes
// delegate. It returns the scene root and that child object.
func MakeTestScene(delegate TestingDelegate) (*engine.SceneObject, *engine.SceneObject) {
	root := engine.NewSceneObject("test scene")
	child := engine.NewSceneObject("test object")
	child.Transform.Position = a.NewVector3(a.CenterInParent, a.CenterInParent, a.CenterInParent)
	child.Transform.Size = a.NewVector3(100, 100, 100)
	child.Transform.Pivot = a.NewVector3(0.5, 0.5, 0.5)
	child.AddComponent(NewTestingComponent(delegate))
	root.AddChild(child)
	return root, child
}
// SimulateCallback simulates a frontend callback with the specified code and
// data, forwarded to the testing frontend instance.
func SimulateCallback(code int, data string) {
	instance.simulateCallback(code, data)
}
// SimulateClick simulates a user's click with the given mouse button at the
// specified screen position.
func SimulateClick(x, y int, button engine.MouseButton) {
	instance.simulateClick(x, y, button)
}
// SimulateClickOnObject simulates a user's click in the center of the given
// object's global rect.
func SimulateClickOnObject(o *engine.SceneObject, button engine.MouseButton) {
	rect := o.Transform.GetGlobalRect()
	centerX := int(rect.X.Min + rect.X.GetLength()/2)
	centerY := int(rect.Y.Min + rect.Y.GetLength()/2)
	SimulateClick(centerX, centerY, button)
}
// WaitForStop blocks the calling goroutine until the engine is stopped.
func WaitForStop() {
	eng.WaitForStop()
}
// Stop stops the testing instance of the Amphion engine.
func Stop() {
	eng.Stop()
}
// +build !amd64 gccgo appengine nacl
package poly1305
import "encoding/binary"
const (
	// msgBlock sets bit 24 of the top limb, the "1" bit appended to every
	// full 16-byte block in the 26-bit-limb representation used by update.
	msgBlock = uint32(1 << 24)
	// finalBlock adds no high bit; the trailing short block already carries
	// its appended 0x01 byte inside the buffered data (see Sum/Write).
	finalBlock = uint32(0)
)
// Sum generates an authenticator for msg using a one-time key and returns the
// 16-byte result. Authenticating two different messages with the same key allows
// an attacker to forge messages at will.
func Sum(msg []byte, key [32]byte) [TagSize]byte {
	var (
		h, r [5]uint32
		s    [4]uint32
	)
	var out [TagSize]byte
	initialize(&r, &s, &key)
	// process full 16-byte blocks
	n := len(msg) & (^(TagSize - 1))
	if n > 0 {
		update(msg[:n], msgBlock, &h, &r)
		msg = msg[n:]
	}
	// Pad a trailing short block with a single 0x01 byte and process it
	// without the implicit high bit (finalBlock).
	if len(msg) > 0 {
		var block [TagSize]byte
		off := copy(block[:], msg)
		block[off] = 1
		update(block[:], finalBlock, &h, &r)
	}
	finalize(&out, &h, &s)
	return out
}
// New returns a Hash computing the poly1305 sum of the written data under key.
// Notice that Poly1305 is insecure if one key is used twice.
func New(key [32]byte) *Hash {
	h := &Hash{}
	initialize(&h.r, &h.s, &key)
	return h
}
// Hash implements a Poly1305 writer interface.
// Poly1305 cannot be used like common hash.Hash implementations,
// because using a poly1305 key twice breaks its security.
// So poly1305.Hash does not support some kind of reset.
type Hash struct {
	h, r [5]uint32     // running accumulator and clamped r part of the key, as 26-bit limbs
	s    [4]uint32     // s part of the key, added in finalize
	buf  [TagSize]byte // buffer for a partial (not yet full) 16-byte block
	off  int           // number of buffered bytes in buf
	done bool          // set by Sum; further Writes are rejected
}
// Size returns the number of bytes Sum will append (the Poly1305 tag size).
func (p *Hash) Size() int { return TagSize }
// Write adds more data to the running Poly1305 hash.
// This function should return a non-nil error if a call
// to Write happens after a call to Sum. So it is not possible
// to compute the checksum and than add more data.
func (p *Hash) Write(msg []byte) (int, error) {
	if p.done {
		return 0, errWriteAfterSum
	}
	n := len(msg)
	// Top up a previously buffered partial block first.
	if p.off > 0 {
		dif := TagSize - p.off
		if n <= dif {
			// Still not a full block — just buffer and return.
			p.off += copy(p.buf[p.off:], msg)
			return n, nil
		}
		copy(p.buf[p.off:], msg[:dif])
		msg = msg[dif:]
		update(p.buf[:], msgBlock, &(p.h), &(p.r))
		p.off = 0
	}
	// process full 16-byte blocks
	if nn := len(msg) & (^(TagSize - 1)); nn > 0 {
		update(msg[:nn], msgBlock, &(p.h), &(p.r))
		msg = msg[nn:]
	}
	// Buffer whatever remains for a later Write or Sum.
	if len(msg) > 0 {
		p.off += copy(p.buf[p.off:], msg)
	}
	return n, nil
}
// Sum appends the Poly1305 hash of the previously
// processed data to b and returns the resulting slice.
// It is safe to call this function multiple times.
func (p *Hash) Sum(b []byte) []byte {
	var out [TagSize]byte
	// Work on a copy of the accumulator so repeated Sum calls agree.
	h := p.h
	if p.off > 0 {
		// Pad the buffered partial block with 0x01 and fold it in.
		var buf [TagSize]byte
		copy(buf[:], p.buf[:p.off])
		buf[p.off] = 1 // invariant: p.off < TagSize
		update(buf[:], finalBlock, &h, &(p.r))
	}
	finalize(&out, &h, &(p.s))
	p.done = true
	return append(b, out[:]...)
}
// initialize splits the 32-byte key into the clamped r part (five 26-bit
// limbs, with the bits required by Poly1305 masked off) and the s part
// (four little-endian 32-bit words).
func initialize(r *[5]uint32, s *[4]uint32, key *[32]byte) {
	r[0] = binary.LittleEndian.Uint32(key[0:]) & 0x3ffffff
	r[1] = (binary.LittleEndian.Uint32(key[3:]) >> 2) & 0x3ffff03
	r[2] = (binary.LittleEndian.Uint32(key[6:]) >> 4) & 0x3ffc0ff
	r[3] = (binary.LittleEndian.Uint32(key[9:]) >> 6) & 0x3f03fff
	r[4] = (binary.LittleEndian.Uint32(key[12:]) >> 8) & 0x00fffff
	s[0] = binary.LittleEndian.Uint32(key[16:])
	s[1] = binary.LittleEndian.Uint32(key[20:])
	s[2] = binary.LittleEndian.Uint32(key[24:])
	s[3] = binary.LittleEndian.Uint32(key[28:])
}
// update absorbs full 16-byte blocks of msg into the accumulator h:
// for each block, h = (h + block) * r mod p, with h and r held as five
// 26-bit limbs. flag (msgBlock or finalBlock) supplies the high bit OR'd
// onto the top limb of each block. len(msg) must be a multiple of TagSize.
func update(msg []byte, flag uint32, h, r *[5]uint32) {
	h0, h1, h2, h3, h4 := h[0], h[1], h[2], h[3], h[4]
	r0, r1, r2, r3, r4 := uint64(r[0]), uint64(r[1]), uint64(r[2]), uint64(r[3]), uint64(r[4])
	// Precomputed 5*r limbs fold the 2^130 ≡ 5 (mod p) reduction into the multiply.
	R1, R2, R3, R4 := r1*5, r2*5, r3*5, r4*5
	for len(msg) > 0 {
		// h += msg
		h0 += binary.LittleEndian.Uint32(msg[0:]) & 0x3ffffff
		h1 += (binary.LittleEndian.Uint32(msg[3:]) >> 2) & 0x3ffffff
		h2 += (binary.LittleEndian.Uint32(msg[6:]) >> 4) & 0x3ffffff
		h3 += (binary.LittleEndian.Uint32(msg[9:]) >> 6) & 0x3ffffff
		h4 += (binary.LittleEndian.Uint32(msg[12:]) >> 8) | flag
		// h *= r
		d0 := (uint64(h0) * r0) + (uint64(h1) * R4) + (uint64(h2) * R3) + (uint64(h3) * R2) + (uint64(h4) * R1)
		d1 := (d0 >> 26) + (uint64(h0) * r1) + (uint64(h1) * r0) + (uint64(h2) * R4) + (uint64(h3) * R3) + (uint64(h4) * R2)
		d2 := (d1 >> 26) + (uint64(h0) * r2) + (uint64(h1) * r1) + (uint64(h2) * r0) + (uint64(h3) * R4) + (uint64(h4) * R3)
		d3 := (d2 >> 26) + (uint64(h0) * r3) + (uint64(h1) * r2) + (uint64(h2) * r1) + (uint64(h3) * r0) + (uint64(h4) * R4)
		d4 := (d3 >> 26) + (uint64(h0) * r4) + (uint64(h1) * r3) + (uint64(h2) * r2) + (uint64(h3) * r1) + (uint64(h4) * r0)
		// h %= p (partial carry propagation back into 26-bit limbs)
		h0 = uint32(d0) & 0x3ffffff
		h1 = uint32(d1) & 0x3ffffff
		h2 = uint32(d2) & 0x3ffffff
		h3 = uint32(d3) & 0x3ffffff
		h4 = uint32(d4) & 0x3ffffff
		h0 += uint32(d4>>26) * 5
		h1 += h0 >> 26
		h0 = h0 & 0x3ffffff
		msg = msg[TagSize:]
	}
	h[0], h[1], h[2], h[3], h[4] = h0, h1, h2, h3, h4
}
// finalize fully reduces the accumulator h modulo p, adds the s part of the
// key modulo 2^128, and writes the resulting 16-byte tag to out. The h < p
// selection is done branchlessly via masks to keep the code constant-time.
func finalize(out *[TagSize]byte, h *[5]uint32, s *[4]uint32) {
	h0, h1, h2, h3, h4 := h[0], h[1], h[2], h[3], h[4]
	// h %= p reduction
	h2 += h1 >> 26
	h1 &= 0x3ffffff
	h3 += h2 >> 26
	h2 &= 0x3ffffff
	h4 += h3 >> 26
	h3 &= 0x3ffffff
	h0 += 5 * (h4 >> 26)
	h4 &= 0x3ffffff
	h1 += h0 >> 26
	h0 &= 0x3ffffff
	// h - p
	t0 := h0 + 5
	t1 := h1 + (t0 >> 26)
	t2 := h2 + (t1 >> 26)
	t3 := h3 + (t2 >> 26)
	t4 := h4 + (t3 >> 26) - (1 << 26)
	t0 &= 0x3ffffff
	t1 &= 0x3ffffff
	t2 &= 0x3ffffff
	t3 &= 0x3ffffff
	// select h if h < p else h - p
	t_mask := (t4 >> 31) - 1
	h_mask := ^t_mask
	h0 = (h0 & h_mask) | (t0 & t_mask)
	h1 = (h1 & h_mask) | (t1 & t_mask)
	h2 = (h2 & h_mask) | (t2 & t_mask)
	h3 = (h3 & h_mask) | (t3 & t_mask)
	h4 = (h4 & h_mask) | (t4 & t_mask)
	// h %= 2^128 (repack the 26-bit limbs into four 32-bit words)
	h0 |= h1 << 26
	h1 = ((h1 >> 6) | (h2 << 20))
	h2 = ((h2 >> 12) | (h3 << 14))
	h3 = ((h3 >> 18) | (h4 << 8))
	// s: the s part of the key
	// tag = (h + s) % (2^128)
	t := uint64(h0) + uint64(s[0])
	h0 = uint32(t)
	t = uint64(h1) + uint64(s[1]) + (t >> 32)
	h1 = uint32(t)
	t = uint64(h2) + uint64(s[2]) + (t >> 32)
	h2 = uint32(t)
	t = uint64(h3) + uint64(s[3]) + (t >> 32)
	h3 = uint32(t)
	binary.LittleEndian.PutUint32(out[0:], h0)
	binary.LittleEndian.PutUint32(out[4:], h1)
	binary.LittleEndian.PutUint32(out[8:], h2)
	binary.LittleEndian.PutUint32(out[12:], h3)
}
package govaluate
import (
"fmt"
"math"
"strconv"
"strings"
"unicode"
)
// ExprNodePrinter is an output builder for ExprNode.
// Use AppendString or AppendNode from custom handlers to append to output.
// Once err is set, all further appends become no-ops and the first error is
// reported by PrintWithHandler.
type ExprNodePrinter struct {
	nodeHandler func(ExprNode, *ExprNodePrinter) error // renders one node into this printer
	output      strings.Builder                        // accumulated textual output
	err         error                                  // first handler error, if any
}
// PrintConfig is used to override default behavior when printing an expression
// with a default node handler. The zero value leaves every default in place;
// all fields and map entries are optional.
type PrintConfig struct {
	// FormatBoolLiteral overrides boolean literal output.
	// Default handler returns "true" or "false".
	FormatBoolLiteral func(bool) string
	// FormatNumberLiteral overrides number literal output.
	// Default handler formats number with strconv.FormatFloat(value, 'f', -1, 64).
	FormatNumberLiteral func(float64) string
	// FormatStringLiteral overrides string literal output.
	// Default handler returns quoted value with other quotes and newlines escaped with backslash (\).
	FormatStringLiteral func(string) string
	// FormatVariable overrides variable output.
	// Default handler simply returns identifier as is.
	// This can be used to map variables to different names.
	FormatVariable func(string) string
	// OperatorMap contains a mapping for operator name overrides.
	// For example, ** -> pow mapping will change output from x ** y to pow(x, y).
	OperatorMap map[string]string
	// OperatorMapper is similar to OperatorMap, but allows mapping by name and arity (number of arguments).
	// For example, this can override unary and binary minus in a different way.
	OperatorMapper func(name string, arity int) string
	// InfixOperators contains overrides of what operators are printed in infix notation.
	// By default, all operators written in special symbols and "in" operator are considered infix.
	// For example, overriding pow -> true will change output from pow(x, y) to x pow y.
	// This only applies if an operator is binary (two arguments).
	InfixOperators map[string]bool
	// PrecedenceFn overrides precedence of operators.
	// Higher precedence means that the operation should performed first.
	// See defaultPrecedence(string, int) for defaults.
	PrecedenceFn func(name string, arity int) int
	// Operators overrides default behavior when printing a particular operator.
	// By default, special symbol unary operators are printed in prefix notation: !x, ~x.
	// Infix binary operators (see InfixOperators) are printed in infix notation: x + y, x && y.
	// Ternary if (?:) is printed like this: condition ? then : else.
	// All other operators are printed as function calls: square(x), now(), pow(x, y).
	Operators map[string]func(args []ExprNode, output *ExprNodePrinter) error
}
// AppendString writes token verbatim to the output; it is a no-op once a
// handler error has been recorded.
func (b *ExprNodePrinter) AppendString(token string) {
	if b.err != nil {
		return
	}
	b.output.WriteString(token)
}
// AppendNode invokes the node handler, which prints node into this printer.
// It is a no-op once an error has been recorded. The handler may recurse into
// AppendNode/AppendString and set b.err itself; the second check below keeps
// the FIRST recorded error rather than overwriting it with the outer result.
func (b *ExprNodePrinter) AppendNode(node ExprNode) {
	if b.err == nil {
		err := b.nodeHandler(node, b)
		if b.err == nil {
			b.err = err
		}
	}
}
// Print converts the ExprNode to a string using the default node handler.
// config customizes the output; the zero PrintConfig gives default behavior.
func (expr ExprNode) Print(config PrintConfig) (string, error) {
	handler := defaultNodeHandler(config)
	return expr.PrintWithHandler(handler)
}
// PrintWithHandler converts the ExprNode to a string using the given node
// handler, which takes an ExprNode and feeds output to an ExprNodePrinter.
// It returns the accumulated output and the first handler error, if any.
func (expr ExprNode) PrintWithHandler(nodeHandler func(ExprNode, *ExprNodePrinter) error) (string, error) {
	printer := &ExprNodePrinter{nodeHandler: nodeHandler}
	printer.AppendNode(expr)
	return printer.output.String(), printer.err
}
// defaultNodeHandler builds the standard node handler as a closure over
// config, dispatching on the node type and failing on anything unknown.
func defaultNodeHandler(config PrintConfig) func(ExprNode, *ExprNodePrinter) error {
	return func(node ExprNode, printer *ExprNodePrinter) error {
		if node.Type == NodeTypeLiteral {
			return literal(node.Value, printer, &config)
		}
		if node.Type == NodeTypeVariable {
			return variable(node.Name, printer, &config)
		}
		if node.Type == NodeTypeOperator {
			return operator(node.Name, node.Args, printer, &config)
		}
		return fmt.Errorf("unexpected node: %v", node)
	}
}
// literal formats a literal value (bool, float64 or string) according to
// config and appends it to the output. It returns an error for any other
// value type.
func literal(value interface{}, output *ExprNodePrinter, config *PrintConfig) error {
	var literal string
	// Idiom fix: bind the asserted value in the type switch instead of
	// re-asserting value.(T) inside each case.
	switch v := value.(type) {
	case bool:
		literal = boolLiteral(v, config)
	case float64:
		literal = numberLiteral(v, config)
	case string:
		literal = stringLiteral(v, config)
	default:
		return fmt.Errorf("unsupported literal type: %v", value)
	}
	output.AppendString(literal)
	return nil
}
// boolLiteral renders a boolean literal, honoring config.FormatBoolLiteral
// when set and producing "true"/"false" otherwise.
func boolLiteral(value bool, config *PrintConfig) string {
	if config.FormatBoolLiteral != nil {
		return config.FormatBoolLiteral(value)
	}
	return strconv.FormatBool(value)
}
// numberLiteral renders a number literal, honoring config.FormatNumberLiteral
// when set and falling back to fixed-point formatting otherwise.
func numberLiteral(value float64, config *PrintConfig) string {
	if format := config.FormatNumberLiteral; format != nil {
		return format(value)
	}
	return strconv.FormatFloat(value, 'f', -1, 64)
}
// stringLiteral renders a string literal, honoring config.FormatStringLiteral
// when set; otherwise it double-quotes the value with backslashes, quotes and
// CR/LF escaped.
func stringLiteral(value string, config *PrintConfig) string {
	if format := config.FormatStringLiteral; format != nil {
		return format(value)
	}
	replacer := strings.NewReplacer(
		"\\", "\\\\",
		"\"", "\\\"",
		"\r", "\\r",
		"\n", "\\n",
	)
	return "\"" + replacer.Replace(value) + "\""
}
// variable appends the (possibly remapped via config.FormatVariable)
// identifier to the output.
func variable(name string, output *ExprNodePrinter, config *PrintConfig) error {
	mapped := name
	if config.FormatVariable != nil {
		mapped = config.FormatVariable(name)
	}
	output.AppendString(mapped)
	return nil
}
func operator(name string, args []ExprNode, output *ExprNodePrinter, config *PrintConfig) error {
arity := len(args)
mappedName := config.mappedName(name, arity)
if fn, ok := config.Operators[mappedName]; ok {
return fn(args, output)
}
// binary operator: x + y
infix := config.isInfix(name, arity)
if infix {
selfPrecedence := config.precedence(name, arity)
leftPrecedence := config.precedenceForNode(args[0])
rightPrecedence := config.precedenceForNode(args[1])
if leftPrecedence < selfPrecedence {
output.AppendString("(")
}
output.AppendNode(args[0])
if leftPrecedence < selfPrecedence {
output.AppendString(")")
}
output.AppendString(" ")
output.AppendString(mappedName)
output.AppendString(" ")
if rightPrecedence <= selfPrecedence {
output.AppendString("(")
}
output.AppendNode(args[1])
if rightPrecedence <= selfPrecedence {
output.AppendString(")")
}
return nil
}
// prefix operator: !x
prefix := arity == 1 && isSpecial(mappedName)
if prefix {
selfPrecedence := config.precedence(name, arity)
rightPrecedence := config.precedenceForNode(args[0])
output.AppendString(mappedName)
if rightPrecedence < selfPrecedence {
output.AppendString("(")
}
output.AppendNode(args[0])
if rightPrecedence < selfPrecedence {
output.AppendString(")")
}
return nil
}
// ternary if: x ? y : z
if mappedName == "?:" && arity == 3 {
selfPrecedence := config.precedence(name, arity)
conditionPrecedence := config.precedenceForNode(args[0])
thenPrecedence := config.precedenceForNode(args[1])
elsePrecedence := config.precedenceForNode(args[2])
if conditionPrecedence <= selfPrecedence {
output.AppendString("(")
}
output.AppendNode(args[0])
if conditionPrecedence <= selfPrecedence {
output.AppendString(")")
}
output.AppendString(" ? ")
if thenPrecedence <= selfPrecedence {
output.AppendString("(")
}
output.AppendNode(args[1])
if thenPrecedence <= selfPrecedence {
output.AppendString(")")
}
output.AppendString(" : ")
if elsePrecedence < selfPrecedence {
output.AppendString("(")
}
output.AppendNode(args[2])
if elsePrecedence < selfPrecedence {
output.AppendString(")")
}
return nil
}
// function call: fn(a, b, c)
output.AppendString(mappedName)
output.AppendString("(")
for idx, arg := range args {
if idx > 0 {
output.AppendString(", ")
}
output.AppendNode(arg)
}
output.AppendString(")")
return nil
}
func isSpecial(name string) bool {
for _, r := range []rune(name) {
if unicode.IsLetter(r) {
return false
}
}
return len(name) > 0
}
func (config *PrintConfig) mappedName(operator string, arity int) string {
if mappedName, ok := config.OperatorMap[operator]; ok {
return mappedName
}
if config.OperatorMapper != nil {
if mappedName := config.OperatorMapper(operator, arity); mappedName != "" {
return mappedName
}
}
return operator
}
func (config *PrintConfig) isInfix(operator string, arity int) bool {
if arity != 2 {
return false
}
mappedName := config.mappedName(operator, arity)
if infix, found := config.InfixOperators[mappedName]; found {
return infix
}
return isSpecial(mappedName) || mappedName == "in"
}
func (config *PrintConfig) precedenceForNode(node ExprNode) int {
if node.Type == NodeTypeOperator {
return config.precedence(node.Name, len(node.Args))
}
// variable and literal have max precedence
return math.MaxInt32
}
func (config *PrintConfig) precedence(operator string, arity int) int {
if config.PrecedenceFn != nil {
mappedName := config.mappedName(operator, arity)
return config.PrecedenceFn(mappedName, arity)
}
return defaultPrecedence(operator, arity)
}
func defaultPrecedence(operator string, arity int) int {
if arity == 1 {
return 10
}
switch operator {
case ",":
return 0
case "?:", "?", ":":
return 1
case "??":
return 2
case "||":
return 3
case "&&":
return 4
case "==", "!=", ">", "<", ">=", "<=", "=~", "!~", "in":
return 5
case "&", "|", "^", "<<", ">>":
return 7
case "+", "-":
return 8
case "*", "/", "%":
return 9
case "**":
return 11
}
return 6
} | Print.go | 0.701611 | 0.49109 | Print.go | starcoder |
package geojson
import "github.com/mmadfox/geojson/geometry"
// SimplePoint ...
type SimplePoint struct {
geometry.Point
}
// NewSimplePoint returns a new SimplePoint object.
func NewSimplePoint(point geometry.Point) *SimplePoint {
return &SimplePoint{Point: point}
}
// ForEach ...
func (g *SimplePoint) ForEach(iter func(geom Object) bool) bool {
return iter(g)
}
// Empty ...
func (g *SimplePoint) Empty() bool {
return g.Point.Empty()
}
// Valid ...
func (g *SimplePoint) Valid() bool {
return g.Point.Valid()
}
// Rect ...
func (g *SimplePoint) Rect() geometry.Rect {
return g.Point.Rect()
}
// Spatial ...
func (g *SimplePoint) Spatial() Spatial {
return g
}
// Center ...
func (g *SimplePoint) Center() geometry.Point {
return g.Point
}
// Base ...
func (g *SimplePoint) Base() geometry.Point {
return g.Point
}
// AppendJSON ...
func (g *SimplePoint) AppendJSON(dst []byte) []byte {
dst = append(dst, `{"type":"Point","coordinates":`...)
dst = appendJSONPoint(dst, g.Point, nil, 0)
dst = append(dst, '}')
return dst
}
// JSON ...
func (g *SimplePoint) JSON() string {
return string(g.AppendJSON(nil))
}
// MarshalJSON ...
func (g *SimplePoint) MarshalJSON() ([]byte, error) {
return g.AppendJSON(nil), nil
}
// String ...
func (g *SimplePoint) String() string {
return string(g.AppendJSON(nil))
}
// Within ...
func (g *SimplePoint) Within(obj Object) bool {
return obj.Contains(g)
}
// Contains ...
func (g *SimplePoint) Contains(obj Object) bool {
return obj.Spatial().WithinPoint(g.Point)
}
// Intersects ...
func (g *SimplePoint) Intersects(obj Object) bool {
if obj, ok := obj.(*Circle); ok {
return obj.Contains(g)
}
return obj.Spatial().IntersectsPoint(g.Point)
}
// WithinRect ...
func (g *SimplePoint) WithinRect(rect geometry.Rect) bool {
return rect.ContainsPoint(g.Point)
}
// WithinPoint ...
func (g *SimplePoint) WithinPoint(point geometry.Point) bool {
return point.ContainsPoint(g.Point)
}
// WithinLine ...
func (g *SimplePoint) WithinLine(line *geometry.Line) bool {
return line.ContainsPoint(g.Point)
}
// WithinPoly ...
func (g *SimplePoint) WithinPoly(poly *geometry.Poly) bool {
return poly.ContainsPoint(g.Point)
}
// IntersectsPoint ...
func (g *SimplePoint) IntersectsPoint(point geometry.Point) bool {
return g.Point.IntersectsPoint(point)
}
// IntersectsRect ...
func (g *SimplePoint) IntersectsRect(rect geometry.Rect) bool {
return g.Point.IntersectsRect(rect)
}
// IntersectsLine ...
func (g *SimplePoint) IntersectsLine(line *geometry.Line) bool {
return g.Point.IntersectsLine(line)
}
// IntersectsPoly ...
func (g *SimplePoint) IntersectsPoly(poly *geometry.Poly) bool {
return g.Point.IntersectsPoly(poly)
}
// NumPoints ...
func (g *SimplePoint) NumPoints() int {
return 1
}
// Distance ...
func (g *SimplePoint) Distance(obj Object) float64 {
return obj.Spatial().DistancePoint(g.Point)
}
// DistancePoint ...
func (g *SimplePoint) DistancePoint(point geometry.Point) float64 {
return geoDistancePoints(g.Center(), point)
}
// DistanceRect ...
func (g *SimplePoint) DistanceRect(rect geometry.Rect) float64 {
return geoDistancePoints(g.Center(), rect.Center())
}
// DistanceLine ...
func (g *SimplePoint) DistanceLine(line *geometry.Line) float64 {
return geoDistancePoints(g.Center(), line.Rect().Center())
}
// DistancePoly ...
func (g *SimplePoint) DistancePoly(poly *geometry.Poly) float64 {
return geoDistancePoints(g.Center(), poly.Rect().Center())
} | simplepoint.go | 0.899652 | 0.744285 | simplepoint.go | starcoder |
package linear
import (
"fmt"
"spell"
"spell/scorer"
)
type vectorScore struct {
*scorer.Vector
score float64
}
type LearnProgress struct {
VectorSystemsCount int
Step int
BestScore int
RelaxingCount int
}
type Learner struct {
*scorer.Vectoriser
learnProgress LearnProgress
}
func (learner *Learner) Learn(learningData []*spell.LearningTerm) spell.ScoreModel {
learner.learnProgress = LearnProgress{}
vectorSystems := make([]*VectorSystem, 0, len(learningData))
for _, learningTerm := range learningData {
vectorSystem := learner.vectorSystem(learningTerm)
if vectorSystem != nil {
vectorSystems = append(vectorSystems, vectorSystem)
}
}
learner.learnProgress.VectorSystemsCount = len(vectorSystems)
var bestVector *scorer.Vector
if len(vectorSystems) > 0 && vectorSystems[0] != nil &&
len(vectorSystems[0].Vectors) > 0 && vectorSystems[0].Vectors[0] != nil {
var (
vector *scorer.Vector
tries = 100
maxRelaxCount = 10
relaxingCount = 0
maxDirectSearchCount = 1000
bestScore = 0
prevBestScore = 0
)
triesLoop:
for i := 0; i < tries; i++ {
learner.learnProgress.Step = i + 1
vector = scorer.RandomVector(vectorSystems[0].Vectors[0].Len())
currentScore := learner.score(vectorSystems, vector)
for k := 0; k < maxDirectSearchCount; k++ {
vVectors := vector.Variate(0, 1, 0.1)
valuableVectors := make([]vectorScore, 0, len(vVectors))
for _, v := range vVectors {
vScore := learner.score(vectorSystems, v)
if vScore > currentScore {
valuableVectors = append(valuableVectors, vectorScore{
Vector: v,
score: float64(vScore),
})
}
}
if len(valuableVectors) > 0 {
maxScore := 0.0
for _, vs := range valuableVectors {
if vs.score > maxScore {
maxScore = vs.score
}
}
nextVector := vector.Clone()
for _, vs := range valuableVectors {
nextVector = nextVector.MoveToward(vs.Vector, vs.score / maxScore)
}
currentScore = learner.score(vectorSystems, nextVector)
vector = nextVector
} else {
break
}
if currentScore > bestScore {
bestScore = currentScore
bestVector = vector
learner.learnProgress.BestScore = bestScore
}
}
if bestScore > prevBestScore {
relaxingCount = 0
} else {
relaxingCount++
learner.learnProgress.RelaxingCount = relaxingCount
if relaxingCount == maxRelaxCount {
break triesLoop
}
}
prevBestScore = bestScore
}
}
return &Scorer{
Weights: bestVector,
Vectoriser: scorer.InitVectoriser(),
}
}
func (learner *Learner) LearnProgress() string {
learnProgress := learner.learnProgress;
return fmt.Sprintf("Step %d. Best score: %d. VS count: %d. Relaxing Count: %d", learnProgress.Step, learnProgress.BestScore, learnProgress.VectorSystemsCount, learnProgress.RelaxingCount)
}
func (learner *Learner) score(vectorSystems []*VectorSystem, vector *scorer.Vector) int {
currentScore := 0
for _, vectorSystem := range vectorSystems {
if vectorSystem.IsSatisfied(vector) {
currentScore += 1
}
}
return currentScore
}
func (learner *Learner) vectorSystem(a *spell.LearningTerm) *VectorSystem {
baseVector := (*scorer.Vector)(nil)
for _, suggestion := range a.Suggestions {
if suggestion.Term == a.Term {
baseVector = learner.Vectorize(suggestion.Prescription)
break
}
}
if baseVector == nil {
return nil
}
vectorSystem := InitVectorSystem()
for _, suggestion := range a.Suggestions {
if suggestion.Term != a.Term && a.Misspell != suggestion.Term {
vector := learner.Vectorize(suggestion.Prescription)
vector = vector.Sub(baseVector)
vectorSystem.Add(vector)
}
}
vectorSystem.Normalize()
if len(vectorSystem.Vectors) > 0 {
return vectorSystem
}
return nil
} | spell/scorer/linear/stochastic_learner.go | 0.586996 | 0.527742 | stochastic_learner.go | starcoder |
package diceware
// loadDiceString returns the diceware wordlist as a JSON-encoded byte slice.
// It is defined separately because the embedded list is very long.
func loadDiceString() []byte {
return []byte(`{"11111": "abacus",
"11112": "abdomen",
"11113": "abdominal",
"11114": "abide",
"11115": "abiding",
"11116": "ability",
"11121": "ablaze",
"11122": "able",
"11123": "abnormal",
"11124": "abrasion",
"11125": "abrasive",
"11126": "abreast",
"11131": "abridge",
"11132": "abroad",
"11133": "abruptly",
"11134": "absence",
"11135": "absentee",
"11136": "absently",
"11141": "absinthe",
"11142": "absolute",
"11143": "absolve",
"11144": "abstain",
"11145": "abstract",
"11146": "absurd",
"11151": "accent",
"11152": "acclaim",
"11153": "acclimate",
"11154": "accompany",
"11155": "account",
"11156": "accuracy",
"11161": "accurate",
"11162": "accustom",
"11163": "acetone",
"11164": "achiness",
"11165": "aching",
"11166": "acid",
"11211": "acorn",
"11212": "acquaint",
"11213": "acquire",
"11214": "acre",
"11215": "acrobat",
"11216": "acronym",
"11221": "acting",
"11222": "action",
"11223": "activate",
"11224": "activator",
"11225": "active",
"11226": "activism",
"11231": "activist",
"11232": "activity",
"11233": "actress",
"11234": "acts",
"11235": "acutely",
"11236": "acuteness",
"11241": "aeration",
"11242": "aerobics",
"11243": "aerosol",
"11244": "aerospace",
"11245": "afar",
"11246": "affair",
"11251": "affected",
"11252": "affecting",
"11253": "affection",
"11254": "affidavit",
"11255": "affiliate",
"11256": "affirm",
"11261": "affix",
"11262": "afflicted",
"11263": "affluent",
"11264": "afford",
"11265": "affront",
"11266": "aflame",
"11311": "afloat",
"11312": "aflutter",
"11313": "afoot",
"11314": "afraid",
"11315": "afterglow",
"11316": "afterlife",
"11321": "aftermath",
"11322": "aftermost",
"11323": "afternoon",
"11324": "aged",
"11325": "ageless",
"11326": "agency",
"11331": "agenda",
"11332": "agent",
"11333": "aggregate",
"11334": "aghast",
"11335": "agile",
"11336": "agility",
"11341": "aging",
"11342": "agnostic",
"11343": "agonize",
"11344": "agonizing",
"11345": "agony",
"11346": "agreeable",
"11351": "agreeably",
"11352": "agreed",
"11353": "agreeing",
"11354": "agreement",
"11355": "aground",
"11356": "ahead",
"11361": "ahoy",
"11362": "aide",
"11363": "aids",
"11364": "aim",
"11365": "ajar",
"11366": "alabaster",
"11411": "alarm",
"11412": "albatross",
"11413": "album",
"11414": "alfalfa",
"11415": "algebra",
"11416": "algorithm",
"11421": "alias",
"11422": "alibi",
"11423": "alienable",
"11424": "alienate",
"11425": "aliens",
"11426": "alike",
"11431": "alive",
"11432": "alkaline",
"11433": "alkalize",
"11316": "afterlife",
"11321": "aftermath",
"11322": "aftermost",
"11323": "afternoon",
"11324": "aged",
"11325": "ageless",
"11326": "agency",
"11331": "agenda",
"11332": "agent",
"11333": "aggregate",
"11334": "aghast",
"11335": "agile",
"11336": "agility",
"11341": "aging",
"11342": "agnostic",
"11343": "agonize",
"11344": "agonizing",
"11345": "agony",
"11346": "agreeable",
"11351": "agreeably",
"11352": "agreed",
"11353": "agreeing",
"11354": "agreement",
"11355": "aground",
"11356": "ahead",
"11361": "ahoy",
"11362": "aide",
"11363": "aids",
"11364": "aim",
"11365": "ajar",
"11366": "alabaster",
"11411": "alarm",
"11412": "albatross",
"11413": "album",
"11414": "alfalfa",
"11415": "algebra",
"11416": "algorithm",
"11421": "alias",
"11422": "alibi",
"11423": "alienable",
"11424": "alienate",
"11425": "aliens",
"11426": "alike",
"11431": "alive",
"11432": "alkaline",
"11433": "alkalize",
"11434": "almanac",
"11435": "almighty",
"11436": "almost",
"11441": "aloe",
"11442": "aloft",
"11443": "aloha",
"11444": "alone",
"11445": "alongside",
"11446": "aloof",
"11451": "alphabet",
"11452": "alright",
"11453": "although",
"11454": "altitude",
"11455": "alto",
"11456": "aluminum",
"11461": "alumni",
"11462": "always",
"11463": "amaretto",
"11464": "amaze",
"11465": "amazingly",
"11466": "amber",
"11511": "ambiance",
"11512": "ambiguity",
"11513": "ambiguous",
"11514": "ambition",
"11515": "ambitious",
"11516": "ambulance",
"11521": "ambush",
"11522": "amendable",
"11523": "amendment",
"11524": "amends",
"11525": "amenity",
"11526": "amiable",
"11531": "amicably",
"11532": "amid",
"11533": "amigo",
"11534": "amino",
"11535": "amiss",
"11536": "ammonia",
"11541": "ammonium",
"11542": "amnesty",
"11543": "amniotic",
"11544": "among",
"11545": "amount",
"11546": "amperage",
"11551": "ample",
"11552": "amplifier",
"11553": "amplify",
"11554": "amply",
"11555": "amuck",
"11556": "amulet",
"11561": "amusable",
"11562": "amused",
"11563": "amusement",
"11564": "amuser",
"11565": "amusing",
"11566": "anaconda",
"11611": "anaerobic",
"11612": "anagram",
"11613": "anatomist",
"11614": "anatomy",
"11615": "anchor",
"11616": "anchovy",
"11621": "ancient",
"11622": "android",
"11623": "anemia",
"11624": "anemic",
"11625": "aneurism",
"11626": "anew",
"11631": "angelfish",
"11632": "angelic",
"11633": "anger",
"11634": "angled",
"11635": "angler",
"11636": "angles",
"11641": "angling",
"11642": "angrily",
"11643": "angriness",
"11644": "anguished",
"11645": "angular",
"11646": "animal",
"11651": "animate",
"11652": "animating",
"11653": "animation",
"11654": "animator",
"11655": "anime",
"11656": "animosity",
"11661": "ankle",
"11662": "annex",
"11663": "annotate",
"11664": "announcer",
"11665": "annoying",
"11666": "annually",
"12111": "annuity",
"12112": "anointer",
"12113": "another",
"12114": "answering",
"12115": "antacid",
"12116": "antarctic",
"12121": "anteater",
"12122": "antelope",
"12123": "antennae",
"12124": "anthem",
"12125": "anthill",
"12126": "anthology",
"12131": "antibody",
"12132": "antics",
"12133": "antidote",
"12134": "antihero",
"12135": "antiquely",
"12136": "antiques",
"12141": "antiquity",
"12142": "antirust",
"12143": "antitoxic",
"12144": "antitrust",
"12145": "antiviral",
"12146": "antivirus",
"12151": "antler",
"12152": "antonym",
"12153": "antsy",
"12154": "anvil",
"12155": "anybody",
"12156": "anyhow",
"12161": "anymore",
"12162": "anyone",
"12163": "anyplace",
"12164": "anything",
"12165": "anytime",
"12166": "anyway",
"12211": "anywhere",
"12212": "aorta",
"12213": "apache",
"12214": "apostle",
"12215": "appealing",
"12216": "appear",
"12221": "appease",
"12222": "appeasing",
"12223": "appendage",
"12224": "appendix",
"12225": "appetite",
"12226": "appetizer",
"12231": "applaud",
"12232": "applause",
"12233": "apple",
"12234": "appliance",
"12235": "applicant",
"12236": "applied",
"12241": "apply",
"12242": "appointee",
"12243": "appraisal",
"12244": "appraiser",
"12245": "apprehend",
"12246": "approach",
"12251": "approval",
"12252": "approve",
"12253": "apricot",
"12254": "april",
"12255": "apron",
"12256": "aptitude",
"12261": "aptly",
"12262": "aqua",
"12263": "aqueduct",
"12264": "arbitrary",
"12265": "arbitrate",
"12266": "ardently",
"12311": "area",
"12312": "arena",
"12313": "arguable",
"12314": "arguably",
"12315": "argue",
"12316": "arise",
"12321": "armadillo",
"12322": "armband",
"12323": "armchair",
"12324": "armed",
"12325": "armful",
"12326": "armhole",
"12331": "arming",
"12332": "armless",
"12333": "armoire",
"12334": "armored",
"12335": "armory",
"12336": "armrest",
"12341": "army",
"12342": "aroma",
"12343": "arose",
"12344": "around",
"12345": "arousal",
"12346": "arrange",
"12351": "array",
"12352": "arrest",
"12353": "arrival",
"12354": "arrive",
"12355": "arrogance",
"12356": "arrogant",
"12361": "arson",
"12362": "art",
"12363": "ascend",
"12364": "ascension",
"12365": "ascent",
"12366": "ascertain",
"12411": "ashamed",
"12412": "ashen",
"12413": "ashes",
"12414": "ashy",
"12415": "aside",
"12416": "askew",
"12421": "asleep",
"12422": "asparagus",
"12423": "aspect",
"12424": "aspirate",
"12425": "aspire",
"12426": "aspirin",
"12431": "astonish",
"12432": "astound",
"12433": "astride",
"12434": "astrology",
"12435": "astronaut",
"12436": "astronomy",
"12441": "astute",
"12442": "atlantic",
"12443": "atlas",
"12444": "atom",
"12445": "atonable",
"12446": "atop",
"12451": "atrium",
"12452": "atrocious",
"12453": "atrophy",
"12454": "attach",
"12455": "attain",
"12456": "attempt",
"12461": "attendant",
"12462": "attendee",
"12463": "attention",
"12464": "attentive",
"12465": "attest",
"12466": "attic",
"12511": "attire",
"12512": "attitude",
"12513": "attractor",
"12514": "attribute",
"12515": "atypical",
"12516": "auction",
"12521": "audacious",
"12522": "audacity",
"12523": "audible",
"12524": "audibly",
"12525": "audience",
"12526": "audio",
"12531": "audition",
"12532": "augmented",
"12533": "august",
"12534": "authentic",
"12535": "author",
"12536": "autism",
"12541": "autistic",
"12542": "autograph",
"12543": "automaker",
"12544": "automated",
"12545": "automatic",
"12546": "autopilot",
"12551": "available",
"12552": "avalanche",
"12553": "avatar",
"12554": "avenge",
"12555": "avenging",
"12556": "avenue",
"12561": "average",
"12562": "aversion",
"12563": "avert",
"12564": "aviation",
"12565": "aviator",
"12566": "avid",
"12611": "avoid",
"12612": "await",
"12613": "awaken",
"12614": "award",
"12615": "aware",
"12616": "awhile",
"12621": "awkward",
"12622": "awning",
"12623": "awoke",
"12624": "awry",
"12625": "axis",
"12626": "babble",
"12631": "babbling",
"12632": "babied",
"12633": "baboon",
"12634": "backache",
"12635": "backboard",
"12636": "backboned",
"12641": "backdrop",
"12642": "backed",
"12643": "backer",
"12644": "backfield",
"12645": "backfire",
"12646": "backhand",
"12651": "backing",
"12652": "backlands",
"12653": "backlash",
"12654": "backless",
"12655": "backlight",
"12656": "backlit",
"12661": "backlog",
"12662": "backpack",
"12663": "backpedal",
"12664": "backrest",
"12665": "backroom",
"12666": "backshift",
"13111": "backside",
"13112": "backslid",
"13113": "backspace",
"13114": "backspin",
"13115": "backstab",
"13116": "backstage",
"13121": "backtalk",
"13122": "backtrack",
"13123": "backup",
"13124": "backward",
"13125": "backwash",
"13126": "backwater",
"13131": "backyard",
"13132": "bacon",
"13133": "bacteria",
"13134": "bacterium",
"13135": "badass",
"13136": "badge",
"13141": "badland",
"13142": "badly",
"13143": "badness",
"13144": "baffle",
"13145": "baffling",
"13146": "bagel",
"13151": "bagful",
"13152": "baggage",
"13153": "bagged",
"13154": "baggie",
"13155": "bagginess",
"13156": "bagging",
"13161": "baggy",
"13162": "bagpipe",
"13163": "baguette",
"13164": "baked",
"13165": "bakery",
"13166": "bakeshop",
"13211": "baking",
"13212": "balance",
"13213": "balancing",
"13214": "balcony",
"13215": "balmy",
"13216": "balsamic",
"13221": "bamboo",
"13222": "banana",
"13223": "banish",
"13224": "banister",
"13225": "banjo",
"13226": "bankable",
"13231": "bankbook",
"13232": "banked",
"13233": "banker",
"13234": "banking",
"13235": "banknote",
"13236": "bankroll",
"13241": "banner",
"13242": "bannister",
"13243": "banshee",
"13244": "banter",
"13245": "barbecue",
"13246": "barbed",
"13251": "barbell",
"13252": "barber",
"13253": "barcode",
"13254": "barge",
"13255": "bargraph",
"13256": "barista",
"13261": "baritone",
"13262": "barley",
"13263": "barmaid",
"13264": "barman",
"13265": "barn",
"13266": "barometer",
"13311": "barrack",
"13312": "barracuda",
"13313": "barrel",
"13314": "barrette",
"13315": "barricade",
"13316": "barrier",
"13321": "barstool",
"13322": "bartender",
"13323": "barterer",
"13324": "bash",
"13325": "basically",
"13326": "basics",
"13331": "basil",
"13332": "basin",
"13333": "basis",
"13334": "basket",
"13335": "batboy",
"13336": "batch",
"13341": "bath",
"13342": "baton",
"13343": "bats",
"13344": "battalion",
"13345": "battered",
"13346": "battering",
"13351": "battery",
"13352": "batting",
"13353": "battle",
"13354": "bauble",
"13355": "bazooka",
"13356": "blabber",
"13361": "bladder",
"13362": "blade",
"13363": "blah",
"13364": "blame",
"13365": "blaming",
"13366": "blanching",
"13411": "blandness",
"13412": "blank",
"13413": "blaspheme",
"13414": "blasphemy",
"13415": "blast",
"13416": "blatancy",
"13421": "blatantly",
"13422": "blazer",
"13423": "blazing",
"13424": "bleach",
"13425": "bleak",
"13426": "bleep",
"13431": "blemish",
"13432": "blend",
"13433": "bless",
"13434": "blighted",
"13435": "blimp",
"13436": "bling",
"13441": "blinked",
"13442": "blinker",
"13443": "blinking",
"13444": "blinks",
"13445": "blip",
"13446": "blissful",
"13451": "blitz",
"13452": "blizzard",
"13453": "bloated",
"13454": "bloating",
"13455": "blob",
"13456": "blog",
"13461": "bloomers",
"13462": "blooming",
"13463": "blooper",
"13464": "blot",
"13465": "blouse",
"13466": "blubber",
"13511": "bluff",
"13512": "bluish",
"13513": "blunderer",
"13514": "blunt",
"13515": "blurb",
"13516": "blurred",
"13521": "blurry",
"13522": "blurt",
"13523": "blush",
"13524": "blustery",
"13525": "boaster",
"13526": "boastful",
"13531": "boasting",
"13532": "boat",
"13533": "bobbed",
"13534": "bobbing",
"13535": "bobble",
"13536": "bobcat",
"13541": "bobsled",
"13542": "bobtail",
"13543": "bodacious",
"13544": "body",
"13545": "bogged",
"13546": "boggle",
"13551": "bogus",
"13552": "boil",
"13553": "bok",
"13554": "bolster",
"13555": "bolt",
"13556": "bonanza",
"13561": "bonded",
"13562": "bonding",
"13563": "bondless",
"13564": "boned",
"13565": "bonehead",
"13566": "boneless",
"13611": "bonelike",
"13612": "boney",
"13613": "bonfire",
"13614": "bonnet",
"13615": "bonsai",
"13616": "bonus",
"13621": "bony",
"13622": "boogeyman",
"13623": "boogieman",
"13624": "book",
"13625": "boondocks",
"13626": "booted",
"13631": "booth",
"13632": "bootie",
"13633": "booting",
"13634": "bootlace",
"13635": "bootleg",
"13636": "boots",
"13641": "boozy",
"13642": "borax",
"13643": "boring",
"13644": "borough",
"13645": "borrower",
"13646": "borrowing",
"13651": "boss",
"13652": "botanical",
"13653": "botanist",
"13654": "botany",
"13655": "botch",
"13656": "both",
"13661": "bottle",
"13662": "bottling",
"13663": "bottom",
"13664": "bounce",
"13665": "bouncing",
"13666": "bouncy",
"14111": "bounding",
"14112": "boundless",
"14113": "bountiful",
"14114": "bovine",
"14115": "boxcar",
"14116": "boxer",
"14121": "boxing",
"14122": "boxlike",
"14123": "boxy",
"14124": "breach",
"14125": "breath",
"14126": "breeches",
"14131": "breeching",
"14132": "breeder",
"14133": "breeding",
"14134": "breeze",
"14135": "breezy",
"14136": "brethren",
"14141": "brewery",
"14142": "brewing",
"14143": "briar",
"14144": "bribe",
"14145": "brick",
"14146": "bride",
"14151": "bridged",
"14152": "brigade",
"14153": "bright",
"14154": "brilliant",
"14155": "brim",
"14156": "bring",
"14161": "brink",
"14162": "brisket",
"14163": "briskly",
"14164": "briskness",
"14165": "bristle",
"14166": "brittle",
"14211": "broadband",
"14212": "broadcast",
"14213": "broaden",
"14214": "broadly",
"14215": "broadness",
"14216": "broadside",
"14221": "broadways",
"14222": "broiler",
"14223": "broiling",
"14224": "broken",
"14225": "broker",
"14226": "bronchial",
"14231": "bronco",
"14232": "bronze",
"14233": "bronzing",
"14234": "brook",
"14235": "broom",
"14236": "brought",
"14241": "browbeat",
"14242": "brownnose",
"14243": "browse",
"14244": "browsing",
"14245": "bruising",
"14246": "brunch",
"14251": "brunette",
"14252": "brunt",
"14253": "brush",
"14254": "brussels",
"14255": "brute",
"14256": "brutishly",
"14261": "bubble",
"14262": "bubbling",
"14263": "bubbly",
"14264": "buccaneer",
"14265": "bucked",
"14266": "bucket",
"14311": "buckle",
"14312": "buckshot",
"14313": "buckskin",
"14314": "bucktooth",
"14315": "buckwheat",
"14316": "buddhism",
"14321": "buddhist",
"14322": "budding",
"14323": "buddy",
"14324": "budget",
"14325": "buffalo",
"14326": "buffed",
"14331": "buffer",
"14332": "buffing",
"14333": "buffoon",
"14334": "buggy",
"14335": "bulb",
"14336": "bulge",
"14341": "bulginess",
"14342": "bulgur",
"14343": "bulk",
"14344": "bulldog",
"14345": "bulldozer",
"14346": "bullfight",
"14351": "bullfrog",
"14352": "bullhorn",
"14353": "bullion",
"14354": "bullish",
"14355": "bullpen",
"14356": "bullring",
"14361": "bullseye",
"14362": "bullwhip",
"14363": "bully",
"14364": "bunch",
"14365": "bundle",
"14366": "bungee",
"14411": "bunion",
"14412": "bunkbed",
"14413": "bunkhouse",
"14414": "bunkmate",
"14415": "bunny",
"14416": "bunt",
"14421": "busboy",
"14422": "bush",
"14423": "busily",
"14424": "busload",
"14425": "bust",
"14426": "busybody",
"14431": "buzz",
"14432": "cabana",
"14433": "cabbage",
"14434": "cabbie",
"14435": "cabdriver",
"14436": "cable",
"14441": "caboose",
"14442": "cache",
"14443": "cackle",
"14444": "cacti",
"14445": "cactus",
"14446": "caddie",
"14451": "caddy",
"14452": "cadet",
"14453": "cadillac",
"14454": "cadmium",
"14455": "cage",
"14456": "cahoots",
"14461": "cake",
"14462": "calamari",
"14463": "calamity",
"14464": "calcium",
"14465": "calculate",
"14466": "calculus",
"14511": "caliber",
"14512": "calibrate",
"14513": "calm",
"14514": "caloric",
"14515": "calorie",
"14516": "calzone",
"14521": "camcorder",
"14522": "cameo",
"14523": "camera",
"14524": "camisole",
"14525": "camper",
"14526": "campfire",
"14531": "camping",
"14532": "campsite",
"14533": "campus",
"14534": "canal",
"14535": "canary",
"14536": "cancel",
"14541": "candied",
"14542": "candle",
"14543": "candy",
"14544": "cane",
"14545": "canine",
"14546": "canister",
"14551": "cannabis",
"14552": "canned",
"14553": "canning",
"14554": "cannon",
"14555": "cannot",
"14556": "canola",
"14561": "canon",
"14562": "canopener",
"14563": "canopy",
"14564": "canteen",
"14565": "canyon",
"14566": "capable",
"14611": "capably",
"14612": "capacity",
"14613": "cape",
"14614": "capillary",
"14615": "capital",
"14616": "capitol",
"14621": "capped",
"14622": "capricorn",
"14623": "capsize",
"14624": "capsule",
"14625": "caption",
"14626": "captivate",
"14631": "captive",
"14632": "captivity",
"14633": "capture",
"14634": "caramel",
"14635": "carat",
"14636": "caravan",
"14641": "carbon",
"14642": "cardboard",
"14643": "carded",
"14644": "cardiac",
"14645": "cardigan",
"14646": "cardinal",
"14651": "cardstock",
"14652": "carefully",
"14653": "caregiver",
"14654": "careless",
"14655": "caress",
"14656": "caretaker",
"14661": "cargo",
"14662": "caring",
"14663": "carless",
"14664": "carload",
"14665": "carmaker",
"14666": "carnage",
"15111": "carnation",
"15112": "carnival",
"15113": "carnivore",
"15114": "carol",
"15115": "carpenter",
"15116": "carpentry",
"15121": "carpool",
"15122": "carport",
"15123": "carried",
"15124": "carrot",
"15125": "carrousel",
"15126": "carry",
"15131": "cartel",
"15132": "cartload",
"15133": "carton",
"15134": "cartoon",
"15135": "cartridge",
"15136": "cartwheel",
"15141": "carve",
"15142": "carving",
"15143": "carwash",
"15144": "cascade",
"15145": "case",
"15146": "cash",
"15151": "casing",
"15152": "casino",
"15153": "casket",
"15154": "cassette",
"15155": "casually",
"15156": "casualty",
"15161": "catacomb",
"15162": "catalog",
"15163": "catalyst",
"15164": "catalyze",
"15165": "catapult",
"15166": "cataract",
"15211": "catatonic",
"15212": "catcall",
"15213": "catchable",
"15214": "catcher",
"15215": "catching",
"15216": "catchy",
"15221": "caterer",
"15222": "catering",
"15223": "catfight",
"15224": "catfish",
"15225": "cathedral",
"15226": "cathouse",
"15231": "catlike",
"15232": "catnap",
"15233": "catnip",
"15234": "catsup",
"15235": "cattail",
"15236": "cattishly",
"15241": "cattle",
"15242": "catty",
"15243": "catwalk",
"15244": "caucasian",
"15245": "caucus",
"15246": "causal",
"15251": "causation",
"15252": "cause",
"15253": "causing",
"15254": "cauterize",
"15255": "caution",
"15256": "cautious",
"15261": "cavalier",
"15262": "cavalry",
"15263": "caviar",
"15264": "cavity",
"15265": "cedar",
"15266": "celery",
"15311": "celestial",
"15312": "celibacy",
"15313": "celibate",
"15314": "celtic",
"15315": "cement",
"15316": "census",
"15321": "ceramics",
"15322": "ceremony",
"15323": "certainly",
"15324": "certainty",
"15325": "certified",
"15326": "certify",
"15331": "cesarean",
"15332": "cesspool",
"15333": "chafe",
"15334": "chaffing",
"15335": "chain",
"15336": "chair",
"15341": "chalice",
"15342": "challenge",
"15343": "chamber",
"15344": "chamomile",
"15345": "champion",
"15346": "chance",
"15351": "change",
"15352": "channel",
"15353": "chant",
"15354": "chaos",
"15355": "chaperone",
"15356": "chaplain",
"15361": "chapped",
"15362": "chaps",
"15363": "chapter",
"15364": "character",
"15365": "charbroil",
"15366": "charcoal",
"15411": "charger",
"15412": "charging",
"15413": "chariot",
"15414": "charity",
"15415": "charm",
"15416": "charred",
"15421": "charter",
"15422": "charting",
"15423": "chase",
"15424": "chasing",
"15425": "chaste",
"15426": "chastise",
"15431": "chastity",
"15432": "chatroom",
"15433": "chatter",
"15434": "chatting",
"15435": "chatty",
"15436": "cheating",
"15441": "cheddar",
"15442": "cheek",
"15443": "cheer",
"15444": "cheese",
"15445": "cheesy",
"15446": "chef",
"15451": "chemicals",
"15452": "chemist",
"15453": "chemo",
"15454": "cherisher",
"15455": "cherub",
"15456": "chess",
"15461": "chest",
"15462": "chevron",
"15463": "chevy",
"15464": "chewable",
"15465": "chewer",
"15466": "chewing",
"15511": "chewy",
"15512": "chief",
"15513": "chihuahua",
"15514": "childcare",
"15515": "childhood",
"15516": "childish",
"15521": "childless",
"15522": "childlike",
"15523": "chili",
"15524": "chill",
"15525": "chimp",
"15526": "chip",
"15531": "chirping",
"15532": "chirpy",
"15533": "chitchat",
"15534": "chivalry",
"15535": "chive",
"15536": "chloride",
"15541": "chlorine",
"15542": "choice",
"15543": "chokehold",
"15544": "choking",
"15545": "chomp",
"15546": "chooser",
"15551": "choosing",
"15552": "choosy",
"15553": "chop",
"15554": "chosen",
"15555": "chowder",
"15556": "chowtime",
"15561": "chrome",
"15562": "chubby",
"15563": "chuck",
"15564": "chug",
"15565": "chummy",
"15566": "chump",
"15611": "chunk",
"15612": "churn",
"15613": "chute",
"15614": "cider",
"15615": "cilantro",
"15616": "cinch",
"15621": "cinema",
"15622": "cinnamon",
"15623": "circle",
"15624": "circling",
"15625": "circular",
"15626": "circulate",
"15631": "circus",
"15632": "citable",
"15633": "citadel",
"15634": "citation",
"15635": "citizen",
"15636": "citric",
"15641": "citrus",
"15642": "city",
"15643": "civic",
"15644": "civil",
"15645": "clad",
"15646": "claim",
"15651": "clambake",
"15652": "clammy",
"15653": "clamor",
"15654": "clamp",
"15655": "clamshell",
"15656": "clang",
"15661": "clanking",
"15662": "clapped",
"15663": "clapper",
"15664": "clapping",
"15665": "clarify",
"15666": "clarinet",
"16111": "clarity",
"16112": "clash",
"16113": "clasp",
"16114": "class",
"16115": "clatter",
"16116": "clause",
"16121": "clavicle",
"16122": "claw",
"16123": "clay",
"16124": "clean",
"16125": "clear",
"16126": "cleat",
"16131": "cleaver",
"16132": "cleft",
"16133": "clench",
"16134": "clergyman",
"16135": "clerical",
"16136": "clerk",
"16141": "clever",
"16142": "clicker",
"16143": "client",
"16144": "climate",
"16145": "climatic",
"16146": "cling",
"16151": "clinic",
"16152": "clinking",
"16153": "clip",
"16154": "clique",
"16155": "cloak",
"16156": "clobber",
"16161": "clock",
"16162": "clone",
"16163": "cloning",
"16164": "closable",
"16165": "closure",
"16166": "clothes",
"16211": "clothing",
"16212": "cloud",
"16213": "clover",
"16214": "clubbed",
"16215": "clubbing",
"16216": "clubhouse",
"16221": "clump",
"16222": "clumsily",
"16223": "clumsy",
"16224": "clunky",
"16225": "clustered",
"16226": "clutch",
"16231": "clutter",
"16232": "coach",
"16233": "coagulant",
"16234": "coastal",
"16235": "coaster",
"16236": "coasting",
"16241": "coastland",
"16242": "coastline",
"16243": "coat",
"16244": "coauthor",
"16245": "cobalt",
"16246": "cobbler",
"16251": "cobweb",
"16252": "cocoa",
"16253": "coconut",
"16254": "cod",
"16255": "coeditor",
"16256": "coerce",
"16261": "coexist",
"16262": "coffee",
"16263": "cofounder",
"16264": "cognition",
"16265": "cognitive",
"16266": "cogwheel",
"16311": "coherence",
"16312": "coherent",
"16313": "cohesive",
"16314": "coil",
"16315": "coke",
"16316": "cola",
"16321": "cold",
"16322": "coleslaw",
"16323": "coliseum",
"16324": "collage",
"16325": "collapse",
"16326": "collar",
"16331": "collected",
"16332": "collector",
"16333": "collide",
"16334": "collie",
"16335": "collision",
"16336": "colonial",
"16341": "colonist",
"16342": "colonize",
"16343": "colony",
"16344": "colossal",
"16345": "colt",
"16346": "coma",
"16351": "come",
"16352": "comfort",
"16353": "comfy",
"16354": "comic",
"16355": "coming",
"16356": "comma",
"16361": "commence",
"16362": "commend",
"16363": "comment",
"16364": "commerce",
"16365": "commode",
"16366": "commodity",
"16411": "commodore",
"16412": "common",
"16413": "commotion",
"16414": "commute",
"16415": "commuting",
"16416": "compacted",
"16421": "compacter",
"16422": "compactly",
"16423": "compactor",
"16424": "companion",
"16425": "company",
"16426": "compare",
"16431": "compel",
"16432": "compile",
"16433": "comply",
"16434": "component",
"16435": "composed",
"16436": "composer",
"16441": "composite",
"16442": "compost",
"16443": "composure",
"16444": "compound",
"16445": "compress",
"16446": "comprised",
"16451": "computer",
"16452": "computing",
"16453": "comrade",
"16454": "concave",
"16455": "conceal",
"16456": "conceded",
"16461": "concept",
"16462": "concerned",
"16463": "concert",
"16464": "conch",
"16465": "concierge",
"16466": "concise",
"16511": "conclude",
"16512": "concrete",
"16513": "concur",
"16514": "condense",
"16515": "condiment",
"16516": "condition",
"16521": "condone",
"16522": "conducive",
"16523": "conductor",
"16524": "conduit",
"16525": "cone",
"16526": "confess",
"16531": "confetti",
"16532": "confidant",
"16533": "confident",
"16534": "confider",
"16535": "confiding",
"16536": "configure",
"16541": "confined",
"16542": "confining",
"16543": "confirm",
"16544": "conflict",
"16545": "conform",
"16546": "confound",
"16551": "confront",
"16552": "confused",
"16553": "confusing",
"16554": "confusion",
"16555": "congenial",
"16556": "congested",
"16561": "congrats",
"16562": "congress",
"16563": "conical",
"16564": "conjoined",
"16565": "conjure",
"16566": "conjuror",
"16611": "connected",
"16612": "connector",
"16613": "consensus",
"16614": "consent",
"16615": "console",
"16616": "consoling",
"16621": "consonant",
"16622": "constable",
"16623": "constant",
"16624": "constrain",
"16625": "constrict",
"16626": "construct",
"16631": "consult",
"16632": "consumer",
"16633": "consuming",
"16634": "contact",
"16635": "container",
"16636": "contempt",
"16641": "contend",
"16642": "contented",
"16643": "contently",
"16644": "contents",
"16645": "contest",
"16646": "context",
"16651": "contort",
"16652": "contour",
"16653": "contrite",
"16654": "control",
"16655": "contusion",
"16656": "convene",
"16661": "convent",
"16662": "copartner",
"16663": "cope",
"16664": "copied",
"16665": "copier",
"16666": "copilot",
"21111": "coping",
"21112": "copious",
"21113": "copper",
"21114": "copy",
"21115": "coral",
"21116": "cork",
"21121": "cornball",
"21122": "cornbread",
"21123": "corncob",
"21124": "cornea",
"21125": "corned",
"21126": "corner",
"21131": "cornfield",
"21132": "cornflake",
"21133": "cornhusk",
"21134": "cornmeal",
"21135": "cornstalk",
"21136": "corny",
"21141": "coronary",
"21142": "coroner",
"21143": "corporal",
"21144": "corporate",
"21145": "corral",
"21146": "correct",
"21151": "corridor",
"21152": "corrode",
"21153": "corroding",
"21154": "corrosive",
"21155": "corsage",
"21156": "corset",
"21161": "cortex",
"21162": "cosigner",
"21163": "cosmetics",
"21164": "cosmic",
"21165": "cosmos",
"21166": "cosponsor",
"21211": "cost",
"21212": "cottage",
"21213": "cotton",
"21214": "couch",
"21215": "cough",
"21216": "could",
"21221": "countable",
"21222": "countdown",
"21223": "counting",
"21224": "countless",
"21225": "country",
"21226": "county",
"21231": "courier",
"21232": "covenant",
"21233": "cover",
"21234": "coveted",
"21235": "coveting",
"21236": "coyness",
"21241": "cozily",
"21242": "coziness",
"21243": "cozy",
"21244": "crabbing",
"21245": "crabgrass",
"21246": "crablike",
"21251": "crabmeat",
"21252": "cradle",
"21253": "cradling",
"21254": "crafter",
"21255": "craftily",
"21256": "craftsman",
"21261": "craftwork",
"21262": "crafty",
"21263": "cramp",
"21264": "cranberry",
"21265": "crane",
"21266": "cranial",
"21311": "cranium",
"21312": "crank",
"21313": "crate",
"21314": "crave",
"21315": "craving",
"21316": "crawfish",
"21321": "crawlers",
"21322": "crawling",
"21323": "crayfish",
"21324": "crayon",
"21325": "crazed",
"21326": "crazily",
"21331": "craziness",
"21332": "crazy",
"21333": "creamed",
"21334": "creamer",
"21335": "creamlike",
"21336": "crease",
"21341": "creasing",
"21342": "creatable",
"21343": "create",
"21344": "creation",
"21345": "creative",
"21346": "creature",
"21351": "credible",
"21352": "credibly",
"21353": "credit",
"21354": "creed",
"21355": "creme",
"21356": "creole",
"21361": "crepe",
"21362": "crept",
"21363": "crescent",
"21364": "crested",
"21365": "cresting",
"21366": "crestless",
"21411": "crevice",
"21412": "crewless",
"21413": "crewman",
"21414": "crewmate",
"21415": "crib",
"21416": "cricket",
"21421": "cried",
"21422": "crier",
"21423": "crimp",
"21424": "crimson",
"21425": "cringe",
"21426": "cringing",
"21431": "crinkle",
"21432": "crinkly",
"21433": "crisped",
"21434": "crisping",
"21435": "crisply",
"21436": "crispness",
"21441": "crispy",
"21442": "criteria",
"21443": "critter",
"21444": "croak",
"21445": "crock",
"21446": "crook",
"21451": "croon",
"21452": "crop",
"21453": "cross",
"21454": "crouch",
"21455": "crouton",
"21456": "crowbar",
"21461": "crowd",
"21462": "crown",
"21463": "crucial",
"21464": "crudely",
"21465": "crudeness",
"21466": "cruelly",
"21511": "cruelness",
"21512": "cruelty",
"21513": "crumb",
"21514": "crummiest",
"21515": "crummy",
"21516": "crumpet",
"21521": "crumpled",
"21522": "cruncher",
"21523": "crunching",
"21524": "crunchy",
"21525": "crusader",
"21526": "crushable",
"21531": "crushed",
"21532": "crusher",
"21533": "crushing",
"21534": "crust",
"21535": "crux",
"21536": "crying",
"21541": "cryptic",
"21542": "crystal",
"21543": "cubbyhole",
"21544": "cube",
"21545": "cubical",
"21546": "cubicle",
"21551": "cucumber",
"21552": "cuddle",
"21553": "cuddly",
"21554": "cufflink",
"21555": "culinary",
"21556": "culminate",
"21561": "culpable",
"21562": "culprit",
"21563": "cultivate",
"21564": "cultural",
"21565": "culture",
"21566": "cupbearer",
"21611": "cupcake",
"21612": "cupid",
"21613": "cupped",
"21614": "cupping",
"21615": "curable",
"21616": "curator",
"21621": "curdle",
"21622": "cure",
"21623": "curfew",
"21624": "curing",
"21625": "curled",
"21626": "curler",
"21631": "curliness",
"21632": "curling",
"21633": "curly",
"21634": "curry",
"21635": "curse",
"21636": "cursive",
"21641": "cursor",
"21642": "curtain",
"21643": "curtly",
"21644": "curtsy",
"21645": "curvature",
"21646": "curve",
"21651": "curvy",
"21652": "cushy",
"21653": "cusp",
"21654": "cussed",
"21655": "custard",
"21656": "custodian",
"21661": "custody",
"21662": "customary",
"21663": "customer",
"21664": "customize",
"21665": "customs",
"21666": "cut",
"22111": "cycle",
"22112": "cyclic",
"22113": "cycling",
"22114": "cyclist",
"22115": "cylinder",
"22116": "cymbal",
"22121": "cytoplasm",
"22122": "cytoplast",
"22123": "dab",
"22124": "dad",
"22125": "daffodil",
"22126": "dagger",
"22131": "daily",
"22132": "daintily",
"22133": "dainty",
"22134": "dairy",
"22135": "daisy",
"22136": "dallying",
"22141": "dance",
"22142": "dancing",
"22143": "dandelion",
"22144": "dander",
"22145": "dandruff",
"22146": "dandy",
"22151": "danger",
"22152": "dangle",
"22153": "dangling",
"22154": "daredevil",
"22155": "dares",
"22156": "daringly",
"22161": "darkened",
"22162": "darkening",
"22163": "darkish",
"22164": "darkness",
"22165": "darkroom",
"22166": "darling",
"22211": "darn",
"22212": "dart",
"22213": "darwinism",
"22214": "dash",
"22215": "dastardly",
"22216": "data",
"22221": "datebook",
"22222": "dating",
"22223": "daughter",
"22224": "daunting",
"22225": "dawdler",
"22226": "dawn",
"22231": "daybed",
"22232": "daybreak",
"22233": "daycare",
"22234": "daydream",
"22235": "daylight",
"22236": "daylong",
"22241": "dayroom",
"22242": "daytime",
"22243": "dazzler",
"22244": "dazzling",
"22245": "deacon",
"22246": "deafening",
"22251": "deafness",
"22252": "dealer",
"22253": "dealing",
"22254": "dealmaker",
"22255": "dealt",
"22256": "dean",
"22261": "debatable",
"22262": "debate",
"22263": "debating",
"22264": "debit",
"22265": "debrief",
"22266": "debtless",
"22311": "debtor",
"22312": "debug",
"22313": "debunk",
"22314": "decade",
"22315": "decaf",
"22316": "decal",
"22321": "decathlon",
"22322": "decay",
"22323": "deceased",
"22324": "deceit",
"22325": "deceiver",
"22326": "deceiving",
"22331": "december",
"22332": "decency",
"22333": "decent",
"22334": "deception",
"22335": "deceptive",
"22336": "decibel",
"22341": "decidable",
"22342": "decimal",
"22343": "decimeter",
"22344": "decipher",
"22345": "deck",
"22346": "declared",
"22351": "decline",
"22352": "decode",
"22353": "decompose",
"22354": "decorated",
"22355": "decorator",
"22356": "decoy",
"22361": "decrease",
"22362": "decree",
"22363": "dedicate",
"22364": "dedicator",
"22365": "deduce",
"22366": "deduct",
"22411": "deed",
"22412": "deem",
"22413": "deepen",
"22414": "deeply",
"22415": "deepness",
"22416": "deface",
"22421": "defacing",
"22422": "defame",
"22423": "default",
"22424": "defeat",
"22425": "defection",
"22426": "defective",
"22431": "defendant",
"22432": "defender",
"22433": "defense",
"22434": "defensive",
"22435": "deferral",
"22436": "deferred",
"22441": "defiance",
"22442": "defiant",
"22443": "defile",
"22444": "defiling",
"22445": "define",
"22446": "definite",
"22451": "deflate",
"22452": "deflation",
"22453": "deflator",
"22454": "deflected",
"22455": "deflector",
"22456": "defog",
"22461": "deforest",
"22462": "defraud",
"22463": "defrost",
"22464": "deftly",
"22465": "defuse",
"22466": "defy",
"22511": "degraded",
"22512": "degrading",
"22513": "degrease",
"22514": "degree",
"22515": "dehydrate",
"22516": "deity",
"22521": "dejected",
"22522": "delay",
"22523": "delegate",
"22524": "delegator",
"22525": "delete",
"22526": "deletion",
"22531": "delicacy",
"22532": "delicate",
"22533": "delicious",
"22534": "delighted",
"22535": "delirious",
"22536": "delirium",
"22541": "deliverer",
"22542": "delivery",
"22543": "delouse",
"22544": "delta",
"22545": "deluge",
"22546": "delusion",
"22551": "deluxe",
"22552": "demanding",
"22553": "demeaning",
"22554": "demeanor",
"22555": "demise",
"22556": "democracy",
"22561": "democrat",
"22562": "demote",
"22563": "demotion",
"22564": "demystify",
"22565": "denatured",
"22566": "deniable",
"22611": "denial",
"22612": "denim",
"22613": "denote",
"22614": "dense",
"22615": "density",
"22616": "dental",
"22621": "dentist",
"22622": "denture",
"22623": "deny",
"22624": "deodorant",
"22625": "deodorize",
"22626": "departed",
"22631": "departure",
"22632": "depict",
"22633": "deplete",
"22634": "depletion",
"22635": "deplored",
"22636": "deploy",
"22641": "deport",
"22642": "depose",
"22643": "depraved",
"22644": "depravity",
"22645": "deprecate",
"22646": "depress",
"22651": "deprive",
"22652": "depth",
"22653": "deputize",
"22654": "deputy",
"22655": "derail",
"22656": "deranged",
"22661": "derby",
"22662": "derived",
"22663": "desecrate",
"22664": "deserve",
"22665": "deserving",
"22666": "designate",
"23111": "designed",
"23112": "designer",
"23113": "designing",
"23114": "deskbound",
"23115": "desktop",
"23116": "deskwork",
"23121": "desolate",
"23122": "despair",
"23123": "despise",
"23124": "despite",
"23125": "destiny",
"23126": "destitute",
"23131": "destruct",
"23132": "detached",
"23133": "detail",
"23134": "detection",
"23135": "detective",
"23136": "detector",
"23141": "detention",
"23142": "detergent",
"23143": "detest",
"23144": "detonate",
"23145": "detonator",
"23146": "detoxify",
"23151": "detract",
"23152": "deuce",
"23153": "devalue",
"23154": "deviancy",
"23155": "deviant",
"23156": "deviate",
"23161": "deviation",
"23162": "deviator",
"23163": "device",
"23164": "devious",
"23165": "devotedly",
"23166": "devotee",
"23211": "devotion",
"23212": "devourer",
"23213": "devouring",
"23214": "devoutly",
"23215": "dexterity",
"23216": "dexterous",
"23221": "diabetes",
"23222": "diabetic",
"23223": "diabolic",
"23224": "diagnoses",
"23225": "diagnosis",
"23226": "diagram",
"23231": "dial",
"23232": "diameter",
"23233": "diaper",
"23234": "diaphragm",
"23235": "diary",
"23236": "dice",
"23241": "dicing",
"23242": "dictate",
"23243": "dictation",
"23244": "dictator",
"23245": "difficult",
"23246": "diffused",
"23251": "diffuser",
"23252": "diffusion",
"23253": "diffusive",
"23254": "dig",
"23255": "dilation",
"23256": "diligence",
"23261": "diligent",
"23262": "dill",
"23263": "dilute",
"23264": "dime",
"23265": "diminish",
"23266": "dimly",
"23311": "dimmed",
"23312": "dimmer",
"23313": "dimness",
"23314": "dimple",
"23315": "diner",
"23316": "dingbat",
"23321": "dinghy",
"23322": "dinginess",
"23323": "dingo",
"23324": "dingy",
"23325": "dining",
"23326": "dinner",
"23331": "diocese",
"23332": "dioxide",
"23333": "diploma",
"23334": "dipped",
"23335": "dipper",
"23336": "dipping",
"23341": "directed",
"23342": "direction",
"23343": "directive",
"23344": "directly",
"23345": "directory",
"23346": "direness",
"23351": "dirtiness",
"23352": "disabled",
"23353": "disagree",
"23354": "disallow",
"23355": "disarm",
"23356": "disarray",
"23361": "disaster",
"23362": "disband",
"23363": "disbelief",
"23364": "disburse",
"23365": "discard",
"23366": "discern",
"23411": "discharge",
"23412": "disclose",
"23413": "discolor",
"23414": "discount",
"23415": "discourse",
"23416": "discover",
"23421": "discuss",
"23422": "disdain",
"23423": "disengage",
"23424": "disfigure",
"23425": "disgrace",
"23426": "dish",
"23431": "disinfect",
"23432": "disjoin",
"23433": "disk",
"23434": "dislike",
"23435": "disliking",
"23436": "dislocate",
"23441": "dislodge",
"23442": "disloyal",
"23443": "dismantle",
"23444": "dismay",
"23445": "dismiss",
"23446": "dismount",
"23451": "disobey",
"23452": "disorder",
"23453": "disown",
"23454": "disparate",
"23455": "disparity",
"23456": "dispatch",
"23461": "dispense",
"23462": "dispersal",
"23463": "dispersed",
"23464": "disperser",
"23465": "displace",
"23466": "display",
"23511": "displease",
"23512": "disposal",
"23513": "dispose",
"23514": "disprove",
"23515": "dispute",
"23516": "disregard",
"23521": "disrupt",
"23522": "dissuade",
"23523": "distance",
"23524": "distant",
"23525": "distaste",
"23526": "distill",
"23531": "distinct",
"23532": "distort",
"23533": "distract",
"23534": "distress",
"23535": "district",
"23536": "distrust",
"23541": "ditch",
"23542": "ditto",
"23543": "ditzy",
"23544": "dividable",
"23545": "divided",
"23546": "dividend",
"23551": "dividers",
"23552": "dividing",
"23553": "divinely",
"23554": "diving",
"23555": "divinity",
"23556": "divisible",
"23561": "divisibly",
"23562": "division",
"23563": "divisive",
"23564": "divorcee",
"23565": "dizziness",
"23566": "dizzy",
"23611": "doable",
"23612": "docile",
"23613": "dock",
"23614": "doctrine",
"23615": "document",
"23616": "dodge",
"23621": "dodgy",
"23622": "doily",
"23623": "doing",
"23624": "dole",
"23625": "dollar",
"23626": "dollhouse",
"23631": "dollop",
"23632": "dolly",
"23633": "dolphin",
"23634": "domain",
"23635": "domelike",
"23636": "domestic",
"23641": "dominion",
"23642": "dominoes",
"23643": "donated",
"23644": "donation",
"23645": "donator",
"23646": "donor",
"23651": "donut",
"23652": "doodle",
"23653": "doorbell",
"23654": "doorframe",
"23655": "doorknob",
"23656": "doorman",
"23661": "doormat",
"23662": "doornail",
"23663": "doorpost",
"23664": "doorstep",
"23665": "doorstop",
"23666": "doorway",
"24111": "doozy",
"24112": "dork",
"24113": "dormitory",
"24114": "dorsal",
"24115": "dosage",
"24116": "dose",
"24121": "dotted",
"24122": "doubling",
"24123": "douche",
"24124": "dove",
"24125": "down",
"24126": "dowry",
"24131": "doze",
"24132": "drab",
"24133": "dragging",
"24134": "dragonfly",
"24135": "dragonish",
"24136": "dragster",
"24141": "drainable",
"24142": "drainage",
"24143": "drained",
"24144": "drainer",
"24145": "drainpipe",
"24146": "dramatic",
"24151": "dramatize",
"24152": "drank",
"24153": "drapery",
"24154": "drastic",
"24155": "draw",
"24156": "dreaded",
"24161": "dreadful",
"24162": "dreadlock",
"24163": "dreamboat",
"24164": "dreamily",
"24165": "dreamland",
"24166": "dreamless",
"24211": "dreamlike",
"24212": "dreamt",
"24213": "dreamy",
"24214": "drearily",
"24215": "dreary",
"24216": "drench",
"24221": "dress",
"24222": "drew",
"24223": "dribble",
"24224": "dried",
"24225": "drier",
"24226": "drift",
"24231": "driller",
"24232": "drilling",
"24233": "drinkable",
"24234": "drinking",
"24235": "dripping",
"24236": "drippy",
"24241": "drivable",
"24242": "driven",
"24243": "driver",
"24244": "driveway",
"24245": "driving",
"24246": "drizzle",
"24251": "drizzly",
"24252": "drone",
"24253": "drool",
"24254": "droop",
"24255": "drop-down",
"24256": "dropbox",
"24261": "dropkick",
"24262": "droplet",
"24263": "dropout",
"24264": "dropper",
"24265": "drove",
"24266": "drown",
"24311": "drowsily",
"24312": "drudge",
"24313": "drum",
"24314": "dry",
"24315": "dubbed",
"24316": "dubiously",
"24321": "duchess",
"24322": "duckbill",
"24323": "ducking",
"24324": "duckling",
"24325": "ducktail",
"24326": "ducky",
"24331": "duct",
"24332": "dude",
"24333": "duffel",
"24334": "dugout",
"24335": "duh",
"24336": "duke",
"24341": "duller",
"24342": "dullness",
"24343": "duly",
"24344": "dumping",
"24345": "dumpling",
"24346": "dumpster",
"24351": "duo",
"24352": "dupe",
"24353": "duplex",
"24354": "duplicate",
"24355": "duplicity",
"24356": "durable",
"24361": "durably",
"24362": "duration",
"24363": "duress",
"24364": "during",
"24365": "dusk",
"24366": "dust",
"24411": "dutiful",
"24412": "duty",
"24413": "duvet",
"24414": "dwarf",
"24415": "dweeb",
"24416": "dwelled",
"24421": "dweller",
"24422": "dwelling",
"24423": "dwindle",
"24424": "dwindling",
"24425": "dynamic",
"24426": "dynamite",
"24431": "dynasty",
"24432": "dyslexia",
"24433": "dyslexic",
"24434": "each",
"24435": "eagle",
"24436": "earache",
"24441": "eardrum",
"24442": "earflap",
"24443": "earful",
"24444": "earlobe",
"24445": "early",
"24446": "earmark",
"24451": "earmuff",
"24452": "earphone",
"24453": "earpiece",
"24454": "earplugs",
"24455": "earring",
"24456": "earshot",
"24461": "earthen",
"24462": "earthlike",
"24463": "earthling",
"24464": "earthly",
"24465": "earthworm",
"24466": "earthy",
"24511": "earwig",
"24512": "easeful",
"24513": "easel",
"24514": "easiest",
"24515": "easily",
"24516": "easiness",
"24521": "easing",
"24522": "eastbound",
"24523": "eastcoast",
"24524": "easter",
"24525": "eastward",
"24526": "eatable",
"24531": "eaten",
"24532": "eatery",
"24533": "eating",
"24534": "eats",
"24535": "ebay",
"24536": "ebony",
"24541": "ebook",
"24542": "ecard",
"24543": "eccentric",
"24544": "echo",
"24545": "eclair",
"24546": "eclipse",
"24551": "ecologist",
"24552": "ecology",
"24553": "economic",
"24554": "economist",
"24555": "economy",
"24556": "ecosphere",
"24561": "ecosystem",
"24562": "edge",
"24563": "edginess",
"24564": "edging",
"24565": "edgy",
"24566": "edition",
"24611": "editor",
"24612": "educated",
"24613": "education",
"24614": "educator",
"24615": "eel",
"24616": "effective",
"24621": "effects",
"24622": "efficient",
"24623": "effort",
"24624": "eggbeater",
"24625": "egging",
"24626": "eggnog",
"24631": "eggplant",
"24632": "eggshell",
"24633": "egomaniac",
"24634": "egotism",
"24635": "egotistic",
"24636": "either",
"24641": "eject",
"24642": "elaborate",
"24643": "elastic",
"24644": "elated",
"24645": "elbow",
"24646": "eldercare",
"24651": "elderly",
"24652": "eldest",
"24653": "electable",
"24654": "election",
"24655": "elective",
"24656": "elephant",
"24661": "elevate",
"24662": "elevating",
"24663": "elevation",
"24664": "elevator",
"24665": "eleven",
"24666": "elf",
"25111": "eligible",
"25112": "eligibly",
"25113": "eliminate",
"25114": "elite",
"25115": "elitism",
"25116": "elixir",
"25121": "elk",
"25122": "ellipse",
"25123": "elliptic",
"25124": "elm",
"25125": "elongated",
"25126": "elope",
"25131": "eloquence",
"25132": "eloquent",
"25133": "elsewhere",
"25134": "elude",
"25135": "elusive",
"25136": "elves",
"25141": "email",
"25142": "embargo",
"25143": "embark",
"25144": "embassy",
"25145": "embattled",
"25146": "embellish",
"25151": "ember",
"25152": "embezzle",
"25153": "emblaze",
"25154": "emblem",
"25155": "embody",
"25156": "embolism",
"25161": "emboss",
"25162": "embroider",
"25163": "emcee",
"25164": "emerald",
"25165": "emergency",
"25166": "emission",
"25211": "emit",
"25212": "emote",
"25213": "emoticon",
"25214": "emotion",
"25215": "empathic",
"25216": "empathy",
"25221": "emperor",
"25222": "emphases",
"25223": "emphasis",
"25224": "emphasize",
"25225": "emphatic",
"25226": "empirical",
"25231": "employed",
"25232": "employee",
"25233": "employer",
"25234": "emporium",
"25235": "empower",
"25236": "emptier",
"25241": "emptiness",
"25242": "empty",
"25243": "emu",
"25244": "enable",
"25245": "enactment",
"25246": "enamel",
"25251": "enchanted",
"25252": "enchilada",
"25253": "encircle",
"25254": "enclose",
"25255": "enclosure",
"25256": "encode",
"25261": "encore",
"25262": "encounter",
"25263": "encourage",
"25264": "encroach",
"25265": "encrust",
"25266": "encrypt",
"25311": "endanger",
"25312": "endeared",
"25313": "endearing",
"25314": "ended",
"25315": "ending",
"25316": "endless",
"25321": "endnote",
"25322": "endocrine",
"25323": "endorphin",
"25324": "endorse",
"25325": "endowment",
"25326": "endpoint",
"25331": "endurable",
"25332": "endurance",
"25333": "enduring",
"25334": "energetic",
"25335": "energize",
"25336": "energy",
"25341": "enforced",
"25342": "enforcer",
"25343": "engaged",
"25344": "engaging",
"25345": "engine",
"25346": "engorge",
"25351": "engraved",
"25352": "engraver",
"25353": "engraving",
"25354": "engross",
"25355": "engulf",
"25356": "enhance",
"25361": "enigmatic",
"25362": "enjoyable",
"25363": "enjoyably",
"25364": "enjoyer",
"25365": "enjoying",
"25366": "enjoyment",
"25411": "enlarged",
"25412": "enlarging",
"25413": "enlighten",
"25414": "enlisted",
"25415": "enquirer",
"25416": "enrage",
"25421": "enrich",
"25422": "enroll",
"25423": "enslave",
"25424": "ensnare",
"25425": "ensure",
"25426": "entail",
"25431": "entangled",
"25432": "entering",
"25433": "entertain",
"25434": "enticing",
"25435": "entire",
"25436": "entitle",
"25441": "entity",
"25442": "entomb",
"25443": "entourage",
"25444": "entrap",
"25445": "entree",
"25446": "entrench",
"25451": "entrust",
"25452": "entryway",
"25453": "entwine",
"25454": "enunciate",
"25455": "envelope",
"25456": "enviable",
"25461": "enviably",
"25462": "envious",
"25463": "envision",
"25464": "envoy",
"25465": "envy",
"25466": "enzyme",
"25511": "epic",
"25512": "epidemic",
"25513": "epidermal",
"25514": "epidermis",
"25515": "epidural",
"25516": "epilepsy",
"25521": "epileptic",
"25522": "epilogue",
"25523": "epiphany",
"25524": "episode",
"25525": "equal",
"25526": "equate",
"25531": "equation",
"25532": "equator",
"25533": "equinox",
"25534": "equipment",
"25535": "equity",
"25536": "equivocal",
"25541": "eradicate",
"25542": "erasable",
"25543": "erased",
"25544": "eraser",
"25545": "erasure",
"25546": "ergonomic",
"25551": "errand",
"25552": "errant",
"25553": "erratic",
"25554": "error",
"25555": "erupt",
"25556": "escalate",
"25561": "escalator",
"25562": "escapable",
"25563": "escapade",
"25564": "escapist",
"25565": "escargot",
"25566": "eskimo",
"25611": "esophagus",
"25612": "espionage",
"25613": "espresso",
"25614": "esquire",
"25615": "essay",
"25616": "essence",
"25621": "essential",
"25622": "establish",
"25623": "estate",
"25624": "esteemed",
"25625": "estimate",
"25626": "estimator",
"25631": "estranged",
"25632": "estrogen",
"25633": "etching",
"25634": "eternal",
"25635": "eternity",
"25636": "ethanol",
"25641": "ether",
"25642": "ethically",
"25643": "ethics",
"25644": "euphemism",
"25645": "evacuate",
"25646": "evacuee",
"25651": "evade",
"25652": "evaluate",
"25653": "evaluator",
"25654": "evaporate",
"25655": "evasion",
"25656": "evasive",
"25661": "even",
"25662": "everglade",
"25663": "evergreen",
"25664": "everybody",
"25665": "everyday",
"25666": "everyone",
"26111": "evict",
"26112": "evidence",
"26113": "evident",
"26114": "evil",
"26115": "evoke",
"26116": "evolution",
"26121": "evolve",
"26122": "exact",
"26123": "exalted",
"26124": "example",
"26125": "excavate",
"26126": "excavator",
"26131": "exceeding",
"26132": "exception",
"26133": "excess",
"26134": "exchange",
"26135": "excitable",
"26136": "exciting",
"26141": "exclaim",
"26142": "exclude",
"26143": "excluding",
"26144": "exclusion",
"26145": "exclusive",
"26146": "excretion",
"26151": "excretory",
"26152": "excursion",
"26153": "excusable",
"26154": "excusably",
"26155": "excuse",
"26156": "exemplary",
"26161": "exemplify",
"26162": "exemption",
"26163": "exerciser",
"26164": "exert",
"26165": "exes",
"26166": "exfoliate",
"26211": "exhale",
"26212": "exhaust",
"26213": "exhume",
"26214": "exile",
"26215": "existing",
"26216": "exit",
"26221": "exodus",
"26222": "exonerate",
"26223": "exorcism",
"26224": "exorcist",
"26225": "expand",
"26226": "expanse",
"26231": "expansion",
"26232": "expansive",
"26233": "expectant",
"26234": "expedited",
"26235": "expediter",
"26236": "expel",
"26241": "expend",
"26242": "expenses",
"26243": "expensive",
"26244": "expert",
"26245": "expire",
"26246": "expiring",
"26251": "explain",
"26252": "expletive",
"26253": "explicit",
"26254": "explode",
"26255": "exploit",
"26256": "explore",
"26261": "exploring",
"26262": "exponent",
"26263": "exporter",
"26264": "exposable",
"26265": "expose",
"26266": "exposure",
"26311": "express",
"26312": "expulsion",
"26313": "exquisite",
"26314": "extended",
"26315": "extending",
"26316": "extent",
"26321": "extenuate",
"26322": "exterior",
"26323": "external",
"26324": "extinct",
"26325": "extortion",
"26326": "extradite",
"26331": "extras",
"26332": "extrovert",
"26333": "extrude",
"26334": "extruding",
"26335": "exuberant",
"26336": "fable",
"26341": "fabric",
"26342": "fabulous",
"26343": "facebook",
"26344": "facecloth",
"26345": "facedown",
"26346": "faceless",
"26351": "facelift",
"26352": "faceplate",
"26353": "faceted",
"26354": "facial",
"26355": "facility",
"26356": "facing",
"26361": "facsimile",
"26362": "faction",
"26363": "factoid",
"26364": "factor",
"26365": "factsheet",
"26366": "factual",
"26411": "faculty",
"26412": "fade",
"26413": "fading",
"26414": "failing",
"26415": "falcon",
"26416": "fall",
"26421": "false",
"26422": "falsify",
"26423": "fame",
"26424": "familiar",
"26425": "family",
"26426": "famine",
"26431": "famished",
"26432": "fanatic",
"26433": "fancied",
"26434": "fanciness",
"26435": "fancy",
"26436": "fanfare",
"26441": "fang",
"26442": "fanning",
"26443": "fantasize",
"26444": "fantastic",
"26445": "fantasy",
"26446": "fascism",
"26451": "fastball",
"26452": "faster",
"26453": "fasting",
"26454": "fastness",
"26455": "faucet",
"26456": "favorable",
"26461": "favorably",
"26462": "favored",
"26463": "favoring",
"26464": "favorite",
"26465": "fax",
"26466": "feast",
"26511": "federal",
"26512": "fedora",
"26513": "feeble",
"26514": "feed",
"26515": "feel",
"26516": "feisty",
"26521": "feline",
"26522": "felt-tip",
"26523": "feminine",
"26524": "feminism",
"26525": "feminist",
"26526": "feminize",
"26531": "femur",
"26532": "fence",
"26533": "fencing",
"26534": "fender",
"26535": "ferment",
"26536": "fernlike",
"26541": "ferocious",
"26542": "ferocity",
"26543": "ferret",
"26544": "ferris",
"26545": "ferry",
"26546": "fervor",
"26551": "fester",
"26552": "festival",
"26553": "festive",
"26554": "festivity",
"26555": "fetal",
"26556": "fetch",
"26561": "fever",
"26562": "fiber",
"26563": "fiction",
"26564": "fiddle",
"26565": "fiddling",
"26566": "fidelity",
"26611": "fidgeting",
"26612": "fidgety",
"26613": "fifteen",
"26614": "fifth",
"26615": "fiftieth",
"26616": "fifty",
"26621": "figment",
"26622": "figure",
"26623": "figurine",
"26624": "filing",
"26625": "filled",
"26626": "filler",
"26631": "filling",
"26632": "film",
"26633": "filter",
"26634": "filth",
"26635": "filtrate",
"26636": "finale",
"26641": "finalist",
"26642": "finalize",
"26643": "finally",
"26644": "finance",
"26645": "financial",
"26646": "finch",
"26651": "fineness",
"26652": "finer",
"26653": "finicky",
"26654": "finished",
"26655": "finisher",
"26656": "finishing",
"26661": "finite",
"26662": "finless",
"26663": "finlike",
"26664": "fiscally",
"26665": "fit",
"26666": "five",
"31111": "flaccid",
"31112": "flagman",
"31113": "flagpole",
"31114": "flagship",
"31115": "flagstick",
"31116": "flagstone",
"31121": "flail",
"31122": "flakily",
"31123": "flaky",
"31124": "flame",
"31125": "flammable",
"31126": "flanked",
"31131": "flanking",
"31132": "flannels",
"31133": "flap",
"31134": "flaring",
"31135": "flashback",
"31136": "flashbulb",
"31141": "flashcard",
"31142": "flashily",
"31143": "flashing",
"31144": "flashy",
"31145": "flask",
"31146": "flatbed",
"31151": "flatfoot",
"31152": "flatly",
"31153": "flatness",
"31154": "flatten",
"31155": "flattered",
"31156": "flatterer",
"31161": "flattery",
"31162": "flattop",
"31163": "flatware",
"31164": "flatworm",
"31165": "flavored",
"31166": "flavorful",
"31211": "flavoring",
"31212": "flaxseed",
"31213": "fled",
"31214": "fleshed",
"31215": "fleshy",
"31216": "flick",
"31221": "flier",
"31222": "flight",
"31223": "flinch",
"31224": "fling",
"31225": "flint",
"31226": "flip",
"31231": "flirt",
"31232": "float",
"31233": "flock",
"31234": "flogging",
"31235": "flop",
"31236": "floral",
"31241": "florist",
"31242": "floss",
"31243": "flounder",
"31244": "flyable",
"31245": "flyaway",
"31246": "flyer",
"31251": "flying",
"31252": "flyover",
"31253": "flypaper",
"31254": "foam",
"31255": "foe",
"31256": "fog",
"31261": "foil",
"31262": "folic",
"31263": "folk",
"31264": "follicle",
"31265": "follow",
"31266": "fondling",
"31311": "fondly",
"31312": "fondness",
"31313": "fondue",
"31314": "font",
"31315": "food",
"31316": "fool",
"31321": "footage",
"31322": "football",
"31323": "footbath",
"31324": "footboard",
"31325": "footer",
"31326": "footgear",
"31331": "foothill",
"31332": "foothold",
"31333": "footing",
"31334": "footless",
"31335": "footman",
"31336": "footnote",
"31341": "footpad",
"31342": "footpath",
"31343": "footprint",
"31344": "footrest",
"31345": "footsie",
"31346": "footsore",
"31351": "footwear",
"31352": "footwork",
"31353": "fossil",
"31354": "foster",
"31355": "founder",
"31356": "founding",
"31361": "fountain",
"31362": "fox",
"31363": "foyer",
"31364": "fraction",
"31365": "fracture",
"31366": "fragile",
"31411": "fragility",
"31412": "fragment",
"31413": "fragrance",
"31414": "fragrant",
"31415": "frail",
"31416": "frame",
"31421": "framing",
"31422": "frantic",
"31423": "fraternal",
"31424": "frayed",
"31425": "fraying",
"31426": "frays",
"31431": "freckled",
"31432": "freckles",
"31433": "freebase",
"31434": "freebee",
"31435": "freebie",
"31436": "freedom",
"31441": "freefall",
"31442": "freehand",
"31443": "freeing",
"31444": "freeload",
"31445": "freely",
"31446": "freemason",
"31451": "freeness",
"31452": "freestyle",
"31453": "freeware",
"31454": "freeway",
"31455": "freewill",
"31456": "freezable",
"31461": "freezing",
"31462": "freight",
"31463": "french",
"31464": "frenzied",
"31465": "frenzy",
"31466": "frequency",
"31511": "frequent",
"31512": "fresh",
"31513": "fretful",
"31514": "fretted",
"31515": "friction",
"31516": "friday",
"31521": "fridge",
"31522": "fried",
"31523": "friend",
"31524": "frighten",
"31525": "frightful",
"31526": "frigidity",
"31531": "frigidly",
"31532": "frill",
"31533": "fringe",
"31534": "frisbee",
"31535": "frisk",
"31536": "fritter",
"31541": "frivolous",
"31542": "frolic",
"31543": "from",
"31544": "front",
"31545": "frostbite",
"31546": "frosted",
"31551": "frostily",
"31552": "frosting",
"31553": "frostlike",
"31554": "frosty",
"31555": "froth",
"31556": "frown",
"31561": "frozen",
"31562": "fructose",
"31563": "frugality",
"31564": "frugally",
"31565": "fruit",
"31566": "frustrate",
"31611": "frying",
"31612": "gab",
"31613": "gaffe",
"31614": "gag",
"31615": "gainfully",
"31616": "gaining",
"31621": "gains",
"31622": "gala",
"31623": "gallantly",
"31624": "galleria",
"31625": "gallery",
"31626": "galley",
"31631": "gallon",
"31632": "gallows",
"31633": "gallstone",
"31634": "galore",
"31635": "galvanize",
"31636": "gambling",
"31641": "game",
"31642": "gaming",
"31643": "gamma",
"31644": "gander",
"31645": "gangly",
"31646": "gangrene",
"31651": "gangway",
"31652": "gap",
"31653": "garage",
"31654": "garbage",
"31655": "garden",
"31656": "gargle",
"31661": "garland",
"31662": "garlic",
"31663": "garment",
"31664": "garnet",
"31665": "garnish",
"31666": "garter",
"32111": "gas",
"32112": "gatherer",
"32113": "gathering",
"32114": "gating",
"32115": "gauging",
"32116": "gauntlet",
"32121": "gauze",
"32122": "gave",
"32123": "gawk",
"32124": "gazing",
"32125": "gear",
"32126": "gecko",
"32131": "geek",
"32132": "geiger",
"32133": "gem",
"32134": "gender",
"32135": "generic",
"32136": "generous",
"32141": "genetics",
"32142": "genre",
"32143": "gentile",
"32144": "gentleman",
"32145": "gently",
"32146": "gents",
"32151": "geography",
"32152": "geologic",
"32153": "geologist",
"32154": "geology",
"32155": "geometric",
"32156": "geometry",
"32161": "geranium",
"32162": "gerbil",
"32163": "geriatric",
"32164": "germicide",
"32165": "germinate",
"32166": "germless",
"32211": "germproof",
"32212": "gestate",
"32213": "gestation",
"32214": "gesture",
"32215": "getaway",
"32216": "getting",
"32221": "getup",
"32222": "giant",
"32223": "gibberish",
"32224": "giblet",
"32225": "giddily",
"32226": "giddiness",
"32231": "giddy",
"32232": "gift",
"32233": "gigabyte",
"32234": "gigahertz",
"32235": "gigantic",
"32236": "giggle",
"32241": "giggling",
"32242": "giggly",
"32243": "gigolo",
"32244": "gilled",
"32245": "gills",
"32246": "gimmick",
"32251": "girdle",
"32252": "giveaway",
"32253": "given",
"32254": "giver",
"32255": "giving",
"32256": "gizmo",
"32261": "gizzard",
"32262": "glacial",
"32263": "glacier",
"32264": "glade",
"32265": "gladiator",
"32266": "gladly",
"32311": "glamorous",
"32312": "glamour",
"32313": "glance",
"32314": "glancing",
"32315": "glandular",
"32316": "glare",
"32321": "glaring",
"32322": "glass",
"32323": "glaucoma",
"32324": "glazing",
"32325": "gleaming",
"32326": "gleeful",
"32331": "glider",
"32332": "gliding",
"32333": "glimmer",
"32334": "glimpse",
"32335": "glisten",
"32336": "glitch",
"32341": "glitter",
"32342": "glitzy",
"32343": "gloater",
"32344": "gloating",
"32345": "gloomily",
"32346": "gloomy",
"32351": "glorified",
"32352": "glorifier",
"32353": "glorify",
"32354": "glorious",
"32355": "glory",
"32356": "gloss",
"32361": "glove",
"32362": "glowing",
"32363": "glowworm",
"32364": "glucose",
"32365": "glue",
"32366": "gluten",
"32411": "glutinous",
"32412": "glutton",
"32413": "gnarly",
"32414": "gnat",
"32415": "goal",
"32416": "goatskin",
"32421": "goes",
"32422": "goggles",
"32423": "going",
"32424": "goldfish",
"32425": "goldmine",
"32426": "goldsmith",
"32431": "golf",
"32432": "goliath",
"32433": "gonad",
"32434": "gondola",
"32435": "gone",
"32436": "gong",
"32441": "good",
"32442": "gooey",
"32443": "goofball",
"32444": "goofiness",
"32445": "goofy",
"32446": "google",
"32451": "goon",
"32452": "gopher",
"32453": "gore",
"32454": "gorged",
"32455": "gorgeous",
"32456": "gory",
"32461": "gosling",
"32462": "gossip",
"32463": "gothic",
"32464": "gotten",
"32465": "gout",
"32466": "gown",
"32511": "grab",
"32512": "graceful",
"32513": "graceless",
"32514": "gracious",
"32515": "gradation",
"32516": "graded",
"32521": "grader",
"32522": "gradient",
"32523": "grading",
"32524": "gradually",
"32525": "graduate",
"32526": "graffiti",
"32531": "grafted",
"32532": "grafting",
"32533": "grain",
"32534": "granddad",
"32535": "grandkid",
"32536": "grandly",
"32541": "grandma",
"32542": "grandpa",
"32543": "grandson",
"32544": "granite",
"32545": "granny",
"32546": "granola",
"32551": "grant",
"32552": "granular",
"32553": "grape",
"32554": "graph",
"32555": "grapple",
"32556": "grappling",
"32561": "grasp",
"32562": "grass",
"32563": "gratified",
"32564": "gratify",
"32565": "grating",
"32566": "gratitude",
"32611": "gratuity",
"32612": "gravel",
"32613": "graveness",
"32614": "graves",
"32615": "graveyard",
"32616": "gravitate",
"32621": "gravity",
"32622": "gravy",
"32623": "gray",
"32624": "grazing",
"32625": "greasily",
"32626": "greedily",
"32631": "greedless",
"32632": "greedy",
"32633": "green",
"32634": "greeter",
"32635": "greeting",
"32636": "grew",
"32641": "greyhound",
"32642": "grid",
"32643": "grief",
"32644": "grievance",
"32645": "grieving",
"32646": "grievous",
"32651": "grill",
"32652": "grimace",
"32653": "grimacing",
"32654": "grime",
"32655": "griminess",
"32656": "grimy",
"32661": "grinch",
"32662": "grinning",
"32663": "grip",
"32664": "gristle",
"32665": "grit",
"32666": "groggily",
"33111": "groggy",
"33112": "groin",
"33113": "groom",
"33114": "groove",
"33115": "grooving",
"33116": "groovy",
"33121": "grope",
"33122": "ground",
"33123": "grouped",
"33124": "grout",
"33125": "grove",
"33126": "grower",
"33131": "growing",
"33132": "growl",
"33133": "grub",
"33134": "grudge",
"33135": "grudging",
"33136": "grueling",
"33141": "gruffly",
"33142": "grumble",
"33143": "grumbling",
"33144": "grumbly",
"33145": "grumpily",
"33146": "grunge",
"33151": "grunt",
"33152": "guacamole",
"33153": "guidable",
"33154": "guidance",
"33155": "guide",
"33156": "guiding",
"33161": "guileless",
"33162": "guise",
"33163": "gulf",
"33164": "gullible",
"33165": "gully",
"33166": "gulp",
"33211": "gumball",
"33212": "gumdrop",
"33213": "gumminess",
"33214": "gumming",
"33215": "gummy",
"33216": "gurgle",
"33221": "gurgling",
"33222": "guru",
"33223": "gush",
"33224": "gusto",
"33225": "gusty",
"33226": "gutless",
"33231": "guts",
"33232": "gutter",
"33233": "guy",
"33234": "guzzler",
"33235": "gyration",
"33236": "habitable",
"33241": "habitant",
"33242": "habitat",
"33243": "habitual",
"33244": "hacked",
"33245": "hacker",
"33246": "hacking",
"33251": "hacksaw",
"33252": "had",
"33253": "haggler",
"33254": "haiku",
"33255": "half",
"33256": "halogen",
"33261": "halt",
"33262": "halved",
"33263": "halves",
"33264": "hamburger",
"33265": "hamlet",
"33266": "hammock",
"33311": "hamper",
"33312": "hamster",
"33313": "hamstring",
"33314": "handbag",
"33315": "handball",
"33316": "handbook",
"33321": "handbrake",
"33322": "handcart",
"33323": "handclap",
"33324": "handclasp",
"33325": "handcraft",
"33326": "handcuff",
"33331": "handed",
"33332": "handful",
"33333": "handgrip",
"33334": "handgun",
"33335": "handheld",
"33336": "handiness",
"33341": "handiwork",
"33342": "handlebar",
"33343": "handled",
"33344": "handler",
"33345": "handling",
"33346": "handmade",
"33351": "handoff",
"33352": "handpick",
"33353": "handprint",
"33354": "handrail",
"33355": "handsaw",
"33356": "handset",
"33361": "handsfree",
"33362": "handshake",
"33363": "handstand",
"33364": "handwash",
"33365": "handwork",
"33366": "handwoven",
"33411": "handwrite",
"33412": "handyman",
"33413": "hangnail",
"33414": "hangout",
"33415": "hangover",
"33416": "hangup",
"33421": "hankering",
"33422": "hankie",
"33423": "hanky",
"33424": "haphazard",
"33425": "happening",
"33426": "happier",
"33431": "happiest",
"33432": "happily",
"33433": "happiness",
"33434": "happy",
"33435": "harbor",
"33436": "hardcopy",
"33441": "hardcore",
"33442": "hardcover",
"33443": "harddisk",
"33444": "hardened",
"33445": "hardener",
"33446": "hardening",
"33451": "hardhat",
"33452": "hardhead",
"33453": "hardiness",
"33454": "hardly",
"33455": "hardness",
"33456": "hardship",
"33461": "hardware",
"33462": "hardwired",
"33463": "hardwood",
"33464": "hardy",
"33465": "harmful",
"33466": "harmless",
"33511": "harmonica",
"33512": "harmonics",
"33513": "harmonize",
"33514": "harmony",
"33515": "harness",
"33516": "harpist",
"33521": "harsh",
"33522": "harvest",
"33523": "hash",
"33524": "hassle",
"33525": "haste",
"33526": "hastily",
"33531": "hastiness",
"33532": "hasty",
"33533": "hatbox",
"33534": "hatchback",
"33535": "hatchery",
"33536": "hatchet",
"33541": "hatching",
"33542": "hatchling",
"33543": "hate",
"33544": "hatless",
"33545": "hatred",
"33546": "haunt",
"33551": "haven",
"33552": "hazard",
"33553": "hazelnut",
"33554": "hazily",
"33555": "haziness",
"33556": "hazing",
"33561": "hazy",
"33562": "headache",
"33563": "headband",
"33564": "headboard",
"33565": "headcount",
"33566": "headdress",
"33611": "headed",
"33612": "header",
"33613": "headfirst",
"33614": "headgear",
"33615": "heading",
"33616": "headlamp",
"33621": "headless",
"33622": "headlock",
"33623": "headphone",
"33624": "headpiece",
"33625": "headrest",
"33626": "headroom",
"33631": "headscarf",
"33632": "headset",
"33633": "headsman",
"33634": "headstand",
"33635": "headstone",
"33636": "headway",
"33641": "headwear",
"33642": "heap",
"33643": "heat",
"33644": "heave",
"33645": "heavily",
"33646": "heaviness",
"33651": "heaving",
"33652": "hedge",
"33653": "hedging",
"33654": "heftiness",
"33655": "hefty",
"33656": "helium",
"33661": "helmet",
"33662": "helper",
"33663": "helpful",
"33664": "helping",
"33665": "helpless",
"33666": "helpline",
"34111": "hemlock",
"34112": "hemstitch",
"34113": "hence",
"34114": "henchman",
"34115": "henna",
"34116": "herald",
"34121": "herbal",
"34122": "herbicide",
"34123": "herbs",
"34124": "heritage",
"34125": "hermit",
"34126": "heroics",
"34131": "heroism",
"34132": "herring",
"34133": "herself",
"34134": "hertz",
"34135": "hesitancy",
"34136": "hesitant",
"34141": "hesitate",
"34142": "hexagon",
"34143": "hexagram",
"34144": "hubcap",
"34145": "huddle",
"34146": "huddling",
"34151": "huff",
"34152": "hug",
"34153": "hula",
"34154": "hulk",
"34155": "hull",
"34156": "human",
"34161": "humble",
"34162": "humbling",
"34163": "humbly",
"34164": "humid",
"34165": "humiliate",
"34166": "humility",
"34211": "humming",
"34212": "hummus",
"34213": "humongous",
"34214": "humorist",
"34215": "humorless",
"34216": "humorous",
"34221": "humpback",
"34222": "humped",
"34223": "humvee",
"34224": "hunchback",
"34225": "hundredth",
"34226": "hunger",
"34231": "hungrily",
"34232": "hungry",
"34233": "hunk",
"34234": "hunter",
"34235": "hunting",
"34236": "huntress",
"34241": "huntsman",
"34242": "hurdle",
"34243": "hurled",
"34244": "hurler",
"34245": "hurling",
"34246": "hurray",
"34251": "hurricane",
"34252": "hurried",
"34253": "hurry",
"34254": "hurt",
"34255": "husband",
"34256": "hush",
"34261": "husked",
"34262": "huskiness",
"34263": "hut",
"34264": "hybrid",
"34265": "hydrant",
"34266": "hydrated",
"34311": "hydration",
"34312": "hydrogen",
"34313": "hydroxide",
"34314": "hyperlink",
"34315": "hypertext",
"34316": "hyphen",
"34321": "hypnoses",
"34322": "hypnosis",
"34323": "hypnotic",
"34324": "hypnotism",
"34325": "hypnotist",
"34326": "hypnotize",
"34331": "hypocrisy",
"34332": "hypocrite",
"34333": "ibuprofen",
"34334": "ice",
"34335": "iciness",
"34336": "icing",
"34341": "icky",
"34342": "icon",
"34343": "icy",
"34344": "idealism",
"34345": "idealist",
"34346": "idealize",
"34351": "ideally",
"34352": "idealness",
"34353": "identical",
"34354": "identify",
"34355": "identity",
"34356": "ideology",
"34361": "idiocy",
"34362": "idiom",
"34363": "idly",
"34364": "igloo",
"34365": "ignition",
"34366": "ignore",
"34411": "iguana",
"34412": "illicitly",
"34413": "illusion",
"34414": "illusive",
"34415": "image",
"34416": "imaginary",
"34421": "imagines",
"34422": "imaging",
"34423": "imbecile",
"34424": "imitate",
"34425": "imitation",
"34426": "immature",
"34431": "immerse",
"34432": "immersion",
"34433": "imminent",
"34434": "immobile",
"34435": "immodest",
"34436": "immorally",
"34441": "immortal",
"34442": "immovable",
"34443": "immovably",
"34444": "immunity",
"34445": "immunize",
"34446": "impaired",
"34451": "impale",
"34452": "impart",
"34453": "impatient",
"34454": "impeach",
"34455": "impeding",
"34456": "impending",
"34461": "imperfect",
"34462": "imperial",
"34463": "impish",
"34464": "implant",
"34465": "implement",
"34466": "implicate",
"34511": "implicit",
"34512": "implode",
"34513": "implosion",
"34514": "implosive",
"34515": "imply",
"34516": "impolite",
"34521": "important",
"34522": "importer",
"34523": "impose",
"34524": "imposing",
"34525": "impotence",
"34526": "impotency",
"34531": "impotent",
"34532": "impound",
"34533": "imprecise",
"34534": "imprint",
"34535": "imprison",
"34536": "impromptu",
"34541": "improper",
"34542": "improve",
"34543": "improving",
"34544": "improvise",
"34545": "imprudent",
"34546": "impulse",
"34551": "impulsive",
"34552": "impure",
"34553": "impurity",
"34554": "iodine",
"34555": "iodize",
"34556": "ion",
"34561": "ipad",
"34562": "iphone",
"34563": "ipod",
"34564": "irate",
"34565": "irk",
"34566": "iron",
"34611": "irregular",
"34612": "irrigate",
"34613": "irritable",
"34614": "irritably",
"34615": "irritant",
"34616": "irritate",
"34621": "islamic",
"34622": "islamist",
"34623": "isolated",
"34624": "isolating",
"34625": "isolation",
"34626": "isotope",
"34631": "issue",
"34632": "issuing",
"34633": "italicize",
"34634": "italics",
"34635": "item",
"34636": "itinerary",
"34641": "itunes",
"34642": "ivory",
"34643": "ivy",
"34644": "jab",
"34645": "jackal",
"34646": "jacket",
"34651": "jackknife",
"34652": "jackpot",
"34653": "jailbird",
"34654": "jailbreak",
"34655": "jailer",
"34656": "jailhouse",
"34661": "jalapeno",
"34662": "jam",
"34663": "janitor",
"34664": "january",
"34665": "jargon",
"34666": "jarring",
"35111": "jasmine",
"35112": "jaundice",
"35113": "jaunt",
"35114": "java",
"35115": "jawed",
"35116": "jawless",
"35121": "jawline",
"35122": "jaws",
"35123": "jaybird",
"35124": "jaywalker",
"35125": "jazz",
"35126": "jeep",
"35131": "jeeringly",
"35132": "jellied",
"35133": "jelly",
"35134": "jersey",
"35135": "jester",
"35136": "jet",
"35141": "jiffy",
"35142": "jigsaw",
"35143": "jimmy",
"35144": "jingle",
"35145": "jingling",
"35146": "jinx",
"35151": "jitters",
"35152": "jittery",
"35153": "job",
"35154": "jockey",
"35155": "jockstrap",
"35156": "jogger",
"35161": "jogging",
"35162": "john",
"35163": "joining",
"35164": "jokester",
"35165": "jokingly",
"35166": "jolliness",
"35211": "jolly",
"35212": "jolt",
"35213": "jot",
"35214": "jovial",
"35215": "joyfully",
"35216": "joylessly",
"35221": "joyous",
"35222": "joyride",
"35223": "joystick",
"35224": "jubilance",
"35225": "jubilant",
"35226": "judge",
"35231": "judgingly",
"35232": "judicial",
"35233": "judiciary",
"35234": "judo",
"35235": "juggle",
"35236": "juggling",
"35241": "jugular",
"35242": "juice",
"35243": "juiciness",
"35244": "juicy",
"35245": "jujitsu",
"35246": "jukebox",
"35251": "july",
"35252": "jumble",
"35253": "jumbo",
"35254": "jump",
"35255": "junction",
"35256": "juncture",
"35261": "june",
"35262": "junior",
"35263": "juniper",
"35264": "junkie",
"35265": "junkman",
"35266": "junkyard",
"35311": "jurist",
"35312": "juror",
"35313": "jury",
"35314": "justice",
"35315": "justifier",
"35316": "justify",
"35321": "justly",
"35322": "justness",
"35323": "juvenile",
"35324": "kabob",
"35325": "kangaroo",
"35326": "karaoke",
"35331": "karate",
"35332": "karma",
"35333": "kebab",
"35334": "keenly",
"35335": "keenness",
"35336": "keep",
"35341": "keg",
"35342": "kelp",
"35343": "kennel",
"35344": "kept",
"35345": "kerchief",
"35346": "kerosene",
"35351": "kettle",
"35352": "kick",
"35353": "kiln",
"35354": "kilobyte",
"35355": "kilogram",
"35356": "kilometer",
"35361": "kilowatt",
"35362": "kilt",
"35363": "kimono",
"35364": "kindle",
"35365": "kindling",
"35366": "kindly",
"35411": "kindness",
"35412": "kindred",
"35413": "kinetic",
"35414": "kinfolk",
"35415": "king",
"35416": "kinship",
"35421": "kinsman",
"35422": "kinswoman",
"35423": "kissable",
"35424": "kisser",
"35425": "kissing",
"35426": "kitchen",
"35431": "kite",
"35432": "kitten",
"35433": "kitty",
"35434": "kiwi",
"35435": "kleenex",
"35436": "knapsack",
"35441": "knee",
"35442": "knelt",
"35443": "knickers",
"35444": "knoll",
"35445": "koala",
"35446": "kooky",
"35451": "kosher",
"35452": "krypton",
"35453": "kudos",
"35454": "kung",
"35455": "labored",
"35456": "laborer",
"35461": "laboring",
"35462": "laborious",
"35463": "labrador",
"35464": "ladder",
"35465": "ladies",
"35466": "ladle",
"35511": "ladybug",
"35512": "ladylike",
"35513": "lagged",
"35514": "lagging",
"35515": "lagoon",
"35516": "lair",
"35521": "lake",
"35522": "lance",
"35523": "landed",
"35524": "landfall",
"35525": "landfill",
"35526": "landing",
"35531": "landlady",
"35532": "landless",
"35533": "landline",
"35534": "landlord",
"35535": "landmark",
"35536": "landmass",
"35541": "landmine",
"35542": "landowner",
"35543": "landscape",
"35544": "landside",
"35545": "landslide",
"35546": "language",
"35551": "lankiness",
"35552": "lanky",
"35553": "lantern",
"35554": "lapdog",
"35555": "lapel",
"35556": "lapped",
"35561": "lapping",
"35562": "laptop",
"35563": "lard",
"35564": "large",
"35565": "lark",
"35566": "lash",
"35611": "lasso",
"35612": "last",
"35613": "latch",
"35614": "late",
"35615": "lather",
"35616": "latitude",
"35621": "latrine",
"35622": "latter",
"35623": "latticed",
"35624": "launch",
"35625": "launder",
"35626": "laundry",
"35631": "laurel",
"35632": "lavender",
"35633": "lavish",
"35634": "laxative",
"35635": "lazily",
"35636": "laziness",
"35641": "lazy",
"35642": "lecturer",
"35643": "left",
"35644": "legacy",
"35645": "legal",
"35646": "legend",
"35651": "legged",
"35652": "leggings",
"35653": "legible",
"35654": "legibly",
"35655": "legislate",
"35656": "lego",
"35661": "legroom",
"35662": "legume",
"35663": "legwarmer",
"35664": "legwork",
"35665": "lemon",
"35666": "lend",
"36111": "length",
"36112": "lens",
"36113": "lent",
"36114": "leotard",
"36115": "lesser",
"36116": "letdown",
"36121": "lethargic",
"36122": "lethargy",
"36123": "letter",
"36124": "lettuce",
"36125": "level",
"36126": "leverage",
"36131": "levers",
"36132": "levitate",
"36133": "levitator",
"36134": "liability",
"36135": "liable",
"36136": "liberty",
"36141": "librarian",
"36142": "library",
"36143": "licking",
"36144": "licorice",
"36145": "lid",
"36146": "life",
"36151": "lifter",
"36152": "lifting",
"36153": "liftoff",
"36154": "ligament",
"36155": "likely",
"36156": "likeness",
"36161": "likewise",
"36162": "liking",
"36163": "lilac",
"36164": "lilly",
"36165": "lily",
"36166": "limb",
"36211": "limeade",
"36212": "limelight",
"36213": "limes",
"36214": "limit",
"36215": "limping",
"36216": "limpness",
"36221": "line",
"36222": "lingo",
"36223": "linguini",
"36224": "linguist",
"36225": "lining",
"36226": "linked",
"36231": "linoleum",
"36232": "linseed",
"36233": "lint",
"36234": "lion",
"36235": "lip",
"36236": "liquefy",
"36241": "liqueur",
"36242": "liquid",
"36243": "lisp",
"36244": "list",
"36245": "litigate",
"36246": "litigator",
"36251": "litmus",
"36252": "litter",
"36253": "little",
"36254": "livable",
"36255": "lived",
"36256": "lively",
"36261": "liver",
"36262": "livestock",
"36263": "lividly",
"36264": "living",
"36265": "lizard",
"36266": "lubricant",
"36311": "lubricate",
"36312": "lucid",
"36313": "luckily",
"36314": "luckiness",
"36315": "luckless",
"36316": "lucrative",
"36321": "ludicrous",
"36322": "lugged",
"36323": "lukewarm",
"36324": "lullaby",
"36325": "lumber",
"36326": "luminance",
"36331": "luminous",
"36332": "lumpiness",
"36333": "lumping",
"36334": "lumpish",
"36335": "lunacy",
"36336": "lunar",
"36341": "lunchbox",
"36342": "luncheon",
"36343": "lunchroom",
"36344": "lunchtime",
"36345": "lung",
"36346": "lurch",
"36351": "lure",
"36352": "luridness",
"36353": "lurk",
"36354": "lushly",
"36355": "lushness",
"36356": "luster",
"36361": "lustfully",
"36362": "lustily",
"36363": "lustiness",
"36364": "lustrous",
"36365": "lusty",
"36366": "luxurious",
"36411": "luxury",
"36412": "lying",
"36413": "lyrically",
"36414": "lyricism",
"36415": "lyricist",
"36416": "lyrics",
"36421": "macarena",
"36422": "macaroni",
"36423": "macaw",
"36424": "mace",
"36425": "machine",
"36426": "machinist",
"36431": "magazine",
"36432": "magenta",
"36433": "maggot",
"36434": "magical",
"36435": "magician",
"36436": "magma",
"36441": "magnesium",
"36442": "magnetic",
"36443": "magnetism",
"36444": "magnetize",
"36445": "magnifier",
"36446": "magnify",
"36451": "magnitude",
"36452": "magnolia",
"36453": "mahogany",
"36454": "maimed",
"36455": "majestic",
"36456": "majesty",
"36461": "majorette",
"36462": "majority",
"36463": "makeover",
"36464": "maker",
"36465": "makeshift",
"36466": "making",
"36511": "malformed",
"36512": "malt",
"36513": "mama",
"36514": "mammal",
"36515": "mammary",
"36516": "mammogram",
"36521": "manager",
"36522": "managing",
"36523": "manatee",
"36524": "mandarin",
"36525": "mandate",
"36526": "mandatory",
"36531": "mandolin",
"36532": "manger",
"36533": "mangle",
"36534": "mango",
"36535": "mangy",
"36536": "manhandle",
"36541": "manhole",
"36542": "manhood",
"36543": "manhunt",
"36544": "manicotti",
"36545": "manicure",
"36546": "manifesto",
"36551": "manila",
"36552": "mankind",
"36553": "manlike",
"36554": "manliness",
"36555": "manly",
"36556": "manmade",
"36561": "manned",
"36562": "mannish",
"36563": "manor",
"36564": "manpower",
"36565": "mantis",
"36566": "mantra",
"36611": "manual",
"36612": "many",
"36613": "map",
"36614": "marathon",
"36615": "marauding",
"36616": "marbled",
"36621": "marbles",
"36622": "marbling",
"36623": "march",
"36624": "mardi",
"36625": "margarine",
"36626": "margarita",
"36631": "margin",
"36632": "marigold",
"36633": "marina",
"36634": "marine",
"36635": "marital",
"36636": "maritime",
"36641": "marlin",
"36642": "marmalade",
"36643": "maroon",
"36644": "married",
"36645": "marrow",
"36646": "marry",
"36651": "marshland",
"36652": "marshy",
"36653": "marsupial",
"36654": "marvelous",
"36655": "marxism",
"36656": "mascot",
"36661": "masculine",
"36662": "mashed",
"36663": "mashing",
"36664": "massager",
"36665": "masses",
"36666": "massive",
"41111": "mastiff",
"41112": "matador",
"41113": "matchbook",
"41114": "matchbox",
"41115": "matcher",
"41116": "matching",
"41121": "matchless",
"41122": "material",
"41123": "maternal",
"41124": "maternity",
"41125": "math",
"41126": "mating",
"41131": "matriarch",
"41132": "matrimony",
"41133": "matrix",
"41134": "matron",
"41135": "matted",
"41136": "matter",
"41141": "maturely",
"41142": "maturing",
"41143": "maturity",
"41144": "mauve",
"41145": "maverick",
"41146": "maximize",
"41151": "maximum",
"41152": "maybe",
"41153": "mayday",
"41154": "mayflower",
"41155": "moaner",
"41156": "moaning",
"41161": "mobile",
"41162": "mobility",
"41163": "mobilize",
"41164": "mobster",
"41165": "mocha",
"41166": "mocker",
"41211": "mockup",
"41212": "modified",
"41213": "modify",
"41214": "modular",
"41215": "modulator",
"41216": "module",
"41221": "moisten",
"41222": "moistness",
"41223": "moisture",
"41224": "molar",
"41225": "molasses",
"41226": "mold",
"41231": "molecular",
"41232": "molecule",
"41233": "molehill",
"41234": "mollusk",
"41235": "mom",
"41236": "monastery",
"41241": "monday",
"41242": "monetary",
"41243": "monetize",
"41244": "moneybags",
"41245": "moneyless",
"41246": "moneywise",
"41251": "mongoose",
"41252": "mongrel",
"41253": "monitor",
"41254": "monkhood",
"41255": "monogamy",
"41256": "monogram",
"41261": "monologue",
"41262": "monopoly",
"41263": "monorail",
"41264": "monotone",
"41265": "monotype",
"41266": "monoxide",
"41311": "monsieur",
"41312": "monsoon",
"41313": "monstrous",
"41314": "monthly",
"41315": "monument",
"41316": "moocher",
"41321": "moodiness",
"41322": "moody",
"41323": "mooing",
"41324": "moonbeam",
"41325": "mooned",
"41326": "moonlight",
"41331": "moonlike",
"41332": "moonlit",
"41333": "moonrise",
"41334": "moonscape",
"41335": "moonshine",
"41336": "moonstone",
"41341": "moonwalk",
"41342": "mop",
"41343": "morale",
"41344": "morality",
"41345": "morally",
"41346": "morbidity",
"41351": "morbidly",
"41352": "morphine",
"41353": "morphing",
"41354": "morse",
"41355": "mortality",
"41356": "mortally",
"41361": "mortician",
"41362": "mortified",
"41363": "mortify",
"41364": "mortuary",
"41365": "mosaic",
"41366": "mossy",
"41411": "most",
"41412": "mothball",
"41413": "mothproof",
"41414": "motion",
"41415": "motivate",
"41416": "motivator",
"41421": "motive",
"41422": "motocross",
"41423": "motor",
"41424": "motto",
"41425": "mountable",
"41426": "mountain",
"41431": "mounted",
"41432": "mounting",
"41433": "mourner",
"41434": "mournful",
"41435": "mouse",
"41436": "mousiness",
"41441": "moustache",
"41442": "mousy",
"41443": "mouth",
"41444": "movable",
"41445": "move",
"41446": "movie",
"41451": "moving",
"41452": "mower",
"41453": "mowing",
"41454": "much",
"41455": "muck",
"41456": "mud",
"41461": "mug",
"41462": "mulberry",
"41463": "mulch",
"41464": "mule",
"41465": "mulled",
"41466": "mullets",
"41511": "multiple",
"41512": "multiply",
"41513": "multitask",
"41514": "multitude",
"41515": "mumble",
"41516": "mumbling",
"41521": "mumbo",
"41522": "mummified",
"41523": "mummify",
"41524": "mummy",
"41525": "mumps",
"41526": "munchkin",
"41531": "mundane",
"41532": "municipal",
"41533": "muppet",
"41534": "mural",
"41535": "murkiness",
"41536": "murky",
"41541": "murmuring",
"41542": "muscular",
"41543": "museum",
"41544": "mushily",
"41545": "mushiness",
"41546": "mushroom",
"41551": "mushy",
"41552": "music",
"41553": "musket",
"41554": "muskiness",
"41555": "musky",
"41556": "mustang",
"41561": "mustard",
"41562": "muster",
"41563": "mustiness",
"41564": "musty",
"41565": "mutable",
"41566": "mutate",
"41611": "mutation",
"41612": "mute",
"41613": "mutilated",
"41614": "mutilator",
"41615": "mutiny",
"41616": "mutt",
"41621": "mutual",
"41622": "muzzle",
"41623": "myself",
"41624": "myspace",
"41625": "mystified",
"41626": "mystify",
"41631": "myth",
"41632": "nacho",
"41633": "nag",
"41634": "nail",
"41635": "name",
"41636": "naming",
"41641": "nanny",
"41642": "nanometer",
"41643": "nape",
"41644": "napkin",
"41645": "napped",
"41646": "napping",
"41651": "nappy",
"41652": "narrow",
"41653": "nastily",
"41654": "nastiness",
"41655": "national",
"41656": "native",
"41661": "nativity",
"41662": "natural",
"41663": "nature",
"41664": "naturist",
"41665": "nautical",
"41666": "navigate",
"42111": "navigator",
"42112": "navy",
"42113": "nearby",
"42114": "nearest",
"42115": "nearly",
"42116": "nearness",
"42121": "neatly",
"42122": "neatness",
"42123": "nebula",
"42124": "nebulizer",
"42125": "nectar",
"42126": "negate",
"42131": "negation",
"42132": "negative",
"42133": "neglector",
"42134": "negligee",
"42135": "negligent",
"42136": "negotiate",
"42141": "nemeses",
"42142": "nemesis",
"42143": "neon",
"42144": "nephew",
"42145": "nerd",
"42146": "nervous",
"42151": "nervy",
"42152": "nest",
"42153": "net",
"42154": "neurology",
"42155": "neuron",
"42156": "neurosis",
"42161": "neurotic",
"42162": "neuter",
"42163": "neutron",
"42164": "never",
"42165": "next",
"42166": "nibble",
"42211": "nickname",
"42212": "nicotine",
"42213": "niece",
"42214": "nifty",
"42215": "nimble",
"42216": "nimbly",
"42221": "nineteen",
"42222": "ninetieth",
"42223": "ninja",
"42224": "nintendo",
"42225": "ninth",
"42226": "nuclear",
"42231": "nuclei",
"42232": "nucleus",
"42233": "nugget",
"42234": "nullify",
"42235": "number",
"42236": "numbing",
"42241": "numbly",
"42242": "numbness",
"42243": "numeral",
"42244": "numerate",
"42245": "numerator",
"42246": "numeric",
"42251": "numerous",
"42252": "nuptials",
"42253": "nursery",
"42254": "nursing",
"42255": "nurture",
"42256": "nutcase",
"42261": "nutlike",
"42262": "nutmeg",
"42263": "nutrient",
"42264": "nutshell",
"42265": "nuttiness",
"42266": "nutty",
"42311": "nuzzle",
"42312": "nylon",
"42313": "oaf",
"42314": "oak",
"42315": "oasis",
"42316": "oat",
"42321": "obedience",
"42322": "obedient",
"42323": "obituary",
"42324": "object",
"42325": "obligate",
"42326": "obliged",
"42331": "oblivion",
"42332": "oblivious",
"42333": "oblong",
"42334": "obnoxious",
"42335": "oboe",
"42336": "obscure",
"42341": "obscurity",
"42342": "observant",
"42343": "observer",
"42344": "observing",
"42345": "obsessed",
"42346": "obsession",
"42351": "obsessive",
"42352": "obsolete",
"42353": "obstacle",
"42354": "obstinate",
"42355": "obstruct",
"42356": "obtain",
"42361": "obtrusive",
"42362": "obtuse",
"42363": "obvious",
"42364": "occultist",
"42365": "occupancy",
"42366": "occupant",
"42411": "occupier",
"42412": "occupy",
"42413": "ocean",
"42414": "ocelot",
"42415": "octagon",
"42416": "octane",
"42421": "october",
"42422": "octopus",
"42423": "ogle",
"42424": "oil",
"42425": "oink",
"42426": "ointment",
"42431": "okay",
"42432": "old",
"42433": "olive",
"42434": "olympics",
"42435": "omega",
"42436": "omen",
"42441": "ominous",
"42442": "omission",
"42443": "omit",
"42444": "omnivore",
"42445": "onboard",
"42446": "oncoming",
"42451": "ongoing",
"42452": "onion",
"42453": "online",
"42454": "onlooker",
"42455": "only",
"42456": "onscreen",
"42461": "onset",
"42462": "onshore",
"42463": "onslaught",
"42464": "onstage",
"42465": "onto",
"42466": "onward",
"42511": "onyx",
"42512": "oops",
"42513": "ooze",
"42514": "oozy",
"42515": "opacity",
"42516": "opal",
"42521": "open",
"42522": "operable",
"42523": "operate",
"42524": "operating",
"42525": "operation",
"42526": "operative",
"42531": "operator",
"42532": "opium",
"42533": "opossum",
"42534": "opponent",
"42535": "oppose",
"42536": "opposing",
"42541": "opposite",
"42542": "oppressed",
"42543": "oppressor",
"42544": "opt",
"42545": "opulently",
"42546": "osmosis",
"42551": "other",
"42552": "otter",
"42553": "ouch",
"42554": "ought",
"42555": "ounce",
"42556": "outage",
"42561": "outback",
"42562": "outbid",
"42563": "outboard",
"42564": "outbound",
"42565": "outbreak",
"42566": "outburst",
"42611": "outcast",
"42612": "outclass",
"42613": "outcome",
"42614": "outdated",
"42615": "outdoors",
"42616": "outer",
"42621": "outfield",
"42622": "outfit",
"42623": "outflank",
"42624": "outgoing",
"42625": "outgrow",
"42626": "outhouse",
"42631": "outing",
"42632": "outlast",
"42633": "outlet",
"42634": "outline",
"42635": "outlook",
"42636": "outlying",
"42641": "outmatch",
"42642": "outmost",
"42643": "outnumber",
"42644": "outplayed",
"42645": "outpost",
"42646": "outpour",
"42651": "output",
"42652": "outrage",
"42653": "outrank",
"42654": "outreach",
"42655": "outright",
"42656": "outscore",
"42661": "outsell",
"42662": "outshine",
"42663": "outshoot",
"42664": "outsider",
"42665": "outskirts",
"42666": "outsmart",
"43111": "outsource",
"43112": "outspoken",
"43113": "outtakes",
"43114": "outthink",
"43115": "outward",
"43116": "outweigh",
"43121": "outwit",
"43122": "oval",
"43123": "ovary",
"43124": "oven",
"43125": "overact",
"43126": "overall",
"43131": "overarch",
"43132": "overbid",
"43133": "overbill",
"43134": "overbite",
"43135": "overblown",
"43136": "overboard",
"43141": "overbook",
"43142": "overbuilt",
"43143": "overcast",
"43144": "overcoat",
"43145": "overcome",
"43146": "overcook",
"43151": "overcrowd",
"43152": "overdraft",
"43153": "overdrawn",
"43154": "overdress",
"43155": "overdrive",
"43156": "overdue",
"43161": "overeager",
"43162": "overeater",
"43163": "overexert",
"43164": "overfed",
"43165": "overfeed",
"43166": "overfill",
"43211": "overflow",
"43212": "overfull",
"43213": "overgrown",
"43214": "overhand",
"43215": "overhang",
"43216": "overhaul",
"43221": "overhead",
"43222": "overhear",
"43223": "overheat",
"43224": "overhung",
"43225": "overjoyed",
"43226": "overkill",
"43231": "overlabor",
"43232": "overlaid",
"43233": "overlap",
"43234": "overlay",
"43235": "overload",
"43236": "overlook",
"43241": "overlord",
"43242": "overlying",
"43243": "overnight",
"43244": "overpass",
"43245": "overpay",
"43246": "overplant",
"43251": "overplay",
"43252": "overpower",
"43253": "overprice",
"43254": "overrate",
"43255": "overreach",
"43256": "overreact",
"43261": "override",
"43262": "overripe",
"43263": "overrule",
"43264": "overrun",
"43265": "overshoot",
"43266": "overshot",
"43311": "oversight",
"43312": "oversized",
"43313": "oversleep",
"43314": "oversold",
"43315": "overspend",
"43316": "overstate",
"43321": "overstay",
"43322": "overstep",
"43323": "overstock",
"43324": "overstuff",
"43325": "oversweet",
"43326": "overtake",
"43331": "overthrow",
"43332": "overtime",
"43333": "overtly",
"43334": "overtone",
"43335": "overture",
"43336": "overturn",
"43341": "overuse",
"43342": "overvalue",
"43343": "overview",
"43344": "overwrite",
"43345": "owl",
"43346": "oxford",
"43351": "oxidant",
"43352": "oxidation",
"43353": "oxidize",
"43354": "oxidizing",
"43355": "oxygen",
"43356": "oxymoron",
"43361": "oyster",
"43362": "ozone",
"43363": "paced",
"43364": "pacemaker",
"43365": "pacific",
"43366": "pacifier",
"43411": "pacifism",
"43412": "pacifist",
"43413": "pacify",
"43414": "padded",
"43415": "padding",
"43416": "paddle",
"43421": "paddling",
"43422": "padlock",
"43423": "pagan",
"43424": "pager",
"43425": "paging",
"43426": "pajamas",
"43431": "palace",
"43432": "palatable",
"43433": "palm",
"43434": "palpable",
"43435": "palpitate",
"43436": "paltry",
"43441": "pampered",
"43442": "pamperer",
"43443": "pampers",
"43444": "pamphlet",
"43445": "panama",
"43446": "pancake",
"43451": "pancreas",
"43452": "panda",
"43453": "pandemic",
"43454": "pang",
"43455": "panhandle",
"43456": "panic",
"43461": "panning",
"43462": "panorama",
"43463": "panoramic",
"43464": "panther",
"43465": "pantomime",
"43466": "pantry",
"43511": "pants",
"43512": "pantyhose",
"43513": "paparazzi",
"43514": "papaya",
"43515": "paper",
"43516": "paprika",
"43521": "papyrus",
"43522": "parabola",
"43523": "parachute",
"43524": "parade",
"43525": "paradox",
"43526": "paragraph",
"43531": "parakeet",
"43532": "paralegal",
"43533": "paralyses",
"43534": "paralysis",
"43535": "paralyze",
"43536": "paramedic",
"43541": "parameter",
"43542": "paramount",
"43543": "parasail",
"43544": "parasite",
"43545": "parasitic",
"43546": "parcel",
"43551": "parched",
"43552": "parchment",
"43553": "pardon",
"43554": "parish",
"43555": "parka",
"43556": "parking",
"43561": "parkway",
"43562": "parlor",
"43563": "parmesan",
"43564": "parole",
"43565": "parrot",
"43566": "parsley",
"43611": "parsnip",
"43612": "partake",
"43613": "parted",
"43614": "parting",
"43615": "partition",
"43616": "partly",
"43621": "partner",
"43622": "partridge",
"43623": "party",
"43624": "passable",
"43625": "passably",
"43626": "passage",
"43631": "passcode",
"43632": "passenger",
"43633": "passerby",
"43634": "passing",
"43635": "passion",
"43636": "passive",
"43641": "passivism",
"43642": "passover",
"43643": "passport",
"43644": "password",
"43645": "pasta",
"43646": "pasted",
"43651": "pastel",
"43652": "pastime",
"43653": "pastor",
"43654": "pastrami",
"43655": "pasture",
"43656": "pasty",
"43661": "patchwork",
"43662": "patchy",
"43663": "paternal",
"43664": "paternity",
"43665": "path",
"43666": "patience",
"44111": "patient",
"44112": "patio",
"44113": "patriarch",
"44114": "patriot",
"44115": "patrol",
"44116": "patronage",
"44121": "patronize",
"44122": "pauper",
"44123": "pavement",
"44124": "paver",
"44125": "pavestone",
"44126": "pavilion",
"44131": "paving",
"44132": "pawing",
"44133": "payable",
"44134": "payback",
"44135": "paycheck",
"44136": "payday",
"44141": "payee",
"44142": "payer",
"44143": "paying",
"44144": "payment",
"44145": "payphone",
"44146": "payroll",
"44151": "pebble",
"44152": "pebbly",
"44153": "pecan",
"44154": "pectin",
"44155": "peculiar",
"44156": "peddling",
"44161": "pediatric",
"44162": "pedicure",
"44163": "pedigree",
"44164": "pedometer",
"44165": "pegboard",
"44166": "pelican",
"44211": "pellet",
"44212": "pelt",
"44213": "pelvis",
"44214": "penalize",
"44215": "penalty",
"44216": "pencil",
"44221": "pendant",
"44222": "pending",
"44223": "penholder",
"44224": "penknife",
"44225": "pennant",
"44226": "penniless",
"44231": "penny",
"44232": "penpal",
"44233": "pension",
"44234": "pentagon",
"44235": "pentagram",
"44236": "pep",
"44241": "perceive",
"44242": "percent",
"44243": "perch",
"44244": "percolate",
"44245": "perennial",
"44246": "perfected",
"44251": "perfectly",
"44252": "perfume",
"44253": "periscope",
"44254": "perish",
"44255": "perjurer",
"44256": "perjury",
"44261": "perkiness",
"44262": "perky",
"44263": "perm",
"44264": "peroxide",
"44265": "perpetual",
"44266": "perplexed",
"44311": "persecute",
"44312": "persevere",
"44313": "persuaded",
"44314": "persuader",
"44315": "pesky",
"44316": "peso",
"44321": "pessimism",
"44322": "pessimist",
"44323": "pester",
"44324": "pesticide",
"44325": "petal",
"44326": "petite",
"44331": "petition",
"44332": "petri",
"44333": "petroleum",
"44334": "petted",
"44335": "petticoat",
"44336": "pettiness",
"44341": "petty",
"44342": "petunia",
"44343": "phantom",
"44344": "phobia",
"44345": "phoenix",
"44346": "phonebook",
"44351": "phoney",
"44352": "phonics",
"44353": "phoniness",
"44354": "phony",
"44355": "phosphate",
"44356": "photo",
"44361": "phrase",
"44362": "phrasing",
"44363": "placard",
"44364": "placate",
"44365": "placidly",
"44366": "plank",
"44411": "planner",
"44412": "plant",
"44413": "plasma",
"44414": "plaster",
"44415": "plastic",
"44416": "plated",
"44421": "platform",
"44422": "plating",
"44423": "platinum",
"44424": "platonic",
"44425": "platter",
"44426": "platypus",
"44431": "plausible",
"44432": "plausibly",
"44433": "playable",
"44434": "playback",
"44435": "player",
"44436": "playful",
"44441": "playgroup",
"44442": "playhouse",
"44443": "playing",
"44444": "playlist",
"44445": "playmaker",
"44446": "playmate",
"44451": "playoff",
"44452": "playpen",
"44453": "playroom",
"44454": "playset",
"44455": "plaything",
"44456": "playtime",
"44461": "plaza",
"44462": "pleading",
"44463": "pleat",
"44464": "pledge",
"44465": "plentiful",
"44466": "plenty",
"44511": "plethora",
"44512": "plexiglas",
"44513": "pliable",
"44514": "plod",
"44515": "plop",
"44516": "plot",
"44521": "plow",
"44522": "ploy",
"44523": "pluck",
"44524": "plug",
"44525": "plunder",
"44526": "plunging",
"44531": "plural",
"44532": "plus",
"44533": "plutonium",
"44534": "plywood",
"44535": "poach",
"44536": "pod",
"44541": "poem",
"44542": "poet",
"44543": "pogo",
"44544": "pointed",
"44545": "pointer",
"44546": "pointing",
"44551": "pointless",
"44552": "pointy",
"44553": "poise",
"44554": "poison",
"44555": "poker",
"44556": "poking",
"44561": "polar",
"44562": "police",
"44563": "policy",
"44564": "polio",
"44565": "polish",
"44566": "politely",
"44611": "polka",
"44612": "polo",
"44613": "polyester",
"44614": "polygon",
"44615": "polygraph",
"44616": "polymer",
"44621": "poncho",
"44622": "pond",
"44623": "pony",
"44624": "popcorn",
"44625": "pope",
"44626": "poplar",
"44631": "popper",
"44632": "poppy",
"44633": "popsicle",
"44634": "populace",
"44635": "popular",
"44636": "populate",
"44641": "porcupine",
"44642": "pork",
"44643": "porous",
"44644": "porridge",
"44645": "portable",
"44646": "portal",
"44651": "portfolio",
"44652": "porthole",
"44653": "portion",
"44654": "portly",
"44655": "portside",
"44656": "poser",
"44661": "posh",
"44662": "posing",
"44663": "possible",
"44664": "possibly",
"44665": "possum",
"44666": "postage",
"45111": "postal",
"45112": "postbox",
"45113": "postcard",
"45114": "posted",
"45115": "poster",
"45116": "posting",
"45121": "postnasal",
"45122": "posture",
"45123": "postwar",
"45124": "pouch",
"45125": "pounce",
"45126": "pouncing",
"45131": "pound",
"45132": "pouring",
"45133": "pout",
"45134": "powdered",
"45135": "powdering",
"45136": "powdery",
"45141": "power",
"45142": "powwow",
"45143": "pox",
"45144": "praising",
"45145": "prance",
"45146": "prancing",
"45151": "pranker",
"45152": "prankish",
"45153": "prankster",
"45154": "prayer",
"45155": "praying",
"45156": "preacher",
"45161": "preaching",
"45162": "preachy",
"45163": "preamble",
"45164": "precinct",
"45165": "precise",
"45166": "precision",
"45211": "precook",
"45212": "precut",
"45213": "predator",
"45214": "predefine",
"45215": "predict",
"45216": "preface",
"45221": "prefix",
"45222": "preflight",
"45223": "preformed",
"45224": "pregame",
"45225": "pregnancy",
"45226": "pregnant",
"45231": "preheated",
"45232": "prelaunch",
"45233": "prelaw",
"45234": "prelude",
"45235": "premiere",
"45236": "premises",
"45241": "premium",
"45242": "prenatal",
"45243": "preoccupy",
"45244": "preorder",
"45245": "prepaid",
"45246": "prepay",
"45251": "preplan",
"45252": "preppy",
"45253": "preschool",
"45254": "prescribe",
"45255": "preseason",
"45256": "preset",
"45261": "preshow",
"45262": "president",
"45263": "presoak",
"45264": "press",
"45265": "presume",
"45266": "presuming",
"45311": "preteen",
"45312": "pretended",
"45313": "pretender",
"45314": "pretense",
"45315": "pretext",
"45316": "pretty",
"45321": "pretzel",
"45322": "prevail",
"45323": "prevalent",
"45324": "prevent",
"45325": "preview",
"45326": "previous",
"45331": "prewar",
"45332": "prewashed",
"45333": "prideful",
"45334": "pried",
"45335": "primal",
"45336": "primarily",
"45341": "primary",
"45342": "primate",
"45343": "primer",
"45344": "primp",
"45345": "princess",
"45346": "print",
"45351": "prior",
"45352": "prism",
"45353": "prison",
"45354": "prissy",
"45355": "pristine",
"45356": "privacy",
"45361": "private",
"45362": "privatize",
"45363": "prize",
"45364": "proactive",
"45365": "probable",
"45366": "probably",
"45411": "probation",
"45412": "probe",
"45413": "probing",
"45414": "probiotic",
"45415": "problem",
"45416": "procedure",
"45421": "process",
"45422": "proclaim",
"45423": "procreate",
"45424": "procurer",
"45425": "prodigal",
"45426": "prodigy",
"45431": "produce",
"45432": "product",
"45433": "profane",
"45434": "profanity",
"45435": "professed",
"45436": "professor",
"45441": "profile",
"45442": "profound",
"45443": "profusely",
"45444": "progeny",
"45445": "prognosis",
"45446": "program",
"45451": "progress",
"45452": "projector",
"45453": "prologue",
"45454": "prolonged",
"45455": "promenade",
"45456": "prominent",
"45461": "promoter",
"45462": "promotion",
"45463": "prompter",
"45464": "promptly",
"45465": "prone",
"45466": "prong",
"45511": "pronounce",
"45512": "pronto",
"45513": "proofing",
"45514": "proofread",
"45515": "proofs",
"45516": "propeller",
"45521": "properly",
"45522": "property",
"45523": "proponent",
"45524": "proposal",
"45525": "propose",
"45526": "props",
"45531": "prorate",
"45532": "protector",
"45533": "protegee",
"45534": "proton",
"45535": "prototype",
"45536": "protozoan",
"45541": "protract",
"45542": "protrude",
"45543": "proud",
"45544": "provable",
"45545": "proved",
"45546": "proven",
"45551": "provided",
"45552": "provider",
"45553": "providing",
"45554": "province",
"45555": "proving",
"45556": "provoke",
"45561": "provoking",
"45562": "provolone",
"45563": "prowess",
"45564": "prowler",
"45565": "prowling",
"45566": "proximity",
"45611": "proxy",
"45612": "prozac",
"45613": "prude",
"45614": "prudishly",
"45615": "prune",
"45616": "pruning",
"45621": "pry",
"45622": "psychic",
"45623": "public",
"45624": "publisher",
"45625": "pucker",
"45626": "pueblo",
"45631": "pug",
"45632": "pull",
"45633": "pulmonary",
"45634": "pulp",
"45635": "pulsate",
"45636": "pulse",
"45641": "pulverize",
"45642": "puma",
"45643": "pumice",
"45644": "pummel",
"45645": "punch",
"45646": "punctual",
"45651": "punctuate",
"45652": "punctured",
"45653": "pungent",
"45654": "punisher",
"45655": "punk",
"45656": "pupil",
"45661": "puppet",
"45662": "puppy",
"45663": "purchase",
"45664": "pureblood",
"45665": "purebred",
"45666": "purely",
"46111": "pureness",
"46112": "purgatory",
"46113": "purge",
"46114": "purging",
"46115": "purifier",
"46116": "purify",
"46121": "purist",
"46122": "puritan",
"46123": "purity",
"46124": "purple",
"46125": "purplish",
"46126": "purposely",
"46131": "purr",
"46132": "purse",
"46133": "pursuable",
"46134": "pursuant",
"46135": "pursuit",
"46136": "purveyor",
"46141": "pushcart",
"46142": "pushchair",
"46143": "pusher",
"46144": "pushiness",
"46145": "pushing",
"46146": "pushover",
"46151": "pushpin",
"46152": "pushup",
"46153": "pushy",
"46154": "putdown",
"46155": "putt",
"46156": "puzzle",
"46161": "puzzling",
"46162": "pyramid",
"46163": "pyromania",
"46164": "python",
"46165": "quack",
"46166": "quadrant",
"46211": "quail",
"46212": "quaintly",
"46213": "quake",
"46214": "quaking",
"46215": "qualified",
"46216": "qualifier",
"46221": "qualify",
"46222": "quality",
"46223": "qualm",
"46224": "quantum",
"46225": "quarrel",
"46226": "quarry",
"46231": "quartered",
"46232": "quarterly",
"46233": "quarters",
"46234": "quartet",
"46235": "quench",
"46236": "query",
"46241": "quicken",
"46242": "quickly",
"46243": "quickness",
"46244": "quicksand",
"46245": "quickstep",
"46246": "quiet",
"46251": "quill",
"46252": "quilt",
"46253": "quintet",
"46254": "quintuple",
"46255": "quirk",
"46256": "quit",
"46261": "quiver",
"46262": "quizzical",
"46263": "quotable",
"46264": "quotation",
"46265": "quote",
"46266": "rabid",
"46311": "race",
"46312": "racing",
"46313": "racism",
"46314": "rack",
"46315": "racoon",
"46316": "radar",
"46321": "radial",
"46322": "radiance",
"46323": "radiantly",
"46324": "radiated",
"46325": "radiation",
"46326": "radiator",
"46331": "radio",
"46332": "radish",
"46333": "raffle",
"46334": "raft",
"46335": "rage",
"46336": "ragged",
"46341": "raging",
"46342": "ragweed",
"46343": "raider",
"46344": "railcar",
"46345": "railing",
"46346": "railroad",
"46351": "railway",
"46352": "raisin",
"46353": "rake",
"46354": "raking",
"46355": "rally",
"46356": "ramble",
"46361": "rambling",
"46362": "ramp",
"46363": "ramrod",
"46364": "ranch",
"46365": "rancidity",
"46366": "random",
"46411": "ranged",
"46412": "ranger",
"46413": "ranging",
"46414": "ranked",
"46415": "ranking",
"46416": "ransack",
"46421": "ranting",
"46422": "rants",
"46423": "rare",
"46424": "rarity",
"46425": "rascal",
"46426": "rash",
"46431": "rasping",
"46432": "ravage",
"46433": "raven",
"46434": "ravine",
"46435": "raving",
"46436": "ravioli",
"46441": "ravishing",
"46442": "reabsorb",
"46443": "reach",
"46444": "reacquire",
"46445": "reaction",
"46446": "reactive",
"46451": "reactor",
"46452": "reaffirm",
"46453": "ream",
"46454": "reanalyze",
"46455": "reappear",
"46456": "reapply",
"46461": "reappoint",
"46462": "reapprove",
"46463": "rearrange",
"46464": "rearview",
"46465": "reason",
"46466": "reassign",
"46511": "reassure",
"46512": "reattach",
"46513": "reawake",
"46514": "rebalance",
"46515": "rebate",
"46516": "rebel",
"46521": "rebirth",
"46522": "reboot",
"46523": "reborn",
"46524": "rebound",
"46525": "rebuff",
"46526": "rebuild",
"46531": "rebuilt",
"46532": "reburial",
"46533": "rebuttal",
"46534": "recall",
"46535": "recant",
"46536": "recapture",
"46541": "recast",
"46542": "recede",
"46543": "recent",
"46544": "recess",
"46545": "recharger",
"46546": "recipient",
"46551": "recital",
"46552": "recite",
"46553": "reckless",
"46554": "reclaim",
"46555": "recliner",
"46556": "reclining",
"46561": "recluse",
"46562": "reclusive",
"46563": "recognize",
"46564": "recoil",
"46565": "recollect",
"46566": "recolor",
"46611": "reconcile",
"46612": "reconfirm",
"46613": "reconvene",
"46614": "recopy",
"46615": "record",
"46616": "recount",
"46621": "recoup",
"46622": "recovery",
"46623": "recreate",
"46624": "rectal",
"46625": "rectangle",
"46626": "rectified",
"46631": "rectify",
"46632": "recycled",
"46633": "recycler",
"46634": "recycling",
"46635": "reemerge",
"46636": "reenact",
"46641": "reenter",
"46642": "reentry",
"46643": "reexamine",
"46644": "referable",
"46645": "referee",
"46646": "reference",
"46651": "refill",
"46652": "refinance",
"46653": "refined",
"46654": "refinery",
"46655": "refining",
"46656": "refinish",
"46661": "reflected",
"46662": "reflector",
"46663": "reflex",
"46664": "reflux",
"46665": "refocus",
"46666": "refold",
"51111": "reforest",
"51112": "reformat",
"51113": "reformed",
"51114": "reformer",
"51115": "reformist",
"51116": "refract",
"51121": "refrain",
"51122": "refreeze",
"51123": "refresh",
"51124": "refried",
"51125": "refueling",
"51126": "refund",
"51131": "refurbish",
"51132": "refurnish",
"51133": "refusal",
"51134": "refuse",
"51135": "refusing",
"51136": "refutable",
"51141": "refute",
"51142": "regain",
"51143": "regalia",
"51144": "regally",
"51145": "reggae",
"51146": "regime",
"51151": "region",
"51152": "register",
"51153": "registrar",
"51154": "registry",
"51155": "regress",
"51156": "regretful",
"51161": "regroup",
"51162": "regular",
"51163": "regulate",
"51164": "regulator",
"51165": "rehab",
"51166": "reheat",
"51211": "rehire",
"51212": "rehydrate",
"51213": "reimburse",
"51214": "reissue",
"51215": "reiterate",
"51216": "rejoice",
"51221": "rejoicing",
"51222": "rejoin",
"51223": "rekindle",
"51224": "relapse",
"51225": "relapsing",
"51226": "relatable",
"51231": "related",
"51232": "relation",
"51233": "relative",
"51234": "relax",
"51235": "relay",
"51236": "relearn",
"51241": "release",
"51242": "relenting",
"51243": "reliable",
"51244": "reliably",
"51245": "reliance",
"51246": "reliant",
"51251": "relic",
"51252": "relieve",
"51253": "relieving",
"51254": "relight",
"51255": "relish",
"51256": "relive",
"51261": "reload",
"51262": "relocate",
"51263": "relock",
"51264": "reluctant",
"51265": "rely",
"51266": "remake",
"51311": "remark",
"51312": "remarry",
"51313": "rematch",
"51314": "remedial",
"51315": "remedy",
"51316": "remember",
"51321": "reminder",
"51322": "remindful",
"51323": "remission",
"51324": "remix",
"51325": "remnant",
"51326": "remodeler",
"51331": "remold",
"51332": "remorse",
"51333": "remote",
"51334": "removable",
"51335": "removal",
"51336": "removed",
"51341": "remover",
"51342": "removing",
"51343": "rename",
"51344": "renderer",
"51345": "rendering",
"51346": "rendition",
"51351": "renegade",
"51352": "renewable",
"51353": "renewably",
"51354": "renewal",
"51355": "renewed",
"51356": "renounce",
"51361": "renovate",
"51362": "renovator",
"51363": "rentable",
"51364": "rental",
"51365": "rented",
"51366": "renter",
"51411": "reoccupy",
"51412": "reoccur",
"51413": "reopen",
"51414": "reorder",
"51415": "repackage",
"51416": "repacking",
"51421": "repaint",
"51422": "repair",
"51423": "repave",
"51424": "repaying",
"51425": "repayment",
"51426": "repeal",
"51431": "repeated",
"51432": "repeater",
"51433": "repent",
"51434": "rephrase",
"51435": "replace",
"51436": "replay",
"51441": "replica",
"51442": "reply",
"51443": "reporter",
"51444": "repose",
"51445": "repossess",
"51446": "repost",
"51451": "repressed",
"51452": "reprimand",
"51453": "reprint",
"51454": "reprise",
"51455": "reproach",
"51456": "reprocess",
"51461": "reproduce",
"51462": "reprogram",
"51463": "reps",
"51464": "reptile",
"51465": "reptilian",
"51466": "repugnant",
"51511": "repulsion",
"51512": "repulsive",
"51513": "repurpose",
"51514": "reputable",
"51515": "reputably",
"51516": "request",
"51521": "require",
"51522": "requisite",
"51523": "reroute",
"51524": "rerun",
"51525": "resale",
"51526": "resample",
"51531": "rescuer",
"51532": "reseal",
"51533": "research",
"51534": "reselect",
"51535": "reseller",
"51536": "resemble",
"51541": "resend",
"51542": "resent",
"51543": "reset",
"51544": "reshape",
"51545": "reshoot",
"51546": "reshuffle",
"51551": "residence",
"51552": "residency",
"51553": "resident",
"51554": "residual",
"51555": "residue",
"51556": "resigned",
"51561": "resilient",
"51562": "resistant",
"51563": "resisting",
"51564": "resize",
"51565": "resolute",
"51566": "resolved",
"51611": "resonant",
"51612": "resonate",
"51613": "resort",
"51614": "resource",
"51615": "respect",
"51616": "resubmit",
"51621": "result",
"51622": "resume",
"51623": "resupply",
"51624": "resurface",
"51625": "resurrect",
"51626": "retail",
"51631": "retainer",
"51632": "retaining",
"51633": "retake",
"51634": "retaliate",
"51635": "retention",
"51636": "rethink",
"51641": "retinal",
"51642": "retired",
"51643": "retiree",
"51644": "retiring",
"51645": "retold",
"51646": "retool",
"51651": "retorted",
"51652": "retouch",
"51653": "retrace",
"51654": "retract",
"51655": "retrain",
"51656": "retread",
"51661": "retreat",
"51662": "retrial",
"51663": "retrieval",
"51664": "retriever",
"51665": "retry",
"51666": "return",
"52111": "retying",
"52112": "retype",
"52113": "reunion",
"52114": "reunite",
"52115": "reusable",
"52116": "reuse",
"52121": "reveal",
"52122": "reveler",
"52123": "revenge",
"52124": "revenue",
"52125": "reverb",
"52126": "revered",
"52131": "reverence",
"52132": "reverend",
"52133": "reversal",
"52134": "reverse",
"52135": "reversing",
"52136": "reversion",
"52141": "revert",
"52142": "revisable",
"52143": "revise",
"52144": "revision",
"52145": "revisit",
"52146": "revivable",
"52151": "revival",
"52152": "reviver",
"52153": "reviving",
"52154": "revocable",
"52155": "revoke",
"52156": "revolt",
"52161": "revolver",
"52162": "revolving",
"52163": "reward",
"52164": "rewash",
"52165": "rewind",
"52166": "rewire",
"52211": "reword",
"52212": "rework",
"52213": "rewrap",
"52214": "rewrite",
"52215": "rhyme",
"52216": "ribbon",
"52221": "ribcage",
"52222": "rice",
"52223": "riches",
"52224": "richly",
"52225": "richness",
"52226": "rickety",
"52231": "ricotta",
"52232": "riddance",
"52233": "ridden",
"52234": "ride",
"52235": "riding",
"52236": "rifling",
"52241": "rift",
"52242": "rigging",
"52243": "rigid",
"52244": "rigor",
"52245": "rimless",
"52246": "rimmed",
"52251": "rind",
"52252": "rink",
"52253": "rinse",
"52254": "rinsing",
"52255": "riot",
"52256": "ripcord",
"52261": "ripeness",
"52262": "ripening",
"52263": "ripping",
"52264": "ripple",
"52265": "rippling",
"52266": "riptide",
"52311": "rise",
"52312": "rising",
"52313": "risk",
"52314": "risotto",
"52315": "ritalin",
"52316": "ritzy",
"52321": "rival",
"52322": "riverbank",
"52323": "riverbed",
"52324": "riverboat",
"52325": "riverside",
"52326": "riveter",
"52331": "riveting",
"52332": "roamer",
"52333": "roaming",
"52334": "roast",
"52335": "robbing",
"52336": "robe",
"52341": "robin",
"52342": "robotics",
"52343": "robust",
"52344": "rockband",
"52345": "rocker",
"52346": "rocket",
"52351": "rockfish",
"52352": "rockiness",
"52353": "rocking",
"52354": "rocklike",
"52355": "rockslide",
"52356": "rockstar",
"52361": "rocky",
"52362": "rogue",
"52363": "roman",
"52364": "romp",
"52365": "rope",
"52366": "roping",
"52411": "roster",
"52412": "rosy",
"52413": "rotten",
"52414": "rotting",
"52415": "rotunda",
"52416": "roulette",
"52421": "rounding",
"52422": "roundish",
"52423": "roundness",
"52424": "roundup",
"52425": "roundworm",
"52426": "routine",
"52431": "routing",
"52432": "rover",
"52433": "roving",
"52434": "royal",
"52435": "rubbed",
"52436": "rubber",
"52441": "rubbing",
"52442": "rubble",
"52443": "rubdown",
"52444": "ruby",
"52445": "ruckus",
"52446": "rudder",
"52451": "rug",
"52452": "ruined",
"52453": "rule",
"52454": "rumble",
"52455": "rumbling",
"52456": "rummage",
"52461": "rumor",
"52462": "runaround",
"52463": "rundown",
"52464": "runner",
"52465": "running",
"52466": "runny",
"52511": "runt",
"52512": "runway",
"52513": "rupture",
"52514": "rural",
"52515": "ruse",
"52516": "rush",
"52521": "rust",
"52522": "rut",
"52523": "sabbath",
"52524": "sabotage",
"52525": "sacrament",
"52526": "sacred",
"52531": "sacrifice",
"52532": "sadden",
"52533": "saddlebag",
"52534": "saddled",
"52535": "saddling",
"52536": "sadly",
"52541": "sadness",
"52542": "safari",
"52543": "safeguard",
"52544": "safehouse",
"52545": "safely",
"52546": "safeness",
"52551": "saffron",
"52552": "saga",
"52553": "sage",
"52554": "sagging",
"52555": "saggy",
"52556": "said",
"52561": "saint",
"52562": "sake",
"52563": "salad",
"52564": "salami",
"52565": "salaried",
"52566": "salary",
"52611": "saline",
"52612": "salon",
"52613": "saloon",
"52614": "salsa",
"52615": "salt",
"52616": "salutary",
"52621": "salute",
"52622": "salvage",
"52623": "salvaging",
"52624": "salvation",
"52625": "same",
"52626": "sample",
"52631": "sampling",
"52632": "sanction",
"52633": "sanctity",
"52634": "sanctuary",
"52635": "sandal",
"52636": "sandbag",
"52641": "sandbank",
"52642": "sandbar",
"52643": "sandblast",
"52644": "sandbox",
"52645": "sanded",
"52646": "sandfish",
"52651": "sanding",
"52652": "sandlot",
"52653": "sandpaper",
"52654": "sandpit",
"52655": "sandstone",
"52656": "sandstorm",
"52661": "sandworm",
"52662": "sandy",
"52663": "sanitary",
"52664": "sanitizer",
"52665": "sank",
"52666": "santa",
"53111": "sapling",
"53112": "sappiness",
"53113": "sappy",
"53114": "sarcasm",
"53115": "sarcastic",
"53116": "sardine",
"53121": "sash",
"53122": "sasquatch",
"53123": "sassy",
"53124": "satchel",
"53125": "satiable",
"53126": "satin",
"53131": "satirical",
"53132": "satisfied",
"53133": "satisfy",
"53134": "saturate",
"53135": "saturday",
"53136": "sauciness",
"53141": "saucy",
"53142": "sauna",
"53143": "savage",
"53144": "savanna",
"53145": "saved",
"53146": "savings",
"53151": "savior",
"53152": "savor",
"53153": "saxophone",
"53154": "say",
"53155": "scabbed",
"53156": "scabby",
"53161": "scalded",
"53162": "scalding",
"53163": "scale",
"53164": "scaling",
"53165": "scallion",
"53166": "scallop",
"53211": "scalping",
"53212": "scam",
"53213": "scandal",
"53214": "scanner",
"53215": "scanning",
"53216": "scant",
"53221": "scapegoat",
"53222": "scarce",
"53223": "scarcity",
"53224": "scarecrow",
"53225": "scared",
"53226": "scarf",
"53231": "scarily",
"53232": "scariness",
"53233": "scarring",
"53234": "scary",
"53235": "scavenger",
"53236": "scenic",
"53241": "schedule",
"53242": "schematic",
"53243": "scheme",
"53244": "scheming",
"53245": "schilling",
"53246": "schnapps",
"53251": "scholar",
"53252": "science",
"53253": "scientist",
"53254": "scion",
"53255": "scoff",
"53256": "scolding",
"53261": "scone",
"53262": "scoop",
"53263": "scooter",
"53264": "scope",
"53265": "scorch",
"53266": "scorebook",
"53311": "scorecard",
"53312": "scored",
"53313": "scoreless",
"53314": "scorer",
"53315": "scoring",
"53316": "scorn",
"53321": "scorpion",
"53322": "scotch",
"53323": "scoundrel",
"53324": "scoured",
"53325": "scouring",
"53326": "scouting",
"53331": "scouts",
"53332": "scowling",
"53333": "scrabble",
"53334": "scraggly",
"53335": "scrambled",
"53336": "scrambler",
"53341": "scrap",
"53342": "scratch",
"53343": "scrawny",
"53344": "screen",
"53345": "scribble",
"53346": "scribe",
"53351": "scribing",
"53352": "scrimmage",
"53353": "script",
"53354": "scroll",
"53355": "scrooge",
"53356": "scrounger",
"53361": "scrubbed",
"53362": "scrubber",
"53363": "scruffy",
"53364": "scrunch",
"53365": "scrutiny",
"53366": "scuba",
"53411": "scuff",
"53412": "sculptor",
"53413": "sculpture",
"53414": "scurvy",
"53415": "scuttle",
"53416": "secluded",
"53421": "secluding",
"53422": "seclusion",
"53423": "second",
"53424": "secrecy",
"53425": "secret",
"53426": "sectional",
"53431": "sector",
"53432": "secular",
"53433": "securely",
"53434": "security",
"53435": "sedan",
"53436": "sedate",
"53441": "sedation",
"53442": "sedative",
"53443": "sediment",
"53444": "seduce",
"53445": "seducing",
"53446": "segment",
"53451": "seismic",
"53452": "seizing",
"53453": "seldom",
"53454": "selected",
"53455": "selection",
"53456": "selective",
"53461": "selector",
"53462": "self",
"53463": "seltzer",
"53464": "semantic",
"53465": "semester",
"53466": "semicolon",
"53511": "semifinal",
"53512": "seminar",
"53513": "semisoft",
"53514": "semisweet",
"53515": "senate",
"53516": "senator",
"53521": "send",
"53522": "senior",
"53523": "senorita",
"53524": "sensation",
"53525": "sensitive",
"53526": "sensitize",
"53531": "sensually",
"53532": "sensuous",
"53533": "sepia",
"53534": "september",
"53535": "septic",
"53536": "septum",
"53541": "sequel",
"53542": "sequence",
"53543": "sequester",
"53544": "series",
"53545": "sermon",
"53546": "serotonin",
"53551": "serpent",
"53552": "serrated",
"53553": "serve",
"53554": "service",
"53555": "serving",
"53556": "sesame",
"53561": "sessions",
"53562": "setback",
"53563": "setting",
"53564": "settle",
"53565": "settling",
"53566": "setup",
"53611": "sevenfold",
"53612": "seventeen",
"53613": "seventh",
"53614": "seventy",
"53615": "severity",
"53616": "shabby",
"53621": "shack",
"53622": "shaded",
"53623": "shadily",
"53624": "shadiness",
"53625": "shading",
"53626": "shadow",
"53631": "shady",
"53632": "shaft",
"53633": "shakable",
"53634": "shakily",
"53635": "shakiness",
"53636": "shaking",
"53641": "shaky",
"53642": "shale",
"53643": "shallot",
"53644": "shallow",
"53645": "shame",
"53646": "shampoo",
"53651": "shamrock",
"53652": "shank",
"53653": "shanty",
"53654": "shape",
"53655": "shaping",
"53656": "share",
"53661": "sharpener",
"53662": "sharper",
"53663": "sharpie",
"53664": "sharply",
"53665": "sharpness",
"53666": "shawl",
"54111": "sheath",
"54112": "shed",
"54113": "sheep",
"54114": "sheet",
"54115": "shelf",
"54116": "shell",
"54121": "shelter",
"54122": "shelve",
"54123": "shelving",
"54124": "sherry",
"54125": "shield",
"54126": "shifter",
"54131": "shifting",
"54132": "shiftless",
"54133": "shifty",
"54134": "shimmer",
"54135": "shimmy",
"54136": "shindig",
"54141": "shine",
"54142": "shingle",
"54143": "shininess",
"54144": "shining",
"54145": "shiny",
"54146": "ship",
"54151": "shirt",
"54152": "shivering",
"54153": "shock",
"54154": "shone",
"54155": "shoplift",
"54156": "shopper",
"54161": "shopping",
"54162": "shoptalk",
"54163": "shore",
"54164": "shortage",
"54165": "shortcake",
"54166": "shortcut",
"54211": "shorten",
"54212": "shorter",
"54213": "shorthand",
"54214": "shortlist",
"54215": "shortly",
"54216": "shortness",
"54221": "shorts",
"54222": "shortwave",
"54223": "shorty",
"54224": "shout",
"54225": "shove",
"54226": "showbiz",
"54231": "showcase",
"54232": "showdown",
"54233": "shower",
"54234": "showgirl",
"54235": "showing",
"54236": "showman",
"54241": "shown",
"54242": "showoff",
"54243": "showpiece",
"54244": "showplace",
"54245": "showroom",
"54246": "showy",
"54251": "shrank",
"54252": "shrapnel",
"54253": "shredder",
"54254": "shredding",
"54255": "shrewdly",
"54256": "shriek",
"54261": "shrill",
"54262": "shrimp",
"54263": "shrine",
"54264": "shrink",
"54265": "shrivel",
"54266": "shrouded",
"54311": "shrubbery",
"54312": "shrubs",
"54313": "shrug",
"54314": "shrunk",
"54315": "shucking",
"54316": "shudder",
"54321": "shuffle",
"54322": "shuffling",
"54323": "shun",
"54324": "shush",
"54325": "shut",
"54326": "shy",
"54331": "siamese",
"54332": "siberian",
"54333": "sibling",
"54334": "siding",
"54335": "sierra",
"54336": "siesta",
"54341": "sift",
"54342": "sighing",
"54343": "silenced",
"54344": "silencer",
"54345": "silent",
"54346": "silica",
"54351": "silicon",
"54352": "silk",
"54353": "silliness",
"54354": "silly",
"54355": "silo",
"54356": "silt",
"54361": "silver",
"54362": "similarly",
"54363": "simile",
"54364": "simmering",
"54365": "simple",
"54366": "simplify",
"54411": "simply",
"54412": "sincere",
"54413": "sincerity",
"54414": "singer",
"54415": "singing",
"54416": "single",
"54421": "singular",
"54422": "sinister",
"54423": "sinless",
"54424": "sinner",
"54425": "sinuous",
"54426": "sip",
"54431": "siren",
"54432": "sister",
"54433": "sitcom",
"54434": "sitter",
"54435": "sitting",
"54436": "situated",
"54441": "situation",
"54442": "sixfold",
"54443": "sixteen",
"54444": "sixth",
"54445": "sixties",
"54446": "sixtieth",
"54451": "sixtyfold",
"54452": "sizable",
"54453": "sizably",
"54454": "size",
"54455": "sizing",
"54456": "sizzle",
"54461": "sizzling",
"54462": "skater",
"54463": "skating",
"54464": "skedaddle",
"54465": "skeletal",
"54466": "skeleton",
"54511": "skeptic",
"54512": "sketch",
"54513": "skewed",
"54514": "skewer",
"54515": "skid",
"54516": "skied",
"54521": "skier",
"54522": "skies",
"54523": "skiing",
"54524": "skilled",
"54525": "skillet",
"54526": "skillful",
"54531": "skimmed",
"54532": "skimmer",
"54533": "skimming",
"54534": "skimpily",
"54535": "skincare",
"54536": "skinhead",
"54541": "skinless",
"54542": "skinning",
"54543": "skinny",
"54544": "skintight",
"54545": "skipper",
"54546": "skipping",
"54551": "skirmish",
"54552": "skirt",
"54553": "skittle",
"54554": "skydiver",
"54555": "skylight",
"54556": "skyline",
"54561": "skype",
"54562": "skyrocket",
"54563": "skyward",
"54564": "slab",
"54565": "slacked",
"54566": "slacker",
"54611": "slacking",
"54612": "slackness",
"54613": "slacks",
"54614": "slain",
"54615": "slam",
"54616": "slander",
"54621": "slang",
"54622": "slapping",
"54623": "slapstick",
"54624": "slashed",
"54625": "slashing",
"54626": "slate",
"54631": "slather",
"54632": "slaw",
"54633": "sled",
"54634": "sleek",
"54635": "sleep",
"54636": "sleet",
"54641": "sleeve",
"54642": "slept",
"54643": "sliceable",
"54644": "sliced",
"54645": "slicer",
"54646": "slicing",
"54651": "slick",
"54652": "slider",
"54653": "slideshow",
"54654": "sliding",
"54655": "slighted",
"54656": "slighting",
"54661": "slightly",
"54662": "slimness",
"54663": "slimy",
"54664": "slinging",
"54665": "slingshot",
"54666": "slinky",
"55111": "slip",
"55112": "slit",
"55113": "sliver",
"55114": "slobbery",
"55115": "slogan",
"55116": "sloped",
"55121": "sloping",
"55122": "sloppily",
"55123": "sloppy",
"55124": "slot",
"55125": "slouching",
"55126": "slouchy",
"55131": "sludge",
"55132": "slug",
"55133": "slum",
"55134": "slurp",
"55135": "slush",
"55136": "sly",
"55141": "small",
"55142": "smartly",
"55143": "smartness",
"55144": "smasher",
"55145": "smashing",
"55146": "smashup",
"55151": "smell",
"55152": "smelting",
"55153": "smile",
"55154": "smilingly",
"55155": "smirk",
"55156": "smite",
"55161": "smith",
"55162": "smitten",
"55163": "smock",
"55164": "smog",
"55165": "smoked",
"55166": "smokeless",
"55211": "smokiness",
"55212": "smoking",
"55213": "smoky",
"55214": "smolder",
"55215": "smooth",
"55216": "smother",
"55221": "smudge",
"55222": "smudgy",
"55223": "smuggler",
"55224": "smuggling",
"55225": "smugly",
"55226": "smugness",
"55231": "snack",
"55232": "snagged",
"55233": "snaking",
"55234": "snap",
"55235": "snare",
"55236": "snarl",
"55241": "snazzy",
"55242": "sneak",
"55243": "sneer",
"55244": "sneeze",
"55245": "sneezing",
"55246": "snide",
"55251": "sniff",
"55252": "snippet",
"55253": "snipping",
"55254": "snitch",
"55255": "snooper",
"55256": "snooze",
"55261": "snore",
"55262": "snoring",
"55263": "snorkel",
"55264": "snort",
"55265": "snout",
"55266": "snowbird",
"55311": "snowboard",
"55312": "snowbound",
"55313": "snowcap",
"55314": "snowdrift",
"55315": "snowdrop",
"55316": "snowfall",
"55321": "snowfield",
"55322": "snowflake",
"55323": "snowiness",
"55324": "snowless",
"55325": "snowman",
"55326": "snowplow",
"55331": "snowshoe",
"55332": "snowstorm",
"55333": "snowsuit",
"55334": "snowy",
"55335": "snub",
"55336": "snuff",
"55341": "snuggle",
"55342": "snugly",
"55343": "snugness",
"55344": "speak",
"55345": "spearfish",
"55346": "spearhead",
"55351": "spearman",
"55352": "spearmint",
"55353": "species",
"55354": "specimen",
"55355": "specked",
"55356": "speckled",
"55361": "specks",
"55362": "spectacle",
"55363": "spectator",
"55364": "spectrum",
"55365": "speculate",
"55366": "speech",
"55411": "speed",
"55412": "spellbind",
"55413": "speller",
"55414": "spelling",
"55415": "spendable",
"55416": "spender",
"55421": "spending",
"55422": "spent",
"55423": "spew",
"55424": "sphere",
"55425": "spherical",
"55426": "sphinx",
"55431": "spider",
"55432": "spied",
"55433": "spiffy",
"55434": "spill",
"55435": "spilt",
"55436": "spinach",
"55441": "spinal",
"55442": "spindle",
"55443": "spinner",
"55444": "spinning",
"55445": "spinout",
"55446": "spinster",
"55451": "spiny",
"55452": "spiral",
"55453": "spirited",
"55454": "spiritism",
"55455": "spirits",
"55456": "spiritual",
"55461": "splashed",
"55462": "splashing",
"55463": "splashy",
"55464": "splatter",
"55465": "spleen",
"55466": "splendid",
"55511": "splendor",
"55512": "splice",
"55513": "splicing",
"55514": "splinter",
"55515": "splotchy",
"55516": "splurge",
"55521": "spoilage",
"55522": "spoiled",
"55523": "spoiler",
"55524": "spoiling",
"55525": "spoils",
"55526": "spoken",
"55531": "spokesman",
"55532": "sponge",
"55533": "spongy",
"55534": "sponsor",
"55535": "spoof",
"55536": "spookily",
"55541": "spooky",
"55542": "spool",
"55543": "spoon",
"55544": "spore",
"55545": "sporting",
"55546": "sports",
"55551": "sporty",
"55552": "spotless",
"55553": "spotlight",
"55554": "spotted",
"55555": "spotter",
"55556": "spotting",
"55561": "spotty",
"55562": "spousal",
"55563": "spouse",
"55564": "spout",
"55565": "sprain",
"55566": "sprang",
"55611": "sprawl",
"55612": "spray",
"55613": "spree",
"55614": "sprig",
"55615": "spring",
"55616": "sprinkled",
"55621": "sprinkler",
"55622": "sprint",
"55623": "sprite",
"55624": "sprout",
"55625": "spruce",
"55626": "sprung",
"55631": "spry",
"55632": "spud",
"55633": "spur",
"55634": "sputter",
"55635": "spyglass",
"55636": "squabble",
"55641": "squad",
"55642": "squall",
"55643": "squander",
"55644": "squash",
"55645": "squatted",
"55646": "squatter",
"55651": "squatting",
"55652": "squeak",
"55653": "squealer",
"55654": "squealing",
"55655": "squeamish",
"55656": "squeegee",
"55661": "squeeze",
"55662": "squeezing",
"55663": "squid",
"55664": "squiggle",
"55665": "squiggly",
"55666": "squint",
"56111": "squire",
"56112": "squirt",
"56113": "squishier",
"56114": "squishy",
"56115": "stability",
"56116": "stabilize",
"56121": "stable",
"56122": "stack",
"56123": "stadium",
"56124": "staff",
"56125": "stage",
"56126": "staging",
"56131": "stagnant",
"56132": "stagnate",
"56133": "stainable",
"56134": "stained",
"56135": "staining",
"56136": "stainless",
"56141": "stalemate",
"56142": "staleness",
"56143": "stalling",
"56144": "stallion",
"56145": "stamina",
"56146": "stammer",
"56151": "stamp",
"56152": "stand",
"56153": "stank",
"56154": "staple",
"56155": "stapling",
"56156": "starboard",
"56161": "starch",
"56162": "stardom",
"56163": "stardust",
"56164": "starfish",
"56165": "stargazer",
"56166": "staring",
"56211": "stark",
"56212": "starless",
"56213": "starlet",
"56214": "starlight",
"56215": "starlit",
"56216": "starring",
"56221": "starry",
"56222": "starship",
"56223": "starter",
"56224": "starting",
"56225": "startle",
"56226": "startling",
"56231": "startup",
"56232": "starved",
"56233": "starving",
"56234": "stash",
"56235": "state",
"56236": "static",
"56241": "statistic",
"56242": "statue",
"56243": "stature",
"56244": "status",
"56245": "statute",
"56246": "statutory",
"56251": "staunch",
"56252": "stays",
"56253": "steadfast",
"56254": "steadier",
"56255": "steadily",
"56256": "steadying",
"56261": "steam",
"56262": "steed",
"56263": "steep",
"56264": "steerable",
"56265": "steering",
"56266": "steersman",
"56311": "stegosaur",
"56312": "stellar",
"56313": "stem",
"56314": "stench",
"56315": "stencil",
"56316": "step",
"56321": "stereo",
"56322": "sterile",
"56323": "sterility",
"56324": "sterilize",
"56325": "sterling",
"56326": "sternness",
"56331": "sternum",
"56332": "stew",
"56333": "stick",
"56334": "stiffen",
"56335": "stiffly",
"56336": "stiffness",
"56341": "stifle",
"56342": "stifling",
"56343": "stillness",
"56344": "stilt",
"56345": "stimulant",
"56346": "stimulate",
"56351": "stimuli",
"56352": "stimulus",
"56353": "stinger",
"56354": "stingily",
"56355": "stinging",
"56356": "stingray",
"56361": "stingy",
"56362": "stinking",
"56363": "stinky",
"56364": "stipend",
"56365": "stipulate",
"56366": "stir",
"56411": "stitch",
"56412": "stock",
"56413": "stoic",
"56414": "stoke",
"56415": "stole",
"56416": "stomp",
"56421": "stonewall",
"56422": "stoneware",
"56423": "stonework",
"56424": "stoning",
"56425": "stony",
"56426": "stood",
"56431": "stooge",
"56432": "stool",
"56433": "stoop",
"56434": "stoplight",
"56435": "stoppable",
"56436": "stoppage",
"56441": "stopped",
"56442": "stopper",
"56443": "stopping",
"56444": "stopwatch",
"56445": "storable",
"56446": "storage",
"56451": "storeroom",
"56452": "storewide",
"56453": "storm",
"56454": "stout",
"56455": "stove",
"56456": "stowaway",
"56461": "stowing",
"56462": "straddle",
"56463": "straggler",
"56464": "strained",
"56465": "strainer",
"56466": "straining",
"56511": "strangely",
"56512": "stranger",
"56513": "strangle",
"56514": "strategic",
"56515": "strategy",
"56516": "stratus",
"56521": "straw",
"56522": "stray",
"56523": "streak",
"56524": "stream",
"56525": "street",
"56526": "strength",
"56531": "strenuous",
"56532": "strep",
"56533": "stress",
"56534": "stretch",
"56535": "strewn",
"56536": "stricken",
"56541": "strict",
"56542": "stride",
"56543": "strife",
"56544": "strike",
"56545": "striking",
"56546": "strive",
"56551": "striving",
"56552": "strobe",
"56553": "strode",
"56554": "stroller",
"56555": "strongbox",
"56556": "strongly",
"56561": "strongman",
"56562": "struck",
"56563": "structure",
"56564": "strudel",
"56565": "struggle",
"56566": "strum",
"56611": "strung",
"56612": "strut",
"56613": "stubbed",
"56614": "stubble",
"56615": "stubbly",
"56616": "stubborn",
"56621": "stucco",
"56622": "stuck",
"56623": "student",
"56624": "studied",
"56625": "studio",
"56626": "study",
"56631": "stuffed",
"56632": "stuffing",
"56633": "stuffy",
"56634": "stumble",
"56635": "stumbling",
"56636": "stump",
"56641": "stung",
"56642": "stunned",
"56643": "stunner",
"56644": "stunning",
"56645": "stunt",
"56646": "stupor",
"56651": "sturdily",
"56652": "sturdy",
"56653": "styling",
"56654": "stylishly",
"56655": "stylist",
"56656": "stylized",
"56661": "stylus",
"56662": "suave",
"56663": "subarctic",
"56664": "subatomic",
"56665": "subdivide",
"56666": "subdued",
"61111": "subduing",
"61112": "subfloor",
"61113": "subgroup",
"61114": "subheader",
"61115": "subject",
"61116": "sublease",
"61121": "sublet",
"61122": "sublevel",
"61123": "sublime",
"61124": "submarine",
"61125": "submerge",
"61126": "submersed",
"61131": "submitter",
"61132": "subpanel",
"61133": "subpar",
"61134": "subplot",
"61135": "subprime",
"61136": "subscribe",
"61141": "subscript",
"61142": "subsector",
"61143": "subside",
"61144": "subsiding",
"61145": "subsidize",
"61146": "subsidy",
"61151": "subsoil",
"61152": "subsonic",
"61153": "substance",
"61154": "subsystem",
"61155": "subtext",
"61156": "subtitle",
"61161": "subtly",
"61162": "subtotal",
"61163": "subtract",
"61164": "subtype",
"61165": "suburb",
"61166": "subway",
"61211": "subwoofer",
"61212": "subzero",
"61213": "succulent",
"61214": "such",
"61215": "suction",
"61216": "sudden",
"61221": "sudoku",
"61222": "suds",
"61223": "sufferer",
"61224": "suffering",
"61225": "suffice",
"61226": "suffix",
"61231": "suffocate",
"61232": "suffrage",
"61233": "sugar",
"61234": "suggest",
"61235": "suing",
"61236": "suitable",
"61241": "suitably",
"61242": "suitcase",
"61243": "suitor",
"61244": "sulfate",
"61245": "sulfide",
"61246": "sulfite",
"61251": "sulfur",
"61252": "sulk",
"61253": "sullen",
"61254": "sulphate",
"61255": "sulphuric",
"61256": "sultry",
"61261": "superbowl",
"61262": "superglue",
"61263": "superhero",
"61264": "superior",
"61265": "superjet",
"61266": "superman",
"61311": "supermom",
"61312": "supernova",
"61313": "supervise",
"61314": "supper",
"61315": "supplier",
"61316": "supply",
"61321": "support",
"61322": "supremacy",
"61323": "supreme",
"61324": "surcharge",
"61325": "surely",
"61326": "sureness",
"61331": "surface",
"61332": "surfacing",
"61333": "surfboard",
"61334": "surfer",
"61335": "surgery",
"61336": "surgical",
"61341": "surging",
"61342": "surname",
"61343": "surpass",
"61344": "surplus",
"61345": "surprise",
"61346": "surreal",
"61351": "surrender",
"61352": "surrogate",
"61353": "surround",
"61354": "survey",
"61355": "survival",
"61356": "survive",
"61361": "surviving",
"61362": "survivor",
"61363": "sushi",
"61364": "suspect",
"61365": "suspend",
"61366": "suspense",
"61411": "sustained",
"61412": "sustainer",
"61413": "swab",
"61414": "swaddling",
"61415": "swagger",
"61416": "swampland",
"61421": "swan",
"61422": "swapping",
"61423": "swarm",
"61424": "sway",
"61425": "swear",
"61426": "sweat",
"61431": "sweep",
"61432": "swell",
"61433": "swept",
"61434": "swerve",
"61435": "swifter",
"61436": "swiftly",
"61441": "swiftness",
"61442": "swimmable",
"61443": "swimmer",
"61444": "swimming",
"61445": "swimsuit",
"61446": "swimwear",
"61451": "swinger",
"61452": "swinging",
"61453": "swipe",
"61454": "swirl",
"61455": "switch",
"61456": "swivel",
"61461": "swizzle",
"61462": "swooned",
"61463": "swoop",
"61464": "swoosh",
"61465": "swore",
"61466": "sworn",
"61511": "swung",
"61512": "sycamore",
"61513": "sympathy",
"61514": "symphonic",
"61515": "symphony",
"61516": "symptom",
"61521": "synapse",
"61522": "syndrome",
"61523": "synergy",
"61524": "synopses",
"61525": "synopsis",
"61526": "synthesis",
"61531": "synthetic",
"61532": "syrup",
"61533": "system",
"61534": "t-shirt",
"61535": "tabasco",
"61536": "tabby",
"61541": "tableful",
"61542": "tables",
"61543": "tablet",
"61544": "tableware",
"61545": "tabloid",
"61546": "tackiness",
"61551": "tacking",
"61552": "tackle",
"61553": "tackling",
"61554": "tacky",
"61555": "taco",
"61556": "tactful",
"61561": "tactical",
"61562": "tactics",
"61563": "tactile",
"61564": "tactless",
"61565": "tadpole",
"61566": "taekwondo",
"61611": "tag",
"61612": "tainted",
"61613": "take",
"61614": "taking",
"61615": "talcum",
"61616": "talisman",
"61621": "tall",
"61622": "talon",
"61623": "tamale",
"61624": "tameness",
"61625": "tamer",
"61626": "tamper",
"61631": "tank",
"61632": "tanned",
"61633": "tannery",
"61634": "tanning",
"61635": "tantrum",
"61636": "tapeless",
"61641": "tapered",
"61642": "tapering",
"61643": "tapestry",
"61644": "tapioca",
"61645": "tapping",
"61646": "taps",
"61651": "tarantula",
"61652": "target",
"61653": "tarmac",
"61654": "tarnish",
"61655": "tarot",
"61656": "tartar",
"61661": "tartly",
"61662": "tartness",
"61663": "task",
"61664": "tassel",
"61665": "taste",
"61666": "tastiness",
"62111": "tasting",
"62112": "tasty",
"62113": "tattered",
"62114": "tattle",
"62115": "tattling",
"62116": "tattoo",
"62121": "taunt",
"62122": "tavern",
"62123": "thank",
"62124": "that",
"62125": "thaw",
"62126": "theater",
"62131": "theatrics",
"62132": "thee",
"62133": "theft",
"62134": "theme",
"62135": "theology",
"62136": "theorize",
"62141": "thermal",
"62142": "thermos",
"62143": "thesaurus",
"62144": "these",
"62145": "thesis",
"62146": "thespian",
"62151": "thicken",
"62152": "thicket",
"62153": "thickness",
"62154": "thieving",
"62155": "thievish",
"62156": "thigh",
"62161": "thimble",
"62162": "thing",
"62163": "think",
"62164": "thinly",
"62165": "thinner",
"62166": "thinness",
"62211": "thinning",
"62212": "thirstily",
"62213": "thirsting",
"62214": "thirsty",
"62215": "thirteen",
"62216": "thirty",
"62221": "thong",
"62222": "thorn",
"62223": "those",
"62224": "thousand",
"62225": "thrash",
"62226": "thread",
"62231": "threaten",
"62232": "threefold",
"62233": "thrift",
"62234": "thrill",
"62235": "thrive",
"62236": "thriving",
"62241": "throat",
"62242": "throbbing",
"62243": "throng",
"62244": "throttle",
"62245": "throwaway",
"62246": "throwback",
"62251": "thrower",
"62252": "throwing",
"62253": "thud",
"62254": "thumb",
"62255": "thumping",
"62256": "thursday",
"62261": "thus",
"62262": "thwarting",
"62263": "thyself",
"62264": "tiara",
"62265": "tibia",
"62266": "tidal",
"62311": "tidbit",
"62312": "tidiness",
"62313": "tidings",
"62314": "tidy",
"62315": "tiger",
"62316": "tighten",
"62321": "tightly",
"62322": "tightness",
"62323": "tightrope",
"62324": "tightwad",
"62325": "tigress",
"62326": "tile",
"62331": "tiling",
"62332": "till",
"62333": "tilt",
"62334": "timid",
"62335": "timing",
"62336": "timothy",
"62341": "tinderbox",
"62342": "tinfoil",
"62343": "tingle",
"62344": "tingling",
"62345": "tingly",
"62346": "tinker",
"62351": "tinkling",
"62352": "tinsel",
"62353": "tinsmith",
"62354": "tint",
"62355": "tinwork",
"62356": "tiny",
"62361": "tipoff",
"62362": "tipped",
"62363": "tipper",
"62364": "tipping",
"62365": "tiptoeing",
"62366": "tiptop",
"62411": "tiring",
"62412": "tissue",
"62413": "trace",
"62414": "tracing",
"62415": "track",
"62416": "traction",
"62421": "tractor",
"62422": "trade",
"62423": "trading",
"62424": "tradition",
"62425": "traffic",
"62426": "tragedy",
"62431": "trailing",
"62432": "trailside",
"62433": "train",
"62434": "traitor",
"62435": "trance",
"62436": "tranquil",
"62441": "transfer",
"62442": "transform",
"62443": "translate",
"62444": "transpire",
"62445": "transport",
"62446": "transpose",
"62451": "trapdoor",
"62452": "trapeze",
"62453": "trapezoid",
"62454": "trapped",
"62455": "trapper",
"62456": "trapping",
"62461": "traps",
"62462": "trash",
"62463": "travel",
"62464": "traverse",
"62465": "travesty",
"62466": "tray",
"62511": "treachery",
"62512": "treading",
"62513": "treadmill",
"62514": "treason",
"62515": "treat",
"62516": "treble",
"62521": "tree",
"62522": "trekker",
"62523": "tremble",
"62524": "trembling",
"62525": "tremor",
"62526": "trench",
"62531": "trend",
"62532": "trespass",
"62533": "triage",
"62534": "trial",
"62535": "triangle",
"62536": "tribesman",
"62541": "tribunal",
"62542": "tribune",
"62543": "tributary",
"62544": "tribute",
"62545": "triceps",
"62546": "trickery",
"62551": "trickily",
"62552": "tricking",
"62553": "trickle",
"62554": "trickster",
"62555": "tricky",
"62556": "tricolor",
"62561": "tricycle",
"62562": "trident",
"62563": "tried",
"62564": "trifle",
"62565": "trifocals",
"62566": "trillion",
"62611": "trilogy",
"62612": "trimester",
"62613": "trimmer",
"62614": "trimming",
"62615": "trimness",
"62616": "trinity",
"62621": "trio",
"62622": "tripod",
"62623": "tripping",
"62624": "triumph",
"62625": "trivial",
"62626": "trodden",
"62631": "trolling",
"62632": "trombone",
"62633": "trophy",
"62634": "tropical",
"62635": "tropics",
"62636": "trouble",
"62641": "troubling",
"62642": "trough",
"62643": "trousers",
"62644": "trout",
"62645": "trowel",
"62646": "truce",
"62651": "truck",
"62652": "truffle",
"62653": "trump",
"62654": "trunks",
"62655": "trustable",
"62656": "trustee",
"62661": "trustful",
"62662": "trusting",
"62663": "trustless",
"62664": "truth",
"62665": "try",
"62666": "tubby",
"63111": "tubeless",
"63112": "tubular",
"63113": "tucking",
"63114": "tuesday",
"63115": "tug",
"63116": "tuition",
"63121": "tulip",
"63122": "tumble",
"63123": "tumbling",
"63124": "tummy",
"63125": "turban",
"63126": "turbine",
"63131": "turbofan",
"63132": "turbojet",
"63133": "turbulent",
"63134": "turf",
"63135": "turkey",
"63136": "turmoil",
"63141": "turret",
"63142": "turtle",
"63143": "tusk",
"63144": "tutor",
"63145": "tutu",
"63146": "tux",
"63151": "tweak",
"63152": "tweed",
"63153": "tweet",
"63154": "tweezers",
"63155": "twelve",
"63156": "twentieth",
"63161": "twenty",
"63162": "twerp",
"63163": "twice",
"63164": "twiddle",
"63165": "twiddling",
"63166": "twig",
"63211": "twilight",
"63212": "twine",
"63213": "twins",
"63214": "twirl",
"63215": "twistable",
"63216": "twisted",
"63221": "twister",
"63222": "twisting",
"63223": "twisty",
"63224": "twitch",
"63225": "twitter",
"63226": "tycoon",
"63231": "tying",
"63232": "tyke",
"63233": "udder",
"63234": "ultimate",
"63235": "ultimatum",
"63236": "ultra",
"63241": "umbilical",
"63242": "umbrella",
"63243": "umpire",
"63244": "unabashed",
"63245": "unable",
"63246": "unadorned",
"63251": "unadvised",
"63252": "unafraid",
"63253": "unaired",
"63254": "unaligned",
"63255": "unaltered",
"63256": "unarmored",
"63261": "unashamed",
"63262": "unaudited",
"63263": "unawake",
"63264": "unaware",
"63265": "unbaked",
"63266": "unbalance",
"63311": "unbeaten",
"63312": "unbend",
"63313": "unbent",
"63314": "unbiased",
"63315": "unbitten",
"63316": "unblended",
"63321": "unblessed",
"63322": "unblock",
"63323": "unbolted",
"63324": "unbounded",
"63325": "unboxed",
"63326": "unbraided",
"63331": "unbridle",
"63332": "unbroken",
"63333": "unbuckled",
"63334": "unbundle",
"63335": "unburned",
"63336": "unbutton",
"63341": "uncanny",
"63342": "uncapped",
"63343": "uncaring",
"63344": "uncertain",
"63345": "unchain",
"63346": "unchanged",
"63351": "uncharted",
"63352": "uncheck",
"63353": "uncivil",
"63354": "unclad",
"63355": "unclaimed",
"63356": "unclamped",
"63361": "unclasp",
"63362": "uncle",
"63363": "unclip",
"63364": "uncloak",
"63365": "unclog",
"63366": "unclothed",
"63411": "uncoated",
"63412": "uncoiled",
"63413": "uncolored",
"63414": "uncombed",
"63415": "uncommon",
"63416": "uncooked",
"63421": "uncork",
"63422": "uncorrupt",
"63423": "uncounted",
"63424": "uncouple",
"63425": "uncouth",
"63426": "uncover",
"63431": "uncross",
"63432": "uncrown",
"63433": "uncrushed",
"63434": "uncured",
"63435": "uncurious",
"63436": "uncurled",
"63441": "uncut",
"63442": "undamaged",
"63443": "undated",
"63444": "undaunted",
"63445": "undead",
"63446": "undecided",
"63451": "undefined",
"63452": "underage",
"63453": "underarm",
"63454": "undercoat",
"63455": "undercook",
"63456": "undercut",
"63461": "underdog",
"63462": "underdone",
"63463": "underfed",
"63464": "underfeed",
"63465": "underfoot",
"63466": "undergo",
"63511": "undergrad",
"63512": "underhand",
"63513": "underline",
"63514": "underling",
"63515": "undermine",
"63516": "undermost",
"63521": "underpaid",
"63522": "underpass",
"63523": "underpay",
"63524": "underrate",
"63525": "undertake",
"63526": "undertone",
"63531": "undertook",
"63532": "undertow",
"63533": "underuse",
"63534": "underwear",
"63535": "underwent",
"63536": "underwire",
"63541": "undesired",
"63542": "undiluted",
"63543": "undivided",
"63544": "undocked",
"63545": "undoing",
"63546": "undone",
"63551": "undrafted",
"63552": "undress",
"63553": "undrilled",
"63554": "undusted",
"63555": "undying",
"63556": "unearned",
"63561": "unearth",
"63562": "unease",
"63563": "uneasily",
"63564": "uneasy",
"63565": "uneatable",
"63566": "uneaten",
"63611": "unedited",
"63612": "unelected",
"63613": "unending",
"63614": "unengaged",
"63615": "unenvied",
"63616": "unequal",
"63621": "unethical",
"63622": "uneven",
"63623": "unexpired",
"63624": "unexposed",
"63625": "unfailing",
"63626": "unfair",
"63631": "unfasten",
"63632": "unfazed",
"63633": "unfeeling",
"63634": "unfiled",
"63635": "unfilled",
"63636": "unfitted",
"63641": "unfitting",
"63642": "unfixable",
"63643": "unfixed",
"63644": "unflawed",
"63645": "unfocused",
"63646": "unfold",
"63651": "unfounded",
"63652": "unframed",
"63653": "unfreeze",
"63654": "unfrosted",
"63655": "unfrozen",
"63656": "unfunded",
"63661": "unglazed",
"63662": "ungloved",
"63663": "unglue",
"63664": "ungodly",
"63665": "ungraded",
"63666": "ungreased",
"64111": "unguarded",
"64112": "unguided",
"64113": "unhappily",
"64114": "unhappy",
"64115": "unharmed",
"64116": "unhealthy",
"64121": "unheard",
"64122": "unhearing",
"64123": "unheated",
"64124": "unhelpful",
"64125": "unhidden",
"64126": "unhinge",
"64131": "unhitched",
"64132": "unholy",
"64133": "unhook",
"64134": "unicorn",
"64135": "unicycle",
"64136": "unified",
"64141": "unifier",
"64142": "uniformed",
"64143": "uniformly",
"64144": "unify",
"64145": "unimpeded",
"64146": "uninjured",
"64151": "uninstall",
"64152": "uninsured",
"64153": "uninvited",
"64154": "union",
"64155": "uniquely",
"64156": "unisexual",
"64161": "unison",
"64162": "unissued",
"64163": "unit",
"64164": "universal",
"64165": "universe",
"64166": "unjustly",
"64211": "unkempt",
"64212": "unkind",
"64213": "unknotted",
"64214": "unknowing",
"64215": "unknown",
"64216": "unlaced",
"64221": "unlatch",
"64222": "unlawful",
"64223": "unleaded",
"64224": "unlearned",
"64225": "unleash",
"64226": "unless",
"64231": "unleveled",
"64232": "unlighted",
"64233": "unlikable",
"64234": "unlimited",
"64235": "unlined",
"64236": "unlinked",
"64241": "unlisted",
"64242": "unlit",
"64243": "unlivable",
"64244": "unloaded",
"64245": "unloader",
"64246": "unlocked",
"64251": "unlocking",
"64252": "unlovable",
"64253": "unloved",
"64254": "unlovely",
"64255": "unloving",
"64256": "unluckily",
"64261": "unlucky",
"64262": "unmade",
"64263": "unmanaged",
"64264": "unmanned",
"64265": "unmapped",
"64266": "unmarked",
"64311": "unmasked",
"64312": "unmasking",
"64313": "unmatched",
"64314": "unmindful",
"64315": "unmixable",
"64316": "unmixed",
"64321": "unmolded",
"64322": "unmoral",
"64323": "unmovable",
"64324": "unmoved",
"64325": "unmoving",
"64326": "unnamable",
"64331": "unnamed",
"64332": "unnatural",
"64333": "unneeded",
"64334": "unnerve",
"64335": "unnerving",
"64336": "unnoticed",
"64341": "unopened",
"64342": "unopposed",
"64343": "unpack",
"64344": "unpadded",
"64345": "unpaid",
"64346": "unpainted",
"64351": "unpaired",
"64352": "unpaved",
"64353": "unpeeled",
"64354": "unpicked",
"64355": "unpiloted",
"64356": "unpinned",
"64361": "unplanned",
"64362": "unplanted",
"64363": "unpleased",
"64364": "unpledged",
"64365": "unplowed",
"64366": "unplug",
"64411": "unpopular",
"64412": "unproven",
"64413": "unquote",
"64414": "unranked",
"64415": "unrated",
"64416": "unraveled",
"64421": "unreached",
"64422": "unread",
"64423": "unreal",
"64424": "unreeling",
"64425": "unrefined",
"64426": "unrelated",
"64431": "unrented",
"64432": "unrest",
"64433": "unretired",
"64434": "unrevised",
"64435": "unrigged",
"64436": "unripe",
"64441": "unrivaled",
"64442": "unroasted",
"64443": "unrobed",
"64444": "unroll",
"64445": "unruffled",
"64446": "unruly",
"64451": "unrushed",
"64452": "unsaddle",
"64453": "unsafe",
"64454": "unsaid",
"64455": "unsalted",
"64456": "unsaved",
"64461": "unsavory",
"64462": "unscathed",
"64463": "unscented",
"64464": "unscrew",
"64465": "unsealed",
"64466": "unseated",
"64511": "unsecured",
"64512": "unseeing",
"64513": "unseemly",
"64514": "unseen",
"64515": "unselect",
"64516": "unselfish",
"64521": "unsent",
"64522": "unsettled",
"64523": "unshackle",
"64524": "unshaken",
"64525": "unshaved",
"64526": "unshaven",
"64531": "unsheathe",
"64532": "unshipped",
"64533": "unsightly",
"64534": "unsigned",
"64535": "unskilled",
"64536": "unsliced",
"64541": "unsmooth",
"64542": "unsnap",
"64543": "unsocial",
"64544": "unsoiled",
"64545": "unsold",
"64546": "unsolved",
"64551": "unsorted",
"64552": "unspoiled",
"64553": "unspoken",
"64554": "unstable",
"64555": "unstaffed",
"64556": "unstamped",
"64561": "unsteady",
"64562": "unsterile",
"64563": "unstirred",
"64564": "unstitch",
"64565": "unstopped",
"64566": "unstuck",
"64611": "unstuffed",
"64612": "unstylish",
"64613": "unsubtle",
"64614": "unsubtly",
"64615": "unsuited",
"64616": "unsure",
"64621": "unsworn",
"64622": "untagged",
"64623": "untainted",
"64624": "untaken",
"64625": "untamed",
"64626": "untangled",
"64631": "untapped",
"64632": "untaxed",
"64633": "unthawed",
"64634": "unthread",
"64635": "untidy",
"64636": "untie",
"64641": "until",
"64642": "untimed",
"64643": "untimely",
"64644": "untitled",
"64645": "untoasted",
"64646": "untold",
"64651": "untouched",
"64652": "untracked",
"64653": "untrained",
"64654": "untreated",
"64655": "untried",
"64656": "untrimmed",
"64661": "untrue",
"64662": "untruth",
"64663": "unturned",
"64664": "untwist",
"64665": "untying",
"64666": "unusable",
"65111": "unused",
"65112": "unusual",
"65113": "unvalued",
"65114": "unvaried",
"65115": "unvarying",
"65116": "unveiled",
"65121": "unveiling",
"65122": "unvented",
"65123": "unviable",
"65124": "unvisited",
"65125": "unvocal",
"65126": "unwanted",
"65131": "unwarlike",
"65132": "unwary",
"65133": "unwashed",
"65134": "unwatched",
"65135": "unweave",
"65136": "unwed",
"65141": "unwelcome",
"65142": "unwell",
"65143": "unwieldy",
"65144": "unwilling",
"65145": "unwind",
"65146": "unwired",
"65151": "unwitting",
"65152": "unwomanly",
"65153": "unworldly",
"65154": "unworn",
"65155": "unworried",
"65156": "unworthy",
"65161": "unwound",
"65162": "unwoven",
"65163": "unwrapped",
"65164": "unwritten",
"65165": "unzip",
"65166": "upbeat",
"65211": "upchuck",
"65212": "upcoming",
"65213": "upcountry",
"65214": "update",
"65215": "upfront",
"65216": "upgrade",
"65221": "upheaval",
"65222": "upheld",
"65223": "uphill",
"65224": "uphold",
"65225": "uplifted",
"65226": "uplifting",
"65231": "upload",
"65232": "upon",
"65233": "upper",
"65234": "upright",
"65235": "uprising",
"65236": "upriver",
"65241": "uproar",
"65242": "uproot",
"65243": "upscale",
"65244": "upside",
"65245": "upstage",
"65246": "upstairs",
"65251": "upstart",
"65252": "upstate",
"65253": "upstream",
"65254": "upstroke",
"65255": "upswing",
"65256": "uptake",
"65261": "uptight",
"65262": "uptown",
"65263": "upturned",
"65264": "upward",
"65265": "upwind",
"65266": "uranium",
"65311": "urban",
"65312": "urchin",
"65313": "urethane",
"65314": "urgency",
"65315": "urgent",
"65316": "urging",
"65321": "urologist",
"65322": "urology",
"65323": "usable",
"65324": "usage",
"65325": "useable",
"65326": "used",
"65331": "uselessly",
"65332": "user",
"65333": "usher",
"65334": "usual",
"65335": "utensil",
"65336": "utility",
"65341": "utilize",
"65342": "utmost",
"65343": "utopia",
"65344": "utter",
"65345": "vacancy",
"65346": "vacant",
"65351": "vacate",
"65352": "vacation",
"65353": "vagabond",
"65354": "vagrancy",
"65355": "vagrantly",
"65356": "vaguely",
"65361": "vagueness",
"65362": "valiant",
"65363": "valid",
"65364": "valium",
"65365": "valley",
"65366": "valuables",
"65411": "value",
"65412": "vanilla",
"65413": "vanish",
"65414": "vanity",
"65415": "vanquish",
"65416": "vantage",
"65421": "vaporizer",
"65422": "variable",
"65423": "variably",
"65424": "varied",
"65425": "variety",
"65426": "various",
"65431": "varmint",
"65432": "varnish",
"65433": "varsity",
"65434": "varying",
"65435": "vascular",
"65436": "vaseline",
"65441": "vastly",
"65442": "vastness",
"65443": "veal",
"65444": "vegan",
"65445": "veggie",
"65446": "vehicular",
"65451": "velcro",
"65452": "velocity",
"65453": "velvet",
"65454": "vendetta",
"65455": "vending",
"65456": "vendor",
"65461": "veneering",
"65462": "vengeful",
"65463": "venomous",
"65464": "ventricle",
"65465": "venture",
"65466": "venue",
"65511": "venus",
"65512": "verbalize",
"65513": "verbally",
"65514": "verbose",
"65515": "verdict",
"65516": "verify",
"65521": "verse",
"65522": "version",
"65523": "versus",
"65524": "vertebrae",
"65525": "vertical",
"65526": "vertigo",
"65531": "very",
"65532": "vessel",
"65533": "vest",
"65534": "veteran",
"65535": "veto",
"65536": "vexingly",
"65541": "viability",
"65542": "viable",
"65543": "vibes",
"65544": "vice",
"65545": "vicinity",
"65546": "victory",
"65551": "video",
"65552": "viewable",
"65553": "viewer",
"65554": "viewing",
"65555": "viewless",
"65556": "viewpoint",
"65561": "vigorous",
"65562": "village",
"65563": "villain",
"65564": "vindicate",
"65565": "vineyard",
"65566": "vintage",
"65611": "violate",
"65612": "violation",
"65613": "violator",
"65614": "violet",
"65615": "violin",
"65616": "viper",
"65621": "viral",
"65622": "virtual",
"65623": "virtuous",
"65624": "virus",
"65625": "visa",
"65626": "viscosity",
"65631": "viscous",
"65632": "viselike",
"65633": "visible",
"65634": "visibly",
"65635": "vision",
"65636": "visiting",
"65641": "visitor",
"65642": "visor",
"65643": "vista",
"65644": "vitality",
"65645": "vitalize",
"65646": "vitally",
"65651": "vitamins",
"65652": "vivacious",
"65653": "vividly",
"65654": "vividness",
"65655": "vixen",
"65656": "vocalist",
"65661": "vocalize",
"65662": "vocally",
"65663": "vocation",
"65664": "voice",
"65665": "voicing",
"65666": "void",
"66111": "volatile",
"66112": "volley",
"66113": "voltage",
"66114": "volumes",
"66115": "voter",
"66116": "voting",
"66121": "voucher",
"66122": "vowed",
"66123": "vowel",
"66124": "voyage",
"66125": "wackiness",
"66126": "wad",
"66131": "wafer",
"66132": "waffle",
"66133": "waged",
"66134": "wager",
"66135": "wages",
"66136": "waggle",
"66141": "wagon",
"66142": "wake",
"66143": "waking",
"66144": "walk",
"66145": "walmart",
"66146": "walnut",
"66151": "walrus",
"66152": "waltz",
"66153": "wand",
"66154": "wannabe",
"66155": "wanted",
"66156": "wanting",
"66161": "wasabi",
"66162": "washable",
"66163": "washbasin",
"66164": "washboard",
"66165": "washbowl",
"66166": "washcloth",
"66211": "washday",
"66212": "washed",
"66213": "washer",
"66214": "washhouse",
"66215": "washing",
"66216": "washout",
"66221": "washroom",
"66222": "washstand",
"66223": "washtub",
"66224": "wasp",
"66225": "wasting",
"66226": "watch",
"66231": "water",
"66232": "waviness",
"66233": "waving",
"66234": "wavy",
"66235": "whacking",
"66236": "whacky",
"66241": "wham",
"66242": "wharf",
"66243": "wheat",
"66244": "whenever",
"66245": "whiff",
"66246": "whimsical",
"66251": "whinny",
"66252": "whiny",
"66253": "whisking",
"66254": "whoever",
"66255": "whole",
"66256": "whomever",
"66261": "whoopee",
"66262": "whooping",
"66263": "whoops",
"66264": "why",
"66265": "wick",
"66266": "widely",
"66311": "widen",
"66312": "widget",
"66313": "widow",
"66314": "width",
"66315": "wieldable",
"66316": "wielder",
"66321": "wife",
"66322": "wifi",
"66323": "wikipedia",
"66324": "wildcard",
"66325": "wildcat",
"66326": "wilder",
"66331": "wildfire",
"66332": "wildfowl",
"66333": "wildland",
"66334": "wildlife",
"66335": "wildly",
"66336": "wildness",
"66341": "willed",
"66342": "willfully",
"66343": "willing",
"66344": "willow",
"66345": "willpower",
"66346": "wilt",
"66351": "wimp",
"66352": "wince",
"66353": "wincing",
"66354": "wind",
"66355": "wing",
"66356": "winking",
"66361": "winner",
"66362": "winnings",
"66363": "winter",
"66364": "wipe",
"66365": "wired",
"66366": "wireless",
"66411": "wiring",
"66412": "wiry",
"66413": "wisdom",
"66414": "wise",
"66415": "wish",
"66416": "wisplike",
"66421": "wispy",
"66422": "wistful",
"66423": "wizard",
"66424": "wobble",
"66425": "wobbling",
"66426": "wobbly",
"66431": "wok",
"66432": "wolf",
"66433": "wolverine",
"66434": "womanhood",
"66435": "womankind",
"66436": "womanless",
"66441": "womanlike",
"66442": "womanly",
"66443": "womb",
"66444": "woof",
"66445": "wooing",
"66446": "wool",
"66451": "woozy",
"66452": "word",
"66453": "work",
"66454": "worried",
"66455": "worrier",
"66456": "worrisome",
"66461": "worry",
"66462": "worsening",
"66463": "worshiper",
"66464": "worst",
"66465": "wound",
"66466": "woven",
"66511": "wow",
"66512": "wrangle",
"66513": "wrath",
"66514": "wreath",
"66515": "wreckage",
"66516": "wrecker",
"66521": "wrecking",
"66522": "wrench",
"66523": "wriggle",
"66524": "wriggly",
"66525": "wrinkle",
"66526": "wrinkly",
"66531": "wrist",
"66532": "writing",
"66533": "written",
"66534": "wrongdoer",
"66535": "wronged",
"66536": "wrongful",
"66541": "wrongly",
"66542": "wrongness",
"66543": "wrought",
"66544": "xbox",
"66545": "xerox",
"66546": "yahoo",
"66551": "yam",
"66552": "yanking",
"66553": "yapping",
"66554": "yard",
"66555": "yarn",
"66556": "yeah",
"66561": "yearbook",
"66562": "yearling",
"66563": "yearly",
"66564": "yearning",
"66565": "yeast",
"66566": "yelling",
"66611": "yelp",
"66612": "yen",
"66613": "yesterday",
"66614": "yiddish",
"66615": "yield",
"66616": "yin",
"66621": "yippee",
"66622": "yo-yo",
"66623": "yodel",
"66624": "yoga",
"66625": "yogurt",
"66626": "yonder",
"66631": "yoyo",
"66632": "yummy",
"66633": "zap",
"66634": "zealous",
"66635": "zebra",
"66636": "zen",
"66641": "zeppelin",
"66642": "zero",
"66643": "zestfully",
"66644": "zesty",
"66645": "zigzagged",
"66646": "zipfile",
"66651": "zipping",
"66652": "zippy",
"66653": "zips",
"66654": "zit",
"66655": "zodiac",
"66656": "zombie",
"66661": "zone",
"66662": "zoning",
"66663": "zookeeper",
"66664": "zoologist",
"66665": "zoology",
"66666": "zoom"}`)
}
package models
import (
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization"
)
// OnPremisesAgent models an on-premises agent entity. It embeds the base
// Entity and adds the agent-specific properties below.
type OnPremisesAgent struct {
    Entity
    // List of onPremisesAgentGroups that an onPremisesAgent is assigned to. Read-only. Nullable.
    agentGroups []OnPremisesAgentGroupable
    // The external IP address as detected by the service for the agent machine. Read-only
    externalIp *string
    // The name of the machine that the agent is running on. Read-only
    machineName *string
    // Possible values are: active, inactive.
    status *AgentStatus
    // The publishing types supported by this agent.
    supportedPublishingTypes []OnPremisesPublishingType
}
// NewOnPremisesAgent instantiates a new onPremisesAgent and sets the default values.
func NewOnPremisesAgent()(*OnPremisesAgent) {
    m := &OnPremisesAgent{
        Entity: *NewEntity(),
    }
    return m
}
// CreateOnPremisesAgentFromDiscriminatorValue creates a new instance of the appropriate class based on discriminator value.
// The parse node is accepted for interface compatibility; no discriminator is inspected here.
func CreateOnPremisesAgentFromDiscriminatorValue(parseNode i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, error) {
    return NewOnPremisesAgent(), nil
}
// GetAgentGroups gets the agentGroups property value. List of onPremisesAgentGroups that an onPremisesAgent is assigned to. Read-only. Nullable.
func (m *OnPremisesAgent) GetAgentGroups()([]OnPremisesAgentGroupable) {
    // Accessors tolerate a nil receiver and report the property as unset.
    if m == nil {
        return nil
    }
    return m.agentGroups
}
// GetExternalIp gets the externalIp property value. The external IP address as detected by the service for the agent machine. Read-only
func (m *OnPremisesAgent) GetExternalIp()(*string) {
    if m == nil {
        return nil
    }
    return m.externalIp
}
// GetFieldDeserializers the deserialization information for the current model.
// Each map entry binds a JSON property name to a closure that parses that
// property from a ParseNode and stores it on the receiver.
func (m *OnPremisesAgent) GetFieldDeserializers()(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error)) {
    // Start from the embedded Entity's deserializers and add agent fields.
    res := m.Entity.GetFieldDeserializers()
    res["agentGroups"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetCollectionOfObjectValues(CreateOnPremisesAgentGroupFromDiscriminatorValue)
        if err != nil {
            return err
        }
        if val != nil {
            // Narrow each parsed object to the concrete collection type.
            res := make([]OnPremisesAgentGroupable, len(val))
            for i, v := range val {
                res[i] = v.(OnPremisesAgentGroupable)
            }
            m.SetAgentGroups(res)
        }
        return nil
    }
    res["externalIp"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetStringValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetExternalIp(val)
        }
        return nil
    }
    res["machineName"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetStringValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetMachineName(val)
        }
        return nil
    }
    res["status"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetEnumValue(ParseAgentStatus)
        if err != nil {
            return err
        }
        if val != nil {
            m.SetStatus(val.(*AgentStatus))
        }
        return nil
    }
    res["supportedPublishingTypes"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetCollectionOfEnumValues(ParseOnPremisesPublishingType)
        if err != nil {
            return err
        }
        if val != nil {
            // Dereference each enum pointer into the value slice.
            res := make([]OnPremisesPublishingType, len(val))
            for i, v := range val {
                res[i] = *(v.(*OnPremisesPublishingType))
            }
            m.SetSupportedPublishingTypes(res)
        }
        return nil
    }
    return res
}
// GetMachineName gets the machineName property value. The name of the machine that the agent is running on. Read-only
func (m *OnPremisesAgent) GetMachineName()(*string) {
    // A nil receiver reads as "property unset".
    if m == nil {
        return nil
    }
    return m.machineName
}
// GetStatus gets the status property value. Possible values are: active, inactive.
func (m *OnPremisesAgent) GetStatus()(*AgentStatus) {
    if m == nil {
        return nil
    }
    return m.status
}
// GetSupportedPublishingTypes gets the supportedPublishingTypes property value. The publishing types supported by this agent.
func (m *OnPremisesAgent) GetSupportedPublishingTypes()([]OnPremisesPublishingType) {
    if m == nil {
        return nil
    }
    return m.supportedPublishingTypes
}
// Serialize serializes information the current object.
// The embedded Entity is written first, then each agent property; the first
// write error aborts serialization.
func (m *OnPremisesAgent) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {
    err := m.Entity.Serialize(writer)
    if err != nil {
        return err
    }
    if m.GetAgentGroups() != nil {
        // Up-cast the typed collection to the serializer's Parsable slice.
        cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetAgentGroups()))
        for i, v := range m.GetAgentGroups() {
            cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)
        }
        err = writer.WriteCollectionOfObjectValues("agentGroups", cast)
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteStringValue("externalIp", m.GetExternalIp())
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteStringValue("machineName", m.GetMachineName())
        if err != nil {
            return err
        }
    }
    if m.GetStatus() != nil {
        // Enums are serialized by their string name.
        cast := (*m.GetStatus()).String()
        err = writer.WriteStringValue("status", &cast)
        if err != nil {
            return err
        }
    }
    if m.GetSupportedPublishingTypes() != nil {
        err = writer.WriteCollectionOfStringValues("supportedPublishingTypes", SerializeOnPremisesPublishingType(m.GetSupportedPublishingTypes()))
        if err != nil {
            return err
        }
    }
    return nil
}
// SetAgentGroups sets the agentGroups property value. List of onPremisesAgentGroups that an onPremisesAgent is assigned to. Read-only. Nullable.
func (m *OnPremisesAgent) SetAgentGroups(value []OnPremisesAgentGroupable)() {
if m != nil {
m.agentGroups = value
}
}
// SetExternalIp sets the externalIp property value. The external IP address as detected by the service for the agent machine. Read-only
func (m *OnPremisesAgent) SetExternalIp(value *string)() {
if m != nil {
m.externalIp = value
}
}
// SetMachineName sets the machineName property value. The name of the machine that the aggent is running on. Read-only
func (m *OnPremisesAgent) SetMachineName(value *string)() {
if m != nil {
m.machineName = value
}
}
// SetStatus sets the status property value. Possible values are: active, inactive.
func (m *OnPremisesAgent) SetStatus(value *AgentStatus)() {
if m != nil {
m.status = value
}
}
// SetSupportedPublishingTypes sets the supportedPublishingTypes property value. The supportedPublishingTypes property
func (m *OnPremisesAgent) SetSupportedPublishingTypes(value []OnPremisesPublishingType)() {
if m != nil {
m.supportedPublishingTypes = value
}
} | models/on_premises_agent.go | 0.597373 | 0.432303 | on_premises_agent.go | starcoder |
package game
import (
"fmt"
"strconv"
)
// Board is the full game state: the grid of cube values, the current
// player's value, and the cube currently selected (X/Y of -1 means none).
type Board struct {
	Grid [][]int
	Player int
	SelectedCube Cube
}
// Coords is a pair of x and y grid indices.
type Coords struct {
	X int
	Y int
}
// Cube is a grid cell: its Coords plus its value (0 is neutral; otherwise
// the owning player's value — see cubeBelongsToPlayer).
type Cube struct {
	Coords Coords
	Value int
}
// GetBoardWithNoCubeSelected returns a Board for the given grid and player
// with the selection cleared (coordinates set to -1/-1).
func GetBoardWithNoCubeSelected(grid [][]int, player int) Board {
	noSelection := Cube{Coords: Coords{X: -1, Y: -1}}
	return Board{
		Grid:         grid,
		Player:       player,
		SelectedCube: noSelection,
	}
}
// GetBoard returns a Board built from the given grid, player and selected cube.
func GetBoard(grid [][]int, player int, selectedCube Cube) Board {
	return Board{
		Grid:         grid,
		Player:       player,
		SelectedCube: selectedCube,
	}
}
// GetMovablesCubes return an array of movables cube.
// A cube is movable when it sits on the board's outer ring AND is either
// neutral (value 0) or owned by the current player.
func GetMovablesCubes(board Board) []Cube {
	grid := board.Grid
	player := board.Player
	movables := []Cube{}
	for x := 0; x < len(grid); x++ {
		for y := 0; y < len(grid[x]); y++ {
			cube := Cube{
				Coords: Coords{X: x, Y: y},
				Value: grid[x][y],
			}
			// NOTE(review): the edge check is given the row length
			// len(grid[x]), not len(grid); these agree only for square
			// grids — confirm grids are always square.
			if isOutsideCube(cube.Coords, len(grid[x])) && cubeBelongsToPlayer(cube.Value, player) {
				movables = append(movables, cube)
			}
		}
	}
	return movables
}
// GetAvailablesDestinations return an array of destinations.
// Given the coordinates of an edge cube, it lists the border cells the cube
// may be pushed back in from. Returns an empty slice when coords is not on
// an edge. Assumes a square grid (size is taken from len(grid)).
func GetAvailablesDestinations(grid [][]int, coords Coords) []Coords {
	destinations := []Coords{}
	x := coords.X
	y := coords.Y
	size := len(grid)
	isOnStart := func(index int) bool {
		return index == 0
	}
	isOnEnd := func(index int, size int) bool {
		return index == size-1
	}
	isOnEdge := func(index int, size int) bool {
		return isOnStart(index) || isOnEnd(index, size)
	}
	indexStart := 0
	indexEnd := size - 1
	// If on the top or the bottom of the board, else it's on the left or right
	if isOnEdge(x, size) {
		// If not on the left of the board, we can move it to the left
		if !isOnStart(y) {
			destinations = append(destinations, Coords{X: x, Y: indexStart})
		}
		// If not on the right of the board, we can move it to the right
		if !isOnEnd(y, size) {
			destinations = append(destinations, Coords{X: x, Y: indexEnd})
		}
		// If on the top of the board, we can move it to the bottom, else to the top
		if isOnStart(x) {
			destinations = append(destinations, Coords{X: indexEnd, Y: y})
		} else {
			destinations = append(destinations, Coords{X: indexStart, Y: y})
		}
	} else if isOnEdge(y, size) {
		// Mirror of the branch above for cubes on the left/right edges.
		if !isOnStart(x) {
			destinations = append(destinations, Coords{X: indexStart, Y: y})
		}
		if !isOnEnd(x, size) {
			destinations = append(destinations, Coords{X: indexEnd, Y: y})
		}
		if isOnStart(y) {
			destinations = append(destinations, Coords{X: x, Y: indexEnd})
		} else {
			destinations = append(destinations, Coords{X: x, Y: indexStart})
		}
	}
	return destinations
}
// MoveCube from coordsStart to coordsEnd.
// Returns a new grid (the board's grid is not mutated) with the line shifted
// and the moving player's value inserted at coordsEnd. Returns nil when
// start and end share neither a row nor a column.
func MoveCube(board Board, coordsStart Coords, coordsEnd Coords) [][]int {
	newGrid := DuplicateGrid(board.Grid)
	player := board.Player
	if coordsStart.X == coordsEnd.X {
		// Same row: shift along that row directly.
		return shiftLine(newGrid, coordsStart.Y, coordsEnd.Y, coordsStart.X, player)
	}
	if coordsStart.Y == coordsEnd.Y {
		// Same column: transpose, shift as a row, transpose back.
		flippedGrid := FlipRowsAndCols(newGrid)
		shiftedFlippedGrid := shiftLine(flippedGrid, coordsStart.X, coordsEnd.X, coordsStart.Y, player)
		return FlipRowsAndCols(shiftedFlippedGrid)
	}
	return nil
}
// isOutsideCube reports whether coords lies on the outer ring of a
// size×size board.
func isOutsideCube(coords Coords, size int) bool {
	onEdge := func(i int) bool { return i == 0 || i == size-1 }
	return onEdge(coords.X) || onEdge(coords.Y)
}
// cubeBelongsToPlayer reports whether a cube with the given value may be
// taken by the player: neutral cubes (0) and the player's own cubes qualify.
func cubeBelongsToPlayer(cubeValue int, playerValue int) bool {
	switch cubeValue {
	case 0, playerValue:
		return true
	}
	return false
}
// Shift all values from xEnd to xStart in one row.
// Works on a copy: walking from xEnd toward xStart, the player's value is
// written at xEnd and each displaced cell value is propagated one step
// further, so the value originally at xStart falls off the line.
func shiftLine(grid [][]int, xStart int, xEnd int, rowIndex int, player int) [][]int {
	newGrid := DuplicateGrid(grid)
	// Walk direction depends on which side of xStart the push starts from.
	step := 1
	indexBound := xStart + 1
	if xEnd > xStart {
		step = -1
		indexBound = xStart - 1
	}
	value := player
	for index := xEnd; index != indexBound; index += step {
		// Swap the carried value into the cell and carry its old value on.
		tmpValue := newGrid[rowIndex][index]
		newGrid[rowIndex][index] = value
		value = tmpValue
	}
	return newGrid
}
// FlipRowsAndCols returns the transpose of the given square 2D array:
// element [x][y] of the source becomes element [y][x] of the result.
func FlipRowsAndCols(source [][]int) [][]int {
	n := len(source)
	out := make([][]int, n)
	for r := range out {
		out[r] = make([]int, n)
	}
	for r, row := range source {
		for c, v := range row {
			out[c][r] = v
		}
	}
	return out
}
// DuplicateGrid returns a deep copy of the 2D array: rows are freshly
// allocated, so mutating the copy never affects the original.
func DuplicateGrid(grid [][]int) [][]int {
	clone := make([][]int, len(grid))
	for i, row := range grid {
		clone[i] = make([]int, len(row))
		copy(clone[i], row)
	}
	return clone
}
// DrawBoard prints the grid to stdout for debugging, one row per line with
// no separators between cell values. It always returns 0.
func DrawBoard(grid [][]int) int {
	for i := 0; i < len(grid); i++ {
		for j := 0; j < len(grid[i]); j++ {
			// Use Print, not Printf: feeding a data-derived string in as a
			// format string misbehaves on "%" and is flagged by go vet.
			fmt.Print(strconv.Itoa(grid[i][j]))
		}
		fmt.Printf("\n")
	}
	return 0
}
package genmai
import (
"database/sql"
"errors"
"fmt"
"strings"
"time"
)
// Dialect is an interface that abstracts the SQL dialect of a database.
type Dialect interface {
	// Name returns a name of the dialect.
	// Return value must be same as the driver name.
	Name() string
	// Quote returns a quoted s.
	// It is for a column name, not a value.
	Quote(s string) string
	// PlaceHolder returns the placeholder character of the database.
	// A current number of placeholder will passed to i.
	PlaceHolder(i int) string
	// SQLType returns the SQL type of the v.
	// autoIncrement is whether the field is auto increment.
	// If "size" tag specified to struct field, it will passed to size
	// argument. If it doesn't specify, size is 0.
	SQLType(v interface{}, autoIncrement bool, size uint64) (name string, allowNull bool)
	// AutoIncrement returns the keyword of auto increment.
	AutoIncrement() string
	// FormatBool returns boolean value as string according to the value of b.
	FormatBool(b bool) string
	// LastInsertId returns an SQL to get the last inserted id.
	LastInsertId() string
}
var (
	// ErrUsingFloatType is the value passed to panic by the SQLType
	// implementations when a raw Go float type (float32/float64, their
	// pointers, or sql.NullFloat64) is used as a column type.
	ErrUsingFloatType = errors.New("float types have a rounding error problem.\n" +
		"Please use `genmai.Rat` if you want an exact value.\n" +
		"However, if you still want a float types, please use `genmai.Float32` and `Float64`.")
)
const (
	// Precision of the fixed-point number.
	// Digits of precision before the decimal point.
	decimalPrecision = 65
	// Scale of the fixed-point number.
	// Digits of precision after the decimal point.
	decimalScale = 30
)
// SQLite3Dialect represents a dialect of the SQLite3.
// It implements the Dialect interface.
type SQLite3Dialect struct{}
// Name returns name of the dialect.
func (d *SQLite3Dialect) Name() string {
	return "sqlite3"
}
// Quote returns a quoted s for a column name.
// Embedded double quotes are escaped by doubling them.
func (d *SQLite3Dialect) Quote(s string) string {
	return fmt.Sprintf(`"%s"`, strings.Replace(s, `"`, `""`, -1))
}
// PlaceHolder returns the placeholder character of the SQLite3.
func (d *SQLite3Dialect) PlaceHolder(i int) string {
	return "?"
}
// SQLType returns the SQL type of the v for SQLite3.
// Pointer and database/sql Null* types map to nullable columns; raw Go
// float types are rejected via panic(ErrUsingFloatType).
func (d *SQLite3Dialect) SQLType(v interface{}, autoIncrement bool, size uint64) (name string, allowNull bool) {
	switch v.(type) {
	case bool:
		return "boolean", false
	case *bool, sql.NullBool:
		return "boolean", true
	case int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64:
		return "integer", false
	case *int, *int8, *int16, *int32, *int64, *uint, *uint8, *uint16, *uint32, *uint64, sql.NullInt64:
		return "integer", true
	case string:
		return "text", false
	case *string, sql.NullString:
		return "text", true
	case []byte:
		return "blob", true
	case time.Time:
		return "datetime", false
	case *time.Time:
		return "datetime", true
	case Float32, Float64:
		return "real", false
	case *Float32, *Float64:
		return "real", true
	case Rat:
		return "numeric", false
	case *Rat:
		return "numeric", true
	case float32, *float32, float64, *float64, sql.NullFloat64:
		panic(ErrUsingFloatType)
	}
	panic(fmt.Errorf("SQLite3Dialect: unsupported SQL type: %T", v))
}
// AutoIncrement returns the SQLite3 auto-increment keyword.
func (d *SQLite3Dialect) AutoIncrement() string {
	return "AUTOINCREMENT"
}
// FormatBool returns "1" or "0" according to the value of b as boolean for SQLite3.
func (d *SQLite3Dialect) FormatBool(b bool) string {
	if b {
		return "1"
	} else {
		return "0"
	}
}
// LastInsertId returns the SQL that fetches the last inserted rowid.
func (d *SQLite3Dialect) LastInsertId() string {
	return `SELECT last_insert_rowid()`
}
// MySQLDialect represents a dialect of the MySQL.
// It implements the Dialect interface.
type MySQLDialect struct{}
// Name returns name of the MySQLDialect.
func (d *MySQLDialect) Name() string {
	return "mysql"
}
// Quote returns a quoted s for a column name.
// Embedded backquotes are escaped by doubling them.
func (d *MySQLDialect) Quote(s string) string {
	return fmt.Sprintf("`%s`", strings.Replace(s, "`", "``", -1))
}
// PlaceHolder returns the placeholder character of the MySQL.
func (d *MySQLDialect) PlaceHolder(i int) string {
	return "?"
}
// SQLType returns the SQL type of the v for MySQL.
// Pointer and database/sql Null* types map to nullable columns; strings and
// byte slices are sized via the "size" struct tag; raw Go float types are
// rejected via panic(ErrUsingFloatType).
func (d *MySQLDialect) SQLType(v interface{}, autoIncrement bool, size uint64) (name string, allowNull bool) {
	switch v.(type) {
	case bool:
		return "BOOLEAN", false
	case *bool, sql.NullBool:
		return "BOOLEAN", true
	case int8, int16, uint8, uint16:
		return "SMALLINT", false
	case *int8, *int16, *uint8, *uint16:
		return "SMALLINT", true
	case int, int32, uint, uint32:
		return "INT", false
	case *int, *int32, *uint, *uint32:
		return "INT", true
	case int64, uint64:
		return "BIGINT", false
	case *int64, *uint64, sql.NullInt64:
		return "BIGINT", true
	case string:
		return d.varchar(size), false
	case *string, sql.NullString:
		return d.varchar(size), true
	case []byte:
		switch {
		case size == 0:
			return "VARBINARY(255)", true // default.
		case size < (1<<16)-1-2: // approximate 64KB.
			// 65533 ((2^16) - 1) - (length of prefix)
			// See http://dev.mysql.com/doc/refman/5.5/en/string-type-overview.html#idm47703458759504
			return fmt.Sprintf("VARBINARY(%d)", size), true
		case size < 1<<24: // 16MB.
			return "MEDIUMBLOB", true
		}
		return "LONGBLOB", true
	case time.Time:
		return "DATETIME", false
	case *time.Time:
		return "DATETIME", true
	case Rat:
		return fmt.Sprintf("DECIMAL(%d, %d)", decimalPrecision, decimalScale), false
	case *Rat:
		return fmt.Sprintf("DECIMAL(%d, %d)", decimalPrecision, decimalScale), true
	case Float32, Float64:
		return "DOUBLE", false
	case *Float32, *Float64:
		return "DOUBLE", true
	case float32, *float32, float64, *float64, sql.NullFloat64:
		panic(ErrUsingFloatType)
	}
	panic(fmt.Errorf("MySQLDialect: unsupported SQL type: %T", v))
}
// AutoIncrement returns the MySQL auto-increment keyword.
func (d *MySQLDialect) AutoIncrement() string {
	return "AUTO_INCREMENT"
}
// FormatBool returns "TRUE" or "FALSE" according to the value of b as boolean for MySQL.
func (d *MySQLDialect) FormatBool(b bool) string {
	if b {
		return "TRUE"
	} else {
		return "FALSE"
	}
}
// LastInsertId returns the SQL that fetches the last inserted id.
func (d *MySQLDialect) LastInsertId() string {
	return `SELECT LAST_INSERT_ID()`
}
// varchar maps a string field of the given size to a MySQL text type:
// 0 falls back to VARCHAR(255), sizes up to ~64KB keep VARCHAR(n),
// larger sizes become MEDIUMTEXT/LONGTEXT.
func (d *MySQLDialect) varchar(size uint64) string {
	switch {
	case size == 0:
		return "VARCHAR(255)" // default.
	case size < (1<<16)-1-2: // approximate 64KB.
		// 65533 ((2^16) - 1) - (length of prefix)
		// See http://dev.mysql.com/doc/refman/5.5/en/string-type-overview.html#idm47703458792704
		return fmt.Sprintf("VARCHAR(%d)", size)
	case size < 1<<24: // 16MB.
		return "MEDIUMTEXT"
	}
	return "LONGTEXT"
}
// PostgresDialect represents a dialect of the PostgreSQL.
// It implements the Dialect interface.
type PostgresDialect struct{}
// Name returns name of the PostgresDialect.
func (d *PostgresDialect) Name() string {
	return "postgres"
}
// Quote returns a quoted s for a column name.
// Embedded double quotes are escaped by doubling them.
func (d *PostgresDialect) Quote(s string) string {
	return fmt.Sprintf(`"%s"`, strings.Replace(s, `"`, `""`, -1))
}
// PlaceHolder returns the placeholder character of the PostgreSQL.
// PostgreSQL placeholders are positional: $1, $2, ...
func (d *PostgresDialect) PlaceHolder(i int) string {
	return fmt.Sprintf("$%d", i+1)
}
// SQLType returns the SQL type of the v for PostgreSQL.
// Pointer and database/sql Null* types map to nullable columns; integer
// types switch to the serial family when autoIncrement is set; raw Go
// float types are rejected via panic(ErrUsingFloatType).
func (d *PostgresDialect) SQLType(v interface{}, autoIncrement bool, size uint64) (name string, allowNull bool) {
	switch v.(type) {
	case bool:
		return "boolean", false
	case *bool, sql.NullBool:
		return "boolean", true
	case int8, int16, uint8, uint16:
		return d.smallint(autoIncrement), false
	case *int8, *int16, *uint8, *uint16:
		return d.smallint(autoIncrement), true
	case int, int32, uint, uint32:
		return d.integer(autoIncrement), false
	case *int, *int32, *uint, *uint32:
		return d.integer(autoIncrement), true
	case int64, uint64:
		return d.bigint(autoIncrement), false
	case *int64, *uint64, sql.NullInt64:
		return d.bigint(autoIncrement), true
	case string:
		return d.varchar(size), false
	case *string, sql.NullString:
		return d.varchar(size), true
	case []byte:
		return "bytea", true
	case time.Time:
		return "timestamp with time zone", false
	case *time.Time:
		return "timestamp with time zone", true
	case Rat:
		return fmt.Sprintf("numeric(%d, %d)", decimalPrecision, decimalScale), false
	case *Rat:
		return fmt.Sprintf("numeric(%d, %d)", decimalPrecision, decimalScale), true
	case Float32, Float64:
		return "double precision", false
	case *Float32, *Float64:
		return "double precision", true
	case float32, *float32, float64, *float64, sql.NullFloat64:
		panic(ErrUsingFloatType)
	}
	panic(fmt.Errorf("PostgresDialect: unsupported SQL type: %T", v))
}
// AutoIncrement returns an empty string: auto increment is expressed through
// the serial column types chosen in smallint/integer/bigint, not a keyword.
func (d *PostgresDialect) AutoIncrement() string {
	return ""
}
// FormatBool returns "TRUE" or "FALSE" according to the value of b as boolean for PostgreSQL.
func (d *PostgresDialect) FormatBool(b bool) string {
	if b {
		return "TRUE"
	} else {
		return "FALSE"
	}
}
// LastInsertId returns the SQL that fetches the last value from lastval().
func (d *PostgresDialect) LastInsertId() string {
	return `SELECT lastval()`
}
// smallint returns "smallserial" for auto-increment fields, else "smallint".
func (d *PostgresDialect) smallint(autoIncrement bool) string {
	if autoIncrement {
		return "smallserial"
	}
	return "smallint"
}
// integer returns "serial" for auto-increment fields, else "integer".
func (d *PostgresDialect) integer(autoIncrement bool) string {
	if autoIncrement {
		return "serial"
	}
	return "integer"
}
// bigint returns "bigserial" for auto-increment fields, else "bigint".
func (d *PostgresDialect) bigint(autoIncrement bool) string {
	if autoIncrement {
		return "bigserial"
	}
	return "bigint"
}
func (d *PostgresDialect) varchar(size uint64) string {
switch {
case size == 0:
return "varchar(255)" // default.
case size < (1<<16)-1-2: // approximate 64KB.
// This isn't required in PostgreSQL, but defined in order to match to the MySQLDialect.
return fmt.Sprintf("varchar(%d)", size)
}
return "text"
} | dialect.go | 0.788665 | 0.407805 | dialect.go | starcoder |
package xorfilter
import (
"math"
)
// XorN offers a configurable false-positive probability.
// The fingerprint width is chosen at build time via Bits.
type XorN struct {
	XorFilterCommon
	// Bits in 9..32
	Bits int
	// Fingerprints should be serialized as keeping the low XorN.Bits of each entry
	Fingerprints []uint32
}
// PopulateN creates an xor filter with tunable number of bits per element.
// It is a convenience wrapper around Builder.PopulateN with a fresh Builder.
func PopulateN(keys []uint64, bits int) (*XorN, error) {
	var bld Builder
	return bld.PopulateN(keys, bits)
}
// mask returns a value whose low filter.Bits bits are set; fingerprints are
// truncated to this width before storage and comparison.
func (filter *XorN) mask() uint32 {
	return uint32(0x00000000ffffffff >> (32 - filter.Bits))
}
// Contains tell you whether the key is likely part of the set.
// It derives three positions, one in each third of Fingerprints, and
// reports membership when the xor of the three stored fingerprints equals
// the key's (masked) fingerprint. False positives are possible; false
// negatives are not.
func (filter *XorN) Contains(key uint64) bool {
	hash := mixsplit(key, filter.Seed)
	mask := filter.mask()
	f := uint32(fingerprint(hash)) & mask
	// Three independent 32-bit values derived by rotating the hash.
	r0 := uint32(hash)
	r1 := uint32(rotl64(hash, 21))
	r2 := uint32(rotl64(hash, 42))
	// One slot per block: [0,B), [B,2B), [2B,3B).
	h0 := reduce(r0, filter.BlockLength)
	h1 := reduce(r1, filter.BlockLength) + filter.BlockLength
	h2 := reduce(r2, filter.BlockLength) + 2*filter.BlockLength
	return f == (filter.Fingerprints[h0] ^ filter.Fingerprints[h1] ^ filter.Fingerprints[h2])
}
// allocate sizes Fingerprints at roughly 1.23*size+32 entries, rounded down
// to a multiple of 3 so the table splits into three equal blocks.
func (filter *XorN) allocate(size int) {
	capacity := 32 + uint32(math.Ceil(1.23*float64(size)))
	capacity = capacity / 3 * 3 // round it down to a multiple of 3
	// slice capacity defaults to length
	filter.Fingerprints = make([]uint32, capacity)
	filter.BlockLength = capacity / 3
}
// PopulateN creates an xor filter with tunable number of bits per element.
func (bld *Builder) PopulateN(keys []uint64, bits int) (*XorN, error) {
size := len(keys)
filter := new(XorN)
filter.Bits = bits
filter.allocate(size)
stack, err := bld.populateCommon(keys, &filter.XorFilterCommon)
if err != nil {
return nil, err
}
mask := filter.mask()
stacksize := size
for stacksize > 0 {
stacksize--
ki := stack[stacksize]
val := uint32(fingerprint(ki.hash)) & mask
if ki.index < filter.BlockLength {
val ^= filter.Fingerprints[filter.geth1(ki.hash)+filter.BlockLength] ^ filter.Fingerprints[filter.geth2(ki.hash)+2*filter.BlockLength]
} else if ki.index < 2*filter.BlockLength {
val ^= filter.Fingerprints[filter.geth0(ki.hash)] ^ filter.Fingerprints[filter.geth2(ki.hash)+2*filter.BlockLength]
} else {
val ^= filter.Fingerprints[filter.geth0(ki.hash)] ^ filter.Fingerprints[filter.geth1(ki.hash)+filter.BlockLength]
}
filter.Fingerprints[ki.index] = val
}
return filter, nil
} | xorN.go | 0.616705 | 0.485722 | xorN.go | starcoder |
package mino
import (
"sort"
)
// A Transform operates over a data source and calculates "some"
// value for it.
type Transform interface {
	Transform(analyzer *Analyzer, data Collection) (result interface{}, err error)
}
// DataPoint is one weighted observation in a Collection.
type DataPoint struct {
	// Value is the observed value.
	Value float64
	// Weight is the point's relative weight; Collection.Weight sums these.
	Weight float64
}
// Sorting function for data points. Returns true iff a < b
type Sorter func(a, b DataPoint) bool
// A collection represents a group of values that can be analyzed.
// Data values in the collection are immutable, but the list's sorting
// may change over time.
type Collection interface {
	// Returns the next data point in the collection, and increments the
	// internal "reader". If there are no more data points available, the
	// method may panic.
	Next() DataPoint
	// Decrements the internal reader by the given amount.
	Rewind(n int)
	// Returns whether there are more data points available.
	HasMore() bool
	// Resets the collection counter, so that calling Next() once again
	// returns the first item in the collection.
	Reset()
	// Returns the total weight of all DataPoints.
	Weight() float64
	// Returns the number of values contained in the collection.
	Len() int
	// Sorts the underlying data list.
	Sort(Sorter)
}
// dataSet adapts a []DataPoint plus a Sorter to the sort.Interface so the
// standard library can sort collections.
type dataSet struct {
	data []DataPoint
	sortFn Sorter
}
var _ sort.Interface = dataSet{}
// Len implements sort.Interface.
func (d dataSet) Len() int {
	return len(d.data)
}
// Swap implements sort.Interface.
func (d dataSet) Swap(i, j int) {
	d.data[i], d.data[j] = d.data[j], d.data[i]
}
// Less implements sort.Interface by delegating to the configured Sorter.
func (d dataSet) Less(i, j int) bool {
	return d.sortFn(d.data[i], d.data[j])
}
// Basic implementation of the Collection interface.
type baseCollection struct {
	d dataSet
	idx int
	weight float64
}
var _ Collection = new(baseCollection)
// calculateWeight recomputes the cached total weight from all data points.
func (b *baseCollection) calculateWeight() {
	b.weight = 0
	for _, pnt := range b.d.data {
		b.weight += pnt.Weight
	}
}
// Next returns the current data point and advances the reader. Panics when
// the reader is past the end.
func (b *baseCollection) Next() DataPoint {
	pnt := b.d.data[b.idx]
	b.idx += 1
	return pnt
}
// Rewind moves the reader back by n positions.
func (b *baseCollection) Rewind(n int) {
	b.idx -= n
}
// Reset moves the reader back to the first data point.
func (b *baseCollection) Reset() {
	b.idx = 0
}
// HasMore reports whether the reader has data points left.
func (b *baseCollection) HasMore() bool {
	return b.idx < len(b.d.data)
}
// Weight returns the cached total weight (see calculateWeight).
func (b *baseCollection) Weight() float64 {
	return b.weight
}
// Len returns the number of data points.
func (b *baseCollection) Len() int {
	return len(b.d.data)
}
// Sort orders the underlying data with the given comparison function.
func (b *baseCollection) Sort(fn Sorter) {
	b.d.sortFn = fn
	sort.Sort(b.d)
}
// FromList returns an analyzer over a new collection formed from the
// given slice of floats, each treated as an unweighted (weight 1.0)
// observation.
func FromList(list []float64) *Analyzer {
	points := make([]DataPoint, 0, len(list))
	for _, value := range list {
		points = append(points, DataPoint{Value: value, Weight: 1.0})
	}
	return FromPoints(points)
}
// Returns a collection formed from the slice of data points.
func FromPoints(list []DataPoint) *Analyzer {
col := &baseCollection{d: dataSet{data: list}}
col.calculateWeight()
return Analyze(col)
} | types.go | 0.816113 | 0.544256 | types.go | starcoder |
// Package deque implements a deque using a circular array.
package deque
const (
	// initialQueueSize is the capacity of the backing array allocated
	// on the first push into an empty deque.
	initialQueueSize = 4
)

// T is the type of deques: a double-ended queue backed by a circular
// array. The zero value is an empty deque ready for use.
type T struct {
	contents []interface{}
	// Boundary cases.
	// o If full, size==len and fx==bx
	// o If empty, size==0 and fx==bx
	// o On initialization, contents=nil, size==0, fx==bx.
	size int // Number of elements in the queue.
	fx   int // Index of the first element.
	bx   int // Index one past the last element (the index of the last element is (bx-1)%len).
}

// Size returns the number of items in the queue.
func (q *T) Size() int {
	return q.size
}

// Clear removes all the elements of the queue and releases the
// backing array to the garbage collector.
func (q *T) Clear() {
	q.fx = 0
	q.bx = 0
	q.size = 0
	q.contents = nil
}

// PushBack adds an element to the back of the queue.
func (q *T) PushBack(item interface{}) {
	q.reserve()
	q.contents[q.bx] = item
	q.bx = (q.bx + 1) % len(q.contents)
	q.size++
}

// PushFront adds an element to the front of the deque.
func (q *T) PushFront(item interface{}) {
	q.reserve()
	q.fx = (q.fx + len(q.contents) - 1) % len(q.contents)
	q.contents[q.fx] = item
	q.size++
}

// Front returns the first element of the deque, or nil if there is none.
func (q *T) Front() interface{} {
	if q.size == 0 {
		return nil
	}
	return q.contents[q.fx]
}

// Back returns the last element of the deque, or nil if there is none.
func (q *T) Back() interface{} {
	if q.size == 0 {
		return nil
	}
	return q.contents[(q.bx+len(q.contents)-1)%len(q.contents)]
}

// PopFront removes an element from the front of the queue and returns
// it, or returns nil if the deque is empty.
func (q *T) PopFront() interface{} {
	if q.size == 0 {
		return nil
	}
	item := q.contents[q.fx]
	q.contents[q.fx] = nil // drop the reference so the value can be collected
	q.fx = (q.fx + 1) % len(q.contents)
	q.size--
	return item
}

// PopBack removes an element from the back of the queue and returns
// it, or returns nil if the deque is empty.
func (q *T) PopBack() interface{} {
	if q.size == 0 {
		return nil
	}
	q.bx = (q.bx + len(q.contents) - 1) % len(q.contents)
	item := q.contents[q.bx]
	q.contents[q.bx] = nil // drop the reference so the value can be collected
	q.size--
	return item
}

// Iter iterates over the elements of the deque from front to back.
// f should return false to terminate the iteration early.
func (q *T) Iter(f func(item interface{}) bool) {
	for i := 0; i < q.size; i++ {
		ix := (q.fx + i) % len(q.contents)
		if !f(q.contents[ix]) {
			break
		}
	}
}

// reserve grows the backing array, if needed, so at least one more
// element can be stored. Growth unwraps the circular layout so the
// data starts at index 0 afterwards.
func (q *T) reserve() {
	if q.size == len(q.contents) {
		if q.contents == nil {
			q.contents = make([]interface{}, initialQueueSize)
			return
		}
		contents := make([]interface{}, q.size*2)
		i := copy(contents, q.contents[q.fx:])
		copy(contents[i:], q.contents[:q.fx])
		q.contents = contents
		q.fx = 0
		q.bx = q.size
	}
}
package staticarray
import (
"github.com/influxdata/flux/array"
"github.com/influxdata/flux/memory"
"github.com/influxdata/flux/semantic"
)
// uints is an immutable array of unsigned 64-bit integers backed by a
// plain Go slice. It implements array.UInt. Null values are not
// supported: IsNull is always false and NullN is always 0.
type uints struct {
	data  []uint64
	alloc *memory.Allocator
}

// UInt wraps data in an array.UInt without copying. The resulting
// array is not tracked by any allocator (alloc stays nil).
func UInt(data []uint64) array.UInt {
	return &uints{data: data}
}

// Type returns the element type of the array (UInt).
func (a *uints) Type() semantic.Type {
	return semantic.UInt
}

// IsNull always reports false: this implementation has no null support.
func (a *uints) IsNull(i int) bool {
	return false
}

// IsValid reports whether i is an in-range index into the array.
func (a *uints) IsValid(i int) bool {
	return i >= 0 && i < len(a.data)
}

// Len returns the number of elements in the array.
func (a *uints) Len() int {
	return len(a.data)
}

// NullN returns the number of null values, which is always 0 here.
func (a *uints) NullN() int {
	return 0
}

// Value returns the element at index i.
func (a *uints) Value(i int) uint64 {
	return a.data[i]
}

// Copy is not yet implemented and panics.
func (a *uints) Copy() array.Base {
	panic("implement me")
}

// Free releases the allocator accounting for this array (when it is
// allocator-tracked) and drops the data so it can be collected.
func (a *uints) Free() {
	if a.alloc != nil {
		a.alloc.Free(cap(a.data) * uint64Size)
	}
	a.data = nil
}

// Slice returns an untyped view of the half-open interval [start, stop).
func (a *uints) Slice(start, stop int) array.BaseRef {
	return a.UIntSlice(start, stop)
}

// UIntSlice returns a typed view of [start, stop). The view shares
// the backing slice with the parent and is not allocator-tracked.
func (a *uints) UIntSlice(start, stop int) array.UIntRef {
	return &uints{data: a.data[start:stop]}
}

// Uint64Values exposes the raw backing slice; callers must not mutate it.
func (a *uints) Uint64Values() []uint64 {
	return a.data
}
func UIntBuilder(a *memory.Allocator) array.UIntBuilder {
return &uintBuilder{alloc: a}
}
type uintBuilder struct {
data []uint64
alloc *memory.Allocator
}
func (b *uintBuilder) Type() semantic.Type {
return semantic.UInt
}
func (b *uintBuilder) Len() int {
return len(b.data)
}
func (b *uintBuilder) Cap() int {
return cap(b.data)
}
func (b *uintBuilder) Reserve(n int) {
newCap := len(b.data) + n
if newCap := len(b.data) + n; newCap <= cap(b.data) {
return
}
if err := b.alloc.Allocate(newCap * uint64Size); err != nil {
panic(err)
}
data := make([]uint64, len(b.data), newCap)
copy(data, b.data)
b.alloc.Free(cap(b.data) * uint64Size)
b.data = data
}
func (b *uintBuilder) BuildArray() array.Base {
return b.BuildUIntArray()
}
func (b *uintBuilder) Free() {
panic("implement me")
}
func (b *uintBuilder) Append(v uint64) {
if len(b.data) == cap(b.data) {
// Grow the slice in the same way as built-in append.
n := len(b.data)
if n == 0 {
n = 2
}
b.Reserve(n)
}
b.data = append(b.data, v)
}
func (b *uintBuilder) AppendNull() {
// The staticarray does not support nulls so it will do the current behavior of just appending
// the zero value.
b.Append(0)
}
func (b *uintBuilder) AppendValues(v []uint64, valid ...[]bool) {
if newCap := len(b.data) + len(v); newCap > cap(b.data) {
b.Reserve(newCap - cap(b.data))
}
b.data = append(b.data, v...)
}
func (b *uintBuilder) BuildUIntArray() array.UInt {
return &uints{
data: b.data,
alloc: b.alloc,
}
} | internal/staticarray/uint.go | 0.609292 | 0.474753 | uint.go | starcoder |
package functions
import (
"fmt"
"math"
"reflect"
"strconv"
"strings"
)
// FuncToString converts its argument to a string using fmt's default
// formatting.
var FuncToString = Function{
	Description: `Converts the given argument to a string.`,
	Parameters: Parameters{{
		Name: "in",
	}},
}.MustWithFunc(func(in ...interface{}) string {
	// Expand the variadic slice: fmt.Sprint(in) would format the
	// slice itself, rendering e.g. "[42]" instead of "42".
	return fmt.Sprint(in...)
})
// Parse functions: these fail with an error when the input string
// cannot be interpreted as the requested numeric type.
// NOTE(review): FuncParseInt routes through parseInt, which parses
// via float syntax ("2.9" yields 2) — confirm this leniency is
// intended.
var FuncParseInt = Function{
	Description: "Interprets a given string <str> as an int and returns the result. If this is not a valid number it will fail.",
	Parameters: Parameters{{
		Name: "str",
	}},
}.MustWithFunc(parseInt)
var FuncParseInt64 = Function{
	Description: "Interprets a given string <str> as a int64 and returns the result. If this is not a valid number it will fail.",
	Parameters: Parameters{{
		Name: "str",
	}},
}.MustWithFunc(parseInt64)
var FuncParseFloat = Function{
	Description: "Interprets a given string <str> as a float and returns the result. If this is not a valid number it will fail.",
	Parameters: Parameters{{
		Name: "str",
	}},
}.MustWithFunc(parseFloat32)
var FuncParseDouble = Function{
	Description: "Interprets a given string <str> as a float64 and returns the result. If this is not a valid number it will fail.",
	Parameters: Parameters{{
		Name: "str",
	}},
}.MustWithFunc(parseFloat64)
// Predicate functions: report whether a string parses as the given
// numeric type. FuncIsInt and FuncIsInt64 share isInt, which checks
// against the 64-bit range in both cases.
var FuncIsInt = Function{
	Description: "Will return <true> if the given string <str> is a valid int.",
	Parameters: Parameters{{
		Name: "str",
	}},
}.MustWithFunc(isInt)
var FuncIsInt64 = Function{
	Description: "Will return <true> if the given string <str> is a valid int64.",
	Parameters: Parameters{{
		Name: "str",
	}},
}.MustWithFunc(isInt)
var FuncIsFloat = Function{
	Description: "Will return <true> if the given string <str> is a valid float.",
	Parameters: Parameters{{
		Name: "str",
	}},
}.MustWithFunc(isFloat)
var FuncIsDouble = Function{
	Description: "Will return <true> if the given string <str> is a valid float64.",
	Parameters: Parameters{{
		Name: "str",
	}},
}.MustWithFunc(isFloat)
// Lenient conversion functions: best-effort coercion that falls back
// to the type's zero value instead of failing.
var FuncToInt = Function{
	Description: "Will try to interpret the given <value> as an int. If this is not possible <0> is returned.",
	Parameters: Parameters{{
		Name: "value",
	}},
}.MustWithFunc(toInt)
var FuncToInt64 = Function{
	Description: "Will try to interpret the given <value> as a int64. If this is not possible <0> is returned.",
	Parameters: Parameters{{
		Name: "value",
	}},
}.MustWithFunc(toInt64)
var FuncToFloat = Function{
	Description: "Will try to interpret the given <value> as a float. If this is not possible <0.0> is returned.",
	Parameters: Parameters{{
		Name: "value",
	}},
}.MustWithFunc(toFloat32)
var FuncToDouble = Function{
	Description: "Will try to interpret the given <value> as a float64. If this is not possible <0.0> is returned.",
	Parameters: Parameters{{
		Name: "value",
	}},
}.MustWithFunc(toFloat64)
var FuncToBool = Function{
	Description: "Will try to interpret the given <value> as a bool. If this is not possible <false> is returned.",
	Parameters: Parameters{{
		Name: "value",
	}},
}.MustWithFunc(toBool)
// FuncsConversations is the registry of all conversion functions,
// keyed by the name exposed to templates.
var FuncsConversations = Functions{
	"toString":     FuncToString,
	"parseInt":     FuncParseInt,
	"parseInt64":   FuncParseInt64,
	"parseFloat32": FuncParseFloat,
	"parseFloat64": FuncParseDouble,
	"isInt":        FuncIsInt,
	"isInt64":      FuncIsInt64,
	"isFloat32":    FuncIsFloat,
	"isFloat64":    FuncIsDouble,
	"toInt":        FuncToInt,
	"toInt64":      FuncToInt64,
	"toFloat32":    FuncToFloat,
	"toFloat64":    FuncToDouble,
	"toBool":       FuncToBool,
}
// CategoryConversations groups the conversion functions into a single
// category.
var CategoryConversations = Category{
	Functions: FuncsConversations,
}
// isFloat reports whether v parses as a floating point number.
func isFloat(v string) bool {
	// bitSize must be 32 or 64 per the strconv documentation; the
	// previous value of 0 relied on unspecified behavior.
	_, err := strconv.ParseFloat(v, 64)
	return err == nil
}

// parseFloat32 parses v as a 32-bit float.
func parseFloat32(v string) (float32, error) {
	i, err := parseFloat64(v)
	return float32(i), err
}

// parseFloat64 parses v as a 64-bit float.
func parseFloat64(v string) (float64, error) {
	return strconv.ParseFloat(v, 64)
}

// toFloat32 converts an arbitrary value to float32, returning 0 when
// no conversion is possible.
func toFloat32(v interface{}) float32 {
	return float32(toFloat64(v))
}

// toFloat64 converts an arbitrary value to float64: strings are
// parsed, numeric kinds are converted, booleans map to 1/0, and
// anything else (including unparsable strings) yields 0.
func toFloat64(v interface{}) float64 {
	if str, ok := v.(string); ok {
		iv, err := strconv.ParseFloat(str, 64)
		if err != nil {
			return 0
		}
		return iv
	}
	val := reflect.Indirect(reflect.ValueOf(v))
	switch val.Kind() {
	case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int:
		return float64(val.Int())
	case reflect.Uint8, reflect.Uint16, reflect.Uint32:
		return float64(val.Uint())
	case reflect.Uint, reflect.Uint64:
		return float64(val.Uint())
	case reflect.Float32, reflect.Float64:
		return val.Float()
	case reflect.Bool:
		if val.Bool() {
			return 1
		}
		return 0
	default:
		return 0
	}
}

// isInt reports whether v parses as a decimal 64-bit integer.
func isInt(v string) bool {
	_, err := strconv.ParseInt(v, 10, 64)
	return err == nil
}

// parseInt parses v as an int. Note that it funnels through
// parseFloat64, so float syntax is accepted and truncated
// ("2.9" parses to 2).
func parseInt(v string) (int, error) {
	i, err := parseFloat64(v)
	return int(i), err
}

// parseInt64 parses v as an int64 (decimal digits only).
func parseInt64(v string) (int64, error) {
	return strconv.ParseInt(v, 10, 64)
}

// toInt converts an arbitrary value to int, returning 0 when no
// conversion is possible.
func toInt(v interface{}) int {
	return int(toInt64(v))
}

// toInt64 converts an arbitrary value to int64: strings are parsed as
// decimal integers, numeric kinds are converted (uints above
// MaxInt64 are clamped, floats are truncated), booleans map to 1/0,
// and anything else yields 0.
func toInt64(v interface{}) int64 {
	if str, ok := v.(string); ok {
		iv, err := strconv.ParseInt(str, 10, 64)
		if err != nil {
			return 0
		}
		return iv
	}
	val := reflect.Indirect(reflect.ValueOf(v))
	switch val.Kind() {
	case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int:
		return val.Int()
	case reflect.Uint8, reflect.Uint16, reflect.Uint32:
		return int64(val.Uint())
	case reflect.Uint, reflect.Uint64:
		tv := val.Uint()
		if tv <= math.MaxInt64 {
			return int64(tv)
		}
		return math.MaxInt64
	case reflect.Float32, reflect.Float64:
		return int64(val.Float())
	case reflect.Bool:
		if val.Bool() {
			return 1
		}
		return 0
	default:
		return 0
	}
}

// toBool converts an arbitrary value to bool. Strings "true", "on"
// and "yes" (case-insensitive, trimmed) are true; numeric values are
// true when non-zero; everything else is false.
func toBool(v interface{}) bool {
	if b, ok := v.(bool); ok {
		return b
	}
	if str, ok := v.(string); ok {
		switch strings.TrimSpace(strings.ToLower(str)) {
		case "true", "on", "yes":
			return true
		default:
			return false
		}
	}
	val := reflect.Indirect(reflect.ValueOf(v))
	switch val.Kind() {
	case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int:
		// Bug fix: the previous "> 1" comparison made toBool(1)
		// return false, contradicting the documented contract.
		return val.Int() != 0
	case reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint, reflect.Uint64:
		return val.Uint() != 0
	case reflect.Float32, reflect.Float64:
		return val.Float() != 0
	default:
		return false
	}
}
package digraph
// Index provides indexed access to all nodes and edges of an
// underlying graph. The index is lazily constructed to include all
// nodes and edges. Index is constructed by accessible nodes and
// edges, thus the underlying graph should not be modified while the
// index is in use.
type Index struct {
	g *Graph
	// Lazily-built caches: each field below is nil until the first
	// query that needs it, then filled by one full graph traversal.
	allNodes []Node
	allNodesByLabel map[interface{}][]Node
	incomingEdges map[Node][]Edge
	incomingEdgesByLabel map[Node]map[interface{}][]Edge
}

// GetIndex returns an uninitialized index for the graph. All caches
// are built on demand by the query methods.
func (g *Graph) GetIndex() *Index {
	return &Index{g: g}
}

// NodesSlice returns all accessible nodes as a slice, computing and
// caching the result on first use.
func (index *Index) NodesSlice() []Node {
	if index.allNodes == nil {
		index.allNodes = make([]Node, 0)
		seen := map[Node]struct{}{}
		for node := range index.g.nodes {
			// IterateUnique appears to visit each reachable node once,
			// with `seen` deduplicating across roots — TODO confirm.
			IterateUnique(node, func(n Node) bool {
				index.allNodes = append(index.allNodes, n)
				return true
			}, func(e Edge) bool { return true }, seen)
		}
	}
	return index.allNodes
}

// Nodes returns an iterator over all accessible nodes.
func (index *Index) Nodes() Nodes {
	return Nodes{&nodeSliceIterator{index.NodesSlice()}}
}

// NodesByLabelSlice returns a slice of the nodes carrying the given
// label, building the per-label cache on first use. (The previous
// comment incorrectly said it returns an iterator.)
func (index *Index) NodesByLabelSlice(label interface{}) []Node {
	if index.allNodesByLabel == nil {
		index.allNodesByLabel = make(map[interface{}][]Node)
		seen := map[Node]struct{}{}
		for node := range index.g.nodes {
			IterateUnique(node, func(n Node) bool {
				index.allNodesByLabel[n.GetLabel()] = append(index.allNodesByLabel[n.GetLabel()], n)
				return true
			}, func(e Edge) bool { return true }, seen)
		}
	}
	return index.allNodesByLabel[label]
}

// NodesByLabel returns an iterator over the nodes with the given label.
func (index *Index) NodesByLabel(label interface{}) Nodes {
	return Nodes{&nodeSliceIterator{index.NodesByLabelSlice(label)}}
}

// Out returns the outgoing edges of a node. Outgoing edges are stored
// on the node itself, so no cache is needed.
func (index *Index) Out(node Node) Edges {
	return node.Out()
}

// OutSlice returns the outgoing edges of a node as a slice.
func (index *Index) OutSlice(node Node) []Edge {
	return node.Out().All()
}

// OutWith returns the outgoing edges of a node that carry the given
// label.
func (index *Index) OutWith(node Node, label interface{}) Edges {
	return node.OutWith(label)
}

// OutWithSlice returns the outgoing edges of a node with the given
// label as a slice.
func (index *Index) OutWithSlice(node Node, label interface{}) []Edge {
	return node.OutWith(label).All()
}

// InSlice returns the incoming edges of a node. These will include only
// those edges that are from the nodes included in this graph. The
// incoming-edge cache is built on first use.
func (index *Index) InSlice(node Node) []Edge {
	if index.incomingEdges == nil {
		index.incomingEdges = make(map[Node][]Edge)
		seen := map[Node]struct{}{}
		for node := range index.g.nodes {
			IterateUnique(node, func(n Node) bool {
				return true
			}, func(e Edge) bool {
				// Record each edge under its target node.
				index.incomingEdges[e.GetTo()] = append(index.incomingEdges[e.GetTo()], e)
				return true
			},
				seen)
		}
	}
	return index.incomingEdges[node]
}

// In returns the incoming edges of a node. These will include only
// those edges that are from the nodes included in this graph.
func (index *Index) In(node Node) Edges {
	return Edges{&edgeSliceIterator{index.InSlice(node)}}
}

// InWithSlice returns the incoming edges of a node by label. These
// will include only those edges that are from the nodes included in
// this graph. A nested node->label->edges cache is built on first use.
func (index *Index) InWithSlice(node Node, label interface{}) []Edge {
	if index.incomingEdgesByLabel == nil {
		index.incomingEdgesByLabel = make(map[Node]map[interface{}][]Edge)
		seen := map[Node]struct{}{}
		for node := range index.g.nodes {
			IterateUnique(node, func(n Node) bool {
				return true
			}, func(e Edge) bool {
				m := index.incomingEdgesByLabel[e.GetTo()]
				if m == nil {
					m = make(map[interface{}][]Edge)
					index.incomingEdgesByLabel[e.GetTo()] = m
				}
				m[e.GetLabel()] = append(m[e.GetLabel()], e)
				return true
			},
				seen)
		}
	}
	m := index.incomingEdgesByLabel[node]
	if m != nil {
		return m[label]
	}
	return nil
}
// InWith returns the incoming edges of a node by label. These will include only
// those edges that are from the nodes included in this graph
func (index *Index) InWith(node Node, label interface{}) Edges {
return Edges{&edgeSliceIterator{index.InWithSlice(node, label)}}
} | index.go | 0.848314 | 0.697802 | index.go | starcoder |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.