code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1 value |
|---|---|---|---|---|---|
package treemap
import (
"context"
"encoding/hex"
"encoding/json"
"fmt"
"math/rand"
"reflect"
)
// NewTree builds a Block from the given info and children and lays out the
// whole tree (sizes and positions) with this node as the root. It is safe to
// call more than once, and the returned Block can also be used as part of a
// bigger treemap. Layout is aborted early if ctx is cancelled.
func NewTree(ctx context.Context, info BlockInfo, children ...*Block) *Block {
	root := NewBlock(info, children...)
	prepareNode(ctx, root, 0)
	return root
}
// NewBlock wraps info and children in a Block without computing any of the
// node layout details. Use it to build every node of a tree except the root
// (which should go through NewTree so the layout gets calculated).
func NewBlock(info BlockInfo, children ...*Block) *Block {
	b := Block{
		BlockInfo: info,
		Children:  children,
	}
	return &b
}
// BlockNode contains information regarding the size and position of every Block in the tree map.
// All fields are computed by the layout (see prepareNode), not supplied by the user.
type BlockNode struct {
	// Depth and Width form the horizontal footprint of the block.
	Depth float64 `json:"depth"`
	// Height is the vertical extent of the block.
	Height float64 `json:"height"`
	Width float64 `json:"width"`
	// Position holds the coordinates assigned during layout; children are
	// positioned relative to their parent's origin.
	Position Position `json:"position"`
}
// BlockInfo contains all the user defined details of every Block in the tree map.
type BlockInfo struct {
	// Name labels the block in the serialized output.
	Name string `json:"name"`
	// Dimm1, Dimm2 and Dimm3 are the user-supplied dimensions; the layout
	// derives width, depth and height from them (see prepareNode).
	Dimm1 int `json:"dimm1"`
	Dimm2 int `json:"dimm2"`
	Dimm3 int `json:"dimm3"`
	// Color is an optional display color for the block.
	Color Color `json:"color,omitempty"`
}
// Generate implements the quick.Generator interface (https://golang.org/pkg/testing/quick/#Generator).
// It produces a BlockInfo with a random hex-encoded name of 2*size characters,
// dimensions uniformly drawn from [0, 2*size] and a random 3-byte color.
func (b BlockInfo) Generate(rand *rand.Rand, size int) reflect.Value {
	name := make([]byte, size)
	color := make([]byte, 3)
	// rand.Rand.Read never returns an error, so the results are safe to drop.
	rand.Read(name)
	rand.Read(color)
	return reflect.ValueOf(BlockInfo{
		Name: hex.EncodeToString(name),
		Dimm1: rand.Intn(2*size + 1),
		Dimm2: rand.Intn(2*size + 1),
		Dimm3: rand.Intn(2*size + 1),
		Color: Color(fmt.Sprintf("0x%x", color)),
	})
}
// Block is a tree of blocks. Every Block contains a list of children, a
// BlockInfo with the input values and a BlockNode with all the information
// regarding the block position and its dimensions.
type Block struct {
	BlockNode
	BlockInfo
	Children []*Block `json:"children,omitempty"`
}
// String implements the fmt.Stringer interface by returning an indented
// JSON serialization of the Block.
func (b *Block) String() string {
	// The marshal error is deliberately discarded: every field of Block is
	// JSON-serializable, so MarshalIndent cannot fail here.
	buf, _ := json.MarshalIndent(b, "", "\t")
	return string(buf)
}
// Generate implements the quick.Generator interface (https://golang.org/pkg/testing/quick/#Generator)
// by delegating to generateTree, which builds a random tree of Blocks.
func (b *Block) Generate(rand *rand.Rand, size int) reflect.Value {
	return reflect.ValueOf(generateTree(rand, size))
}
// prepareNode recursively computes the size and position of b and of its
// whole subtree. z is the vertical coordinate assigned to this node; children
// are stacked on top of their parent (z + parent height). If ctx is
// cancelled, layout stops early, leaving the remaining nodes unpositioned.
func prepareNode(ctx context.Context, b *Block, z float64) {
	b.Position.Z = z
	b.Height = float64(b.Dimm3) + 3
	if len(b.Children) == 0 {
		// Leaf: the footprint comes straight from its own dimensions.
		b.Width = float64(b.Dimm1) + 3
		b.Depth = float64(b.Dimm2) + 3
		return
	}
	// Tile all children on the parent's top face.
	tiler := NewTiler(len(b.Children))
	for _, child := range b.Children {
		select {
		case <-ctx.Done():
			return
		default:
		}
		prepareNode(ctx, child, z+b.Height)
		cp := tiler.NextPosition(child.Width, child.Depth)
		child.Position.X = cp.X
		child.Position.Y = cp.Y
	}
	// The parent's footprint starts as the bounding box of the tiled children.
	bounds := tiler.GetBounds()
	b.Width, b.Depth = bounds.X, bounds.Y
	// Re-centre the children around the parent's origin.
	for _, child := range b.Children {
		child.Position.X -= b.Width / 2.0
		child.Position.Y -= b.Depth / 2.0
	}
	// Finally add the node's own margins to the footprint.
	b.Width += float64(b.Dimm1)
	b.Depth += float64(b.Dimm2)
}
func generateTree(rand *rand.Rand, size int) *Block {
infoTree := (&TreeInfo{}).Generate(rand, size).Interface().(*TreeInfo)
return infoTree.Tree(context.TODO())
} | block.go | 0.736211 | 0.488161 | block.go | starcoder |
package gsm7bit
/*
GSM 7-bit default alphabet and extension table
Source: https://en.wikipedia.org/wiki/GSM_03.38#GSM_7-bit_default_alphabet_and_extension_table_of_3GPP_TS_23.038_/_GSM_03.38
*/
// escapeSequence is the septet that introduces a two-byte escape, selecting
// the extension table for the septet that follows.
const escapeSequence byte = 0x1B
// forwardLookup maps a Unicode rune to its septet in the GSM 7-bit default
// alphabet. Septet 0x1B is deliberately absent: it is the escape that selects
// the forwardEscape extension table.
var forwardLookup = map[rune]byte{
	'@': 0x00, '£': 0x01, '$': 0x02, '¥': 0x03, 'è': 0x04, 'é': 0x05, 'ù': 0x06, 'ì': 0x07,
	'ò': 0x08, 'Ç': 0x09, '\n': 0x0a, 'Ø': 0x0b, 'ø': 0x0c, '\r': 0x0d, 'Å': 0x0e, 'å': 0x0f,
	'Δ': 0x10, '_': 0x11, 'Φ': 0x12, 'Γ': 0x13, 'Λ': 0x14, 'Ω': 0x15, 'Π': 0x16, 'Ψ': 0x17,
	'Σ': 0x18, 'Θ': 0x19, 'Ξ': 0x1a /* 0x1B */, 'Æ': 0x1c, 'æ': 0x1d, 'ß': 0x1e, 'É': 0x1f,
	' ': 0x20, '!': 0x21, '"': 0x22, '#': 0x23, '¤': 0x24, '%': 0x25, '&': 0x26, '\'': 0x27,
	'(': 0x28, ')': 0x29, '*': 0x2a, '+': 0x2b, ',': 0x2c, '-': 0x2d, '.': 0x2e, '/': 0x2f,
	'0': 0x30, '1': 0x31, '2': 0x32, '3': 0x33, '4': 0x34, '5': 0x35, '6': 0x36, '7': 0x37,
	'8': 0x38, '9': 0x39, ':': 0x3a, ';': 0x3b, '<': 0x3c, '=': 0x3d, '>': 0x3e, '?': 0x3f,
	'¡': 0x40, 'A': 0x41, 'B': 0x42, 'C': 0x43, 'D': 0x44, 'E': 0x45, 'F': 0x46, 'G': 0x47,
	'H': 0x48, 'I': 0x49, 'J': 0x4a, 'K': 0x4b, 'L': 0x4c, 'M': 0x4d, 'N': 0x4e, 'O': 0x4f,
	'P': 0x50, 'Q': 0x51, 'R': 0x52, 'S': 0x53, 'T': 0x54, 'U': 0x55, 'V': 0x56, 'W': 0x57,
	'X': 0x58, 'Y': 0x59, 'Z': 0x5a, 'Ä': 0x5b, 'Ö': 0x5c, 'Ñ': 0x5d, 'Ü': 0x5e, '§': 0x5f,
	'¿': 0x60, 'a': 0x61, 'b': 0x62, 'c': 0x63, 'd': 0x64, 'e': 0x65, 'f': 0x66, 'g': 0x67,
	'h': 0x68, 'i': 0x69, 'j': 0x6a, 'k': 0x6b, 'l': 0x6c, 'm': 0x6d, 'n': 0x6e, 'o': 0x6f,
	'p': 0x70, 'q': 0x71, 'r': 0x72, 's': 0x73, 't': 0x74, 'u': 0x75, 'v': 0x76, 'w': 0x77,
	'x': 0x78, 'y': 0x79, 'z': 0x7a, 'ä': 0x7b, 'ö': 0x7c, 'ñ': 0x7d, 'ü': 0x7e, 'à': 0x7f,
}
// forwardEscape maps a rune to its septet in the GSM 7-bit extension table;
// each of these is encoded as escapeSequence followed by the mapped septet.
var forwardEscape = map[rune]byte{
	'\f': 0x0A, '^': 0x14, '{': 0x28, '}': 0x29, '\\': 0x2F, '[': 0x3C, '~': 0x3D, ']': 0x3E, '|': 0x40, '€': 0x65,
}
// reverseLookup is the inverse of forwardLookup: it maps a default-alphabet
// septet back to its Unicode rune. Septet 0x1B (the escape) is absent.
var reverseLookup = map[byte]rune{
	0x00: '@', 0x01: '£', 0x02: '$', 0x03: '¥', 0x04: 'è', 0x05: 'é', 0x06: 'ù', 0x07: 'ì',
	0x08: 'ò', 0x09: 'Ç', 0x0a: '\n', 0x0b: 'Ø', 0x0c: 'ø', 0x0d: '\r', 0x0e: 'Å', 0x0f: 'å',
	0x10: 'Δ', 0x11: '_', 0x12: 'Φ', 0x13: 'Γ', 0x14: 'Λ', 0x15: 'Ω', 0x16: 'Π', 0x17: 'Ψ',
	0x18: 'Σ', 0x19: 'Θ', 0x1a: 'Ξ' /* 0x1B */, 0x1c: 'Æ', 0x1d: 'æ', 0x1e: 'ß', 0x1f: 'É',
	0x20: ' ', 0x21: '!', 0x22: '"', 0x23: '#', 0x24: '¤', 0x25: '%', 0x26: '&', 0x27: '\'',
	0x28: '(', 0x29: ')', 0x2a: '*', 0x2b: '+', 0x2c: ',', 0x2d: '-', 0x2e: '.', 0x2f: '/',
	0x30: '0', 0x31: '1', 0x32: '2', 0x33: '3', 0x34: '4', 0x35: '5', 0x36: '6', 0x37: '7',
	0x38: '8', 0x39: '9', 0x3a: ':', 0x3b: ';', 0x3c: '<', 0x3d: '=', 0x3e: '>', 0x3f: '?',
	0x40: '¡', 0x41: 'A', 0x42: 'B', 0x43: 'C', 0x44: 'D', 0x45: 'E', 0x46: 'F', 0x47: 'G',
	0x48: 'H', 0x49: 'I', 0x4a: 'J', 0x4b: 'K', 0x4c: 'L', 0x4d: 'M', 0x4e: 'N', 0x4f: 'O',
	0x50: 'P', 0x51: 'Q', 0x52: 'R', 0x53: 'S', 0x54: 'T', 0x55: 'U', 0x56: 'V', 0x57: 'W',
	0x58: 'X', 0x59: 'Y', 0x5a: 'Z', 0x5b: 'Ä', 0x5c: 'Ö', 0x5d: 'Ñ', 0x5e: 'Ü', 0x5f: '§',
	0x60: '¿', 0x61: 'a', 0x62: 'b', 0x63: 'c', 0x64: 'd', 0x65: 'e', 0x66: 'f', 0x67: 'g',
	0x68: 'h', 0x69: 'i', 0x6a: 'j', 0x6b: 'k', 0x6c: 'l', 0x6d: 'm', 0x6e: 'n', 0x6f: 'o',
	0x70: 'p', 0x71: 'q', 0x72: 'r', 0x73: 's', 0x74: 't', 0x75: 'u', 0x76: 'v', 0x77: 'w',
	0x78: 'x', 0x79: 'y', 0x7a: 'z', 0x7b: 'ä', 0x7c: 'ö', 0x7d: 'ñ', 0x7e: 'ü', 0x7f: 'à',
}
// reverseEscape is the inverse of forwardEscape: it maps a septet from the
// extension table (read after an escapeSequence) back to its Unicode rune.
// The original closing line carried stray dataset metadata that broke
// compilation; it has been removed.
var reverseEscape = map[byte]rune{
	0x0A: '\f', 0x14: '^', 0x28: '{', 0x29: '}', 0x2F: '\\', 0x3C: '[', 0x3D: '~', 0x3E: ']', 0x40: '|', 0x65: '€',
}
package gographviz
import (
"sort"
)
// Edge represents a single edge of the graph, from Src to Dst, optionally
// attached to specific ports on either end.
type Edge struct {
	Src string
	SrcPort string
	Dst string
	DstPort string
	// Dir reports whether the edge is directed.
	Dir bool
	// Attrs holds the edge's DOT attributes.
	Attrs Attrs
}
// Edges represents a set of Edges, indexed both by source and by destination
// for fast lookup, plus the flat insertion-ordered list.
type Edges struct {
	// SrcToDsts maps source name -> destination name -> edges.
	SrcToDsts map[string]map[string][]*Edge
	// DstToSrcs maps destination name -> source name -> edges.
	DstToSrcs map[string]map[string][]*Edge
	// Edges lists every edge in insertion order.
	Edges []*Edge
}
// NewEdges creates a blank set of Edges with both indexes initialised.
func NewEdges() *Edges {
	// Named fields instead of a positional literal, so adding a field to
	// Edges cannot silently mis-assign these values.
	return &Edges{
		SrcToDsts: make(map[string]map[string][]*Edge),
		DstToSrcs: make(map[string]map[string][]*Edge),
		Edges:     []*Edge{},
	}
}
// Add inserts edge into the set, indexing it under both its source and its
// destination, and appends it to the flat Edges list.
func (this *Edges) Add(edge *Edge) {
	if _, ok := this.SrcToDsts[edge.Src]; !ok {
		this.SrcToDsts[edge.Src] = make(map[string][]*Edge)
	}
	// append handles a missing (nil) slice, so pre-allocating an empty
	// slice before appending — as the original did — is redundant.
	this.SrcToDsts[edge.Src][edge.Dst] = append(this.SrcToDsts[edge.Src][edge.Dst], edge)
	if _, ok := this.DstToSrcs[edge.Dst]; !ok {
		this.DstToSrcs[edge.Dst] = make(map[string][]*Edge)
	}
	this.DstToSrcs[edge.Dst][edge.Src] = append(this.DstToSrcs[edge.Dst][edge.Src], edge)
	this.Edges = append(this.Edges, edge)
}
// Sorted returns a copy of the edges ordered deterministically by source,
// destination, ports, direction and attributes. The receiver is not modified.
func (this Edges) Sorted() []*Edge {
	sorted := make(edgeSorter, len(this.Edges))
	copy(sorted, this.Edges)
	sort.Sort(sorted)
	return sorted
}
type edgeSorter []*Edge
func (es edgeSorter) Len() int { return len(es) }
func (es edgeSorter) Swap(i, j int) { es[i], es[j] = es[j], es[i] }
func (es edgeSorter) Less(i, j int) bool {
if es[i].Src < es[j].Src {
return true
} else if es[i].Src > es[j].Src {
return false
}
if es[i].Dst < es[j].Dst {
return true
} else if es[i].Dst > es[j].Dst {
return false
}
if es[i].SrcPort < es[j].SrcPort {
return true
} else if es[i].SrcPort > es[j].SrcPort {
return false
}
if es[i].DstPort < es[j].DstPort {
return true
} else if es[i].DstPort > es[j].DstPort {
return false
}
if es[i].Dir != es[j].Dir {
return es[i].Dir
}
attrs := es[i].Attrs.Copy()
for k, v := range es[j].Attrs {
attrs[k] = v
}
for _, k := range attrs.SortedNames() {
if es[i].Attrs[k] < es[j].Attrs[k] {
return true
} else if es[i].Attrs[k] > es[j].Attrs[k] {
return false
}
}
return false
} | vendor/github.com/awalterschulze/gographviz/edges.go | 0.597021 | 0.50531 | edges.go | starcoder |
package data
import (
mat "github.com/nlpodyssey/spago/pkg/mat32"
"github.com/nlpodyssey/spago/pkg/mat32/rand"
"github.com/nlpodyssey/spago/pkg/utils"
)
// GenerateBatches generates a list of batches so that the classes distribution
// among them is approximately the same. The class is given by the callback for
// each i-th element up to size. The size of each batch depends on the number
// of classes (batchFactor * nClasses). Each batch consists in a list of indices.
//
// NOTE: the class callback is assumed to return dense labels in
// [0, nClasses), mirroring the indexing of the distribution slice below.
func GenerateBatches(size, batchFactor int, class func(i int) int) [][]int {
	// Group element indices by class.
	groupsByClass := make(map[int][]int)
	for i := 0; i < size; i++ {
		c := class(i)
		groupsByClass[c] = append(groupsByClass[c], i)
	}
	nClasses := len(groupsByClass)
	batchSize := batchFactor * nClasses
	// Allocate ceil(size/batchSize) empty batches up front. The original
	// counted them with an O(size) loop; a ceiling division is equivalent.
	batchList := make([][]int, 0)
	if batchSize > 0 {
		numBatches := (size + batchSize - 1) / batchSize
		for i := 0; i < numBatches; i++ {
			batchList = append(batchList, []int{})
		}
	}
	// Relative frequency of every class.
	distribution := make([]mat.Float, nClasses)
	for i := 0; i < nClasses; i++ {
		distribution[i] = mat.Float(len(groupsByClass[i])) / mat.Float(size)
	}
	// Draw classes according to their frequency and deal indices out to the
	// batches round-robin, so every batch mirrors the global distribution.
	k := 0
	for k < size {
		class := rand.WeightedChoice(distribution) // this uses the global random
		if len(groupsByClass[class]) > 0 {
			var exampleIndex int
			exampleIndex, groupsByClass[class] = groupsByClass[class][0], groupsByClass[class][1:] // pop
			index := k % len(batchList)
			batchList[index] = append(batchList[index], exampleIndex)
			k++
		}
	}
	return batchList
}
// ForEachBatch divides the dataset into batches, returning the start-end of
// each batch with a callback.
// This function assumes that the dataset has already been shuffled.
//
// NOTE(review): end is capped at datasetSize-1 while start advances in steps
// of batchSize (an exclusive-style bound). If callers treat end as exclusive,
// the very last element is skipped — confirm the intended convention before
// changing anything here.
func ForEachBatch(datasetSize, batchSize int, callback func(start, end int)) {
	for start := 0; start < datasetSize; start += batchSize {
		end := utils.MinInt(start+batchSize, datasetSize-1)
		callback(start, end)
	}
}
// SplitDataset splits the dataset into two parts. Each part consists in a list of indices.
// The split ratio regulates the percentage of the total assigned to `b` so that `a` contains the rest.
// For example a split ratio of 0.20 means that `b` should contain the 20% of the total and `a` the rest 80%.
func SplitDataset(size int, splitRatio mat.Float, seed uint64, class func(i int) string) (a []int, b []int) {
classCount := make(map[string]int)
for i := 0; i < size; i++ {
c := class(i)
classCount[c] = classCount[c] + 1
}
usedClassCount := make(map[string]int)
indices := utils.MakeIndices(size)
rand.ShuffleInPlace(indices, rand.NewLockedRand(seed))
for _, i := range indices {
c := class(i)
usedClassCount[c] = usedClassCount[c] + 1
if usedClassCount[c] <= int(splitRatio*mat.Float(classCount[c])) {
b = append(b, i)
} else {
a = append(a, i)
}
}
return
} | pkg/utils/data/data.go | 0.742235 | 0.410697 | data.go | starcoder |
package output
import (
"github.com/benthosdev/benthos/v4/internal/batch/policy"
"github.com/benthosdev/benthos/v4/internal/component/metrics"
"github.com/benthosdev/benthos/v4/internal/component/output"
"github.com/benthosdev/benthos/v4/internal/docs"
"github.com/benthosdev/benthos/v4/internal/impl/aws/session"
"github.com/benthosdev/benthos/v4/internal/interop"
"github.com/benthosdev/benthos/v4/internal/log"
"github.com/benthosdev/benthos/v4/internal/metadata"
"github.com/benthosdev/benthos/v4/internal/old/output/writer"
)
// init registers the aws_s3 output plugin, its constructor and its
// user-facing documentation with the global output registry.
func init() {
	Constructors[TypeAWSS3] = TypeSpec{
		// The constructor closure adapts newAmazonS3 to the registry signature.
		constructor: fromSimpleConstructor(func(conf Config, mgr interop.Manager, log log.Modular, stats metrics.Type) (output.Streamed, error) {
			return newAmazonS3(TypeAWSS3, conf.AWSS3, mgr, log, stats)
		}),
		Version: "3.36.0",
		Summary: `
Sends message parts as objects to an Amazon S3 bucket. Each object is uploaded
with the path specified with the ` + "`path`" + ` field.`,
		Description: `
In order to have a different path for each object you should use function
interpolations described [here](/docs/configuration/interpolation#bloblang-queries), which are
calculated per message of a batch.
### Metadata
Metadata fields on messages will be sent as headers, in order to mutate these values (or remove them) check out the [metadata docs](/docs/configuration/metadata).
### Tags
The tags field allows you to specify key/value pairs to attach to objects as tags, where the values support
[interpolation functions](/docs/configuration/interpolation#bloblang-queries):
` + "```yaml" + `
output:
  aws_s3:
    bucket: TODO
    path: ${!count("files")}-${!timestamp_unix_nano()}.tar.gz
    tags:
      Key1: Value1
      Timestamp: ${!meta("Timestamp")}
` + "```" + `
### Credentials
By default Benthos will use a shared credentials file when connecting to AWS
services. It's also possible to set them explicitly at the component level,
allowing you to transfer data across accounts. You can find out more
[in this document](/docs/guides/cloud/aws).
### Batching
It's common to want to upload messages to S3 as batched archives, the easiest
way to do this is to batch your messages at the output level and join the batch
of messages with an
` + "[`archive`](/docs/components/processors/archive)" + ` and/or
` + "[`compress`](/docs/components/processors/compress)" + ` processor.
For example, if we wished to upload messages as a .tar.gz archive of documents
we could achieve that with the following config:
` + "```yaml" + `
output:
  aws_s3:
    bucket: TODO
    path: ${!count("files")}-${!timestamp_unix_nano()}.tar.gz
    batching:
      count: 100
      period: 10s
      processors:
        - archive:
            format: tar
        - compress:
            algorithm: gzip
` + "```" + `
Alternatively, if we wished to upload JSON documents as a single large document
containing an array of objects we can do that with:
` + "```yaml" + `
output:
  aws_s3:
    bucket: TODO
    path: ${!count("files")}-${!timestamp_unix_nano()}.json
    batching:
      count: 100
      processors:
        - archive:
            format: json_array
` + "```" + ``,
		Async: true,
		// Per-field documentation; interpolated fields are resolved per message.
		FieldSpecs: docs.FieldSpecs{
			docs.FieldCommon("bucket", "The bucket to upload messages to."),
			docs.FieldCommon(
				"path", "The path of each message to upload.",
				`${!count("files")}-${!timestamp_unix_nano()}.txt`,
				`${!meta("kafka_key")}.json`,
				`${!json("doc.namespace")}/${!json("doc.id")}.json`,
			).IsInterpolated(),
			docs.FieldString(
				"tags", "Key/value pairs to store with the object as tags.",
				map[string]string{
					"Key1":      "Value1",
					"Timestamp": `${!meta("Timestamp")}`,
				},
			).IsInterpolated().Map(),
			docs.FieldCommon("content_type", "The content type to set for each object.").IsInterpolated(),
			docs.FieldAdvanced("content_encoding", "An optional content encoding to set for each object.").IsInterpolated(),
			docs.FieldString("cache_control", "The cache control to set for each object.").Advanced().IsInterpolated(),
			docs.FieldString("content_disposition", "The content disposition to set for each object.").Advanced().IsInterpolated(),
			docs.FieldString("content_language", "The content language to set for each object.").Advanced().IsInterpolated(),
			docs.FieldString("website_redirect_location", "The website redirect location to set for each object.").Advanced().IsInterpolated(),
			docs.FieldCommon("metadata", "Specify criteria for which metadata values are attached to objects as headers.").WithChildren(metadata.ExcludeFilterFields()...),
			docs.FieldAdvanced("storage_class", "The storage class to set for each object.").HasOptions(
				"STANDARD", "REDUCED_REDUNDANCY", "GLACIER", "STANDARD_IA", "ONEZONE_IA", "INTELLIGENT_TIERING", "DEEP_ARCHIVE",
			).IsInterpolated(),
			docs.FieldAdvanced("kms_key_id", "An optional server side encryption key."),
			docs.FieldAdvanced("server_side_encryption", "An optional server side encryption algorithm.").AtVersion("3.63.0"),
			docs.FieldAdvanced("force_path_style_urls", "Forces the client API to use path style URLs, which helps when connecting to custom endpoints."),
			docs.FieldCommon("max_in_flight", "The maximum number of messages to have in flight at a given time. Increase this to improve throughput."),
			docs.FieldAdvanced("timeout", "The maximum period to wait on an upload before abandoning it and reattempting."),
			policy.FieldSpec(),
		}.Merge(session.FieldSpecs()),
		Categories: []Category{
			CategoryServices,
			CategoryAWS,
		},
	}
}
//------------------------------------------------------------------------------
func newAmazonS3(name string, conf writer.AmazonS3Config, mgr interop.Manager, log log.Modular, stats metrics.Type) (output.Streamed, error) {
sthree, err := writer.NewAmazonS3V2(conf, mgr, log, stats)
if err != nil {
return nil, err
}
w, err := NewAsyncWriter(name, conf.MaxInFlight, sthree, log, stats)
if err != nil {
return nil, err
}
return NewBatcherFromConfig(conf.Batching, w, mgr, log, stats)
} | internal/old/output/aws_s3.go | 0.767516 | 0.670967 | aws_s3.go | starcoder |
package tui
import (
"image"
)
// Surface defines a surface that can be painted on.
type Surface interface {
	// SetCell writes rune ch with style s to the cell at (x, y).
	SetCell(x, y int, ch rune, s Style)
	// SetCursor moves the text cursor to (x, y).
	SetCursor(x, y int)
	// Begin prepares the surface for a new frame.
	Begin()
	// End flushes any painting made since Begin.
	End()
	// Size returns the surface dimensions in cells.
	Size() image.Point
}
// Painter provides operations to paint on a surface.
type Painter struct {
	Theme *Theme
	// Surface to paint on.
	surface Surface
	// Current brush.
	style Style
	// Transform stack: the sum of these offsets maps local coordinates to
	// surface coordinates (see mapLocalToWorld).
	transforms []image.Point
	// mask, when non-zero, restricts DrawRune to coordinates inside it.
	mask image.Rectangle
}
// NewPainter returns a Painter that draws on s using theme p, with the
// theme's "normal" style selected as the initial brush.
func NewPainter(s Surface, p *Theme) *Painter {
	painter := Painter{
		Theme:   p,
		surface: s,
		style:   p.Style("normal"),
	}
	return &painter
}
// Translate pushes a new translation transform to the stack; subsequent
// drawing is offset by (x, y) until a matching Restore.
func (p *Painter) Translate(x, y int) {
	p.transforms = append(p.transforms, image.Point{x, y})
}
// Restore pops the most recent translation from the transform stack. It is a
// no-op when the stack is empty.
func (p *Painter) Restore() {
	if n := len(p.transforms); n > 0 {
		p.transforms = p.transforms[:n-1]
	}
}
// Begin prepares the surface for painting.
func (p *Painter) Begin() {
	p.surface.Begin()
}

// End finalizes any painting that has been made.
func (p *Painter) End() {
	p.surface.End()
}

// Repaint clears the surface, resizes w to fill it, draws the scene and
// flushes the result.
func (p *Painter) Repaint(w Widget) {
	p.Begin()
	w.Resize(p.surface.Size())
	w.Draw(p)
	p.End()
}
// DrawCursor places the text cursor at (x, y), mapped through the current
// transform stack.
func (p *Painter) DrawCursor(x, y int) {
	wp := p.mapLocalToWorld(image.Point{x, y})
	p.surface.SetCursor(wp.X, wp.Y)
}
// DrawRune paints a rune at the given local coordinate using the current
// style. The mask, when set, is tested against the local (pre-transform)
// coordinates.
func (p *Painter) DrawRune(x, y int, r rune) {
	// If a mask is set, only draw if the mask contains the coordinate.
	// NOTE(review): both Min and Max are treated as inclusive here, unlike
	// the usual image.Rectangle convention where Max is exclusive — confirm
	// this is intentional before changing it.
	if p.mask != image.ZR {
		if (x < p.mask.Min.X) || (x > p.mask.Max.X) ||
			(y < p.mask.Min.Y) || (y > p.mask.Max.Y) {
			return
		}
	}
	wp := p.mapLocalToWorld(image.Point{x, y})
	p.surface.SetCell(wp.X, wp.Y, r, p.style)
}
// DrawText paints a string starting at the given coordinate, advancing x by
// the printed width of each rune (wide runes occupy several cells).
func (p *Painter) DrawText(x, y int, text string) {
	for _, r := range text {
		p.DrawRune(x, y, r)
		x += runeWidth(r)
	}
}
// DrawHorizontalLine paints a horizontal line on row y from x1 (inclusive)
// to x2 (exclusive).
func (p *Painter) DrawHorizontalLine(x1, x2, y int) {
	for x := x1; x < x2; x++ {
		p.DrawRune(x, y, '─')
	}
}

// DrawVerticalLine paints a vertical line on column x from y1 (inclusive)
// to y2 (exclusive).
func (p *Painter) DrawVerticalLine(x, y1, y2 int) {
	for y := y1; y < y2; y++ {
		p.DrawRune(x, y, '│')
	}
}
// DrawRect paints the outline of a w×h rectangle with its top-left corner at
// (x, y), using box-drawing runes for the corners and edges. The interior is
// left untouched.
func (p *Painter) DrawRect(x, y, w, h int) {
	for j := 0; j < h; j++ {
		for i := 0; i < w; i++ {
			m := i + x
			n := j + y
			// Corner cases must be tested before the edge cases below,
			// since a corner cell also matches the edge conditions.
			switch {
			case i == 0 && j == 0:
				p.DrawRune(m, n, '┌')
			case i == w-1 && j == 0:
				p.DrawRune(m, n, '┐')
			case i == 0 && j == h-1:
				p.DrawRune(m, n, '└')
			case i == w-1 && j == h-1:
				p.DrawRune(m, n, '┘')
			case i == 0 || i == w-1:
				p.DrawRune(m, n, '│')
			case j == 0 || j == h-1:
				p.DrawRune(m, n, '─')
			}
		}
	}
}
// FillRect paints a w×h rectangle of spaces with its top-left corner at
// (x, y), effectively clearing the area with the current style.
func (p *Painter) FillRect(x, y, w, h int) {
	for row := y; row < y+h; row++ {
		for col := x; col < x+w; col++ {
			p.DrawRune(col, row, ' ')
		}
	}
}
// SetStyle makes s the brush used by subsequent drawing operations.
func (p *Painter) SetStyle(s Style) {
	p.style = s
}

// RestoreStyle resets the brush to the theme's "normal" style.
func (p *Painter) RestoreStyle() {
	p.SetStyle(p.Theme.Style("normal"))
}

// WithStyle runs fn with the named theme style selected as the brush and
// restores the "normal" style afterwards.
func (p *Painter) WithStyle(n string, fn func(*Painter)) {
	p.SetStyle(p.Theme.Style(n))
	fn(p)
	p.RestoreStyle()
}

// WithMask restricts subsequent DrawRune calls to r (compared in local,
// pre-transform coordinates) and returns p for chaining.
func (p *Painter) WithMask(r image.Rectangle) *Painter {
	p.mask = r
	return p
}
func (p *Painter) mapLocalToWorld(point image.Point) image.Point {
var offset image.Point
for _, s := range p.transforms {
offset = offset.Add(s)
}
return point.Add(offset)
} | vendor/github.com/marcusolsson/tui-go/painter.go | 0.767254 | 0.459137 | painter.go | starcoder |
// Package errors provides a simple API to create and compare errors.
// It captures the stacktrace when an error is created or wrapped, which can be then be inspected for debugging purposes.
// This package, compiled with the "debug" build tag is only meant to ease development and should not be used otherwise.
package errors
import (
baseErrors "errors"
"runtime"
"github.com/genjidb/genji/internal/stringutil"
)
// New takes a string and returns a wrapped error that allows to inspect the
// stacktrace captured when this function was called.
func New(s string) error {
	err := _new(s)
	if len(err.stack) > 1 {
		// Truncate the call to _new, so the trace starts at New's caller.
		err.stack = err.stack[1:]
	}
	return err
}
// Errorf creates an error that includes the stacktrace, out of a format
// string. If %w is used to format an error, it will only wrap it by
// concatenation: the wrapped error won't be accessible directly and thus
// cannot be accessed through the Is or As functions from the standard error
// package.
func Errorf(format string, a ...interface{}) error {
	return errorf(format, a...)
}
// Is performs a value comparison between err and target, looking through a
// single *Error wrapper on either side when one is present.
func Is(err, target error) bool {
	if err == target {
		return true
	}
	e, errWrapped := err.(*Error)
	t, targetWrapped := target.(*Error)
	switch {
	case errWrapped && targetWrapped:
		return e.Err == t.Err
	case errWrapped:
		return e.Err == target
	case targetWrapped:
		return err == t.Err
	}
	return false
}
// Unwrap returns the underlying error when err is an *Error; otherwise it
// returns err itself (including nil).
func Unwrap(err error) error {
	switch e := err.(type) {
	case nil:
		return nil
	case *Error:
		return e.Err
	default:
		return err
	}
}
// Wrap captures the current stacktrace and attaches it to e, returning the
// resulting *Error. A nil input yields nil, and a value that is already an
// *Error is returned unchanged (see wrap).
func Wrap(e error) error {
	if e == nil {
		return nil
	}
	return wrap(e, 1)
}
// MaxStackDepth is the maximum number of stack frames captured on any error.
var MaxStackDepth = 32
// _new creates a standard-library error from s and wraps it together with
// the stack of _new's caller.
func _new(s string) *Error {
	return wrap(baseErrors.New(s), 1)
}
// wrap makes an Error from the given value. If that value is already an
// error then it will be used directly, if not, it will be passed to
// stringutil.Errorf("%v"). The skip parameter indicates how far up the stack
// to start the stacktrace. 0 is from the current call, 1 from its caller, etc.
func wrap(e interface{}, skip int) *Error {
	if e == nil {
		return nil
	}
	var err error
	switch e := e.(type) {
	case *Error:
		// Already wrapped: keep the original stacktrace.
		return e
	case error:
		err = e
	default:
		err = stringutil.Errorf("%v", e)
	}
	// 2+skip drops runtime.Callers itself and this wrap frame from the trace.
	stack := make([]uintptr, MaxStackDepth)
	length := runtime.Callers(2+skip, stack[:])
	return &Error{
		Err: err,
		stack: stack[:length],
	}
}
func errorf(format string, a ...interface{}) *Error {
return wrap(stringutil.Errorf(format, a...), 1)
} | internal/errors/errors_debug.go | 0.660172 | 0.437223 | errors_debug.go | starcoder |
package output
import (
"github.com/Jeffail/benthos/v3/internal/component/output"
"github.com/Jeffail/benthos/v3/internal/docs"
"github.com/Jeffail/benthos/v3/lib/log"
"github.com/Jeffail/benthos/v3/lib/metrics"
"github.com/Jeffail/benthos/v3/lib/output/writer"
"github.com/Jeffail/benthos/v3/lib/types"
"github.com/Jeffail/benthos/v3/lib/util/tls"
)
//------------------------------------------------------------------------------
// init registers the amqp_0_9 output plugin, its constructor and its
// user-facing documentation with the global output registry.
func init() {
	Constructors[TypeAMQP09] = TypeSpec{
		constructor: fromSimpleConstructor(NewAMQP09),
		Summary: `
Sends messages to an AMQP (0.91) exchange. AMQP is a messaging protocol used by
various message brokers, including RabbitMQ.`,
		Description: `
The metadata from each message are delivered as headers.
It's possible for this output type to create the target exchange by setting
` + "`exchange_declare.enabled` to `true`" + `, if the exchange already exists
then the declaration passively verifies that the settings match.
TLS is automatic when connecting to an ` + "`amqps`" + ` URL, but custom
settings can be enabled in the ` + "`tls`" + ` section.
The fields 'key' and 'type' can be dynamically set using function interpolations described
[here](/docs/configuration/interpolation#bloblang-queries).`,
		Async: true,
		// Per-field documentation; interpolated fields are resolved per message.
		FieldSpecs: docs.FieldSpecs{
			docs.FieldCommon("url",
				"A URL to connect to.",
				"amqp://localhost:5672/",
				"amqps://guest:guest@localhost:5672/",
			),
			docs.FieldCommon("exchange", "An AMQP exchange to publish to."),
			docs.FieldAdvanced("exchange_declare", "Optionally declare the target exchange (passive).").WithChildren(
				docs.FieldCommon("enabled", "Whether to declare the exchange."),
				docs.FieldCommon("type", "The type of the exchange.").HasOptions(
					"direct", "fanout", "topic", "x-custom",
				),
				docs.FieldCommon("durable", "Whether the exchange should be durable."),
			),
			docs.FieldCommon("key", "The binding key to set for each message.").IsInterpolated(),
			docs.FieldCommon("type", "The type property to set for each message.").IsInterpolated(),
			docs.FieldAdvanced("content_type", "The content type attribute to set for each message.").IsInterpolated(),
			docs.FieldAdvanced("content_encoding", "The content encoding attribute to set for each message.").IsInterpolated(),
			docs.FieldCommon("metadata", "Specify criteria for which metadata values are attached to messages as headers.").WithChildren(output.MetadataFields()...),
			docs.FieldCommon("max_in_flight", "The maximum number of messages to have in flight at a given time. Increase this to improve throughput."),
			docs.FieldAdvanced("persistent", "Whether message delivery should be persistent (transient by default)."),
			docs.FieldAdvanced("mandatory", "Whether to set the mandatory flag on published messages. When set if a published message is routed to zero queues it is returned."),
			docs.FieldAdvanced("immediate", "Whether to set the immediate flag on published messages. When set if there are no ready consumers of a queue then the message is dropped instead of waiting."),
			tls.FieldSpec(),
		},
		Categories: []Category{
			CategoryServices,
		},
	}
}
//------------------------------------------------------------------------------
// NewAMQP09 creates a new AMQP output type: it builds the underlying AMQP
// writer and wraps it for asynchronous, single-payload delivery.
func NewAMQP09(conf Config, mgr types.Manager, log log.Modular, stats metrics.Type) (Type, error) {
	amqpWriter, err := writer.NewAMQP(conf.AMQP09, log, stats)
	if err != nil {
		return nil, err
	}
	asyncWriter, err := NewAsyncWriter(
		TypeAMQP09, conf.AMQP09.MaxInFlight, amqpWriter, log, stats,
	)
	if err != nil {
		return nil, err
	}
	return OnlySinglePayloads(asyncWriter), nil
}
//------------------------------------------------------------------------------
/*
 * Implementation of the Secure Hashing Algorithm (SHA-256)
*
* Generates a 256 bit message digest. It should be impossible to come
* come up with two messages that hash to the same value ("collision free").
*
* For use with byte-oriented messages only.
*/
package amcl
// SHA256 is the digest length in bytes.
const SHA256 int = 32

// Initial hash values H0..H7 (FIPS 180-4: the fractional parts of the square
// roots of the first eight primes).
const hash256_H0 uint32 = 0x6A09E667
const hash256_H1 uint32 = 0xBB67AE85
const hash256_H2 uint32 = 0x3C6EF372
const hash256_H3 uint32 = 0xA54FF53A
const hash256_H4 uint32 = 0x510E527F
const hash256_H5 uint32 = 0x9B05688C
const hash256_H6 uint32 = 0x1F83D9AB
const hash256_H7 uint32 = 0x5BE0CD19

// hash256_K holds the 64 SHA-256 round constants (FIPS 180-4: the fractional
// parts of the cube roots of the first 64 primes).
var hash256_K = [...]uint32{
	0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5,
	0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174,
	0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc, 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da,
	0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967,
	0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13, 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85,
	0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3, 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070,
	0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3,
	0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2}
// HASH256 holds the running state of a SHA-256 computation.
type HASH256 struct {
	// length is the absorbed message bit-count as a 64-bit value split into
	// two 32-bit words: length[0] is the low word, length[1] the high word
	// (carried into by Process on overflow).
	length [2]uint32
	// h is the current chaining value (the digest after finalisation).
	h [8]uint32
	// w is the 64-word message schedule; the first 16 words buffer the
	// incoming block, the rest are expanded by transform.
	w [64]uint32
}
/* functions */
// hash256_S rotates x right by n bits.
func hash256_S(n uint32, x uint32) uint32 {
	return x>>n | x<<(32-n)
}

// hash256_R logically shifts x right by n bits.
func hash256_R(n uint32, x uint32) uint32 {
	return x >> n
}

// hash256_Ch chooses, per bit, y where x is set and z where x is clear.
func hash256_Ch(x, y, z uint32) uint32 {
	return (x & y) ^ (^x & z)
}

// hash256_Maj returns the bitwise majority of x, y and z.
func hash256_Maj(x, y, z uint32) uint32 {
	return (x & y) ^ (x & z) ^ (y & z)
}

// hash256_Sig0 is the Σ0 compression function of FIPS 180-4.
func hash256_Sig0(x uint32) uint32 {
	return hash256_S(2, x) ^ hash256_S(13, x) ^ hash256_S(22, x)
}

// hash256_Sig1 is the Σ1 compression function of FIPS 180-4.
func hash256_Sig1(x uint32) uint32 {
	return hash256_S(6, x) ^ hash256_S(11, x) ^ hash256_S(25, x)
}

// hash256_theta0 is the σ0 message-schedule function of FIPS 180-4.
func hash256_theta0(x uint32) uint32 {
	return hash256_S(7, x) ^ hash256_S(18, x) ^ hash256_R(3, x)
}

// hash256_theta1 is the σ1 message-schedule function of FIPS 180-4.
func hash256_theta1(x uint32) uint32 {
	return hash256_S(17, x) ^ hash256_S(19, x) ^ hash256_R(10, x)
}
// transform runs one SHA-256 compression over the 512-bit block currently
// buffered in H.w, folding the result into the running hash H.h.
func (H *HASH256) transform() { /* basic transformation step */
	// Expand the 16 buffered words into the full 64-word message schedule.
	for j := 16; j < 64; j++ {
		H.w[j] = hash256_theta1(H.w[j-2]) + H.w[j-7] + hash256_theta0(H.w[j-15]) + H.w[j-16]
	}
	// Load the working variables from the chaining value.
	a := H.h[0]
	b := H.h[1]
	c := H.h[2]
	d := H.h[3]
	e := H.h[4]
	f := H.h[5]
	g := H.h[6]
	hh := H.h[7]
	for j := 0; j < 64; j++ { /* 64 times - mush it up */
		t1 := hh + hash256_Sig1(e) + hash256_Ch(e, f, g) + hash256_K[j] + H.w[j]
		t2 := hash256_Sig0(a) + hash256_Maj(a, b, c)
		hh = g
		g = f
		f = e
		e = d + t1
		d = c
		c = b
		b = a
		a = t1 + t2
	}
	// Add the compressed chunk back into the chaining value.
	H.h[0] += a
	H.h[1] += b
	H.h[2] += c
	H.h[3] += d
	H.h[4] += e
	H.h[5] += f
	H.h[6] += g
	H.h[7] += hh
}
// Init resets the state to the SHA-256 initialisation vector, discarding any
// data absorbed so far.
func (H *HASH256) Init() {
	H.w = [64]uint32{}
	H.length[0] = 0
	H.length[1] = 0
	H.h = [8]uint32{
		hash256_H0, hash256_H1, hash256_H2, hash256_H3,
		hash256_H4, hash256_H5, hash256_H6, hash256_H7,
	}
}
// NewHASH256 allocates and initialises a fresh SHA-256 hasher.
func NewHASH256() *HASH256 {
	h := &HASH256{}
	h.Init()
	return h
}
// Process absorbs one message byte, updating the running bit count and
// compressing whenever a full 512-bit block has been collected.
func (H *HASH256) Process(byt byte) {
	// Pack the byte big-endian into the current schedule word.
	cnt := (H.length[0] / 32) % 16
	H.w[cnt] <<= 8
	H.w[cnt] |= uint32(byt & 0xFF)
	H.length[0] += 8
	if H.length[0] == 0 {
		// The low word wrapped around; carry into the high word. (The
		// original also re-assigned length[0] = 0 here — a dead store,
		// since the wrap already left it at zero.)
		H.length[1]++
	}
	if (H.length[0] % 512) == 0 {
		H.transform()
	}
}
// Process_array absorbs every byte of b into the hash state, in order.
func (H *HASH256) Process_array(b []byte) {
	for _, x := range b {
		H.Process(x)
	}
}
// Process_num absorbs a 32-bit integer in big-endian byte order.
func (H *HASH256) Process_num(n int32) {
	for shift := 24; shift >= 0; shift -= 8 {
		H.Process(byte((n >> uint(shift)) & 0xff))
	}
}
// Hash pads the absorbed message, finalises the computation and returns the
// 32-byte SHA-256 digest. The state is re-initialised afterwards, so the
// hasher can be reused for a new message.
func (H *HASH256) Hash() []byte {
	var digest [32]byte
	// Save the message length before padding alters the counters.
	len0 := H.length[0]
	len1 := H.length[1]
	// Append the 0x80 terminator, then zero-pad until only the final 64-bit
	// length field remains in the block.
	H.Process(0x80)
	for (H.length[0] % 512) != 448 {
		H.Process(0)
	}
	// The last two schedule words carry the original bit-length, big-endian.
	H.w[14] = len1
	H.w[15] = len0
	H.transform()
	// Serialize the chaining value big-endian into the digest.
	for i := 0; i < 32; i++ { /* convert to bytes */
		digest[i] = byte((H.h[i/4] >> uint(8*(3-i%4))) & 0xff)
	}
	H.Init()
	return digest[0:32]
}
/* test program: should produce digest */
//248d6a61 d20638b8 e5c02693 0c3e6039 a33ce459 64ff2167 f6ecedd4 19db06c1
/*
func main() {
test := []byte("abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq")
sh:=NewHASH256()
for i:=0;i<len(test);i++ {
sh.Process(test[i])
}
digest:=sh.Hash()
for i:=0;i<32;i++ {fmt.Printf("%02x",digest[i])}
} */ | exchanger/vendor/github.com/hyperledger/fabric-amcl/amcl/HASH256.go | 0.629775 | 0.484075 | HASH256.go | starcoder |
package iterator
import (
"github.com/syndtr/goleveldb/leveldb/util"
)
// BasicArray is the interface that wraps basic Len and Search method.
type BasicArray interface {
	// Len returns length of the array.
	Len() int
	// Search finds smallest index that points to a key that is greater
	// than or equal to the given key.
	Search(key []byte) int
}
// Array is the interface that wraps BasicArray and basic Index method.
type Array interface {
	BasicArray
	// Index returns key/value pair with index of i.
	Index(i int) (key, value []byte)
}
// ArrayIndexer is the interface that wraps BasicArray and basic Get method.
type ArrayIndexer interface {
	BasicArray
	// Get returns a new data iterator with index of i.
	Get(i int) Iterator
}
// basicArrayIterator cursors over a BasicArray. pos ranges over
// [-1, array.Len()]: -1 means "before first" and Len() "after last".
type basicArrayIterator struct {
	util.BasicReleaser
	array BasicArray
	pos   int   // current cursor position
	err   error // sticky error set on use-after-release
}
// Valid reports whether the cursor currently points at an element of the
// array and the iterator has not been released.
func (i *basicArrayIterator) Valid() bool {
	if i.Released() {
		return false
	}
	return 0 <= i.pos && i.pos < i.array.Len()
}
// First moves the cursor to the first element and reports success.
// On an empty array the cursor is parked before the start (pos = -1).
func (i *basicArrayIterator) First() bool {
	switch {
	case i.Released():
		i.err = ErrIterReleased
		return false
	case i.array.Len() == 0:
		i.pos = -1
		return false
	}
	i.pos = 0
	return true
}
// Last moves the cursor to the last element and reports success.
// On an empty array the cursor is set to 0 (past the end for Prev).
func (i *basicArrayIterator) Last() bool {
	if i.Released() {
		i.err = ErrIterReleased
		return false
	}
	if n := i.array.Len(); n > 0 {
		i.pos = n - 1
		return true
	}
	i.pos = 0
	return false
}
// Seek positions the cursor at the smallest index whose key is >= key,
// reporting whether such an element exists.
func (i *basicArrayIterator) Seek(key []byte) bool {
	if i.Released() {
		i.err = ErrIterReleased
		return false
	}
	n := i.array.Len()
	if n == 0 {
		i.pos = 0
		return false
	}
	i.pos = i.array.Search(key)
	return i.pos < n
}
// Next advances the cursor one position, reporting whether it still
// points at an element; past the end it is clamped to Len().
func (i *basicArrayIterator) Next() bool {
	if i.Released() {
		i.err = ErrIterReleased
		return false
	}
	if i.pos++; i.pos < i.array.Len() {
		return true
	}
	i.pos = i.array.Len()
	return false
}
// Prev moves the cursor one position back, reporting whether it still
// points at an element; before the start it is clamped to -1.
func (i *basicArrayIterator) Prev() bool {
	if i.Released() {
		i.err = ErrIterReleased
		return false
	}
	if i.pos--; i.pos >= 0 {
		return true
	}
	i.pos = -1
	return false
}
// Error returns the error recorded by a use-after-release, if any.
func (i *basicArrayIterator) Error() error { return i.err }
// arrayIterator adds cached key/value access on top of
// basicArrayIterator. pos records the index key/value were last loaded
// for; it trails basicArrayIterator.pos until updateKV resynchronises.
type arrayIterator struct {
	basicArrayIterator
	array      Array
	pos        int
	key, value []byte
}
// updateKV refreshes the cached key/value pair when the cursor moved
// since the last refresh; out-of-range positions clear the cache.
func (i *arrayIterator) updateKV() {
	if i.pos == i.basicArrayIterator.pos {
		return
	}
	i.pos = i.basicArrayIterator.pos
	if !i.Valid() {
		i.key, i.value = nil, nil
		return
	}
	i.key, i.value = i.array.Index(i.pos)
}
// Key returns the key at the current position (nil when invalid),
// refreshing the cache first.
func (i *arrayIterator) Key() []byte {
	i.updateKV()
	return i.key
}
// Value returns the value at the current position (nil when invalid),
// refreshing the cache first.
func (i *arrayIterator) Value() []byte {
	i.updateKV()
	return i.value
}
// arrayIteratorIndexer adds Get on top of basicArrayIterator, yielding a
// sub-iterator for the current cursor position.
type arrayIteratorIndexer struct {
	basicArrayIterator
	array ArrayIndexer
}
// Get returns the data iterator for the current position, or nil when
// the cursor is out of range or the iterator was released.
func (i *arrayIteratorIndexer) Get() Iterator {
	if !i.Valid() {
		return nil
	}
	return i.array.Get(i.basicArrayIterator.pos)
}
// NewArrayIterator returns an iterator from the given array.
// The cursor starts before the first element (pos = -1).
func NewArrayIterator(array Array) Iterator {
	return &arrayIterator{
		basicArrayIterator: basicArrayIterator{array: array, pos: -1},
		array:              array,
		pos:                -1,
	}
}
// NewArrayIndexer returns an index iterator from the given array.
// The cursor starts before the first element (pos = -1).
func NewArrayIndexer(array ArrayIndexer) IteratorIndexer {
	return &arrayIteratorIndexer{
		basicArrayIterator: basicArrayIterator{array: array, pos: -1},
		array:              array,
	}
}
Cylinder Pattern and Core Box
*/
//-----------------------------------------------------------------------------
package main
import (
"math"
"github.com/deadsy/sdfx/obj"
"github.com/deadsy/sdfx/sdf"
)
//-----------------------------------------------------------------------------
// Key pattern dimensions (presumably inches, per the fractional values —
// TODO confirm against the drawing).
const cylinderBaseOffset = 3.0 / 16.0 // offset of the base from the pattern origin
const cylinderBaseThickness = 0.25    // thickness of the mounting base
const cylinderWaistLength = 0.75      // length of the narrow waist section
const cylinderBodyLength = 1.75       // length of the main body section
const cylinderCoreLength = 4.0 + (7.0 / 16.0) // overall length of the core print
const cylinderInnerRadius = 1.0 * 0.5 // bore radius (1.0 diameter)
const cylinderWaistRadius = 1.5 * 0.5 // waist radius (1.5 diameter)
const cylinderBodyRadius = 2.0 * 0.5  // body radius (2.0 diameter)
// cylinderCoreBox is intended to build the core box for the cylinder.
// NOTE(review): currently a stub returning nil — confirm whether callers
// handle a nil SDF3 before relying on this.
func cylinderCoreBox() sdf.SDF3 {
	return nil
}
// cylinderBase builds the rectangular mounting base of the pattern as a
// drafted, rounded solid: two mirrored truncated pyramids joined, cut on
// the parting plane, and rotated to stand along the pattern axis.
func cylinderBase() sdf.SDF3 {
	const draft = 3.0 // draft angle in degrees, for mold release
	const x = cylinderBodyRadius * 2.0
	const y = cylinderBaseThickness * 2.0
	const z = cylinderBodyRadius
	const round = 0.125 // edge rounding radius
	k := obj.TruncRectPyramidParms{
		Size: sdf.V3{x, y, z},
		BaseAngle: sdf.DtoR(90 - draft),
		BaseRadius: round,
		RoundRadius: round * 1.5,
	}
	base0, _ := obj.TruncRectPyramid3D(&k)
	// Mirror to get the second, symmetric half and union the two.
	base1 := sdf.Transform3D(base0, sdf.MirrorXY())
	base := sdf.Union3D(base0, base1)
	// Keep only the half on one side of the parting plane.
	base = sdf.Cut3D(base, sdf.V3{0, 0, 0}, sdf.V3{0, 1, 0})
	return sdf.Transform3D(base, sdf.RotateX(sdf.DtoR(90)))
}
// cylinderPattern builds the cylinder casting pattern: a revolved,
// drafted body profile (waist plus main body) with the mounting base
// attached, and a revolved core print that is unioned on when core is
// true or subtracted when false. split cuts the result in half on the
// parting plane for a split pattern.
func cylinderPattern(core, split bool) sdf.SDF3 {
	draft := math.Tan(sdf.DtoR(3.0)) // radial rise per unit length for 3 degrees of draft
	const smooth0 = 0.125
	const smooth1 = smooth0 * 0.5
	const smoothN = 5 // number of facets used on smoothed corners
	const l0 = cylinderBaseOffset + cylinderBaseThickness + cylinderWaistLength
	const l1 = cylinderBodyLength
	const l2 = cylinderCoreLength
	const r0 = cylinderInnerRadius
	const r1 = cylinderWaistRadius
	const r2 = cylinderBodyRadius
	// cylinder body: build the half profile and revolve it. Add() calls
	// with Rel() are relative moves; order matters.
	p := sdf.NewPolygon()
	p.Add(0, 0)
	p.Add(r1, draft*r1).Rel().Smooth(smooth1, smoothN)
	p.Add(0, l0).Rel().Smooth(smooth0, smoothN)
	p.Add(r2-r1, draft*(r2-r1)).Rel().Smooth(smooth0, smoothN)
	p.Add(0, l1).Rel().Smooth(smooth1, smoothN)
	p.Add(-r2, draft*r2).Rel()
	body := sdf.Revolve3D(sdf.Polygon2D(p.Vertices()))
	// cylinder base
	base := cylinderBase()
	base = sdf.Transform3D(base, sdf.Translate3d(sdf.V3{0, 0, cylinderBaseOffset}))
	// add the base to the body pattern
	body = sdf.Union3D(body, base)
	// core print: a drafted cylinder extending below the origin.
	p = sdf.NewPolygon()
	p.Add(0, -0.75)
	p.Add(r0, draft*r0).Rel().Smooth(smooth1, smoothN)
	p.Add(0, l2).Rel().Smooth(smooth1, smoothN)
	p.Add(-r0, draft*r0).Rel()
	corePrint := sdf.Revolve3D(sdf.Polygon2D(p.Vertices()))
	var cylinder sdf.SDF3
	if core {
		cylinder = sdf.Union3D(body, corePrint)
	} else {
		cylinder = sdf.Difference3D(body, corePrint)
	}
	if split {
		cylinder = sdf.Cut3D(cylinder, sdf.V3{0, 0, 0}, sdf.V3{0, 1, 0})
	}
	return cylinder
}
//----------------------------------------------------------------------------- | examples/midget/cylinder.go | 0.691393 | 0.40028 | cylinder.go | starcoder |
package element
import (
"github.com/bhollier/ui/pkg/ui/util"
"github.com/faiface/pixel"
)
// Function to initialise an image
// as if it were an SVG icon. Should
// be called if ImageIsSVG is true
func initIcon(e Element, i Image) error {
// If the SVG hasn't been loaded yet
if i.GetSVG() == nil {
// Load the svg
svg, err := util.LoadSVG(
e.GetFS(), i.GetField(), i.GetColor())
if err != nil {
return err
}
// Set the SVG
i.SetSVG(svg)
}
// Get the scale
scale := i.GetScale()
if scale == util.ZeroScaleOption {
if i.GetField()[0] == '#' {
scale = util.Stretch
} else {
scale = util.DefaultScaleOption
}
}
// If the svg has been loaded
if i.GetSVG() != nil {
// Convert the SVG's view box to a pixel rect
viewbox := pixel.R(i.GetSVG().ViewBox.X, i.GetSVG().ViewBox.Y,
i.GetSVG().ViewBox.W, i.GetSVG().ViewBox.H)
// If the view box's size is 0, 0
if viewbox.Size() == pixel.ZV {
// Change it to a 16x16 box
viewbox = pixel.R(
0, 0, 16, 16)
}
scale := i.GetScale()
if scale == util.ZeroScaleOption {
scale = util.DefaultScaleOption
}
// If the element's width isn't known
// and the width should be the content
if e.GetActualWidth() == nil && e.GetRelWidth().MatchContent {
switch scale {
case util.ScaleToFill:
fallthrough
case util.ScaleToFit:
fallthrough
case util.Stretch:
// If the height is knowable
if !e.GetRelHeight().MatchContent {
// If the height is known
if e.GetActualHeight() != nil {
// Calculate the scale factor of the height
scale := *e.GetActualHeight() / viewbox.Size().Y
// Set the width as the image's
// width multiplied by the scale factor
newWidth := viewbox.Size().X * scale
e.SetActualWidth(&newWidth)
}
} else {
// If it isn't knowable, just set the
// width as the width of the image
newWidth := viewbox.Size().X
e.SetActualWidth(&newWidth)
}
default:
// Set the actual width as the size of the image
newWidth := viewbox.Size().X
e.SetActualWidth(&newWidth)
}
}
// If the element's height isn't known
// and the height should be the content
if e.GetActualHeight() == nil && e.GetRelHeight().MatchContent {
switch scale {
case util.ScaleToFill:
fallthrough
case util.ScaleToFit:
fallthrough
case util.Stretch:
// If the width is knowable
if !e.GetRelWidth().MatchContent {
// If the width is known
if e.GetActualWidth() != nil {
// Calculate the scale factor of the width
scale := *e.GetActualWidth() / viewbox.Size().X
// Set the height as the image's
// height multiplied by the scale factor
newHeight := viewbox.Size().Y * scale
e.SetActualHeight(&newHeight)
}
} else {
// If it isn't knowable, just set the
// height as the height of the image
newHeight := viewbox.Size().Y
e.SetActualWidth(&newHeight)
}
default:
// Set the actual height as the size of the image
newHeight := viewbox.Size().Y
e.SetActualWidth(&newHeight)
}
}
// If the image hasn't been created
// and the width and height are known
if i.GetSprite() == nil &&
e.GetActualWidth() != nil &&
e.GetActualHeight() != nil {
// Create a picture from the SVG
pic := util.CreatePictureFromSVG(i.GetSVG(), scale,
*e.GetActualWidth(), *e.GetActualHeight())
// Create a sprite with the picture and set it
i.SetSprite(pixel.NewSprite(pic, pic.Bounds()))
}
}
return nil
} | pkg/ui/element/icon.go | 0.514888 | 0.423279 | icon.go | starcoder |
package runtime
import (
"math"
)
// Unm returns the arithmetic negation of x; the bool is false when x is
// not a number.
func Unm(x Value) (Value, bool) {
	switch x.iface.(type) {
	case int64:
		return IntValue(-x.AsInt()), true
	case float64:
		return FloatValue(-x.AsFloat()), true
	}
	return NilValue, false
}
// Add returns x+y following Lua number semantics (int+int stays int,
// any float operand promotes to float); false when either operand is
// not a number.
func Add(x, y Value) (Value, bool) {
	switch y.iface.(type) {
	case int64:
		switch x.iface.(type) {
		case int64:
			return IntValue(x.AsInt() + y.AsInt()), true
		case float64:
			return FloatValue(x.AsFloat() + float64(y.AsInt())), true
		}
	case float64:
		switch x.iface.(type) {
		case int64:
			return FloatValue(float64(x.AsInt()) + y.AsFloat()), true
		case float64:
			return FloatValue(x.AsFloat() + y.AsFloat()), true
		}
	}
	return NilValue, false
}

// Sub returns x-y with the same promotion rules as Add.
func Sub(x, y Value) (Value, bool) {
	switch y.iface.(type) {
	case int64:
		switch x.iface.(type) {
		case int64:
			return IntValue(x.AsInt() - y.AsInt()), true
		case float64:
			return FloatValue(x.AsFloat() - float64(y.AsInt())), true
		}
	case float64:
		switch x.iface.(type) {
		case int64:
			return FloatValue(float64(x.AsInt()) - y.AsFloat()), true
		case float64:
			return FloatValue(x.AsFloat() - y.AsFloat()), true
		}
	}
	return NilValue, false
}

// Mul returns x*y with the same promotion rules as Add.
func Mul(x, y Value) (Value, bool) {
	switch y.iface.(type) {
	case int64:
		switch x.iface.(type) {
		case int64:
			return IntValue(x.AsInt() * y.AsInt()), true
		case float64:
			return FloatValue(x.AsFloat() * float64(y.AsInt())), true
		}
	case float64:
		switch x.iface.(type) {
		case int64:
			return FloatValue(float64(x.AsInt()) * y.AsFloat()), true
		case float64:
			return FloatValue(x.AsFloat() * y.AsFloat()), true
		}
	}
	return NilValue, false
}

// Div returns x/y. Per Lua '/' semantics the result is always a float,
// even for two integer operands.
func Div(x, y Value) (Value, bool) {
	switch y.iface.(type) {
	case int64:
		switch x.iface.(type) {
		case int64:
			return FloatValue(float64(x.AsInt()) / float64(y.AsInt())), true
		case float64:
			return FloatValue(x.AsFloat() / float64(y.AsInt())), true
		}
	case float64:
		switch x.iface.(type) {
		case int64:
			return FloatValue(float64(x.AsInt()) / y.AsFloat()), true
		case float64:
			return FloatValue(x.AsFloat() / y.AsFloat()), true
		}
	}
	return NilValue, false
}
// floordivInt returns the quotient of x/y rounded toward negative
// infinity (Lua's '//' on integers).
func floordivInt(x, y int64) int64 {
	q, r := x/y, x%y
	// Go truncates toward zero; step the quotient down when the remainder
	// has the opposite sign to the divisor.
	if r != 0 && (r < 0) != (y < 0) {
		return q - 1
	}
	return q
}

// floordivFloat returns the floating-point floor division of x by y.
func floordivFloat(x, y float64) float64 {
	q := x / y
	return math.Floor(q)
}
// Idiv returns the floor division x // y. The bool reports whether both
// operands were numbers; integer division by zero yields an *Error.
func Idiv(x Value, y Value) (Value, bool, *Error) {
	switch y.iface.(type) {
	case int64:
		switch x.iface.(type) {
		case int64:
			ny := y.AsInt()
			if ny == 0 {
				return NilValue, true, NewErrorS("attempt to divide by zero")
			}
			return IntValue(floordivInt(x.AsInt(), ny)), true, nil
		case float64:
			return FloatValue(floordivFloat(x.AsFloat(), float64(y.AsInt()))), true, nil
		}
	case float64:
		switch x.iface.(type) {
		case int64:
			return FloatValue(floordivFloat(float64(x.AsInt()), y.AsFloat())), true, nil
		case float64:
			return FloatValue(floordivFloat(x.AsFloat(), y.AsFloat())), true, nil
		}
	}
	return NilValue, false, nil
}
// modInt returns x mod y with the result taking the sign of the divisor
// (Lua '%' semantics), unlike Go's %.
func modInt(x, y int64) int64 {
	rem := x % y
	if rem == 0 || (rem < 0) == (y < 0) {
		return rem
	}
	return rem + y
}

// modFloat is the floating-point counterpart of modInt.
func modFloat(x, y float64) float64 {
	rem := math.Mod(x, y)
	if rem == 0 || (rem < 0) == (y < 0) {
		return rem
	}
	return rem + y
}
// Mod returns x % y with Lua semantics (result takes the divisor's
// sign). The bool reports whether both operands were numbers; integer
// modulo by zero yields an *Error.
func Mod(x Value, y Value) (Value, bool, *Error) {
	switch y.iface.(type) {
	case int64:
		switch x.iface.(type) {
		case int64:
			ny := y.AsInt()
			if ny == 0 {
				return NilValue, true, NewErrorS("attempt to perform 'n%0'")
			}
			return IntValue(modInt(x.AsInt(), ny)), true, nil
		case float64:
			return FloatValue(modFloat(x.AsFloat(), float64(y.AsInt()))), true, nil
		}
	case float64:
		switch x.iface.(type) {
		case int64:
			return FloatValue(modFloat(float64(x.AsInt()), y.AsFloat())), true, nil
		case float64:
			return FloatValue(modFloat(x.AsFloat(), y.AsFloat())), true, nil
		}
	}
	return NilValue, false, nil
}
// powFloat returns x raised to the power y.
func powFloat(x, y float64) float64 {
	return math.Pow(x, y)
}

// Pow returns x^y. Both operands are converted to float first (Lua '^'
// always produces a float); false when either is not a number.
func Pow(x, y Value) (Value, bool) {
	toFloat := func(v Value) (float64, bool) {
		switch v.iface.(type) {
		case int64:
			return float64(v.AsInt()), true
		case float64:
			return v.AsFloat(), true
		}
		return 0, false
	}
	fx, ok := toFloat(x)
	if !ok {
		return NilValue, false
	}
	fy, ok := toFloat(y)
	if !ok {
		return NilValue, false
	}
	return FloatValue(powFloat(fx, fy)), true
}
func BinaryArithFallback(t *Thread, op string, x, y Value) (Value, *Error) {
res, err, ok := metabin(t, op, x, y)
if ok {
return res, err
}
return NilValue, BinaryArithmeticError(op[2:], x, y)
}
func BinaryArithmeticError(op string, x, y Value) *Error {
var wrongVal Value
switch {
case numberType(y) != NaN:
wrongVal = x
case numberType(x) != NaN:
wrongVal = y
default:
return NewErrorF("attempt to %s a '%s' with a '%s'", op, x.CustomTypeName(), y.CustomTypeName())
}
return NewErrorF("attempt to perform arithmetic on a %s value", wrongVal.CustomTypeName())
}
func UnaryArithFallback(t *Thread, op string, x Value) (Value, *Error) {
res, err, ok := metaun(t, op, x)
if ok {
return res, err
}
return NilValue, UnaryArithmeticError(op[2:], x)
}
func UnaryArithmeticError(op string, x Value) *Error {
return NewErrorF("attempt to %s a '%s'", op, x.CustomTypeName())
} | runtime/arith.go | 0.713831 | 0.571169 | arith.go | starcoder |
package hashtree
import (
"crypto/sha256"
"hash"
)
const (
	//LeafBlockSize is the max size in bytes
	//of a data block on the leaf of a hash tree.
	LeafBlockSize = 1024
)
// fileDigest represents the partial evaluation of a file hash.
type fileDigest struct {
	len int64 // processed length
	leaf hash.Hash // a hash, used for hashing leaf nodes
	leafBlockSize int64 // size of base block in bytes
	tree CopyableHashTree // the digest used for inner and root nodes
}
// fileDigestSample wraps a HashTree so read-only helpers can be exposed
// without a usable Write or Sum (both panic on the sample).
type fileDigestSample struct {
	HashTree
}
//I is a sample of the standard HashTree, to make some methods accessible
//without creating a new HashTree. Do not use its write or sum functions.
var I = fileDigestSample{NewFile()}
// Create the standard file tree hash using leaf blocks of LeafBlockSize (1kB)
// and "crypto/sha256", and inner hash using sha256 (244's IHV) without padding.
func NewFile() HashTree {
	return NewFile2(LeafBlockSize, sha256.New(), NewTree2(NoPad32bytes, ht_sha256block))
}
// Create any tree hash using leaf blocks of size and leaf hash,
// and inner hash using tree hash; the tree structure is internal to the tree hash.
func NewFile2(leafBlockSize int64, leaf hash.Hash, tree CopyableHashTree) HashTree {
	d := new(fileDigest)
	d.len = 0 // no bytes processed yet
	d.leafBlockSize = leafBlockSize
	d.leaf = leaf
	d.tree = tree
	return d
}
// Nodes returns the number of leaf nodes a stream of len bytes occupies:
// ceil(len/leafBlockSize), with an empty stream still occupying one node.
func (d *fileDigest) Nodes(len int64) Nodes {
	if len == 0 {
		return 1
	}
	return Nodes((len-1)/d.leafBlockSize) + 1
}
// SetInnerHashListener forwards the listener to the inner tree digest.
func (d *fileDigest) SetInnerHashListener(l func(level Level, index Nodes, hash, left, right *H256)) {
	d.tree.SetInnerHashListener(l)
}
// Size returns the digest size in bytes of the inner tree hash.
func (d *fileDigest) Size() int { return d.tree.Size() }
// BlockSize returns the leaf block size in bytes as an int.
func (d *fileDigest) BlockSize() int { return int(d.leafBlockSize) }
// BlockSizeBytes returns the leaf block size in bytes.
func (d *fileDigest) BlockSizeBytes() int64 { return d.leafBlockSize }
// Reset returns the digest to its initial empty state.
func (d *fileDigest) Reset() {
	d.tree.Reset()
	d.leaf.Reset()
	d.len = 0
}
// Write on the sample is forbidden; I exists only for read-only helpers.
func (d *fileDigestSample) Write(p []byte) (int, error) {
	panic("the sample can not be writen to")
}
// Write absorbs p into the digest: bytes fill the current leaf hash and
// each time a full leaf block completes, its sum is pushed into the tree.
// It never fails; the returned count always equals len(p).
func (d *fileDigest) Write(p []byte) (int, error) {
	startLength := int64(len(p))
	// xn is the number of bytes of the current (partial) leaf block that
	// were already absorbed by previous calls.
	xn := d.len % d.leafBlockSize
	for int64(len(p))+xn >= d.leafBlockSize {
		writeLength := d.leafBlockSize - xn
		d.leaf.Write(p[0:writeLength])
		p = p[writeLength:]
		// A leaf block is complete: feed its hash to the tree and restart
		// the leaf hash for the next block.
		d.tree.Write(d.leaf.Sum(nil))
		d.leaf.Reset()
		xn = 0
	}
	// The remainder stays buffered in the leaf hash until its block fills.
	if len(p) > 0 {
		d.leaf.Write(p)
	}
	d.len += startLength
	return int(startLength), nil
}
// Sum on the sample is forbidden; I exists only for read-only helpers.
func (d *fileDigestSample) Sum(in []byte) []byte {
	panic("the sample can not be summed")
}
// Sum appends the root hash of the data written so far to in. A trailing
// partial (or empty) leaf is summed on a copy of the tree, so the caller
// can keep writing and summing afterwards.
func (d *fileDigest) Sum(in []byte) []byte {
	if d.len%d.leafBlockSize != 0 || d.len == 0 {
		// Make a copy of d.tree so that caller can keep writing and summing.
		tree := d.tree.Copy()
		tree.Write(d.leaf.Sum(nil))
		return tree.Sum(in)
	}
	return d.tree.Sum(in)
}
package Euler2D
import (
"fmt"
"math"
"strings"
"github.com/notargets/gocfd/DG2D"
"github.com/notargets/gocfd/utils"
)
// SolutionLimiter applies a solution limiter to elements flagged as
// "troubled" by a modal shock finder. All mutable scratch storage is
// sharded per thread so LimitSolution can run in parallel.
type SolutionLimiter struct {
	limiterType LimiterType
	Element *DG2D.LagrangeElement2D
	Tris *DG2D.Triangulation
	Partitions *PartitionMap
	ShockFinder []*ModeAliasShockFinder // Sharded
	UElement, dUdr, dUds []utils.Matrix // Sharded scratch areas for assembly and testing of solution values
	FS *FreeStream
}
// LimiterType selects which limiter LimitSolution applies.
type LimiterType uint8
const (
	None LimiterType = iota // no limiting
	BarthJesperson // Barth-Jespersen slope limiter
	PerssonC0 // Persson C0 artificial viscosity
)
var (
	// LimiterNames maps user-supplied labels to limiter types.
	// NewLimiterType lower-cases and trims its input before the lookup,
	// so every key here must be lower-case to be reachable. The previous
	// "perssonC0" key (mixed case) and the corrupted "<NAME>" placeholder
	// keys could never match and have been normalized.
	LimiterNames = map[string]LimiterType{
		"barthjesperson":  BarthJesperson,
		"barth jesperson": BarthJesperson,
		"perssonc0":       PerssonC0,
		"persson c0":      PerssonC0,
	}
	// LimiterNamesRev maps limiter types to display names used by Print.
	LimiterNamesRev = map[LimiterType]string{
		BarthJesperson: "BarthJesperson",
		PerssonC0:      "Persson, C0 viscosity",
	}
)
// Print returns the human-readable name of the limiter type, or "None"
// for values with no registered name.
func (lt LimiterType) Print() (txt string) {
	txt = "None"
	if name, ok := LimiterNamesRev[lt]; ok {
		txt = name
	}
	return
}
// NewLimiterType parses a label (surrounding-space and case insensitive)
// into a LimiterType. An empty label means no limiting; an unrecognized
// label panics.
func NewLimiterType(label string) (lt LimiterType) {
	if len(label) == 0 {
		return None
	}
	key := strings.ToLower(strings.TrimSpace(label))
	var ok bool
	if lt, ok = LimiterNames[key]; !ok {
		panic(fmt.Errorf("unable to use limiter named [%s]", key))
	}
	return
}
// NewSolutionLimiter builds a SolutionLimiter of type t with per-thread
// shock finders and scratch matrices sized for the solution element.
// kappa is the shock-indicator width parameter passed to each finder.
func NewSolutionLimiter(t LimiterType, kappa float64, dfr *DG2D.DFR2D, pm *PartitionMap, fs *FreeStream) (bjl *SolutionLimiter) {
	var (
		Np = dfr.SolutionElement.Np
		Nthreads = pm.ParallelDegree
	)
	bjl = &SolutionLimiter{
		limiterType: t,
		Element: dfr.SolutionElement,
		Tris: dfr.Tris,
		ShockFinder: make([]*ModeAliasShockFinder, Nthreads),
		FS: fs,
		Partitions: pm,
		// Sharded working matrices
		UElement: make([]utils.Matrix, Nthreads),
		dUdr: make([]utils.Matrix, Nthreads),
		dUds: make([]utils.Matrix, Nthreads),
	}
	// One shock finder and one set of Np x 1 scratch vectors per thread.
	for np := 0; np < Nthreads; np++ {
		bjl.ShockFinder[np] = NewAliasShockFinder(dfr.SolutionElement, kappa)
		bjl.UElement[np] = utils.NewMatrix(Np, 1)
		bjl.dUdr[np] = utils.NewMatrix(Np, 1)
		bjl.dUds[np] = utils.NewMatrix(Np, 1)
	}
	return
}
// LimitSolution scans every element owned by myThread; wherever the shock
// finder flags the first conserved variable as troubled it limits the
// solution (per limiterType) and zeroes that element's residual. The
// return value is the number of limited elements.
func (bjl *SolutionLimiter) LimitSolution(myThread int, Qall, Residual [][4]utils.Matrix) (points int) {
	var (
		Q = Qall[myThread]
		Np, Kmax = Q[0].Dims()
		UE = bjl.UElement[myThread]
		//FSFar = bjl.FSFar
	)
	if bjl.limiterType == None {
		return
	}
	for k := 0; k < Kmax; k++ {
		// Gather the element's nodal values of the first variable
		// (storage is K-major: index = k + Kmax*i).
		for i := 0; i < Np; i++ {
			ind := k + Kmax*i
			UE.DataP[i] = Q[0].DataP[ind]
		}
		if bjl.ShockFinder[myThread].ElementHasShock(UE.DataP) { // Element has a shock
			switch bjl.limiterType {
			case BarthJesperson:
				bjl.limitScalarFieldBarthJesperson(k, myThread, Qall)
			}
			// Freeze the residual of the limited element for this step.
			for n := 0; n < 4; n++ {
				for i := 0; i < Np; i++ {
					ind := k + Kmax*i
					Residual[myThread][n].DataP[ind] = 0.
				}
			}
			points++
		}
	}
	return
}
// limitScalarFieldBarthJesperson applies a Barth-Jespersen-style limiter
// to all four conserved variables of element k: each field is replaced by
// its mass-weighted element average plus psi times the average gradient,
// where psi <= 1 caps corner extrapolations at the min/max of the
// neighboring element averages.
func (bjl *SolutionLimiter) limitScalarFieldBarthJesperson(k, myThread int, Qall [][4]utils.Matrix) {
	var (
		el = bjl.Element
		Np, Kmax = Qall[myThread][0].Dims()
		Dr, Ds = el.Dr, el.Ds
		MMD = el.MassMatrix.DataP
		UE = bjl.UElement[myThread]
		dUdr, dUds = bjl.dUdr[myThread], bjl.dUds[myThread]
		min, max = math.Min, math.Max
	)
	// getElAvg returns the mass-matrix-weighted average of field f over
	// element kkk (kMx is that partition's element count, for indexing).
	getElAvg := func(f utils.Matrix, kkk, kMx int) (ave float64) {
		var massTotal float64
		for i := 0; i < Np; i++ {
			ind := kkk + kMx*i
			mass := MMD[i+i*Np]
			massTotal += mass
			ave += mass * f.DataP[ind]
		}
		ave /= massTotal
		return
	}
	// psiCalc evaluates the limiter factor for one corner of the element.
	psiCalc := func(corner int, Umin, Umax, Uave, dUdrAve, dUdsAve float64) (psi float64) {
		var (
			del2 float64
		)
		/*
			For each corner of the unit triangle, the vector from center to corner is:
			ri[0] = [ -2/3, -2/3 ]
			ri[1] = [ 4/3, -2/3 ]
			ri[2] = [ -2/3, 4/3 ]
		*/
		switch corner {
		case 0:
			del2 = -2. / 3. * (dUdrAve + dUdsAve)
		case 1:
			del2 = (4./3.)*dUdrAve - (2./3.)*dUdsAve
		case 2:
			del2 = -(2./3.)*dUdrAve + (4./3.)*dUdsAve
		}
		oodel2 := 1. / del2
		// Calculate limiter function Psi
		switch {
		case del2 > 0:
			psi = min(1, oodel2*(Umax-Uave))
		case del2 == 0:
			psi = 1
		case del2 < 0:
			psi = min(1, oodel2*(Umin-Uave))
		}
		return
	}
	for n := 0; n < 4; n++ {
		var (
			U = Qall[myThread][n]
			Uave, Umin, Umax float64
		)
		// Apply limiting procedure
		// Get average and min/max solution value for element and neighbors
		Uave = getElAvg(U, k, Kmax)
		Umin, Umax = Uave, Uave
		// Loop over connected tris to get Umin, Umax
		for ii := 0; ii < 3; ii++ {
			kk := bjl.Tris.EtoE[k][ii]
			if kk != -1 {
				remoteK, remoteKmax, rThread := bjl.Partitions.GetLocalK(kk)
				UU := getElAvg(Qall[rThread][n], remoteK, remoteKmax)
				Umax = max(UU, Umax)
				Umin = min(UU, Umin)
			}
		}
		for i := 0; i < Np; i++ {
			ind := k + Kmax*i
			UE.DataP[i] = U.DataP[ind]
		}
		// Obtain average gradient of this cell
		getAvgDeriv := func(deriv utils.Matrix) (derivAve float64) {
			var (
				massTotal float64
				derivD = deriv.DataP
			)
			for i := 0; i < Np; i++ {
				mass := MMD[i+i*Np]
				massTotal += mass
				derivAve += mass * derivD[i]
			}
			derivAve /= massTotal
			return
		}
		dUdrAve, dUdsAve := getAvgDeriv(Dr.Mul(UE, dUdr)), getAvgDeriv(Ds.Mul(UE, dUds))
		// Form psi as the minimum of all three corners
		var psi float64
		psi = 10000.
		for nn := 0; nn < 3; nn++ {
			psi = min(psi, psiCalc(nn, Umin, Umax, Uave, dUdrAve, dUdsAve))
		}
		// Limit the solution using psi and the average gradient
		for i := 0; i < Np; i++ {
			// Vector from node points to center of element
			dR, dS := bjl.Element.R.DataP[i]-(-1./3), bjl.Element.S.DataP[i]-(-1./3.)
			ind := k + Kmax*i
			U.DataP[ind] = Uave + psi*(dR*dUdrAve+dS*dUdsAve)
		}
	}
}
// ModeAliasShockFinder detects under-resolved (shocked) elements by
// measuring how much energy sits in the highest polynomial mode of the
// solution (Persson-style modal sensor).
type ModeAliasShockFinder struct {
	Element *DG2D.LagrangeElement2D
	Clipper utils.Matrix // Matrix used to clip the topmost mode from the solution polynomial, used in shockfinder
	Np int
	q, qalt utils.Matrix // scratch storage for evaluating the moment
	Kappa float64 // width of the smooth transition band in the indicator
}
// NewAliasShockFinder builds a shock finder for the given element,
// precomputing the mode-clipping matrix V * diag(1,...,1,0) * Vinv.
func NewAliasShockFinder(element *DG2D.LagrangeElement2D, Kappa float64) (sf *ModeAliasShockFinder) {
	var (
		Np = element.Np
	)
	sf = &ModeAliasShockFinder{
		Element: element,
		Np: Np,
		q: utils.NewMatrix(Np, 1),
		qalt: utils.NewMatrix(Np, 1),
		Kappa: Kappa,
	}
	// Diagonal that keeps every mode except the last.
	data := make([]float64, Np)
	for i := 0; i < Np; i++ {
		if i != Np-1 {
			data[i] = 1.
		} else {
			data[i] = 0.
		}
	}
	diag := utils.NewDiagMatrix(Np, data)
	/*
		The "Clipper" matrix drops the last mode from the polynomial and forms an alternative field of values at the node
		points based on a polynomial with one less term. In other words, if we have a polynomial of degree "p", expressed
		as values at Np node points, multiplying the Node point values vector by Clipper produces an alternative version
		of the node values based on truncating the last polynomial mode.
	*/
	sf.Clipper = element.JB2D.V.Mul(diag).Mul(element.JB2D.Vinv)
	return
}
// ElementHasShock reports whether the nodal values q mark the element as
// "troubled". Zhiqiang uses a threshold of sigma < 0.99 for troubled cells.
func (sf *ModeAliasShockFinder) ElementHasShock(q []float64) (i bool) {
	return sf.ShockIndicator(q) < 0.99
}
// ShockIndicator maps the modal energy of q into a smoothness value
// sigma in [0,1]: 1 for smooth fields, 0 for strongly shocked ones, with
// a sinusoidal transition of half-width Kappa around the threshold S0.
func (sf *ModeAliasShockFinder) ShockIndicator(q []float64) (sigma float64) {
	/*
		Original method by Persson, constants chosen to match Zhiqiang, et. al.
	*/
	var (
		Se = math.Log10(sf.moment(q))
		k = float64(sf.Element.N)
		kappa = sf.Kappa
		//C0 = 3.
		//S0 = -C0 * math.Log(k)
		S0 = 4. / math.Pow(k, 4)
		left, right = S0 - kappa, S0 + kappa
		ookappa = 1. / kappa
	)
	switch {
	case Se < left:
		sigma = 1.
	case Se >= left && Se <= right:
		// Smooth half-cosine-like blend across the transition band.
		sigma = 0.5 * (1. - math.Sin(0.5*math.Pi*ookappa*(Se-S0)))
	case Se > right:
		sigma = 0.
	}
	return
}
// moment returns the relative mass-weighted L2 energy of the highest
// solution mode: ||q - clip(q)||^2 / ||q||^2, where clip(q) is q with its
// top polynomial mode removed by the Clipper matrix.
func (sf *ModeAliasShockFinder) moment(q []float64) (m float64) {
	var (
		Np = sf.Np
		U, UClipped = sf.q, sf.qalt
		UD, UClippedD = U.DataP, UClipped.DataP
		MD = sf.Element.MassMatrix.DataP
	)
	copy(sf.q.DataP, q)
	/*
		Evaluate the L2 moment of (q - qalt) over the element, where qalt is the truncated version of q
		Here we don't bother using quadrature, we do a simple sum
	*/
	UClipped = sf.Clipper.Mul(U, UClipped)
	var mNum, mDenom float64
	for i := 0; i < Np; i++ {
		mass := MD[i+i*Np]
		t1 := UD[i] - UClippedD[i]
		mNum += mass * (t1 * t1)
		mDenom += mass * (UD[i] * UD[i])
	}
	m = mNum / mDenom
	return
}
package full
import (
"github.com/OpenWhiteBox/primitives/encoding"
"github.com/OpenWhiteBox/primitives/matrix"
)
// blockAffine is a modification of encoding.BlockAffine that allows non-bijective transformations.
// It represents the map x -> linear*x + constant over GF(2).
type blockAffine struct {
	linear matrix.Matrix
	constant matrix.Row
}
// parseBlockAffine decodes one blockAffine from the front of in and
// returns it with the remaining bytes. Wire layout (matching serialize):
// output height in bytes, row width in bytes, h rows of w bytes, then
// h/8 bytes of the constant.
func parseBlockAffine(in []byte) (*blockAffine, []byte) {
	out := &blockAffine{linear: matrix.Matrix{}}
	// h is the row count in bits; w the row width in bytes.
	h, w := 8*int(in[0]), int(in[1])
	in = in[2:]
	for i := 0; i < h; i++ {
		out.linear = append(out.linear, matrix.Row(in[:w]))
		in = in[w:]
	}
	out.constant = matrix.Row(in[:(h / 8)])
	in = in[(h / 8):]
	return out, in
}
// compose returns the affine map equivalent to applying in first and
// then ba: x -> ba.linear*(in.linear*x + in.constant) + ba.constant.
func (ba *blockAffine) compose(in *blockAffine) *blockAffine {
	lin := ba.linear.Compose(in.linear)
	con := ba.linear.Mul(in.constant).Add(ba.constant)
	return &blockAffine{linear: lin, constant: con}
}
// transform applies the affine map to in, returning linear*in + constant.
func (ba *blockAffine) transform(in []byte) []byte {
	out := ba.linear.Mul(matrix.Row(in)).Add(ba.constant)
	return []byte(out)
}
// BlockAffine converts ba into the exported encoding.BlockAffine form.
func (ba *blockAffine) BlockAffine() encoding.BlockAffine {
	out := encoding.BlockAffine{
		BlockLinear: encoding.NewBlockLinear(ba.linear),
	}
	copy(out.BlockAdditive[:], ba.constant)
	return out
}
// serialize appends the wire form of ba to out, mirroring the layout
// read by parseBlockAffine: height/8, width/8, the rows, the constant.
func (ba *blockAffine) serialize(out *[]byte) {
	h, w := ba.linear.Size()
	*out = append(*out, byte(h/8), byte(w/8))
	for _, row := range ba.linear {
		*out = append(*out, row...)
	}
	*out = append(*out, ba.constant...)
}
// compress computes the AND of adjacent bit pairs of src: output bit i is
// src-bit 2i AND src-bit 2i+1. The result is *added* into dst, so dst is
// expected to be zeroed by the caller.
func compress(dst, src []byte) {
	for i := 0; i < 8*len(dst); i++ {
		// Both bits of pair i live in the same source byte, src[i/4].
		pair := src[i/4] >> uint((2*i)%8)
		dst[i/8] += (pair & (pair >> 1) & 1) << uint(i%8)
	}
}
// Construction is the white-boxed cipher: 41 affine layers whose outputs
// are pairwise-compressed between rounds (see Encrypt).
type Construction [41]*blockAffine
// BlockSize returns the block size of AES. (Necessary to implement cipher.Block.)
func (constr Construction) BlockSize() int { return 16 }
// Encrypt encrypts the first block in src into dst. Dst and src may point at the same memory.
// Each of the first 40 layers applies its affine map, then the front
// 2*cs bytes of the result are AND-compressed down to cs bytes while the
// tail is copied; sizes come from stateSize/compressSize (period-4
// tables defined elsewhere in the package). The final layer is affine only.
func (constr Construction) Encrypt(dst, src []byte) {
	state := src[:16]
	for i, m := range constr[:len(constr)-1] {
		temp := m.transform(state)
		state = make([]byte, stateSize[i%4])
		cs := compressSize[i%4]
		compress(state[:cs], temp[:2*cs])
		copy(state[cs:], temp[2*cs:])
	}
	state = constr[40].transform(state)
	copy(dst[:16], state[:16])
}
// Decrypt is not implemented; calling it is a no-op.
func (constr Construction) Decrypt(_, _ []byte) {}
package shuffle
import (
"fmt"
"math"
"math/rand"
"github.com/spaolacci/murmur3"
)
// SimpleShuffleShard implementation uses simple probabilistic hashing to
// compute shuffle shards. This function takes an existing lattice and
// generates a new sharded lattice for the given indentification and
// required number of endpoints with the sharded endpoints.
func (l *Lattice) SimpleShuffleShard(id []byte, epc int) (*Lattice, error) {
var (
r *rand.Rand
shdSeed int64
shuffled [][]string
eps []string
coords []string
minDim int64
dimVals []string
dimC map[string]int
shard *Lattice
err error
)
// Create a seed a random generator.
shdSeed = int64(murmur3.Sum64WithSeed(id, uint32(l.Seed)))
r = rand.New(rand.NewSource(l.Seed * shdSeed * 42))
// The "chosen" lattice, which will have the sharded endpoints.
shard, err = NewLatticeWithSeed(l.Seed, l.GetDimensionNames())
if err != nil {
return nil, fmt.Errorf(
"shard: unable to create a sharded lattice: %v", err,
)
}
// Shuffle the order of the values in each dimension.
shuffled = [][]string{}
for _, d := range l.GetDimensionNames() {
dimVals = l.GetDimensionValues(d)
r.Shuffle(len(dimVals), func(x, y int) {
dimVals[x], dimVals[y] = dimVals[y], dimVals[x]
})
shuffled = append(shuffled, dimVals)
}
// Get the dimensionality of the lattice.
dimC = l.GetDimensionality()
// One dimensional lattices are a special case. For a one dimensional
// lattice, we select end-points from each cell, since there is no other
// dimension to consider.
if len(dimC) == 1 {
for _, dimVal := range shuffled[0] {
eps, err = l.GetEndpointsForSector([]string{dimVal})
if err != nil {
return nil, err
}
r.Shuffle(len(eps), func(x, y int) {
eps[x], eps[y] = eps[y], eps[x]
})
err = shard.AddEndpointsForSector([]string{dimVal}, eps[:epc])
if err != nil {
return nil, fmt.Errorf(
"shard: unable to add endpoints: %v", err,
)
}
}
return shard, nil
}
// Otherwise, this is a multi-dimensional lattice.
minDim = math.MaxInt64
// Which dimension has the smallest number of values in it?
for _, v := range dimC {
if int64(v) < minDim {
minDim = int64(v)
}
}
// Build a coordinate to the chosen cells by picking the current top
// item on each list of dimension values.
for i := int64(0); i < minDim; i++ {
coords = []string{}
for j := 0; j < len(l.GetDimensionNames()); j++ {
coords = append(coords, shuffled[j][0])
shuffled[j] = shuffled[j][1:]
}
eps, err = l.GetEndpointsForSector(coords)
if err != nil {
return nil, fmt.Errorf("shard: unable to get endpoints: %v", err)
} else if len(eps) <= 0 {
return nil, fmt.Errorf("shard: no endpoints available")
}
r.Shuffle(len(eps), func(x, y int) {
eps[x], eps[y] = eps[y], eps[x]
})
shard.AddEndpointsForSector(coords, eps[:epc])
}
return shard, nil
} | shard.go | 0.708616 | 0.579876 | shard.go | starcoder |
package distuv
import (
"math"
"golang.org/x/exp/rand"
)
// LogNormal represents a random variable whose log is normally distributed.
// The probability density function is given by
//  1/(x σ √2π) exp(-(ln(x)-μ)^2)/(2σ^2))
type LogNormal struct {
	Mu    float64 // mean of the underlying normal distribution
	Sigma float64 // standard deviation of the underlying normal distribution
	Src   rand.Source // random source; nil falls back to the global source
}
// CDF computes the value of the cumulative density function at x.
func (l LogNormal) CDF(x float64) float64 {
return 0.5 + 0.5*math.Erf((math.Log(x)-l.Mu)/(math.Sqrt2*l.Sigma))
}
// Entropy returns the differential entropy of the distribution:
// 1/2 + ln(σ√(2π)) + μ.
func (l LogNormal) Entropy() float64 {
	return 0.5 + 0.5*math.Log(2*math.Pi*l.Sigma*l.Sigma) + l.Mu
}
// ExKurtosis returns the excess kurtosis of the distribution:
// e^(4σ²) + 2e^(3σ²) + 3e^(2σ²) - 6.
func (l LogNormal) ExKurtosis() float64 {
	s2 := l.Sigma * l.Sigma
	return math.Exp(4*s2) + 2*math.Exp(3*s2) + 3*math.Exp(2*s2) - 6
}
// LogProb computes the natural logarithm of the value of the probability density function at x.
// The distribution has support x >= 0; negative x yields -Inf.
func (l LogNormal) LogProb(x float64) float64 {
	if x < 0 {
		return math.Inf(-1)
	}
	logx := math.Log(x)
	// Standardized deviation of ln(x) under the underlying normal.
	normdiff := (logx - l.Mu) / l.Sigma
	// The extra -logx is the Jacobian of the log transform.
	return -0.5*normdiff*normdiff - logx - math.Log(l.Sigma) - logRoot2Pi
}
// Mean returns the mean of the probability distribution: exp(μ + σ²/2).
func (l LogNormal) Mean() float64 {
	return math.Exp(l.Mu + 0.5*l.Sigma*l.Sigma)
}
// Median returns the median of the probability distribution: exp(μ).
func (l LogNormal) Median() float64 {
	return math.Exp(l.Mu)
}
// Mode returns the mode of the probability distribution.
func (l LogNormal) Mode() float64 {
	// The mode of a log-normal distribution is exp(μ - σ²). The previous
	// implementation returned Mu itself, which is the mode of log(X)
	// (the underlying normal), not of X.
	return math.Exp(l.Mu - l.Sigma*l.Sigma)
}
// NumParameters returns the number of parameters in the distribution (μ and σ).
func (LogNormal) NumParameters() int {
	return 2
}
// Prob computes the value of the probability density function at x.
func (l LogNormal) Prob(x float64) float64 {
	return math.Exp(l.LogProb(x))
}
// Quantile returns the inverse of the cumulative probability distribution.
// It panics when p is outside [0, 1].
func (l LogNormal) Quantile(p float64) float64 {
	if p < 0 || p > 1 {
		panic(badPercentile)
	}
	// Formula from http://www.math.uah.edu/stat/special/LogNormal.html.
	return math.Exp(l.Mu + l.Sigma*UnitNormal.Quantile(p))
}
// Rand returns a random sample drawn from the distribution: exp of a
// normal draw with mean Mu and standard deviation Sigma. A nil Src uses
// the package-global generator.
func (l LogNormal) Rand() float64 {
	var rnd float64
	if l.Src == nil {
		rnd = rand.NormFloat64()
	} else {
		rnd = rand.New(l.Src).NormFloat64()
	}
	return math.Exp(rnd*l.Sigma + l.Mu)
}
// Skewness returns the skewness of the distribution:
// (exp(σ²)+2) √(exp(σ²)−1).
func (l LogNormal) Skewness() float64 {
	s2 := l.Sigma * l.Sigma
	return (math.Exp(s2) + 2) * math.Sqrt(math.Exp(s2)-1)
}
// StdDev returns the standard deviation of the probability distribution.
func (l LogNormal) StdDev() float64 {
	return math.Sqrt(l.Variance())
}
// Survival returns the survival function (complementary CDF) at x.
func (l LogNormal) Survival(x float64) float64 {
	return 0.5 * (1 - math.Erf((math.Log(x)-l.Mu)/(math.Sqrt2*l.Sigma)))
}
// Variance returns the variance of the probability distribution:
// (exp(σ²)−1) exp(2μ+σ²).
func (l LogNormal) Variance() float64 {
	s2 := l.Sigma * l.Sigma
	return (math.Exp(s2) - 1) * math.Exp(2*l.Mu+s2)
} | stat/distuv/lognormal.go | 0.896246 | 0.718088 | lognormal.go | starcoder |
package tetra3d
// Fog blending modes.
// NOTE(review): these constants are untyped ints, not FogMode; they still
// assign to FogMode fields through implicit conversion.
const (
	FogOff = iota // No fog
	FogAdd // Additive blended fog
	FogMultiply // Multiplicative blended fog
	FogOverwrite // Color overwriting fog (mixing base with fog color over depth distance)
)
// FogMode indicates how fog color is blended when rendering a Scene.
type FogMode int
// Scene represents a world of sorts, and can contain a variety of Meshes and Nodes, which organize the scene into a
// graph of parents and children. Models (visual instances of Meshes), Cameras, and "empty" NodeBases all are kinds of Nodes.
type Scene struct {
	Name string // The name of the Scene. Set automatically to the scene name in your 3D modeler if the DAE file exports it.
	library *Library // The library from which this Scene was created. If the Scene was instantiated through code, this will be nil.
	// Root indicates the root node for the scene hierarchy. For visual Models to be displayed, they must be added to the
	// scene graph by simply adding them into the tree via parenting anywhere under the Root. For them to be removed from rendering,
	// they simply need to be removed from the tree.
	// See this page for more information on how a scene graph works: https://webglfundamentals.org/webgl/lessons/webgl-scene-graph.html
	Root INode
	FogColor *Color // The Color of any fog present in the Scene.
	FogMode FogMode // The FogMode, indicating how the fog color is blended if it's on (not FogOff).
	// FogRange is the depth range at which the fog is active. FogRange consists of two numbers,
	// ranging from 0 to 1. The first indicates the start of the fog, and the second the end, in
	// terms of total depth of the near / far clipping plane. The default is [0, 1].
	FogRange []float32
	LightingOn bool // If lighting is enabled when rendering the scene.
}
// NewScene creates a new Scene by the name given, with fog disabled
// (transparent black fog color, full-depth fog range) and lighting enabled.
func NewScene(name string) *Scene {
	scene := &Scene{
		Name: name,
		Root: NewNode("Root"),
		FogColor: NewColor(0, 0, 0, 0),
		FogRange: []float32{0, 1},
		LightingOn: true,
	}
	// Link the root node back to its owning scene.
	scene.Root.(*Node).scene = scene
	return scene
}
// Clone clones the Scene, returning a copy. Models and Meshes are shared
// between them; the node hierarchy, fog settings, and lighting flag are
// duplicated.
func (scene *Scene) Clone() *Scene {
	newScene := NewScene(scene.Name)
	newScene.library = scene.library
	newScene.LightingOn = scene.LightingOn
	newScene.Root = scene.Root.Clone()
	newScene.Root.(*Node).scene = newScene
	newScene.FogColor = scene.FogColor.Clone()
	newScene.FogMode = scene.FogMode
	// Copy the whole fog range instead of assuming it has exactly two
	// entries; callers may have replaced FogRange with a slice of a
	// different length, which previously caused an out-of-range panic.
	newScene.FogRange = append([]float32(nil), scene.FogRange...)
	return newScene
}
// fogAsFloatSlice packs the fog settings as [R, G, B, mode] float32 values,
// inverting the color channels for multiplicative fog. Presumably consumed as
// shader uniforms — confirm against the renderer.
func (scene *Scene) fogAsFloatSlice() []float32 {
	fog := []float32{
		float32(scene.FogColor.R),
		float32(scene.FogColor.G),
		float32(scene.FogColor.B),
		float32(scene.FogMode),
	}
	if scene.FogMode == FogMultiply {
		fog[0] = 1 - fog[0]
		fog[1] = 1 - fog[1]
		fog[2] = 1 - fog[2]
	}
	return fog
}
// Library returns the Library from which this Scene was loaded. If it was created through code and not associated with a Library, this function will return nil.
func (scene *Scene) Library() *Library {
	return scene.library
} | scene.go | 0.81648 | 0.549036 | scene.go | starcoder |
This library is compatible with the original Perl implementation:
http://search.cpan.org/dist/ShardedKV/
https://github.com/tsee/p5-ShardedKV
*/
package shardedkv
import (
"sync"
)
// Storage is a key-value storage backend
type Storage interface {
	// Get returns the value for a given key and a bool indicating if the key was present
	Get(key string) ([]byte, bool, error)
	// Set sets the value for key
	Set(key string, value []byte) error
	// Delete removes a key from the storage, and returns a bool indicating if the key was found
	Delete(key string) (bool, error)
	// ResetConnection reinitializes the connection for the shard responsible for a key
	ResetConnection(key string) error
}
// KVStore is a sharded key-value store
type KVStore struct {
	continuum Chooser            // primary key-to-shard mapping
	storages map[string]Storage  // shard name -> backend for the primary continuum
	migration Chooser            // non-nil while a migration is in progress
	mstorages map[string]Storage // shard name -> backend for the migration continuum
	// we avoid holding the lock during a call to a storage engine, which may block
	mu sync.Mutex
}
// Chooser maps keys to shards
type Chooser interface {
	// SetBuckets sets the list of known buckets from which the chooser should select
	SetBuckets([]string) error
	// Choose returns a bucket for a given key
	Choose(key string) string
	// Buckets returns the list of known buckets
	Buckets() []string
}
// Shard is a named storage backend
type Shard struct {
	Name string
	Backend Storage
}
// New returns a KVStore that uses chooser to shard the keys across the provided shards
func New(chooser Chooser, shards []Shard) *KVStore {
	var buckets []string
	kv := &KVStore{
		continuum: chooser,
		storages: make(map[string]Storage),
		// migration and mstorages stay nil until BeginMigration is called.
	}
	for _, shard := range shards {
		buckets = append(buckets, shard.Name)
		kv.AddShard(shard.Name, shard.Backend)
	}
	// NOTE(review): the error returned by SetBuckets is ignored here (and the
	// signature cannot surface it without breaking callers) — confirm the
	// choosers used in practice cannot fail.
	chooser.SetBuckets(buckets)
	return kv
}
// Get implements Storage.Get()
//
// During a migration the key is looked up in the migration continuum first,
// falling back to the primary continuum when the key is absent there. The
// mutex only guards selecting the storage engines; the potentially blocking
// storage calls happen after it is released.
func (kv *KVStore) Get(key string) ([]byte, bool, error) {
	var storage Storage
	var migStorage Storage
	kv.mu.Lock()
	if kv.migration != nil {
		shard := kv.migration.Choose(key)
		migStorage = kv.mstorages[shard]
	}
	shard := kv.continuum.Choose(key)
	storage = kv.storages[shard]
	kv.mu.Unlock()
	if migStorage != nil {
		val, ok, err := migStorage.Get(key)
		if err != nil {
			return nil, false, err
		}
		if ok {
			return val, ok, nil
		}
	}
	return storage.Get(key)
}
// Set implements Storage.Set()
//
// During a migration, writes go only to the migration continuum; otherwise
// they go to the primary continuum. The lock is released before calling into
// the storage engine, which may block.
func (kv *KVStore) Set(key string, val []byte) error {
	kv.mu.Lock()
	chooser, storages := kv.continuum, kv.storages
	if kv.migration != nil {
		chooser, storages = kv.migration, kv.mstorages
	}
	storage := storages[chooser.Choose(key)]
	kv.mu.Unlock()
	return storage.Set(key, val)
}
// Delete implements Storage.Delete()
//
// During a migration the key is deleted from both the migration and the
// primary continuum, so stale copies cannot resurface.
func (kv *KVStore) Delete(key string) (bool, error) {
	var storage Storage
	var migStorage Storage
	kv.mu.Lock()
	if kv.migration != nil {
		shard := kv.migration.Choose(key)
		migStorage = kv.mstorages[shard]
	}
	shard := kv.continuum.Choose(key)
	storage = kv.storages[shard]
	kv.mu.Unlock()
	var migOk bool
	if migStorage != nil {
		var err error
		migOk, err = migStorage.Delete(key)
		if err != nil {
			return false, err
		}
	}
	ok, err := storage.Delete(key)
	// true if we deleted it from at least one of the shards
	return (ok || migOk), err
}
// ResetConnection implements Storage.ResetConnection()
//
// During a migration, connections are reset on both continuums' shards for
// the key.
func (kv *KVStore) ResetConnection(key string) error {
	var storage Storage
	var migStorage Storage
	kv.mu.Lock()
	if kv.migration != nil {
		shard := kv.migration.Choose(key)
		migStorage = kv.mstorages[shard]
	}
	shard := kv.continuum.Choose(key)
	storage = kv.storages[shard]
	kv.mu.Unlock()
	if migStorage != nil {
		err := migStorage.ResetConnection(key)
		if err != nil {
			return err
		}
	}
	return storage.ResetConnection(key)
}
// AddShard adds a shard to the list of known shards
func (kv *KVStore) AddShard(shard string, storage Storage) {
	kv.mu.Lock()
	defer kv.mu.Unlock()
	kv.storages[shard] = storage
}
// DeleteShard removes a shard from the list of known shards
func (kv *KVStore) DeleteShard(shard string) {
	kv.mu.Lock()
	defer kv.mu.Unlock()
	delete(kv.storages, shard)
}
// BeginMigration begins a continuum migration. All the shards in the new
// continuum must already be known to the KVStore via AddShard().
func (kv *KVStore) BeginMigration(continuum Chooser) {
	kv.mu.Lock()
	defer kv.mu.Unlock()
	kv.migration = continuum
	// Note: mstorages aliases the same map as storages here (not a copy),
	// which is why the new continuum's shards must already have been added.
	kv.mstorages = kv.storages
}
// BeginMigrationWithShards begins a continuum migration using the new set of shards.
func (kv *KVStore) BeginMigrationWithShards(continuum Chooser, shards []Shard) {
	kv.mu.Lock()
	defer kv.mu.Unlock()
	var buckets []string
	mstorages := make(map[string]Storage)
	for _, shard := range shards {
		buckets = append(buckets, shard.Name)
		mstorages[shard.Name] = shard.Backend
	}
	// NOTE(review): SetBuckets' error is ignored, as in New().
	continuum.SetBuckets(buckets)
	kv.migration = continuum
	kv.mstorages = mstorages
}
// EndMigration ends a continuum migration and marks the migration continuum
// as the new primary
func (kv *KVStore) EndMigration() {
	kv.mu.Lock()
	defer kv.mu.Unlock()
	kv.continuum = kv.migration
	kv.migration = nil
	kv.storages = kv.mstorages
	kv.mstorages = nil
} | shardedkv.go | 0.769384 | 0.490907 | shardedkv.go | starcoder |
package scheduling
import (
"time"
"go.thethings.network/lorawan-stack/v3/pkg/frequencyplans"
)
// ConcentratorTime is the time relative to the concentrator start time (nanoseconds).
type ConcentratorTime int64
// NewEmission returns a new Emission with the given values.
func NewEmission(starts ConcentratorTime, duration time.Duration) Emission {
	return Emission{t: starts, d: duration}
}
// Emission contains the scheduled time and duration of an emission.
type Emission struct {
	t ConcentratorTime // start of the emission
	d time.Duration    // on-air duration, excluding time-off-air
}
// Starts returns the time when the emission starts.
func (em Emission) Starts() ConcentratorTime { return em.t }
// Ends returns the time when the emission ends.
func (em Emission) Ends() ConcentratorTime { return em.t + ConcentratorTime(em.d) }
// Duration returns the duration of the emission.
func (em Emission) Duration() time.Duration { return em.d }
// OffAir returns the time-off-air of the emission: the configured fraction of
// the emission duration, but never less than the configured minimum duration.
func (em Emission) OffAir(toa frequencyplans.TimeOffAir) time.Duration {
	if d := time.Duration(float32(em.d) * toa.Fraction); d > toa.Duration {
		return d
	}
	return toa.Duration
}
// Within returns the duration of the emission that happens within the given window.
//
// NOTE(review): the emission is not clipped at the end of the window — an
// emission that starts inside [from, to] but ends after to contributes its
// full duration. Confirm with the duty-cycle accounting that this is intended
// before changing it.
func (em Emission) Within(from, to ConcentratorTime) time.Duration {
	// Entirely outside the window.
	if em.Ends() < from || em.t > to {
		return 0
	}
	// Started before the window: count only the part from `from` onwards.
	if em.t < from {
		return time.Duration(em.Ends() - from)
	}
	return em.d
}
// EndsWithOffAir returns the time when the emission ends plus the time-off-air.
func (em Emission) EndsWithOffAir(toa frequencyplans.TimeOffAir) ConcentratorTime {
	return em.Ends() + ConcentratorTime(em.OffAir(toa))
}
// BeforeWithOffAir returns the time between the end of this emission to the start of the given other emission, considering time-off-air.
// The result is negative when the other emission starts before this one's off-air window ends.
func (em Emission) BeforeWithOffAir(other Emission, toa frequencyplans.TimeOffAir) time.Duration {
	return time.Duration(other.Starts() - em.EndsWithOffAir(toa))
}
// AfterWithOffAir returns the time between the end of the given other emission to the start of this emission, considering time-off-air.
// The result is negative when this emission starts before the other's off-air window ends.
func (em Emission) AfterWithOffAir(other Emission, toa frequencyplans.TimeOffAir) time.Duration {
	return time.Duration(em.Starts() - other.EndsWithOffAir(toa))
}
// OverlapsWithOffAir reports whether the given emission overlaps with this
// emission, considering time-off-air.
//
// Two windows [start, end+offAir) overlap exactly when each one starts before
// the other ends. The previous second disjunct
// (emBegins < otherEnds && emEnds > otherEnds) was implied by this condition
// for non-negative durations and has been removed.
func (em Emission) OverlapsWithOffAir(other Emission, toa frequencyplans.TimeOffAir) bool {
	emBegins, emEnds := em.Starts(), em.EndsWithOffAir(toa)
	otherBegins, otherEnds := other.Starts(), other.EndsWithOffAir(toa)
	return emEnds > otherBegins && emBegins < otherEnds
}
// Emissions is an list of emissions, kept ordered by start time.
type Emissions []Emission
// Insert inserts the given emission to the emissions by preserving order.
// It may mutate the receiver's backing array; callers must use the returned
// slice.
func (ems Emissions) Insert(em Emission) Emissions {
	for i := range ems {
		if ems[i].t > em.t {
			// The inner append copies ems[i:] into a fresh slice first, so the
			// outer append writing into the shared backing array is safe.
			return append(ems[:i], append([]Emission{em}, ems[i:]...)...)
		}
	}
	return append(ems, em)
} | pkg/gatewayserver/scheduling/emission.go | 0.827619 | 0.532972 | emission.go | starcoder |
package css
import (
"errors"
"strings"
)
// A SegmentType is the type of a Segment.
type SegmentType uint32
const (
	// ByteType is everything not of the other types below.
	ByteType SegmentType = iota
	// ImageURLType is a URL for an image. Note that the value has been
	// stripped of the surrounding 'url' identity token. To reconstruct a stylesheet,
	// this value must be enclosed into a url token or function token with name 'url'.
	ImageURLType
	// FontURLType is similar to an image URL, but for fonts.
	FontURLType
)
// Segments is a slice of individual Segment structs.
type Segments []Segment
// Segment is a portion of CSS.
type Segment struct {
	Type SegmentType // kind of content carried by Data
	Data string      // raw CSS bytes, or the bare URL for the URL types
}
const (
	// quoteOrWhitespace lists CSS quote and whitespace characters.
	// NOTE(review): not referenced in this part of the file; presumably used
	// elsewhere in the package — verify before removing.
	quoteOrWhitespace = "\"' \t\n\f\r"
)
// ParseURLs chops a style sheet into Segments. Each segment is
// either a UTF8 encoded byte string, or an image or font URL.
// This is used to modify the URLs to point at a CDN.
// Note that when combining the segments back to a stylesheet,
// the client code must emit url() around URLs. This is done so that
// client code can choose the quote character as in
// url("http://foo.com") or url('http://foo.com/') or even leave out
// the quote character as in url(http://foo.com/). Note that CSS supports
// escaping quote characters within a string by prefixing with a backslash,
// so " inside a URL may be written as \".
func ParseURLs(css string) (Segments, error) {
	z := NewTokenizer(css)
	segments := Segments{}
	var sb strings.Builder
	// endOfFontFaceIdx marks the token index where the current @font-face
	// rule ends; URL tokens before that index are font URLs.
	var endOfFontFaceIdx int
	tokens := z.All()
loop:
	for i, token := range tokens {
		switch token.Type {
		case EOFToken:
			break loop
		case ErrorToken:
			return Segments{}, errors.New(token.Value)
		case AtKeywordToken:
			if token.Value == "font-face" {
				endOfFontFaceIdx = i + consumeAnAtRule(tokens[i:])
			}
		case URLToken:
			// Emit a segment which contains all non-URL CSS seen so far.
			if sb.Len() > 0 {
				segments = append(segments, Segment{ByteType, sb.String()})
				sb.Reset()
			}
			// Now emit a URL segment
			t := ImageURLType
			if endOfFontFaceIdx > i {
				t = FontURLType
			}
			segments = append(segments, Segment{t, token.Value})
			continue
		}
		sb.WriteString(token.String())
	}
	// The trailing segment is appended unconditionally, so a stylesheet
	// ending in a URL yields a final empty ByteType segment.
	segments = append(segments, Segment{ByteType, sb.String()})
	return segments, nil
}
// consumeAnAtRule returns the index which marks the end of the at rule as per
// 5.4.2 https://www.w3.org/TR/css-syntax-3/#consume-an-at-rule
// The index is relative to the start of tokens; -1 is returned when tokens
// does not begin with an at-keyword.
func consumeAnAtRule(tokens []Token) int {
	if len(tokens) == 0 || tokens[0].Type != AtKeywordToken {
		return -1 // should be impossible
	}
	i := 1
	for ; i < len(tokens); i++ {
		if tokens[i].Type == SemicolonToken || tokens[i].Type == EOFToken {
			return i
		}
		if tokens[i].Type == OpenCurlyToken {
			// The at-rule's block ends the rule.
			i += consumeASimpleBlock(tokens[i:])
			return i
		}
		i += consumeAComponentValue(tokens[i:])
	}
	return i
}
// consumeAComponentValue returns the index that marks the end of the component value as per
// 5.4.6 https://www.w3.org/TR/css-syntax-3/#consume-a-component-value
// A return of 0 means the component value is the single token at tokens[0].
func consumeAComponentValue(tokens []Token) int {
	if len(tokens) == 0 {
		return -1
	}
	switch tokens[0].Type {
	case OpenCurlyToken, OpenSquareToken, OpenParenToken:
		return consumeASimpleBlock(tokens)
	case FunctionToken:
		return consumeAFunction(tokens)
	default:
		return 0
	}
}
// consumeASimpleBlock returns the index which marks the end of the block, or -1 if the tokens are empty as per
// 5.4.7 https://www.w3.org/TR/css-syntax-3/#consume-a-simple-block
// tokens[0] is expected to be an open-curly/paren/square token; the returned
// index points at the matching closing token (or EOF).
func consumeASimpleBlock(tokens []Token) int {
	if len(tokens) == 0 {
		return -1
	}
	var endingTokenType TokenType
	switch tokens[0].Type {
	case OpenCurlyToken:
		endingTokenType = CloseCurlyToken
	case OpenParenToken:
		endingTokenType = CloseParenToken
	case OpenSquareToken:
		endingTokenType = CloseSquareToken
	}
	i := 1
	for ; i < len(tokens); i++ {
		if tokens[i].Type == EOFToken || tokens[i].Type == endingTokenType {
			return i
		}
		// Skip over nested blocks and functions.
		i += consumeAComponentValue(tokens[i:])
	}
	return i
}
// consumeAFunction returns the index marking the end of the function as defined by:
// 5.4.8 https://www.w3.org/TR/css-syntax-3/#consume-a-function
// The returned index points at the closing paren (or EOF).
func consumeAFunction(tokens []Token) int {
	if len(tokens) == 0 || tokens[0].Type != FunctionToken {
		return -1 // should be impossible case.
	}
	i := 1
	for ; i < len(tokens); i++ {
		if tokens[i].Type == EOFToken || tokens[i].Type == CloseParenToken {
			return i
		}
		i += consumeAComponentValue(tokens[i:])
	}
	return i
} | transformer/internal/css/cssurl.go | 0.593138 | 0.403567 | cssurl.go | starcoder |
package types
import (
"bytes"
"fmt"
"github.com/tendermint/tendermint/crypto"
)
// For event switch in entropy generator
const (
	EventComputedEntropy = "EventComputedEntropy"
	MaxEntropyShareSize = 500
	MaxThresholdSignatureSize = 256
	GenesisHeight = int64(0)
)
// ThresholdSignature is a raw group threshold signature.
type ThresholdSignature = []byte
//-----------------------------------------------------------------------------
// BlockEntropy struct for entropy in block
type BlockEntropy struct {
	GroupSignature ThresholdSignature `json:"group_signature"`
	Round int64 `json:"round"`
	AeonLength int64 `json:"aeon_length"`
	DKGID int64 `json:"dkg_id"`
	NextAeonStart int64 `json:"next_aeon_start"`
	Qual []int64 `json:"qual"`
}
// EmptyBlockEntropy for constructing BlockEntropy for empty group signature.
// The sentinel uses -1 for Round/AeonLength/DKGID and an empty Qual.
func EmptyBlockEntropy() *BlockEntropy {
	return &BlockEntropy{
		GroupSignature: []byte{},
		Round: -1,
		AeonLength: -1,
		DKGID: -1,
		Qual: make([]int64, 0),
	}
}
// IsEmptyBlockEntropy reports whether the block entropy is empty, i.e. has no
// group signature.
func IsEmptyBlockEntropy(entropy *BlockEntropy) bool {
	return len(entropy.GroupSignature) == 0
}
// NewBlockEntropy for constructing BlockEntropy
func NewBlockEntropy(sig ThresholdSignature, round int64, aeonLength int64, dkgID int64, qual []int64) *BlockEntropy {
	return &BlockEntropy{
		GroupSignature: sig,
		Round: round,
		AeonLength: aeonLength,
		DKGID: dkgID,
		Qual: qual,
	}
}
// Equal compares two block entropies and returns whether they are identical.
func (blockEntropy *BlockEntropy) Equal(anotherEntropy *BlockEntropy) bool {
	// Compare Qual lengths first: the previous element-wise loop indexed
	// anotherEntropy.Qual without a bounds check, panicking when the other
	// Qual slice was shorter and wrongly reporting equality when it was
	// longer (or when this Qual was empty).
	if len(blockEntropy.Qual) != len(anotherEntropy.Qual) {
		return false
	}
	for index, elem := range blockEntropy.Qual {
		if anotherEntropy.Qual[index] != elem {
			return false
		}
	}
	return bytes.Equal(blockEntropy.GroupSignature, anotherEntropy.GroupSignature) &&
		blockEntropy.Round == anotherEntropy.Round &&
		blockEntropy.AeonLength == anotherEntropy.AeonLength &&
		blockEntropy.DKGID == anotherEntropy.DKGID &&
		blockEntropy.NextAeonStart == anotherEntropy.NextAeonStart
}
// ValidateBasic performs basic validation on block entropy.
// NOTE(review): the empty-entropy branch does not constrain NextAeonStart,
// and Qual contents/NextAeonStart are not validated in the non-empty case —
// confirm whether that is intentional.
func (blockEntropy *BlockEntropy) ValidateBasic() error {
	// If entropy is empty then all other values should be -1
	if len(blockEntropy.GroupSignature) == 0 {
		if blockEntropy.Round != -1 || blockEntropy.AeonLength != -1 || blockEntropy.DKGID != -1 {
			return fmt.Errorf("expected EmptyBlockEntropy, got: round %d, aeon length %v, dkg id %v",
				blockEntropy.Round, blockEntropy.AeonLength, blockEntropy.DKGID)
		}
		return nil
	}
	if len(blockEntropy.GroupSignature) > MaxThresholdSignatureSize {
		return fmt.Errorf("expected GroupSignature size be max %d bytes, got %d bytes",
			MaxThresholdSignatureSize,
			len(blockEntropy.GroupSignature),
		)
	}
	if blockEntropy.Round < 0 {
		return fmt.Errorf("expected Round >= 0, got %d", blockEntropy.Round)
	}
	if blockEntropy.AeonLength <= 0 {
		return fmt.Errorf("expected AeonLength > 0, got %d", blockEntropy.AeonLength)
	}
	if blockEntropy.DKGID < 0 {
		return fmt.Errorf("expected DKG ID >= 0, got %d", blockEntropy.DKGID)
	}
	if blockEntropy.Qual == nil {
		return fmt.Errorf("expected non-nil Qual")
	}
	return nil
}
// String returns a string representation of the BlockEntropy
func (blockEntropy *BlockEntropy) String() string {
	return blockEntropy.StringIndented("")
}
// StringIndented returns a string representation of the BlockEntropy,
// prefixing each field line with indent.
func (blockEntropy *BlockEntropy) StringIndented(indent string) string {
	return fmt.Sprintf(`BlockEntropy{
%s  Round/AeonLength:    %v/%v
%s  DKGID:    %v
%s  NextAeonStart:    %v
%s  GroupSignature: %v
%s}`,
		indent, blockEntropy.Round, blockEntropy.AeonLength,
		indent, blockEntropy.DKGID,
		indent, blockEntropy.NextAeonStart,
		indent, blockEntropy.GroupSignature,
		indent)
}
//-----------------------------------------------------------------------------
// ChannelEntropy struct for sending entropy from entropy generator to consensus
type ChannelEntropy struct {
	Height int64         // block height the entropy belongs to
	Entropy BlockEntropy // the computed block entropy
	Enabled bool         // whether entropy generation is enabled
	Validators *ValidatorSet
}
// NewChannelEntropy for constructing ChannelEntropy
func NewChannelEntropy(height int64, entropy BlockEntropy, enabled bool, validators *ValidatorSet) *ChannelEntropy {
	return &ChannelEntropy{
		Height: height,
		Entropy: entropy,
		Enabled: enabled,
		Validators: validators,
	}
}
// ValidateBasic performs basic validation: the height must be past genesis
// and the contained entropy must itself validate.
func (ce *ChannelEntropy) ValidateBasic() error {
	if ce.Height <= GenesisHeight {
		return fmt.Errorf("invalid Height")
	}
	return ce.Entropy.ValidateBasic()
}
//-----------------------------------------------------------------------------
// Wrappers for signing entropy message
// CanonicalEntropyShare is the canonical, chain-ID-qualified form of an
// EntropyShare that is serialized for signing.
type CanonicalEntropyShare struct {
	Height int64
	SignerAddress crypto.Address
	SignatureShare string
	ChainID string
}
// CanonicalizeEntropyShare binds an EntropyShare to a chain ID for signing.
func CanonicalizeEntropyShare(chainID string, entropy *EntropyShare) CanonicalEntropyShare {
	return CanonicalEntropyShare{
		Height: entropy.Height,
		SignerAddress: entropy.SignerAddress,
		SignatureShare: entropy.SignatureShare,
		ChainID: chainID,
	}
}
//-----------------------------------------------------------------------------
// EntropyShare is one validator's signature share over the entropy for a height.
type EntropyShare struct {
	Height int64 `json:"height"`
	SignerAddress crypto.Address `json:"signer"`
	SignatureShare string `json:"entropy_signature"`
	Signature []byte `json:"signature"`
}
// ValidateBasic performs basic validation: height past genesis, correctly
// sized signer address, and non-empty, bounded signature fields.
func (entropy *EntropyShare) ValidateBasic() error {
	if entropy.Height < GenesisHeight+1 {
		return fmt.Errorf("invalid Height")
	}
	if len(entropy.SignerAddress) != crypto.AddressSize {
		return fmt.Errorf("expected ValidatorAddress size to be %d bytes, got %d bytes",
			crypto.AddressSize,
			len(entropy.SignerAddress),
		)
	}
	if len(entropy.SignatureShare) == 0 || len(entropy.SignatureShare) > MaxEntropyShareSize {
		return fmt.Errorf("expected SignatureShare size be max %d bytes, got %d bytes",
			MaxEntropyShareSize,
			len(entropy.SignatureShare),
		)
	}
	if len(entropy.Signature) == 0 || len(entropy.Signature) > MaxThresholdSignatureSize {
		return fmt.Errorf("expected Signature size be max %d bytes, got %d bytes",
			MaxThresholdSignatureSize,
			len(entropy.Signature),
		)
	}
	return nil
}
// String returns a string representation of EntropyShare
func (entropy EntropyShare) String() string {
	return entropy.StringIndented("")
}
// StringIndented returns a string representation of the EntropyShare,
// prefixing each line with indent.
func (entropy EntropyShare) StringIndented(indent string) string {
	return fmt.Sprintf(`EntropySignatureShare{
%s  %v/%v/%v%v
%s}`,
		indent, entropy.Height, entropy.SignerAddress, entropy.SignatureShare, entropy.Signature,
		indent)
}
// Copy returns a shallow copy of the share. The value receiver already copies
// every field, so returning it is equivalent to the field-by-field literal;
// note the SignerAddress and Signature byte slices still share backing arrays
// with the original.
func (entropy EntropyShare) Copy() EntropyShare {
	return entropy
}
//-----------------------------------------------------------
// These methods are for Protobuf Compatibility
// Marshal returns the amino encoding.
func (entropy *EntropyShare) Marshal() ([]byte, error) {
	return cdc.MarshalBinaryBare(entropy)
}
// MarshalTo calls Marshal and copies to the given buffer.
// NOTE(review): returns -1 on error rather than the conventional 0 — confirm
// callers only consult the count when err is nil.
func (entropy *EntropyShare) MarshalTo(data []byte) (int, error) {
	bs, err := entropy.Marshal()
	if err != nil {
		return -1, err
	}
	return copy(data, bs), nil
}
// Unmarshal deserializes from amino encoded form.
func (entropy *EntropyShare) Unmarshal(bs []byte) error {
	return cdc.UnmarshalBinaryBare(bs, entropy)
}
// SignBytes returns the canonical, chain-ID-qualified bytes to be signed with
// the validator's private key. It panics if amino encoding fails.
func (entropy *EntropyShare) SignBytes(chainID string) []byte {
	bz, err := cdc.MarshalBinaryLengthPrefixed(CanonicalizeEntropyShare(chainID, entropy))
	if err != nil {
		panic(err)
	}
	return bz
} | types/entropy_share.go | 0.744378 | 0.463201 | entropy_share.go | starcoder |
package props
import (
"github.com/bjgirl/maroto/pkg/color"
"github.com/bjgirl/maroto/pkg/consts"
)
// Proportion represents a proportion from a rectangle, example: 16x9, 4x3...
type Proportion struct {
	// Width from the rectangle: Barcode, image and etc.
	Width float64
	// Height from the rectangle: Barcode, image and etc.
	Height float64
}
// Barcode represents properties from a barcode inside a cell.
type Barcode struct {
	// Left is the space between the left cell boundary to the barcode, if center is false.
	Left float64
	// Top is space between the upper cell limit to the barcode, if center is false.
	Top float64
	// Percent is how much the barcode will occupy the cell,
	// ex 100%: The barcode will fulfill the entire cell
	// ex 50%: The greater side from the barcode will have half the size of the cell.
	Percent float64
	// Proportion is the proportion between size of the barcode.
	// Ex: 16x9, 4x3...
	Proportion Proportion
	// Center define that the barcode will be vertically and horizontally centralized.
	Center bool
}
// Rect represents properties from a rectangle (Image, QrCode or Barcode) inside a cell.
type Rect struct {
	// Left is the space between the left cell boundary to the rectangle, if center is false.
	Left float64
	// Top is space between the upper cell limit to the rectangle, if center is false.
	Top float64
	// Percent is how much the rectangle will occupy the cell,
	// ex 100%: The rectangle will fulfill the entire cell
	// ex 50%: The greater side from the rectangle will have half the size of the cell.
	Percent float64
	// Center define that the rectangle will be vertically and horizontally centralized.
	Center bool
}
// Text represents properties from a Text inside a cell.
type Text struct {
	// Top is space between the upper cell limit to the text, if align is not center.
	Top float64
	// Family of the text, ex: consts.Arial, helvetica and etc.
	Family string
	// Style of the text, ex: consts.Normal, bold and etc.
	Style consts.Style
	// Size of the text.
	Size float64
	// Align of the text.
	Align consts.Align
	// Extrapolate define if the text will automatically add a new line when
	// text reach the right cell boundary.
	Extrapolate bool
	// VerticalPadding define an additional space between lines.
	VerticalPadding float64
	// Color define the font color.
	Color color.Color
}
// Font represents properties from a text.
type Font struct {
	// Family of the text, ex: consts.Arial, helvetica and etc.
	Family string
	// Style of the text, ex: consts.Normal, bold and etc.
	Style consts.Style
	// Size of the text.
	Size float64
	// Color define the font color.
	Color color.Color
}
// TableListContent represents properties from a line (header/content) from a TableList.
type TableListContent struct {
	// Family of the text, ex: consts.Arial, helvetica and etc.
	Family string
	// Style of the text, ex: consts.Normal, bold and etc.
	Style consts.Style
	// Size of the text.
	Size float64
	// Color define the font color.
	Color color.Color
	// GridSizes is the custom properties of the size of the grid
	// the sum of the values cannot be greater than 12, if this
	// value is not provided the width of all columns will be the
	// same.
	GridSizes []uint
}
// TableList represents properties from a TableList.
type TableList struct {
	// HeaderProp is the custom properties of the text inside
	// the headers.
	HeaderProp TableListContent
	// ContentProp is the custom properties of the text inside
	// the contents.
	ContentProp TableListContent
	// Align is the align of the text (header and content) inside the columns.
	Align consts.Align
	// AlternatedBackground define the background color from even rows
	// i.e rows with index (0, 2, 4, ..., N) will have background colorized,
	// rows with index (1, 3, 5, ..., N) will not.
	AlternatedBackground *color.Color
	// HeaderContentSpace is the space between the header and the contents.
	HeaderContentSpace float64
	// Line adds a line after every content-row to separate rows. The line's spaceHeight is set to 1.0.
	Line bool
}
// MakeValid from Rect will make the properties from a rectangle reliable to
// fit inside a cell and define default values for a rectangle.
func (s *Rect) MakeValid() {
	// An out-of-range percentage falls back to occupying the whole cell.
	if s.Percent <= 0.0 || s.Percent > 100.0 {
		s.Percent = 100.0
	}
	// Centering overrides any manual offsets.
	if s.Center {
		s.Left = 0
		s.Top = 0
	}
	// Offsets can never be negative.
	if s.Left < 0.0 {
		s.Left = 0.0
	}
	if s.Top < 0.0 {
		s.Top = 0.0
	}
}
// MakeValid from Barcode will make the properties from a barcode reliable to fit inside a cell
// and define default values for a barcode.
func (s *Barcode) MakeValid() {
	minPercentage := 0.0
	maxPercentage := 100.0
	minValue := 0.0
	// An out-of-range percentage falls back to occupying the whole cell.
	if s.Percent <= minPercentage || s.Percent > maxPercentage {
		s.Percent = maxPercentage
	}
	// Centering overrides any manual offsets.
	if s.Center {
		s.Left = 0
		s.Top = 0
	}
	if s.Left < minValue {
		s.Left = minValue
	}
	if s.Top < minValue {
		s.Top = minValue
	}
	// Non-positive proportions default to a square.
	if s.Proportion.Width <= 0 {
		s.Proportion.Width = 1
	}
	if s.Proportion.Height <= 0 {
		s.Proportion.Height = 1
	}
	// Clamp the height between 10% and 20% of the width, the aspect range a
	// barcode can be rendered in.
	maxHeightProportionBasedOnWidth := 0.20
	minHeightProportionBasedOnWidth := 0.10
	if s.Proportion.Height > s.Proportion.Width*maxHeightProportionBasedOnWidth {
		s.Proportion.Height = s.Proportion.Width * maxHeightProportionBasedOnWidth
	} else if s.Proportion.Height < s.Proportion.Width*minHeightProportionBasedOnWidth {
		s.Proportion.Height = s.Proportion.Width * minHeightProportionBasedOnWidth
	}
}
// MakeValid from Text define default values for a Text:
// defaultFamily/Normal/size 10, left-aligned, non-negative offsets.
func (s *Text) MakeValid(defaultFamily string) {
	minValue := 0.0
	undefinedValue := 0.0
	if s.Family == "" {
		s.Family = defaultFamily
	}
	if s.Style == "" {
		s.Style = consts.Normal
	}
	if s.Align == "" {
		s.Align = consts.Left
	}
	if s.Size == undefinedValue {
		s.Size = 10.0
	}
	if s.Top < minValue {
		s.Top = minValue
	}
	if s.VerticalPadding < 0 {
		s.VerticalPadding = 0
	}
}
// MakeValid from Font define default values for a Signature:
// defaultFamily/Bold/size 8.
func (s *Font) MakeValid(defaultFamily string) {
	undefinedValue := 0.0
	if s.Family == "" {
		s.Family = defaultFamily
	}
	if s.Style == "" {
		s.Style = consts.Bold
	}
	if s.Size == undefinedValue {
		s.Size = 8.0
	}
}
// ToTextProp from Font return a Text based on Font, combined with the given
// layout values, validated via Text.MakeValid.
func (s *Font) ToTextProp(align consts.Align, top float64, extrapolate bool, verticalPadding float64) Text {
	textProp := Text{
		Family: s.Family,
		Style: s.Style,
		Size: s.Size,
		Align: align,
		Top: top,
		Extrapolate: extrapolate,
		VerticalPadding: verticalPadding,
		Color: s.Color,
	}
	textProp.MakeValid(s.Family)
	return textProp
}
// ToTextProp from TableListContent return a Text based on TableListContent.
// NOTE(review): duplicates Font.ToTextProp field-for-field (GridSizes is not
// carried over); keep the two in sync.
func (s *TableListContent) ToTextProp(align consts.Align, top float64, extrapolate bool, verticalPadding float64) Text {
	textProp := Text{
		Family: s.Family,
		Style: s.Style,
		Size: s.Size,
		Align: align,
		Top: top,
		Extrapolate: extrapolate,
		VerticalPadding: verticalPadding,
		Color: s.Color,
	}
	textProp.MakeValid(s.Family)
	return textProp
}
// MakeValid from TableList define default values for a TableList:
// bold size-10 headers, normal size-10 contents, left alignment, evenly
// divided grid sizes, and a default header/content spacing of 4.
// NOTE(review): the even grid-size computation divides by len(header); an
// empty header slice yields +Inf converted to uint — confirm callers never
// pass an empty header.
func (s *TableList) MakeValid(header []string, defaultFamily string) {
	undefinedValue := 0.0
	if s.HeaderProp.Size == undefinedValue {
		s.HeaderProp.Size = 10.0
	}
	if s.HeaderProp.Family == "" {
		s.HeaderProp.Family = defaultFamily
	}
	if s.HeaderProp.Style == "" {
		s.HeaderProp.Style = consts.Bold
	}
	if len(s.HeaderProp.GridSizes) == 0 {
		gridSize := uint(consts.MaxGridSum / float64(len(header)))
		s.HeaderProp.GridSizes = []uint{}
		for range header {
			s.HeaderProp.GridSizes = append(s.HeaderProp.GridSizes, gridSize)
		}
	}
	if s.Align == "" {
		s.Align = consts.Left
	}
	if s.ContentProp.Size == undefinedValue {
		s.ContentProp.Size = 10.0
	}
	if s.ContentProp.Family == "" {
		s.ContentProp.Family = defaultFamily
	}
	if s.ContentProp.Style == "" {
		s.ContentProp.Style = consts.Normal
	}
	if len(s.ContentProp.GridSizes) == 0 {
		gridSize := uint(consts.MaxGridSum / float64(len(header)))
		s.ContentProp.GridSizes = []uint{}
		for range header {
			s.ContentProp.GridSizes = append(s.ContentProp.GridSizes, gridSize)
		}
	}
	if s.HeaderContentSpace == undefinedValue {
		s.HeaderContentSpace = 4.0
	}
} | pkg/props/prop.go | 0.731251 | 0.732317 | prop.go | starcoder |
package ta
// MinMax returns the min/max over a period
// Update returns min
// UpdateAll returns [min, max]
func MinMax(period int) MultiVarStudy {
	return &minmax{data: NewCapped(period)}
}

// Min returns a study that reports the minimum value over a period.
func Min(period int) Study {
	return &minmax{data: NewCapped(period)}
}

// Max returns a study that reports the maximum value over a period.
func Max(period int) Study {
	return &minmax{data: NewCapped(period), isMax: true}
}
// minmax tracks the extrema of a capped window of values.
type minmax struct {
	data  *TA
	isMax bool // when set, Update reports the maximum instead of the minimum
}

// Update appends vs to the window and returns the current extreme
// (max when isMax is set, min otherwise).
func (s *minmax) Update(vs ...Decimal) Decimal {
	win := s.data.Append(vs...)
	if !s.isMax {
		return win.Min()
	}
	return win.Max()
}

// UpdateAll appends vs to the window and returns both extremes as [min, max].
func (s *minmax) UpdateAll(vs ...Decimal) []Decimal {
	win := s.data.Append(vs...)
	return []Decimal{win.Min(), win.Max()}
}

// Len reports the number of values currently held in the window.
func (s *minmax) Len() int { return s.data.Len() }

// LenAll reports the window length for each of the two outputs.
func (s *minmax) LenAll() []int {
	n := s.Len()
	return []int{n, n}
}

// ToStudy exposes this tracker as a single-value Study.
func (s *minmax) ToStudy() (Study, bool) { return s, true }

// ToMulti exposes this tracker as a multi-value study.
func (s *minmax) ToMulti() (MultiVarStudy, bool) { return s, true }
// BBands alias for BollingerBands(period, d, -d, nil)
// (BollingerBands negates a positive down multiplier, so passing d twice
// below is equivalent to the documented d, -d pair).
// NOTE(review): the band multiplier d is derived from the period itself
// rather than the conventional fixed number of standard deviations
// (typically 2) — confirm this is intended before relying on it.
func BBands(period int) MultiVarStudy {
	d := Decimal(period)
	return BollingerBands(period, d, d, nil)
}

// BBandsLimits alias for BollingerBands(period, up, down, nil)
func BBandsLimits(period int, up, down Decimal) MultiVarStudy {
	return BollingerBands(period, up, down, nil)
}
// BollingerBands returns a Bollinger Bands study; if ma is nil, the mid line
// is the running mean from the variance window.
// Update will return the upper bound.
// UpdateAll returns [upper, mid, lower].
func BollingerBands(period int, up, down Decimal, ma MovingAverageFunc) MultiVarStudy {
	// The lower multiplier is always applied as a negative offset.
	if down > 0 {
		down = -down
	}
	out := &bbands{
		std:  newVar(period, runStd),
		up:   up,
		down: down,
	}
	if ma != nil {
		out.ext = ma(period)
	}
	return out
}
type bbands struct {
ext MovingAverage
std *variance
up Decimal
down Decimal
}
func (s *bbands) Update(vs ...Decimal) Decimal {
return s.UpdateAll(vs...)[0]
}
func (s *bbands) UpdateAll(vs ...Decimal) []Decimal {
var sd, base Decimal
if s.ext == nil {
data := s.std.UpdateAll(vs...)
sd, base = data[1], data[2]
} else {
sd = s.std.Update(vs...)
base = s.ext.Update(vs...)
}
return []Decimal{base + sd*s.up, base, base + sd*s.down}
}
func (s *bbands) Len() int { return s.std.Len() }
func (s *bbands) LenAll() []int {
ln := s.Len()
return []int{ln, ln, ln}
}
func (s *bbands) ToStudy() (Study, bool) { return s, true }
func (s *bbands) ToMulti() (MultiVarStudy, bool) { return s, true } | studies_01.go | 0.870583 | 0.460532 | studies_01.go | starcoder |
package geom
// A LineString represents a single, unbroken line, linearly interpreted
// between zero or more control points.
type LineString struct {
	geom1
}

// NewLineString returns a new LineString with layout l and no control points.
func NewLineString(l Layout) *LineString {
	return NewLineStringFlat(l, nil)
}

// NewLineStringFlat returns a new LineString with layout l and control points
// flatCoords.
func NewLineStringFlat(layout Layout, flatCoords []float64) *LineString {
	ls := &LineString{}
	ls.layout = layout
	ls.stride = layout.Stride()
	ls.flatCoords = flatCoords
	return ls
}
// Area returns the area of ls, i.e. zero.
func (ls *LineString) Area() float64 {
	return 0
}

// Clone returns a copy of ls that does not alias ls.
func (ls *LineString) Clone() *LineString {
	flatCoords := make([]float64, len(ls.flatCoords))
	copy(flatCoords, ls.flatCoords)
	return NewLineStringFlat(ls.layout, flatCoords)
}

// Empty returns false.
func (ls *LineString) Empty() bool {
	return false
}
// Interpolate returns the index and delta of val in dimension dim.
//
// The control points are assumed to be sorted in ascending order along
// dimension dim. The returned index is the position of the last control
// point whose dim-coordinate is <= val, and delta is the fractional
// distance (0..1) from that point towards the next one. Values outside
// the coordinate range clamp to the first/last point with delta 0.
func (ls *LineString) Interpolate(val float64, dim int) (int, float64) {
	n := len(ls.flatCoords)
	if n == 0 {
		panic("geom: empty linestring")
	}
	// Clamp below the first control point.
	if val <= ls.flatCoords[dim] {
		return 0, 0
	}
	// Clamp above the last control point.
	if ls.flatCoords[n-ls.stride+dim] <= val {
		return (n - 1) / ls.stride, 0
	}
	// Binary search for the first control point strictly greater than val.
	low := 0
	high := n / ls.stride
	for low < high {
		mid := (low + high) / 2
		if val < ls.flatCoords[mid*ls.stride+dim] {
			high = mid
		} else {
			low = mid + 1
		}
	}
	low--
	val0 := ls.flatCoords[low*ls.stride+dim]
	if val == val0 {
		return low, 0
	}
	// Linear interpolation between the two bracketing control points.
	val1 := ls.flatCoords[(low+1)*ls.stride+dim]
	return low, (val - val0) / (val1 - val0)
}
// Length returns the length of ls.
func (ls *LineString) Length() float64 {
return length1(ls.flatCoords, 0, len(ls.flatCoords), ls.stride)
}
// MustSetCoords is like SetCoords but it panics on any error.
func (ls *LineString) MustSetCoords(coords []Coord) *LineString {
Must(ls.SetCoords(coords))
return ls
}
// SetCoords sets the coordinates of ls.
func (ls *LineString) SetCoords(coords []Coord) (*LineString, error) {
if err := ls.setCoords(coords); err != nil {
return nil, err
}
return ls, nil
}
// SetSRID sets the SRID of ls.
func (ls *LineString) SetSRID(srid int) *LineString {
ls.srid = srid
return ls
}
// SubLineString returns a LineString from starts at index start and stops at
// index stop of ls. The returned LineString aliases ls.
func (ls *LineString) SubLineString(start, stop int) *LineString {
return NewLineStringFlat(ls.layout, ls.flatCoords[start*ls.stride:stop*ls.stride])
}
// Swap swaps the values of ls and ls2.
func (ls *LineString) Swap(ls2 *LineString) {
ls.geom1.swap(&ls2.geom1)
} | vendor/github.com/twpayne/go-geom/linestring.go | 0.879153 | 0.487551 | linestring.go | starcoder |
package expression
import (
"github.com/dolthub/go-mysql-server/sql"
)
// IsUnary returns whether the expression is unary or not.
func IsUnary(e sql.Expression) bool {
	children := e.Children()
	return len(children) == 1
}

// IsBinary returns whether the expression is binary or not.
func IsBinary(e sql.Expression) bool {
	children := e.Children()
	return len(children) == 2
}
// UnaryExpression is an expression that has only one child.
type UnaryExpression struct {
	Child sql.Expression // the single operand
}

// Children implements the Expression interface.
func (p *UnaryExpression) Children() []sql.Expression {
	return []sql.Expression{p.Child}
}

// Resolved implements the Expression interface.
func (p *UnaryExpression) Resolved() bool {
	return p.Child.Resolved()
}

// IsNullable returns whether the expression can be null.
func (p *UnaryExpression) IsNullable() bool {
	return p.Child.IsNullable()
}
// BinaryExpression is an expression that has two children.
type BinaryExpression struct {
	Left  sql.Expression
	Right sql.Expression
}

// Children implements the Expression interface.
func (p *BinaryExpression) Children() []sql.Expression {
	return []sql.Expression{p.Left, p.Right}
}

// Resolved implements the Expression interface; true only when both
// children are resolved.
func (p *BinaryExpression) Resolved() bool {
	return p.Left.Resolved() && p.Right.Resolved()
}

// IsNullable returns whether the expression can be null, which is the
// case when either child can be null.
func (p *BinaryExpression) IsNullable() bool {
	return p.Left.IsNullable() || p.Right.IsNullable()
}
// NaryExpression is an expression with an arbitrary number of children.
type NaryExpression struct {
	ChildExpressions []sql.Expression
}

// Children implements the Expression interface.
func (n *NaryExpression) Children() []sql.Expression {
	return n.ChildExpressions
}

// Resolved implements the Expression interface; true only when every
// child is resolved.
func (n *NaryExpression) Resolved() bool {
	for _, c := range n.Children() {
		if !c.Resolved() {
			return false
		}
	}
	return true
}

// IsNullable returns whether the expression can be null, which is the
// case when any child can be null.
func (n *NaryExpression) IsNullable() bool {
	for _, c := range n.Children() {
		if c.IsNullable() {
			return true
		}
	}
	return false
}
// ExpressionsResolve returns whether all the expressions in the slice given are resolved
func ExpressionsResolved(exprs ...sql.Expression) bool {
for _, e := range exprs {
if !e.Resolved() {
return false
}
}
return true
}
func Dispose(e sql.Expression) {
sql.Inspect(e, func(e sql.Expression) bool {
sql.Dispose(e)
return true
})
} | sql/expression/common.go | 0.820505 | 0.483709 | common.go | starcoder |
package types
import (
"math"
"reflect"
"github.com/lyraproj/pcore/px"
"github.com/lyraproj/pcore/utils"
)
// commonType returns a type that both a and b are assignable to.
func commonType(a px.Type, b px.Type) px.Type {
	if isAssignable(a, b) {
		return a
	}
	if isAssignable(b, a) {
		// Fix: a is assignable to b, so b is the wider type and therefore
		// the common type. The previous code returned a here, breaking the
		// symmetry of the two assignability checks.
		return b
	}
	// Deal with mergable string types
	switch a.(type) {
	case *EnumType:
		switch b.(type) {
		case *vcStringType:
			str := b.(*vcStringType).value
			ea := a.(*EnumType)
			return NewEnumType(utils.Unique(append(ea.values, str)), ea.caseInsensitive)
		case px.StringType:
			return DefaultStringType()
		case *EnumType:
			ea := a.(*EnumType)
			eb := b.(*EnumType)
			return NewEnumType(utils.Unique(append(ea.values, eb.values...)), ea.caseInsensitive || eb.caseInsensitive)
		}
	case *scStringType:
		switch b.(type) {
		case *scStringType:
			as := a.(*scStringType)
			bs := b.(*scStringType)
			return NewStringType(commonType(as.Size(), bs.Size()).(*IntegerType), ``)
		case px.StringType, *EnumType:
			return DefaultStringType()
		}
	case *vcStringType:
		switch b.(type) {
		case *vcStringType:
			as := a.(*vcStringType)
			bs := b.(*vcStringType)
			return NewEnumType([]string{as.value, bs.value}, false)
		case px.StringType:
			return DefaultStringType()
		case *EnumType:
			return commonType(b, a)
		}
	}
	// Deal with mergable types same type
	if reflect.TypeOf(a) == reflect.TypeOf(b) {
		switch a.(type) {
		case *ArrayType:
			aa := a.(*ArrayType)
			ba := b.(*ArrayType)
			return NewArrayType(commonType(aa.typ, ba.typ), commonType(aa.size, ba.size).(*IntegerType))
		case *FloatType:
			af := a.(*FloatType)
			bf := b.(*FloatType)
			return NewFloatType(math.Min(af.min, bf.min), math.Max(af.max, bf.max))
		case *IntegerType:
			ai := a.(*IntegerType)
			bi := b.(*IntegerType)
			min := ai.min
			if bi.min < min {
				min = bi.min
			}
			max := ai.max
			if bi.max > max {
				max = bi.max
			}
			return NewIntegerType(min, max)
		case *IterableType:
			an := a.(*IterableType)
			bn := b.(*IterableType)
			return NewIterableType(commonType(an.ElementType(), bn.ElementType()))
		case *IteratorType:
			an := a.(*IteratorType)
			bn := b.(*IteratorType)
			return NewIteratorType(commonType(an.ElementType(), bn.ElementType()))
		case *NotUndefType:
			an := a.(*NotUndefType)
			bn := b.(*NotUndefType)
			return NewNotUndefType(commonType(an.ContainedType(), bn.ContainedType()))
		case *PatternType:
			ap := a.(*PatternType)
			bp := b.(*PatternType)
			return NewPatternType(UniqueRegexps(append(ap.regexps, bp.regexps...)))
		case *RuntimeType:
			ar := a.(*RuntimeType)
			br := b.(*RuntimeType)
			if ar.runtime == br.runtime {
				return NewRuntimeType(ar.runtime, ``, nil)
			}
			return DefaultRuntimeType()
		case *TupleType:
			at := a.(*TupleType)
			bt := b.(*TupleType)
			return NewArrayType(commonType(at.CommonElementType(), bt.CommonElementType()), commonType(at.Size(), bt.Size()).(*IntegerType))
		case *TypeType:
			at := a.(*TypeType)
			bt := b.(*TypeType)
			return NewTypeType(commonType(at.ContainedType(), bt.ContainedType()))
		case *VariantType:
			ap := a.(*VariantType)
			bp := b.(*VariantType)
			return NewVariantType(UniqueTypes(append(ap.Types(), bp.Types()...))...)
		}
	}
	// Fall back to the narrowest abstract type covering both operands.
	if isCommonNumeric(a, b) {
		return numericTypeDefault
	}
	if isCommonScalarData(a, b) {
		return scalarDataTypeDefault
	}
	if isCommonScalar(a, b) {
		return scalarTypeDefault
	}
	if isCommonData(a, b) {
		return dataTypeDefault
	}
	if isCommonRichData(a, b) {
		return richDataTypeDefault
	}
	return anyTypeDefault
}
func isCommonNumeric(a px.Type, b px.Type) bool {
return isAssignable(numericTypeDefault, a) && isAssignable(numericTypeDefault, b)
}
func isCommonScalarData(a px.Type, b px.Type) bool {
return isAssignable(scalarDataTypeDefault, a) && isAssignable(scalarDataTypeDefault, b)
}
func isCommonScalar(a px.Type, b px.Type) bool {
return isAssignable(scalarTypeDefault, a) && isAssignable(scalarTypeDefault, b)
}
func isCommonData(a px.Type, b px.Type) bool {
return isAssignable(dataTypeDefault, a) && isAssignable(dataTypeDefault, b)
}
func isCommonRichData(a px.Type, b px.Type) bool {
return isAssignable(richDataTypeDefault, a) && isAssignable(richDataTypeDefault, b)
}
func init() {
px.CommonType = commonType
} | types/commonality.go | 0.6705 | 0.528229 | commonality.go | starcoder |
package vclock
// VClock is logically a list of tuples [(pid1, tick1), (pid2, tick2), ...]
// where pid is the process id and tick is the clock value. Absent entries
// represent a tick of zero.
type VClock map[string]uint64

// Relation describes the causal relationship between two vector clocks.
type Relation uint8

const (
	Equal = iota
	Ancestor
	Descendant
	Conflict
)

// Tick advances the tick for the given process id by 1.
func (v VClock) Tick(pid string) {
	v[pid]++
}

// Get returns the tick for the given process id, or 0 if pid doesn't exist.
func (v VClock) Get(pid string) uint64 {
	return v[pid]
}

// Set sets the tick for the given process id. It panics if tick is zero,
// since a zero tick is represented by the absence of the entry.
func (v VClock) Set(pid string, tick uint64) {
	if tick == 0 {
		panic("Tick must be positive value")
	}
	v[pid] = tick
}

// Copy returns an independent copy of this vector clock.
func (v VClock) Copy() VClock {
	res := make(VClock, len(v))
	for pid, tick := range v {
		res[pid] = tick
	}
	return res
}

// Merge merges this vector clock with another, returning a new clock that
// holds the element-wise maximum of the two.
func (v VClock) Merge(other VClock) VClock {
	res := v.Copy()
	for pid, tick := range other {
		if res[pid] < tick {
			res[pid] = tick
		}
	}
	return res
}

// Equal reports whether this vector clock is identical to the given one.
func (v VClock) Equal(other VClock) bool {
	if len(other) != len(v) {
		return false
	}
	for pid, tick := range other {
		if v[pid] != tick {
			return false
		}
	}
	return true
}

// Descendant reports whether this vector clock causally descends from other.
func (v VClock) Descendant(other VClock) bool {
	// v is descendant of other iff:
	// - every element in other is less than or equal to its element in v, and
	// - v != other
	isEqual := len(other) == len(v)
	for pid, tick := range other {
		if tick > v[pid] {
			return false
		} else if isEqual && tick < v[pid] {
			isEqual = false
		}
	}
	return !isEqual
}

// Relation returns the causal relation of this vector clock to the given one.
func (v VClock) Relation(other VClock) Relation {
	if v.Equal(other) {
		return Equal
	} else if other.Descendant(v) {
		return Ancestor
	} else if v.Descendant(other) {
		return Descendant
	}
	return Conflict
}
package index
import (
"image/color"
"gonum.org/v1/gonum/floats"
"gonum.org/v1/gonum/stat"
"gonum.org/v1/plot"
"gonum.org/v1/plot/plotter"
"gonum.org/v1/plot/plotutil"
"gonum.org/v1/plot/vg"
)
/*
Genplot takes an index and plots its keys, CDF, its approximation, and writes a plot.png file in assets/folder
*/
func Genplot(index *LearnedIndex, indexedCol []float64, plotfilepath string, roundedError bool) {
linearRegFn := func(x float64) float64 { return index.M.Predict(x)*float64(index.Len) - 1 }
idxFromCDF := func(i float64) float64 { return stat.CDF(i, stat.Empirical, index.ST.Keys, nil)*float64(index.Len) - 1 }
p, _ := plot.New()
p.Title.Text = "Learned Index RMI"
p.X.Label.Text = "Age"
p.Y.Label.Text = "Index"
courbeKeys := plotter.XYs{}
for i, k := range index.ST.Keys {
courbeKeys = append(courbeKeys, plotter.XY{X: k, Y: float64(i)})
}
approxFn := plotter.NewFunction(linearRegFn)
approxFn.Dashes = []vg.Length{vg.Points(2), vg.Points(2)}
approxFn.Width = vg.Points(2)
approxFn.Color = color.RGBA{G: 255, A: 255}
maxErrFn := plotter.NewFunction(func(x float64) float64 { _, _, upper := index.GuessIndex(x); return float64(upper) })
if roundedError {
maxErrFn = plotter.NewFunction(func(x float64) float64 { return float64(index.MaxErrBound) + index.M.Predict(x)*float64(index.Len) - 1 })
}
maxErrFn.Dashes = []vg.Length{vg.Points(4), vg.Points(5)}
maxErrFn.Width = vg.Points(1)
maxErrFn.Color = plotutil.SoftColors[2]
p.Add(maxErrFn)
p.Legend.Add("upper bound", maxErrFn)
minErrFn := plotter.NewFunction(func(x float64) float64 { _, lower, _ := index.GuessIndex(x); return float64(lower) })
if roundedError {
minErrFn = plotter.NewFunction(func(x float64) float64 { return float64(index.MinErrBound) + index.M.Predict(x)*float64(index.Len) - 1 })
}
minErrFn.Dashes = []vg.Length{vg.Points(4), vg.Points(5)}
minErrFn.Width = vg.Points(1)
minErrFn.Color = plotutil.SoftColors[4]
p.Add(minErrFn)
p.Legend.Add("lower bound", minErrFn)
cdfFn := plotter.NewFunction(idxFromCDF)
cdfFn.Width = vg.Points(1)
cdfFn.Color = color.RGBA{A: 255, B: 255}
plotutil.AddLinePoints(p, "Keys", courbeKeys)
p.Add(approxFn)
p.Legend.Add("Approx (lr)", approxFn)
p.Add(cdfFn)
p.Legend.Add("CDF", cdfFn)
p.X.Min = 0
p.X.Max = floats.Max(index.ST.Keys)
p.Y.Min = -float64(index.Len) / 10
p.Y.Max = float64(index.Len) * 1.5
p.Add(plotter.NewGrid())
p.Save(4*vg.Inch, 4*vg.Inch, plotfilepath)
} | index/genplot.go | 0.695131 | 0.403626 | genplot.go | starcoder |
package iso20022
// Specifies a collection of monetary totals for this settlement.
type SettlementMonetarySummation1 struct {
// Monetary value of the line amount total being reported for this settlement.
LineTotalAmount []*CurrencyAndAmount `xml:"LineTtlAmt,omitempty"`
// Monetary value of the allowance total being reported for this settlement.
AllowanceTotalAmount []*CurrencyAndAmount `xml:"AllwncTtlAmt,omitempty"`
// Monetary value of the total discount being reported for this settlement.
TotalDiscountAmount []*CurrencyAndAmount `xml:"TtlDscntAmt,omitempty"`
// Monetary value of the charge amount total being reported for this settlement.
ChargeTotalAmount []*CurrencyAndAmount `xml:"ChrgTtlAmt,omitempty"`
// Monetary value of the total prepaid amount being reported for this settlement.
TotalPrepaidAmount []*CurrencyAndAmount `xml:"TtlPrepdAmt,omitempty"`
// Monetary value of the total of all tax basis amounts being reported for this settlement.
TaxTotalAmount []*CurrencyAndAmount `xml:"TaxTtlAmt,omitempty"`
// Monetary value of the total of all tax basis amounts being reported for this settlement.
TaxBasisAmount []*CurrencyAndAmount `xml:"TaxBsisAmt,omitempty"`
// Monetary value of a rounding amount being applied and reported for this settlement.
RoundingAmount []*CurrencyAndAmount `xml:"RndgAmt,omitempty"`
// Monetary value of the grand total being reported for this settlement, to include addition and subtraction of individual summation amounts.
GrandTotalAmount []*CurrencyAndAmount `xml:"GrdTtlAmt,omitempty"`
// Monetary value of an amount being reported as information for this settlement.
InformationAmount []*CurrencyAndAmount `xml:"InfAmt,omitempty"`
}
func (s *SettlementMonetarySummation1) AddLineTotalAmount(value, currency string) {
s.LineTotalAmount = append(s.LineTotalAmount, NewCurrencyAndAmount(value, currency))
}
func (s *SettlementMonetarySummation1) AddAllowanceTotalAmount(value, currency string) {
s.AllowanceTotalAmount = append(s.AllowanceTotalAmount, NewCurrencyAndAmount(value, currency))
}
func (s *SettlementMonetarySummation1) AddTotalDiscountAmount(value, currency string) {
s.TotalDiscountAmount = append(s.TotalDiscountAmount, NewCurrencyAndAmount(value, currency))
}
func (s *SettlementMonetarySummation1) AddChargeTotalAmount(value, currency string) {
s.ChargeTotalAmount = append(s.ChargeTotalAmount, NewCurrencyAndAmount(value, currency))
}
func (s *SettlementMonetarySummation1) AddTotalPrepaidAmount(value, currency string) {
s.TotalPrepaidAmount = append(s.TotalPrepaidAmount, NewCurrencyAndAmount(value, currency))
}
func (s *SettlementMonetarySummation1) AddTaxTotalAmount(value, currency string) {
s.TaxTotalAmount = append(s.TaxTotalAmount, NewCurrencyAndAmount(value, currency))
}
func (s *SettlementMonetarySummation1) AddTaxBasisAmount(value, currency string) {
s.TaxBasisAmount = append(s.TaxBasisAmount, NewCurrencyAndAmount(value, currency))
}
func (s *SettlementMonetarySummation1) AddRoundingAmount(value, currency string) {
s.RoundingAmount = append(s.RoundingAmount, NewCurrencyAndAmount(value, currency))
}
func (s *SettlementMonetarySummation1) AddGrandTotalAmount(value, currency string) {
s.GrandTotalAmount = append(s.GrandTotalAmount, NewCurrencyAndAmount(value, currency))
}
func (s *SettlementMonetarySummation1) AddInformationAmount(value, currency string) {
s.InformationAmount = append(s.InformationAmount, NewCurrencyAndAmount(value, currency))
} | SettlementMonetarySummation1.go | 0.786254 | 0.447581 | SettlementMonetarySummation1.go | starcoder |
2D Spirals
https://math.stackexchange.com/questions/175106/distance-between-point-and-a-spiral
*/
//-----------------------------------------------------------------------------
package sdf
import (
"errors"
"math"
)
//-----------------------------------------------------------------------------
// polarDist2 returns the squared distance between two polar points,
// computed via the law of cosines.
func polarDist2(p0, p1 P2) float64 {
	r0, r1 := p0.R, p1.R
	return r0*r0 + r1*r1 - 2.0*r0*r1*math.Cos(p0.Theta-p1.Theta)
}
//-----------------------------------------------------------------------------
// arcSpiral is an archimedean spiral.
type arcSpiral struct {
	a, n, k float64 // r = a * pow(theta, 1/n) + k
}

// radius returns the radius for a given theta.
func (s *arcSpiral) radius(theta float64) float64 {
	var r float64
	if s.a == 0 {
		r = s.k
	} else {
		if s.n == 1.0 {
			r = s.a*theta + s.k
		} else {
			// Fix: multiply by the spiral coefficient a, per the documented
			// formula r = a * pow(theta, 1/n) + k. The previous code omitted
			// s.a, which was inconsistent with theta() (the inverse function).
			r = s.a*math.Pow(theta, 1.0/s.n) + s.k
		}
	}
	return r
}

// theta returns the theta(s) for a given radius.
func (s *arcSpiral) theta(radius float64) ([]float64, error) {
	if s.a == 0 {
		if s.k == radius {
			// infinite solutions
			return nil, errors.New("inf")
		}
		// no solutions
		return nil, nil
	}
	if s.n == 1.0 {
		return []float64{(radius - s.k) / s.a}, nil
	}
	// theta = ((r - k) / a)^n
	return []float64{math.Exp(s.n * math.Log((radius-s.k)/s.a))}, nil
}
//-----------------------------------------------------------------------------
// ArcSpiralSDF2 is a 2d Archimedean spiral.
type ArcSpiralSDF2 struct {
	spiral arcSpiral
	d float64 // offset distance
	start, end P2 // start/end positions
	bb Box2
}

// ArcSpiral2D returns a 2d Archimedean spiral (r = a*theta + k).
func ArcSpiral2D(
	a, k float64, // r = a*theta + k
	start, end float64, // start/end angle (radians)
	d float64, // offset distance
) (SDF2, error) {
	// sanity checking
	if start == end {
		return nil, errors.New("start == end")
	}
	if a == 0 {
		return nil, errors.New("a == 0")
	}
	// n is fixed to 1, so the spiral is linear in theta.
	s := ArcSpiralSDF2{
		spiral: arcSpiral{a, 1.0, k},
		d:      d,
	}
	// start and end points (normalized so start <= end)
	if start > end {
		start, end = end, start
	}
	s.start = P2{s.spiral.radius(start), start}
	s.end = P2{s.spiral.radius(end), end}
	// bounding box: a square covering the largest radius plus the offset
	rMax := math.Max(math.Abs(s.spiral.radius(start)), math.Abs(s.spiral.radius(end))) + d
	s.bb = Box2{V2{-rMax, -rMax}, V2{rMax, rMax}}
	return &s, nil
}
// Evaluate returns the minimum distance to a 2d Archimedean spiral.
//
// The candidate nearest points are the two spiral endpoints plus, for each
// theta solving the spiral equation at the query radius, the spiral point at
// the turn closest to the query angle (and the nearest in-range turn when
// that one falls outside [start, end]).
func (s *ArcSpiralSDF2) Evaluate(p V2) float64 {
	pp := p.CartesianToPolar()
	// end points
	d2 := math.Min(polarDist2(pp, s.start), polarDist2(pp, s.end))
	thetas, err := s.spiral.theta(pp.R)
	if err == nil {
		for _, theta := range thetas {
			// Shift theta by whole turns to the one nearest the query angle.
			n := math.Round((pp.Theta - theta) / Tau)
			theta = pp.Theta - (Tau * n)
			if theta >= s.start.Theta && theta <= s.end.Theta {
				d2 = math.Min(d2, polarDist2(pp, P2{s.spiral.radius(theta), theta}))
			} else {
				// Walk back into range a turn at a time and test the first
				// in-range candidate on each side.
				if theta < s.start.Theta {
					for theta < s.start.Theta {
						theta += Tau
					}
					if theta < s.end.Theta {
						d2 = math.Min(d2, polarDist2(pp, P2{s.spiral.radius(theta), theta}))
					}
				}
				if theta > s.end.Theta {
					for theta > s.end.Theta {
						theta -= Tau
					}
					if theta > s.start.Theta {
						d2 = math.Min(d2, polarDist2(pp, P2{s.spiral.radius(theta), theta}))
					}
				}
			}
		}
	}
	// Subtracting the offset distance d gives the spiral line a thickness.
	return math.Sqrt(d2) - s.d
}

// BoundingBox returns the bounding box of a 2d Archimedean spiral.
func (s *ArcSpiralSDF2) BoundingBox() Box2 {
	return s.bb
}

//-----------------------------------------------------------------------------
package year2021
import (
"strconv"
"strings"
"github.com/lanphiergm/adventofcodego/internal/utils"
)
// Seven Segment Search Part 1 computes the number of 1s, 4s, 7s, and 8s that
// appear in the output
func SevenSegmentSearchPart1(filename string) interface{} {
	count := 0
	for _, entry := range utils.ReadStrings(filename) {
		_, output := parseEntry(entry)
		for _, val := range output {
			// 1, 7, 4 and 8 are the only digits with unique segment counts
			// (2, 3, 4 and 7 lit segments respectively).
			switch len(val) {
			case 2, 3, 4, 7:
				count++
			}
		}
	}
	return count
}
// Seven Segment Search Part 2 computes the sum of all output values
func SevenSegmentSearchPart2(filename string) interface{} {
	total := 0
	for _, entry := range utils.ReadStrings(filename) {
		total += decodeEntry(entry)
	}
	return total
}
// parseEntry splits a notes entry of the form "patterns | outputs" into its
// space-separated signal patterns and output values.
func parseEntry(entry string) ([]string, []string) {
	halves := strings.Split(entry, " | ")
	return strings.Split(halves[0], " "), strings.Split(halves[1], " ")
}
// decodeEntry deduces the wire-to-segment mapping for one notes entry and
// returns its four-digit output value as an int. Digits 1, 7, 4 and 8 are
// identified by their unique segment counts; the remaining digits are
// resolved by subset relationships between patterns.
func decodeEntry(entry string) int {
	var digits [10]string
	var len5 []string // candidate patterns for digits 2, 3 and 5
	var len6 []string // candidate patterns for digits 0, 6 and 9
	input, output := parseEntry(entry)
	for _, pattern := range input {
		switch len(pattern) {
		case 2:
			digits[1] = pattern
		case 3:
			digits[7] = pattern
		case 4:
			digits[4] = pattern
		case 5:
			len5 = append(len5, pattern)
		case 6:
			len6 = append(len6, pattern)
		case 7:
			digits[8] = pattern
		}
	}
	// Find 3 as the only len5 that contains 1
	for i, pattern := range len5 {
		if isSubset(pattern, digits[1]) {
			digits[3] = pattern
			len5 = utils.RemoveStr(len5, i)
			break
		}
	}
	// Find 9 as the only len6 that contains 3
	for i, pattern := range len6 {
		if isSubset(pattern, digits[3]) {
			digits[9] = pattern
			len6 = utils.RemoveStr(len6, i)
			break
		}
	}
	// Find 0 as the only remaining len6 that contains 1
	for i, pattern := range len6 {
		if isSubset(pattern, digits[1]) {
			digits[0] = pattern
			len6 = utils.RemoveStr(len6, i)
			break
		}
	}
	// 6 is the only len6 left
	digits[6] = len6[0]
	// Find 5 as the only len5 that is a subset of 9
	for i, pattern := range len5 {
		if isSubset(digits[9], pattern) {
			digits[5] = pattern
			len5 = utils.RemoveStr(len5, i)
			break
		}
	}
	// 2 is the only len5 left
	digits[2] = len5[0]
	// Translate each output pattern into its digit and join them.
	outputStr := ""
	for _, pattern := range output {
		outputStr += findDigit(pattern, digits)
	}
	return utils.Atoi(outputStr)
}
// isSubset reports whether every rune of subset also appears in set.
func isSubset(set string, subset string) bool {
	for _, r := range subset {
		if strings.IndexRune(set, r) < 0 {
			return false
		}
	}
	return true
}

// areDigitsEqual reports whether two segment patterns light the same
// segments, regardless of the ordering of characters within the pattern.
func areDigitsEqual(a string, b string) bool {
	return len(a) == len(b) && isSubset(a, b)
}
func findDigit(pattern string, digits [10]string) string {
for i, digit := range digits {
if areDigitsEqual(pattern, digit) {
return strconv.Itoa(i)
}
}
return ""
} | internal/puzzles/year2021/day_08_seven_segment_search.go | 0.68637 | 0.491151 | day_08_seven_segment_search.go | starcoder |
package math32
// Box2 represents a 2D bounding box defined by two points:
// the point with minimum coordinates and the point with maximum coordinates.
type Box2 struct {
	min Vector2
	max Vector2
}

// NewBox2 creates and returns a pointer to a new Box2 defined
// by its minimum and maximum coordinates.
func NewBox2(min, max *Vector2) *Box2 {
	b := new(Box2)
	b.Set(min, max)
	return b
}

// Set sets this bounding box minimum and maximum coordinates.
// A nil min or max resets that bound to the "empty" sentinel (+/-Infinity).
// Returns pointer to this updated bounding box.
func (b *Box2) Set(min, max *Vector2) *Box2 {
	if min != nil {
		b.min = *min
	} else {
		b.min.Set(Infinity, Infinity)
	}
	if max != nil {
		b.max = *max
	} else {
		b.max.Set(-Infinity, -Infinity)
	}
	return b
}
// SetFromPoints set this bounding box from the specified array of points.
// Returns pointer to this updated bounding box.
func (b *Box2) SetFromPoints(points []*Vector2) *Box2 {
	b.MakeEmpty()
	for _, p := range points {
		b.ExpandByPoint(p)
	}
	return b
}

// SetFromCenterAndSize set this bounding box from a center point and size.
// Size is a vector from the minimum point to the maximum point.
// Returns pointer to this updated bounding box.
func (b *Box2) SetFromCenterAndSize(center, size *Vector2) *Box2 {
	var half Vector2
	half.Copy(size).MultiplyScalar(0.5)
	b.min.Copy(center).Sub(&half)
	b.max.Copy(center).Add(&half)
	return b
}
// Copy copies the other bounding box into this one.
// Returns pointer to this updated bounding box.
func (b *Box2) Copy(box *Box2) *Box2 {
	b.min = box.min
	b.max = box.max
	return b
}

// MakeEmpty sets this bounding box to empty (min at +Inf, max at -Inf,
// so the first expanded point becomes both bounds).
// Returns pointer to this updated bounding box.
func (b *Box2) MakeEmpty() *Box2 {
	b.min.Set(Infinity, Infinity)
	b.max.Set(-Infinity, -Infinity)
	return b
}

// Empty returns if this bounding box is empty (max < min on any axis).
func (b *Box2) Empty() bool {
	return (b.max.X < b.min.X) || (b.max.Y < b.min.Y)
}
// Center calculates the center point of this bounding box and
// stores its pointer to optionalTarget, if not nil, and also returns it.
func (b *Box2) Center(optionalTarget *Vector2) *Vector2 {
	result := optionalTarget
	if result == nil {
		result = NewVector2(0, 0)
	}
	return result.AddVectors(&b.min, &b.max).MultiplyScalar(0.5)
}
// Size calculates the size of this bounding box: the vector from
// its minimum point to its maximum point.
// Store pointer to the calculated size into optionalTarget, if not nil,
// and also returns it.
func (b *Box2) Size(optionalTarget *Vector2) *Vector2 {
	var result *Vector2
	if optionalTarget == nil {
		result = NewVector2(0, 0)
	} else {
		result = optionalTarget
	}
	// Fix: the size is max - min; the arguments were previously swapped,
	// which produced the negated size vector, contradicting the doc comment.
	return result.SubVectors(&b.max, &b.min)
}
// ExpandByPoint may expand this bounding box to include the specified point.
// Returns pointer to this updated bounding box.
func (b *Box2) ExpandByPoint(point *Vector2) *Box2 {
	b.min.Min(point)
	b.max.Max(point)
	return b
}

// ExpandByVector expands this bounding box by the specified vector:
// min moves down by vector and max moves up by vector.
// Returns pointer to this updated bounding box.
func (b *Box2) ExpandByVector(vector *Vector2) *Box2 {
	b.min.Sub(vector)
	b.max.Add(vector)
	return b
}

// ExpandByScalar expands this bounding box by the specified scalar on
// every side.
// Returns pointer to this updated bounding box.
func (b *Box2) ExpandByScalar(scalar float32) *Box2 {
	b.min.AddScalar(-scalar)
	b.max.AddScalar(scalar)
	return b
}
// ContainsPoint returns if this bounding box contains the specified point.
func (b *Box2) ContainsPoint(point *Vector2) bool {
	if point.X < b.min.X || point.X > b.max.X ||
		point.Y < b.min.Y || point.Y > b.max.Y {
		return false
	}
	return true
}

// ContainsBox returns if this bounding box fully contains other box.
func (b *Box2) ContainsBox(other *Box2) bool {
	if (b.min.X <= other.min.X) && (other.max.X <= b.max.X) &&
		(b.min.Y <= other.min.Y) && (other.max.Y <= b.max.Y) {
		return true
	}
	return false
}

// IsIntersectionBox returns if other box intersects this one.
func (b *Box2) IsIntersectionBox(other *Box2) bool {
	// Uses the 4 separating-axis checks of the 2D case to rule out intersections.
	if other.max.X < b.min.X || other.min.X > b.max.X ||
		other.max.Y < b.min.Y || other.min.Y > b.max.Y {
		return false
	}
	return true
}
// ClampPoint calculates a new point which is the specified point clamped inside this box.
// Stores the pointer to this new point into optionalTarget, if not nil, and also returns it.
func (b *Box2) ClampPoint(point *Vector2, optionalTarget *Vector2) *Vector2 {
	result := optionalTarget
	if result == nil {
		result = NewVector2(0, 0)
	}
	return result.Copy(point).Clamp(&b.min, &b.max)
}

// DistanceToPoint returns the distance from this box to the specified point
// (zero when the point lies inside the box).
func (b *Box2) DistanceToPoint(point *Vector2) float32 {
	clamped := NewVector2(0, 0).Copy(point).Clamp(&b.min, &b.max)
	return clamped.Sub(point).Length()
}
// Intersect sets this box to the intersection with other box.
// Returns pointer to this updated bounding box.
func (b *Box2) Intersect(other *Box2) *Box2 {
b.min.Max(&other.min)
b.max.Min(&other.max)
return b
}
// Union set this box to the union with other box.
// Returns pointer to this updated bounding box.
func (b *Box2) Union(other *Box2) *Box2 {
b.min.Min(&other.min)
b.max.Max(&other.max)
return b
}
// Translate translates the position of this box by offset.
// Returns pointer to this updated box.
func (b *Box2) Translate(offset *Vector2) *Box2 {
b.min.Add(offset)
b.max.Add(offset)
return b
}
// Equals returns if this box is equal to other
func (b *Box2) Equals(other *Box2) bool {
return other.min.Equals(&b.min) && other.max.Equals(&b.max)
} | math32/box2.go | 0.9244 | 0.516413 | box2.go | starcoder |
package renderer
import (
"image/color"
"math"
"github.com/9600org/cubebit"
)
// Object is a thing to be rendered.
type Object interface {
	// At returns the colour at the given point in space.
	At(x, y, z float64) color.RGBA
}

// Sphere represents a sphere to be rendered
type Sphere struct {
	// CentreX, CentreY, and CentreZ specify the centre of the sphere in space.
	// The visible space is in the range [0..1]
	CentreX, CentreY, CentreZ float64
	// Radius is the radius of the sphere.
	Radius float64
	// CentreColour is the colour of the sphere at its centre.
	CentreColour color.RGBA
	// EdgeColour is the colour of the sphere at its edge.
	EdgeColour color.RGBA
}

// Renderer is a *very* simple renderer for objects on the Cube:Bit volume.
type Renderer struct {
	c       *cubebit.Cubebit
	objects []Object // objects sampled at every voxel on each Render pass
}

// New creates a new Renderer.
func New(c *cubebit.Cubebit) *Renderer {
	return &Renderer{c: c}
}

// Add adds an object to be rendered.
func (r *Renderer) Add(o Object) {
	r.objects = append(r.objects, o)
}
// Render renders the objects onto the Cube:Bit LEDs by sampling every voxel
// and averaging the colour contributed by each registered object. Voxels are
// rendered black when no objects have been added.
func (r *Renderer) Render() {
	// NOTE(review): Bounds() results are assigned in (x, z, y) order here —
	// confirm against the cubebit API that this is intentional.
	sx, sz, sy := r.c.Bounds()
	sxf := float64(sx - 1)
	syf := float64(sy - 1)
	szf := float64(sz - 1)
	for z := 0; z < sz; z++ {
		for y := 0; y < sy; y++ {
			for x := 0; x < sx; x++ {
				var lr, lg, lb uint32
				n := uint32(0)
				for _, o := range r.objects {
					// cr/cg/cb avoid shadowing the receiver r as the
					// previous code did.
					cr, cg, cb, _ := o.At(float64(x)/sxf, float64(y)/syf, float64(z)/szf).RGBA()
					lr += cr >> 8
					lg += cg >> 8
					lb += cb >> 8
					n++
				}
				// Guard the average: with no objects n is 0 and the old
				// unconditional division panicked with a divide-by-zero.
				c := color.RGBA{0, 0, 0, 255}
				if n > 0 {
					c = color.RGBA{uint8(lr / n), uint8(lg / n), uint8(lb / n), 255}
				}
				r.c.Set(x, y, z, c)
			}
		}
	}
	r.c.Render()
}
// blend returns a colour between a and b, according to the ratio specified.
func blend(a, b color.Color, blend float64) color.RGBA {
inv := float64(0) // float64(1)-blend
ar, ag, ab, aa := a.RGBA()
br, bg, bb, ba := b.RGBA()
or, og, ob, _ := uint32(float64(ar)*blend+float64(br)*inv),
uint32(float64(ag)*blend+float64(bg)*inv),
uint32(float64(ab)*blend+float64(bb)*inv),
uint32(float64(aa)*blend+float64(ba)*inv)
return color.RGBA{uint8(or >> 8), uint8(og >> 8), uint8(ob >> 8), 255} //uint8(oa>>8)}
}
// At implements Object.At.
func (s *Sphere) At(x, y, z float64) color.RGBA {
dx := (s.CentreX - x)
dy := (s.CentreY - y)
dz := (s.CentreZ - z)
dist := math.Sqrt(dx*dx + dy*dy + dz*dz)
if dist > s.Radius {
return color.RGBA{0, 0, 0, 0}
}
return blend(s.CentreColour, s.EdgeColour, float64(1)-((dist*dist)/s.Radius))
} | renderer/render.go | 0.840423 | 0.460895 | render.go | starcoder |
package model
import (
"fmt"
"github.com/reserve-trust/grule-rule-engine/pkg"
"reflect"
"strings"
)
// ValueNode is an abstraction layer for accessing underlying DOM-style data.
// Nodes form a tree in which each node is tied to an underlying data node.
type ValueNode interface {
	// Identity, raw value access and tree navigation.
	IdentifiedAs() string
	Value() reflect.Value
	HasParent() bool
	Parent() ValueNode
	ContinueWithValue(value reflect.Value, identifiedAs string) ValueNode
	GetValue() (reflect.Value, error)
	GetType() (reflect.Type, error)
	// Array/slice access.
	IsArray() bool
	GetArrayType() (reflect.Type, error)
	GetArrayValueAt(index int) (reflect.Value, error)
	GetChildNodeByIndex(index int) (ValueNode, error)
	SetArrayValueAt(index int, value reflect.Value) error
	AppendValue(value []reflect.Value) error
	Length() (int, error)
	// Map access.
	IsMap() bool
	GetMapValueAt(index reflect.Value) (reflect.Value, error)
	SetMapValueAt(index, newValue reflect.Value) error
	GetChildNodeBySelector(index reflect.Value) (ValueNode, error)
	// Struct/object access and method invocation.
	IsObject() bool
	GetObjectValueByField(field string) (reflect.Value, error)
	GetObjectTypeByField(field string) (reflect.Type, error)
	SetObjectValueByField(field string, newValue reflect.Value) error
	CallFunction(funcName string, args ...reflect.Value) (reflect.Value, error)
	GetChildNodeByField(field string) (ValueNode, error)
	// Scalar kind predicates.
	IsTime() bool
	IsInteger() bool
	IsReal() bool
	IsBool() bool
	IsString() bool
}
// StrLen is return the string length value
func StrLen(str string, arg []reflect.Value) (reflect.Value, error) {
if arg != nil && len(arg) != 0 {
return reflect.ValueOf(nil), fmt.Errorf("function Len requires no argument")
}
i := len(str)
return reflect.ValueOf(i), nil
}
// StrCompare is like strings.compare() function, to be called by the ValueNode function call if the underlying data is string.
func StrCompare(str string, arg []reflect.Value) (reflect.Value, error) {
if arg == nil || len(arg) != 1 || arg[0].Kind() != reflect.String {
return reflect.ValueOf(nil), fmt.Errorf("function Compare requires 1 string argument")
}
i := strings.Compare(str, arg[0].String())
return reflect.ValueOf(i), nil
}
// StrContains is like strings.Contains() function, to be called by the ValueNode function call if the underlying data is string. is like strings.compare() function, to be called by the ValueNode functioncall if the underlying data is string.
func StrContains(str string, arg []reflect.Value) (reflect.Value, error) {
if arg == nil || len(arg) != 1 || arg[0].Kind() != reflect.String {
return reflect.ValueOf(nil), fmt.Errorf("function Contains requires 1 string argument")
}
i := strings.Contains(str, arg[0].String())
return reflect.ValueOf(i), nil
}
// StrCount is like strings.Count() function, to be called by the ValueNode function call if the underlying data is string.
func StrCount(str string, arg []reflect.Value) (reflect.Value, error) {
if arg == nil || len(arg) != 1 || arg[0].Kind() != reflect.String {
return reflect.ValueOf(nil), fmt.Errorf("function Count requires 1 string argument")
}
i := strings.Count(str, arg[0].String())
return reflect.ValueOf(i), nil
}
// StrHasPrefix is like strings.HasPrefix() function, to be called by the ValueNode functioncall if the underlying data is string.
func StrHasPrefix(str string, arg []reflect.Value) (reflect.Value, error) {
if arg == nil || len(arg) != 1 || arg[0].Kind() != reflect.String {
return reflect.ValueOf(nil), fmt.Errorf("function HasPrefix requires 1 string argument")
}
b := strings.HasPrefix(str, arg[0].String())
return reflect.ValueOf(b), nil
}
// StrHasSuffix is like strings.HasSuffix() function, to be called by the ValueNode functioncall if the underlying data is string.
func StrHasSuffix(str string, arg []reflect.Value) (reflect.Value, error) {
if arg == nil || len(arg) != 1 || arg[0].Kind() != reflect.String {
return reflect.ValueOf(nil), fmt.Errorf("function HasSuffix requires 1 string argument")
}
b := strings.HasSuffix(str, arg[0].String())
return reflect.ValueOf(b), nil
}
// StrIndex is like strings.Index() function, to be called by the ValueNode functioncall if the underlying data is string.
func StrIndex(str string, arg []reflect.Value) (reflect.Value, error) {
if arg == nil || len(arg) != 1 || arg[0].Kind() != reflect.String {
return reflect.ValueOf(nil), fmt.Errorf("function Index requires 1 string argument")
}
b := strings.Index(str, arg[0].String())
return reflect.ValueOf(b), nil
}
// StrLastIndex is like strings.LastIndex() function, to be called by the ValueNode functioncall if the underlying data is string.
func StrLastIndex(str string, arg []reflect.Value) (reflect.Value, error) {
if arg == nil || len(arg) != 1 || arg[0].Kind() != reflect.String {
return reflect.ValueOf(nil), fmt.Errorf("function LastIndex requires 1 string argument")
}
b := strings.LastIndex(str, arg[0].String())
return reflect.ValueOf(b), nil
}
// StrRepeat mirrors strings.Repeat against the receiver string. The single
// argument must be numeric (int, uint or float, per pkg.GetBaseKind); floats
// are truncated to an int repeat count.
func StrRepeat(str string, arg []reflect.Value) (reflect.Value, error) {
	if len(arg) != 1 {
		return reflect.ValueOf(nil), fmt.Errorf("function Repeat requires 1 numeric argument")
	}
	var repeat int
	switch pkg.GetBaseKind(arg[0]) {
	case reflect.Int64:
		repeat = int(arg[0].Int())
	case reflect.Uint64:
		repeat = int(arg[0].Uint())
	case reflect.Float64:
		repeat = int(arg[0].Float())
	default:
		return reflect.ValueOf(nil), fmt.Errorf("function Repeat requires 1 numeric argument")
	}
	return reflect.ValueOf(strings.Repeat(str, repeat)), nil
}
// StrReplace is like strings.Replace() function, to be called by the ValueNode functioncall if the underlying data is string.
func StrReplace(str string, arg []reflect.Value) (reflect.Value, error) {
if arg == nil || len(arg) != 2 || arg[0].Kind() != reflect.String || arg[1].Kind() != reflect.String {
return reflect.ValueOf(nil), fmt.Errorf("function Cmpare requires 2 string argument")
}
b := strings.ReplaceAll(str, arg[0].String(), arg[1].String())
return reflect.ValueOf(b), nil
}
// StrSplit is like strings.Split() function, to be called by the ValueNode functioncall if the underlying data is string.
func StrSplit(str string, arg []reflect.Value) (reflect.Value, error) {
if arg == nil || len(arg) != 1 || arg[0].Kind() != reflect.String {
return reflect.ValueOf(nil), fmt.Errorf("function Split requires 1 string argument")
}
b := strings.Split(str, arg[0].String())
return reflect.ValueOf(b), nil
}
// StrToLower mirrors strings.ToLower on the receiver string; it accepts no
// arguments.
func StrToLower(str string, arg []reflect.Value) (reflect.Value, error) {
	if len(arg) > 0 {
		return reflect.ValueOf(nil), fmt.Errorf("function ToLower requires no argument")
	}
	return reflect.ValueOf(strings.ToLower(str)), nil
}
// StrToUpper mirrors strings.ToUpper on the receiver string; it accepts no
// arguments.
func StrToUpper(str string, arg []reflect.Value) (reflect.Value, error) {
	if len(arg) > 0 {
		return reflect.ValueOf(nil), fmt.Errorf("function ToUpper requires no argument")
	}
	return reflect.ValueOf(strings.ToUpper(str)), nil
}
// StrTrim trims leading and trailing whitespace from the receiver string
// (it maps to strings.TrimSpace, not strings.Trim); it accepts no arguments.
func StrTrim(str string, arg []reflect.Value) (reflect.Value, error) {
	if len(arg) > 0 {
		return reflect.ValueOf(nil), fmt.Errorf("function Trim requires no argument")
	}
	return reflect.ValueOf(strings.TrimSpace(str)), nil
}
// StrIn will check the string instance if its equals one of the arguments, if no argument specified it will return false
func StrIn(str string, arg []reflect.Value) (reflect.Value, error) {
if arg != nil && len(arg) != 0 {
return reflect.ValueOf(false), nil
}
for _, a := range arg {
if !a.IsNil() && a.IsValid() && a.Kind() == reflect.String {
if a.String() == str {
return reflect.ValueOf(true), nil
}
}
}
return reflect.ValueOf(false), nil
}
// ArrMapLen will return the size of underlying map, array or slice
func ArrMapLen(arr reflect.Value, arg []reflect.Value) (reflect.Value, error) {
if arg != nil && len(arg) != 0 {
return reflect.ValueOf(nil), fmt.Errorf("function Len requires no argument")
}
return reflect.ValueOf(arr.Len()), nil
} | model/DataAccessLayer.go | 0.683736 | 0.458409 | DataAccessLayer.go | starcoder |
package evaluator
import (
"fmt"
"yokan/ast"
"yokan/object"
)
// Eval walks the AST node and produces its runtime value in env. Programs and
// statements delegate to the statement evaluators; expressions produce
// objects directly. Unknown node types yield an OtherError.
func Eval(node ast.Node, env *object.Environment) object.Object {
	switch node := node.(type) {
	case *ast.Program:
		return evalStatements(node.Statements, env)
	case *ast.ExpressionStatement:
		return Eval(node.Expression, env)
	case *ast.Assign:
		return evalAssign(*node, env)
	case *ast.FunctionLiteral:
		// Capture the defining environment so the function is a closure.
		return &object.Function{Parameters: node.Arguments, Body: node.Body, Env: env}
	case *ast.FunctionCalling:
		function := Eval(node.Function, env)
		if isError(function) { return function }
		args := evalExpressions(node.Arguments, env)
		// evalExpressions signals failure as a single-element error slice.
		if len(args) == 1 && isError(args[0]) { return args[0] }
		return applyFunction(function, args)
	case *ast.PrefixExpression:
		right := Eval(node.Right, env)
		if isError(right) { return right }
		return evalPrefixExpression(node.Operator, right)
	case *ast.InfixExpression:
		// Operands are evaluated left to right; errors short-circuit.
		left := Eval(node.Left, env)
		if isError(left) { return left }
		right := Eval(node.Right, env)
		if isError(right) { return right }
		return evalInfixExpression(left, node.Operator, right)
	case *ast.Identifier:
		name := node.Name
		val, ok := env.Get(name)
		if !ok { return &object.OtherError{Msg: fmt.Sprintf("%s is unbouded variable", name)} }
		return val
	case *ast.IntegerLiteral:
		return &object.Integer{Value: node.Value}
	case *ast.StringLiteral:
		return &object.String{Value: node.Value}
	}
	return &object.OtherError{Msg: fmt.Sprintf("%T is not yet implemented", node)}
}
// evalStatements evaluates each statement in order, stopping at the first
// error; it returns the last evaluated value (Null for an empty list).
func evalStatements(stmts []ast.Statement, env *object.Environment) object.Object {
	var result object.Object = &object.Null{}
	for _, s := range stmts {
		if result = Eval(s, env); isError(result) {
			break
		}
	}
	return result
}
// evalAssign evaluates the right-hand side, binds it to the name in env, and
// yields the statement marker value (assignments produce no expression value).
func evalAssign(assign ast.Assign, env *object.Environment) object.Object {
	val := Eval(assign.Value, env)
	if isError(val) {
		return val
	}
	env.Set(assign.Name.Name, val)
	return &object.ReturnValueOsStatement{}
}
// evalExpressions evaluates each expression left to right. On the first error
// it returns a single-element slice holding only that error.
func evalExpressions(exps []ast.Expression, env *object.Environment) []object.Object {
	var result []object.Object
	for _, exp := range exps {
		value := Eval(exp, env)
		if isError(value) {
			return []object.Object{value}
		}
		result = append(result, value)
	}
	return result
}
// applyFunction invokes a user-defined function (in a child environment with
// parameters bound) or a builtin; any other value yields an error.
func applyFunction(fn object.Object, args []object.Object) object.Object {
	switch f := fn.(type) {
	case *object.Function:
		if len(f.Parameters) != len(args) {
			return &object.OtherError{
				Msg: fmt.Sprintf("Function need %d params, but got %d params", len(f.Parameters), len(args)),
			}
		}
		return evalStatements(f.Body, inheritFunctionEnv(f, args))
	case *object.Buildin:
		return f.Fn(args...)
	default:
		return &object.OtherError{
			Msg: fmt.Sprintf("%s(%s) is not a function", fn.Type(), fn.String()),
		}
	}
}
// inheritFunctionEnv builds a child environment of the function's closure
// with each parameter bound to the corresponding argument. The caller has
// already verified len(args) matches len(fn.Parameters).
func inheritFunctionEnv(fn *object.Function, args []object.Object) *object.Environment {
	env := object.NewInferitEnvironment(fn.Env)
	for i, param := range fn.Parameters {
		env.Set(param.Name, args[i])
	}
	return env
}
// evalPrefixExpression dispatches a unary operator to its handler.
// Only '+' and '-' are defined; anything else is an error.
func evalPrefixExpression(operator string, right object.Object) object.Object {
	switch operator {
	case "+":
		return evalPlusPrefixOperatorExpression(right)
	case "-":
		return evalMinusPrefixOperatorExpression(right)
	default:
		return &object.OtherError{Msg: fmt.Sprintf("Invalid operator '%s' in PrefixExpression.", operator)}
	}
}
// evalPlusPrefixOperatorExpression validates that the operand is an integer
// and returns it unchanged (unary plus is the identity).
func evalPlusPrefixOperatorExpression(right object.Object) object.Object {
	if err, ok := checkTypeIsInteger("PlusPrefixOperator", right); !ok {
		return err
	}
	return right
}
// evalMinusPrefixOperatorExpression negates an integer operand.
// NOTE(review): the operator name "MinuPrefixOperator" in the error looks
// like a typo for "Minus..." — preserved as-is since it is a runtime string.
func evalMinusPrefixOperatorExpression(right object.Object) object.Object {
	if err, ok := checkTypeIsInteger("MinuPrefixOperator", right); !ok {
		return err
	}
	return &object.Integer{Value: -right.(*object.Integer).Value}
}
// evalInfixExpression dispatches a binary operator to its handler. Arithmetic
// and ordering operators require integer operands; equality accepts any
// comparable type. Unknown operators yield an error.
func evalInfixExpression(left object.Object, operator string, right object.Object) object.Object {
	switch operator {
	case "+":
		return evalPlusInfixOperatorExpression(left, right)
	case "-":
		return evalMinusInfixOperatorExpression(left, right)
	case "*":
		return evalStarInfixOperatorExpression(left, right)
	case "/":
		return evalSlashInfixOperatorExpression(left, right)
	case "==":
		return evalEqInfixOperatorExpression(left, right)
	case "!=":
		return evalNotEqInfixOperatorExpression(left, right)
	case "<":
		return evalLTInfixOperatorExpression(left, right)
	case "<=":
		return evalLTEQInfixOperatorExpression(left, right)
	case ">":
		return evalGTInfixOperatorExpression(left, right)
	case ">=":
		return evalGTEQInfixOperatorExpression(left, right)
	default:
		return &object.OtherError{Msg: fmt.Sprintf("Invalid operator '%s' in InfixExpression.", operator)}
	}
}
// evalPlusInfixOperatorExpression adds two integer operands, checking left
// then right for type errors.
func evalPlusInfixOperatorExpression(left object.Object, right object.Object) object.Object {
	for _, operand := range []object.Object{left, right} {
		if err, ok := checkTypeIsInteger("PlusInfixOperator", operand); !ok {
			return err
		}
	}
	l := left.(*object.Integer).Value
	r := right.(*object.Integer).Value
	return &object.Integer{Value: l + r}
}
// evalMinusInfixOperatorExpression subtracts two integer operands.
func evalMinusInfixOperatorExpression(left object.Object, right object.Object) object.Object {
	{
		err, ok := checkTypeIsInteger("MinusInfixOperator", left)
		if !ok { return err }
	}
	{
		err, ok := checkTypeIsInteger("MinusInfixOperator", right)
		if !ok { return err }
	}
	l := left.(*object.Integer).Value
	r := right.(*object.Integer).Value
	return &object.Integer{Value: l-r}
}
// evalStarInfixOperatorExpression multiplies two integer operands.
func evalStarInfixOperatorExpression(left object.Object, right object.Object) object.Object {
	{
		err, ok := checkTypeIsInteger("StarInfixOperator", left)
		if !ok { return err }
	}
	{
		err, ok := checkTypeIsInteger("StarInfixOperator", right)
		if !ok { return err }
	}
	l := left.(*object.Integer).Value
	r := right.(*object.Integer).Value
	return &object.Integer{Value: l*r}
}
// evalSlashInfixOperatorExpression divides two integer operands using integer
// (truncating) division; dividing by zero yields an explicit error rather
// than a Go runtime panic.
func evalSlashInfixOperatorExpression(left object.Object, right object.Object) object.Object {
	{
		err, ok := checkTypeIsInteger("SlashInfixOperator", left)
		if !ok { return err }
	}
	{
		err, ok := checkTypeIsInteger("SlashInfixOperator", right)
		if !ok { return err }
	}
	l := left.(*object.Integer).Value
	r := right.(*object.Integer).Value
	if r==0 {
		return &object.OtherError{Msg: "Zero division Error"}
	}
	return &object.Integer{Value: l/r}
}
// checkTypeIsInteger returns (nil, true) when val is an Integer; otherwise it
// returns a TypeMisMatchError naming the operator plus false.
func checkTypeIsInteger(name string, val object.Object) (object.Object, bool) {
	if val.Type() != object.INTEGER_OBJ {
		return &object.TypeMisMatchError{Name: name, Expected: object.INTEGER_OBJ, Got: val}, false
	}
	return nil, true
}
// Types whose values may appear on either side of == / !=.
var comparableInEqInfixOperatorTypes = []object.ObjectType {
	object.INTEGER_OBJ,
	object.STRING_OBJ,
	object.BOOLEAN_OBJ,
	object.NULL_OBJ,
}
// Human-readable list of the comparable types, used in error messages.
var comparableInEqInfixOperatorTypesName =
	object.INTEGER_OBJ+", "+object.STRING_OBJ+", "+object.BOOLEAN_OBJ+", "+object.NULL_OBJ
// evalEqInfixOperatorExpression evaluates == for integers, strings, booleans
// and null. Operands of different (comparable) types are simply unequal;
// non-comparable operand types produce a TypeMisMatchError.
func evalEqInfixOperatorExpression(left object.Object, right object.Object) object.Object {
	if !contains(left.Type(), comparableInEqInfixOperatorTypes) {
		return &object.TypeMisMatchError{
			Name: "EqInfixOperator",
			Expected: comparableInEqInfixOperatorTypesName,
			Got: left,
		}
	}
	if !contains(right.Type(), comparableInEqInfixOperatorTypes) {
		return &object.TypeMisMatchError{
			Name: "EqInfixOperator",
			Expected: comparableInEqInfixOperatorTypesName,
			Got: right,
		}
	}
	// Pairwise same-type comparison: each disjunct requires both operands to
	// share a type and (except for null) hold equal values.
	if (
	left.Type() == object.INTEGER_OBJ &&
	right.Type() == object.INTEGER_OBJ &&
	left.(*object.Integer).Value == right.(*object.Integer).Value ||
	left.Type() == object.STRING_OBJ &&
	right.Type() == object.STRING_OBJ &&
	left.(*object.String).Value == right.(*object.String).Value ||
	left.Type() == object.BOOLEAN_OBJ &&
	right.Type() == object.BOOLEAN_OBJ &&
	left.(*object.Boolean).Value == right.(*object.Boolean).Value ||
	left.Type() == object.NULL_OBJ &&
	right.Type() == object.NULL_OBJ ) {
		return &object.Boolean{Value: true}
	}
	return &object.Boolean{Value: false}
}
// evalNotEqInfixOperatorExpression computes != by negating the == result;
// type errors from the equality check pass through 'not' unchanged.
func evalNotEqInfixOperatorExpression(left object.Object, right object.Object) object.Object {
	return not(evalEqInfixOperatorExpression(left, right))
}
// evalLTInfixOperatorExpression evaluates `left < right` for integers.
func evalLTInfixOperatorExpression(left object.Object, right object.Object) object.Object {
	{
		err, ok := checkTypeIsInteger("LTInfixOperator", left)
		if !ok { return err }
	}
	{
		err, ok := checkTypeIsInteger("LTInfixOperator", right)
		if !ok { return err }
	}
	l := left.(*object.Integer).Value
	r := right.(*object.Integer).Value
	return &object.Boolean{Value: l<r}
}
// evalLTEQInfixOperatorExpression evaluates `left <= right` for integers.
func evalLTEQInfixOperatorExpression(left object.Object, right object.Object) object.Object {
	{
		// The operator name in these errors was "LTInfixOperator", copy-pasted
		// from evalLTInfixOperatorExpression; report the actual operator.
		err, ok := checkTypeIsInteger("LTEQInfixOperator", left)
		if !ok { return err }
	}
	{
		err, ok := checkTypeIsInteger("LTEQInfixOperator", right)
		if !ok { return err }
	}
	l := left.(*object.Integer).Value
	r := right.(*object.Integer).Value
	return &object.Boolean{Value: l<=r}
}
// evalGTInfixOperatorExpression computes left > right as not(left <= right);
// errors from the delegate pass through 'not' unchanged.
func evalGTInfixOperatorExpression(left object.Object, right object.Object) object.Object {
	return not(evalLTEQInfixOperatorExpression(left, right))
}
// evalGTEQInfixOperatorExpression computes left >= right as not(left < right).
func evalGTEQInfixOperatorExpression(left object.Object, right object.Object) object.Object {
	return not(evalLTInfixOperatorExpression(left, right))
}
// not negates a Boolean object and passes every other value (notably errors)
// through unchanged.
func not(obj object.Object) object.Object {
	b, ok := obj.(*object.Boolean)
	if !ok {
		return obj
	}
	return &object.Boolean{Value: !b.Value}
}
// contains reports whether target is one of types. It returns on the first
// match instead of OR-accumulating a flag across the whole slice as before.
func contains(target object.ObjectType, types []object.ObjectType) bool {
	for _, t := range types {
		if target == t {
			return true
		}
	}
	return false
}
func isError(obj object.Object) bool {
return obj.Type() == object.ERROR_OBJ
} | evaluator/evaluator.go | 0.542621 | 0.416797 | evaluator.go | starcoder |
package game
import "fmt"
// mark is an int alias so Game marks serialise as plain JSON numbers.
type mark = int
const (
	// None represents no marking on a square
	None mark = iota
	// X represents an 'X' marking on a square
	X mark = iota
	// O represents an 'O' marking on a square
	O mark = iota
)
// GridRef indicates a square on the grid surface, indexed row-major from the
// top-left (0) to the bottom-right (8).
type GridRef = int
const (
	// TopLeft is the index of the top left square in the grid
	TopLeft GridRef = iota
	// TopMid is the index of the top middle square in the grid
	TopMid GridRef = iota
	// TopRight is the index of the top right square in the grid
	TopRight GridRef = iota
	// MidLeft is the index of the middle left square in the grid
	MidLeft GridRef = iota
	// Center is the index of the center square in the grid
	Center GridRef = iota
	// MidRight is the index of the middle right square in the grid
	MidRight GridRef = iota
	// BotLeft is the index of the bottom left square in the grid
	BotLeft GridRef = iota
	// BotMid is the index of the bottom middle square in the grid
	BotMid GridRef = iota
	// BotRight is the index of the bottom right square in the grid
	BotRight GridRef = iota
)
// Game represents the state of the game world
type Game struct {
	Grid [9]mark `json:"gameGrid"`
	Next mark `json:"nextUp"`
	Turn mark `json:"whoseTurn"`
	Winner mark `json:"winner"`
	Pretty string `json:"pretty"`
}
// New creates a pristine game board. X always moves first, and Pretty is
// initialised to the empty-grid rendering.
func New() *Game {
	g := &Game{Turn: X, Next: O}
	g.pretty()
	return g
}
// Mark places the current player's marker on the given square. Marking an
// already-occupied square deliberately changes nothing. A winning move records
// the winner and ends turn-taking; otherwise play passes to the other player.
// The error return is always nil and exists for interface stability.
func (g *Game) Mark(square GridRef) error {
	if g.Grid[square] != None {
		return nil
	}
	g.Grid[square] = g.Turn
	g.pretty()
	if won(g.Grid) {
		g.Winner = g.Turn
		g.Turn, g.Next = None, None
		return nil
	}
	g.Turn, g.Next = g.Next, g.Turn
	return nil
}
// won reports whether any of the eight winning lines (three rows, three
// columns, two diagonals) is completely filled by a single player.
func won(grid [9]mark) bool {
	lines := [8][3]GridRef{
		{TopLeft, Center, BotRight},
		{TopRight, Center, BotLeft},
		{TopMid, Center, BotMid},
		{MidLeft, Center, MidRight},
		{TopLeft, TopMid, TopRight},
		{BotLeft, BotMid, BotRight},
		{TopLeft, MidLeft, BotLeft},
		{TopRight, MidRight, BotRight},
	}
	for _, ln := range lines {
		m := grid[ln[0]]
		if m > None && grid[ln[1]] == m && grid[ln[2]] == m {
			return true
		}
	}
	return false
}
// pretty refreshes g.Pretty with a compact textual rendering of the grid:
// three comma-separated rows of three characters ('X', 'O' or space).
func (g *Game) pretty() {
	symbol := func(m mark) rune {
		switch m {
		case X:
			return 'X'
		case O:
			return 'O'
		default:
			return ' '
		}
	}
	var c [9]rune
	for i, m := range g.Grid {
		c[i] = symbol(m)
	}
	g.Pretty = fmt.Sprintf("%c%c%c,%c%c%c,%c%c%c", c[0], c[1], c[2], c[3], c[4], c[5], c[6], c[7], c[8])
}
// Valid returns the set of valid marks remaining
func (g *Game) Valid() map[GridRef]bool {
m := make(map[GridRef]bool)
for i, v := range g.Grid {
if v == None {
m[i] = true
}
}
return m
} | game/game.go | 0.679604 | 0.614857 | game.go | starcoder |
package sctp
import (
"github.com/pkg/errors"
)
/*
chunkHeartbeatAck represents an SCTP Chunk of type HEARTBEAT ACK
An endpoint should send this chunk to its peer endpoint as a response
to a HEARTBEAT chunk (see Section 8.3). A HEARTBEAT ACK is always
sent to the source IP address of the IP datagram containing the
HEARTBEAT chunk to which this ack is responding.
The parameter field contains a variable-length opaque data structure.
0 1 2 3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Type = 5 | Chunk Flags | Heartbeat Ack Length |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| |
| Heartbeat Information TLV (Variable-Length) |
| |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
Defined as a variable-length parameter using the format described
in Section 3.2.1, i.e.:
Variable Parameters Status Type Value
-------------------------------------------------------------
Heartbeat Info Mandatory 1
*/
// chunkHeartbeatAck holds the chunk header plus the parameters to echo back;
// marshal requires exactly one paramHeartbeatInfo.
type chunkHeartbeatAck struct {
	chunkHeader
	params []param
}
// unmarshal is intentionally unimplemented: inbound HEARTBEAT ACK parsing is
// not supported by this code.
func (h *chunkHeartbeatAck) unmarshal(raw []byte) error {
	return errors.Errorf("Unimplemented")
}
// marshal serialises the chunk. It validates that exactly one parameter is
// present and that it is a heartbeat-info TLV, then writes the parameter
// (with inter-parameter padding) into the chunk header's raw payload.
func (h *chunkHeartbeatAck) marshal() ([]byte, error) {
	if len(h.params) != 1 {
		return nil, errors.Errorf("Heartbeat Ack must have one param")
	}
	switch h.params[0].(type) {
	case *paramHeartbeatInfo:
		// ParamHeartbeatInfo is valid
	default:
		return nil, errors.Errorf("Heartbeat Ack must have one param, and it should be a HeartbeatInfo")
	}
	out := make([]byte, 0)
	for idx, p := range h.params {
		pp, err := p.marshal()
		if err != nil {
			return nil, errors.Wrap(err, "Unable to marshal parameter for Heartbeat Ack")
		}
		out = append(out, pp...)
		// Chunks (including Type, Length, and Value fields) are padded out
		// by the sender with all zero bytes to be a multiple of 4 bytes
		// long. This padding MUST NOT be more than 3 bytes in total. The
		// Chunk Length value does not include terminating padding of the
		// chunk. *However, it does include padding of any variable-length
		// parameter except the last parameter in the chunk.* The receiver
		// MUST ignore the padding.
		if idx != len(h.params)-1 {
			out = padByte(out, getPadding(len(pp)))
		}
	}
	h.chunkHeader.typ = ctHeartbeatAck
	h.chunkHeader.raw = out
	return h.chunkHeader.marshal()
}
func (h *chunkHeartbeatAck) check() (abort bool, err error) {
return false, nil
} | trunk/3rdparty/srs-bench/vendor/github.com/pion/sctp/chunk_heartbeat_ack.go | 0.73914 | 0.49646 | chunk_heartbeat_ack.go | starcoder |
package runtime
import (
"math"
)
// unm implements unary minus: integers and floats negate directly; any other
// value falls back to the __unm metamethod, and an error is returned when no
// metamethod applies.
func unm(t *Thread, x Value) (Value, *Error) {
	nx, fx, kx := ToNumber(x)
	switch kx {
	case IsInt:
		return IntValue(-nx), nil
	case IsFloat:
		return FloatValue(-fx), nil
	}
	res, err, ok := metaun(t, "__unm", x)
	if ok {
		return res, err
	}
	return NilValue, NewErrorS("cannot neg")
}
// add implements '+' with int/float promotion: int+int stays integer, any
// float operand promotes the result to float, and non-numbers fall back to
// the __add metamethod.
func add(t *Thread, x Value, y Value) (Value, *Error) {
	nx, fx, kx := ToNumber(x)
	ny, fy, ky := ToNumber(y)
	switch {
	case kx == IsInt && ky == IsInt:
		return IntValue(nx + ny), nil
	case kx == IsInt && ky == IsFloat:
		return FloatValue(float64(nx) + fy), nil
	case kx == IsFloat && ky == IsInt:
		return FloatValue(fx + float64(ny)), nil
	case kx == IsFloat && ky == IsFloat:
		return FloatValue(fx + fy), nil
	}
	res, err, ok := metabin(t, "__add", x, y)
	if ok {
		return res, err
	}
	return NilValue, NewErrorS("add expects addable values")
}
// sub implements '-' with int/float promotion; non-numbers fall back to the
// __sub metamethod.
func sub(t *Thread, x Value, y Value) (Value, *Error) {
	nx, fx, kx := ToNumber(x)
	ny, fy, ky := ToNumber(y)
	switch kx {
	case IsInt:
		switch ky {
		case IsInt:
			return IntValue(nx - ny), nil
		case IsFloat:
			return FloatValue(float64(nx) - fy), nil
		}
	case IsFloat:
		switch ky {
		case IsInt:
			return FloatValue(fx - float64(ny)), nil
		case IsFloat:
			return FloatValue(fx - fy), nil
		}
	}
	res, err, ok := metabin(t, "__sub", x, y)
	if ok {
		return res, err
	}
	return NilValue, NewErrorS("sub expects subtractable values")
}
// mul implements '*' with int/float promotion; non-numbers fall back to the
// __mul metamethod.
func mul(t *Thread, x Value, y Value) (Value, *Error) {
	nx, fx, kx := ToNumber(x)
	ny, fy, ky := ToNumber(y)
	switch kx {
	case IsInt:
		switch ky {
		case IsInt:
			return IntValue(nx * ny), nil
		case IsFloat:
			return FloatValue(float64(nx) * fy), nil
		}
	case IsFloat:
		switch ky {
		case IsInt:
			return FloatValue(fx * float64(ny)), nil
		case IsFloat:
			return FloatValue(fx * fy), nil
		}
	}
	res, err, ok := metabin(t, "__mul", x, y)
	if ok {
		return res, err
	}
	return NilValue, NewErrorS("mul expects multipliable values")
}
// div implements '/'. Note the result is always a float, even for two integer
// operands; non-numbers fall back to the __div metamethod.
func div(t *Thread, x Value, y Value) (Value, *Error) {
	nx, fx, kx := ToNumber(x)
	ny, fy, ky := ToNumber(y)
	switch kx {
	case IsInt:
		switch ky {
		case IsInt:
			return FloatValue(float64(nx) / float64(ny)), nil
		case IsFloat:
			return FloatValue(float64(nx) / fy), nil
		}
	case IsFloat:
		switch ky {
		case IsInt:
			return FloatValue(fx / float64(ny)), nil
		case IsFloat:
			return FloatValue(fx / fy), nil
		}
	}
	res, err, ok := metabin(t, "__div", x, y)
	if ok {
		return res, err
	}
	return NilValue, NewErrorS("div expects dividable values")
}
// floordivInt returns x divided by y rounded toward negative infinity (floor
// division), whereas Go's native integer division truncates toward zero.
func floordivInt(x, y int64) int64 {
	q := x / y
	// Step the truncated quotient down only when there IS a remainder and its
	// sign differs from the divisor's. The previous code omitted the r != 0
	// check, so an exact division by a negative divisor was wrongly
	// decremented (e.g. floordivInt(4, -2) returned -3 instead of -2).
	if r := x % y; r != 0 && (r < 0) != (y < 0) {
		q--
	}
	return q
}
// floordivFloat returns the float quotient of x and y rounded toward
// negative infinity.
func floordivFloat(x, y float64) float64 {
	return math.Floor(x / y)
}
// idiv implements '//' (floor division) with int/float promotion; non-numbers
// fall back to the __idiv metamethod.
func idiv(t *Thread, x Value, y Value) (Value, *Error) {
	nx, fx, kx := ToNumber(x)
	ny, fy, ky := ToNumber(y)
	switch kx {
	case IsInt:
		switch ky {
		case IsInt:
			return IntValue(floordivInt(nx, ny)), nil
		case IsFloat:
			return FloatValue(floordivFloat(float64(nx), fy)), nil
		}
	case IsFloat:
		switch ky {
		case IsInt:
			return FloatValue(floordivFloat(fx, float64(ny))), nil
		case IsFloat:
			return FloatValue(floordivFloat(fx, fy)), nil
		}
	}
	res, err, ok := metabin(t, "__idiv", x, y)
	if ok {
		return res, err
	}
	return NilValue, NewErrorS("idiv expects idividable values")
}
// modInt returns the floored-division remainder: the result has the sign of
// the divisor, matching q = floordivInt(x, y) via x == q*y + r.
func modInt(x, y int64) int64 {
	r := x % y
	// Adjust only when there IS a remainder with a sign differing from the
	// divisor's. The previous code omitted the r != 0 check, so an exact
	// division by a negative divisor wrongly produced y itself
	// (e.g. modInt(4, -2) returned -2 instead of 0).
	if r != 0 && (r < 0) != (y < 0) {
		r += y
	}
	return r
}
// modFloat returns the floored-division remainder for floats: the result has
// the sign of the divisor, matching floordivFloat via x == q*y + r.
func modFloat(x, y float64) float64 {
	r := math.Mod(x, y)
	// As with modInt, adjust only a non-zero remainder whose sign differs
	// from the divisor's; the missing r != 0 check previously turned an exact
	// quotient with a negative divisor into y (e.g. modFloat(4, -2) == -2).
	if r != 0 && (r < 0) != (y < 0) {
		r += y
	}
	return r
}
// Mod returns x % y with int/float promotion, using floored-remainder
// semantics (result signed like the divisor); non-numbers fall back to the
// __mod metamethod.
func Mod(t *Thread, x Value, y Value) (Value, *Error) {
	nx, fx, kx := ToNumber(x)
	ny, fy, ky := ToNumber(y)
	switch kx {
	case IsInt:
		switch ky {
		case IsInt:
			return IntValue(modInt(nx, ny)), nil
		case IsFloat:
			return FloatValue(modFloat(float64(nx), fy)), nil
		}
	case IsFloat:
		switch ky {
		case IsInt:
			return FloatValue(modFloat(fx, float64(ny))), nil
		case IsFloat:
			return FloatValue(modFloat(fx, fy)), nil
		}
	}
	res, err, ok := metabin(t, "__mod", x, y)
	if ok {
		return res, err
	}
	return NilValue, NewErrorS("mod expects modable values")
}
// powFloat raises x to the power y using math.Pow.
func powFloat(x, y float64) float64 {
	return math.Pow(x, y)
}
func pow(t *Thread, x Value, y Value) (Value, *Error) {
nx, fx, kx := ToNumber(x)
ny, fy, ky := ToNumber(y)
switch kx {
case IsInt:
switch ky {
case IsInt:
return FloatValue(powFloat(float64(nx), float64(ny))), nil
case IsFloat:
return FloatValue(powFloat(float64(nx), fy)), nil
}
case IsFloat:
switch ky {
case IsInt:
return FloatValue(powFloat(fx, float64(ny))), nil
case IsFloat:
return FloatValue(powFloat(fx, fy)), nil
}
}
res, err, ok := metabin(t, "__pow", x, y)
if ok {
return res, err
}
return NilValue, NewErrorS("pow expects powidable values")
} | runtime/arith.go | 0.670069 | 0.420421 | arith.go | starcoder |
package iso20022
// RateDetails3 provides information about the rates related to securities
// movement (ISO 20022). Every field is optional; slice-valued fields may
// carry several rate entries for the same event.
type RateDetails3 struct {
	// Rate used for additional tax that cannot be categorised.
	AdditionalTax *RateAndAmountFormat14Choice `xml:"AddtlTax,omitempty"`
	// Cash dividend amount per equity before deductions or allowances have been made.
	GrossDividendRate []*GrossDividendRateFormat5Choice `xml:"GrssDvddRate,omitempty"`
	// The actual interest rate used for the payment of the interest for the specified interest period.
	// Usage guideline: It is used to provide the applicable rate for the current payment, after all calculations have been performed, that is, application of period and method of interest computation.
	InterestRateUsedForPayment []*InterestRateUsedForPaymentFormat5Choice `xml:"IntrstRateUsdForPmt,omitempty"`
	// Percentage of the gross dividend rate on which tax must be paid.
	TaxRelatedRate []*RateTypeAndAmountAndStatus6 `xml:"TaxRltdRate,omitempty"`
	// Percentage of a cash distribution that will be withheld by a tax authority.
	WithholdingTaxRate *RateFormat6Choice `xml:"WhldgTaxRate,omitempty"`
	// Rate used to calculate the amount of the charges/fees that cannot be categorised.
	ChargesFees *RateAndAmountFormat14Choice `xml:"ChrgsFees,omitempty"`
	// Cash rate made available, as an incentive, in addition to the solicitation fee, in order to encourage early participation in an offer.
	EarlySolicitationFeeRate *SolicitationFeeRateFormat3Choice `xml:"EarlySlctnFeeRate,omitempty"`
	// Dividend is final.
	FinalDividendRate *RateAndAmountFormat15Choice `xml:"FnlDvddRate,omitempty"`
	// Percentage of fiscal tax to apply.
	FiscalStamp *RateFormat6Choice `xml:"FsclStmp,omitempty"`
	// Rate resulting from a fully franked dividend paid by a company; rate includes tax credit for companies that have made sufficient tax payments during fiscal period.
	FullyFrankedRate *RateAndAmountFormat14Choice `xml:"FullyFrnkdRate,omitempty"`
	// Rate of the cash premium made available if the securities holder consents or participates to an event, for example, consent fees.
	CashIncentiveRate *RateFormat6Choice `xml:"CshIncntivRate,omitempty"`
	// Cash dividend amount per equity after deductions or allowances have been made.
	NetDividendRate []*NetDividendRateFormat5Choice `xml:"NetDvddRate,omitempty"`
	// Rate per share to which a non-resident is entitled.
	NonResidentRate *RateAndAmountFormat14Choice `xml:"NonResdtRate,omitempty"`
	// Dividend is provisional.
	ProvisionalDividendRate *RateAndAmountFormat15Choice `xml:"PrvsnlDvddRate,omitempty"`
	// Rate applicable to the event announced, for example, redemption rate for a redemption event.
	ApplicableRate *RateFormat6Choice `xml:"AplblRate,omitempty"`
	// Cash rate made available in an offer in order to encourage participation in the offer.
	SolicitationFeeRate *SolicitationFeeRateFormat3Choice `xml:"SlctnFeeRate,omitempty"`
	// Amount of money per equity allocated as the result of a tax credit.
	TaxCreditRate []*TaxCreditRateFormat5Choice `xml:"TaxCdtRate,omitempty"`
	// Taxation applied on an amount clearly identified as an income.
	TaxOnIncome *RateFormat6Choice `xml:"TaxOnIncm,omitempty"`
	// Taxation applied on an amount clearly identified as capital profits, capital gains.
	TaxOnProfits *RateFormat6Choice `xml:"TaxOnPrfts,omitempty"`
	// Percentage of cash that was paid in excess of actual tax obligation and was reclaimed.
	TaxReclaimRate *RateFormat6Choice `xml:"TaxRclmRate,omitempty"`
	// Rate at which the income will be withheld by the jurisdiction in which the income was originally paid, for which relief at source and/or reclaim may be possible.
	WithholdingOfForeignTax *RateAndAmountFormat14Choice `xml:"WhldgOfFrgnTax,omitempty"`
	// Rate at which the income will be withheld by the jurisdiction in which the account owner is located, for which relief at source and/or reclaim may be possible.
	WithholdingOfLocalTax *RateAndAmountFormat14Choice `xml:"WhldgOfLclTax,omitempty"`
}
func (r *RateDetails3) AddAdditionalTax() *RateAndAmountFormat14Choice {
r.AdditionalTax = new(RateAndAmountFormat14Choice)
return r.AdditionalTax
}
func (r *RateDetails3) AddGrossDividendRate() *GrossDividendRateFormat5Choice {
newValue := new(GrossDividendRateFormat5Choice)
r.GrossDividendRate = append(r.GrossDividendRate, newValue)
return newValue
}
func (r *RateDetails3) AddInterestRateUsedForPayment() *InterestRateUsedForPaymentFormat5Choice {
newValue := new(InterestRateUsedForPaymentFormat5Choice)
r.InterestRateUsedForPayment = append(r.InterestRateUsedForPayment, newValue)
return newValue
}
func (r *RateDetails3) AddTaxRelatedRate() *RateTypeAndAmountAndStatus6 {
newValue := new(RateTypeAndAmountAndStatus6)
r.TaxRelatedRate = append(r.TaxRelatedRate, newValue)
return newValue
}
func (r *RateDetails3) AddWithholdingTaxRate() *RateFormat6Choice {
r.WithholdingTaxRate = new(RateFormat6Choice)
return r.WithholdingTaxRate
}
func (r *RateDetails3) AddChargesFees() *RateAndAmountFormat14Choice {
r.ChargesFees = new(RateAndAmountFormat14Choice)
return r.ChargesFees
}
func (r *RateDetails3) AddEarlySolicitationFeeRate() *SolicitationFeeRateFormat3Choice {
r.EarlySolicitationFeeRate = new(SolicitationFeeRateFormat3Choice)
return r.EarlySolicitationFeeRate
}
func (r *RateDetails3) AddFinalDividendRate() *RateAndAmountFormat15Choice {
r.FinalDividendRate = new(RateAndAmountFormat15Choice)
return r.FinalDividendRate
}
func (r *RateDetails3) AddFiscalStamp() *RateFormat6Choice {
r.FiscalStamp = new(RateFormat6Choice)
return r.FiscalStamp
}
func (r *RateDetails3) AddFullyFrankedRate() *RateAndAmountFormat14Choice {
r.FullyFrankedRate = new(RateAndAmountFormat14Choice)
return r.FullyFrankedRate
}
func (r *RateDetails3) AddCashIncentiveRate() *RateFormat6Choice {
r.CashIncentiveRate = new(RateFormat6Choice)
return r.CashIncentiveRate
}
func (r *RateDetails3) AddNetDividendRate() *NetDividendRateFormat5Choice {
newValue := new(NetDividendRateFormat5Choice)
r.NetDividendRate = append(r.NetDividendRate, newValue)
return newValue
}
func (r *RateDetails3) AddNonResidentRate() *RateAndAmountFormat14Choice {
r.NonResidentRate = new(RateAndAmountFormat14Choice)
return r.NonResidentRate
}
func (r *RateDetails3) AddProvisionalDividendRate() *RateAndAmountFormat15Choice {
r.ProvisionalDividendRate = new(RateAndAmountFormat15Choice)
return r.ProvisionalDividendRate
}
func (r *RateDetails3) AddApplicableRate() *RateFormat6Choice {
r.ApplicableRate = new(RateFormat6Choice)
return r.ApplicableRate
}
func (r *RateDetails3) AddSolicitationFeeRate() *SolicitationFeeRateFormat3Choice {
r.SolicitationFeeRate = new(SolicitationFeeRateFormat3Choice)
return r.SolicitationFeeRate
}
func (r *RateDetails3) AddTaxCreditRate() *TaxCreditRateFormat5Choice {
newValue := new(TaxCreditRateFormat5Choice)
r.TaxCreditRate = append(r.TaxCreditRate, newValue)
return newValue
}
func (r *RateDetails3) AddTaxOnIncome() *RateFormat6Choice {
r.TaxOnIncome = new(RateFormat6Choice)
return r.TaxOnIncome
}
func (r *RateDetails3) AddTaxOnProfits() *RateFormat6Choice {
r.TaxOnProfits = new(RateFormat6Choice)
return r.TaxOnProfits
}
func (r *RateDetails3) AddTaxReclaimRate() *RateFormat6Choice {
r.TaxReclaimRate = new(RateFormat6Choice)
return r.TaxReclaimRate
}
func (r *RateDetails3) AddWithholdingOfForeignTax() *RateAndAmountFormat14Choice {
r.WithholdingOfForeignTax = new(RateAndAmountFormat14Choice)
return r.WithholdingOfForeignTax
}
func (r *RateDetails3) AddWithholdingOfLocalTax() *RateAndAmountFormat14Choice {
r.WithholdingOfLocalTax = new(RateAndAmountFormat14Choice)
return r.WithholdingOfLocalTax
} | RateDetails3.go | 0.843219 | 0.639652 | RateDetails3.go | starcoder |
package flat
import (
"go-simulate-a-city/common/commonmath"
"go-simulate-a-city/sim/core/dto/geometry"
"go-simulate-a-city/sim/core/gamegrid"
"go-simulate-a-city/sim/core/mailroom"
"go-simulate-a-city/sim/ui"
"github.com/go-gl/mathgl/mgl32"
)
// Defines how to render generic regions in a channel-based manner
type RegionRenderer struct {
	offsetChangeChannel chan mgl32.Vec2 // receives camera offset updates from the mailroom
	scaleChangeChannel  chan float32 // receives camera zoom updates from the mailroom
	cameraOffset  mgl32.Vec2 // last received camera offset
	cameraScale   float32 // last received camera scale
	regionColor   mgl32.Vec3 // color used to draw every region of this renderer
	// Screen-space regions produced by the last remap; reused until new input
	// arrives. NOTE(review): field name contains a typo ("Rendererered").
	lastRendereredRegions []commonMath.Region
	regions               map[int64]commonMath.Region // engine-space regions keyed by id

	newInput bool // true when any channel delivered data during the last drain

	NewRegionChannel    chan geometry.IdRegion // add/replace a region; Id == -1 resets all
	DeleteRegionChannel chan int64 // remove a region by id
}
// NewRegionRenderer creates a renderer drawing its regions in regionColor and
// subscribes it to camera offset and scale updates via the mailroom.
func NewRegionRenderer(regionColor mgl32.Vec3) *RegionRenderer {
	r := &RegionRenderer{
		offsetChangeChannel:   make(chan mgl32.Vec2, 10),
		scaleChangeChannel:    make(chan float32, 10),
		cameraOffset:          mgl32.Vec2{0, 0},
		cameraScale:           1.0,
		regionColor:           regionColor,
		lastRendereredRegions: make([]commonMath.Region, 0),
		regions:               make(map[int64]commonMath.Region),
		newInput:              false,
		NewRegionChannel:      make(chan geometry.IdRegion, 50),
		DeleteRegionChannel:   make(chan int64, 50),
	}

	// Register for camera updates.
	mailroom.CameraOffsetRegChannel <- r.offsetChangeChannel
	mailroom.CameraScaleRegChannel <- r.scaleChangeChannel
	return r
}
// drainInputChannels non-blockingly consumes every pending message from the
// camera and region channels, setting r.newInput when anything changed.
func (r *RegionRenderer) drainInputChannels() {
	r.newInput = false
	for {
		select {
		case offset := <-r.offsetChangeChannel:
			r.cameraOffset = offset
			r.newInput = true
		case scale := <-r.scaleChangeChannel:
			r.cameraScale = scale
			r.newInput = true
		case deletionId := <-r.DeleteRegionChannel:
			delete(r.regions, deletionId)
			r.newInput = true
		case idRegion := <-r.NewRegionChannel:
			if idRegion.Id == -1 {
				// Special case -- an invalid ID resets EVERYTHING.
				r.regions = make(map[int64]commonMath.Region)
			} else {
				r.regions[idRegion.Id] = idRegion.Region
			}
			r.newInput = true
		default:
			// All channels drained.
			return
		}
	}
}
func (r *RegionRenderer) Render() {
r.drainInputChannels()
if r.newInput {
r.lastRendereredRegions = make([]commonMath.Region, 0)
for _, region := range r.regions {
mappedRegion := gamegrid.MapEngineRegionToScreen(®ion, r.cameraScale, r.cameraOffset)
r.lastRendereredRegions = append(r.lastRendereredRegions, *mappedRegion)
}
}
// TODO: Update region renderer to support caching buffers,
// which will significantly improve no-op perf.
for _, region := range r.lastRendereredRegions {
ui.Ui.RegionProgram.Render(®ion, r.regionColor)
}
} | sim/ui/flat/regionRenderer.go | 0.598899 | 0.420124 | regionRenderer.go | starcoder |
package math
import (
"fmt"
)
// Orientation represents a transformation matrix, written as a right and a down vector.
type Orientation struct {
	Right Delta // image of the unit "right" (east) vector
	Down  Delta // image of the unit "down" (south) vector
}
// Concat returns the composition o * o2, so that
// o.Concat(o2).Apply(d) == o.Apply(o2.Apply(d)).
func (o Orientation) Concat(o2 Orientation) Orientation {
	composed := Orientation{
		Right: o.Apply(o2.Right),
		Down:  o.Apply(o2.Down),
	}
	return composed
}
// Apply transforms the delta d by the orientation matrix.
func (o Orientation) Apply(d Delta) Delta {
	dx := o.Right.DX*d.DX + o.Down.DX*d.DY
	dy := o.Right.DY*d.DX + o.Down.DY*d.DY
	return Delta{DX: dx, DY: dy}
}
// ApplyToRect2 rotates a rectangle by the orientation, mapping the pivot to
// itself. The pivot is given in doubled coordinates to support half-pixel
// pivots. Note: odd numbers are pixel centers, even numbers are pixel corners!
func (o Orientation) ApplyToRect2(pivot2 Pos, r Rect) Rect {
	offset2 := r.Origin.Mul(2).Delta(pivot2)
	mapped := Rect{
		Origin: pivot2.Add(o.Apply(offset2)).Div(2),
		Size:   o.Apply(r.Size),
	}
	return mapped.Normalized()
}
// Inverse returns an orientation q so that o.Concat(q) == Identity().
func (o Orientation) Inverse() Orientation {
	// Every axis-aligned orientation used here is the identity when applied
	// four times, so the third power is the inverse. (There is probably a
	// more efficient way.)
	cubed := o.Concat(o).Concat(o)
	return cubed
}
// Identity yields the default orientation.
func Identity() Orientation {
	return Orientation{Right: East(), Down: South()}
}

// FlipX yields an orientation where X is flipped.
func FlipX() Orientation {
	return Orientation{Right: West(), Down: South()}
}

// FlipY yields an orientation where Y is flipped.
func FlipY() Orientation {
	return Orientation{Right: East(), Down: North()}
}

// FlipD yields an orientation where X/Y are swapped.
func FlipD() Orientation {
	return Orientation{Right: South(), Down: East()}
}

// Left yields an orientation that turns left.
func Left() Orientation {
	return Orientation{Right: North(), Down: East()}
}

// Right yields an orientation that turns right.
func Right() Orientation {
	return Orientation{Right: South(), Down: West()}
}

// TurnAround yields an orientation that rotates by 180 degrees.
// (The previous comment said "turns left", copied from Left above.)
func TurnAround() Orientation {
	return Orientation{Right: West(), Down: North()}
}
// ParseOrientation parses an orientation from a string. It is given by the
// right and down directions in that order, e.g. "ES" = right-east, down-south.
func ParseOrientation(s string) (Orientation, error) {
	byName := map[string]Orientation{
		"EN": {Right: East(), Down: North()},
		"ES": {Right: East(), Down: South()},
		"NE": {Right: North(), Down: East()},
		"NW": {Right: North(), Down: West()},
		"SE": {Right: South(), Down: East()},
		"SW": {Right: South(), Down: West()},
		"WN": {Right: West(), Down: North()},
		"WS": {Right: West(), Down: South()},
	}
	if o, ok := byName[s]; ok {
		return o, nil
	}
	return Orientation{}, fmt.Errorf("unsupported orientation %q; want <right><down> direction like ES", s)
}
func (o Orientation) Determinant() int {
return o.Right.DX*o.Down.DY - o.Right.DY*o.Down.DX
} | internal/math/orientation.go | 0.903578 | 0.71638 | orientation.go | starcoder |
package polynomial
import (
"fmt"
"io"
"strconv"
)
// Action identifies the operation requested by a parsed input statement.
type Action int

const (
	// ActDefault is the placeholder before the statement kind is known;
	// Parse resolves it to ActStore or ActLoad.
	ActDefault Action = iota
	// ActStore stores a polynomial under a name.
	ActStore
	// ActLoad loads a previously stored polynomial.
	ActLoad
	// ActDerive derives a polynomial.
	ActDerive
	// ActZeroes finds the zeroes of a polynomial.
	ActZeroes
)

// InputStatement is the parsed form of one line of user input.
type InputStatement struct {
	Request Action // requested operation
	Name string // function name, e.g. "f" in "f(x) = ..."
	Function Polynomial // parsed polynomial body (set for store requests)
}

// Parser represents a parser
type Parser struct {
	s *Scanner
	// buf implements one-token lookahead for unscan.
	buf struct {
		tok Token // last read token
		lit string // last read literal
		n int // buffer size (max = 1)
	}
}
// NewParser returns a new instance of Parser reading from r.
func NewParser(r io.Reader) *Parser {
	p := Parser{s: NewScanner(r)}
	return &p
}
// scan returns the next token, preferring a token pushed back by unscan.
func (p *Parser) scan() (tok Token, lit string) {
	if p.buf.n > 0 {
		// Serve the buffered token and clear the buffer.
		p.buf.n = 0
		return p.buf.tok, p.buf.lit
	}

	// Read a fresh token and remember it for a possible unscan.
	tok, lit = p.s.Scan()
	p.buf.tok, p.buf.lit = tok, lit
	return tok, lit
}
// unscan pushes the previously read token back into the buffer so the
// next call to scan returns it again.
func (p *Parser) unscan() {
	p.buf.n = 1
}
// scanIgnoreWhitespace scans the next token, skipping over a single
// whitespace token if one comes first.
func (p *Parser) scanIgnoreWhitespace() (Token, string) {
	tok, lit := p.scan()
	if tok != WHITESPACE {
		return tok, lit
	}
	return p.scan()
}
// Parse reads one input statement: an optional action keyword (STORE, LOAD,
// DERIVE, ZEROES), a function header "name(x)", and -- for store requests --
// "=" followed by a polynomial. Without a keyword, a trailing "=" body makes
// it a store, otherwise a load.
func (p *Parser) Parse() (*InputStatement, error) {
	stmt := &InputStatement{Request: ActDefault}
	// Optional leading action keyword; anything else is pushed back.
	switch tok, _ := p.scanIgnoreWhitespace(); tok {
	case STORE:
		stmt.Request = ActStore
	case LOAD:
		stmt.Request = ActLoad
	case DERIVE:
		stmt.Request = ActDerive
	case ZEROES:
		stmt.Request = ActZeroes
	default:
		p.unscan()
	}
	// expecting function
	if tok, lit := p.scanIgnoreWhitespace(); tok == NAME {
		stmt.Name = lit
	} else {
		return nil, fmt.Errorf("found %q, expected function name", lit)
	}
	// Expect the literal header "(x)".
	if tok, lit := p.scanIgnoreWhitespace(); tok != OPENPARAN {
		return nil, fmt.Errorf("found %q, expected (", lit)
	}
	if tok, lit := p.scanIgnoreWhitespace(); tok != VAR {
		return nil, fmt.Errorf("found %q, expected x", lit)
	}
	if tok, lit := p.scanIgnoreWhitespace(); tok != CLOSEPARAN {
		return nil, fmt.Errorf("found %q, expected )", lit)
	}
	if tok, lit := p.scanIgnoreWhitespace(); tok == EQUALS {
		// A "=" body is only valid for (implicit or explicit) store requests.
		if stmt.Request == ActDefault || stmt.Request == ActStore {
			if function, err := p.parsePolynomial(); err != nil {
				return nil, err
			} else {
				stmt.Request = ActStore
				stmt.Function = function
			}
		} else {
			return nil, fmt.Errorf("found %q, expected EOF", lit)
		}
	} else if tok == EOF {
		if stmt.Request == ActStore {
			return nil, fmt.Errorf("found EOF, expected =")
		} else if stmt.Request == ActDefault {
			// Bare "name(x)" defaults to a load.
			stmt.Request = ActLoad
		}
	} else {
		return nil, fmt.Errorf("found %q, expected = or EOF", lit)
	}
	return stmt, nil
}
// parsePolynomial parses a sum of terms of the form "c", "c x", "c x^e",
// "x" or "x^e", separated by + or -, and accumulates the coefficients per
// exponent into a Polynomial map.
//
// Bug fix: a pending minus sign was previously dropped for terms with an
// implicit coefficient (bare "x"), so "-x" parsed as "+x". The implicit
// coefficient is now negated like an explicit one.
func (p *Parser) parsePolynomial() (Polynomial, error) {
	polynomial := make(Polynomial)
	sign := true // true for positive, false for negative
	// An optional leading sign; any other valid term start is pushed back.
	if tok, lit := p.scanIgnoreWhitespace(); tok == MINUS {
		sign = false
	} else if tok != PLUS && tok != INTEGER && tok != FLOAT && tok != VAR {
		return nil, fmt.Errorf("found %q, expected function declaration", lit)
	} else {
		p.unscan()
	}
	for {
		var (
			coeff float64
			exp uint
		)
		// Expecting coefficient or 'x'
		if tok, lit := p.scanIgnoreWhitespace(); tok == FLOAT || tok == INTEGER {
			if flt, err := strconv.ParseFloat(lit, 64); err != nil {
				return nil, fmt.Errorf("problem parsing float %q", lit)
			} else {
				coeff = flt
				if !sign {
					coeff *= -1
				}
			}
		} else if tok == VAR {
			// Implicit coefficient of 1; honor a pending minus sign.
			coeff = 1
			if !sign {
				coeff = -1
			}
			p.unscan()
		} else {
			return nil, fmt.Errorf("found %q, expected polynomial term", lit)
		}
		if tok, _ := p.scanIgnoreWhitespace(); tok == VAR {
			// Expecting either exponent or next term
			if tok, lit := p.scanIgnoreWhitespace(); tok != CARET {
				exp = 1
				p.unscan()
			} else {
				if tok, lit = p.scanIgnoreWhitespace(); tok == INTEGER {
					if i, err := strconv.Atoi(lit); err != nil {
						return nil, fmt.Errorf("problem parsing integer %q", lit)
					} else {
						exp = uint(i)
					}
				} else {
					return nil, fmt.Errorf("found %q, expected integer", lit)
				}
			}
		} else {
			// A constant term; the non-VAR token belongs to the next step.
			exp = 0
			p.unscan()
		}
		polynomial[exp] += coeff
		// The separator determines the sign of the next term, or ends input.
		if tok, lit := p.scanIgnoreWhitespace(); tok == PLUS {
			sign = true
		} else if tok == MINUS {
			sign = false
		} else if tok == EOF {
			break
		} else {
			return nil, fmt.Errorf("found %q, expected polynomial term", lit)
		}
	}
	return polynomial, nil
}
package holidays
// init registers the national German holiday source ("de") and one source per
// federal state ("de-<state code>"), each layering state holidays on top of
// the national set via GetIncludes.
func init() {
	registerHolidayDataSource("de", holidaysDENational{})
	registerHolidayDataSource("de-bb", holidaysDEBB{})
	registerHolidayDataSource("de-be", holidaysDEBE{})
	registerHolidayDataSource("de-bw", holidaysDEBW{})
	registerHolidayDataSource("de-by", holidaysDEBY{})
	registerHolidayDataSource("de-hb", holidaysDEHB{})
	registerHolidayDataSource("de-he", holidaysDEHE{})
	registerHolidayDataSource("de-hh", holidaysDEHH{})
	registerHolidayDataSource("de-mv", holidaysDEMV{})
	registerHolidayDataSource("de-ni", holidaysDENI{})
	registerHolidayDataSource("de-nw", holidaysDENW{})
	registerHolidayDataSource("de-rp", holidaysDERP{})
	registerHolidayDataSource("de-sh", holidaysDESH{})
	registerHolidayDataSource("de-sl", holidaysDESL{})
	registerHolidayDataSource("de-sn", holidaysDESN{})
	registerHolidayDataSource("de-st", holidaysDEST{})
	registerHolidayDataSource("de-th", holidaysDETH{})
}
// holidaysDENational provides the holidays observed in all of Germany.
type holidaysDENational struct{}

func (h holidaysDENational) GetIncludes() []string { return []string{} }

// GetHolidays returns the nationwide German holidays for the given year.
// Movable feasts are derived from the Gregorian Easter Sunday date.
//
// Fix: the German name of Ascension Day was a redacted placeholder
// ("<NAME>"); restored to "Christi Himmelfahrt".
func (h holidaysDENational) GetHolidays(year int) []Holiday {
	return []Holiday{
		newHoliday("New Year's Day", map[string]string{"de": "Neujahrstag"}, dateFromNumbers(year, 1, 1)),
		newHoliday("Labor Day", map[string]string{"de": "Tag der Arbeit"}, dateFromNumbers(year, 5, 1)),
		newHoliday("German Unity Day", map[string]string{"de": "Tag der Deutschen Einheit"}, dateFromNumbers(year, 10, 3)),
		newHoliday("Christmas Day", map[string]string{"de": "Weihnachtstag"}, dateFromNumbers(year, 12, 25)),
		newHoliday("Boxing Day", map[string]string{"de": "Zweiter Weihnachtsfeiertag"}, dateFromNumbers(year, 12, 26)),
		newHoliday("Good Friday", map[string]string{"de": "Karfreitag"}, GregorianEasterSunday(year).Add(-2*day)),
		newHoliday("Easter Sunday", map[string]string{"de": "Ostersonntag"}, GregorianEasterSunday(year)),
		newHoliday("Easter Monday", map[string]string{"de": "Ostermontag"}, GregorianEasterSunday(year).Add(1*day)),
		newHoliday("Ascension Day", map[string]string{"de": "Christi Himmelfahrt"}, GregorianEasterSunday(year).Add(39*day)),
		newHoliday("Whit Monday", map[string]string{"de": "Pfingstmontag"}, GregorianEasterSunday(year).Add(50*day)),
	}
}
// holidaysDEBB provides the state holidays of Brandenburg, layered on top of
// the national German holidays via the "de" include.
type holidaysDEBB struct{}

func (h holidaysDEBB) GetIncludes() []string { return []string{"de"} }
func (h holidaysDEBB) GetHolidays(year int) []Holiday {
	return []Holiday{
		newHoliday("Reformation Day", map[string]string{"de": "Reformationstag"}, dateFromNumbers(year, 10, 31)),
	}
}
// Berlin observes only the national holidays.
type holidaysDEBE struct{ holidaysDENational }

// holidaysDEBW provides the state holidays of Baden-Wuerttemberg,
// layered on top of the national German holidays.
type holidaysDEBW struct{}

func (h holidaysDEBW) GetIncludes() []string { return []string{"de"} }
func (h holidaysDEBW) GetHolidays(year int) []Holiday {
	return []Holiday{
		newHoliday("Epiphany", map[string]string{"de": "Heilige Drei Könige"}, dateFromNumbers(year, 1, 6)),
		newHoliday("All Saints", map[string]string{"de": "Allerheiligen"}, dateFromNumbers(year, 11, 1)),
		newHoliday("Corpus Christi", map[string]string{"de": "Fronleichnam"}, GregorianEasterSunday(year).Add(60*day)),
	}
}

// Bavaria shares Baden-Wuerttemberg's holiday set here.
type holidaysDEBY struct{ holidaysDEBW }

// Bremen observes only the national holidays.
type holidaysDEHB struct{ holidaysDENational }
// holidaysDEHE provides the state holidays of Hesse,
// layered on top of the national German holidays.
type holidaysDEHE struct{}

func (h holidaysDEHE) GetIncludes() []string { return []string{"de"} }
func (h holidaysDEHE) GetHolidays(year int) []Holiday {
	return []Holiday{
		newHoliday("Corpus Christi", map[string]string{"de": "Fronleichnam"}, GregorianEasterSunday(year).Add(60*day)),
	}
}

// Hamburg observes only the national holidays.
type holidaysDEHH struct{ holidaysDENational }

// Mecklenburg-Vorpommern shares Brandenburg's holiday set here.
type holidaysDEMV struct{ holidaysDEBB }

// Lower Saxony observes only the national holidays.
type holidaysDENI struct{ holidaysDENational }
// holidaysDENW provides the state holidays of North Rhine-Westphalia,
// layered on top of the national German holidays.
type holidaysDENW struct{}

func (h holidaysDENW) GetIncludes() []string { return []string{"de"} }
func (h holidaysDENW) GetHolidays(year int) []Holiday {
	return []Holiday{
		newHoliday("All Saints", map[string]string{"de": "Allerheiligen"}, dateFromNumbers(year, 11, 1)),
		newHoliday("Corpus Christi", map[string]string{"de": "Fronleichnam"}, GregorianEasterSunday(year).Add(60*day)),
	}
}

// Rhineland-Palatinate shares North Rhine-Westphalia's holiday set here.
type holidaysDERP struct{ holidaysDENW }

// Schleswig-Holstein observes only the national holidays.
type holidaysDESH struct{ holidaysDENational }
// holidaysDESL provides the state holidays of Saarland,
// layered on top of the national German holidays.
type holidaysDESL struct{}

func (h holidaysDESL) GetIncludes() []string { return []string{"de"} }

// GetHolidays returns Saarland's additional holidays for the given year.
//
// Fix: the English name of Fronleichnam was partially redacted
// ("Cor<NAME>"); restored to "Corpus Christi", matching the other states.
func (h holidaysDESL) GetHolidays(year int) []Holiday {
	return []Holiday{
		newHoliday("Assumption Day", map[string]string{"de": "Mariä Himmelfahrt"}, dateFromNumbers(year, 8, 15)),
		newHoliday("All Saints", map[string]string{"de": "Allerheiligen"}, dateFromNumbers(year, 11, 1)),
		newHoliday("Corpus Christi", map[string]string{"de": "Fronleichnam"}, GregorianEasterSunday(year).Add(60*day)),
	}
}
// Saxony shares Brandenburg's holiday set here.
type holidaysDESN struct{ holidaysDEBB }

// holidaysDEST provides the state holidays of Saxony-Anhalt,
// layered on top of the national German holidays.
type holidaysDEST struct{}

func (h holidaysDEST) GetIncludes() []string { return []string{"de"} }
func (h holidaysDEST) GetHolidays(year int) []Holiday {
	return []Holiday{
		newHoliday("Epiphany", map[string]string{"de": "Heilige Drei Könige"}, dateFromNumbers(year, 1, 6)),
		newHoliday("Reformation Day", map[string]string{"de": "Reformationstag"}, dateFromNumbers(year, 10, 31)),
	}
}

// Thuringia shares Brandenburg's holiday set here.
type holidaysDETH struct{ holidaysDEBB }
package util
import (
"crypto/sha256"
"math/big"
"github.com/ing-bank/zkrp/crypto/bn256"
"github.com/ing-bank/zkrp/crypto/p256"
"github.com/ing-bank/zkrp/util/bn"
"github.com/ing-bank/zkrp/util/byteconversion"
)
// Constants that are going to be used frequently, then we just need to compute them once.
var (
	G1 = new(bn256.G1).ScalarBaseMult(new(big.Int).SetInt64(1)) // generator of group G1
	G2 = new(bn256.G2).ScalarBaseMult(new(big.Int).SetInt64(1)) // generator of group G2
	E = bn256.Pair(G1, G2) // pairing e(G1, G2), a generator of GT
)
/*
Decompose receives as input a bigint x and outputs an array of integers such that
x = sum(xi.u^i), i.e. it returns the decomposition of x into base u.
*/
func Decompose(x *big.Int, u int64, l int64) ([]int64, error) {
var (
result []int64
i int64
)
result = make([]int64, l)
i = 0
for i < l {
result[i] = bn.Mod(x, new(big.Int).SetInt64(u)).Int64()
x = new(big.Int).Div(x, new(big.Int).SetInt64(u))
i = i + 1
}
return result, nil
}
/*
Commit method corresponds to the Pedersen commitment scheme. Namely, given input
message x, and randomness r, it outputs g^x.h^r.
*/
func Commit(x, r *big.Int, h *bn256.G2) (*bn256.G2, error) {
var C = new(bn256.G2).ScalarBaseMult(x)
C.Add(C, new(bn256.G2).ScalarMult(h, r))
return C, nil
}
/*
CommitG1 method corresponds to the Pedersen commitment scheme. Namely, given input
message x, and randomness r, it outputs g^x.h^r.
*/
func CommitG1(x, r *big.Int, h *p256.P256) (*p256.P256, error) {
var C = new(p256.P256).ScalarBaseMult(x)
Hr := new(p256.P256).ScalarMult(h, r)
C.Add(C, Hr)
return C, nil
}
/*
HashSet is responsible for computing a Zp element given elements from GT and
G2: it hashes their string encodings with SHA-256 and converts the digest to
a big integer.
*/
func HashSet(a *bn256.GT, D *bn256.G2) (*big.Int, error) {
	digest := sha256.New()
	digest.Write([]byte(a.String()))
	digest.Write([]byte(D.String()))
	// The previous full-slice re-slice of the digest (output[0:]) was a
	// no-op and has been removed.
	return byteconversion.FromByteArray(digest.Sum(nil))
}
/*
Hash is responsible for computing a Zp element given a slice of GT elements
and a G2 element: it hashes their string encodings with SHA-256 and converts
the digest to a big integer.
*/
func Hash(a []*bn256.GT, D *bn256.G2) (*big.Int, error) {
	digest := sha256.New()
	for i := range a {
		digest.Write([]byte(a[i].String()))
	}
	digest.Write([]byte(D.String()))
	// The previous full-slice re-slice of the digest (output[0:]) was a
	// no-op and has been removed.
	return byteconversion.FromByteArray(digest.Sum(nil))
}
package farm
import (
"encoding/binary"
"math/bits"
)
// uoH mixes two 64-bit values using the multiplier mul and a final
// right-rotation by r bits; shared mixing step of the farmhashuo routines.
func uoH(x, y, mul uint64, r uint) uint64 {
	mixed := (x ^ y) * mul
	mixed ^= mixed >> 47
	result := (y ^ mixed) * mul
	result = bits.RotateLeft64(result, -int(r))
	return result * mul
}
// Hash64WithSeeds hashes a byte slice and two uint64 seeds and returns a uint64 hash value
func Hash64WithSeeds(s []byte, seed0, seed1 uint64) uint64 {
	slen := len(s)
	// Short inputs are delegated to the "na" variant.
	if slen <= 64 {
		return naHash64WithSeeds(s, seed0, seed1)
	}

	// For strings over 64 bytes we loop.
	// Internal state consists of 64 bytes: u, v, w, x, y, and z.
	x := seed0
	y := seed1*k2 + 113
	z := shiftMix(y*k2) * k2
	v := uint128{seed0, seed1}
	var w uint128
	u := x - z
	x *= k2
	mul := k2 + (u & 0x82)

	// Set end so that after the loop we have 1 to 64 bytes left to process.
	endIdx := ((slen - 1) / 64) * 64
	last64Idx := endIdx + ((slen - 1) & 63) - 63
	last64 := s[last64Idx:]

	// Main loop: consume the input 64 bytes at a time, folding eight
	// little-endian words into the mixing state.
	for len(s) > 64 {
		a0 := binary.LittleEndian.Uint64(s[0 : 0+8])
		a1 := binary.LittleEndian.Uint64(s[8 : 8+8])
		a2 := binary.LittleEndian.Uint64(s[16 : 16+8])
		a3 := binary.LittleEndian.Uint64(s[24 : 24+8])
		a4 := binary.LittleEndian.Uint64(s[32 : 32+8])
		a5 := binary.LittleEndian.Uint64(s[40 : 40+8])
		a6 := binary.LittleEndian.Uint64(s[48 : 48+8])
		a7 := binary.LittleEndian.Uint64(s[56 : 56+8])
		x += a0 + a1
		y += a2
		z += a3
		v.lo += a4
		v.hi += a5 + a1
		w.lo += a6
		w.hi += a7

		x = bits.RotateLeft64(x, -26)
		x *= 9
		y = bits.RotateLeft64(y, -29)
		z *= mul
		v.lo = bits.RotateLeft64(v.lo, -33)
		v.hi = bits.RotateLeft64(v.hi, -30)
		w.lo ^= x
		w.lo *= 9
		z = bits.RotateLeft64(z, -32)
		z += w.hi
		w.hi += z
		z *= 9
		u, y = y, u

		z += a0 + a6
		v.lo += a2
		v.hi += a3
		w.lo += a4
		w.hi += a5 + a6
		x += a1
		y += a7

		y += v.lo
		v.lo += x - y
		v.hi += w.lo
		w.lo += v.hi
		w.hi += x - y
		x += w.hi
		w.hi = bits.RotateLeft64(w.hi, -34)
		u, z = z, u
		s = s[64:]
	}
	// Make s point to the last 64 bytes of input.
	s = last64
	// Finalization: fold the remaining 64 bytes and the state into one value.
	u *= 9
	v.hi = bits.RotateLeft64(v.hi, -28)
	v.lo = bits.RotateLeft64(v.lo, -20)
	w.lo += (uint64(slen-1) & 63)
	u += y
	y += u
	x = bits.RotateLeft64(y-x+v.lo+binary.LittleEndian.Uint64(s[8:8+8]), -37) * mul
	y = bits.RotateLeft64(y^v.hi^binary.LittleEndian.Uint64(s[48:48+8]), -42) * mul
	x ^= w.hi * 9
	y += v.lo + binary.LittleEndian.Uint64(s[40:40+8])
	z = bits.RotateLeft64(z+w.lo, -33) * mul
	v.lo, v.hi = weakHashLen32WithSeeds(s, v.hi*mul, x+w.lo)
	w.lo, w.hi = weakHashLen32WithSeeds(s[32:], z+w.hi, y+binary.LittleEndian.Uint64(s[16:16+8]))
	return uoH(hashLen16Mul(v.lo+x, w.lo^y, mul)+z-u,
		uoH(v.hi+y, w.hi+z, k2, 30)^x,
		k2,
		31)
}
// Hash64WithSeed hashes a byte slice and a uint64 seed and returns a uint64 hash value
func Hash64WithSeed(s []byte, seed uint64) uint64 {
if len(s) <= 64 {
return naHash64WithSeed(s, seed)
}
return Hash64WithSeeds(s, 0, seed)
}
// Hash64 hashes a byte slice and returns a uint64 hash value
func uoHash64(s []byte) uint64 {
if len(s) <= 64 {
return naHash64(s)
}
return Hash64WithSeeds(s, 81, 0)
} | vendor/github.com/dgryski/go-farm/farmhashuo.go | 0.731346 | 0.409575 | farmhashuo.go | starcoder |
package chart
import (
"math"
"sort"
)
// ValueSequence returns a sequence for a given values set.
func ValueSequence(values ...float64) Seq {
	return Seq{NewArray(values...)}
}

// Sequence is a provider for values for a seq.
type Sequence interface {
	Len() int // number of values available
	GetValue(int) float64 // value at the given index
}

// Seq is a utility wrapper for seq providers, adding functional and
// statistical helpers on top of the raw Sequence.
type Seq struct {
	Sequence
}
// Values enumerates the sequence into a newly allocated slice
// (nil for an empty sequence).
func (s Seq) Values() []float64 {
	if s.Len() == 0 {
		return nil
	}
	out := make([]float64, 0, s.Len())
	for i := 0; i < s.Len(); i++ {
		out = append(out, s.GetValue(i))
	}
	return out
}
// Each invokes mapfn once per index/value pair in the sequence.
func (s Seq) Each(mapfn func(int, float64)) {
	for idx := 0; idx < s.Len(); idx++ {
		mapfn(idx, s.GetValue(idx))
	}
}
// Map applies the `mapfn` to all values in the value provider,
// returning a new seq holding the transformed values.
//
// Bug fix: the previous implementation called mapfn but discarded its
// result, so the returned seq was always all zeros.
func (s Seq) Map(mapfn func(i int, v float64) float64) Seq {
	output := make([]float64, s.Len())
	for i := 0; i < s.Len(); i++ {
		output[i] = mapfn(i, s.GetValue(i))
	}
	return Seq{Array(output)}
}
// FoldLeft collapses the sequence from left to right, seeding the
// accumulator with the first element. Returns 0 for an empty sequence.
func (s Seq) FoldLeft(mapfn func(i int, v0, v float64) float64) float64 {
	switch s.Len() {
	case 0:
		return 0
	case 1:
		return s.GetValue(0)
	}
	acc := s.GetValue(0)
	for i := 1; i < s.Len(); i++ {
		acc = mapfn(i, acc, s.GetValue(i))
	}
	return acc
}
// FoldRight collapses the sequence from right to left, seeding the
// accumulator with the last element. Returns 0 for an empty sequence.
func (s Seq) FoldRight(mapfn func(i int, v0, v float64) float64) float64 {
	switch s.Len() {
	case 0:
		return 0
	case 1:
		return s.GetValue(0)
	}
	acc := s.GetValue(s.Len() - 1)
	for i := s.Len() - 2; i >= 0; i-- {
		acc = mapfn(i, acc, s.GetValue(i))
	}
	return acc
}
// Min returns the smallest value in the sequence (0 when empty).
func (s Seq) Min() float64 {
	if s.Len() == 0 {
		return 0
	}
	best := s.GetValue(0)
	for i := 1; i < s.Len(); i++ {
		if v := s.GetValue(i); v < best {
			best = v
		}
	}
	return best
}
// Max returns the largest value in the sequence (0 when empty).
func (s Seq) Max() float64 {
	if s.Len() == 0 {
		return 0
	}
	best := s.GetValue(0)
	for i := 1; i < s.Len(); i++ {
		if v := s.GetValue(i); v > best {
			best = v
		}
	}
	return best
}
// MinMax returns the minimum and the maximum of the sequence in a
// single pass (both 0 when empty).
func (s Seq) MinMax() (min, max float64) {
	if s.Len() == 0 {
		return
	}
	min = s.GetValue(0)
	max = min
	for i := 1; i < s.Len(); i++ {
		v := s.GetValue(i)
		if v < min {
			min = v
		}
		if v > max {
			max = v
		}
	}
	return
}
// Sort returns a new Seq with the values in ascending order.
// This fully enumerates the sequence.
func (s Seq) Sort() Seq {
	if s.Len() == 0 {
		return s
	}
	vals := s.Values()
	sort.Float64s(vals)
	return Seq{Array(vals)}
}
// Reverse returns a new Seq with the element order flipped.
func (s Seq) Reverse() Seq {
	if s.Len() == 0 {
		return s
	}
	values := s.Values()
	// Classic two-pointer in-place reversal.
	for lo, hi := 0, len(values)-1; lo < hi; lo, hi = lo+1, hi-1 {
		values[lo], values[hi] = values[hi], values[lo]
	}
	return Seq{Array(values)}
}
// Median returns the middle value of the sorted sequence: the mean of the two
// central elements for an even length, the single central element for an odd
// length. Returns 0 for an empty sequence.
//
// Bug fixes: the even-length branch previously averaged indices l/2-1 and
// l/2+1, skipping the true upper-middle element l/2; the odd-length branch
// indexed l<<1 (i.e. 2*l, out of range) instead of the middle index l/2.
func (s Seq) Median() (median float64) {
	l := s.Len()
	if l == 0 {
		return
	}

	sorted := s.Sort()
	if l%2 == 0 {
		v0 := sorted.GetValue(l/2 - 1)
		v1 := sorted.GetValue(l / 2)
		median = (v0 + v1) / 2
	} else {
		median = sorted.GetValue(l / 2)
	}
	return
}
// Sum adds all the elements of the sequence together (0 when empty).
func (s Seq) Sum() float64 {
	var total float64
	for i := 0; i < s.Len(); i++ {
		total += s.GetValue(i)
	}
	return total
}
// Average returns the arithmetic mean of the values (0 when empty).
func (s Seq) Average() float64 {
	n := s.Len()
	if n == 0 {
		return 0
	}
	return s.Sum() / float64(n)
}
// Variance computes the population variance of the sequence (0 when empty).
func (s Seq) Variance() float64 {
	if s.Len() == 0 {
		return 0
	}
	mean := s.Average()
	var total float64
	for i := 0; i < s.Len(); i++ {
		d := s.GetValue(i) - mean
		total += d * d
	}
	return total / float64(s.Len())
}
// StdDev returns the population standard deviation — the square root of the
// variance (0 when empty).
func (s Seq) StdDev() float64 {
	if s.Len() == 0 {
		return 0
	}
	// math.Sqrt is the idiomatic (and direct) form of math.Pow(x, 0.5).
	return math.Sqrt(s.Variance())
}
// Percentile finds the relative standing in a slice of floats.
// `percent` should be given on the interval [0,1.0).
// NOTE(review): the guard below actually admits percent == 1.0, which
// contradicts the half-open interval in the panic message — confirm the
// intended bound.
func (s Seq) Percentile(percent float64) (percentile float64) {
	l := s.Len()
	if l == 0 {
		return 0
	}

	if percent < 0 || percent > 1.0 {
		panic("percent out of range [0.0, 1.0)")
	}

	sorted := s.Sort()
	index := percent * float64(l)
	// f64i is a package helper converting the float index to an int;
	// presumably a ceiling/rounding conversion — verify its definition.
	i := f64i(index)
	if index == float64(int64(index)) {
		// Exact boundary: average the two straddling elements.
		ci := sorted.GetValue(i - 1)
		c := sorted.GetValue(i)
		percentile = (ci + c) / 2.0
	} else {
		percentile = sorted.GetValue(i)
	}
	return percentile
}
// Normalize maps every value to the interval [0, 1.0].
func (s Seq) Normalize() Seq {
min, max := s.MinMax()
delta := max - min
output := make([]float64, s.Len())
for i := 0; i < s.Len(); i++ {
output[i] = (s.GetValue(i) - min) / delta
}
return Seq{Array(output)}
} | seq.go | 0.911156 | 0.511595 | seq.go | starcoder |
package crawl
import "github.com/cdipaolo/sentiment"
// SentimentAnalyzer defines an interface for retrieving the sentiment score of a tweet.
type SentimentAnalyzer interface {
	GetScoreForTweet(tweet string) int32
}

// simpleSentimentAnalyzer scores tweets using a pre-trained model from the
// cdipaolo/sentiment package.
type simpleSentimentAnalyzer struct {
	model sentiment.Models
}
// calculateSentimentScore blends the word-level average, the sentence-level
// average and the overall score of an analysis into one int32, scaled by 100.
// NOTE(review): the total is divided by 3 even when the words/sentences
// components are absent, biasing such inputs toward 0 — confirm intended.
func calculateSentimentScore(analysis *sentiment.Analysis) int32 {
	total := 0.0

	// Word-level component.
	if len(analysis.Words) > 0 {
		sum := 0.0
		for _, word := range analysis.Words {
			sum += float64(word.Score)
		}
		total += sum / float64(len(analysis.Words))
	}

	// Sentence-level component.
	if len(analysis.Sentences) > 0 {
		sum := 0.0
		for _, sentence := range analysis.Sentences {
			sum += float64(sentence.Score)
		}
		total += sum / float64(len(analysis.Sentences))
	}

	// Overall component, then average the three parts.
	total += float64(analysis.Score)
	total /= 3.0

	return int32(total * 100)
}
// GetScoreForTweet returns the sentiment score for a tweet using the simple
// sentiment analyzer. Smaller scores indicate meaner tweets.
func (analyzer *simpleSentimentAnalyzer) GetScoreForTweet(tweet string) int32 {
	return calculateSentimentScore(analyzer.model.SentimentAnalysis(tweet, sentiment.English))
}
// NewSentimentAnalyzer restores the pre-trained sentiment model and returns a
// simple analyzer wrapping it, or the restore error.
func NewSentimentAnalyzer() (SentimentAnalyzer, error) {
	model, err := sentiment.Restore()
	if err != nil {
		return nil, err
	}
	return &simpleSentimentAnalyzer{model}, nil
}
// SentimentAnalyzerMock provides a mock SentimentAnalyzer for unit tests of files
// that depend on a SentimentAnalyzer.
type SentimentAnalyzerMock struct {
	Scores map[string]int32 // canned score per exact tweet text
}

// GetScoreForTweet returns the sentiment score for a given tweet.
// Tweets not present in Scores yield the zero value 0.
func (analyzer SentimentAnalyzerMock) GetScoreForTweet(tweet string) int32 {
	return analyzer.Scores[tweet]
}
package consentconstants
import base "github.com/aclrys/go-gdpr/consentconstants"
// TCF 2.0 Purposes:
const (
// InfoStorageAccess includes the storage of information, or access to information that is already stored,
// on your device such as advertising identifiers, device identifiers, cookies, and similar technologies.
InfoStorageAccess base.Purpose = 1
// Cookies, device identifiers, or other information can be stored or accessed on your device for the purposes presented to you.
// Vendors can:
// * Store and access information on the device such as cookies and device identifiers presented to a user.
// Reuse InfoStorageAccess above
// Ads can be shown to you based on the content you are viewing, the app you are using, your approximate location, or your device type.
// To do basic ad selection vendors can:
// * Use real-time information about the context in which the ad will be shown, to show the ad, including information about the content and
// the device, such as: device type and capabilities, user agent, URL, IP address
// * Use a user's non-precise geolocation data
// * Control the frequency of ads shown to a user.\n* Sequence the order in which ads are shown to a user.
// * Prevent an ad from serving in an unsuitable editorial (brand-unsafe) context
// Vendors cannot:
// * Create a personalised ads profile using this information for the selection of future ads.
// * N.B. Non-precise means only an approximate location involving at least a radius of 500 meters is permitted.
BasicAdserving base.Purpose = 2
// A profile can be built about you and your interests to show you personalised ads that are relevant to you.
// To create a personalised ads profile vendors can:
// * Collect information about a user, including a user's activity, interests, demographic information, or location, to create or edit a user profile for use in personalised advertising.
// * Combine this information with other information previously collected, including from across websites and apps, to create or edit a user profile for use in personalised advertising.
PersonalizationProfile base.Purpose = 3
// Personalised ads can be shown to you based on a profile about you.
// To select personalised ads vendors can:
// * Select personalised ads based on a user profile or other historical user data, including a user's prior activity, interests, visits to sites or apps, location, or demographic information.
PersonalizationSelection base.Purpose = 4
// A profile can be built about you and your interests to show you personalised content that is relevant to you.
// To create a personalised content profile vendors can:
// * Collect information about a user, including a user's activity, interests, visits to sites or apps, demographic information, or location, to create or edit a user profile for personalising content.
// * Combine this information with other information previously collected, including from across websites and apps, to create or edit a user profile for use in personalising content.
ContentProfile base.Purpose = 5
// Personalised content can be shown to you based on a profile about you.
// To select personalised content vendors can:
	// * Select personalised content based on a user profile or other historical user data, including a user's prior activity, interests, visits to sites or apps, location, or demographic information.
ContentSelection base.Purpose = 6
// The performance and effectiveness of ads that you see or interact with can be measured.
// To measure ad performance vendors can:
// * Measure whether and how ads were delivered to and interacted with by a user
// * Provide reporting about ads including their effectiveness and performance
// * Provide reporting about users who interacted with ads using data observed during the course of the user's interaction with that ad
// * Provide reporting to publishers about the ads displayed on their property
// * Measure whether an ad is serving in a suitable editorial environment (brand-safe) context
// * Determine the percentage of the ad that had the opportunity to be seen and the duration of that opportunity
// * Combine this information with other information previously collected, including from across websites and apps
// Vendors cannot:
	// * Apply panel- or similarly-derived audience insights data to ad measurement data without a Legal Basis to apply market research to generate audience insights (Purpose 9)
AdPerformance base.Purpose = 7
// The performance and effectiveness of content that you see or interact with can be measured.
// To measure content performance vendors can:
// * Measure and report on how content was delivered to and interacted with by users.
// * Provide reporting, using directly measurable or known information, about users who interacted with the content
// * Combine this information with other information previously collected, including from across websites and apps.
// Vendors cannot:
// * Measure whether and how ads (including native ads) were delivered to and interacted with by a user.
// * Apply panel- or similarly derived audience insights data to ad measurement data without a Legal Basis to apply market research to generate audience insights (Purpose 9)
ContentPerformance base.Purpose = 8
// Market research can be used to learn more about the audiences who visit sites/apps and view ads.
// To apply market research to generate audience insights vendors can:
// * Provide aggregate reporting to advertisers or their representatives about the audiences reached by their ads, through panel-based and similarly derived insights.
// * Provide aggregate reporting to publishers about the audiences that were served or interacted with content and/or ads on their property by applying panel-based and similarly derived insights.
// * Associate offline data with an online user for the purposes of market research to generate audience insights if vendors have declared to match and combine offline data sources (Feature 1)
// * Combine this information with other information previously collected including from across websites and apps.
// Vendors cannot:
// * Measure the performance and effectiveness of ads that a specific user was served or interacted with, without a Legal Basis to measure ad performance.
// * Measure which content a specific user was served and how they interacted with it, without a Legal Basis to measure content performance.
MarketResearch base.Purpose = 9
// Your data can be used to improve existing systems and software, and to develop new products
// To develop new products and improve products vendors can:
// * Use information to improve their existing products with new features and to develop new products
// * Create new models and algorithms through machine learning
// Vendors cannot:
// * Conduct any other data processing operation allowed under a different purpose under this purpose
DevelopImprove base.Purpose = 10
)
package sys
import "tools/sysdec"
var GPIO = &sysdec.PeripheralDef{
Version: 1,
Description: `There are 54 general-purpose I/O (GPIO) lines split into
two banks. All GPIO pins have at least two alternative functions within BCM.
The alternate functions are usually peripheral IO and a single peripheral may
appear in each bank to allow flexibility on the choice of IO voltage.
Note: Most users will want to use the function GPIOSetup rather than setting
or clearing the function select registers and then manipulating the Pull-Up/Down
Register and the associated clocks. GPIOSetup allows you to choose the
function for a particular pin and it handles these operations for you.
`,
AddressBlock: sysdec.AddressBlockDef{BaseAddress: 0x20_0000, Size: 0x9C},
Register: map[string]*sysdec.RegisterDef{
"FSel[%s]": {
Description: `The function select registers are used to define
the operation of the general-purpose I/O pins. Each of the 54 GPIO pins has
at least two alternative functions as defined in section 16.2. The FSEL{n}
field determines the functionality of the nth GPIO pin. All unused alternative
function lines are tied to ground and will output a “0” if selected. All
pins reset to normal GPIO input operation.`,
Dim: 6,
DimIncrement: 4,
AddressOffset: 0x0,
Size: 32,
Access: sysdec.Access("rw"),
},
"GPSet[%s]": {
Description: `The output set registers are used to set a GPIO pin.
The SET{n} field defines the respective GPIO pin to set, writing a “0” to the
field has no effect. If the GPIO pin is being used as in input (by default)
then the value in the SET{n} field is ignored. However, if the pin is
subsequently defined as an output then the bit will be set according to the
last set/clear operation. Separating the set and clear functions removes the
need for read-modify-write operations.`,
Dim: 2,
DimIncrement: 4,
Size: 32,
AddressOffset: 0x1C,
Access: sysdec.Access("w"),
},
"GPClr[%s]": {
Description: `The output clear registers are used to clear a GPIO
pin. The CLR{n} field defines the respective GPIO pin to clear, writing a
“0” to the field has no effect. If the GPIO pin is being used as in input
(by default) then the value in the CLR{n} field is ignored. However, if the
pin is subsequently defined as an output then the bit will be set
according to the last set/clear operation. Separating the set and clear
functions removes the need for read-modify-write operations.`,
Dim: 2,
DimIncrement: 4,
Size: 32,
AddressOffset: 0x28,
Access: sysdec.Access("w"),
},
"GPLev[%s]": {
Description: `The pin level registers return the actual
value of the pin. The LEV{n} field gives the value of the respective GPIO
pin.`,
Dim: 2,
DimIncrement: 4,
Size: 32,
AddressOffset: 0x34,
Access: sysdec.Access("r"),
},
"GPPED[%s]": {
Description: `The event detect status registers are used to record
level and edge events on the GPIO pins. The relevant bit in the event
detect status registers is set whenever: 1) an edge is detected that matches
the type of edge programmed in the rising/falling edge detect enable registers,
or 2) a level is detected that matches the type of level programmed in the
high/low level detect enable registers. The bit is cleared by writing a “1”
to the relevant bit.
The interrupt controller can be programmed to interrupt the processor when
any of the status bits are set. The GPIO peripheral has three dedicated
interrupt lines. Each GPIO bank can generate an independent interrupt. The
third line generates a single interrupt whenever any bit is set.`,
Dim: 2,
DimIncrement: 4,
Size: 32,
AddressOffset: 0x40,
Access: sysdec.Access("rw"),
},
"GPRE[%s]": {
Description: `The rising edge detect enable registers define
the pins for which a rising edge transition sets a bit in the event detect
status registers (GPEDSn). When the relevant bits are set in both the GPRENn
and GPFENn registers, any transition (1 to 0 and 0 to 1) will set a bit in
the GPEDSn registers. The GPRENn registers use synchronous edge detection.
This means the input signal is sampled using the system clock and then it
is looking for a “011” pattern on the sampled signal. This has the effect
of suppressing glitches.`,
Dim: 2,
DimIncrement: 4,
Size: 32,
AddressOffset: 0x4C,
Access: sysdec.Access("rw"),
},
"GPFE[%s]": {
Description: `The falling edge detect enable registers define
the pins for which a falling edge transition sets a bit in the event detect
status registers (GPEDSn). When the relevant bits are set in both the GPRENn
and GPFENn registers, any transition (1 to 0 and 0 to 1) will set a bit in
the GPEDSn registers. The GPFENn registers use synchronous edge detection.
This means the input signal is sampled using the system clock and then it is
looking for a “100” pattern on the sampled signal. This has the effect of
suppressing glitches.`,
Dim: 2,
DimIncrement: 4,
Size: 32,
AddressOffset: 0x58,
Access: sysdec.Access("rw"),
},
"GPHE[%s]": {
Description: `The high level detect enable registers define
the pins for which a high level sets a bit in the event detect status register
(GPEDSn). If the pin is still high when an attempt is made to clear the status
bit in GPEDSn then the status bit will remain set.`,
Dim: 2,
Size: 32,
DimIncrement: 4,
AddressOffset: 0x64,
Access: sysdec.Access("rw"),
},
"GPLEn[%s]": {
Description: `The low level detect enable registers define
the pins for which a low level sets a bit in the event detect status
register (GPEDSn). If the pin is still low when an attempt is made to
clear the status bit in GPEDSn then the status bit will remain set.`,
Dim: 2,
DimIncrement: 4,
Size: 32,
AddressOffset: 0x70,
Access: sysdec.Access("rw"),
},
"GPARE[%s]": {
Description: `The asynchronous rising edge detect enable
registers define the pins for which a asynchronous rising edge transition
sets a bit in the event detect status registers (GPEDSn).
Asynchronous means the incoming signal is not sampled by the system clock.
As such rising edges of very short duration can be detected.`,
Dim: 2,
DimIncrement: 4,
Size: 32,
AddressOffset: 0x7C,
Access: sysdec.Access("rw"),
},
"GPAFE[%s]": {
Description: `The asynchronous falling edge detect enable
registers define the pins for which a asynchronous falling edge transition
sets a bit in the event detect status registers (GPEDSn). Asynchronous
means the incoming signal is not sampled by the system clock. As such falling
edges of very short duration can be detected.`,
Dim: 2,
DimIncrement: 4,
Size: 32,
AddressOffset: 0x88,
Access: sysdec.Access("rw"),
},
"GPPUD": {
Description: `The GPIO Pull-up/down Register controls the
actuation of the internal pull-up/down control line to ALL the GPIO pins.
This register must be used in conjunction with the 2 GPPUDCLKn registers.
Note that it is not possible to read back the current Pull-up/down settings
and so it is the users’ responsibility to ‘remember’ which pull-up/downs are
active. The reason for this is that GPIO pull-ups are maintained even in
power-down mode when the core is off, when all register contents is lost.
The Alternate function table also has the pull state which is applied after
a power down.`,
Size: 32,
AddressOffset: 0x94,
Access: sysdec.Access("rw"),
},
"GPUDClk[%s]": {
Description: `The GPIO Pull-up/down Clock Registers
control the actuation of internal pull-downs on the respective GPIO pins.
These registers must be used in conjunction with the GPPUD register to effect
GPIO Pull-up/down changes. The following sequence of events is required:
1. Write to GPPUD to set the required control signal (i.e. Pull-up or
Pull-Down or neither to remove the current Pull-up/down)
2. Wait 150 cycles – this provides the required set-up time for the
control signal
3. Write to GPPUDCLK0/1 to clock the control signal into the GPIO pads
you wish to modify – NOTE only the pads which receive a clock will be modified,
all others will retain their previous state.
4. Wait 150 cycles – this provides the required hold time for the
control signal
5. Write to GPPUD to remove the control signal
6. Write to GPPUDCLK0/1 to remove the clock`,
Dim: 2,
DimIncrement: 4,
Size: 32,
AddressOffset: 0x98,
Access: sysdec.Access("rw"),
},
},
}
package iso20022
// Set of key elements of the original transaction being referred to.
type OriginalTransactionReference1 struct {
// Amount of money moved between the instructing agent and the instructed agent.
InterbankSettlementAmount *CurrencyAndAmount `xml:"IntrBkSttlmAmt,omitempty"`
// Amount of money to be moved between the debtor and creditor, before deduction of charges, expressed in the currency as ordered by the initiating party.
Amount *AmountType2Choice `xml:"Amt,omitempty"`
// Date on which the amount of money ceases to be available to the agent that owes it and when the amount of money becomes available to the agent to which it is due.
InterbankSettlementDate *ISODate `xml:"IntrBkSttlmDt,omitempty"`
// Date at which the initiating party requests that the clearing agent to process the payment. If payment by cheque, the date when the cheque must be generated by the bank.
//
// Usage: This is the date on which the debtor's account(s) is (are) to be debited.
RequestedExecutionDate *ISODate `xml:"ReqdExctnDt,omitempty"`
// Date at which the creditor requests the amount of money to be collected from the debtor.
RequestedCollectionDate *ISODate `xml:"ReqdColltnDt,omitempty"`
// Credit party that signs the direct debit mandate.
CreditorSchemeIdentification *PartyIdentification8 `xml:"CdtrSchmeId,omitempty"`
// Specifies the details on how the settlement of the original transaction(s) between the instructing agent and the instructed agent was completed.
SettlementInformation *SettlementInformation3 `xml:"SttlmInf,omitempty"`
// Set of elements used to further specify the type of transaction.
PaymentTypeInformation *PaymentTypeInformation6 `xml:"PmtTpInf,omitempty"`
// Specifies the transfer method that will be used by the instructing agent to transfer the funds to the creditor.
PaymentMethod *PaymentMethod4Code `xml:"PmtMtd,omitempty"`
// Set of elements used to provide further details related to a direct debit mandate signed between the creditor and the debtor.
//
// Usage: Mandate related information is to be used only when the direct debit relates to a mandate signed between the debtor and the creditor.
MandateRelatedInformation *MandateRelatedInformation1 `xml:"MndtRltdInf,omitempty"`
// Information supplied to enable the matching of an entry with the items that the transfer is intended to settle, such as commercial invoices in an accounts' receivable system.
RemittanceInformation *RemittanceInformation1 `xml:"RmtInf,omitempty"`
// Ultimate party that owes an amount of money to the (ultimate) creditor.
UltimateDebtor *PartyIdentification8 `xml:"UltmtDbtr,omitempty"`
// Party that owes an amount of money to the (ultimate) creditor.
Debtor *PartyIdentification8 `xml:"Dbtr,omitempty"`
// Unambiguous identification of the account of the debtor to which a debit entry will be made as a result of the transaction.
DebtorAccount *CashAccount7 `xml:"DbtrAcct,omitempty"`
// Financial institution servicing an account for the debtor.
DebtorAgent *BranchAndFinancialInstitutionIdentification3 `xml:"DbtrAgt,omitempty"`
// Unambiguous identification of the account of the debtor agent at its servicing agent in the payment chain.
DebtorAgentAccount *CashAccount7 `xml:"DbtrAgtAcct,omitempty"`
// Financial institution servicing an account for the creditor.
CreditorAgent *BranchAndFinancialInstitutionIdentification3 `xml:"CdtrAgt,omitempty"`
// Unambiguous identification of the account of the creditor agent at its servicing agent to which a credit entry will be made as a result of the payment transaction.
CreditorAgentAccount *CashAccount7 `xml:"CdtrAgtAcct,omitempty"`
// Party to which an amount of money is due.
Creditor *PartyIdentification8 `xml:"Cdtr,omitempty"`
// Unambiguous identification of the account of the creditor to which a credit entry will be posted as a result of the payment transaction.
CreditorAccount *CashAccount7 `xml:"CdtrAcct,omitempty"`
// Ultimate party to which an amount of money is due.
UltimateCreditor *PartyIdentification8 `xml:"UltmtCdtr,omitempty"`
}
func (o *OriginalTransactionReference1) SetInterbankSettlementAmount(value, currency string) {
o.InterbankSettlementAmount = NewCurrencyAndAmount(value, currency)
}
func (o *OriginalTransactionReference1) AddAmount() *AmountType2Choice {
o.Amount = new(AmountType2Choice)
return o.Amount
}
func (o *OriginalTransactionReference1) SetInterbankSettlementDate(value string) {
o.InterbankSettlementDate = (*ISODate)(&value)
}
func (o *OriginalTransactionReference1) SetRequestedExecutionDate(value string) {
o.RequestedExecutionDate = (*ISODate)(&value)
}
func (o *OriginalTransactionReference1) SetRequestedCollectionDate(value string) {
o.RequestedCollectionDate = (*ISODate)(&value)
}
func (o *OriginalTransactionReference1) AddCreditorSchemeIdentification() *PartyIdentification8 {
o.CreditorSchemeIdentification = new(PartyIdentification8)
return o.CreditorSchemeIdentification
}
func (o *OriginalTransactionReference1) AddSettlementInformation() *SettlementInformation3 {
o.SettlementInformation = new(SettlementInformation3)
return o.SettlementInformation
}
func (o *OriginalTransactionReference1) AddPaymentTypeInformation() *PaymentTypeInformation6 {
o.PaymentTypeInformation = new(PaymentTypeInformation6)
return o.PaymentTypeInformation
}
func (o *OriginalTransactionReference1) SetPaymentMethod(value string) {
o.PaymentMethod = (*PaymentMethod4Code)(&value)
}
func (o *OriginalTransactionReference1) AddMandateRelatedInformation() *MandateRelatedInformation1 {
o.MandateRelatedInformation = new(MandateRelatedInformation1)
return o.MandateRelatedInformation
}
func (o *OriginalTransactionReference1) AddRemittanceInformation() *RemittanceInformation1 {
o.RemittanceInformation = new(RemittanceInformation1)
return o.RemittanceInformation
}
func (o *OriginalTransactionReference1) AddUltimateDebtor() *PartyIdentification8 {
o.UltimateDebtor = new(PartyIdentification8)
return o.UltimateDebtor
}
func (o *OriginalTransactionReference1) AddDebtor() *PartyIdentification8 {
o.Debtor = new(PartyIdentification8)
return o.Debtor
}
func (o *OriginalTransactionReference1) AddDebtorAccount() *CashAccount7 {
o.DebtorAccount = new(CashAccount7)
return o.DebtorAccount
}
func (o *OriginalTransactionReference1) AddDebtorAgent() *BranchAndFinancialInstitutionIdentification3 {
o.DebtorAgent = new(BranchAndFinancialInstitutionIdentification3)
return o.DebtorAgent
}
func (o *OriginalTransactionReference1) AddDebtorAgentAccount() *CashAccount7 {
o.DebtorAgentAccount = new(CashAccount7)
return o.DebtorAgentAccount
}
func (o *OriginalTransactionReference1) AddCreditorAgent() *BranchAndFinancialInstitutionIdentification3 {
o.CreditorAgent = new(BranchAndFinancialInstitutionIdentification3)
return o.CreditorAgent
}
func (o *OriginalTransactionReference1) AddCreditorAgentAccount() *CashAccount7 {
o.CreditorAgentAccount = new(CashAccount7)
return o.CreditorAgentAccount
}
func (o *OriginalTransactionReference1) AddCreditor() *PartyIdentification8 {
o.Creditor = new(PartyIdentification8)
return o.Creditor
}
func (o *OriginalTransactionReference1) AddCreditorAccount() *CashAccount7 {
o.CreditorAccount = new(CashAccount7)
return o.CreditorAccount
}
func (o *OriginalTransactionReference1) AddUltimateCreditor() *PartyIdentification8 {
o.UltimateCreditor = new(PartyIdentification8)
return o.UltimateCreditor
} | OriginalTransactionReference1.go | 0.755457 | 0.544256 | OriginalTransactionReference1.go | starcoder |
package graphics
import (
"fmt"
"github.com/go-gl/gl/v4.6-core/gl"
"github.com/mokiat/gomath/sprec"
"github.com/mokiat/lacking/framework/opengl"
"github.com/mokiat/lacking/framework/opengl/game/graphics/internal"
"github.com/mokiat/lacking/game/graphics"
)
// NewEngine creates a graphics engine backed by the OpenGL renderer.
func NewEngine() *Engine {
	engine := &Engine{}
	engine.renderer = newRenderer()
	return engine
}
// Compile-time check that Engine satisfies the graphics.Engine interface.
var _ graphics.Engine = (*Engine)(nil)

// Engine is the OpenGL-backed implementation of graphics.Engine.
type Engine struct {
	// renderer performs the drawing; it is shared by all scenes created by
	// this engine.
	renderer *Renderer
}
// Create allocates the renderer's resources. It should be called before the
// engine is used (presumably with a current GL context — TODO confirm).
func (e *Engine) Create() {
	e.renderer.Allocate()
}
// CreateScene returns a new scene that renders through this engine's
// shared renderer.
func (e *Engine) CreateScene() graphics.Scene {
	return newScene(e.renderer)
}
// CreateTwoDTexture allocates a 2D OpenGL texture according to the supplied
// definition, translating the engine-agnostic enum values (wrap modes,
// filters, data/internal formats) into their GL counterparts.
func (e *Engine) CreateTwoDTexture(definition graphics.TwoDTextureDefinition) graphics.TwoDTexture {
	allocateInfo := opengl.TwoDTextureAllocateInfo{
		Width:           int32(definition.Width),
		Height:          int32(definition.Height),
		WrapS:           e.convertWrap(definition.WrapS),
		WrapT:           e.convertWrap(definition.WrapT),
		MinFilter:       e.convertMinFilter(definition.MinFilter),
		MagFilter:       e.convertMagFilter(definition.MagFilter),
		UseAnisotropy:   definition.UseAnisotropy,
		GenerateMipmaps: definition.GenerateMipmaps,
		DataFormat:      e.convertDataFormat(definition.DataFormat),
		// The GL component type is derived from the same DataFormat value.
		DataComponentType: e.convertDataComponentType(definition.DataFormat),
		InternalFormat:    e.convertInternalFormat(definition.InternalFormat),
		Data:              definition.Data,
	}
	result := newTwoDTexture()
	result.TwoDTexture.Allocate(allocateInfo)
	return result
}
// CreateCubeTexture allocates a cube-map OpenGL texture according to the
// supplied definition, translating the engine-agnostic enum values into
// their GL counterparts and passing through the six face data buffers.
func (e *Engine) CreateCubeTexture(definition graphics.CubeTextureDefinition) graphics.CubeTexture {
	allocateInfo := opengl.CubeTextureAllocateInfo{
		Dimension: int32(definition.Dimension),
		WrapS:     e.convertWrap(definition.WrapS),
		WrapT:     e.convertWrap(definition.WrapT),
		MinFilter: e.convertMinFilter(definition.MinFilter),
		MagFilter: e.convertMagFilter(definition.MagFilter),
		// The GL component type is derived from the same DataFormat value.
		DataFormat:        e.convertDataFormat(definition.DataFormat),
		DataComponentType: e.convertDataComponentType(definition.DataFormat),
		InternalFormat:    e.convertInternalFormat(definition.InternalFormat),
		FrontSideData:     definition.FrontSideData,
		BackSideData:      definition.BackSideData,
		LeftSideData:      definition.LeftSideData,
		RightSideData:     definition.RightSideData,
		TopSideData:       definition.TopSideData,
		BottomSideData:    definition.BottomSideData,
	}
	result := newCubeTexture()
	result.CubeTexture.Allocate(allocateInfo)
	return result
}
// CreateMeshTemplate uploads the definition's vertex and index data to GPU
// buffers, builds a vertex array describing whichever attributes the vertex
// format enables, and records one SubMeshTemplate per sub-mesh.
//
// NOTE(review): every sub-mesh material must have been created by this
// engine; the type assertion below panics otherwise.
func (e *Engine) CreateMeshTemplate(definition graphics.MeshTemplateDefinition) graphics.MeshTemplate {
	// Upload the raw vertex and index data to static GPU buffers.
	vertexBuffer := opengl.NewBuffer()
	vertexBuffer.Allocate(opengl.BufferAllocateInfo{
		Dynamic: false,
		Data:    definition.VertexData,
	})
	indexBuffer := opengl.NewBuffer()
	indexBuffer.Allocate(opengl.BufferAllocateInfo{
		Dynamic: false,
		Data:    definition.IndexData,
	})
	// Describe only the attributes that the vertex format declares present.
	var attributes []opengl.VertexArrayAttribute
	if definition.VertexFormat.HasCoord {
		attributes = append(attributes, opengl.VertexArrayAttribute{
			Index:          coordAttributeIndex,
			ComponentCount: 3,
			ComponentType:  gl.FLOAT,
			Normalized:     false,
			OffsetBytes:    uint32(definition.VertexFormat.CoordOffsetBytes),
			BufferBinding:  0,
		})
	}
	if definition.VertexFormat.HasNormal {
		attributes = append(attributes, opengl.VertexArrayAttribute{
			Index:          normalAttributeIndex,
			ComponentCount: 3,
			ComponentType:  gl.FLOAT,
			Normalized:     false,
			OffsetBytes:    uint32(definition.VertexFormat.NormalOffsetBytes),
			BufferBinding:  0,
		})
	}
	if definition.VertexFormat.HasTangent {
		attributes = append(attributes, opengl.VertexArrayAttribute{
			Index:          tangentAttributeIndex,
			ComponentCount: 3,
			ComponentType:  gl.FLOAT,
			Normalized:     false,
			OffsetBytes:    uint32(definition.VertexFormat.TangentOffsetBytes),
			BufferBinding:  0,
		})
	}
	if definition.VertexFormat.HasTexCoord {
		attributes = append(attributes, opengl.VertexArrayAttribute{
			Index:          texCoordAttributeIndex,
			ComponentCount: 2,
			ComponentType:  gl.FLOAT,
			Normalized:     false,
			OffsetBytes:    uint32(definition.VertexFormat.TexCoordOffsetBytes),
			BufferBinding:  0,
		})
	}
	if definition.VertexFormat.HasColor {
		attributes = append(attributes, opengl.VertexArrayAttribute{
			Index:          colorAttributeIndex,
			ComponentCount: 4,
			ComponentType:  gl.FLOAT,
			Normalized:     false,
			OffsetBytes:    uint32(definition.VertexFormat.ColorOffsetBytes),
			BufferBinding:  0,
		})
	}
	// Bind both buffers and the attribute layout into a single vertex array.
	vertexArray := opengl.NewVertexArray()
	vertexArray.Allocate(opengl.VertexArrayAllocateInfo{
		BufferBindings: []opengl.VertexArrayBufferBinding{
			{
				VertexBuffer: vertexBuffer,
				OffsetBytes:  0,
				StrideBytes:  int32(definition.VertexFormat.CoordStrideBytes), // FIXME: Not accurate
			},
		},
		Attributes:  attributes,
		IndexBuffer: indexBuffer,
	})
	result := &MeshTemplate{
		vertexBuffer: vertexBuffer,
		indexBuffer:  indexBuffer,
		vertexArray:  vertexArray,
		subMeshes:    make([]SubMeshTemplate, len(definition.SubMeshes)),
	}
	// Record per-sub-mesh draw parameters (material, topology, index range).
	for i, subMesh := range definition.SubMeshes {
		result.subMeshes[i] = SubMeshTemplate{
			material:         subMesh.Material.(*Material),
			primitive:        e.convertPrimitive(subMesh.Primitive),
			indexCount:       int32(subMesh.IndexCount),
			indexOffsetBytes: subMesh.IndexOffset,
		}
	}
	return result
}
// CreatePBRMaterial builds a physically-based-rendering material from the
// given definition. Texture slots left unset in the definition are stored
// as nil entries.
func (e *Engine) CreatePBRMaterial(definition graphics.PBRMaterialDefinition) graphics.Material {
	// glTexture unwraps an engine texture to its GL texture, tolerating nil.
	glTexture := func(texture graphics.TwoDTexture) *opengl.TwoDTexture {
		if texture != nil {
			return texture.(*TwoDTexture).TwoDTexture
		}
		return nil
	}
	material := &Material{
		backfaceCulling: definition.BackfaceCulling,
		alphaBlending:   definition.AlphaBlending,
		alphaTesting:    definition.AlphaTesting,
		alphaThreshold:  definition.AlphaThreshold,
		twoDTextures: []*opengl.TwoDTexture{
			glTexture(definition.AlbedoTexture),
			glTexture(definition.NormalTexture),
			glTexture(definition.MetalnessTexture),
			glTexture(definition.RoughnessTexture),
		},
		cubeTextures: []*opengl.CubeTexture{},
		vectors: []sprec.Vec4{
			definition.AlbedoColor,
			sprec.NewVec4(definition.NormalScale, definition.Metalness, definition.Roughness, 0.0),
		},
		geometryPresentation: internal.NewPBRGeometryPresentation(definition),
		shadowPresentation:   nil, // TODO
	}
	return material
}
// Destroy is the counterpart of Create: it releases the resources held by
// the renderer.
func (e *Engine) Destroy() {
	e.renderer.Release()
}
// convertWrap maps an engine wrap mode onto its OpenGL constant, panicking
// on values it does not recognize.
func (e *Engine) convertWrap(wrap graphics.Wrap) int32 {
	switch wrap {
	case graphics.WrapRepeat:
		return gl.REPEAT
	case graphics.WrapClampToEdge:
		return gl.CLAMP_TO_EDGE
	}
	panic(fmt.Errorf("unknown wrap mode: %d", wrap))
}
// convertMinFilter maps an engine minification filter (including the four
// mipmapped variants) onto its OpenGL constant, panicking on unknown values.
func (e *Engine) convertMinFilter(filter graphics.Filter) int32 {
	switch filter {
	case graphics.FilterNearest:
		return gl.NEAREST
	case graphics.FilterLinear:
		return gl.LINEAR
	case graphics.FilterNearestMipmapNearest:
		return gl.NEAREST_MIPMAP_NEAREST
	case graphics.FilterNearestMipmapLinear:
		return gl.NEAREST_MIPMAP_LINEAR
	case graphics.FilterLinearMipmapNearest:
		return gl.LINEAR_MIPMAP_NEAREST
	case graphics.FilterLinearMipmapLinear:
		return gl.LINEAR_MIPMAP_LINEAR
	}
	panic(fmt.Errorf("unknown min filter mode: %d", filter))
}
// convertMagFilter maps an engine magnification filter onto its OpenGL
// constant. Only the non-mipmapped filters are valid; anything else panics.
func (e *Engine) convertMagFilter(filter graphics.Filter) int32 {
	switch filter {
	case graphics.FilterNearest:
		return gl.NEAREST
	case graphics.FilterLinear:
		return gl.LINEAR
	}
	panic(fmt.Errorf("unknown mag filter mode: %d", filter))
}
// convertDataFormat maps an engine data format onto the matching OpenGL
// pixel format. Both supported formats carry RGBA channel data; they differ
// only in component type (see convertDataComponentType).
func (e *Engine) convertDataFormat(format graphics.DataFormat) uint32 {
	switch format {
	case graphics.DataFormatRGBA8, graphics.DataFormatRGBA32F:
		return gl.RGBA
	}
	panic(fmt.Errorf("unknown data format: %d", format))
}
// convertDataComponentType maps an engine data format onto the OpenGL
// component type of its channels (bytes for RGBA8, floats for RGBA32F),
// panicking on unknown values.
func (e *Engine) convertDataComponentType(format graphics.DataFormat) uint32 {
	switch format {
	case graphics.DataFormatRGBA8:
		return gl.UNSIGNED_BYTE
	case graphics.DataFormatRGBA32F:
		return gl.FLOAT
	}
	panic(fmt.Errorf("unknown data format: %d", format))
}
// convertInternalFormat maps an engine internal format onto its OpenGL
// constant. Note that 8-bit RGBA is stored as sRGB (SRGB8_ALPHA8).
func (e *Engine) convertInternalFormat(format graphics.InternalFormat) uint32 {
	switch format {
	case graphics.InternalFormatRGBA8:
		return gl.SRGB8_ALPHA8
	case graphics.InternalFormatRGBA32F:
		return gl.RGBA32F
	}
	panic(fmt.Errorf("unknown internal format: %d", format))
}
func (e *Engine) convertPrimitive(primitive graphics.Primitive) uint32 {
switch primitive {
case graphics.PrimitivePoints:
return gl.POINTS
case graphics.PrimitiveLines:
return gl.LINES
case graphics.PrimitiveLineStrip:
return gl.LINE_STRIP
case graphics.PrimitiveLineLoop:
return gl.LINE_LOOP
case graphics.PrimitiveTriangles:
return gl.TRIANGLES
case graphics.PrimitiveTriangleStrip:
return gl.TRIANGLE_STRIP
case graphics.PrimitiveTriangleFan:
return gl.TRIANGLE_FAN
default:
panic(fmt.Errorf("unknown primitive: %d", primitive))
}
} | framework/opengl/game/graphics/engine.go | 0.622345 | 0.417628 | engine.go | starcoder |
package bytes
import (
"math"
)
// SimpleBuilder accumulates byte-to-string replacement mappings and can
// materialize them as a pseudo-sparse array indexed by byte value: the
// array spans 0 through the highest index added, and indexes that were
// never added hold nil.
type SimpleBuilder struct {
	// replMap holds the replacement mappings.
	replMap map[byte]string
	// max is the highest index added so far.
	max byte
}

// NewSimpleBuilder constructs an empty sparse array builder.
func NewSimpleBuilder() *SimpleBuilder {
	return &SimpleBuilder{replMap: map[byte]string{}}
}

// AddEscape adds a mapping from the byte c to the replacement string r.
// It returns the builder to allow chaining.
func (b *SimpleBuilder) AddEscape(c byte, r string) *SimpleBuilder {
	b.replMap[c] = r
	if c > b.max {
		b.max = c
	}
	return b
}

// AddEscapes maps every byte in cs to the same replacement string r.
// It returns the builder to allow chaining.
func (b *SimpleBuilder) AddEscapes(cs []byte, r string) *SimpleBuilder {
	for _, c := range cs {
		b.AddEscape(c, r)
	}
	return b
}

// ToArray converts this builder into a [][]byte whose length is one past
// the highest byte index added. Unseen indexes default to nil.
func (b *SimpleBuilder) ToArray() [][]byte {
	// Widen to int before adding 1: b.max is a byte, so the byte-typed
	// expression b.max+1 would wrap to 0 when the highest index is 0xFF,
	// making the fill loop below panic.
	result := make([][]byte, int(b.max)+1)
	for k, v := range b.replMap {
		result[k] = []byte(v)
	}
	return result
}
// ToEscaper converts this SimpleBuilder into an escaper: a thin decorator
// around the replacement table produced by ToArray.
func (b *SimpleBuilder) ToEscaper() *byteArrayDecorator {
	table := b.ToArray()
	return NewByteArrayEscaper(table)
}
// byteArrayDecorator is a simple decorator that turns an array of replacement [][]bytes into an escaper, this results in a very fast escape method.
type byteArrayDecorator struct {
	*Escaper
	// repl maps a byte (used as index) to its replacement bytes; a nil
	// entry means that byte is emitted unchanged.
	repl [][]byte
	// replLen caches len(repl) as a byte so lookups can bounds-check with a
	// single byte comparison; this caps the usable table at 255 entries.
	replLen byte
}
// NewByteArrayEscaper returns an escaper that escapes based on the underlying array.
//
// Because the decorator stores the table length in a byte, only the first
// math.MaxUint8 (255) entries can ever be consulted; a longer table is
// truncated so the stored length cannot overflow. Note this means any
// replacement registered for byte 255 is silently dropped.
func NewByteArrayEscaper(repl [][]byte) (e *byteArrayDecorator) {
	replLen := len(repl)
	if replLen > math.MaxUint8 {
		// Plain assignment, not ":=": the original shadowed repl here, so
		// the truncation was discarded and the full table stayed retained.
		repl = repl[:math.MaxUint8]
		replLen = len(repl)
	}
	e = &byteArrayDecorator{
		// The escape callback closes over e, which is assigned just below.
		Escaper: NewEscaper(func(c byte) []byte { return e.escapeByte(c) }),
		repl:    repl,
		replLen: uint8(replLen),
	}
	return e
}
// Escape returns str with every byte that has a registered replacement
// escaped. The common no-escape case is detected by a scan that allocates
// nothing; the first byte needing work hands off to EscapeSlow.
func (e *byteArrayDecorator) Escape(str string) string {
	for i := 0; i < len(str); i++ {
		if c := str[i]; c < e.replLen && e.repl[c] != nil {
			return e.EscapeSlow(str, i)
		}
	}
	return str
}
func (e *byteArrayDecorator) escapeByte(c byte) []byte {
if c < e.replLen {
return e.repl[c]
}
return nil
} | escape/bytes/simplebuilder.go | 0.820218 | 0.47926 | simplebuilder.go | starcoder |
package graph
import (
"errors"
)
// tState tracks the depth-first traversal state of a single vertex.
type tState int

const (
	// stateNew: vertex not yet visited (the zero value of the state map).
	stateNew tState = iota
	// stateOpen: vertex is on the currently active DFS path.
	stateOpen
	// stateClosed: vertex and all of its descendants are fully processed.
	stateClosed
)
// NodeProvider is the interface between the vertices stored in the graph
// and various graph functions.
// This interface enables the consumers of graph functions to adopt their
// data structures for graph related operations without converting to
// a strict format beforehand.
type NodeProvider interface {
	// ID returns an identifier that can be used to uniquely identify
	// the vertex. This identifier is used internally to determine if
	// two nodes are same. The returned value must be usable as a map key.
	ID(vertex interface{}) interface{}
	// ChildCount returns the number of children this vertex has.
	ChildCount(vertex interface{}) int
	// Child returns the child vertex at index in vertex.
	// A non-nil error aborts the traversal and is returned to the caller.
	Child(vertex interface{}, index int) (interface{}, error)
}
// CycleError occurs when a cyclic reference is detected in a directed
// acyclic graph. Path holds the chain of vertices that closed the cycle.
type CycleError struct {
	Path []interface{}
}

// Error implements the error interface.
func (ce *CycleError) Error() string {
	return "not a dag"
}
// TopSort performs a topological sort of the provided graph.
// It returns a slice holding the vertices in dependency order, or an error
// if the provided graph is not a directed acyclic graph (DAG).
func TopSort(nodeProvider NodeProvider, graph ...interface{}) ([]interface{}, error) {
	if nodeProvider == nil {
		return nil, errors.New("nodeProvider should be a valid reference")
	}
	var (
		states  = make(map[interface{}]tState)
		ordered = make([]interface{}, 0)
	)
	for _, root := range graph {
		if err := dfsVisit(nodeProvider, root, states, &ordered, make([]interface{}, 0)); err != nil {
			return nil, err
		}
	}
	return ordered, nil
}
func dfsVisit(nodeProvider NodeProvider, node interface{}, traversalState map[interface{}]tState, sorted *[]interface{}, path []interface{}) error {
id := nodeProvider.ID(node)
if traversalState[id] == stateOpen {
return &CycleError{Path: append(path, node)}
}
if traversalState[id] == stateClosed {
return nil
}
traversalState[id] = stateOpen
path = append(path, node)
for i := 0; i < nodeProvider.ChildCount(node); i++ {
c, err := nodeProvider.Child(node, i)
if err != nil {
return err
}
err = dfsVisit(nodeProvider, c, traversalState, sorted, path)
if err != nil {
return err
}
}
traversalState[id] = stateClosed
*sorted = append(*sorted, node)
return nil
} | graph/top_sort.go | 0.805403 | 0.461502 | top_sort.go | starcoder |
package difference_digest
import (
"database/sql"
"fmt"
"math"
)
const (
	// stratumCount is the number of IBF strata a StrataEstimator keeps.
	stratumCount = 64
	// cellsCount is the number of cells allocated for each stratum's
	// invertible Bloom filter (also passed to the SQL encoding query).
	cellsCount = 80
)
// StrataEstimator is a data structure used to estimate the number of differences between 2 sets probabilistically.
type StrataEstimator struct {
	// Stratum holds stratumCount invertible Bloom filters; Add routes each
	// element into exactly one of them via estimatorHash.
	Stratum []InvertibleBloomFilter
}
// NewStrataEstimator initializes a StrataEstimator with every stratum backed
// by an empty invertible Bloom filter of cellsCount cells.
func NewStrataEstimator() *StrataEstimator {
	estimator := &StrataEstimator{
		Stratum: make([]InvertibleBloomFilter, stratumCount),
	}
	for i := 0; i < stratumCount; i++ {
		estimator.Stratum[i] = *NewIBF(cellsCount)
	}
	return estimator
}
// Add inserts element into the stratum selected by estimatorHash.
func (se *StrataEstimator) Add(element uint64) {
	stratum := estimatorHash(element)
	se.Stratum[stratum].Add(element)
}
// EstimateDifference returns the estimated number of differences between the
// receiver and a 2nd Strata Estimator.
//
// Strata are decoded from the deepest (stratumCount-1) downwards; the first
// stratum that fails to decode stops the walk and the tally so far is scaled
// by 2^(i+1) to extrapolate the undecodable remainder.
func (se *StrataEstimator) EstimateDifference(se2 *StrataEstimator) uint64 {
	var count uint64
	// Iterate over all strata rather than a hard-coded 63, so the loop
	// stays correct if stratumCount ever changes.
	for i := stratumCount - 1; i >= 0; i-- {
		diff := se.Stratum[i].Subtract(&se2.Stratum[i])
		aWb, _, ok := diff.Decode()
		if !ok {
			return uint64(math.Pow(2.0, float64(i+1))) * (count + 1)
		}
		count += uint64(len(aWb))
	}
	return count
}
// EncodeEstimatorDB queries a PostgreSQL database and returns a StrataEstimator for the specified table and column
func EncodeEstimatorDB(db *sql.DB, table string, column string) (*StrataEstimator, error) {
rows, err := db.Query(fmt.Sprintf(query("strata_estimator"), table, column, cellsCount))
if err != nil {
return nil, err
}
defer rows.Close()
estimator := NewStrataEstimator()
for rows.Next() {
var (
strata, cell int
IDSum, HashSum uint64
Count int64
)
err := rows.Scan(&strata, &cell, &IDSum, &HashSum, &Count)
if err != nil {
return nil, err
}
idBitmap := ToBitmap(IDSum)
hashBitmap := ToBitmap(HashSum)
el := IBFCell{
IDSum: *idBitmap,
HashSum: *hashBitmap,
Count: Count,
}
estimator.Stratum[strata].Cells[cell] = el
}
return estimator, nil
} | strata_estimator.go | 0.738198 | 0.517327 | strata_estimator.go | starcoder |
package goel
import (
"context"
"github.com/pkg/errors"
"go/ast"
"reflect"
)
// sliceCompiledExpression evaluates a compiled slice expression x[l:h] or
// x[l:h:m] against its compiled sub-expressions.
type sliceCompiledExpression struct {
	nopExpression
	sliceExp *ast.SliceExpr
	// xexp is the sliced operand; lexp/hexp are the low/high bounds and
	// mexp is the capacity bound, used only when slice3 is true.
	xexp, hexp, lexp, mexp compiledExpression
	returnType reflect.Type
	slice3 bool
}
// ReturnType reports the static type produced by evaluating the slice
// expression (set at compile time to the operand's type).
func (sce *sliceCompiledExpression) ReturnType() (reflect.Type, error) {
	return sce.returnType, nil
}
// verifyIntExpression evaluates lexp, asserts the result is an int and
// checks that it lies within [min, max]; it returns the value, or -1 with a
// positioned error on any failure.
func verifyIntExpression(executionContext context.Context, lexp compiledExpression, min, max int) (int, error) {
	raw, err := lexp.Execute(executionContext)
	if err != nil {
		return -1, err
	}
	value, ok := raw.(int)
	if !ok {
		return -1, errors.Errorf("%d: type mismatch expected an int but found %T", lexp.Pos(), raw)
	}
	if value < min || value > max {
		return -1, errors.Errorf("%d: index out of range: %d", lexp.Pos(), value)
	}
	return value, nil
}
// Execute evaluates the slice expression: it resolves the operand, validates
// each bound against the operand's length (and capacity for the 3-index
// form), then returns the resulting sub-slice or sub-string.
func (sce *sliceCompiledExpression) Execute(executionContext context.Context) (interface{}, error) {
	x, err := sce.xexp.Execute(executionContext)
	if err != nil {
		return nil, err
	}
	xv := reflect.ValueOf(x)
	if xv.Kind() != reflect.Slice && xv.Kind() != reflect.String {
		return nil, errors.Errorf("%d: type mismatch expected a slice or string but found %T", sce.xexp.Pos(), x)
	}
	// The low bound must index into the operand; the high bound must be at
	// least the low bound and at most the length.
	l, err := verifyIntExpression(executionContext, sce.lexp, 0, xv.Len()-1)
	if err != nil {
		return nil, err
	}
	h, err := verifyIntExpression(executionContext, sce.hexp, l, xv.Len())
	if err != nil {
		return nil, err
	}
	if sce.slice3 {
		// 3-index slicing is only defined for slices; the max (capacity)
		// bound ranges from the high bound up to the operand's capacity.
		if xv.Kind() != reflect.Slice {
			return nil, errors.Errorf("%d: type mismatch expected a slice but found %T", sce.xexp.Pos(), x)
		}
		m, err := verifyIntExpression(executionContext, sce.mexp, h, xv.Cap())
		if err != nil {
			return nil, err
		}
		return xv.Slice3(l, h, m).Interface(), nil
	}
	return xv.Slice(l, h).Interface(), nil
}
// newSliceCompiledExpression assembles a sliceCompiledExpression from its
// already-compiled parts.
func newSliceCompiledExpression(sliceExp *ast.SliceExpr, returnType reflect.Type, xexp, hexp, lexp, mexp compiledExpression, slice3 bool) compiledExpression {
	// Keyed fields instead of a positional literal, so the construction
	// stays correct (or fails to compile) if the struct layout changes.
	return &sliceCompiledExpression{
		nopExpression: nopExpression{sliceExp},
		sliceExp:      sliceExp,
		xexp:          xexp,
		hexp:          hexp,
		lexp:          lexp,
		mexp:          mexp,
		returnType:    returnType,
		slice3:        slice3,
	}
}
// lengthCompiledExpression wraps an expression and yields len(x) when
// executed; evalSliceExpr uses it to supply the default high bound.
type lengthCompiledExpression struct {
	nopExpression
	xexp CompiledExpression
}
// ReturnType reports that a length expression always yields an int.
func (lce *lengthCompiledExpression) ReturnType() (reflect.Type, error) {
	return IntType, nil
}
// Execute evaluates the wrapped expression and returns its length. Only
// arrays, slices and strings are accepted.
func (lce *lengthCompiledExpression) Execute(executionContext context.Context) (interface{}, error) {
	s, err := lce.xexp.Execute(executionContext)
	if err != nil {
		return nil, err
	}
	vs := reflect.ValueOf(s)
	if vs.Kind() == reflect.Array || vs.Kind() == reflect.Slice || vs.Kind() == reflect.String {
		return vs.Len(), nil
	}
	// NOTE(review): the position is hard-coded to 0 here, unlike the other
	// errors in this file which report the offending expression's position —
	// looks like an oversight; confirm before relying on the offset.
	return nil, errors.Errorf("%d: expected an array, slice, or string found %T", 0, s)
}
// newLengthCompiledExpression wraps xexp in a len(x) expression.
func newLengthCompiledExpression(xexp compiledExpression) compiledExpression {
	// Keyed fields instead of a positional literal for robustness against
	// future struct changes.
	return &lengthCompiledExpression{nopExpression: nopExpression{}, xexp: xexp}
}
func evalSliceExpr(pctx context.Context, exp *ast.SliceExpr) compiledExpression {
xexp := compile(pctx, exp.X)
if xexp.Error() != nil {
return xexp
}
xt, _ := xexp.ReturnType()
if (xt.Kind() != reflect.Slice && xt.Kind() != reflect.String) || (exp.Slice3 && xt.Kind() == reflect.String) {
if exp.Slice3 {
return newErrorExpression(errors.Errorf("%d: type mismatch expected a slice but found %s", xexp.Pos(), xt))
}
return newErrorExpression(errors.Errorf("%d: type mismatch expected a slice or string but found %s", xexp.Pos(), xt))
}
returnType := xt
var hexp, lexp, mexp compiledExpression
if exp.Low != nil {
lexp = compile(pctx, exp.Low)
} else {
lexp = literal(exp, 0, IntType)
}
if exp.High != nil {
hexp = compile(pctx, exp.High)
} else {
hexp = newLengthCompiledExpression(xexp)
}
if exp.Slice3 {
if xt.Kind() == reflect.String {
return newErrorExpression(errors.Errorf("%d: type mismatch expected an array or slice but found %s", xexp.Pos(), xt.Name()))
}
mexp = compile(pctx, exp.Max)
}
return newSliceCompiledExpression(exp, returnType, xexp, hexp, lexp, mexp, exp.Slice3)
} | slice.go | 0.538498 | 0.404184 | slice.go | starcoder |
package match
import (
"bytes"
"errors"
"fmt"
"unicode"
"github.com/gdey/ppc/parse"
)
// AnyRune will match one rune
func AnyRune() parse.Parser {
return Rune(func(_ rune) bool { return true }, errors.New("unable to match letter"))
}
// Digit matches one unicode digit
// result is a rune
func Digit() parse.Parser {
return parse.Func(func(state parse.State) parse.State {
r, n, err := state.ReadNextRune()
if err != nil || !unicode.IsDigit(r) {
return state.WithError(
errors.New("unable to match letter"),
)
}
return state.WithResult(r, state.Index+int64(n))
})
}
// Letter matches one unicode letter
// result is a rune
func Letter() parse.Parser {
return Rune(unicode.IsLetter, errors.New("unable to match letter"))
}
// Letters matches one or more letters
// result is a string
func Letters() parse.Parser {
return parse.Map(
Runes(unicode.IsLetter, errors.New("failed to match any letters")),
func(r interface{}) interface{} {
result, ok := r.([]rune)
if !ok {
return r
}
return string(result)
},
)
}
// Rune matches as rune described by the provided function
// result is a rune
func Rune(fn func(rune) bool, errVal error) parse.Parser {
return parse.Func(func(state parse.State) parse.State {
r, n, err := state.ReadNextRune()
if err != nil || !fn(r) {
return state.WithError(errVal)
}
return state.WithResult(r, state.Index+int64(n))
})
}
func RuneN(n int) parse.Parser {
return parse.Func(func(state parse.State) parse.State {
var (
results = make([]rune, n)
cstate = state
)
for i := 0; i < n; i++ {
r, nn, err := cstate.ReadNextRune()
if err != nil {
return state.WithError(fmt.Errorf("unable to match %v runes", n))
}
results[i] = r
cstate.Index += int64(nn)
}
return state.WithResult(results, cstate.Index)
})
}
// Runes matches one or more runes described by the provided function
// results in an array of runes
func Runes(fn func(rune) bool, errVal error) parse.Parser {
return parse.Func(func(state parse.State) parse.State {
var (
runesRead []rune
// Make a copy of the state, that we will modify.
cstate = state
)
for {
r, n, err := cstate.ReadNextRune()
if err != nil || !fn(r) {
if len(runesRead) >= 1 {
return state.WithResult(
runesRead,
cstate.Index,
)
}
return state.WithError(errVal)
}
runesRead = append(runesRead, r)
cstate.Index += int64(n)
}
})
}
// Space matches one space
func Space() parse.Parser {
return Rune(unicode.IsSpace, errors.New("unable to match a space"))
}
// String matches a string exactly
func String(match string) parse.Parser {
matchBytes := []byte(match)
return parse.Func(func(state parse.State) parse.State {
buff, n, err := state.ReadNextBytes(len(matchBytes))
if err != nil {
return state.WithError(
fmt.Errorf("unable to match %v", match),
)
}
if !bytes.HasPrefix(matchBytes, buff) {
return state.WithError(
fmt.Errorf("unable to match %v", match),
)
}
return state.WithResult(
match,
state.Index+int64(n),
)
})
}
// StringInsensitive matches a string insensitive to the casing
func StringInsensitive(match string) parse.Parser {
matchBytes := bytes.ToUpper([]byte(match))
return parse.Func(func(state parse.State) parse.State {
buff, n, err := state.ReadNextBytes(len(matchBytes))
if err != nil {
return state.WithError(
fmt.Errorf("unable to match %v", match),
)
}
if !bytes.HasPrefix(matchBytes, bytes.ToUpper(buff)) {
return state.WithError(
fmt.Errorf("unable to match %v", match),
)
}
return state.WithResult(
match,
state.Index+int64(n),
)
})
}
// Until will apply the body parser until the end parser matches.
// State will be left at end parser
// result is []interface{}
func Until(end parse.Parser) func(parse.Parser) parse.Parser {
return func(body parse.Parser) parse.Parser {
return parse.Func(func(state parse.State) parse.State {
var results []interface{}
cstate := state
for {
// Check until condition
nextState := end.Run(cstate)
if !nextState.IsError {
// We need to stop.
return cstate.WithResult(results, cstate.Index)
}
// run the body parser.
cstate = body.Run(cstate)
if cstate.IsError {
// Return the error.
return nextState
}
results = append(results, cstate.Result)
}
})
}
} | parse/match/match.go | 0.627609 | 0.468061 | match.go | starcoder |
package factory
import (
"github.com/Yiling-J/carrier/examples/ent_recipe/ent"
"context"
)
type EntIngredientMutator struct {
Name string
_creator *ent.IngredientCreate
}
func (m *EntIngredientMutator) EntCreator() *ent.IngredientCreate {
return m._creator
}
type entIngredientMutation struct {
nameType int
nameFunc func(ctx context.Context, i *EntIngredientMutator, c int) error
beforeCreateFunc func(ctx context.Context, i *EntIngredientMutator) error
afterCreateFunc func(ctx context.Context, i *ent.Ingredient) error
}
type EntIngredientMetaFactory struct {
mutation entIngredientMutation
}
type entIngredientTrait struct {
mutation entIngredientMutation
updates []func(m *entIngredientMutation)
}
func EntIngredientTrait() *entIngredientTrait {
return &entIngredientTrait{}
}
func (*entIngredientMutation) beforeCreateMutateFunc(fn func(ctx context.Context, i *EntIngredientMutator) error) func(m *entIngredientMutation) {
return func(m *entIngredientMutation) {
m.beforeCreateFunc = fn
}
}
func (*entIngredientMutation) afterCreateMutateFunc(fn func(ctx context.Context, i *ent.Ingredient) error) func(m *entIngredientMutation) {
return func(m *entIngredientMutation) {
m.afterCreateFunc = fn
}
}
func (*entIngredientMutation) nameSequenceMutateFunc(fn func(ctx context.Context, i int) (string, error)) func(m *entIngredientMutation) {
return func(m *entIngredientMutation) {
m.nameType = TypeSequence
m.nameFunc = func(ctx context.Context, i *EntIngredientMutator, c int) error {
if fn == nil {
return nil
}
value, err := fn(ctx, c)
if err != nil {
return err
}
i.EntCreator().SetName(value)
i.Name = value
return nil
}
}
}
func (*entIngredientMutation) nameLazyMutateFunc(fn func(ctx context.Context, i *EntIngredientMutator) (string, error)) func(m *entIngredientMutation) {
return func(m *entIngredientMutation) {
m.nameType = TypeLazy
m.nameFunc = func(ctx context.Context, i *EntIngredientMutator, c int) error {
if fn == nil {
return nil
}
value, err := fn(ctx, i)
if err != nil {
return err
}
i.EntCreator().SetName(value)
i.Name = value
return nil
}
}
}
func (*entIngredientMutation) nameDefaultMutateFunc(v string) func(m *entIngredientMutation) {
return func(m *entIngredientMutation) {
m.nameType = TypeDefault
m.nameFunc = func(ctx context.Context, i *EntIngredientMutator, c int) error {
i.EntCreator().SetName(v)
i.Name = v
return nil
}
}
}
func (*entIngredientMutation) nameFactoryMutateFunc(fn func(ctx context.Context) (string, error)) func(m *entIngredientMutation) {
return func(m *entIngredientMutation) {
m.nameType = TypeFactory
m.nameFunc = func(ctx context.Context, i *EntIngredientMutator, c int) error {
if fn == nil {
return nil
}
value, err := fn(ctx)
if err != nil {
return err
}
i.EntCreator().SetName(value)
i.Name = value
return nil
}
}
}
// SetNameSequence register a function which accept a sequence counter and set return value to Name field
func (f *EntIngredientMetaFactory) SetNameSequence(fn func(ctx context.Context, i int) (string, error)) *EntIngredientMetaFactory {
f.mutation.nameSequenceMutateFunc(fn)(&f.mutation)
return f
}
// SetNameLazy register a function which accept the build struct and set return value to Name field
func (f *EntIngredientMetaFactory) SetNameLazy(fn func(ctx context.Context, i *EntIngredientMutator) (string, error)) *EntIngredientMetaFactory {
f.mutation.nameLazyMutateFunc(fn)(&f.mutation)
return f
}
// SetNameDefault assign a default value to Name field
func (f *EntIngredientMetaFactory) SetNameDefault(v string) *EntIngredientMetaFactory {
f.mutation.nameDefaultMutateFunc(v)(&f.mutation)
return f
}
// SetNameFactory register a factory function and assign return value to Name, you can also use related factory's Create/CreateV as input function here
func (f *EntIngredientMetaFactory) SetNameFactory(fn func(ctx context.Context) (string, error)) *EntIngredientMetaFactory {
f.mutation.nameFactoryMutateFunc(fn)(&f.mutation)
return f
}
// SetNameSequence register a function which accept a sequence counter and set return value to Name field
func (t *entIngredientTrait) SetNameSequence(fn func(ctx context.Context, i int) (string, error)) *entIngredientTrait {
t.updates = append(t.updates, t.mutation.nameSequenceMutateFunc(fn))
return t
}
// SetNameLazy register a function which accept the build struct and set return value to Name field
func (t *entIngredientTrait) SetNameLazy(fn func(ctx context.Context, i *EntIngredientMutator) (string, error)) *entIngredientTrait {
t.updates = append(t.updates, t.mutation.nameLazyMutateFunc(fn))
return t
}
// SetNameDefault assign a default value to Name field
func (t *entIngredientTrait) SetNameDefault(v string) *entIngredientTrait {
t.updates = append(t.updates, t.mutation.nameDefaultMutateFunc(v))
return t
}
// SetNameFactory register a factory function and assign return value to Name, you can also use related factory's Create/CreateV as input function here
func (t *entIngredientTrait) SetNameFactory(fn func(ctx context.Context) (string, error)) *entIngredientTrait {
t.updates = append(t.updates, t.mutation.nameFactoryMutateFunc(fn))
return t
}
// SetAfterCreateFunc register a function to be called after struct create
func (f *EntIngredientMetaFactory) SetAfterCreateFunc(fn func(ctx context.Context, i *ent.Ingredient) error) *EntIngredientMetaFactory {
f.mutation.afterCreateFunc = fn
return f
}
// SetBeforeCreateFunc register a function to be called before struct create
func (f *EntIngredientMetaFactory) SetBeforeCreateFunc(fn func(ctx context.Context, i *EntIngredientMutator) error) *EntIngredientMetaFactory {
f.mutation.beforeCreateFunc = fn
return f
}
// SetAfterCreateFunc register a function to be called after struct create
func (t *entIngredientTrait) SetAfterCreateFunc(fn func(ctx context.Context, i *ent.Ingredient) error) *entIngredientTrait {
t.updates = append(t.updates, t.mutation.afterCreateMutateFunc(fn))
return t
}
// SetBeforeCreateFunc register a function to be called before struct create
func (t *entIngredientTrait) SetBeforeCreateFunc(fn func(ctx context.Context, i *EntIngredientMutator) error) *entIngredientTrait {
t.updates = append(t.updates, t.mutation.beforeCreateMutateFunc(fn))
return t
}
// Build create a EntIngredientFactory from EntIngredientMetaFactory
func (f *EntIngredientMetaFactory) Build() *EntIngredientFactory {
return &EntIngredientFactory{meta: *f, counter: &Counter{}}
}
type EntIngredientFactory struct {
meta EntIngredientMetaFactory
counter *Counter
client *ent.Client
}
// SetName set the Name field
func (f *EntIngredientFactory) SetName(i string) *EntIngredientBuilder {
builder := &EntIngredientBuilder{mutation: f.meta.mutation, counter: f.counter, factory: f}
builder.SetName(i)
builder.client = f.client
return builder
}
// Create return a new *ent.Ingredient
func (f *EntIngredientFactory) Create(ctx context.Context) (*ent.Ingredient, error) {
builder := &EntIngredientBuilder{mutation: f.meta.mutation, counter: f.counter, factory: f}
builder.client = f.client
return builder.Create(ctx)
}
// CreateV return a new ent.Ingredient
func (f *EntIngredientFactory) CreateV(ctx context.Context) (ent.Ingredient, error) {
builder := &EntIngredientBuilder{mutation: f.meta.mutation, counter: f.counter, factory: f}
builder.client = f.client
return builder.CreateV(ctx)
}
// CreateBatch return a []*ent.Ingredient slice
func (f *EntIngredientFactory) CreateBatch(ctx context.Context, n int) ([]*ent.Ingredient, error) {
builder := &EntIngredientBuilder{mutation: f.meta.mutation, counter: f.counter, factory: f}
builder.client = f.client
return builder.CreateBatch(ctx, n)
}
// CreateBatchV return a []ent.Ingredient slice
func (f *EntIngredientFactory) CreateBatchV(ctx context.Context, n int) ([]ent.Ingredient, error) {
builder := &EntIngredientBuilder{mutation: f.meta.mutation, counter: f.counter, factory: f}
builder.client = f.client
return builder.CreateBatchV(ctx, n)
}
// Client set ent client to EntIngredientFactory
func (f *EntIngredientFactory) Client(c *ent.Client) *EntIngredientFactory {
f.client = c
return f
}
type EntIngredientBuilder struct {
factory *EntIngredientFactory
mutation entIngredientMutation
counter *Counter
nameOverride string
nameOverriden bool
client *ent.Client
}
func (b *EntIngredientBuilder) Client(c *ent.Client) *EntIngredientBuilder {
b.client = c
return b
}
// SetName set the Name field
func (b *EntIngredientBuilder) SetName(i string) *EntIngredientBuilder {
b.nameOverride = i
b.nameOverriden = true
return b
}
// CreateV return a new ent.Ingredient
func (b *EntIngredientBuilder) CreateV(ctx context.Context) (ent.Ingredient, error) {
var d ent.Ingredient
p, err := b.Create(ctx)
if err == nil {
d = *p
}
return d, err
}
// Create return a new *ent.Ingredient
func (b *EntIngredientBuilder) Create(ctx context.Context) (*ent.Ingredient, error) {
var preSlice = []func(ctx context.Context, i *EntIngredientMutator, c int) error{}
var lazySlice = []func(ctx context.Context, i *EntIngredientMutator, c int) error{}
var postSlice = []func(ctx context.Context, i *ent.Ingredient, c int) error{}
index := b.counter.Get()
_ = index
client := b.client
entBuilder := client.Ingredient.Create()
if b.nameOverriden {
preSlice = append(preSlice, func(ctx context.Context, i *EntIngredientMutator, c int) error {
value := b.nameOverride
i.EntCreator().SetName(value)
i.Name = value
return nil
})
} else {
switch b.mutation.nameType {
case TypeDefault:
preSlice = append(preSlice, b.mutation.nameFunc)
case TypeLazy:
lazySlice = append(lazySlice, b.mutation.nameFunc)
case TypeSequence:
preSlice = append(preSlice, b.mutation.nameFunc)
case TypeFactory:
preSlice = append(preSlice, b.mutation.nameFunc)
}
}
v := &EntIngredientMutator{}
v._creator = entBuilder
for _, f := range preSlice {
err := f(ctx, v, index)
if err != nil {
return nil, err
}
}
for _, f := range lazySlice {
err := f(ctx, v, index)
if err != nil {
return nil, err
}
}
if b.mutation.beforeCreateFunc != nil {
if err := b.mutation.beforeCreateFunc(ctx, v); err != nil {
return nil, err
}
}
new, err := entBuilder.Save(ctx)
if err != nil {
return nil, err
}
if b.mutation.afterCreateFunc != nil {
err := b.mutation.afterCreateFunc(ctx, new)
if err != nil {
return nil, err
}
}
for _, f := range postSlice {
err := f(ctx, new, index)
if err != nil {
return nil, err
}
}
return new, nil
}
// CreateBatch creates n ingredients and returns them as a []*ent.Ingredient;
// on failure it returns the items created so far alongside the error.
func (b *EntIngredientBuilder) CreateBatch(ctx context.Context, n int) ([]*ent.Ingredient, error) {
	var batch []*ent.Ingredient
	for created := 0; created < n; created++ {
		item, err := b.Create(ctx)
		if err != nil {
			return batch, err
		}
		batch = append(batch, item)
	}
	return batch, nil
}
func (b *EntIngredientBuilder) CreateBatchV(ctx context.Context, n int) ([]ent.Ingredient, error) {
var results []ent.Ingredient
for i := 0; i < n; i++ {
d, err := b.CreateV(ctx)
if err != nil {
return results, err
}
results = append(results, d)
}
return results, nil
} | examples/ent_recipe/carrier/factory/ent_ingredient.go | 0.529263 | 0.462352 | ent_ingredient.go | starcoder |
package assert
import (
"errors"
"fmt"
"reflect"
"regexp"
"strconv"
"strings"
)
// ErrInvalid is returned when a value is invalid for the requested operation.
var ErrInvalid = errors.New("value is invalid")

// ErrLess is returned when the left operand compares less than the right.
var ErrLess = errors.New("left is less than the right")

// ErrGreater is returned when the left operand compares greater than the right.
var ErrGreater = errors.New("left is greater than the right")
// CMP enumerates the compare operations accepted as the op argument to
// Compare: "<", "<=", ">" and ">=".
var CMP = struct {
	LT string
	LE string
	GT string
	GE string
}{
	"<",
	"<=",
	">",
	">=",
}
// IsZero reports whether v is the zero value of its type. Untyped nil, nil
// pointers/interfaces and empty strings/collections all count as zero;
// kinds with no natural zero test (structs, funcs, ...) report false.
func IsZero(v interface{}) bool {
	rv := reflect.ValueOf(v)
	switch rv.Kind() {
	case reflect.Invalid:
		return true
	case reflect.Bool:
		return !rv.Bool()
	case reflect.Ptr, reflect.Interface:
		return rv.IsNil()
	case reflect.Array, reflect.Slice, reflect.Map, reflect.String:
		return rv.Len() == 0
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return rv.Int() == 0
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
		return rv.Uint() == 0
	case reflect.Float32, reflect.Float64:
		return rv.Float() == 0
	}
	return false
}
// IsContains reports whether value occurs within array: as an element of a
// slice, a key of a map, or a substring of a string. For any other array
// type it falls back to a deep-equality check of the two arguments, and a
// nil pointer/interface array always reports false.
func IsContains(array interface{}, value interface{}) bool {
	av := reflect.ValueOf(array)
	if av.Kind() == reflect.Ptr || av.Kind() == reflect.Interface {
		if av.IsNil() {
			return false
		}
		av = av.Elem()
	}
	switch av.Kind() {
	case reflect.Invalid:
		return false
	case reflect.Slice:
		for i := 0; i < av.Len(); i++ {
			if reflect.DeepEqual(value, av.Index(i).Interface()) {
				return true
			}
		}
	case reflect.Map:
		for _, key := range av.MapKeys() {
			if reflect.DeepEqual(value, key.Interface()) {
				return true
			}
		}
	case reflect.String:
		if sv := reflect.ValueOf(value); sv.Kind() == reflect.String {
			return strings.Contains(av.String(), sv.String())
		}
	default:
		return reflect.DeepEqual(array, value)
	}
	return false
}
// IsMatch reports whether v (rendered via fmt.Sprint) contains any match of
// pattern r. r may be a *regexp.Regexp, or any value whose fmt.Sprint form
// is a valid pattern (an invalid pattern panics via MustCompile).
//	IsMatch(regexp.MustCompile(`v\d+`), "v100")
//	IsMatch(`v\d+`, "v100")
//	IsMatch(`\d+\.\d+`, 100.1)
func IsMatch(r interface{}, v interface{}) bool {
	re, ok := r.(*regexp.Regexp)
	if !ok {
		re = regexp.MustCompile(fmt.Sprint(r))
	}
	return re.MatchString(fmt.Sprint(v))
}
// Length returns the number of elements in v: Len() for arrays, slices,
// maps and strings; 0 for nil or invalid values; otherwise the length of
// v's Go-syntax representation (fmt %#v).
func Length(v interface{}) int {
	rv := reflect.ValueOf(v)
	if rv.Kind() == reflect.Ptr || rv.Kind() == reflect.Interface {
		if rv.IsNil() {
			return 0
		}
		rv = rv.Elem()
	}
	switch rv.Kind() {
	case reflect.Invalid, reflect.Ptr, reflect.Interface:
		return 0
	case reflect.Array, reflect.Slice, reflect.Map, reflect.String:
		return rv.Len()
	}
	return len(fmt.Sprintf("%#v", v))
}
// IsLt reports whether x is less than y; invalid values or operands yield false.
func IsLt(x, y interface{}) bool {
	return Compare(x, y, CMP.LT) == nil
}

// IsLe reports whether x is less than or equal to y; invalid values or operands yield false.
func IsLe(x, y interface{}) bool {
	return Compare(x, y, CMP.LE) == nil
}

// IsGt reports whether x is greater than y; invalid values or operands yield false.
func IsGt(x, y interface{}) bool {
	return Compare(x, y, CMP.GT) == nil
}

// IsGe reports whether x is greater than or equal to y; invalid values or operands yield false.
func IsGe(x, y interface{}) bool {
	return Compare(x, y, CMP.GE) == nil
}
// Compare compares x and y with operation op (one of the CMP values).
// It returns nil when the comparison holds, ErrInvalid for an invalid
// operation or incomparable operands, and ErrLess/ErrGreater when it fails.
//	Compare(1, 2, "<")                               // number compare -> nil (true)
//	Compare("a", "a", ">=")                          // string compare -> nil (true)
//	Compare([]string{"a", "b"}, []string{"a"}, "<")  // slice len compare -> error (false)
func Compare(x, y interface{}, op string) error {
	// Reject any operator outside the supported set up front.
	if !IsContains([]string{CMP.LT, CMP.LE, CMP.GT, CMP.GE}, op) {
		return ErrInvalid
	}
	vv := reflect.ValueOf(x)
	// Dereference pointers/interfaces; nil cannot be ordered.
	if vv.Kind() == reflect.Ptr || vv.Kind() == reflect.Interface {
		if vv.IsNil() {
			return ErrInvalid
		}
		vv = vv.Elem()
	}
	// c encodes the result: negative => x < y, positive => x > y, 0 => equal.
	var c float64
	switch vv.Kind() {
	case reflect.Invalid:
		return ErrInvalid
	case reflect.String:
		// Strings compare lexically; the other operand must be a string too.
		yy := reflect.ValueOf(y)
		switch yy.Kind() {
		case reflect.String:
			c = float64(strings.Compare(vv.String(), yy.String()))
		default:
			return ErrInvalid
		}
	case reflect.Slice, reflect.Map, reflect.Array:
		// Collections compare by length only, not by contents.
		yy := reflect.ValueOf(y)
		switch yy.Kind() {
		case reflect.Slice, reflect.Map, reflect.Array:
			c = float64(vv.Len() - yy.Len())
		default:
			return ErrInvalid
		}
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		yy, err := ToInt64(y)
		if err != nil {
			return ErrInvalid
		}
		// NOTE(review): the int64 subtraction can overflow for operands of
		// opposite sign near the extremes — confirm acceptable for callers.
		c = float64(vv.Int() - yy)
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
		yy, err := ToUint64(y)
		if err != nil {
			return ErrInvalid
		}
		c = float64(vv.Uint()) - float64(yy)
	case reflect.Float32, reflect.Float64:
		yy, err := ToFloat64(y)
		if err != nil {
			return ErrInvalid
		}
		c = float64(vv.Float() - yy)
	default:
		return ErrInvalid
	}
	// Translate the sign of c plus the requested operator into the result.
	switch {
	case c < 0:
		switch op {
		case CMP.LT, CMP.LE:
			return nil
		default:
			return ErrLess
		}
	case c > 0:
		switch op {
		case CMP.GT, CMP.GE:
			return nil
		default:
			return ErrGreater
		}
	default:
		// Equal: strict < fails as "greater", strict > fails as "less".
		switch op {
		case CMP.LT:
			return ErrGreater
		case CMP.GT:
			return ErrLess
		default:
			return nil
		}
	}
}
// ToInt64 converts ints, uints, floats (truncated) and base-10 strings to
// int64; any other kind or an unparseable string yields ErrInvalid.
func ToInt64(v interface{}) (int64, error) {
	rv := reflect.ValueOf(v)
	switch rv.Kind() {
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return rv.Int(), nil
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
		return int64(rv.Uint()), nil
	case reflect.Float32, reflect.Float64:
		return int64(rv.Float()), nil
	case reflect.String:
		n, err := strconv.ParseInt(rv.String(), 10, 64)
		if err != nil {
			return 0, ErrInvalid
		}
		return n, nil
	}
	return 0, ErrInvalid
}
// ToUint64 converts ints, uints, floats (truncated) and base-10 strings to
// uint64; any other kind or an unparseable string yields ErrInvalid.
func ToUint64(v interface{}) (uint64, error) {
	rv := reflect.ValueOf(v)
	switch rv.Kind() {
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return uint64(rv.Int()), nil
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
		return rv.Uint(), nil
	case reflect.Float32, reflect.Float64:
		return uint64(rv.Float()), nil
	case reflect.String:
		n, err := strconv.ParseUint(rv.String(), 10, 64)
		if err != nil {
			return 0, ErrInvalid
		}
		return n, nil
	}
	return 0, ErrInvalid
}
// ToFloat64 converts ints, uints, floats and numeric strings to float64;
// any other kind or an unparseable string yields ErrInvalid.
func ToFloat64(v interface{}) (float64, error) {
	rv := reflect.ValueOf(v)
	switch rv.Kind() {
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return float64(rv.Int()), nil
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
		return float64(rv.Uint()), nil
	case reflect.Float32, reflect.Float64:
		return rv.Float(), nil
	case reflect.String:
		f, err := strconv.ParseFloat(rv.String(), 64)
		if err != nil {
			return 0, ErrInvalid
		}
		return f, nil
	}
	return 0, ErrInvalid
}
// If returns x when c is true and y otherwise — a ternary substitute:
//	z = If(c, x, y)
// behaves like
//	z = c ? x : y
func If(c bool, x, y interface{}) interface{} {
	if !c {
		return y
	}
	return x
}
package main
/*
In any comparison, the first operand must be assignable to the type of the second operand, or vice versa.
The equality operators == and != apply to operands that are comparable. The ordering operators <, <=, >, and >= apply
to operands that are ordered. These terms and the result of the comparisons are defined as follows:
1. Boolean values are comparable. Two boolean values are equal if they are either both true or both false.
2. Integer values are comparable and ordered, in the usual way.
3. Floating-point values are comparable and ordered, as defined by the IEEE-754 standard.
4. Complex values are comparable. Two complex values u and v are equal if both real(u) == real(v) and imag(u) == imag(v).
5. String values are comparable and ordered, lexically byte-wise.
6. Pointer values are comparable. Two pointer values are equal if they point to the same variable or if both have
value nil. Pointers to distinct zero-size variables may or may not be equal.
7. Channel values are comparable. Two channel values are equal if they were created by the same call to make or if both have value nil.
8. Interface values are comparable. Two interface values are equal if they have identical dynamic types and
equal dynamic values or if both have value nil.
9. A value x of non-interface type X and a value t of interface type T are comparable when values of type X are
comparable and X implements T. They are equal if t's dynamic type is identical to X and t's dynamic value is equal to x.
10. Struct values are comparable if all their fields are comparable. Two struct values are equal if their
corresponding non-blank fields are equal.
11. Array values are comparable if values of the array element type are comparable. Two array values are equal if
their corresponding elements are equal.
12. A comparison of two interface values with identical dynamic types causes a run-time panic if values of
that type are not comparable. This behavior applies not only to direct interface value comparisons but also
when comparing arrays of interface values or structs with interface-valued fields.
Slice, map, and function values are not comparable. However, as a special case, a slice, map, or function value may be
compared to the predeclared identifier nil. Comparison of pointer, channel, and interface values to nil is also allowed
and follows from the general rules above.
*/ | go/projects/effective-go/comparision/doc.go | 0.763484 | 0.869991 | doc.go | starcoder |
package proximityhash
import (
"github.com/mmcloughlin/geohash"
)
// checkCircleIntersectsRectangleGeometrically reports whether a circle of
// the given radius centred at center reaches any of the four edges of the
// bounding box rect.
func checkCircleIntersectsRectangleGeometrically(radius float64, center point, rect geohash.Box) bool {
	nw := point{rect.MaxLat, rect.MinLng}
	ne := point{rect.MaxLat, rect.MaxLng}
	se := point{rect.MinLat, rect.MaxLng}
	sw := point{rect.MinLat, rect.MinLng}
	// Test the edges in the same order as the original boolean chain:
	// top, bottom, left, right.
	edges := [][2]point{{nw, ne}, {sw, se}, {sw, nw}, {se, ne}}
	for _, edge := range edges {
		if radius >= distToLine(center, edge[0], edge[1]) {
			return true
		}
	}
	return false
}
// checkInsideRadiusSimple classifies rect against the circle by testing only
// its four corners: full is true when every corner lies within radius of
// center, partial when at least one (but not all) corners do.
func checkInsideRadiusSimple(radius float64, center point, rect geohash.Box) (partial bool, full bool) {
	corners := []point{
		{rect.MaxLat, rect.MinLng}, // NW
		{rect.MaxLat, rect.MaxLng}, // NE
		{rect.MinLat, rect.MaxLng}, // SE
		{rect.MinLat, rect.MinLng}, // SW
	}
	inside := 0
	for _, corner := range corners {
		if radius >= haversinDist(center, corner) {
			inside++
		}
	}
	switch {
	case inside == len(corners):
		return false, true
	case inside > 0:
		return true, false
	default:
		return false, false
	}
}
// isGeohashInsideRadius reports whether the cell identified by hash lies
// partially or fully within radius metres of center. The cheap corner test
// runs first; the edge-intersection test is only computed when the corner
// test finds no overlap at all.
func isGeohashInsideRadius(radius float64, center point, hash string) (partial bool, full bool) {
	rect := geohash.BoundingBox(hash)
	if partial, full = checkInsideRadiusSimple(radius, center, rect); partial || full {
		return partial, full
	}
	return checkCircleIntersectsRectangleGeometrically(radius, center, rect), false
}
// filterAlreadyChecked returns the entries of toFilter that are not yet keys
// in alreadyChecked, marking each returned entry as checked in the map as a
// side effect. The result is nil when every entry was already checked.
func filterAlreadyChecked(toFilter []string, alreadyChecked map[string]bool) []string {
	var unseen []string
	for _, candidate := range toFilter {
		if alreadyChecked[candidate] {
			continue
		}
		alreadyChecked[candidate] = true
		unseen = append(unseen, candidate)
	}
	return unseen
}
// The FindGeohashesWithinRadius function finds all geohashes within the given radius (in meters) of the given (lat, lng)
// coordinate point. The geohashes will have the given precision. Geohashes which are 100% inside the radius will be in
// the fullMatches return value. Geohashes which lie partially but not fully within the radius will be in the
// partialMatches return value.
//
// Implementation: a breadth-first flood fill over the geohash grid. It starts
// from the cell containing the centre point and only expands to the
// neighbours of cells that themselves overlap the circle, so cells entirely
// outside the radius terminate the search in that direction.
func FindGeohashesWithinRadius(lat, lng, radius float64, precision uint) (fullMatches, partialMatches []string) {
	alreadyChecked := make(map[string]bool)
	queue := newStringQueue()
	firstHash := geohash.EncodeWithPrecision(lat, lng, precision)
	center := point{lat, lng}
	// Seed the search with the cell containing the centre point.
	alreadyChecked[firstHash] = true
	queue.enqueue(firstHash)
	for !queue.isEmpty() {
		curHash, _ := queue.dequeue()
		prt, fll := isGeohashInsideRadius(radius, center, curHash)
		if fll {
			fullMatches = append(fullMatches, curHash)
		} else if prt {
			partialMatches = append(partialMatches, curHash)
		}
		// Only overlapping cells can have overlapping neighbours beyond
		// those already visited; non-overlapping cells end this branch.
		if prt || fll {
			neighbors := geohash.Neighbors(curHash)
			unchecked := filterAlreadyChecked(neighbors, alreadyChecked)
			queue.enqueue(unchecked...)
		}
	}
	// Fallback: when the radius is so small that even the starting cell did
	// not register as overlapping, report that cell as a partial match.
	if len(fullMatches) < 1 && len(partialMatches) < 1 {
		partialMatches = append(partialMatches, firstHash)
	}
	return fullMatches, partialMatches
}
package sort
/*
Source: https://en.wikipedia.org/wiki/Merge_sort
- Top-down implementation using lists
*/
//MergeSort sorts a slice of ints using top-down merge sort and returns the
//sorted result. The input slice is not modified (for inputs of length <= 1
//the input slice itself is returned).
func MergeSort(list []int) []int {
	//Base Case: a list of zero or one elements is already sorted.
	if len(list) <= 1 {
		return list
	}
	//Recursive Case: divide the list into two halves, sort each half
	//recursively, then merge the two sorted halves.
	middle := len(list) / 2
	left := MergeSort(list[:middle])
	right := MergeSort(list[middle:])
	return merge(left, right)
}

//merge combines two sorted slices into a single sorted slice.
func merge(left, right []int) []int {
	//Preallocate the result to avoid repeated growth while appending.
	result := make([]int, 0, len(left)+len(right))
	for len(left) > 0 && len(right) > 0 {
		if left[0] <= right[0] {
			result = append(result, left[0])
			left = left[1:]
		} else {
			result = append(result, right[0])
			right = right[1:]
		}
	}
	//At most one of the two slices still has elements; appending an empty
	//slice is a no-op, so no guard is needed.
	result = append(result, left...)
	result = append(result, right...)
	return result
}
//MergeSortStrings sorts a slice of strings using top-down merge sort and
//returns the sorted result. The input slice is not modified (for inputs of
//length <= 1 the input slice itself is returned).
func MergeSortStrings(list []string) []string {
	//Base Case: a list of zero or one elements is already sorted.
	if len(list) <= 1 {
		return list
	}
	//Recursive Case: divide the list into two halves, sort each half
	//recursively, then merge the two sorted halves.
	middle := len(list) / 2
	left := MergeSortStrings(list[:middle])
	right := MergeSortStrings(list[middle:])
	return mergeStrings(left, right)
}

//mergeStrings combines two sorted string slices into a single sorted slice.
func mergeStrings(left, right []string) []string {
	//Preallocate the result to avoid repeated growth while appending.
	result := make([]string, 0, len(left)+len(right))
	for len(left) > 0 && len(right) > 0 {
		if left[0] <= right[0] {
			result = append(result, left[0])
			left = left[1:]
		} else {
			result = append(result, right[0])
			right = right[1:]
		}
	}
	//At most one of the two slices still has elements; appending an empty
	//slice is a no-op, so no guard is needed.
	result = append(result, left...)
	result = append(result, right...)
	return result
}
package mki3d
/* data structures */
// Vector3dType is a 3D vector in MKI3D - it represents both coordinates and RGB colors.
type Vector3dType [3]float32
// Matrix3dType is a 3x3 matrix in MKI3D - it represents a linear transformation.
// (Stored as three Vector3dType triples; row vs. column convention is not
// visible here - confirm against the editor code.)
type Matrix3dType [3]Vector3dType
// EndpointType contains a Position, a Color, and a Set index.
type EndpointType struct {
	Position Vector3dType `json:"position"`
	Color Vector3dType `json:"color"`
	Set int `json:"set"`
}
// SegmentType is a segment consisting of two endpoints.
type SegmentType [2]EndpointType
// SegmentsType is a sequence of segments.
type SegmentsType []SegmentType
// TriangleType is a triangle consisting of three endpoints.
type TriangleType [3]EndpointType
// TrianglesType is a sequence of triangles.
type TrianglesType []TriangleType
// ModelType is a model consisting of a sequence of segments and a sequence of triangles.
type ModelType struct {
	Segments SegmentsType `json:"segments"`
	Triangles TrianglesType `json:"triangles"`
}
// ViewType contains view parameters from the MKI3D editor.
type ViewType struct {
	FocusPoint Vector3dType `json:"focusPoint"`
	RotationMatrix Matrix3dType `json:"rotationMatrix"`
	Scale float32 `json:"scale"`
	ScreenShift Vector3dType `json:"screenShift"`
	// more fields
}
// ProjectionType contains camera parameters from the MKI3D editor.
type ProjectionType struct {
	ZNear float32 `json:"zNear"`
	ZFar float32 `json:"zFar"`
	ZoomY float32 `json:"zoomY"`
}
// CursorType is the state of the cursor.
type CursorType struct {
	Position Vector3dType `json:"position"`
	Marker1 *EndpointType `json:"marker1"`
	Marker2 *EndpointType `json:"marker2"`
	Color Vector3dType `json:"color"`
	Step float32 `json:"step"`
}
// LightType describes the light by:
// Vector - direction of the diffuse light, and
// AmbientFraction - the fraction of light that is ambient.
type LightType struct {
	Vector Vector3dType `json:"vector"`
	AmbientFraction float32 `json:"ambientFraction"`
}
// SetType holds the current set index.
type SetType struct {
	Current int `json:"current"`
}
// Mki3dType is the type of MKI3D data in Go.
type Mki3dType struct {
	Model ModelType `json:"model"`
	View ViewType `json:"view"`
	Projection ProjectionType `json:"projection"`
	BackgroundColor Vector3dType `json:"backgroundColor"`
	Cursor CursorType `json:"cursor"`
	Light LightType `json:"light"`
	ClipMaxVector Vector3dType `json:"clipMaxVector"`
	ClipMinVector Vector3dType `json:"clipMinVector"`
	Set SetType `json:"set"`
	Texture *TextureType `json:"texture"`
}
package basematchers
import (
"fmt"
"math"
"strings"
)
// The following block enumerates numeric comparator prefixes.
const (
	LessThanOrEqualTo    = "<="
	GreaterThanOrEqualTo = ">="
	LessThan             = "<"
	GreaterThan          = ">"
)

// intComparator maps a comparator prefix onto a predicate over two int64
// operands. The empty prefix means equality; any other string is an error.
func intComparator(cmp string) (func(a, b int64) bool, error) {
	switch cmp {
	case "":
		return func(a, b int64) bool { return a == b }, nil
	case LessThan:
		return func(a, b int64) bool { return a < b }, nil
	case LessThanOrEqualTo:
		return func(a, b int64) bool { return a <= b }, nil
	case GreaterThan:
		return func(a, b int64) bool { return a > b }, nil
	case GreaterThanOrEqualTo:
		return func(a, b int64) bool { return a >= b }, nil
	}
	return nil, fmt.Errorf("unrecognized comparator: %s", cmp)
}

// uintComparator maps a comparator prefix onto a predicate over two uint64
// operands. The empty prefix means equality; any other string is an error.
func uintComparator(cmp string) (func(a, b uint64) bool, error) {
	switch cmp {
	case "":
		return func(a, b uint64) bool { return a == b }, nil
	case LessThan:
		return func(a, b uint64) bool { return a < b }, nil
	case LessThanOrEqualTo:
		return func(a, b uint64) bool { return a <= b }, nil
	case GreaterThan:
		return func(a, b uint64) bool { return a > b }, nil
	case GreaterThanOrEqualTo:
		return func(a, b uint64) bool { return a >= b }, nil
	}
	return nil, fmt.Errorf("unrecognized comparator: %s", cmp)
}

// floatTol is the absolute tolerance used when testing floats for equality.
const (
	floatTol = 1e-4
)

// floatComparator maps a comparator prefix onto a predicate over two float64
// operands. The empty prefix means equality within floatTol; any other
// string is an error.
func floatComparator(cmp string) (func(a, b float64) bool, error) {
	switch cmp {
	case "":
		return func(a, b float64) bool { return math.Abs(a-b) < floatTol }, nil
	case LessThan:
		return func(a, b float64) bool { return a < b }, nil
	case LessThanOrEqualTo:
		return func(a, b float64) bool { return a <= b }, nil
	case GreaterThan:
		return func(a, b float64) bool { return a > b }, nil
	case GreaterThanOrEqualTo:
		return func(a, b float64) bool { return a >= b }, nil
	}
	return nil, fmt.Errorf("unrecognized comparator: %s", cmp)
}

// parseNumericPrefix splits a comparator prefix off the front of value and
// returns it together with the remainder, trimmed of surrounding whitespace.
// When value carries no recognized prefix, the empty prefix and the original
// value are returned.
func parseNumericPrefix(value string) (prefix string, trimmedValue string) {
	// Two-character prefixes must be tested before the one-character ones:
	// "<" is a prefix of "<=", so checking it first would shadow "<=".
	for _, candidate := range []string{LessThanOrEqualTo, GreaterThanOrEqualTo, LessThan, GreaterThan} {
		if strings.HasPrefix(value, candidate) {
			return candidate, strings.TrimSpace(value[len(candidate):])
		}
	}
	return "", value
}
package stream
import (
"fmt"
"reflect"
"github.com/google/gapid/core/data/protoutil"
)
// Common data types. For signed types the sign bit is not included in the
// component bit counts below; DataType.Bits() adds one for it.
var (
	// U1 represents a 1-bit unsigned integer.
	U1 = DataType{Signed: false, Kind: &DataType_Integer{&Integer{1}}}
	// U2 represents a 2-bit unsigned integer.
	U2 = DataType{Signed: false, Kind: &DataType_Integer{&Integer{2}}}
	// U4 represents a 4-bit unsigned integer.
	U4 = DataType{Signed: false, Kind: &DataType_Integer{&Integer{4}}}
	// U5 represents a 5-bit unsigned integer.
	U5 = DataType{Signed: false, Kind: &DataType_Integer{&Integer{5}}}
	// U6 represents a 6-bit unsigned integer.
	U6 = DataType{Signed: false, Kind: &DataType_Integer{&Integer{6}}}
	// U8 represents a 8-bit unsigned integer.
	U8 = DataType{Signed: false, Kind: &DataType_Integer{&Integer{8}}}
	// U9 represents a 9-bit unsigned integer.
	U9 = DataType{Signed: false, Kind: &DataType_Integer{&Integer{9}}}
	// U10 represents a 10-bit unsigned integer.
	U10 = DataType{Signed: false, Kind: &DataType_Integer{&Integer{10}}}
	// U11 represents a 11-bit unsigned integer.
	U11 = DataType{Signed: false, Kind: &DataType_Integer{&Integer{11}}}
	// U16 represents a 16-bit unsigned integer.
	U16 = DataType{Signed: false, Kind: &DataType_Integer{&Integer{16}}}
	// U24 represents a 24-bit unsigned integer.
	U24 = DataType{Signed: false, Kind: &DataType_Integer{&Integer{24}}}
	// U32 represents a 32-bit unsigned integer.
	U32 = DataType{Signed: false, Kind: &DataType_Integer{&Integer{32}}}
	// U64 represents a 64-bit unsigned integer.
	U64 = DataType{Signed: false, Kind: &DataType_Integer{&Integer{64}}}
	// S2 represents a 2-bit signed integer (1 value bit + sign bit).
	S2 = DataType{Signed: true, Kind: &DataType_Integer{&Integer{1}}}
	// S8 represents a 8-bit signed integer (7 value bits + sign bit).
	S8 = DataType{Signed: true, Kind: &DataType_Integer{&Integer{7}}}
	// S10 represents a 10-bit signed integer.
	S10 = DataType{Signed: true, Kind: &DataType_Integer{&Integer{9}}}
	// S11 represents a 11-bit signed integer.
	S11 = DataType{Signed: true, Kind: &DataType_Integer{&Integer{10}}}
	// S16 represents a 16-bit signed integer.
	S16 = DataType{Signed: true, Kind: &DataType_Integer{&Integer{15}}}
	// S32 represents a 32-bit signed integer.
	S32 = DataType{Signed: true, Kind: &DataType_Integer{&Integer{31}}}
	// S64 represents a 64-bit signed integer.
	S64 = DataType{Signed: true, Kind: &DataType_Integer{&Integer{63}}}
	// F10 represents a 10-bit unsigned floating-point number.
	F10 = DataType{Signed: false, Kind: &DataType_Float{&Float{5, 5}}}
	// F11 represents a 11-bit unsigned floating-point number.
	F11 = DataType{Signed: false, Kind: &DataType_Float{&Float{5, 6}}}
	// F16 represents a 16-bit signed, floating-point number.
	F16 = DataType{Signed: true, Kind: &DataType_Float{&Float{5, 10}}}
	// F32 represents a 32-bit signed, floating-point number.
	// NOTE(review): the 7/24 exponent/mantissa split differs from IEEE-754
	// binary32 (8 exponent / 23 mantissa); the total still sums to 32 with
	// the sign bit - confirm the split is intentional.
	F32 = DataType{Signed: true, Kind: &DataType_Float{&Float{7, 24}}}
	// F64 represents a 64-bit signed, floating-point number.
	// NOTE(review): 10/53 likewise differs from IEEE-754 binary64 (11/52);
	// confirm intended.
	F64 = DataType{Signed: true, Kind: &DataType_Float{&Float{10, 53}}}
	// S16_16 represents a 16.16 bit signed, fixed-point number.
	S16_16 = DataType{Signed: true, Kind: &DataType_Fixed{&Fixed{15, 16}}}
)
// Format prints the DataType to f (fmt.Formatter implementation).
// Well-known float layouts print as their short names (F10, F11, F16, F32,
// F64); other floats print as "F:<s|u>:<exponent>:<mantissa>". Integers
// print as U<n>/S<n> and fixed-point values as U<i>.<f>/S<i>.<f>, where the
// printed widths for signed types include the implicit sign bit (hence the
// "+1" on the stored bit counts).
func (t DataType) Format(f fmt.State, r rune) {
	switch {
	case t.Is(F10):
		fmt.Fprintf(f, "F10")
	case t.Is(F11):
		fmt.Fprintf(f, "F11")
	case t.Is(F16):
		fmt.Fprintf(f, "F16")
	case t.Is(F32):
		fmt.Fprintf(f, "F32")
	case t.Is(F64):
		fmt.Fprintf(f, "F64")
	case t.IsFloat() && t.Signed:
		fmt.Fprintf(f, "F:s:%d:%d", t.GetFloat().ExponentBits, t.GetFloat().MantissaBits)
	case t.IsFloat() && !t.Signed:
		fmt.Fprintf(f, "F:u:%d:%d", t.GetFloat().ExponentBits, t.GetFloat().MantissaBits)
	case t.IsInteger() && t.Signed:
		fmt.Fprintf(f, "S%d", t.GetInteger().Bits+1)
	case t.IsInteger() && !t.Signed:
		fmt.Fprintf(f, "U%d", t.GetInteger().Bits)
	case t.IsFixed() && t.Signed:
		fmt.Fprintf(f, "S%d.%d", t.GetFixed().IntegerBits+1, t.GetFixed().FractionalBits)
	case t.IsFixed() && !t.Signed:
		fmt.Fprintf(f, "U%d.%d", t.GetFixed().IntegerBits, t.GetFixed().FractionalBits)
	default:
		fmt.Fprintf(f, "<unknown kind %T>", t.Kind)
	}
}
// Bits returns the total size of the data type in bits: the sum of the
// component bit counts of the underlying kind, plus one sign bit when the
// type is signed. It panics if the kind is not Integer, Float or Fixed.
func (t *DataType) Bits() uint32 {
	bits := uint32(0)
	switch k := protoutil.OneOf(t.Kind).(type) {
	case *Integer:
		bits = k.Bits
	case *Float:
		bits = k.ExponentBits + k.MantissaBits
	case *Fixed:
		bits = k.IntegerBits + k.FractionalBits
	default:
		panic(fmt.Errorf("Unknown data type kind %T", k))
	}
	// The sign bit is stored implicitly and accounted for here.
	if t.Signed {
		bits++
	}
	return bits
}
// IsInteger returns true if t is an integer.
func (t *DataType) IsInteger() bool { return t.GetInteger() != nil }
// IsFloat returns true if t is a float.
func (t *DataType) IsFloat() bool { return t.GetFloat() != nil }
// IsFixed returns true if the DataType is a fixed point number.
func (t *DataType) IsFixed() bool { return t.GetFixed() != nil }
// Is returns true if t is structurally equivalent to o (deep equality over
// signedness and kind).
func (t DataType) Is(o DataType) bool { return reflect.DeepEqual(t, o) }
package metrics
import (
"bytes"
"fmt"
"math/rand"
"runtime"
"strconv"
"sync"
"time"
)
// Client is a statsd metrics client. The stat argument of each submission
// method may be either a plain string name or a Metric built via Metric()
// (see submit). Every submission method takes a sample rate in the range
// 0.0 to 1.0; calls are randomly dropped so that roughly rate*100% of them
// are actually sent.
type Client interface {
	// Close closes the connection and cleans up.
	Close() error
	// Inc increments a statsd count type.
	// stat is a string name for the metric.
	// value is the integer value.
	// rate is the sample rate (0.0 to 1.0).
	Inc(stat interface{}, value int64, rate float32) error
	// Dec decrements a statsd count type.
	// stat is a string name for the metric.
	// value is the integer value.
	// rate is the sample rate (0.0 to 1.0).
	Dec(stat interface{}, value int64, rate float32) error
	// Gauge submits/updates a statsd gauge type.
	// stat is a string name for the metric.
	// value is the integer value.
	// rate is the sample rate (0.0 to 1.0).
	Gauge(stat interface{}, value int64, rate float32) error
	// GaugeDelta submits a delta to a statsd gauge.
	// stat is the string name for the metric.
	// value is the (positive or negative) change.
	// rate is the sample rate (0.0 to 1.0).
	GaugeDelta(stat interface{}, value int64, rate float32) error
	// Timing submits a statsd timing type.
	// stat is a string name for the metric.
	// delta is the integer value (milliseconds; see TimingMs).
	// rate is the sample rate (0.0 to 1.0).
	Timing(stat interface{}, delta int64, rate float32) error
	// TimingMs emits a duration as a timing metric, in milliseconds.
	TimingMs(stat interface{}, tm time.Duration, rate float32) error
	// UniqueString submits a statsd set type, where value is the unique string.
	// rate is the sample rate (0.0 to 1.0).
	UniqueString(stat interface{}, value string, rate float32) error
	// UniqueInt64 submits a statsd set type with an integer member.
	// rate is the sample rate (0.0 to 1.0).
	UniqueInt64(stat interface{}, value int64, rate float32) error
	// ReportRuntimeMetrics reports Go runtime metrics (goroutine count,
	// memory allocator gauges, GC pause timings) under the given prefix.
	ReportRuntimeMetrics(prefix string, rate float32) error
	// SetPrefix sets/updates the statsd client prefix.
	SetPrefix(prefix string)
	// Metric builds a reusable metric name from the client prefix and the
	// given path parts.
	Metric(p ...string) Metric
}
// Options allows tuning the client for efficiency.
type Options struct {
	// UseBuffering turns on buffering of metrics, which reduces the number
	// of UDP packets sent.
	UseBuffering bool
	// FlushBytes triggers a packet send whenever the accumulated buffer
	// reaches this size; the default is 1440 (Ethernet MTU minus maximum
	// UDP header size), and values below 128 are rejected.
	FlushBytes int
	// FlushPeriod triggers periodic flushes during inactivity to avoid
	// metric loss; the default is 100ms.
	FlushPeriod time.Duration
}
// NewWithOptions returns a new statsd Client configured by opts, or an error
// if the UDP sender cannot be created or opts are invalid.
// addr is a string of the format "hostname:port"; prefix is prepended to
// metric names ("" for none). When opts.UseBuffering is set, metrics are
// batched into packets of up to FlushBytes bytes and flushed at least every
// FlushPeriod.
func NewWithOptions(addr, prefix string, opts Options) (Client, error) {
	s, err := newUDPSender(addr)
	if err != nil {
		return nil, err
	}
	if opts.UseBuffering {
		if opts.FlushBytes == 0 {
			opts.FlushBytes = 1440 // 1500(Ethernet MTU) - 60(Max UDP header size)
		}
		if opts.FlushBytes < 128 {
			return nil, fmt.Errorf("Too small flush bytes value, min is 128")
		}
		if opts.FlushPeriod == 0 {
			opts.FlushPeriod = 100 * time.Millisecond
		}
		// Wrap the raw UDP sender in a buffering sender that batches writes
		// up to FlushBytes and flushes at least every FlushPeriod.
		b, err := newBufSender(s, opts.FlushBytes, opts.FlushPeriod)
		if err != nil {
			return nil, err
		}
		s = b
	}
	client := &client{
		s: s,
		prefix: prefix,
		mtx: &sync.Mutex{},
		// -1 marks that no GC statistics have been reported yet; see
		// ReportRuntimeMetrics.
		prevNumGC: -1,
	}
	return client, nil
}
// New returns a new statsd Client, and an error.
// addr is a string of the format "hostname:port", and must be parsable by
// net.ResolveUDPAddr. prefix is the statsd client prefix; it can be "" if no
// prefix is desired. Buffering is disabled; use NewWithOptions to enable it.
func New(addr, prefix string) (Client, error) {
	return NewWithOptions(addr, prefix, Options{})
}
// client is the default Client implementation; it serializes metrics into
// the statsd wire format and hands them to the underlying sender.
type client struct {
	// s is the underlying (possibly buffering) UDP sender.
	s sender
	// prefix for statsd name
	prefix string
	// mtx guards the GC bookkeeping below so concurrent
	// ReportRuntimeMetrics calls do not interleave.
	mtx *sync.Mutex
	// Previously reported garbage collection number (-1 before the first
	// report).
	prevNumGC int32
	// Last garbage collection time, as reported by runtime.MemStats.LastGC.
	lastGC uint64
}
// Close shuts down the underlying sender.
func (s *client) Close() error {
	return s.s.Close()
}
// Metric builds a reusable metric name from the client prefix and path parts.
func (s *client) Metric(p ...string) Metric {
	return NewMetric(s.prefix, p...)
}
// Inc submits a counter increment (statsd "c" type).
func (s *client) Inc(stat interface{}, value int64, rate float32) error {
	return s.submit("c", stat, value, false, "", rate)
}
// Dec submits a counter decrement by negating value and delegating to Inc.
func (s *client) Dec(stat interface{}, value int64, rate float32) error {
	return s.Inc(stat, -value, rate)
}
// Gauge submits an absolute gauge value (statsd "g" type).
func (s *client) Gauge(stat interface{}, value int64, rate float32) error {
	return s.submit("g", stat, value, false, "", rate)
}
// GaugeDelta submits a signed gauge change; sign=true forces an explicit "+"
// on non-negative values so statsd treats it as a delta.
func (s *client) GaugeDelta(stat interface{}, value int64, rate float32) error {
	return s.submit("g", stat, value, true, "", rate)
}
// Timing submits a timing value in milliseconds (statsd "ms" type).
func (s *client) Timing(stat interface{}, delta int64, rate float32) error {
	return s.submit("ms", stat, delta, false, "", rate)
}
// TimingMs converts a duration to whole milliseconds and submits it as a
// timing metric.
func (s *client) TimingMs(stat interface{}, d time.Duration, rate float32) error {
	return s.Timing(stat, int64(d/time.Millisecond), rate)
}
// UniqueString submits a string member to a statsd set ("s" type).
func (s *client) UniqueString(stat interface{}, value string, rate float32) error {
	return s.submit("s", stat, 0, false, value, rate)
}
// UniqueInt64 submits an integer member to a statsd set ("s" type).
func (s *client) UniqueInt64(stat interface{}, value int64, rate float32) error {
	return s.submit("s", stat, value, false, "", rate)
}
// ReportRuntimeMetrics submits a snapshot of Go runtime statistics
// (goroutine count, memory allocator gauges, and GC pause timings) under the
// given prefix. It is intended to be called periodically: GC pauses are
// reported as the delta since the previous call. Guarded by s.mtx so
// concurrent callers do not corrupt the GC bookkeeping.
func (s *client) ReportRuntimeMetrics(prefix string, rate float32) error {
	stats := &runtime.MemStats{}
	runtime.ReadMemStats(stats)
	s.mtx.Lock()
	defer s.mtx.Unlock()
	s.Gauge(s.Metric(prefix, "runtime", "goroutines"), int64(runtime.NumGoroutine()), rate)
	s.Gauge(s.Metric(prefix, "runtime", "mem", "alloc"), int64(stats.Alloc), rate)
	s.Gauge(s.Metric(prefix, "runtime", "mem", "sys"), int64(stats.Sys), rate)
	s.Gauge(s.Metric(prefix, "runtime", "mem", "lookups"), int64(stats.Lookups), rate)
	s.Gauge(s.Metric(prefix, "runtime", "mem", "mallocs"), int64(stats.Mallocs), rate)
	s.Gauge(s.Metric(prefix, "runtime", "mem", "frees"), int64(stats.Frees), rate)
	s.Gauge(s.Metric(prefix, "runtime", "mem", "heap", "alloc"), int64(stats.HeapAlloc), rate)
	s.Gauge(s.Metric(prefix, "runtime", "mem", "heap", "sys"), int64(stats.HeapSys), rate)
	s.Gauge(s.Metric(prefix, "runtime", "mem", "heap", "idle"), int64(stats.HeapIdle), rate)
	s.Gauge(s.Metric(prefix, "runtime", "mem", "heap", "inuse"), int64(stats.HeapInuse), rate)
	s.Gauge(s.Metric(prefix, "runtime", "mem", "heap", "released"), int64(stats.HeapReleased), rate)
	s.Gauge(s.Metric(prefix, "runtime", "mem", "heap", "objects"), int64(stats.HeapObjects), rate)
	// Remember current GC progress for the next call before reporting deltas.
	prevNumGC := s.prevNumGC
	lastGC := s.lastGC
	s.prevNumGC = int32(stats.NumGC)
	s.lastGC = stats.LastGC
	// First call: nothing to diff against yet.
	if prevNumGC == -1 {
		return nil
	}
	countGC := int32(stats.NumGC) - prevNumGC
	if countGC < 0 {
		return fmt.Errorf("Invalid number of garbage collections: %d", countGC)
	}
	// Nothing changed since last call, nothing to report
	if countGC == 0 {
		return nil
	}
	// The runtime keeps only the 256 most recent pause times; if more GCs
	// than that happened since the last report, the older ones were
	// overwritten and cannot be reported.
	if countGC > 256 {
		countGC = 256
	}
	s.Timing(s.Metric(prefix, "runtime", "gc", "periodns"), int64(stats.LastGC-lastGC), rate)
	for i := int32(0); i < countGC; i += 1 {
		// PauseNs is a circular buffer; this index selects the (i+1)-th
		// most recent pause duration.
		idx := int((stats.NumGC-uint32(i))+255) % 256
		s.Timing(s.Metric(prefix, "runtime", "gc", "pausens"), int64(stats.PauseNs[idx]), rate)
	}
	return nil
}
// SetPrefix sets/updates the statsd client prefix used for subsequently
// built metric names.
// NOTE(review): the prefix is written without holding a lock while submit
// reads it concurrently; confirm callers only set it during setup.
func (s *client) SetPrefix(prefix string) {
	s.prefix = prefix
}
// submit formats the statsd event data, handles sampling, and sends it to
// the server. metricType is the statsd wire type ("c", "g", "ms", "s");
// stat is either a plain string name (prefixed and escaped here) or a
// *metric carrying a pre-built name buffer; sign forces an explicit "+" on
// non-negative values (gauge deltas); sval, when non-empty, is sent in place
// of value (string set members). The emitted line has the form
// "<name>:<value>|<type>" with an optional "|@<rate>" sampling suffix.
func (s *client) submit(metricType string, stat interface{}, value int64, sign bool, sval string, rate float32) error {
	// Client-side sampling: drop this event with probability 1-rate.
	if rate < 1 && rand.Float32() > rate {
		return nil
	}
	var buf *bytes.Buffer
	switch m := stat.(type) {
	case string:
		buf = &bytes.Buffer{}
		if s.prefix != "" {
			buf.WriteString(s.prefix)
			buf.WriteString(".")
		}
		buf.WriteString(escape(m))
	case *metric:
		// Pre-built metric names already contain the prefix.
		buf = m.b
	default:
		return fmt.Errorf("Unexpected argument type: %T", stat)
	}
	buf.WriteByte(':')
	if sval != "" {
		buf.WriteString(escape(sval))
	} else {
		if sign {
			// Gauge deltas need an explicit sign; negative values already
			// carry "-" from FormatInt.
			if value >= 0 {
				buf.WriteByte('+')
			}
		}
		buf.WriteString(strconv.FormatInt(value, 10))
	}
	buf.WriteByte('|')
	buf.WriteString(metricType)
	if rate < 1 {
		buf.WriteString("|@")
		buf.WriteString(strconv.FormatFloat(float64(rate), 'f', -1, 32))
	}
	_, err := buf.WriteTo(s.s)
	return err
}
package structure
import (
"regexp"
"time"
)
// Validatable is implemented by values that can validate themselves using
// the supplied Validator.
type Validatable interface {
	Validate(validator Validator)
}
// Validator inspects values and accumulates validation errors. The typed
// methods (Bool, Int, String, ...) return fluent constraint builders for the
// referenced value; the With* methods return a derived validator carrying a
// new origin, source, meta payload, or reference path.
type Validator interface {
	OriginReporter
	SourceReporter
	MetaReporter
	ErrorReporter
	Validate(validatable Validatable) error
	Bool(reference string, value *bool) Bool
	Float64(reference string, value *float64) Float64
	Int(reference string, value *int) Int
	String(reference string, value *string) String
	StringArray(reference string, value *[]string) StringArray
	Time(reference string, value *time.Time) Time
	Object(reference string, value *map[string]interface{}) Object
	Array(reference string, value *[]interface{}) Array
	WithOrigin(origin Origin) Validator
	WithSource(source Source) Validator
	WithMeta(meta interface{}) Validator
	WithReference(reference string) Validator
}
// BoolUsingFunc is a caller-supplied check for a bool value; violations are
// reported through the given ErrorReporter.
type BoolUsingFunc func(value bool, errorReporter ErrorReporter)
// Bool is a fluent constraint builder for *bool values.
type Bool interface {
	Exists() Bool
	NotExists() Bool
	True() Bool
	False() Bool
	Using(usingFunc BoolUsingFunc) Bool
}
// Float64UsingFunc is a caller-supplied check for a float64 value.
type Float64UsingFunc func(value float64, errorReporter ErrorReporter)
// Float64 is a fluent constraint builder for *float64 values.
type Float64 interface {
	Exists() Float64
	NotExists() Float64
	EqualTo(value float64) Float64
	NotEqualTo(value float64) Float64
	LessThan(limit float64) Float64
	LessThanOrEqualTo(limit float64) Float64
	GreaterThan(limit float64) Float64
	GreaterThanOrEqualTo(limit float64) Float64
	InRange(lowerLimit float64, upperLimit float64) Float64
	OneOf(allowedValues ...float64) Float64
	NotOneOf(disallowedValues ...float64) Float64
	Using(usingFunc Float64UsingFunc) Float64
}
// IntUsingFunc is a caller-supplied check for an int value.
type IntUsingFunc func(value int, errorReporter ErrorReporter)
// Int is a fluent constraint builder for *int values.
type Int interface {
	Exists() Int
	NotExists() Int
	EqualTo(value int) Int
	NotEqualTo(value int) Int
	LessThan(limit int) Int
	LessThanOrEqualTo(limit int) Int
	GreaterThan(limit int) Int
	GreaterThanOrEqualTo(limit int) Int
	InRange(lowerLimit int, upperLimit int) Int
	OneOf(allowedValues ...int) Int
	NotOneOf(disallowedValues ...int) Int
	Using(usingFunc IntUsingFunc) Int
}
// StringUsingFunc is a caller-supplied check for a string value.
type StringUsingFunc func(value string, errorReporter ErrorReporter)
// String is a fluent constraint builder for *string values. AsTime converts
// the chain into a Time builder by parsing the value with the given layout.
type String interface {
	Exists() String
	NotExists() String
	Empty() String
	NotEmpty() String
	EqualTo(value string) String
	NotEqualTo(value string) String
	LengthEqualTo(limit int) String
	LengthNotEqualTo(limit int) String
	LengthLessThan(limit int) String
	LengthLessThanOrEqualTo(limit int) String
	LengthGreaterThan(limit int) String
	LengthGreaterThanOrEqualTo(limit int) String
	LengthInRange(lowerLimit int, upperLimit int) String
	OneOf(allowedValues ...string) String
	NotOneOf(disallowedValues ...string) String
	Matches(expression *regexp.Regexp) String
	NotMatches(expression *regexp.Regexp) String
	Using(usingFunc StringUsingFunc) String
	AsTime(layout string) Time
	Email() String
	Alphanumeric() String
	Hexadecimal() String
	UUID() String
}
// StringArrayEachFunc applies per-element constraints via the given String
// builder.
type StringArrayEachFunc func(stringValidator String)
// StringArrayEachUsingFunc is a caller-supplied check for each element.
type StringArrayEachUsingFunc func(value string, errorReporter ErrorReporter)
// StringArrayUsingFunc is a caller-supplied check for the whole array.
type StringArrayUsingFunc func(value []string, errorReporter ErrorReporter)
// StringArray is a fluent constraint builder for *[]string values, with
// whole-array length constraints plus Each* per-element constraints.
type StringArray interface {
	Exists() StringArray
	NotExists() StringArray
	Empty() StringArray
	NotEmpty() StringArray
	LengthEqualTo(limit int) StringArray
	LengthNotEqualTo(limit int) StringArray
	LengthLessThan(limit int) StringArray
	LengthLessThanOrEqualTo(limit int) StringArray
	LengthGreaterThan(limit int) StringArray
	LengthGreaterThanOrEqualTo(limit int) StringArray
	LengthInRange(lowerLimit int, upperLimit int) StringArray
	Each(eachFunc StringArrayEachFunc) StringArray
	EachNotEmpty() StringArray
	EachOneOf(allowedValues ...string) StringArray
	EachNotOneOf(disallowedValues ...string) StringArray
	EachMatches(expression *regexp.Regexp) StringArray
	EachNotMatches(expression *regexp.Regexp) StringArray
	EachUsing(eachUsingFunc StringArrayEachUsingFunc) StringArray
	EachUnique() StringArray
	Using(usingFunc StringArrayUsingFunc) StringArray
}
// TimeUsingFunc is a caller-supplied check for a time.Time value.
type TimeUsingFunc func(value time.Time, errorReporter ErrorReporter)
// Time is a fluent constraint builder for *time.Time values.
type Time interface {
	Exists() Time
	NotExists() Time
	Zero() Time
	NotZero() Time
	After(limit time.Time) Time
	AfterNow(threshold time.Duration) Time
	Before(limit time.Time) Time
	BeforeNow(threshold time.Duration) Time
	Using(usingFunc TimeUsingFunc) Time
}
// ObjectUsingFunc is a caller-supplied check for a generic object value.
type ObjectUsingFunc func(value map[string]interface{}, errorReporter ErrorReporter)
// Object is a fluent constraint builder for *map[string]interface{} values.
type Object interface {
	Exists() Object
	NotExists() Object
	Empty() Object
	NotEmpty() Object
	Using(usingFunc ObjectUsingFunc) Object
}
// ArrayUsingFunc is a caller-supplied check for a generic array value.
type ArrayUsingFunc func(value []interface{}, errorReporter ErrorReporter)
// Array is a fluent constraint builder for *[]interface{} values.
type Array interface {
	Exists() Array
	NotExists() Array
	Empty() Array
	NotEmpty() Array
	Using(usingFunc ArrayUsingFunc) Array
}
package logger
import (
"bytes"
"context"
"fmt"
"io"
"os"
"sync"
"time"
"github.com/edoger/zkits-logger/internal"
)
// Log interface defines an extensible log.
type Log interface {
	// Name returns the logger name.
	Name() string
	// WithField adds the given extended data to the log.
	WithField(string, interface{}) Log
	// WithError adds the given error to the log.
	// This method is equivalent to WithField("error", error).
	WithError(error) Log
	// WithFields adds the given multiple extended data to the log.
	WithFields(map[string]interface{}) Log
	// WithContext adds the given context to the log.
	WithContext(context.Context) Log
	// WithCaller forces the caller report of the current log to be enabled.
	WithCaller(...int) Log
	// Log uses the given parameters to record a log of the specified level.
	// If the given log level is PanicLevel, the given panic function will be
	// called automatically after logging is completed.
	// If the given log level is FatalLevel, the given exit function will be
	// called automatically after logging is completed.
	// If the given log level is invalid, the log will be discarded.
	Log(Level, ...interface{})
	// Logln uses the given parameters to record a log of the specified level.
	// If the given log level is PanicLevel, the given panic function will be
	// called automatically after logging is completed.
	// If the given log level is FatalLevel, the given exit function will be
	// called automatically after logging is completed.
	// If the given log level is invalid, the log will be discarded.
	Logln(Level, ...interface{})
	// Logf uses the given parameters to record a log of the specified level.
	// If the given log level is PanicLevel, the given panic function will be
	// called automatically after logging is completed.
	// If the given log level is FatalLevel, the given exit function will be
	// called automatically after logging is completed.
	// If the given log level is invalid, the log will be discarded.
	Logf(Level, string, ...interface{})
	// Trace uses the given parameters to record a TraceLevel log.
	Trace(...interface{})
	// Traceln uses the given parameters to record a TraceLevel log.
	Traceln(...interface{})
	// Tracef uses the given parameters to record a TraceLevel log.
	Tracef(string, ...interface{})
	// Print uses the given parameters to record a TraceLevel log.
	Print(...interface{})
	// Println uses the given parameters to record a TraceLevel log.
	Println(...interface{})
	// Printf uses the given parameters to record a TraceLevel log.
	Printf(string, ...interface{})
	// Debug uses the given parameters to record a DebugLevel log.
	Debug(...interface{})
	// Debugln uses the given parameters to record a DebugLevel log.
	Debugln(...interface{})
	// Debugf uses the given parameters to record a DebugLevel log.
	Debugf(string, ...interface{})
	// Info uses the given parameters to record an InfoLevel log.
	Info(...interface{})
	// Infoln uses the given parameters to record an InfoLevel log.
	Infoln(...interface{})
	// Infof uses the given parameters to record an InfoLevel log.
	Infof(string, ...interface{})
	// Echo uses the given parameters to record an InfoLevel log.
	Echo(...interface{})
	// Echoln uses the given parameters to record an InfoLevel log.
	Echoln(...interface{})
	// Echof uses the given parameters to record an InfoLevel log.
	Echof(string, ...interface{})
	// Warn uses the given parameters to record a WarnLevel log.
	Warn(...interface{})
	// Warnln uses the given parameters to record a WarnLevel log.
	Warnln(...interface{})
	// Warnf uses the given parameters to record a WarnLevel log.
	Warnf(string, ...interface{})
	// Warning uses the given parameters to record a WarnLevel log.
	Warning(...interface{})
	// Warningln uses the given parameters to record a WarnLevel log.
	Warningln(...interface{})
	// Warningf uses the given parameters to record a WarnLevel log.
	Warningf(string, ...interface{})
	// Error uses the given parameters to record an ErrorLevel log.
	Error(...interface{})
	// Errorln uses the given parameters to record an ErrorLevel log.
	Errorln(...interface{})
	// Errorf uses the given parameters to record an ErrorLevel log.
	Errorf(string, ...interface{})
	// Fatal uses the given parameters to record a FatalLevel log.
	// After the log record is completed, the system will automatically call
	// the exit function given in advance.
	Fatal(...interface{})
	// Fatalln uses the given parameters to record a FatalLevel log.
	// After the log record is completed, the system will automatically call
	// the exit function given in advance.
	Fatalln(...interface{})
	// Fatalf uses the given parameters to record a FatalLevel log.
	// After the log record is completed, the system will automatically call
	// the exit function given in advance.
	Fatalf(string, ...interface{})
	// Panic uses the given parameters to record a PanicLevel log.
	// After the log record is completed, the system will automatically call
	// the panic function given in advance.
	Panic(...interface{})
	// Panicln uses the given parameters to record a PanicLevel log.
	// After the log record is completed, the system will automatically call
	// the panic function given in advance.
	Panicln(...interface{})
	// Panicf uses the given parameters to record a PanicLevel log.
	// After the log record is completed, the system will automatically call
	// the panic function given in advance.
	Panicf(string, ...interface{})
}
// The core type defines the collection of shared attributes within the log,
// and each independent Logger shares the same core instance.
type core struct {
	name string // logger name reported in every entity
	level Level // minimum enabled level
	formatter Formatter // serializes an entity into its buffer
	writer io.Writer // default output destination
	levelWriter map[Level]io.Writer // optional per-level output overrides
	pool sync.Pool // recycled *logEntity instances
	hooks HookBag // hooks fired after formatting, before writing
	timeFormat string // layout used when formatting entity timestamps
	nowFunc func() time.Time // clock source (injectable for tests)
	exitFunc func(int) // invoked after a FatalLevel record
	panicFunc func(string) // invoked after a PanicLevel record
	caller *internal.CallerReporter // default caller reporter (nil = disabled)
	levelCaller map[Level]*internal.CallerReporter // per-level caller overrides
	interceptor func(Summary, io.Writer) (int, error) // optional write interceptor
}
// newCore creates a new core instance and binds the logger name.
// Defaults: TraceLevel (everything enabled), JSON formatting to os.Stdout,
// the internal package's time/exit/panic functions, and no caller reporting.
func newCore(name string) *core {
	return &core{
		name: name,
		level: TraceLevel,
		formatter: DefaultJSONFormatter(),
		writer: os.Stdout,
		levelWriter: make(map[Level]io.Writer),
		pool: sync.Pool{New: func() interface{} { return new(logEntity) }},
		hooks: NewHookBag(),
		timeFormat: internal.DefaultTimeFormat,
		nowFunc: internal.DefaultNowFunc,
		exitFunc: internal.DefaultExitFunc,
		panicFunc: internal.DefaultPanicFunc,
		levelCaller: make(map[Level]*internal.CallerReporter),
	}
}
// getEntity takes a log entity from the pool and initializes every field
// from the core, the owning log instance and the call-site arguments.
// All fields are overwritten here, so stale state left by putEntity is safe.
func (c *core) getEntity(l *log, level Level, message, caller string) *logEntity {
	o := c.pool.Get().(*logEntity)
	o.name = c.name
	o.time = c.nowFunc()
	o.timeFormat = c.timeFormat
	o.level = level
	o.message = message
	o.ctx = l.ctx
	o.caller = caller
	o.fields = l.fields
	return o
}
// putEntity cleans up and recycles the given log entity.
// Reference-holding fields are cleared so pooled entities do not pin
// contexts or field maps; level and time are left stale because getEntity
// always overwrites them.
func (c *core) putEntity(o *logEntity) {
	// If the log size exceeds 1KB, we need to discard this buffer to
	// free memory faster.
	if o.buffer.Cap() > 1024 {
		o.buffer = bytes.Buffer{}
	} else {
		o.buffer.Reset()
	}
	o.name = ""
	o.timeFormat = ""
	o.message = ""
	o.fields = nil
	o.ctx = nil
	o.caller = ""
	c.pool.Put(o)
}
// log is the internal implementation of the Log interface.
// Instances are immutable: every With* method returns a new *log sharing
// the same core.
type log struct {
	core *core // shared logger state
	ctx context.Context // context attached via WithContext (may be nil)
	fields internal.Fields // extended data attached via WithField(s)
	caller *internal.CallerReporter // per-instance caller override (nil = core defaults)
}
// Name returns the logger name bound to the shared core.
func (o *log) Name() string {
	return o.core.name
}
// WithField adds the given extended data to the log.
// The receiver is never mutated: a new instance is returned that shares
// the core, context and caller reporter of the original, with the fields
// cloned (plus one slot) before the new key is inserted.
func (o *log) WithField(key string, value interface{}) Log {
	child := &log{core: o.core, ctx: o.ctx, caller: o.caller}
	if len(o.fields) == 0 {
		child.fields = internal.Fields{key: value}
		return child
	}
	child.fields = o.fields.Clone(1)
	child.fields[key] = value
	return child
}
// WithError adds the given error to the log.
// This method is equivalent to WithField("error", err).
func (o *log) WithError(err error) Log {
	return o.WithField("error", err)
}
// WithFields adds the given multiple extended data to the log.
// A new instance is returned; the receiver's field set is merged with the
// given map without being mutated.
func (o *log) WithFields(fields map[string]interface{}) Log {
	var merged internal.Fields
	if len(o.fields) == 0 {
		merged = internal.MakeFields(fields)
	} else {
		merged = o.fields.With(fields)
	}
	return &log{core: o.core, fields: merged, ctx: o.ctx, caller: o.caller}
}
// WithContext adds the given context to the log, returning a new instance
// that shares the receiver's core, fields and caller reporter.
func (o *log) WithContext(ctx context.Context) Log {
	return &log{core: o.core, fields: o.fields, ctx: ctx, caller: o.caller}
}
// WithCaller forces the caller report of the current log to be enabled.
// The optional skip value adjusts how many stack frames are skipped;
// non-positive or absent values are treated as 0.
func (o *log) WithCaller(skip ...int) Log {
	var n int
	if len(skip) > 0 && skip[0] > 0 {
		n = skip[0]
	}
	// If the caller is equaled, we don't need to create a new log instance.
	if o.caller != nil && o.caller.Equal(n) {
		return o
	}
	return &log{core: o.core, fields: o.fields, ctx: o.ctx, caller: internal.NewCallerReporter(n)}
}
// record formats and records the current log.
// The entity is taken from and returned to the core's pool; hook and write
// failures are reported to stderr (via internal.EchoError) but do not abort
// logging. If exitFunc terminates the process, the deferred putEntity never
// runs, which is acceptable since the process is ending.
func (o *log) record(level Level, message string) {
	entity := o.core.getEntity(o, level, message, o.getCaller(level))
	defer o.core.putEntity(entity)
	if err := o.core.formatter.Format(entity, &entity.buffer); err != nil {
		// When the format log fails, we terminate the logging and report the error.
		internal.EchoError("(%s) Failed to format log: %s", o.core.name, err)
	} else {
		err = o.core.hooks.Fire(entity)
		if err != nil {
			internal.EchoError("(%s) Failed to fire log hook: %s", o.core.name, err)
		}
		err = o.write(entity)
		if err != nil {
			internal.EchoError("(%s) Failed to write log: %s", o.core.name, err)
		}
	}
	// NOTE(review): the guard assumes FatalLevel and PanicLevel are
	// numerically below ErrorLevel (severity-descending numbering, as in
	// logrus) — confirm against the Level definition.
	if level < ErrorLevel {
		switch level {
		case FatalLevel:
			o.core.exitFunc(1)
		case PanicLevel:
			o.core.panicFunc(message)
		}
	}
}
// write sends the formatted entity to its destination.
// A per-level writer (if registered and non-nil) takes precedence over the
// core's default writer. When an interceptor is installed it takes full
// control of the write; otherwise empty payloads are skipped.
func (o *log) write(entity *logEntity) (err error) {
	var w io.Writer
	if writer, found := o.core.levelWriter[entity.level]; found && writer != nil {
		w = writer
	} else {
		w = o.core.writer
	}
	if o.core.interceptor == nil {
		// When there is no interceptor, make sure that the log written is not empty.
		if entity.Size() > 0 {
			_, err = w.Write(entity.Bytes())
		}
	} else {
		_, err = o.core.interceptor(entity, w)
	}
	return
}
// getCaller returns the caller report. If caller reporting is not enabled in
// the current log, an empty string is always returned.
// Precedence: instance-level reporter (WithCaller), then the per-level
// reporter, then the core-wide reporter.
func (o *log) getCaller(level Level) string {
	if o.caller != nil {
		return o.caller.GetCaller()
	}
	if caller, found := o.core.levelCaller[level]; found {
		return caller.GetCaller()
	}
	if o.core.caller != nil {
		return o.core.caller.GetCaller()
	}
	return ""
}
// Log uses the given parameters to record a log of the specified level.
// If the given log level is PanicLevel, the given panic function will be
// called automatically after logging is completed.
// If the given log level is FatalLevel, the given exit function will be
// called automatically after logging is completed.
// If the given log level is invalid, the log will be discarded.
func (o *log) Log(level Level, args ...interface{}) {
	// Delegate directly, as Logln and Logf do: o.log already applies
	// fmt.Sprint, so pre-formatting here would format twice and would also
	// run even when the level is disabled. fmt.Sprint of a single string is
	// the identity, so the emitted message is unchanged.
	o.log(level, args...)
}
// log uses the given parameters to record a log of the specified level.
// Disabled levels are filtered here before any formatting work happens.
func (o *log) log(level Level, args ...interface{}) {
	if !o.core.level.IsEnabled(level) {
		return
	}
	o.record(level, fmt.Sprint(args...))
}
// Logln uses the given parameters to record a log of the specified level.
// If the given log level is PanicLevel, the given panic function will be
// called automatically after logging is completed.
// If the given log level is FatalLevel, the given exit function will be
// called automatically after logging is completed.
// If the given log level is invalid, the log will be discarded.
func (o *log) Logln(level Level, args ...interface{}) {
	o.logln(level, args...)
}
// logln uses the given parameters to record a log of the specified level.
// Arguments are joined Sprintln-style (space-separated); the trailing
// newline Sprintln appends is stripped before recording.
func (o *log) logln(level Level, args ...interface{}) {
	if !o.core.level.IsEnabled(level) {
		return
	}
	s := fmt.Sprintln(args...)
	o.record(level, s[:len(s)-1])
}
// Logf uses the given parameters to record a log of the specified level.
// If the given log level is PanicLevel, the given panic function will be
// called automatically after logging is completed.
// If the given log level is FatalLevel, the given exit function will be
// called automatically after logging is completed.
// If the given log level is invalid, the log will be discarded.
func (o *log) Logf(level Level, format string, args ...interface{}) {
	o.logf(level, format, args...)
}
// logf uses the given parameters to record a log of the specified level,
// formatting the message Sprintf-style after the level filter passes.
func (o *log) logf(level Level, format string, args ...interface{}) {
	if !o.core.level.IsEnabled(level) {
		return
	}
	o.record(level, fmt.Sprintf(format, args...))
}
// Trace uses the given parameters to record a TraceLevel log.
func (o *log) Trace(args ...interface{}) {
	o.log(TraceLevel, args...)
}
// Traceln uses the given parameters to record a TraceLevel log.
func (o *log) Traceln(args ...interface{}) {
	o.logln(TraceLevel, args...)
}
// Tracef uses the given parameters to record a TraceLevel log.
func (o *log) Tracef(format string, args ...interface{}) {
	o.logf(TraceLevel, format, args...)
}
// Print uses the given parameters to record a TraceLevel log (alias of Trace).
func (o *log) Print(args ...interface{}) {
	o.log(TraceLevel, args...)
}
// Println uses the given parameters to record a TraceLevel log (alias of Traceln).
func (o *log) Println(args ...interface{}) {
	o.logln(TraceLevel, args...)
}
// Printf uses the given parameters to record a TraceLevel log (alias of Tracef).
func (o *log) Printf(format string, args ...interface{}) {
	o.logf(TraceLevel, format, args...)
}
// Debug uses the given parameters to record a DebugLevel log.
func (o *log) Debug(args ...interface{}) {
	o.log(DebugLevel, args...)
}
// Debugln uses the given parameters to record a DebugLevel log.
func (o *log) Debugln(args ...interface{}) {
	o.logln(DebugLevel, args...)
}
// Debugf uses the given parameters to record a DebugLevel log.
func (o *log) Debugf(format string, args ...interface{}) {
	o.logf(DebugLevel, format, args...)
}
// Info uses the given parameters to record an InfoLevel log.
func (o *log) Info(args ...interface{}) {
	o.log(InfoLevel, args...)
}
// Infoln uses the given parameters to record an InfoLevel log.
func (o *log) Infoln(args ...interface{}) {
	o.logln(InfoLevel, args...)
}
// Infof uses the given parameters to record an InfoLevel log.
func (o *log) Infof(format string, args ...interface{}) {
	o.logf(InfoLevel, format, args...)
}
// Echo uses the given parameters to record an InfoLevel log (alias of Info).
func (o *log) Echo(args ...interface{}) {
	o.log(InfoLevel, args...)
}
// Echoln uses the given parameters to record an InfoLevel log (alias of Infoln).
func (o *log) Echoln(args ...interface{}) {
	o.logln(InfoLevel, args...)
}
// Echof uses the given parameters to record an InfoLevel log (alias of Infof).
func (o *log) Echof(format string, args ...interface{}) {
	o.logf(InfoLevel, format, args...)
}
// Warn uses the given parameters to record a WarnLevel log.
func (o *log) Warn(args ...interface{}) {
	o.log(WarnLevel, args...)
}
// Warnln uses the given parameters to record a WarnLevel log.
func (o *log) Warnln(args ...interface{}) {
	o.logln(WarnLevel, args...)
}
// Warnf uses the given parameters to record a WarnLevel log.
func (o *log) Warnf(format string, args ...interface{}) {
	o.logf(WarnLevel, format, args...)
}
// Warning uses the given parameters to record a WarnLevel log (alias of Warn).
func (o *log) Warning(args ...interface{}) {
	o.log(WarnLevel, args...)
}
// Warningln uses the given parameters to record a WarnLevel log (alias of Warnln).
func (o *log) Warningln(args ...interface{}) {
	o.logln(WarnLevel, args...)
}
// Warningf uses the given parameters to record a WarnLevel log (alias of Warnf).
func (o *log) Warningf(format string, args ...interface{}) {
	o.logf(WarnLevel, format, args...)
}
// Error uses the given parameters to record an ErrorLevel log.
func (o *log) Error(args ...interface{}) {
	o.log(ErrorLevel, args...)
}
// Errorln uses the given parameters to record an ErrorLevel log.
func (o *log) Errorln(args ...interface{}) {
	o.logln(ErrorLevel, args...)
}
// Errorf uses the given parameters to record an ErrorLevel log.
func (o *log) Errorf(format string, args ...interface{}) {
	o.logf(ErrorLevel, format, args...)
}
// Fatal uses the given parameters to record a FatalLevel log.
// After the log record is completed, the system will automatically call
// the exit function given in advance (core.exitFunc, with status 1).
func (o *log) Fatal(args ...interface{}) {
	o.log(FatalLevel, args...)
}
// Fatalln uses the given parameters to record a FatalLevel log.
// After the log record is completed, the system will automatically call
// the exit function given in advance (core.exitFunc, with status 1).
func (o *log) Fatalln(args ...interface{}) {
	o.logln(FatalLevel, args...)
}
// Fatalf uses the given parameters to record a FatalLevel log.
// After the log record is completed, the system will automatically call
// the exit function given in advance (core.exitFunc, with status 1).
func (o *log) Fatalf(format string, args ...interface{}) {
	o.logf(FatalLevel, format, args...)
}
// Panic uses the given parameters to record a PanicLevel log.
// After the log record is completed, the system will automatically call
// the panic function given in advance (core.panicFunc, with the message).
func (o *log) Panic(args ...interface{}) {
	o.log(PanicLevel, args...)
}
// Panicln uses the given parameters to record a PanicLevel log.
// After the log record is completed, the system will automatically call
// the panic function given in advance (core.panicFunc, with the message).
func (o *log) Panicln(args ...interface{}) {
	o.logln(PanicLevel, args...)
}
// Panicf uses the given parameters to record a PanicLevel log.
// After the log record is completed, the system will automatically call
// the panic function given in advance (core.panicFunc, with the message).
func (o *log) Panicf(format string, args ...interface{}) {
	o.logf(PanicLevel, format, args...)
}
package MathLibGO
// constantMathValues groups the mathematical constants exposed by this package.
type constantMathValues struct {
	pi float64 // ratio of a circle's circumference to its diameter (π)
	goldDivision float64 // the golden ratio (φ)
}
// GoldDivision returns the golden ratio φ ≈ 1.6180339887...
func GoldDivision() float64 {
	const phi = 1.618033988749894848204586834366
	return phi
}
// PI returns the constant π truncated to 11 decimal places.
func PI() float64 {
	const pi = 3.14159265359
	return pi
}
// AddFloat returns the sum a + b.
func AddFloat(a float64, b float64) float64 {
	return a + b
}
// AddInt returns the sum a + b.
func AddInt(a int, b int) int {
	return a + b
}
// MultiplyFloat returns the product a * b.
func MultiplyFloat(a float64, b float64) float64 {
	return a * b
}
// MultiplyInt returns the product a * b.
func MultiplyInt(a int, b int) int {
	return a * b
}
// DivideFloat returns a / b. Division by zero yields ±Inf (or NaN for 0/0).
func DivideFloat(a float64, b float64) float64 {
	return a / b
}
// DivideInt returns a / b truncated toward zero. Panics if b == 0.
func DivideInt(a int, b int) int {
	return a / b
}
// IsIntDivisible reports whether a is evenly divisible by b.
// Panics if b == 0 (integer division by zero).
func IsIntDivisible(a int, b int) bool {
	return a%b == 0
}
// IsIntEven reports whether a is even. Works for negative values too,
// since Go's % yields 0 for any even operand.
func IsIntEven(a int) bool {
	return a%2 == 0
}
// ReducingFloatByPercent returns a reduced by pUser percent,
// e.g. ReducingFloatByPercent(200, 50) == 100.
func ReducingFloatByPercent(a float64, pUser float64) float64 {
	fraction := pUser / 100
	return a - fraction*a
}
// ReducingIntByPercent returns a reduced by pUser percent using integer
// arithmetic (the result truncates toward zero),
// e.g. ReducingIntByPercent(200, 50) == 100.
//
// Fixed: the previous implementation computed pUser/100 first, which
// truncates to 0 for every pUser < 100, so the function returned a
// unchanged for all normal percentages. Multiplying before dividing
// preserves the fractional part until the final truncation.
func ReducingIntByPercent(a int, pUser int) int {
	percentOfA := a * pUser / 100
	return a - percentOfA
}
// SqrtFloat returns a squared (a * a).
// NOTE(review): despite the name, this computes the square, not the square
// root. Callers such as CircleField rely on the squaring behavior, so the
// name cannot be changed without breaking compatibility.
func SqrtFloat(a float64) float64 {
	return a * a
}
// SqrtInt returns a squared (a * a). See the naming note on SqrtFloat.
func SqrtInt(a int) int {
	return a * a
}
// ToThePowerInt returns a raised to the power n by repeated multiplication.
// n == 0 yields 1 (for any a); negative exponents yield 0, matching the
// truncating behavior of integer reciprocals.
func ToThePowerInt(a int, n int) int {
	switch {
	case n < 0:
		return 0
	case n == 0:
		return 1
	}
	result := a
	for i := 1; i < n; i++ {
		result *= a
	}
	return result
}
// ToThePowerFloat returns a raised to the power n by repeated multiplication.
// n == 0 yields 1. Fractional exponents in (0, 1) are not representable by
// this algorithm and yield 0 (preserving the original behavior); fractional
// n > 1 is effectively rounded up by the loop condition.
//
// Fixed: negative exponents previously returned 0; they now return the
// reciprocal 1/a^|n| (so a == 0 with n < 0 yields +Inf rather than 0).
func ToThePowerFloat(a float64, n float64) float64 {
	if n < 0 {
		return 1 / ToThePowerFloat(a, -n)
	}
	if n == 0 {
		return 1
	}
	if n < 1 {
		// Fractional exponents in (0, 1) are unsupported; keep legacy result.
		return 0
	}
	result := a
	for i := 1.0; i < n; i++ {
		result *= a
	}
	return result
}
// CircleField returns the area of a circle with radius r (π·r²).
// SqrtFloat, despite its name, squares its argument.
func CircleField(r float64) float64 {
	return PI() * SqrtFloat(r)
}
// RectangleFieldFloat returns the area of an a×b rectangle.
func RectangleFieldFloat(a float64, b float64) float64 {
	return a * b
}
// RectangleFieldInt returns the area of an a×b rectangle.
func RectangleFieldInt(a int, b int) int {
	return a * b
}
// SquareFieldFloat returns the area of a square with side a
// (SqrtFloat squares its argument despite its name).
func SquareFieldFloat(a float64) float64 {
	return SqrtFloat(a)
}
// SquareFieldInt returns the area of a square with side a.
func SquareFieldInt(a int) int {
	return SqrtInt(a)
}
// TriangleFieldFloat returns the area of a triangle with base a and
// height h (a·h/2). Dividing the product by 2 is exact in binary floating
// point, identical to multiplying by 0.5.
func TriangleFieldFloat(a float64, h float64) float64 {
	return (a * h) / 2
}
// HexagonFieldFloat returns 6·a² (SqrtFloat squares its argument).
// NOTE(review): the area of a regular hexagon with side a is (3√3/2)·a²
// ≈ 2.598·a², not 6·a² — confirm what "hexagon field" is meant to compute
// before relying on this value.
func HexagonFieldFloat(a float64) float64 {
	return 6 * SqrtFloat(a)
}
// HexagonFieldInt returns 6·a². See the note on HexagonFieldFloat.
func HexagonFieldInt(a int) int {
	return 6 * SqrtInt(a)
}
// CorrectCuboidFieldFloat returns 2·a² + 4·a·h, the surface area of a
// right square cuboid with base side a and height h.
func CorrectCuboidFieldFloat(a float64, h float64) float64 {
	return 2*SqrtFloat(a) + 4*MultiplyFloat(a, h)
}
// CorrectCuboidFieldInt returns 2·a² + 4·a·h for integer dimensions.
func CorrectCuboidFieldInt(a int, h int) int {
	return 2*SqrtInt(a) + 4*MultiplyInt(a, h)
}
package serve
import (
"math"
"sort"
)
// NearestObstacle sends on c1 the index (0-7) of the stick whose fixed x
// position is nearest to the ball's current x coordinate.
// The distance table holds the eight stick x positions in ascending order;
// sort.Search finds the first position >= ball.X, and the neighbor on
// either side with the smaller absolute difference wins. Intended to run
// as a goroutine; it blocks until the result is received from c1.
func (ball *ball) NearestObstacle(c1 chan int) {
	distance := [8]int32{61, 136, 211, 286, 361, 436, 511, 586}
	i := sort.Search(len(distance), func(i int) bool { return distance[i] >= int32(ball.X) })
	if i < len(distance) && distance[i] == int32(ball.X) {
		// Exact hit on a stick position.
		c1 <- i
	} else if i == 0 {
		// Ball is left of every stick.
		c1 <- i
	} else if i == 8 {
		// Ball is right of every stick.
		c1 <- i - 1
	} else {
		// Pick the closer of the two surrounding sticks.
		if math.Abs(float64(distance[i]-int32(ball.X))) < math.Abs(float64(distance[i-1]-int32(ball.X))) {
			c1 <- i
		} else {
			c1 <- i - 1
		}
	}
}
// Collides reports whether the ball overlaps the given player, by comparing
// the distance between the two against the sum of their radii.
// NOTE(review): the Radius terms are added inside the coordinate
// differences, which suggests X/Y are top-left corners rather than centers —
// confirm against the position conventions used elsewhere.
func (ball *ball) Collides(c2 player) bool {
	distance := math.Sqrt(math.Pow(c2.X-ball.X+c2.Radius, 2) + math.Pow(c2.Y-ball.Y+c2.Radius, 2))
	return distance <= ball.Radius+c2.Radius
}
// CheckCollision checks, once per frame, whether the ball collides with any
// player on the stick nearest to it.
// arr maps team id (1 or 2) to that team's stick indices in field order;
// the four slices in stick hold the players of the goalkeeper, defence,
// mid and attack sticks respectively. When the nearest stick belongs to
// team teamid, each of its players is tested concurrently via Collision.
func (ball *ball) CheckCollision(t team, teamid int32) {
	c1 := make(chan int)
	go ball.NearestObstacle(c1)
	arr := [2][]int{{0, 1, 3, 5}, {7, 6, 4, 2}}
	var stick [4][]player
	stick[0] = t.GoalKeeper[0:1]
	stick[1] = t.Defence[0:2]
	stick[2] = t.Mid[0:5]
	stick[3] = t.Attack[0:3]
	index := <-c1
	for i, j := range arr[teamid-1] {
		if j == index {
			for k := range stick[i] {
				go ball.Collision(t, teamid, stick[i][k])
			}
		}
	}
}
// Collision fires the player-collision handler if the ball currently
// overlaps the given player. Run as a goroutine per candidate player.
func (ball *ball) Collision(t team, teamid int32, p player) {
	if ball.Collides(p) {
		OnCollisionwithPlayer(ball, teamid, t.LastMotion)
	}
}
// OnCollisionwithPlayer changes the direction of the ball after a player
// hit: the horizontal velocity is reflected toward the opposing side, the
// speed is doubled unless a previous collision already boosted it past
// BallSpeedX, and the stick's last vertical motion adds spin to Yv.
func OnCollisionwithPlayer(ball *ball, teamid int32, lastMotion int32) {
	// Reflect only when the ball is moving toward the striking team's side.
	if (ball.Xv < 0 && teamid == 1) || (ball.Xv > 0 && teamid == 2) {
		ball.Xv = -ball.Xv
	}
	// Boost speed once; OnCollisionWithWall later decays it back.
	if math.Abs(ball.Xv) <= BallSpeedX {
		ball.Xv *= 2
		ball.Yv *= 2
	}
	ball.Yv += float64(lastMotion) * 0.2
}
// CollidesWall checks whether the ball has hit a wall, and whether a goal
// has been scored.
// Returns (goal, index) where goal is 0 for no goal, otherwise the scoring
// team (2 when the ball enters the left goal mouth, 1 for the right), and
// index identifies the wall hit:
//	-1 no collision
//	 1 left wall
//	 2 right wall
//	 3 upper wall
//	 4 lower wall
// The goal mouth spans y in [201+radius, 297-radius]; entering it sets the
// package-level insideGoal flag. Velocity direction is checked so a ball
// already rebounding is not re-detected.
func (ball *ball) CollidesWall() (goal int, index int) {
	if ball.X < boundarywidth+radius && ball.Xv < 0 {
		if ball.Y <= 297-radius && ball.Y >= 201+radius {
			insideGoal = true
			return 2, -1
		}
		return 0, 1
	} else if ball.X > boxWidth-boundarywidth-radius-1 && ball.Xv > 0 {
		if ball.Y <= 297-radius && ball.Y >= 201+radius {
			insideGoal = true
			return 1, -1
		}
		return 0, 2
	} else if ball.Y < boundarywidth+radius && ball.Yv < 0 {
		return 0, 3
	} else if ball.Y > boxHeight-boundarywidth-radius-1 && ball.Yv > 0 {
		return 0, 4
	}
	return 0, -1
}
// OnCollisionWithWall reflects the ball off the wall identified by index
// (1/2 = vertical walls flip Xv, 3/4 = horizontal walls flip Yv) and, if a
// player collision previously boosted the speed above BallSpeedX, decays
// Xv back and resets Yv to BallSpeedY with its current sign preserved.
func OnCollisionWithWall(ball *ball, index int) {
	if index == 1 || index == 2 {
		ball.Xv = -ball.Xv
	} else if index == 3 || index == 4 {
		ball.Yv = -ball.Yv
	}
	if math.Abs(ball.Xv) > BallSpeedX {
		ball.Xv /= 2
		// Yv/|Yv| keeps the vertical direction while normalizing magnitude.
		ball.Yv = BallSpeedY * (ball.Yv / math.Abs(ball.Yv))
	}
}
// movementInsidePost governs the ball's movement inside the goal post:
// vertical velocity is zeroed so the ball travels straight into the net.
func (ball *ball) movementInsidePost() {
	ball.Yv = 0
}
package List
import (
coll "github.com/wushilin/gojava/Collection"
)
// Defines requirement for a list
// List defines the requirements for an ordered, index-addressable collection.
type List[T any] interface {
	// A List must also be a Collection.
	coll.Collection[T]
	//Add an element at specific location. It moves the element at the current location to the right
	// [1,2,3,4,5].AddAt(0, 12) => [12,1,2,3,4,5]
	AddAt(index int, element T) (added bool)
	// Add all elements at specific location, moves the current elements to the right
	AddAllAt(index int, elements coll.Collection[T]) (added int)
	// Get item at index
	Get(index int) (item T)
	// Get the first index of what using the supplied equality tester (-1 if absent)
	IndexOfFunc(what T, equalizer coll.Equalizer[T]) (index int)
	// Get the first index of what using the default equality tester (-1 if absent)
	IndexOf(what T) (index int)
	// Get the last index of what using the supplied equality tester (-1 if absent)
	LastIndexOfFunc(what T, equaliser coll.Equalizer[T]) (index int)
	// Get the last index of what using the default equality tester (-1 if absent)
	LastIndexOf(what T) (index int)
	// Set the element at index to new value newValue, returns the old value at that position
	Set(index int, newValue T) (oldValue T)
	// Remove first element that equals data with specified equalizer
	// Returns if any item was removed
	RemoveFirstFunc(data T, equals coll.Equalizer[T]) (anyItemRemoved bool)
	// Same as RemoveFirstFunc, but with default equals tester
	RemoveFirst(v T) (anyItemRemoved bool)
	// Remove the element at index and return it
	RemoveAt(index int) T
	// Make a copy of list as sublist, from fromIndexIncluded, to endIndexExcluded
	CopySubList(fromIndexIncluded int, endIndexExcluded int) (newList List[T])
	// Make a copy of the list
	Copy() (newList List[T])
	// Reverse a list and return as new list
	Reverse() (newList List[T])
}
// LinkedListOf returns a new LinkedList containing the given elements
// in order.
func LinkedListOf[T any](arg ...T) *LinkedList[T] {
	list := NewLinkedList[T]()
	coll.AddElementsTo[T](list, arg...)
	return list
}
// ArrayListOf returns a new ArrayList containing the given elements
// in order.
func ArrayListOf[T any](arg ...T) *ArrayList[T] {
	list := NewArrayList[T]()
	coll.AddElementsTo[T](list, arg...)
	return list
}
// ListEquals reports whether the two lists have the same size and
// element-wise equal contents under equalFunc.
func ListEquals[T any](list1, list2 List[T], equalFunc coll.Equalizer[T]) bool {
	if list1.Size() != list2.Size() {
		return false
	}
	iter1 := list1.Iterator()
	iter2 := list2.Iterator()
	for next, ok := iter1.Next(); ok; next, ok = iter1.Next() {
		next2, ok2 := iter2.Next()
		// Defensive: sizes matched, but guard against a shorter iterator.
		if !ok2 {
			return false
		}
		if !equalFunc(next, next2) {
			return false
		}
	}
	return true
}
// FindItem returns the index of the first element equal to data under
// equals, or -1 if no element matches.
func FindItem[T any](iter coll.Iterator[T], data T, equals coll.Equalizer[T]) int {
	return FindItemFlag(iter, data, equals, true)
}
// FindItemFlag scans iter for elements equal to data. With breakOnFirst it
// returns the first matching index; otherwise it keeps scanning and returns
// the last matching index. Returns -1 when nothing matches.
func FindItemFlag[T any](iter coll.Iterator[T], data T, equals coll.Equalizer[T], breakOnFirst bool) int {
	index := -1
	theIndex := 0
	for next, ok := iter.Next(); ok; next, ok = iter.Next() {
		if equals(next, data) {
			index = theIndex
			if breakOnFirst {
				break
			}
		}
		theIndex++
	}
	return index
}
// FindLastItem returns the index of the last element equal to data under
// equals, or -1 if no element matches.
func FindLastItem[T any](iter coll.Iterator[T], data T, equals coll.Equalizer[T]) int {
	return FindItemFlag(iter, data, equals, false)
}
// RemoveAll removes from src every occurrence of every element of
// collection, using the default equality tester, and returns the number of
// elements removed.
func RemoveAll[T any](src coll.Collection[T], collection coll.Collection[T]) int {
	return RemoveAllFunc(src, collection, coll.DefaultEqualizer[T]())
}
// RemoveAllFunc removes from src every occurrence of every element of what,
// compared with equals, and returns the number of removals. Complexity is
// O(|what| * |src|): src is re-scanned once per element of what.
func RemoveAllFunc[T any](src coll.Collection[T], what coll.Collection[T], equals coll.Equalizer[T]) int {
	count := 0
	what.ForEach(
		func(toDelete T) bool {
			iter := src.Iterator()
			for next, ok := iter.Next(); ok; next, ok = iter.Next() {
				if equals(next, toDelete) {
					iter.Remove()
					count++
				}
			}
			// Keep iterating over the remaining elements of what.
			return true
		})
	return count
}
// RetainAll removes from src every element that does not appear in what,
// using the default equality tester, and returns the number removed.
func RetainAll[T any](src coll.Collection[T], what coll.Collection[T]) int {
	return RetainAllFunc[T](src, what, coll.DefaultEqualizer[T]())
}
// RetainAllFunc removes from src every element with no equal counterpart in
// what (compared with equals) and returns the number removed. Complexity is
// O(|src| * |what|).
func RetainAllFunc[T any](src coll.Collection[T], what coll.Collection[T], equals coll.Equalizer[T]) int {
	removedCount := 0
	iter := src.Iterator()
	for next, ok := iter.Next(); ok; next, ok = iter.Next() {
		iter2 := what.Iterator()
		found := false
		for next2, ok2 := iter2.Next(); ok2; next2, ok2 = iter2.Next() {
			if equals(next, next2) {
				found = true
				break
			}
		}
		if !found {
			iter.Remove()
			removedCount++
		}
	}
	return removedCount
}
// RemoveFirstFunc removes the first element produced by iter that equals
// data under the given equality tester, and reports whether an element was
// removed.
//
// Fixed: the previous implementation was an unimplemented stub that always
// returned true without removing anything.
func RemoveFirstFunc[T any](iter coll.Iterator[T], data T, equals coll.Equalizer[T]) bool {
	for next, ok := iter.Next(); ok; next, ok = iter.Next() {
		if equals(next, data) {
			iter.Remove()
			return true
		}
	}
	return false
}
// RemoveAt advances iter to position index (0-based), removes the element
// there and returns it. Panics with "Invalid index" when the iterator is
// exhausted before reaching index (i.e. index < 0 is unreachable, but
// index >= length panics).
func RemoveAt[T any](iter coll.Iterator[T], index int) T {
	count := 0
	var result T
	found := false
	for item, ok := iter.Next(); ok; item, ok = iter.Next() {
		if count == index {
			found = true
			iter.Remove()
			result = item
			break
		}
		count++
	}
	if found {
		return result
	}
	panic("Invalid index")
}
package ecschnorr
import (
"math/big"
"github.com/emmyzkp/crypto/common"
"github.com/emmyzkp/crypto/ec"
)
// BlindedTrans represents a blinded transcript of an EC dlog-equality proof.
// Alpha and Beta are the blinded commitment points given by their affine
// coordinates, Hash binds them, and ZAlpha is the blinded response z+alpha.
type BlindedTrans struct {
	Alpha_1 *big.Int // x coordinate of the blinded commitment Alpha
	Alpha_2 *big.Int // y coordinate of the blinded commitment Alpha
	Beta_1 *big.Int // x coordinate of the blinded commitment Beta
	Beta_2 *big.Int // y coordinate of the blinded commitment Beta
	Hash *big.Int // hash(Alpha_1, Alpha_2, Beta_1, Beta_2)
	ZAlpha *big.Int // blinded response z + alpha
}
// NewBlindedTrans constructs a BlindedTrans from the blinded commitment
// coordinates, the binding hash and the blinded response.
func NewBlindedTrans(alpha_1, alpha_2, beta_1, beta_2, hash, zAlpha *big.Int) *BlindedTrans {
	return &BlindedTrans{
		Alpha_1: alpha_1,
		Alpha_2: alpha_2,
		Beta_1: beta_1,
		Beta_2: beta_2,
		Hash: hash,
		ZAlpha: zAlpha,
	}
}
// Verify checks that the blinded transcript is valid. That means the knowledge
// of log_g1(t1), log_G2(T2) and log_g1(t1) = log_G2(T2). Note that G2 = g2^gamma,
// T2 = t2^gamma where gamma was chosen by the verifier.
func (t *BlindedTrans) Verify(curve ec.Curve, g1, t1, G2, T2 *ec.GroupElement) bool {
	group := ec.NewGroup(curve)
	// Recompute and check the binding hash over the commitment coordinates.
	hashNum := common.Hash(t.Alpha_1, t.Alpha_2,
		t.Beta_1, t.Beta_2)
	if hashNum.Cmp(t.Hash) != 0 {
		return false
	}
	// We need to verify (note that c-beta = hash(alpha11, alpha12, beta11, beta12))
	// g1^(z+alpha) = (alpha11, alpha12) * t1^(c-beta)
	// G2^(z+alpha) = (beta11, beta12) * T2^(c-beta)
	left1 := group.Exp(g1, t.ZAlpha)
	right1 := group.Exp(t1, t.Hash)
	Alpha := ec.NewGroupElement(t.Alpha_1, t.Alpha_2)
	right1 = group.Mul(Alpha, right1)
	left2 := group.Exp(G2, t.ZAlpha)
	right2 := group.Exp(T2, t.Hash)
	Beta := ec.NewGroupElement(t.Beta_1, t.Beta_2)
	right2 = group.Mul(Beta, right2)
	// Both verification equations must hold.
	return left1.Equals(right1) && left2.Equals(right2)
}
// BTEqualityProver is the prover side of the blinded-transcript
// dlog-equality protocol over an elliptic curve group.
type BTEqualityProver struct {
	Group *ec.Group // the EC group the proof runs in
	r *big.Int // commitment randomness chosen in GetProofRandomData
	secret *big.Int // the discrete log being proven
	g1 *ec.GroupElement // first base
	g2 *ec.GroupElement // second base
}
// NewBTEqualityProver returns a prover bound to the group of the given curve.
// The secret and bases are supplied later via GetProofRandomData.
func NewBTEqualityProver(curve ec.Curve) *BTEqualityProver {
	group := ec.NewGroup(curve)
	prover := BTEqualityProver{
		Group: group,
	}
	return &prover
}
// GetProofRandomData starts a proof that the prover knows dlog_g1(h1),
// dlog_g2(h2) and that dlog_g1(h1) = dlog_g2(h2).
// It stores the secret and bases, draws fresh randomness r from [0, Q) and
// returns the commitments (g1^r, g2^r).
func (p *BTEqualityProver) GetProofRandomData(secret *big.Int,
	g1, g2 *ec.GroupElement) (*ec.GroupElement, *ec.GroupElement) {
	// Set the values that are needed before the protocol can be run.
	// The protocol proves the knowledge of log_g1(t1), log_g2(t2) and
	// that log_g1(t1) = log_g2(t2).
	p.secret = secret
	p.g1 = g1
	p.g2 = g2
	r := common.GetRandomInt(p.Group.Q)
	p.r = r
	a := p.Group.Exp(p.g1, r)
	b := p.Group.Exp(p.g2, r)
	return a, b
}
// GetProofData returns the response z = r + challenge * secret (mod Q) for
// the verifier's challenge.
func (p *BTEqualityProver) GetProofData(challenge *big.Int) *big.Int {
	// z = r + challenge * secret
	z := new(big.Int)
	z.Mul(challenge, p.secret)
	z.Add(z, p.r)
	z.Mod(z, p.Group.Q)
	return z
}
// BTEqualityVerifier is the verifier side of the blinded-transcript
// dlog-equality protocol. It accumulates the protocol state across
// GetChallenge and Verify, and produces a blinded transcript that third
// parties can check without learning the verifier's view.
type BTEqualityVerifier struct {
	Group *ec.Group // the EC group the proof runs in
	gamma *big.Int // verifier's blinding exponent for g2/t2
	challenge *big.Int // challenge sent to the prover
	g1 *ec.GroupElement // first base
	g2 *ec.GroupElement // second base
	x1 *ec.GroupElement // prover's commitment g1^r
	x2 *ec.GroupElement // prover's commitment g2^r
	t1 *ec.GroupElement // g1^secret
	t2 *ec.GroupElement // g2^secret
	alpha *big.Int // blinding scalar folded into the response
	transcript *BlindedTrans // transcript under construction
}
// NewBTEqualityVerifier returns a verifier bound to the group of the given
// curve. If gamma is nil, a fresh random blinding exponent from [0, Q) is
// drawn.
func NewBTEqualityVerifier(curve ec.Curve,
	gamma *big.Int) *BTEqualityVerifier {
	group := ec.NewGroup(curve)
	if gamma == nil {
		gamma = common.GetRandomInt(group.Q)
	}
	verifier := BTEqualityVerifier{
		Group: group,
		gamma: gamma,
	}
	return &verifier
}
// GetChallenge stores the proof instance (bases, public values and the
// prover's commitments), draws blinding scalars alpha and beta, builds the
// blinded commitments alpha1/beta1, and returns the challenge
// c = hash(alpha1, beta1) + beta mod Q. The partially filled transcript
// (ZAlpha still nil) is kept for Verify.
func (v *BTEqualityVerifier) GetChallenge(g1, g2, t1, t2, x1,
	x2 *ec.GroupElement) *big.Int {
	// Set the values that are needed before the protocol can be run.
	// The protocol proves the knowledge of log_g1(t1), log_g2(t2) and
	// that log_g1(t1) = log_g2(t2).
	v.g1 = g1
	v.g2 = g2
	v.t1 = t1
	v.t2 = t2
	// Set the values g1^r1 and g2^r2.
	v.x1 = x1
	v.x2 = x2
	alpha := common.GetRandomInt(v.Group.Q)
	beta := common.GetRandomInt(v.Group.Q)
	// alpha1 = g1^r * g1^alpha * t1^beta
	// beta1 = (g2^r * g2^alpha * t2^beta)^gamma
	alpha1 := v.Group.Exp(v.g1, alpha)
	alpha1 = v.Group.Mul(v.x1, alpha1)
	tmp := v.Group.Exp(v.t1, beta)
	alpha1 = v.Group.Mul(alpha1, tmp)
	beta1 := v.Group.Exp(v.g2, alpha)
	beta1 = v.Group.Mul(v.x2, beta1)
	tmp = v.Group.Exp(v.t2, beta)
	beta1 = v.Group.Mul(beta1, tmp)
	beta1 = v.Group.Exp(beta1, v.gamma)
	// c = hash(alpha1, beta1) + beta mod q
	hashNum := common.Hash(alpha1.X, alpha1.Y, beta1.X, beta1.Y)
	challenge := new(big.Int).Add(hashNum, beta)
	challenge.Mod(challenge, v.Group.Q)
	v.challenge = challenge
	v.transcript = NewBlindedTrans(alpha1.X, alpha1.Y, beta1.X, beta1.Y, hashNum, nil)
	v.alpha = alpha
	return challenge
}
// Verify receives z = r + secret * challenge.
// It returns true if g1^z = g1^r * (g1^secret)^challenge and
// g2^z = g2^r * (g2^secret)^challenge, together with the completed blinded
// transcript and the blinded values G2 = g2^gamma and T2 = t2^gamma that a
// third party needs to check the transcript. On failure the last three
// return values are nil.
func (v *BTEqualityVerifier) Verify(z *big.Int) (bool, *BlindedTrans,
	*ec.GroupElement, *ec.GroupElement) {
	left1 := v.Group.Exp(v.g1, z)
	left2 := v.Group.Exp(v.g2, z)
	r1 := v.Group.Exp(v.t1, v.challenge)
	r2 := v.Group.Exp(v.t2, v.challenge)
	right1 := v.Group.Mul(r1, v.x1)
	right2 := v.Group.Mul(r2, v.x2)
	// transcript [(alpha11, alpha12, beta11, beta12), hash(alpha11, alpha12, beta11, beta12), z+alpha]
	// however, we are actually returning:
	// [alpha11, alpha12, beta11, beta12, hash(alpha11, alpha12, beta11, beta12), z+alpha]
	z1 := new(big.Int).Add(z, v.alpha)
	v.transcript.ZAlpha = z1
	G2 := v.Group.Exp(v.g2, v.gamma)
	T2 := v.Group.Exp(v.t2, v.gamma)
	if left1.Equals(right1) && left2.Equals(right2) {
		return true, v.transcript, G2, T2
	} else {
		return false, nil, nil, nil
	}
}
package faceutil
import (
"image"
"image/color"
"image/draw"
"math"
)
// ByCenterY implements sort.Interface, ordering rectangles by the Y
// coordinate of their center points (topmost first).
type ByCenterY []image.Rectangle

// Len returns the number of rectangles.
func (b ByCenterY) Len() int {
	return len(b)
}

// Swap exchanges the rectangles at indices i and j.
func (b ByCenterY) Swap(i, j int) {
	b[i], b[j] = b[j], b[i]
}

// Less reports whether rectangle i's center lies above rectangle j's.
func (b ByCenterY) Less(i, j int) bool {
	var (
		p1 = getRectCenter(b[i])
		p2 = getRectCenter(b[j])
	)
	return p1.Y < p2.Y
}
func addRectPadding(pct float64, rect image.Rectangle, bounds image.Rectangle) image.Rectangle {
var (
width = float64(rect.Dx())
height = float64(rect.Dy())
minScale = 0.1
widthScale = math.Max(1.0-(height/float64(bounds.Dx()))-0.3, minScale)
heightScale = math.Max(1.0-(width/float64(bounds.Dy()))-0.3, minScale)
widthPadding = int(widthScale * pct * (width / 100) / 2)
heightPadding = int(heightScale * pct * (height / 100) / 2)
)
return image.Rect(
rect.Min.X-widthPadding,
rect.Min.Y-heightPadding*3,
rect.Max.X+widthPadding,
rect.Max.Y+heightPadding,
)
}
func canvasFromImage(i image.Image) *image.NRGBA {
bounds := i.Bounds()
canvas := image.NewNRGBA(bounds)
draw.Draw(canvas, bounds, i, bounds.Min, draw.Src)
return canvas
}
func drawRect(img *image.NRGBA, rect image.Rectangle, c color.Color) {
var (
x1 = rect.Min.X
x2 = rect.Max.X
y1 = rect.Min.Y
y2 = rect.Max.Y
thickness = 2
)
for t := 0; t < thickness; t++ {
// draw horizontal lines
for x := x1; x <= x2; x++ {
img.Set(x, y1+t, c)
img.Set(x, y2-t, c)
}
// draw vertical lines
for y := y1; y <= y2; y++ {
img.Set(x1+t, y, c)
img.Set(x2-t, y, c)
}
}
}
func getRectCenter(rect image.Rectangle) image.Point {
return image.Point{
X: rect.Min.X + rect.Dx()/2,
Y: rect.Min.Y + rect.Dy()/2,
}
}
func getRectCenteredIn(child, parent image.Rectangle) image.Rectangle {
var (
center = getRectCenter(parent)
halfX = child.Dx() / 2
halfY = child.Dy() / 2
)
return image.Rectangle{
Min: image.Point{
X: center.X - halfX,
Y: center.Y - halfY,
},
Max: image.Point{
X: center.X + halfX,
Y: center.Y + halfY,
},
}
} | faceutil/utils.go | 0.707809 | 0.422505 | utils.go | starcoder |
package binpack
// Axis identifies one of the three spatial axes along which a Box can be cut.
type Axis int

const (
	AxisX Axis = iota
	AxisY
	AxisZ
)

// Item is a thing to be packed: an identifier, the score gained by placing
// one copy of it, and its dimensions.
type Item struct {
	ID    int
	Score int
	Size  Vector
}

// Box is an axis-aligned cuboid region, described by its origin corner and
// its extent along each axis.
type Box struct {
	Origin Vector
	Size   Vector
}
// Cut splits box in two along the given axis. The first returned box keeps
// the origin and has extent offset along that axis; the second covers the
// remainder beyond the cut.
func (box Box) Cut(axis Axis, offset int) (Box, Box) {
	near, far := box, box
	switch axis {
	case AxisX:
		near.Size.X = offset
		far.Size.X -= offset
		far.Origin.X += offset
	case AxisY:
		near.Size.Y = offset
		far.Size.Y -= offset
		far.Origin.Y += offset
	case AxisZ:
		near.Size.Z = offset
		far.Size.Z -= offset
		far.Origin.Z += offset
	}
	return near, far
}
// Cuts carves an s1×s2×s3 corner out of box by cutting along a1, a2 and a3
// in turn (each cut applied to the near slab of the previous one) and
// returns the three leftover regions; the item-sized corner itself is
// discarded.
func (box Box) Cuts(a1, a2, a3 Axis, s1, s2, s3 int) (Box, Box, Box) {
	slab, b1 := box.Cut(a1, s1)
	slab, b2 := slab.Cut(a2, s2)
	_, b3 := slab.Cut(a3, s3)
	return b1, b2, b3
}
// Placement records one packed item and the position of its origin corner.
type Placement struct {
	Item     Item
	Position Vector
}

// Result is the outcome of a packing search: the total score achieved and
// the placements that achieve it.
type Result struct {
	Score      int
	Placements []Placement
}
// MakeResult merges three sub-results with the placement of item at position
// into a single Result, summing scores and concatenating placements in
// order r0, r1, r2, item.
func MakeResult(r0, r1, r2 Result, item Item, position Vector) Result {
	self := Result{
		Score:      item.Score,
		Placements: []Placement{{Item: item, Position: position}},
	}
	parts := [4]Result{r0, r1, r2, self}
	total, count := 0, 0
	for _, p := range parts {
		total += p.Score
		count += len(p.Placements)
	}
	merged := make([]Placement, 0, count)
	for _, p := range parts {
		merged = append(merged, p.Placements...)
	}
	return Result{Score: total, Placements: merged}
}
// Translate returns a copy of result with every placement position shifted
// by offset; the receiver's placements are left untouched.
func (result Result) Translate(offset Vector) Result {
	shifted := make([]Placement, len(result.Placements))
	for i := range result.Placements {
		shifted[i] = result.Placements[i]
		shifted[i].Position = shifted[i].Position.Add(offset)
	}
	return Result{Score: result.Score, Placements: shifted}
}
// Pack computes a high-scoring placement of items inside box using a
// memoized guillotine-cut search (see pack). It returns the zero Result
// when items is empty.
func Pack(items []Item, box Box) Result {
	// Guard: the original indexed items[0] unconditionally and panicked on
	// an empty slice; with no items there is nothing to pack.
	if len(items) == 0 {
		return Result{}
	}
	hash := NewSpatialHash(1000)
	// minVolume is the component-wise minimum of the sorted item extents;
	// it lets pack prune boxes too small to hold any item at all.
	minVolume := items[0].Size.Sort()
	for _, item := range items[1:] {
		minVolume = minVolume.Min(item.Size.Sort())
	}
	return pack(items, box, hash, minVolume)
}
// pack recursively chooses the best-scoring item and cut order to place in
// the corner of box, memoizing results by box size in hash.
//
// NOTE(review): every recursion considers every item again, so each item may
// be placed any number of times — confirm this multiset semantics is the
// intended behavior.
func pack(items []Item, box Box, hash *SpatialHash, minVolume Vector) Result {
	bs := box.Size
	// Prune: a box whose sorted extents cannot contain even the smallest
	// item can hold nothing.
	if !bs.Sort().Fits(minVolume) {
		return Result{}
	}
	// Memoization is keyed on box size only; cached results are stored in
	// box-local coordinates (see the Translate calls below).
	if result, ok := hash.Get(bs); ok {
		return result
	}
	best := Result{}
	for _, item := range items {
		s := item.Size
		if s.X > bs.X || s.Y > bs.Y || s.Z > bs.Z {
			continue
		}
		// Try all six axis orders of guillotine cuts around the item placed
		// in the box corner; each order yields three leftover sub-boxes.
		var b [6][3]Box
		b[0][0], b[0][1], b[0][2] = box.Cuts(AxisX, AxisY, AxisZ, s.X, s.Y, s.Z)
		b[1][0], b[1][1], b[1][2] = box.Cuts(AxisX, AxisZ, AxisY, s.X, s.Z, s.Y)
		b[2][0], b[2][1], b[2][2] = box.Cuts(AxisY, AxisX, AxisZ, s.Y, s.X, s.Z)
		b[3][0], b[3][1], b[3][2] = box.Cuts(AxisY, AxisZ, AxisX, s.Y, s.Z, s.X)
		b[4][0], b[4][1], b[4][2] = box.Cuts(AxisZ, AxisX, AxisY, s.Z, s.X, s.Y)
		b[5][0], b[5][1], b[5][2] = box.Cuts(AxisZ, AxisY, AxisX, s.Z, s.Y, s.X)
		for i := 0; i < 6; i++ {
			var r [3]Result
			score := item.Score
			for j := 0; j < 3; j++ {
				r[j] = pack(items, b[i][j], hash, minVolume)
				score += r[j].Score
			}
			if score > best.Score {
				// Sub-results are box-local; shift them into this box's
				// coordinates before combining.
				for j := 0; j < 3; j++ {
					r[j] = r[j].Translate(b[i][j].Origin)
				}
				best = MakeResult(r[0], r[1], r[2], item, box.Origin)
			}
		}
	}
	// Store the result relative to the box origin so it can be reused for
	// any box of the same size elsewhere in space.
	best = best.Translate(box.Origin.Negate())
	// size is the bounding extent actually used by the placements.
	var size Vector
	for _, p := range best.Placements {
		size = size.Max(p.Position.Add(p.Item.Size))
	}
	hash.Add(size, bs, best)
	return best
}
package commands
import (
"f1-discord-bot/ergast"
"fmt"
)
// Results performs the actions for the "results" command sent to the bot.
// It dispatches on the first argument ("circuit" or "driver"), expecting
// exactly one further argument (the entity ID).
func Results(args ...string) (string, error) {
	if len(args) < 1 {
		return "", fmt.Errorf("command 'results' needs more arguments")
	}
	switch args[0] {
	case "circuit":
		if len(args) < 2 {
			return "", fmt.Errorf("command 'results circuit' needs a circuitID as an argument")
		}
		if len(args) > 2 {
			return "", fmt.Errorf("invalid number of arguments for the command 'results'")
		}
		message, err := CircuitResults(args[1], 10)
		if err != nil {
			return "", fmt.Errorf("getting circuit results: %v", err)
		}
		return message, nil
	case "driver":
		if len(args) < 2 {
			return "", fmt.Errorf("command 'results driver' needs a driverID as an argument")
		}
		if len(args) > 2 {
			return "", fmt.Errorf("invalid number of arguments for the command 'results'")
		}
		message, err := DriverResults(args[1], 10)
		if err != nil {
			return "", fmt.Errorf("getting driver results: %v", err)
		}
		return message, nil
	default:
		return "", fmt.Errorf("subcommand '%s' of 'results' not recognized or not yet implemented", args[0])
	}
}
// CircuitResults performs the actions for the "results circuit <circuitID>"
// command: it validates the circuit ID (suggesting the closest known ID on a
// typo) and renders a table of the winners of the last n races there.
func CircuitResults(circuitID string, n int) (string, error) {
	// Get the list of known circuits.
	circuitTable, err := ergast.Circuits()
	if err != nil {
		return "", fmt.Errorf("getting list of circuits from ergast: %v", err)
	}
	if !circuitTable.HasCircuit(circuitID) {
		// The circuit requested was not found in the list of circuits.
		// Compute the Levenshtein distances between the given argument and
		// all circuit IDs to suggest the one the user probably meant.
		var lds LevenshteinDistances
		for _, circuit := range circuitTable.Circuits {
			lds = append(lds, LevenshteinDistance{Str1: circuitID, Str2: circuit.CircuitID})
		}
		lds.ComputeAll()
		lds.SortByDistance()
		return fmt.Sprintf("**UPS!**\nNo circuit with id '%s' was found.\nDid you mean?\n\t- %s", circuitID, lds[0].Str2), nil
	}
	// Get circuit results from the API.
	raceTable, err := ergast.RequestCircuitResults(circuitID)
	if err != nil {
		return "", fmt.Errorf("requesting circuit results to ergast: %v", err)
	}
	// Keep only the last n races (or all of them if fewer are available).
	nRaces := len(raceTable.Races)
	if nRaces < n {
		n = nRaces
	}
	races := raceTable.Races[(nRaces - n):]
	// NOTE(review): races[0] panics if the API returns zero races for a
	// known circuit — confirm that cannot happen.
	circuit := races[0].Circuit
	// Build the tabular message, newest race first.
	var m TabularMessage
	m.Header = fmt.Sprintf("WINNERS IN THE LAST %d RACES AT %s", n, circuit.CircuitName)
	m.SetTableHeader("Year", "Driver", "Constructor", "Time", "Laps")
	for i := len(races) - 1; i >= 0; i-- {
		race := races[i]
		m.AddRow(race.Season,
			race.Results[0].Driver.FullName(),
			race.Results[0].Constructor.Name,
			race.Results[0].Time.Time,
			race.Results[0].Laps)
	}
	return m.String(), nil
}
// DriverResults performs the actions for the "results driver <driverID>" command sent to the bot
func DriverResults(driverID string, n int) (string, error) {
// Get circuits
driverTable, err := ergast.Drivers()
if err != nil {
return "", fmt.Errorf("getting list of circuits from ergast: %v", err)
}
if !driverTable.HasDriver(driverID) {
var lds LevenshteinDistances
for _, driver := range driverTable.Drivers {
lds = append(lds, LevenshteinDistance{Str1: driverID, Str2: driver.DriverID})
}
lds.ComputeAll()
lds.SortByDistance()
return fmt.Sprintf("**UPS!**\nNo driver with id '%s' was found.\nDid you mean?\n\t- %s", driverID, lds[0].Str2), nil
}
// Get driver results from the API
raceTable, err := ergast.RequestDriverResults(driverID)
if err != nil {
return "", fmt.Errorf("requesting circuit results to ergast: %v", err)
}
// Trim the first races
nRaces := len(raceTable.Races)
if nRaces < n {
n = nRaces
}
races := raceTable.Races[(nRaces - n):]
driver := races[0].Results[0].Driver
// Build message
var m TabularMessage
m.Header = fmt.Sprintf("LAST %d RACE RESULTS FOR %s", n, driver.FullName())
m.SetTableHeader("Year", "GP", "Pos.", "Grid", "Constructor", "Time (ms)", "Laps", "Status")
for i := len(races) - 1; i >= 0; i-- {
race := races[i]
m.AddRow(race.Season,
race.RaceName,
race.Results[0].Position,
race.Results[0].Grid,
race.Results[0].Constructor.Name,
race.Results[0].Time.Millis,
race.Results[0].Laps,
race.Results[0].Status)
}
return m.String(), nil
} | commands/results.go | 0.688573 | 0.403009 | results.go | starcoder |
package biome
import (
"image"
"image/color"
"log"
"math"
)
// Gray converts imgSrc to a grayscale RGBA image (alpha 255), using the
// gamma-aware luminance computed by grayRGB.
func Gray(imgSrc image.Image) image.Image {
	w, h := imgSrc.Bounds().Dx(), imgSrc.Bounds().Dy()
	out := image.NewRGBA(image.Rect(0, 0, w, h))
	for y := 0; y < h; y++ {
		for x := 0; x < w; x++ {
			r, g, b, _ := imgSrc.At(x, y).RGBA()
			v := grayRGB(r, g, b)
			out.Set(x, y, color.RGBA{R: v, G: v, B: v, A: 255})
		}
	}
	return out
}
// Shade applies a num×num box blur (see calc) to imgSrc, processing each
// scanline in its own goroutine and waiting for all of them to finish.
func Shade(num int, imgSrc image.Image) image.Image {
	w, h := imgSrc.Bounds().Dx(), imgSrc.Bounds().Dy()
	out := image.NewRGBA(image.Rect(0, 0, w, h))
	done := make(chan bool, h)
	for row := 0; row < h; row++ {
		go func(y int) {
			for x := 0; x < w; x++ {
				r, g, b := calc(x, y, num, w, h, imgSrc)
				// Each goroutine writes only its own scanline, so the
				// concurrent Set calls never touch the same pixel.
				out.Set(x, y, color.RGBA{R: r, G: g, B: b, A: 255})
			}
			done <- true
		}(row)
	}
	for i := 0; i < h; i++ {
		<-done
	}
	return out
}
// calc returns the average R, G and B of the pixels around (x, y) within a
// num-wide neighborhood clamped to the image, scaled to uint8 by F2uint8.
//
// NOTE(review): the loops run while i < MinInt(w-1, x+n), so the neighbors
// at exactly x+n / y+n and the image's last column and row are never
// sampled — confirm whether the bounds were meant to be inclusive.
func calc(x, y, num, w, h int, imgSrc image.Image) (rr, gg, bb uint8) {
	// Reject kernels that cannot form a neighborhood. log.Fatal terminates
	// the whole process (message is Japanese: "use an odd number greater
	// than 1") — note this fires inside Shade's worker goroutines.
	if num < 2 {
		log.Fatal("1より大きい奇数を入れてね")
	}
	n := num / 2 // kernel radius
	var count float64
	var r1, g1, b1 uint32
	for i := MaxInt(0, x-n); i < MinInt(w-1, x+n); i++ {
		for j := MaxInt(0, y-n); j < MinInt(h-1, y+n); j++ {
			r, g, b, _ := imgSrc.At(i, j).RGBA()
			r1 += r
			g1 += g
			b1 += b
			count++
		}
	}
	rr = F2uint8(float64(r1) / count)
	gg = F2uint8(float64(g1) / count)
	bb = F2uint8(float64(b1) / count)
	return
}
// Sepia converts imgSrc to a sepia-toned RGBA image (alpha 255) using the
// per-pixel mapping in sepiaRGB.
func Sepia(imgSrc image.Image) image.Image {
	w, h := imgSrc.Bounds().Dx(), imgSrc.Bounds().Dy()
	out := image.NewRGBA(image.Rect(0, 0, w, h))
	for y := 0; y < h; y++ {
		for x := 0; x < w; x++ {
			sr, sg, sb, _ := imgSrc.At(x, y).RGBA()
			r, g, b := sepiaRGB(sr, sg, sb)
			out.Set(x, y, color.RGBA{R: r, G: g, B: b, A: 255})
		}
	}
	return out
}
func grayRGB(rr, gg, bb uint32) uint8 {
m := cieXYZ(rr, gg, bb)
return F2uint8(m)
}
func cieXYZ(rr, gg, bb uint32) float64 {
r := math.Pow(float64(rr), 2.2)
g := math.Pow(float64(gg), 2.2)
b := math.Pow(float64(bb), 2.2)
return math.Pow(0.2125*r+0.7154*g+0.0721*b, 1/2.2)
}
func sepiaRGB(rr, gg, bb uint32) (r, g, b uint8) {
m := cieXYZ(rr, gg, bb)
r = F2uint8(m * 107 / 107)
g = F2uint8(m * 74 / 107)
b = F2uint8(m * 43 / 107)
return
} | imgproc.go | 0.543348 | 0.433622 | imgproc.go | starcoder |
package avro
import (
"fmt"
"unsafe"
)
// createSkipDecoder returns a ValDecoder that reads and discards a value of
// the given schema from the input without materializing it. Ref schemas are
// unwrapped to their target; unsupported schema types yield an errorDecoder
// that reports the problem when used.
func createSkipDecoder(schema Schema) ValDecoder {
	switch schema.Type() {
	case Boolean:
		return &boolSkipDecoder{}
	case Int:
		return &intSkipDecoder{}
	case Long:
		return &longSkipDecoder{}
	case Float:
		return &floatSkipDecoder{}
	case Double:
		return &doubleSkipDecoder{}
	case String:
		return &stringSkipDecoder{}
	case Bytes:
		return &bytesSkipDecoder{}
	case Record:
		return skipDecoderOfRecord(schema)
	case Ref:
		// Skip according to the schema the reference resolves to.
		return createSkipDecoder(schema.(*RefSchema).Schema())
	case Enum:
		return &enumSkipDecoder{symbols: schema.(*EnumSchema).Symbols()}
	case Array:
		return skipDecoderOfArray(schema)
	case Map:
		return skipDecoderOfMap(schema)
	case Union:
		return skipDecoderOfUnion(schema)
	case Fixed:
		// A fixed value occupies exactly Size bytes.
		return &fixedSkipDecoder{size: schema.(*FixedSchema).Size()}
	default:
		return &errorDecoder{err: fmt.Errorf("avro: schema type %s is unsupported", schema.Type())}
	}
}
// boolSkipDecoder discards one encoded boolean.
type boolSkipDecoder struct{}

func (*boolSkipDecoder) Decode(_ unsafe.Pointer, r *Reader) {
	r.SkipBool()
}

// intSkipDecoder discards one encoded int.
type intSkipDecoder struct{}

func (*intSkipDecoder) Decode(_ unsafe.Pointer, r *Reader) {
	r.SkipInt()
}

// longSkipDecoder discards one encoded long.
type longSkipDecoder struct{}

func (*longSkipDecoder) Decode(_ unsafe.Pointer, r *Reader) {
	r.SkipLong()
}

// floatSkipDecoder discards one encoded float.
type floatSkipDecoder struct{}

func (*floatSkipDecoder) Decode(_ unsafe.Pointer, r *Reader) {
	r.SkipFloat()
}

// doubleSkipDecoder discards one encoded double.
type doubleSkipDecoder struct{}

func (*doubleSkipDecoder) Decode(_ unsafe.Pointer, r *Reader) {
	r.SkipDouble()
}

// stringSkipDecoder discards one encoded string.
type stringSkipDecoder struct{}

func (*stringSkipDecoder) Decode(_ unsafe.Pointer, r *Reader) {
	r.SkipString()
}

// bytesSkipDecoder discards one encoded bytes value.
type bytesSkipDecoder struct{}

func (c *bytesSkipDecoder) Decode(_ unsafe.Pointer, r *Reader) {
	r.SkipBytes()
}
// skipDecoderOfRecord builds a skip decoder for a record schema by composing
// one skip decoder per field, in field order.
func skipDecoderOfRecord(schema Schema) ValDecoder {
	rec := schema.(*RecordSchema)
	decoders := make([]ValDecoder, len(rec.Fields()))
	for i, field := range rec.Fields() {
		decoders[i] = createSkipDecoder(field.Type())
	}
	return &recordSkipDecoder{
		decoders: decoders,
	}
}

// recordSkipDecoder discards a record by skipping each field in sequence.
type recordSkipDecoder struct {
	decoders []ValDecoder
}

func (d *recordSkipDecoder) Decode(_ unsafe.Pointer, r *Reader) {
	for _, decoder := range d.decoders {
		decoder.Decode(nil, r)
	}
}
// enumSkipDecoder discards an enum value. An enum is encoded as the int
// index of its symbol, so skipping reads one int.
// NOTE(review): symbols is stored but never read here — the index is not
// validated against the symbol list when skipping.
type enumSkipDecoder struct {
	symbols []string
}

func (c *enumSkipDecoder) Decode(_ unsafe.Pointer, r *Reader) {
	r.SkipInt()
}
// skipDecoderOfArray builds a skip decoder for an array schema, using the
// item schema's skip decoder for each element.
func skipDecoderOfArray(schema Schema) ValDecoder {
	arr := schema.(*ArraySchema)
	decoder := createSkipDecoder(arr.Items())
	return &sliceSkipDecoder{
		decoder: decoder,
	}
}

// sliceSkipDecoder discards an encoded array block by block.
type sliceSkipDecoder struct {
	decoder ValDecoder
}

func (d *sliceSkipDecoder) Decode(_ unsafe.Pointer, r *Reader) {
	for {
		// Each block starts with an item count; a count of 0 ends the array.
		l, size := r.ReadBlockHeader()
		if l == 0 {
			break
		}
		// When the writer recorded the block's byte size, skip the whole
		// block at once instead of decoding item by item.
		if size > 0 {
			r.SkipNBytes(int(size))
			continue
		}
		for i := 0; i < int(l); i++ {
			d.decoder.Decode(nil, r)
		}
	}
}
// skipDecoderOfMap builds a skip decoder for a map schema, using the value
// schema's skip decoder for each entry.
func skipDecoderOfMap(schema Schema) ValDecoder {
	m := schema.(*MapSchema)
	decoder := createSkipDecoder(m.Values())
	return &mapSkipDecoder{
		decoder: decoder,
	}
}

// mapSkipDecoder discards an encoded map block by block; each entry is a
// string key followed by a value.
type mapSkipDecoder struct {
	decoder ValDecoder
}

func (d *mapSkipDecoder) Decode(_ unsafe.Pointer, r *Reader) {
	for {
		// Each block starts with an entry count; a count of 0 ends the map.
		l, size := r.ReadBlockHeader()
		if l == 0 {
			break
		}
		// When the writer recorded the block's byte size, skip the whole
		// block at once instead of decoding entry by entry.
		if size > 0 {
			r.SkipNBytes(int(size))
			continue
		}
		for i := 0; i < int(l); i++ {
			r.SkipString()
			d.decoder.Decode(nil, r)
		}
	}
}
// skipDecoderOfUnion builds a skip decoder for a union schema.
func skipDecoderOfUnion(schema Schema) ValDecoder {
	union := schema.(*UnionSchema)
	return &unionSkipDecoder{
		schema: union,
	}
}

// unionSkipDecoder discards a union value: the branch is resolved from the
// encoded index, then the value is skipped using that branch's schema.
type unionSkipDecoder struct {
	schema *UnionSchema
}

func (d *unionSkipDecoder) Decode(ptr unsafe.Pointer, r *Reader) {
	_, resSchema := getUnionSchema(d.schema, r)
	if resSchema == nil {
		return
	}
	// In a null case, just return
	if resSchema.Type() == Null {
		return
	}
	// NOTE(review): a fresh skip decoder is built on every call; caching
	// per-branch decoders would avoid the repeated allocation.
	createSkipDecoder(resSchema).Decode(nil, r)
}
// fixedSkipDecoder discards a fixed value, which always occupies exactly
// size bytes in the encoding.
type fixedSkipDecoder struct {
	size int
}

func (d *fixedSkipDecoder) Decode(_ unsafe.Pointer, r *Reader) {
	r.SkipNBytes(d.size)
}
package main
var schemas = `
{
"API": {
"createAsset": {
"description": "Create an asset. One argument, a JSON encoded event. AssetID is required with zero or more writable properties. Establishes an initial asset state.",
"properties": {
"args": {
"description": "args are JSON encoded strings",
"items": {
"description": "The set of writable properties that define an asset's state. For asset creation, the only mandatory property is the 'assetID'. Updates should include at least one other writable property. This exemplifies the IoT contract pattern 'partial state as event'.",
"properties": {
"allottedCredits": {
"description": "defines how much a company can spend",
"type": "number"
},
"assetID": {
"description": "The ID of a managed asset. The resource focal point for a smart contract.",
"type": "string"
},
"boughtCredits": {
"description": "Total number of credits bought from other companies",
"type": "number"
},
"creditsForSale": {
"description": "Total credits which are going to be put on sale by a company",
"type": "number"
},
"creditsRequestBuy": {
"description": "Total credits requested to buy from the market",
"type": "number"
},
"email": {
"description": "Contact information of the company will be stored here",
"type": "string"
},
"extension": {
"description": "Application-managed state. Opaque to contract.",
"properties": {},
"type": "object"
},
"iconUrl": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"location": {
"description": "A geographical coordinate",
"properties": {
"latitude": {
"type": "number"
},
"longitude": {
"type": "number"
}
},
"type": "object"
},
"notificationRead": {
"description": "Value will be true if company saw their weather notification, false otherwise",
"type": "boolean"
},
"phoneNum": {
"description": "Contact information of the company will be stored here",
"type": "string"
},
"precipitation": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"pricePerCredit": {
"description": "Price set for every credit that is put on sale",
"type": "number"
},
"priceRequestBuy": {
"description": "Price put per credit requested to buy from the market",
"type": "number"
},
"reading": {
"description": "defines one reading for a sensor",
"type": "number"
},
"sensorID": {
"description": "defines one sensor in a company",
"type": "number"
},
"sensorlocation": {
"description": "A geographical coordinate",
"properties": {
"latitude": {
"type": "number"
},
"longitude": {
"type": "number"
}
},
"type": "object"
},
"soldCredits": {
"description": "Total number of credits sold to other companies",
"type": "number"
},
"temperatureCelsius": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"temperatureFahrenheit": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"threshold": {
"description": "limit on credit consumption before it alerts",
"type": "number"
},
"timestamp": {
"description": "RFC3339nanos formatted timestamp.",
"type": "string"
},
"tradeBuySell": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"tradeCompany": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"tradeCredits": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"tradePrice": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"tradeTimestamp": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"updateBuyCredits": {
"description": "Credits that need to be updated in buy list",
"type": "number"
},
"updateBuyIndex": {
"description": "Index of the buy list array that needs to be updated",
"type": "number"
},
"updateSellCredits": {
"description": "Credits that need to be updated in sell list",
"type": "number"
},
"updateSellIndex": {
"description": "Index of the sell list array that needs to be updated",
"type": "number"
},
"windDegrees": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"windGustSpeed": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"windSpeed": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
}
},
"required": [
"assetID"
],
"type": "object"
},
"maxItems": 1,
"minItems": 1,
"type": "array"
},
"function": {
"description": "createAsset function",
"enum": [
"createAsset"
],
"type": "string"
},
"method": "invoke"
},
"type": "object"
},
"deleteAllAssets": {
"description": "Delete the state of all assets. No arguments are accepted. For each managed asset, the state and history are erased, and the asset is removed if necessary from recent states.",
"properties": {
"args": {
"description": "accepts no arguments",
"items": {},
"maxItems": 0,
"minItems": 0,
"type": "array"
},
"function": {
"description": "deleteAllAssets function",
"enum": [
"deleteAllAssets"
],
"type": "string"
},
"method": "invoke"
},
"type": "object"
},
"deleteAsset": {
"description": "Delete an asset, its history, and any recent state activity. Argument is a JSON encoded string containing only an assetID.",
"properties": {
"args": {
"description": "args are JSON encoded strings",
"items": {
"description": "An object containing only an assetID for use as an argument to read or delete.",
"properties": {
"assetID": {
"description": "The ID of a managed asset. The resource focal point for a smart contract.",
"type": "string"
}
},
"type": "object"
},
"maxItems": 1,
"minItems": 1,
"type": "array"
},
"function": {
"description": "deleteAsset function",
"enum": [
"deleteAsset"
],
"type": "string"
},
"method": "invoke"
},
"type": "object"
},
"deletePropertiesFromAsset": {
"description": "Delete one or more properties from an asset. Argument is a JSON encoded string containing an AssetID and an array of qualified property names. An example would be {'assetID':'A1',['event.common.carrier', 'event.customer.temperature']} and the result of that invoke would be the removal of the carrier field and the temperature field with a recalculation of the alert and compliance status.",
"properties": {
"args": {
"description": "args are JSON encoded strings",
"items": {
"description": "Requested assetID with a list or qualified property names.",
"properties": {
"assetID": {
"description": "The ID of a managed asset. The resource focal point for a smart contract.",
"type": "string"
},
"qualPropsToDelete": {
"items": {
"description": "The qualified name of a property. E.g. 'event.common.carrier', 'event.custom.temperature', etc.",
"type": "string"
},
"type": "array"
}
},
"required": [
"assetID",
"qualPropsToDelete"
],
"type": "object"
},
"maxItems": 1,
"minItems": 1,
"type": "array"
},
"function": {
"description": "deletePropertiesFromAsset function",
"enum": [
"deletePropertiesFromAsset"
],
"type": "string"
},
"method": "invoke"
},
"type": "object"
},
"init": {
"description": "Initializes the contract when started, either by deployment or by peer restart.",
"properties": {
"args": {
"description": "args are JSON encoded strings",
"items": {
"description": "event sent to init on deployment",
"properties": {
"nickname": {
"default": "TRADELANE",
"description": "The nickname of the current contract",
"type": "string"
},
"version": {
"description": "The ID of a managed asset. The resource focal point for a smart contract.",
"type": "string"
}
},
"required": [
"version"
],
"type": "object"
},
"maxItems": 1,
"minItems": 1,
"type": "array"
},
"function": {
"description": "init function",
"enum": [
"init"
],
"type": "string"
},
"method": "deploy"
},
"type": "object"
},
"readAllAssets": {
"description": "Returns the state of all assets as an array of JSON encoded strings. Accepts no arguments. For each managed asset, the state is read from the ledger and added to the returned array. Array is sorted by assetID.",
"properties": {
"args": {
"description": "accepts no arguments",
"items": {},
"maxItems": 0,
"minItems": 0,
"type": "array"
},
"function": {
"description": "readAllAssets function",
"enum": [
"readAllAssets"
],
"type": "string"
},
"method": "query",
"result": {
"description": "an array of states, often for different assets",
"items": {
"description": "A set of properties that constitute a complete asset state. Includes event properties and any other calculated properties such as compliance related alerts.",
"properties": {
"alerts": {
"description": "Active means that the alert is in force in this state. Raised means that the alert became active as the result of the event that generated this state. Cleared means that the alert became inactive as the result of the event that generated this state.",
"properties": {
"active": {
"items": {
"description": "Alerts are triggered or cleared by rules that are run against incoming events. This contract considers any active alert to created a state of non-compliance.",
"enum": [
"OVERCARBONEMISSION"
],
"type": "string"
},
"minItems": 0,
"type": "array"
},
"cleared": {
"items": {
"description": "Alerts are triggered or cleared by rules that are run against incoming events. This contract considers any active alert to created a state of non-compliance.",
"enum": [
"OVERCARBONEMISSION"
],
"type": "string"
},
"minItems": 0,
"type": "array"
},
"raised": {
"items": {
"description": "Alerts are triggered or cleared by rules that are run against incoming events. This contract considers any active alert to created a state of non-compliance.",
"enum": [
"OVERCARBONEMISSION"
],
"type": "string"
},
"minItems": 0,
"type": "array"
}
},
"type": "object"
},
"allottedCredits": {
"description": "defines how much a company can spend",
"type": "number"
},
"assetID": {
"description": "The ID of a managed asset. The resource focal point for a smart contract.",
"type": "string"
},
"boughtCredits": {
"description": "Total number of credits bought from other companies",
"type": "number"
},
"compliant": {
"description": "A contract-specific indication that this asset is compliant.",
"type": "boolean"
},
"contactInformation": {
"description": "",
"properties": {
"email": {
"description": "Contact information of the company will be stored here",
"type": "string"
},
"phoneNum": {
"description": "Contact information of the company will be stored here",
"type": "string"
}
},
"type": "object"
},
"creditsBuyList": {
"description": "List of credits requested to buy from a company",
"items": {
"type": "string"
},
"minItems": 0,
"type": "array"
},
"creditsForSale": {
"description": "Total credits which are going to be put on sale by a company",
"type": "number"
},
"creditsRequestBuy": {
"description": "Total credits requested to buy from the market",
"type": "number"
},
"creditsSellList": {
"description": "List of credits company willing to sell",
"items": {
"type": "string"
},
"minItems": 0,
"type": "array"
},
"email": {
"description": "Contact information of the company will be stored here",
"type": "string"
},
"extension": {
"description": "Application-managed state. Opaque to contract.",
"properties": {},
"type": "object"
},
"iconUrl": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"lastEvent": {
"description": "function and string parameter that created this state object",
"properties": {
"args": {
"items": {
"description": "parameters to the function, usually args[0] is populated with a JSON encoded event object",
"type": "string"
},
"type": "array"
},
"function": {
"description": "function that created this state object",
"type": "string"
},
"redirectedFromFunction": {
"description": "function that originally received the event",
"type": "string"
}
},
"type": "object"
},
"location": {
"description": "A geographical coordinate",
"properties": {
"latitude": {
"type": "number"
},
"longitude": {
"type": "number"
}
},
"type": "object"
},
"notificationRead": {
"description": "Value will be true if company saw their weather notification, false otherwise",
"type": "boolean"
},
"phoneNum": {
"description": "Contact information of the company will be stored here",
"type": "string"
},
"precipitation": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"priceBuyList": {
"description": "List of price requested to buy from a company",
"items": {
"type": "string"
},
"minItems": 0,
"type": "array"
},
"pricePerCredit": {
"description": "Price set for every credit that is put on sale",
"type": "number"
},
"priceRequestBuy": {
"description": "Price put per credit requested to buy from the market",
"type": "number"
},
"priceSellList": {
"description": "List of price for every credit put on sell by a company",
"items": {
"type": "string"
},
"minItems": 0,
"type": "array"
},
"reading": {
"description": "defines one reading for a sensor",
"type": "number"
},
"sensorID": {
"description": "defines one sensor in a company",
"type": "number"
},
"sensorWeatherHistory": {
"description": "sensorReading means history of all the carbon readings from the sensor, timestamp refers to time it was recorded and all the other fields refers to weather data",
"properties": {
"iconUrl": {
"type": "string"
},
"precipitation": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"sensorReading": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"tempCelsius": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"tempFahrenheit": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"timestamp": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"windDegrees": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"windGustSpeed": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"windSpeed": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
}
},
"type": "object"
},
"sensorlocation": {
"description": "A geographical coordinate",
"properties": {
"latitude": {
"type": "number"
},
"longitude": {
"type": "number"
}
},
"type": "object"
},
"soldCredits": {
"description": "Total number of credits sold to other companies",
"type": "number"
},
"temperatureCelsius": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"temperatureFahrenheit": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"threshold": {
"description": "limit on credit consumption before it alerts",
"type": "number"
},
"timestamp": {
"description": "RFC3339nanos formatted timestamp.",
"type": "string"
},
"tradeBuySell": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"tradeCompany": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"tradeCredits": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"tradeHistory": {
"description": "Sold means how many credits were traded and Price refers for how much per credit. Company means institution trade was made to. Timestamp means what time trade occured. BuySell attribute is to indicate if it was a buy or sell ",
"properties": {
"buysell": {
"items": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"company": {
"items": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"credits": {
"items": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"price": {
"items": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"timestamp": {
"items": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"minItems": 0,
"type": "array"
}
},
"type": "object"
},
"tradePrice": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"tradeTimestamp": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"txntimestamp": {
"description": "Transaction timestamp matching that in the blockchain.",
"type": "string"
},
"txnuuid": {
"description": "Transaction UUID matching that in the blockchain.",
"type": "string"
},
"updateBuyCredits": {
"description": "Credits that need to be updated in buy list",
"type": "number"
},
"updateBuyIndex": {
"description": "Index of the buy list array that needs to be updated",
"type": "number"
},
"updateSellCredits": {
"description": "Credits that need to be updated in sell list",
"type": "number"
},
"updateSellIndex": {
"description": "Index of the sell list array that needs to be updated",
"type": "number"
},
"windDegrees": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"windGustSpeed": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"windSpeed": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
}
},
"type": "object"
},
"minItems": 0,
"type": "array"
}
},
"type": "object"
},
"readAsset": {
"description": "Returns the state an asset. Argument is a JSON encoded string. AssetID is the only accepted property.",
"properties": {
"args": {
"description": "args are JSON encoded strings",
"items": {
"description": "An object containing only an assetID for use as an argument to read or delete.",
"properties": {
"assetID": {
"description": "The ID of a managed asset. The resource focal point for a smart contract.",
"type": "string"
}
},
"type": "object"
},
"maxItems": 1,
"minItems": 1,
"type": "array"
},
"function": {
"description": "readAsset function",
"enum": [
"readAsset"
],
"type": "string"
},
"method": "query",
"result": {
"description": "A set of properties that constitute a complete asset state. Includes event properties and any other calculated properties such as compliance related alerts.",
"properties": {
"alerts": {
"description": "Active means that the alert is in force in this state. Raised means that the alert became active as the result of the event that generated this state. Cleared means that the alert became inactive as the result of the event that generated this state.",
"properties": {
"active": {
"items": {
"description": "Alerts are triggered or cleared by rules that are run against incoming events. This contract considers any active alert to created a state of non-compliance.",
"enum": [
"OVERCARBONEMISSION"
],
"type": "string"
},
"minItems": 0,
"type": "array"
},
"cleared": {
"items": {
"description": "Alerts are triggered or cleared by rules that are run against incoming events. This contract considers any active alert to created a state of non-compliance.",
"enum": [
"OVERCARBONEMISSION"
],
"type": "string"
},
"minItems": 0,
"type": "array"
},
"raised": {
"items": {
"description": "Alerts are triggered or cleared by rules that are run against incoming events. This contract considers any active alert to created a state of non-compliance.",
"enum": [
"OVERCARBONEMISSION"
],
"type": "string"
},
"minItems": 0,
"type": "array"
}
},
"type": "object"
},
"allottedCredits": {
"description": "defines how much a company can spend",
"type": "number"
},
"assetID": {
"description": "The ID of a managed asset. The resource focal point for a smart contract.",
"type": "string"
},
"boughtCredits": {
"description": "Total number of credits bought from other companies",
"type": "number"
},
"compliant": {
"description": "A contract-specific indication that this asset is compliant.",
"type": "boolean"
},
"contactInformation": {
"description": "",
"properties": {
"email": {
"description": "Contact information of the company will be stored here",
"type": "string"
},
"phoneNum": {
"description": "Contact information of the company will be stored here",
"type": "string"
}
},
"type": "object"
},
"creditsBuyList": {
"description": "List of credits requested to buy from a company",
"items": {
"type": "string"
},
"minItems": 0,
"type": "array"
},
"creditsForSale": {
"description": "Total credits which are going to be put on sale by a company",
"type": "number"
},
"creditsRequestBuy": {
"description": "Total credits requested to buy from the market",
"type": "number"
},
"creditsSellList": {
"description": "List of credits company willing to sell",
"items": {
"type": "string"
},
"minItems": 0,
"type": "array"
},
"email": {
"description": "Contact information of the company will be stored here",
"type": "string"
},
"extension": {
"description": "Application-managed state. Opaque to contract.",
"properties": {},
"type": "object"
},
"iconUrl": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"lastEvent": {
"description": "function and string parameter that created this state object",
"properties": {
"args": {
"items": {
"description": "parameters to the function, usually args[0] is populated with a JSON encoded event object",
"type": "string"
},
"type": "array"
},
"function": {
"description": "function that created this state object",
"type": "string"
},
"redirectedFromFunction": {
"description": "function that originally received the event",
"type": "string"
}
},
"type": "object"
},
"location": {
"description": "A geographical coordinate",
"properties": {
"latitude": {
"type": "number"
},
"longitude": {
"type": "number"
}
},
"type": "object"
},
"notificationRead": {
"description": "Value will be true if company saw their weather notification, false otherwise",
"type": "boolean"
},
"phoneNum": {
"description": "Contact information of the company will be stored here",
"type": "string"
},
"precipitation": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"priceBuyList": {
"description": "List of price requested to buy from a company",
"items": {
"type": "string"
},
"minItems": 0,
"type": "array"
},
"pricePerCredit": {
"description": "Price set for every credit that is put on sale",
"type": "number"
},
"priceRequestBuy": {
"description": "Price put per credit requested to buy from the market",
"type": "number"
},
"priceSellList": {
"description": "List of price for every credit put on sell by a company",
"items": {
"type": "string"
},
"minItems": 0,
"type": "array"
},
"reading": {
"description": "defines one reading for a sensor",
"type": "number"
},
"sensorID": {
"description": "defines one sensor in a company",
"type": "number"
},
"sensorWeatherHistory": {
"description": "sensorReading means history of all the carbon readings from the sensor, timestamp refers to time it was recorded and all the other fields refers to weather data",
"properties": {
"iconUrl": {
"type": "string"
},
"precipitation": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"sensorReading": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"tempCelsius": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"tempFahrenheit": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"timestamp": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"windDegrees": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"windGustSpeed": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"windSpeed": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
}
},
"type": "object"
},
"sensorlocation": {
"description": "A geographical coordinate",
"properties": {
"latitude": {
"type": "number"
},
"longitude": {
"type": "number"
}
},
"type": "object"
},
"soldCredits": {
"description": "Total number of credits sold to other companies",
"type": "number"
},
"temperatureCelsius": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"temperatureFahrenheit": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"threshold": {
"description": "limit on credit consumption before it alerts",
"type": "number"
},
"timestamp": {
"description": "RFC3339nanos formatted timestamp.",
"type": "string"
},
"tradeBuySell": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"tradeCompany": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"tradeCredits": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"tradeHistory": {
"description": "Sold means how many credits were traded and Price refers for how much per credit. Company means institution trade was made to. Timestamp means what time trade occured. BuySell attribute is to indicate if it was a buy or sell ",
"properties": {
"buysell": {
"items": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"company": {
"items": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"credits": {
"items": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"price": {
"items": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"timestamp": {
"items": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"minItems": 0,
"type": "array"
}
},
"type": "object"
},
"tradePrice": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"tradeTimestamp": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"txntimestamp": {
"description": "Transaction timestamp matching that in the blockchain.",
"type": "string"
},
"txnuuid": {
"description": "Transaction UUID matching that in the blockchain.",
"type": "string"
},
"updateBuyCredits": {
"description": "Credits that need to be updated in buy list",
"type": "number"
},
"updateBuyIndex": {
"description": "Index of the buy list array that needs to be updated",
"type": "number"
},
"updateSellCredits": {
"description": "Credits that need to be updated in sell list",
"type": "number"
},
"updateSellIndex": {
"description": "Index of the sell list array that needs to be updated",
"type": "number"
},
"windDegrees": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"windGustSpeed": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"windSpeed": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
}
},
"type": "object"
}
},
"type": "object"
},
"readAssetHistory": {
"description": "Requests a specified number of history states for an assets. Returns an array of states sorted with the most recent first. AssetID is required and count is optional. A missing count, a count of zero, or too large a count returns all existing history states.",
"properties": {
"args": {
"description": "args are JSON encoded strings",
"items": {
"description": "Requested assetID with item count.",
"properties": {
"assetID": {
"description": "The ID of a managed asset. The resource focal point for a smart contract.",
"type": "string"
},
"count": {
"type": "integer"
}
},
"required": [
"assetID"
],
"type": "object"
},
"maxItems": 1,
"minItems": 1,
"type": "array"
},
"function": {
"description": "readAssetHistory function",
"enum": [
"readAssetHistory"
],
"type": "string"
},
"method": "query",
"result": {
"description": "an array of states for one asset sorted by timestamp with the most recent entry first",
"items": {
"description": "A set of properties that constitute a complete asset state. Includes event properties and any other calculated properties such as compliance related alerts.",
"properties": {
"alerts": {
"description": "Active means that the alert is in force in this state. Raised means that the alert became active as the result of the event that generated this state. Cleared means that the alert became inactive as the result of the event that generated this state.",
"properties": {
"active": {
"items": {
"description": "Alerts are triggered or cleared by rules that are run against incoming events. This contract considers any active alert to created a state of non-compliance.",
"enum": [
"OVERCARBONEMISSION"
],
"type": "string"
},
"minItems": 0,
"type": "array"
},
"cleared": {
"items": {
"description": "Alerts are triggered or cleared by rules that are run against incoming events. This contract considers any active alert to created a state of non-compliance.",
"enum": [
"OVERCARBONEMISSION"
],
"type": "string"
},
"minItems": 0,
"type": "array"
},
"raised": {
"items": {
"description": "Alerts are triggered or cleared by rules that are run against incoming events. This contract considers any active alert to created a state of non-compliance.",
"enum": [
"OVERCARBONEMISSION"
],
"type": "string"
},
"minItems": 0,
"type": "array"
}
},
"type": "object"
},
"allottedCredits": {
"description": "defines how much a company can spend",
"type": "number"
},
"assetID": {
"description": "The ID of a managed asset. The resource focal point for a smart contract.",
"type": "string"
},
"boughtCredits": {
"description": "Total number of credits bought from other companies",
"type": "number"
},
"compliant": {
"description": "A contract-specific indication that this asset is compliant.",
"type": "boolean"
},
"contactInformation": {
"description": "",
"properties": {
"email": {
"description": "Contact information of the company will be stored here",
"type": "string"
},
"phoneNum": {
"description": "Contact information of the company will be stored here",
"type": "string"
}
},
"type": "object"
},
"creditsBuyList": {
"description": "List of credits requested to buy from a company",
"items": {
"type": "string"
},
"minItems": 0,
"type": "array"
},
"creditsForSale": {
"description": "Total credits which are going to be put on sale by a company",
"type": "number"
},
"creditsRequestBuy": {
"description": "Total credits requested to buy from the market",
"type": "number"
},
"creditsSellList": {
"description": "List of credits company willing to sell",
"items": {
"type": "string"
},
"minItems": 0,
"type": "array"
},
"email": {
"description": "Contact information of the company will be stored here",
"type": "string"
},
"extension": {
"description": "Application-managed state. Opaque to contract.",
"properties": {},
"type": "object"
},
"iconUrl": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"lastEvent": {
"description": "function and string parameter that created this state object",
"properties": {
"args": {
"items": {
"description": "parameters to the function, usually args[0] is populated with a JSON encoded event object",
"type": "string"
},
"type": "array"
},
"function": {
"description": "function that created this state object",
"type": "string"
},
"redirectedFromFunction": {
"description": "function that originally received the event",
"type": "string"
}
},
"type": "object"
},
"location": {
"description": "A geographical coordinate",
"properties": {
"latitude": {
"type": "number"
},
"longitude": {
"type": "number"
}
},
"type": "object"
},
"notificationRead": {
"description": "Value will be true if company saw their weather notification, false otherwise",
"type": "boolean"
},
"phoneNum": {
"description": "Contact information of the company will be stored here",
"type": "string"
},
"precipitation": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"priceBuyList": {
"description": "List of price requested to buy from a company",
"items": {
"type": "string"
},
"minItems": 0,
"type": "array"
},
"pricePerCredit": {
"description": "Price set for every credit that is put on sale",
"type": "number"
},
"priceRequestBuy": {
"description": "Price put per credit requested to buy from the market",
"type": "number"
},
"priceSellList": {
"description": "List of price for every credit put on sell by a company",
"items": {
"type": "string"
},
"minItems": 0,
"type": "array"
},
"reading": {
"description": "defines one reading for a sensor",
"type": "number"
},
"sensorID": {
"description": "defines one sensor in a company",
"type": "number"
},
"sensorWeatherHistory": {
"description": "sensorReading means history of all the carbon readings from the sensor, timestamp refers to time it was recorded and all the other fields refers to weather data",
"properties": {
"iconUrl": {
"type": "string"
},
"precipitation": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"sensorReading": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"tempCelsius": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"tempFahrenheit": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"timestamp": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"windDegrees": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"windGustSpeed": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"windSpeed": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
}
},
"type": "object"
},
"sensorlocation": {
"description": "A geographical coordinate",
"properties": {
"latitude": {
"type": "number"
},
"longitude": {
"type": "number"
}
},
"type": "object"
},
"soldCredits": {
"description": "Total number of credits sold to other companies",
"type": "number"
},
"temperatureCelsius": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"temperatureFahrenheit": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"threshold": {
"description": "limit on credit consumption before it alerts",
"type": "number"
},
"timestamp": {
"description": "RFC3339nanos formatted timestamp.",
"type": "string"
},
"tradeBuySell": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"tradeCompany": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"tradeCredits": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"tradeHistory": {
"description": "Sold means how many credits were traded and Price refers for how much per credit. Company means institution trade was made to. Timestamp means what time trade occured. BuySell attribute is to indicate if it was a buy or sell ",
"properties": {
"buysell": {
"items": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"company": {
"items": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"credits": {
"items": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"price": {
"items": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"timestamp": {
"items": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"minItems": 0,
"type": "array"
}
},
"type": "object"
},
"tradePrice": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"tradeTimestamp": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"txntimestamp": {
"description": "Transaction timestamp matching that in the blockchain.",
"type": "string"
},
"txnuuid": {
"description": "Transaction UUID matching that in the blockchain.",
"type": "string"
},
"updateBuyCredits": {
"description": "Credits that need to be updated in buy list",
"type": "number"
},
"updateBuyIndex": {
"description": "Index of the buy list array that needs to be updated",
"type": "number"
},
"updateSellCredits": {
"description": "Credits that need to be updated in sell list",
"type": "number"
},
"updateSellIndex": {
"description": "Index of the sell list array that needs to be updated",
"type": "number"
},
"windDegrees": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"windGustSpeed": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"windSpeed": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
}
},
"type": "object"
},
"minItems": 0,
"type": "array"
}
},
"type": "object"
},
"readRecentStates": {
"description": "Returns the state of recently updated assets as an array of objects sorted with the most recently updated asset first. Each asset appears exactly once up to a maxmum of 20 in this version of the contract.",
"properties": {
"args": {
"description": "accepts no arguments",
"items": {},
"maxItems": 0,
"minItems": 0,
"type": "array"
},
"function": {
"description": "readRecentStates function",
"enum": [
"readRecentStates"
],
"type": "string"
},
"method": "query",
"result": {
"description": "an array of states for one asset sorted by timestamp with the most recent entry first",
"items": {
"description": "A set of properties that constitute a complete asset state. Includes event properties and any other calculated properties such as compliance related alerts.",
"properties": {
"alerts": {
"description": "Active means that the alert is in force in this state. Raised means that the alert became active as the result of the event that generated this state. Cleared means that the alert became inactive as the result of the event that generated this state.",
"properties": {
"active": {
"items": {
"description": "Alerts are triggered or cleared by rules that are run against incoming events. This contract considers any active alert to created a state of non-compliance.",
"enum": [
"OVERCARBONEMISSION"
],
"type": "string"
},
"minItems": 0,
"type": "array"
},
"cleared": {
"items": {
"description": "Alerts are triggered or cleared by rules that are run against incoming events. This contract considers any active alert to created a state of non-compliance.",
"enum": [
"OVERCARBONEMISSION"
],
"type": "string"
},
"minItems": 0,
"type": "array"
},
"raised": {
"items": {
"description": "Alerts are triggered or cleared by rules that are run against incoming events. This contract considers any active alert to created a state of non-compliance.",
"enum": [
"OVERCARBONEMISSION"
],
"type": "string"
},
"minItems": 0,
"type": "array"
}
},
"type": "object"
},
"allottedCredits": {
"description": "defines how much a company can spend",
"type": "number"
},
"assetID": {
"description": "The ID of a managed asset. The resource focal point for a smart contract.",
"type": "string"
},
"boughtCredits": {
"description": "Total number of credits bought from other companies",
"type": "number"
},
"compliant": {
"description": "A contract-specific indication that this asset is compliant.",
"type": "boolean"
},
"contactInformation": {
"description": "",
"properties": {
"email": {
"description": "Contact information of the company will be stored here",
"type": "string"
},
"phoneNum": {
"description": "Contact information of the company will be stored here",
"type": "string"
}
},
"type": "object"
},
"creditsBuyList": {
"description": "List of credits requested to buy from a company",
"items": {
"type": "string"
},
"minItems": 0,
"type": "array"
},
"creditsForSale": {
"description": "Total credits which are going to be put on sale by a company",
"type": "number"
},
"creditsRequestBuy": {
"description": "Total credits requested to buy from the market",
"type": "number"
},
"creditsSellList": {
"description": "List of credits company willing to sell",
"items": {
"type": "string"
},
"minItems": 0,
"type": "array"
},
"email": {
"description": "Contact information of the company will be stored here",
"type": "string"
},
"extension": {
"description": "Application-managed state. Opaque to contract.",
"properties": {},
"type": "object"
},
"iconUrl": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"lastEvent": {
"description": "function and string parameter that created this state object",
"properties": {
"args": {
"items": {
"description": "parameters to the function, usually args[0] is populated with a JSON encoded event object",
"type": "string"
},
"type": "array"
},
"function": {
"description": "function that created this state object",
"type": "string"
},
"redirectedFromFunction": {
"description": "function that originally received the event",
"type": "string"
}
},
"type": "object"
},
"location": {
"description": "A geographical coordinate",
"properties": {
"latitude": {
"type": "number"
},
"longitude": {
"type": "number"
}
},
"type": "object"
},
"notificationRead": {
"description": "Value will be true if company saw their weather notification, false otherwise",
"type": "boolean"
},
"phoneNum": {
"description": "Contact information of the company will be stored here",
"type": "string"
},
"precipitation": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"priceBuyList": {
"description": "List of price requested to buy from a company",
"items": {
"type": "string"
},
"minItems": 0,
"type": "array"
},
"pricePerCredit": {
"description": "Price set for every credit that is put on sale",
"type": "number"
},
"priceRequestBuy": {
"description": "Price put per credit requested to buy from the market",
"type": "number"
},
"priceSellList": {
"description": "List of price for every credit put on sell by a company",
"items": {
"type": "string"
},
"minItems": 0,
"type": "array"
},
"reading": {
"description": "defines one reading for a sensor",
"type": "number"
},
"sensorID": {
"description": "defines one sensor in a company",
"type": "number"
},
"sensorWeatherHistory": {
          "description": "sensorReading means history of all the carbon readings from the sensor, timestamp refers to time it was recorded and all the other fields refer to weather data",
"properties": {
"iconUrl": {
"type": "string"
},
"precipitation": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"sensorReading": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"tempCelsius": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"tempFahrenheit": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"timestamp": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"windDegrees": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"windGustSpeed": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"windSpeed": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
}
},
"type": "object"
},
"sensorlocation": {
"description": "A geographical coordinate",
"properties": {
"latitude": {
"type": "number"
},
"longitude": {
"type": "number"
}
},
"type": "object"
},
"soldCredits": {
"description": "Total number of credits sold to other companies",
"type": "number"
},
"temperatureCelsius": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"temperatureFahrenheit": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"threshold": {
"description": "limit on credit consumption before it alerts",
"type": "number"
},
"timestamp": {
"description": "RFC3339nanos formatted timestamp.",
"type": "string"
},
"tradeBuySell": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"tradeCompany": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"tradeCredits": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"tradeHistory": {
          "description": "Sold means how many credits were traded and Price refers for how much per credit. Company means institution trade was made to. Timestamp means what time trade occurred. BuySell attribute is to indicate if it was a buy or sell",
"properties": {
"buysell": {
"items": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"company": {
"items": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"credits": {
"items": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"price": {
"items": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"timestamp": {
"items": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"minItems": 0,
"type": "array"
}
},
"type": "object"
},
"tradePrice": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"tradeTimestamp": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"txntimestamp": {
"description": "Transaction timestamp matching that in the blockchain.",
"type": "string"
},
"txnuuid": {
"description": "Transaction UUID matching that in the blockchain.",
"type": "string"
},
"updateBuyCredits": {
"description": "Credits that need to be updated in buy list",
"type": "number"
},
"updateBuyIndex": {
"description": "Index of the buy list array that needs to be updated",
"type": "number"
},
"updateSellCredits": {
"description": "Credits that need to be updated in sell list",
"type": "number"
},
"updateSellIndex": {
"description": "Index of the sell list array that needs to be updated",
"type": "number"
},
"windDegrees": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"windGustSpeed": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"windSpeed": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
}
},
"type": "object"
},
"minItems": 0,
"type": "array"
}
},
"type": "object"
},
"setCreateOnUpdate": {
"description": "Allow updateAsset to redirect to createAsset when assetID does not exist.",
"properties": {
"args": {
"description": "True for redirect allowed, false for error on asset does not exist.",
"items": {
"setCreateOnUpdate": {
"type": "boolean"
}
},
"maxItems": 1,
"minItems": 1,
"type": "array"
},
"function": {
"description": "setCreateOnUpdate function",
"enum": [
"setCreateOnUpdate"
],
"type": "string"
},
"method": "invoke"
},
"type": "object"
},
"setLoggingLevel": {
"description": "Sets the logging level in the contract.",
"properties": {
"args": {
"description": "logging levels indicate what you see",
"items": {
"logLevel": {
"enum": [
"CRITICAL",
"ERROR",
"WARNING",
"NOTICE",
"INFO",
"DEBUG"
],
"type": "string"
}
},
"maxItems": 1,
"minItems": 1,
"type": "array"
},
"function": {
"description": "setLoggingLevel function",
"enum": [
"setLoggingLevel"
],
"type": "string"
},
"method": "invoke"
},
"type": "object"
},
"updateAsset": {
"description": "Update the state of an asset. The one argument is a JSON encoded event. AssetID is required along with one or more writable properties. Establishes the next asset state. ",
"properties": {
"args": {
"description": "args are JSON encoded strings",
"items": {
"description": "The set of writable properties that define an asset's state. For asset creation, the only mandatory property is the 'assetID'. Updates should include at least one other writable property. This exemplifies the IoT contract pattern 'partial state as event'.",
"properties": {
"allottedCredits": {
"description": "defines how much a company can spend",
"type": "number"
},
"assetID": {
"description": "The ID of a managed asset. The resource focal point for a smart contract.",
"type": "string"
},
"boughtCredits": {
"description": "Total number of credits bought from other companies",
"type": "number"
},
"creditsForSale": {
"description": "Total credits which are going to be put on sale by a company",
"type": "number"
},
"creditsRequestBuy": {
"description": "Total credits requested to buy from the market",
"type": "number"
},
"email": {
"description": "Contact information of the company will be stored here",
"type": "string"
},
"extension": {
"description": "Application-managed state. Opaque to contract.",
"properties": {},
"type": "object"
},
"iconUrl": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"location": {
"description": "A geographical coordinate",
"properties": {
"latitude": {
"type": "number"
},
"longitude": {
"type": "number"
}
},
"type": "object"
},
"notificationRead": {
"description": "Value will be true if company saw their weather notification, false otherwise",
"type": "boolean"
},
"phoneNum": {
"description": "Contact information of the company will be stored here",
"type": "string"
},
"precipitation": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"pricePerCredit": {
"description": "Price set for every credit that is put on sale",
"type": "number"
},
"priceRequestBuy": {
"description": "Price put per credit requested to buy from the market",
"type": "number"
},
"reading": {
"description": "defines one reading for a sensor",
"type": "number"
},
"sensorID": {
"description": "defines one sensor in a company",
"type": "number"
},
"sensorlocation": {
"description": "A geographical coordinate",
"properties": {
"latitude": {
"type": "number"
},
"longitude": {
"type": "number"
}
},
"type": "object"
},
"soldCredits": {
"description": "Total number of credits sold to other companies",
"type": "number"
},
"temperatureCelsius": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"temperatureFahrenheit": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"threshold": {
"description": "limit on credit consumption before it alerts",
"type": "number"
},
"timestamp": {
"description": "RFC3339nanos formatted timestamp.",
"type": "string"
},
"tradeBuySell": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"tradeCompany": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"tradeCredits": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"tradePrice": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"tradeTimestamp": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"updateBuyCredits": {
"description": "Credits that need to be updated in buy list",
"type": "number"
},
"updateBuyIndex": {
"description": "Index of the buy list array that needs to be updated",
"type": "number"
},
"updateSellCredits": {
"description": "Credits that need to be updated in sell list",
"type": "number"
},
"updateSellIndex": {
"description": "Index of the sell list array that needs to be updated",
"type": "number"
},
"windDegrees": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"windGustSpeed": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"windSpeed": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
}
},
"required": [
"assetID"
],
"type": "object"
},
"maxItems": 1,
"minItems": 1,
"type": "array"
},
"function": {
"description": "updateAsset function",
"enum": [
"updateAsset"
],
"type": "string"
},
"method": "invoke"
},
"type": "object"
}
},
"objectModelSchemas": {
"assetIDKey": {
"description": "An object containing only an assetID for use as an argument to read or delete.",
"properties": {
"assetID": {
"description": "The ID of a managed asset. The resource focal point for a smart contract.",
"type": "string"
}
},
"type": "object"
},
"assetIDandCount": {
"description": "Requested assetID with item count.",
"properties": {
"assetID": {
"description": "The ID of a managed asset. The resource focal point for a smart contract.",
"type": "string"
},
"count": {
"type": "integer"
}
},
"required": [
"assetID"
],
"type": "object"
},
"event": {
"description": "The set of writable properties that define an asset's state. For asset creation, the only mandatory property is the 'assetID'. Updates should include at least one other writable property. This exemplifies the IoT contract pattern 'partial state as event'.",
"properties": {
"allottedCredits": {
"description": "defines how much a company can spend",
"type": "number"
},
"assetID": {
"description": "The ID of a managed asset. The resource focal point for a smart contract.",
"type": "string"
},
"boughtCredits": {
"description": "Total number of credits bought from other companies",
"type": "number"
},
"creditsForSale": {
"description": "Total credits which are going to be put on sale by a company",
"type": "number"
},
"creditsRequestBuy": {
"description": "Total credits requested to buy from the market",
"type": "number"
},
"email": {
"description": "Contact information of the company will be stored here",
"type": "string"
},
"extension": {
"description": "Application-managed state. Opaque to contract.",
"properties": {},
"type": "object"
},
"iconUrl": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"location": {
"description": "A geographical coordinate",
"properties": {
"latitude": {
"type": "number"
},
"longitude": {
"type": "number"
}
},
"type": "object"
},
"notificationRead": {
"description": "Value will be true if company saw their weather notification, false otherwise",
"type": "boolean"
},
"phoneNum": {
"description": "Contact information of the company will be stored here",
"type": "string"
},
"precipitation": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"pricePerCredit": {
"description": "Price set for every credit that is put on sale",
"type": "number"
},
"priceRequestBuy": {
"description": "Price put per credit requested to buy from the market",
"type": "number"
},
"reading": {
"description": "defines one reading for a sensor",
"type": "number"
},
"sensorID": {
"description": "defines one sensor in a company",
"type": "number"
},
"sensorlocation": {
"description": "A geographical coordinate",
"properties": {
"latitude": {
"type": "number"
},
"longitude": {
"type": "number"
}
},
"type": "object"
},
"soldCredits": {
"description": "Total number of credits sold to other companies",
"type": "number"
},
"temperatureCelsius": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"temperatureFahrenheit": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"threshold": {
"description": "limit on credit consumption before it alerts",
"type": "number"
},
"timestamp": {
"description": "RFC3339nanos formatted timestamp.",
"type": "string"
},
"tradeBuySell": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"tradeCompany": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"tradeCredits": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"tradePrice": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"tradeTimestamp": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"updateBuyCredits": {
"description": "Credits that need to be updated in buy list",
"type": "number"
},
"updateBuyIndex": {
"description": "Index of the buy list array that needs to be updated",
"type": "number"
},
"updateSellCredits": {
"description": "Credits that need to be updated in sell list",
"type": "number"
},
"updateSellIndex": {
"description": "Index of the sell list array that needs to be updated",
"type": "number"
},
"windDegrees": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"windGustSpeed": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"windSpeed": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
}
},
"required": [
"assetID"
],
"type": "object"
},
"initEvent": {
"description": "event sent to init on deployment",
"properties": {
"nickname": {
"default": "TRADELANE",
"description": "The nickname of the current contract",
"type": "string"
},
"version": {
"description": "The ID of a managed asset. The resource focal point for a smart contract.",
"type": "string"
}
},
"required": [
"version"
],
"type": "object"
},
"state": {
"description": "A set of properties that constitute a complete asset state. Includes event properties and any other calculated properties such as compliance related alerts.",
"properties": {
"alerts": {
"description": "Active means that the alert is in force in this state. Raised means that the alert became active as the result of the event that generated this state. Cleared means that the alert became inactive as the result of the event that generated this state.",
"properties": {
"active": {
"items": {
                "description": "Alerts are triggered or cleared by rules that are run against incoming events. This contract considers any active alert to create a state of non-compliance.",
"enum": [
"OVERCARBONEMISSION"
],
"type": "string"
},
"minItems": 0,
"type": "array"
},
"cleared": {
"items": {
                "description": "Alerts are triggered or cleared by rules that are run against incoming events. This contract considers any active alert to create a state of non-compliance.",
"enum": [
"OVERCARBONEMISSION"
],
"type": "string"
},
"minItems": 0,
"type": "array"
},
"raised": {
"items": {
                "description": "Alerts are triggered or cleared by rules that are run against incoming events. This contract considers any active alert to create a state of non-compliance.",
"enum": [
"OVERCARBONEMISSION"
],
"type": "string"
},
"minItems": 0,
"type": "array"
}
},
"type": "object"
},
"allottedCredits": {
"description": "defines how much a company can spend",
"type": "number"
},
"assetID": {
"description": "The ID of a managed asset. The resource focal point for a smart contract.",
"type": "string"
},
"boughtCredits": {
"description": "Total number of credits bought from other companies",
"type": "number"
},
"compliant": {
"description": "A contract-specific indication that this asset is compliant.",
"type": "boolean"
},
"contactInformation": {
          "description": "Contact information (email and phone number) of the company.",
"properties": {
"email": {
"description": "Contact information of the company will be stored here",
"type": "string"
},
"phoneNum": {
"description": "Contact information of the company will be stored here",
"type": "string"
}
},
"type": "object"
},
"creditsBuyList": {
"description": "List of credits requested to buy from a company",
"items": {
"type": "string"
},
"minItems": 0,
"type": "array"
},
"creditsForSale": {
"description": "Total credits which are going to be put on sale by a company",
"type": "number"
},
"creditsRequestBuy": {
"description": "Total credits requested to buy from the market",
"type": "number"
},
"creditsSellList": {
"description": "List of credits company willing to sell",
"items": {
"type": "string"
},
"minItems": 0,
"type": "array"
},
"email": {
"description": "Contact information of the company will be stored here",
"type": "string"
},
"extension": {
"description": "Application-managed state. Opaque to contract.",
"properties": {},
"type": "object"
},
"iconUrl": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"lastEvent": {
"description": "function and string parameter that created this state object",
"properties": {
"args": {
"items": {
"description": "parameters to the function, usually args[0] is populated with a JSON encoded event object",
"type": "string"
},
"type": "array"
},
"function": {
"description": "function that created this state object",
"type": "string"
},
"redirectedFromFunction": {
"description": "function that originally received the event",
"type": "string"
}
},
"type": "object"
},
"location": {
"description": "A geographical coordinate",
"properties": {
"latitude": {
"type": "number"
},
"longitude": {
"type": "number"
}
},
"type": "object"
},
"notificationRead": {
"description": "Value will be true if company saw their weather notification, false otherwise",
"type": "boolean"
},
"phoneNum": {
"description": "Contact information of the company will be stored here",
"type": "string"
},
"precipitation": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"priceBuyList": {
"description": "List of price requested to buy from a company",
"items": {
"type": "string"
},
"minItems": 0,
"type": "array"
},
"pricePerCredit": {
"description": "Price set for every credit that is put on sale",
"type": "number"
},
"priceRequestBuy": {
"description": "Price put per credit requested to buy from the market",
"type": "number"
},
"priceSellList": {
"description": "List of price for every credit put on sell by a company",
"items": {
"type": "string"
},
"minItems": 0,
"type": "array"
},
"reading": {
"description": "defines one reading for a sensor",
"type": "number"
},
"sensorID": {
"description": "defines one sensor in a company",
"type": "number"
},
"sensorWeatherHistory": {
          "description": "sensorReading means history of all the carbon readings from the sensor, timestamp refers to time it was recorded and all the other fields refer to weather data",
"properties": {
"iconUrl": {
"type": "string"
},
"precipitation": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"sensorReading": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"tempCelsius": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"tempFahrenheit": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"timestamp": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"windDegrees": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"windGustSpeed": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"windSpeed": {
"items": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"minItems": 0,
"type": "array"
}
},
"type": "object"
},
"sensorlocation": {
"description": "A geographical coordinate",
"properties": {
"latitude": {
"type": "number"
},
"longitude": {
"type": "number"
}
},
"type": "object"
},
"soldCredits": {
"description": "Total number of credits sold to other companies",
"type": "number"
},
"temperatureCelsius": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"temperatureFahrenheit": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"threshold": {
"description": "limit on credit consumption before it alerts",
"type": "number"
},
"timestamp": {
"description": "RFC3339nanos formatted timestamp.",
"type": "string"
},
"tradeBuySell": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"tradeCompany": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"tradeCredits": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"tradeHistory": {
          "description": "Sold means how many credits were traded and Price refers for how much per credit. Company means institution trade was made to. Timestamp means what time trade occurred. BuySell attribute is to indicate if it was a buy or sell",
"properties": {
"buysell": {
"items": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"company": {
"items": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"credits": {
"items": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"price": {
"items": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"minItems": 0,
"type": "array"
},
"timestamp": {
"items": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"minItems": 0,
"type": "array"
}
},
"type": "object"
},
"tradePrice": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"tradeTimestamp": {
"description": "Trade values are triggered for every trade which is processed. This contract stores every trade which will be made between two companies",
"type": "string"
},
"txntimestamp": {
"description": "Transaction timestamp matching that in the blockchain.",
"type": "string"
},
"txnuuid": {
"description": "Transaction UUID matching that in the blockchain.",
"type": "string"
},
"updateBuyCredits": {
"description": "Credits that need to be updated in buy list",
"type": "number"
},
"updateBuyIndex": {
"description": "Index of the buy list array that needs to be updated",
"type": "number"
},
"updateSellCredits": {
"description": "Credits that need to be updated in sell list",
"type": "number"
},
"updateSellIndex": {
"description": "Index of the sell list array that needs to be updated",
"type": "number"
},
"windDegrees": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"windGustSpeed": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
},
"windSpeed": {
"description": "Sensor and weather value will be stored in a string. So sensorWeatherData object could refer to this definition to store its value",
"type": "string"
}
},
"type": "object"
}
}
}` | contracts/industry/carbon_trading/schemas.go | 0.802633 | 0.700261 | schemas.go | starcoder |
package main
import (
"fmt"
"strings"
)
/**
--- Day 6: Custom Customs ---
As your flight approaches the regional airport where you'll switch to a much larger plane, customs declaration forms are distributed to the passengers.
The form asks a series of 26 yes-or-no questions marked a through z. All you need to do is identify the questions for which anyone in your group answers "yes". Since your group is just you, this doesn't take very long.
However, the person sitting next to you seems to be experiencing a language barrier and asks if you can help. For each of the people in their group, you write down the questions for which they answer "yes", one per line. For example:
abcx
abcy
abcz
In this group, there are 6 questions to which anyone answered "yes": a, b, c, x, y, and z. (Duplicate answers to the same question don't count extra; each question counts at most once.)
Another group asks for your help, then another, and eventually you've collected answers from every group on the plane (your puzzle input). Each group's answers are separated by a blank line, and within each group, each person's answers are on a single line. For example:
abc
a
b
c
ab
ac
a
a
a
a
b
This list represents answers from five groups:
The first group contains one person who answered "yes" to 3 questions: a, b, and c.
The second group contains three people; combined, they answered "yes" to 3 questions: a, b, and c.
The third group contains two people; combined, they answered "yes" to 3 questions: a, b, and c.
The fourth group contains four people; combined, they answered "yes" to only 1 question, a.
The last group contains one person who answered "yes" to only 1 question, b.
In this example, the sum of these counts is 3 + 3 + 3 + 1 + 1 = 11.
For each group, count the number of questions to which anyone answered "yes". What is the sum of those counts?
Your puzzle answer was 6351.
**/
// day6_part1 sums, over all groups, the number of distinct questions to
// which anyone in the group answered "yes". Groups are separated by blank
// lines; each person's answers are one line of lowercase letters.
func day6_part1() {
	contents := getFilesContents("day06.input")
	groups := strings.Split(contents, "\n\n")
	var total uint = 0
	for _, group := range groups {
		people := strings.Split(group, "\n")
		joined := strings.Join(people, "")
		// One slot per byte value up to 'z'; a = ascii 97, 26 alphabet letters.
		letters := make([]uint, 97+26)
		for _, r := range joined {
			letters[int(r)] = 1 // mark as answered, duplicates collapse
		}
		var count uint = 0
		for _, seen := range letters {
			count += seen
		}
		total += count
		fmt.Println(joined, letters[97:], count)
	}
	fmt.Println("total unique letters of the alphabet, or 'yes' answers by groups", total)
}
/**
--- Part Two ---
As you finish the last group's customs declaration, you notice that you misread one word in the instructions:
You don't need to identify the questions to which anyone answered "yes"; you need to identify the questions to which everyone answered "yes"!
Using the same example as above:
abc
a
b
c
ab
ac
a
a
a
a
b
This list represents answers from five groups:
In the first group, everyone (all 1 person) answered "yes" to 3 questions: a, b, and c.
In the second group, there is no question to which everyone answered "yes".
In the third group, everyone answered yes to only 1 question, a. Since some people did not answer "yes" to b or c, they don't count.
In the fourth group, everyone answered yes to only 1 question, a.
In the fifth group, everyone (all 1 person) answered "yes" to 1 question, b.
In this example, the sum of these counts is 3 + 0 + 1 + 1 + 1 = 6.
For each group, count the number of questions to which everyone answered "yes". What is the sum of those counts?
Your puzzle answer was 3143.
Both parts of this puzzle are complete! They provide two gold stars: **
At this point, you should return to your Advent calendar and try another puzzle.
*/
// day6_part2 sums, over all groups, the number of questions to which
// EVERYONE in the group answered "yes": a letter counts only when its
// occurrence count equals the group's size.
func day6_part2() {
	contents := getFilesContents("day06.input")
	groups := strings.Split(contents, "\n\n")
	var total uint = 0
	for _, group := range groups {
		persons := strings.Split(group, "\n")
		// One slot per byte value up to 'z'; a = ascii 97, 26 alphabet letters.
		letters := make([]uint, 97+26)
		// e.g. a group of 3 persons: a question counts only if seen 3 times.
		mustCount := len(persons)
		for _, person := range persons {
			for _, r := range person { // one person's answers
				letters[int(r)] += 1
			}
		}
		var count uint = 0
		for _, freq := range letters {
			if freq == uint(mustCount) {
				count += 1
			}
		}
		fmt.Println(letters[97:], mustCount, count)
		total += count
	}
	fmt.Println("total unique letters of the alphabet, or 'yes' EVERYONED answered in groups", total)
} | day06.go | 0.671686 | 0.583381 | day06.go | starcoder |
package tile3d
import (
"math"
"github.com/flywave/go3d/vec3"
)
// rangeScale16 is the largest 16-bit quantized value; quantized positions
// span [0, 0xffff].
const rangeScale16 = 0xffff

// rangeScale8 is the 8-bit equivalent, spanning [0, 0xff].
const rangeScale8 = 0xff
// computeScale returns the quantization scale for an axis of the given
// extent. A zero extent maps to 1 so later divisions stay well-defined;
// any other extent is used as-is. (rangeScale is currently unused but the
// parameter is kept so the signature stays stable for callers.)
func computeScale(extent float32, rangeScale uint16) float32 {
	if extent != 0 {
		return extent
	}
	return 1
}
// isInRange reports whether a quantized value lies within [0, rangeScale].
//
// The comparison is done in int to fix a uint16 overflow: the previous
// formulation (qpos < rangeScale+1) wrapped to (qpos < 0) when rangeScale
// was 0xffff, rejecting every input. The former qpos >= 0 test was also
// vacuous for an unsigned type and has been dropped.
func isInRange(qpos uint16, rangeScale uint16) bool {
	return int(qpos) <= int(rangeScale)
}
// Quantize maps pos into the integer range [0, rangeScale] relative to
// origin: the offset from origin is multiplied by rangeScale/scale,
// clamped to the valid range, and truncated with Floor.
func Quantize(pos float64, origin float32, scale float32, rangeScale uint16) uint16 {
	step := float64(float32(rangeScale) / scale)
	raw := (pos - float64(origin)) * step
	clamped := math.Max(0.0, math.Min(float64(rangeScale), raw))
	return uint16(math.Floor(clamped))
}
func IsQuantizable(pos float64, origin float32, scale float32, rangeScale uint16) bool {
return isInRange(Quantize(pos, origin, scale, rangeScale), rangeScale16)
}
// UnQuantize maps a quantized value back to its approximate original
// position. It is the inverse of Quantize, which multiplies the offset
// from origin by rangeScale/scale, so the reverse multiplies qpos by
// scale/rangeScale.
//
// Fix: the previous body computed origin + qpos/(scale*rangeScale), which
// divided by scale instead of multiplying and therefore did not invert
// Quantize for any scale other than trivial ones.
func UnQuantize(qpos uint16, origin float32, scale float32, rangeScale uint16) float64 {
	return float64(origin) + float64(qpos)*float64(scale)/float64(rangeScale)
}
// IsQuantized reports whether qpos is a valid quantized value, i.e. lies
// within [0, rangeScale16].
//
// The previous additional check qpos == uint16(math.Floor(float64(qpos)))
// was a tautology — a uint16 is always integral — and has been removed.
func IsQuantized(qpos uint16) bool {
	return isInRange(qpos, rangeScale16)
}
// QParams3d holds the per-axis origin and scale used to quantize and
// unquantize 3D points.
type QParams3d struct {
	Origin [3]float32
	Scale  [3]float32
}

// SetFromRange initializes the parameters from an axis-aligned box: the
// origin is the box minimum and each scale is the box extent on that axis
// (with zero extents mapped to 1 by computeScale).
func (p *QParams3d) SetFromRange(range_ *vec3.Box, rangeScale uint16) {
	for axis := 0; axis < 3; axis++ {
		p.Origin[axis] = range_.Min[axis]
		p.Scale[axis] = computeScale(range_.Max[axis]-range_.Min[axis], rangeScale)
	}
}
// rangeDiagonal returns, for each axis, the full 16-bit range divided by
// that axis's scale; axes with a zero scale yield 0 to avoid division by
// zero.
func (p *QParams3d) rangeDiagonal() [3]float32 {
	var diag [3]float32
	for axis := 0; axis < 3; axis++ {
		if s := p.Scale[axis]; s != 0 {
			diag[axis] = rangeScale16 / s
		}
	}
	return diag
}

// QuantizePoint3d quantizes each component of pos using the given
// parameters and the 16-bit range.
func QuantizePoint3d(pos [3]float64, params *QParams3d) [3]uint16 {
	var out [3]uint16
	for axis := 0; axis < 3; axis++ {
		out[axis] = Quantize(pos[axis], params.Origin[axis], params.Scale[axis], rangeScale16)
	}
	return out
}

// UnQuantizePoint3d reverses QuantizePoint3d component-wise.
func UnQuantizePoint3d(qpos [3]uint16, params *QParams3d) [3]float64 {
	var out [3]float64
	for axis := 0; axis < 3; axis++ {
		out[axis] = UnQuantize(qpos[axis], params.Origin[axis], params.Scale[axis], rangeScale16)
	}
	return out
} | quantization.go | 0.681091 | 0.627923 | quantization.go | starcoder |
package ent
import (
"fmt"
"strings"
"time"
"entgo.io/ent/dialect/sql"
"github.com/open-farms/inventory/ent/location"
"github.com/open-farms/inventory/ent/vehicle"
)
// Vehicle is the model entity for the Vehicle schema.
type Vehicle struct {
	config `json:"-"`
	// ID of the ent.
	ID int `json:"id,omitempty"`
	// CreateTime holds the value of the "create_time" field.
	CreateTime time.Time `json:"create_time,omitempty"`
	// UpdateTime holds the value of the "update_time" field.
	UpdateTime time.Time `json:"update_time,omitempty"`
	// Make holds the value of the "make" field.
	Make string `json:"make,omitempty"`
	// Model holds the value of the "model" field.
	Model string `json:"model,omitempty"`
	// Hours holds the value of the "hours" field.
	Hours int64 `json:"hours,omitempty"`
	// Year holds the value of the "year" field.
	Year int64 `json:"year,omitempty"`
	// Active holds the value of the "active" field.
	Active bool `json:"active,omitempty"`
	// Power holds the value of the "power" field.
	Power string `json:"power,omitempty"`
	// Edges holds the relations/edges for other nodes in the graph.
	// The values are being populated by the VehicleQuery when eager-loading is set.
	Edges VehicleEdges `json:"edges"`
	// category_vehicle holds the raw foreign-key column value for the
	// category edge; populated by assignValues, nil when the edge is unset.
	category_vehicle *int
	// location_vehicle holds the raw foreign-key column value for the
	// location edge; populated by assignValues, nil when the edge is unset.
	location_vehicle *int
}
// VehicleEdges holds the relations/edges for other nodes in the graph.
type VehicleEdges struct {
	// Location holds the value of the location edge.
	Location *Location `json:"location,omitempty"`
	// loadedTypes holds the information for reporting if a
	// type was loaded (or requested) in eager-loading or not.
	// Index 0 corresponds to the location edge (see LocationOrErr).
	loadedTypes [1]bool
}
// LocationOrErr returns the Location value or an error if the edge
// was not loaded in eager-loading, or loaded but was not found.
func (e VehicleEdges) LocationOrErr() (*Location, error) {
	if !e.loadedTypes[0] {
		return nil, &NotLoadedError{edge: "location"}
	}
	if e.Location == nil {
		// The edge was eager-loaded but the referenced row was absent.
		return nil, &NotFoundError{label: location.Label}
	}
	return e.Location, nil
}
// scanValues returns the types for scanning values from sql.Rows.
// Each column name is mapped to a fresh sql.Null* holder of the matching
// Go type; unknown columns are an error.
func (*Vehicle) scanValues(columns []string) ([]interface{}, error) {
	values := make([]interface{}, len(columns))
	for i, column := range columns {
		switch column {
		case vehicle.FieldActive:
			values[i] = new(sql.NullBool)
		case vehicle.FieldID, vehicle.FieldHours, vehicle.FieldYear:
			values[i] = new(sql.NullInt64)
		case vehicle.FieldMake, vehicle.FieldModel, vehicle.FieldPower:
			values[i] = new(sql.NullString)
		case vehicle.FieldCreateTime, vehicle.FieldUpdateTime:
			values[i] = new(sql.NullTime)
		case vehicle.ForeignKeys[0], vehicle.ForeignKeys[1]:
			// category_vehicle and location_vehicle edge columns.
			values[i] = new(sql.NullInt64)
		default:
			return nil, fmt.Errorf("unexpected column %q for type Vehicle", column)
		}
	}
	return values, nil
}
// assignValues assigns the values that were returned from sql.Rows (after scanning)
// to the Vehicle fields.
// NOTE(review): the column dispatch here must stay in sync with scanValues
// above — each case asserts the exact sql.Null* type that scanValues
// allocated for that column.
func (v *Vehicle) assignValues(columns []string, values []interface{}) error {
	if m, n := len(values), len(columns); m < n {
		return fmt.Errorf("mismatch number of scan values: %d != %d", m, n)
	}
	for i := range columns {
		switch columns[i] {
		case vehicle.FieldID:
			value, ok := values[i].(*sql.NullInt64)
			if !ok {
				return fmt.Errorf("unexpected type %T for field id", value)
			}
			v.ID = int(value.Int64)
		case vehicle.FieldCreateTime:
			if value, ok := values[i].(*sql.NullTime); !ok {
				return fmt.Errorf("unexpected type %T for field create_time", values[i])
			} else if value.Valid {
				v.CreateTime = value.Time
			}
		case vehicle.FieldUpdateTime:
			if value, ok := values[i].(*sql.NullTime); !ok {
				return fmt.Errorf("unexpected type %T for field update_time", values[i])
			} else if value.Valid {
				v.UpdateTime = value.Time
			}
		case vehicle.FieldMake:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field make", values[i])
			} else if value.Valid {
				v.Make = value.String
			}
		case vehicle.FieldModel:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field model", values[i])
			} else if value.Valid {
				v.Model = value.String
			}
		case vehicle.FieldHours:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field hours", values[i])
			} else if value.Valid {
				v.Hours = value.Int64
			}
		case vehicle.FieldYear:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field year", values[i])
			} else if value.Valid {
				v.Year = value.Int64
			}
		case vehicle.FieldActive:
			if value, ok := values[i].(*sql.NullBool); !ok {
				return fmt.Errorf("unexpected type %T for field active", values[i])
			} else if value.Valid {
				v.Active = value.Bool
			}
		case vehicle.ForeignKeys[0]:
			// Edge column: stored in the unexported pointer so nil can
			// represent an unset foreign key.
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for edge-field category_vehicle", value)
			} else if value.Valid {
				v.category_vehicle = new(int)
				*v.category_vehicle = int(value.Int64)
			}
		case vehicle.ForeignKeys[1]:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for edge-field location_vehicle", value)
			} else if value.Valid {
				v.location_vehicle = new(int)
				*v.location_vehicle = int(value.Int64)
			}
		}
	}
	return nil
}
// QueryLocation queries the "location" edge of the Vehicle entity.
func (v *Vehicle) QueryLocation() *LocationQuery {
	client := &VehicleClient{config: v.config}
	return client.QueryLocation(v)
}

// Update returns a builder for updating this Vehicle.
// Note that you need to call Vehicle.Unwrap() before calling this method if this Vehicle
// was returned from a transaction, and the transaction was committed or rolled back.
func (v *Vehicle) Update() *VehicleUpdateOne {
	client := &VehicleClient{config: v.config}
	return client.UpdateOne(v)
}
// Unwrap unwraps the Vehicle entity that was returned from a transaction after it was closed,
// so that all future queries will be executed through the driver which created the transaction.
func (v *Vehicle) Unwrap() *Vehicle {
	drv, ok := v.config.driver.(*txDriver)
	if !ok {
		panic("ent: Vehicle is not a transactional entity")
	}
	v.config.driver = drv.drv
	return v
}
// String implements the fmt.Stringer.
func (v *Vehicle) String() string {
	var b strings.Builder
	fmt.Fprintf(&b, "Vehicle(id=%v", v.ID)
	fmt.Fprintf(&b, ", create_time=%s", v.CreateTime.Format(time.ANSIC))
	fmt.Fprintf(&b, ", update_time=%s", v.UpdateTime.Format(time.ANSIC))
	fmt.Fprintf(&b, ", make=%s", v.Make)
	fmt.Fprintf(&b, ", model=%s", v.Model)
	fmt.Fprintf(&b, ", hours=%v", v.Hours)
	fmt.Fprintf(&b, ", year=%v", v.Year)
	fmt.Fprintf(&b, ", active=%v", v.Active)
	fmt.Fprintf(&b, ", power=%s", v.Power)
	b.WriteByte(')')
	return b.String()
}
// Vehicles is a parsable slice of Vehicle.
type Vehicles []*Vehicle

// config propagates the shared configuration to every element.
func (v Vehicles) config(cfg config) {
	for i := range v {
		v[i].config = cfg
	}
} | ent/vehicle.go | 0.663996 | 0.437163 | vehicle.go | starcoder |
package medtronic
import (
"log"
"time"
)
// CarbRatio represents an entry in a carb ratio schedule.
type CarbRatio struct {
	Start TimeOfDay // time of day at which this ratio takes effect
	Ratio Ratio
	Units CarbUnitsType
}

// Newer pumps store carb ratios as 10x grams/unit or 1000x units/exchange.
// Older pumps store carb ratios as grams/unit or 10x units/exchange.
// Ratio represents a carb ratio using the higher resolution:
// 10x grams/unit or 1000x units/exchange.
type Ratio int
// intToRatio converts a raw pump value to the high-resolution Ratio
// representation. Newer (>22) pump families already encode ratios at high
// resolution, so the value is used as-is; older families are scaled up
// (x10 for grams, x100 for exchanges). Panics on an unknown carb unit for
// an old family.
func intToRatio(n int, u CarbUnitsType, family Family) Ratio {
	if family > 22 {
		// Already high-resolution; no conversion needed.
		return Ratio(n)
	}
	switch u {
	case Grams:
		return Ratio(10 * n)
	case Exchanges:
		return Ratio(100 * n)
	}
	log.Panicf("unknown carb unit %d", u)
	return 0 // unreachable; Panicf never returns
}
// CarbRatioSchedule represents a carb ratio schedule.
type CarbRatioSchedule []CarbRatio

// carbRatioStep returns the per-entry encoding width in bytes for the
// given pump family: 2 on older (<=22) families, 3 on newer ones.
func carbRatioStep(family Family) int {
	switch {
	case family <= 22:
		return 2
	default:
		return 3
	}
}
// decodeCarbRatioSchedule parses the raw schedule bytes: each entry is a
// half-hour start time followed by a 1-byte (old families) or 2-byte (new
// families) ratio value. A zero start time terminates the schedule except
// on the very first entry.
func decodeCarbRatioSchedule(data []byte, units CarbUnitsType, family Family) CarbRatioSchedule {
	step := carbRatioStep(family)
	var sched CarbRatioSchedule
	for i := 0; i+step <= len(data); i += step {
		start := halfHoursToTimeOfDay(data[i])
		if start == 0 && len(sched) != 0 {
			break // midnight after the first entry marks the end
		}
		value := int(data[i+1])
		if family > 22 {
			value = twoByteInt(data[i+1 : i+3])
		}
		sched = append(sched, CarbRatio{
			Start: start,
			Ratio: intToRatio(value, units, family),
			Units: units,
		})
	}
	return sched
}
// CarbRatios returns the pump's carb ratio schedule.
// On any communication or format error it records the error on the pump
// (checked via pump.Error) and returns an empty schedule.
func (pump *Pump) CarbRatios() CarbRatioSchedule {
	data := pump.Execute(carbRatios)
	if pump.Error() != nil {
		return CarbRatioSchedule{}
	}
	// Minimum response: a length byte and a units byte.
	if len(data) < 2 {
		pump.BadResponse(carbRatios, data)
		return CarbRatioSchedule{}
	}
	// Format of response depends on the pump family.
	family := pump.Family()
	n := int(data[0]) - 1
	step := carbRatioStep(family)
	// The payload length (excluding the units byte) must be a whole
	// number of fixed-size entries.
	if n%step != 0 {
		pump.BadResponse(carbRatios, data)
		return CarbRatioSchedule{}
	}
	units := CarbUnitsType(data[1])
	return decodeCarbRatioSchedule(data[step:step+n], units, family)
}
// CarbRatioAt returns the carb ratio in effect at the given time: the
// last schedule entry whose start time is not after t's time of day.
// Returns the zero CarbRatio for an empty schedule.
func (s CarbRatioSchedule) CarbRatioAt(t time.Time) CarbRatio {
	at := SinceMidnight(t)
	var current CarbRatio
	for _, entry := range s {
		if entry.Start > at {
			break
		}
		current = entry
	}
	return current
} | carbratios.go | 0.753285 | 0.493409 | carbratios.go | starcoder |
// Package day12 solves AoC 2018 day 12.
package day12
import (
"fmt"
"strings"
"github.com/fis/aoc/glue"
"github.com/fis/aoc/util"
)
// init registers this package's solver with the glue framework for
// AoC year 2018, day 12.
func init() {
	glue.RegisterSolver(2018, 12, glue.ChunkSolver(solve))
}

// initialPrefix is the header that precedes the initial pot state in the input.
const initialPrefix = "initial state: "
// solve expects two input chunks — the initial state header and the rule
// list — and returns the part 1 (20 generations) and part 2 (50 billion
// generations, extrapolated from the fixed point) checksums.
func solve(chunks []string) ([]string, error) {
	if len(chunks) != 2 {
		return nil, fmt.Errorf("expected 2 chunks, got %d", len(chunks))
	}
	header := chunks[0]
	if !strings.HasPrefix(header, initialPrefix) {
		return nil, fmt.Errorf("invalid header: %s", header)
	}
	state := parseState(strings.TrimPrefix(header, initialPrefix))
	rules := parseRules(util.Lines(chunks[1]))

	state.evolve(20, rules)
	part1 := state.checksum()

	// After the pattern stabilizes it only translates by `shift` per
	// generation, so the remaining generations are applied to the offset.
	shift := state.findFixed(rules)
	state.offset += (50000000000 - state.gen) * shift
	part2 := state.checksum()

	return glue.Ints(part1, part2), nil
}
// stateVector holds one generation of pots: data[i] is 1 when pot number
// (offset+i) contains a plant, 0 otherwise. gen counts how many
// generations have been evolved so far.
type stateVector struct {
	data []byte
	offset int
	gen int
}
// parseState builds the initial state from the '#'/'.' text, adding four
// empty pots of margin on each side (hence the -4 offset) so rule windows
// can look past the edges.
func parseState(text string) stateVector {
	padded := "...." + text + "...."
	return stateVector{data: asBits(padded), offset: -4}
}

// evolve advances the state the given number of generations, ping-ponging
// between two buffers to avoid reallocating each step.
func (s *stateVector) evolve(generations int, r *ruleSet) {
	var scratch []byte
	for n := 0; n < generations; n++ {
		result, disp := r.step(s.data, scratch)
		scratch = s.data
		s.data = result
		s.offset += disp
		s.gen++
	}
}

// findFixed evolves until the pot pattern repeats exactly (only its
// position shifts) and returns the per-generation displacement. The state
// is left at the last generation before the fixed point.
func (s *stateVector) findFixed(r *ruleSet) (disp int) {
	var scratch []byte
	for {
		var result []byte
		result, disp = r.step(s.data, scratch)
		if equalBytes(s.data, result) {
			return disp
		}
		scratch, s.data = s.data, result
		s.offset += disp
		s.gen++
	}
}

// checksum sums the pot numbers of all pots that contain a plant.
func (s stateVector) checksum() (sum int) {
	for i, cell := range s.data {
		if cell == 0 {
			continue
		}
		sum += s.offset + i
	}
	return sum
}
// ruleSet maps each 5-cell neighborhood (packed into a 5-bit index by
// idx) to the next-generation value of the center cell.
type ruleSet [32]byte

// parseRules builds a ruleSet from lines of the form "LLCRR => N";
// lines that do not match the 10-character pattern are skipped.
func parseRules(lines []string) *ruleSet {
	r := new(ruleSet)
	for _, line := range lines {
		if len(line) != 10 || line[5:9] != " => " {
			continue
		}
		r[idx(asBits(line[0:5]))] = asBit(line[9])
	}
	return r
}

// lookup returns the rule output for the 5-cell window d.
func (r *ruleSet) lookup(d []byte) byte {
	return r[idx(d)]
}

// step computes one generation from in into out (reusing out's storage)
// and returns the new buffer plus the displacement of the new offset
// relative to the old one.
func (r *ruleSet) step(in, out []byte) ([]byte, int) {
	// Start with a 4-cell zero margin on the left.
	out = append(out[:0], 0, 0, 0, 0)
	// The first window's center is in[2], and out carries 4 leading
	// zeros, so the output initially starts 2 cells left of the input.
	disp := -2
	for x := 0; x+5 <= len(in); x++ {
		b := r.lookup(in[x : x+5])
		// Skip leading empty pots instead of storing them, shifting
		// the offset right by one for each skip.
		if len(out) == 4 && b == 0 {
			disp++
		} else {
			out = append(out, b)
		}
	}
	// Pad with zeros until the trailing 4-cell margin is empty again,
	// preserving the invariant the next step relies on.
	for out[len(out)-4] != 0 || out[len(out)-3] != 0 || out[len(out)-2] != 0 || out[len(out)-1] != 0 {
		out = append(out, 0)
	}
	return out, disp
}
// idx packs the first five 0/1 cells of d into a 5-bit integer,
// most significant bit first.
func idx(d []byte) int {
	v := 0
	for _, bit := range d[:5] {
		v = v<<1 | int(bit)
	}
	return v
}

// asBit maps '#' to 1 and every other byte to 0.
func asBit(c byte) byte {
	switch c {
	case '#':
		return 1
	default:
		return 0
	}
}

// asBits converts a '#'/'.' string into a slice of 1/0 bytes
// (nil for an empty string).
func asBits(s string) (out []byte) {
	for i := 0; i < len(s); i++ {
		out = append(out, asBit(s[i]))
	}
	return out
}
// equalBytes reports whether a and b have identical length and contents.
func equalBytes(a, b []byte) bool {
	if len(a) != len(b) {
		return false
	}
	for i, av := range a {
		if av != b[i] {
			return false
		}
	}
	return true
} | 2018/day12/day12.go | 0.558809 | 0.415551 | day12.go | starcoder |
// Deep equality test via reflection
package deepequalexplained
import (
"fmt"
"math"
"reflect"
"unsafe"
)
// visit records a pair of addresses (plus their common type) that have
// already been compared, so cyclic data structures terminate.
type visit struct {
	a1 unsafe.Pointer
	a2 unsafe.Pointer
	typ reflect.Type
}
// deepValueEqual recursively compares v1 and v2 and returns nil when they
// are deeply equal, or an error whose message is a path-like description
// of the first difference (each recursion level prepends its own index,
// field name or kind marker). visited guards against reference cycles;
// depth counts recursion levels but is otherwise unused here.
func deepValueEqual(v1, v2 reflect.Value, visited map[visit]bool, depth int) error {
	if !v1.IsValid() || !v2.IsValid() {
		if v1.IsValid() == v2.IsValid() {
			return nil
		} else if !v1.IsValid() {
			return fmt.Errorf(" in x is invalid but in y is not")
		} else {
			return fmt.Errorf(" in y is invalid but in x is not")
		}
	}
	if v1.Type() != v2.Type() {
		return fmt.Errorf(" has different types, where in x is %v but in y is %v", v1.Type().Name(), v2.Type().Name())
	}
	// hard reports the kinds that can participate in reference cycles and
	// therefore need cycle tracking via visited.
	hard := func(k reflect.Kind) bool {
		switch k {
		case reflect.Array, reflect.Map, reflect.Slice, reflect.Struct:
			return true
		}
		return false
	}
	if v1.CanAddr() && v2.CanAddr() && hard(v1.Kind()) {
		addr1 := unsafe.Pointer(v1.UnsafeAddr())
		addr2 := unsafe.Pointer(v2.UnsafeAddr())
		if uintptr(addr1) > uintptr(addr2) {
			// Canonicalize order to reduce number of entries in visited.
			// Assumes non-moving garbage collector.
			addr1, addr2 = addr2, addr1
		}
		// Short circuit if references are already seen.
		typ := v1.Type()
		v := visit{addr1, addr2, typ}
		if visited[v] {
			return nil
		}
		// Remember for later.
		visited[v] = true
	}
	switch v1.Kind() {
	case reflect.Array:
		for i := 0; i < v1.Len(); i++ {
			if err := deepValueEqual(v1.Index(i), v2.Index(i), visited, depth+1); err != nil {
				return fmt.Errorf("[%d]%s", i, err.Error())
			}
		}
		return nil
	case reflect.Slice:
		if v1.IsNil() != v2.IsNil() {
			if v1.IsNil() {
				return fmt.Errorf(" in x is nil but in y is not")
			} else {
				return fmt.Errorf(" in y is nil but in x is not")
			}
		}
		if v1.Len() != v2.Len() {
			return fmt.Errorf(" do not have the same length")
		}
		// Same backing array and length: contents must be identical.
		if v1.Pointer() == v2.Pointer() {
			return nil
		}
		for i := 0; i < v1.Len(); i++ {
			if err := deepValueEqual(v1.Index(i), v2.Index(i), visited, depth+1); err != nil {
				return fmt.Errorf("[%d]%s", i, err.Error())
			}
		}
		return nil
	case reflect.Interface:
		if v1.IsNil() || v2.IsNil() {
			if v1.IsNil() == v2.IsNil() {
				return nil
			} else {
				return fmt.Errorf(" do not have the same interface")
			}
		}
		if err := deepValueEqual(v1.Elem(), v2.Elem(), visited, depth+1); err != nil {
			return fmt.Errorf("(Interface)%s", err.Error())
		}
		return nil
	case reflect.Ptr:
		// Identical pointers are trivially deeply equal.
		if v1.Pointer() == v2.Pointer() {
			return nil
		}
		if err := deepValueEqual(v1.Elem(), v2.Elem(), visited, depth+1); err != nil {
			return fmt.Errorf("(Ptr)%s", err.Error())
		}
		return nil
	case reflect.Struct:
		for i, n := 0, v1.NumField(); i < n; i++ {
			if err := deepValueEqual(v1.Field(i), v2.Field(i), visited, depth+1); err != nil {
				return fmt.Errorf(".%s%s", v1.Type().Field(i).Name, err.Error())
			}
		}
		return nil
	case reflect.Map:
		if v1.IsNil() != v2.IsNil() {
			if v1.IsNil() {
				return fmt.Errorf(" are not equal, where in x is nil but in y is not")
			} else {
				return fmt.Errorf(" are not equal, where in y is nil but in x is not")
			}
		}
		if v1.Len() != v2.Len() {
			return fmt.Errorf(" do not have the same length, where in x is %d but in y is %d", v1.Len(), v2.Len())
		}
		if v1.Pointer() == v2.Pointer() {
			return nil
		}
		for _, k := range v1.MapKeys() {
			val1 := v1.MapIndex(k)
			val2 := v2.MapIndex(k)
			if !val1.IsValid() {
				return fmt.Errorf("[%v] is invalid in x", k)
			} else if !val2.IsValid() {
				return fmt.Errorf("[%v] is invalid in y", k)
			} else if err := deepValueEqual(v1.MapIndex(k), v2.MapIndex(k), visited, depth+1); err != nil {
				return fmt.Errorf("[%v]%s", k, err.Error())
			}
		}
		return nil
	case reflect.Func:
		if v1.IsNil() && v2.IsNil() {
			return nil
		}
		// Can't do better than this:
		return fmt.Errorf(" has different func")
	default:
		// Trying to compare between two values
		if v1.Kind() == reflect.Float64 && math.IsNaN(v1.Float()) {
			return fmt.Errorf(" in x is NaN float")
		} else if v2.Kind() == reflect.Float64 && math.IsNaN(v2.Float()) {
			return fmt.Errorf(" in y is NaN float")
		} else if fmt.Sprintf("%T", v1) != fmt.Sprintf("%T", v2) {
			return fmt.Errorf(" have different types, where in x is %T but in y is %T", v1, v2)
		} else if fmt.Sprintf("%v", v1) != fmt.Sprintf("%v", v2) {
			return fmt.Errorf(" are not equal, where in x is %v but in y is %v", v1, v2)
		}
		return nil
	}
}
// DeepEqualExplained compares x and y like a deep-equality test but,
// instead of a bare bool, returns nil when they are equal or an error
// describing the first difference found.
func DeepEqualExplained(x, y interface{}) error {
	if x == nil || y == nil {
		switch {
		case x == nil && y == nil:
			return nil
		case x == nil:
			return fmt.Errorf("x is nil while y is not")
		default:
			return fmt.Errorf("y is nil while x is not")
		}
	}
	v1, v2 := reflect.ValueOf(x), reflect.ValueOf(y)
	if v1.Type() != v2.Type() {
		return fmt.Errorf("values have different types, where in x is %v but in y is %v", v1.Type().String(), v2.Type().String())
	}
	if err := deepValueEqual(v1, v2, make(map[visit]bool), 0); err != nil {
		return fmt.Errorf("values%s", err.Error())
	}
	return nil
} | deepequalexplained.go | 0.501953 | 0.461684 | deepequalexplained.go | starcoder |
package graphic
import (
"github.com/veandco/go-sdl2/img"
"github.com/veandco/go-sdl2/sdl"
)
// Instance is one drawable occurrence of a Sprite: its destination
// rectangle, rotation angle, rotation center (relative half-extents) and a
// back-pointer to the owning sprite.
type Instance struct {
	destRect sdl.FRect
	angle float64
	center sdl.FPoint
	parentSprite *Sprite
}

// Sprite owns a texture, the list of its instances, and the source
// rectangle selecting the region of the texture to draw.
type Sprite struct {
	texture *sdl.Texture
	instances List
	srcRect sdl.Rect
}
// NewSprite creates a sprite from an image file loaded through the given
// renderer, using srcRect as the texture sub-region to draw. On load
// failure the zero Sprite is returned together with the error.
func NewSprite(renderer *sdl.Renderer, imgPath string, srcRect sdl.Rect) (Sprite, error) {
	var sprite Sprite
	texture, err := img.LoadTexture(renderer, imgPath)
	if err != nil {
		return sprite, err
	}
	sprite.texture = texture
	sprite.srcRect = srcRect
	return sprite, nil
}

// NewInstance adds an instance to the sprite, sizing its dest rectangle
// from the sprite's src rectangle and positioning it around center.
func (sprite *Sprite) NewInstance(angle float64, center sdl.FPoint) *Instance {
	instance := &Instance{angle: angle, parentSprite: sprite}
	// NewPosition runs while destRect is still zero-sized, matching the
	// original initialization order; the size is applied afterwards.
	instance.NewPosition(center)
	instance.destRect.W = float32(sprite.srcRect.W)
	instance.destRect.H = float32(sprite.srcRect.H)
	sprite.instances.Push(instance)
	return instance
}
// NewPosition moves the instance so that its center sits at the given
// point, refreshing the stored half-extent center as well.
func (instance *Instance) NewPosition(center sdl.FPoint) {
	halfW := instance.destRect.W / 2
	halfH := instance.destRect.H / 2
	instance.center = sdl.FPoint{X: halfW, Y: halfH}
	instance.destRect.X = center.X - halfW
	instance.destRect.Y = center.Y - halfH
}

// NewPositionCorner moves the instance so its top-left corner sits at the
// given point.
func (instance *Instance) NewPositionCorner(corner sdl.FPoint) {
	instance.center = sdl.FPoint{X: instance.destRect.W / 2, Y: instance.destRect.H / 2}
	instance.destRect.X = corner.X
	instance.destRect.Y = corner.Y
}
// SetAngle sets the instance's rotation angle.
func (instance *Instance) SetAngle(angle float64) {
	instance.angle = angle
}

// ShowInstance makes an instance visible by restoring its dest rectangle
// to the sprite's native size.
func (sprite *Sprite) ShowInstance(instance *Instance) {
	instance.destRect.W = float32(sprite.srcRect.W)
	instance.destRect.H = float32(sprite.srcRect.H)
}

// Hide makes the instance invisible (zero-sized) until Show is called.
func (instance *Instance) Hide() {
	instance.destRect.W = 0
	instance.destRect.H = 0
}

// Show makes the instance visible again via its parent sprite.
func (instance *Instance) Show() {
	instance.parentSprite.ShowInstance(instance)
}
// Zoom scales the drawn size by multiplier about the instance's current
// center. It has no effect on hidden (zero-sized) instances and may show
// artifacts at very small texture sizes.
func (instance *Instance) Zoom(multiplier float32) {
	instance.destRect.W, instance.destRect.H = multiplier*instance.destRect.W, multiplier*instance.destRect.H
	// instance.center still holds the pre-zoom half-extents here, so
	// center + corner is the old absolute center point; NewPosition then
	// re-centers the resized rect around it and refreshes center.
	instance.NewPosition(sdl.FPoint{instance.center.X + instance.destRect.X, instance.center.Y + instance.destRect.Y})
}
// IsZoomed reports whether the instance is currently scaled away from its
// sprite's native size; hidden instances never count as zoomed.
func (instance *Instance) IsZoomed() bool {
	if instance.IsHidden() {
		return false
	}
	base := instance.parentSprite.srcRect
	return instance.destRect.W != float32(base.W) || instance.destRect.H != float32(base.H)
}

// ResetZoom restores the dest rectangle to the sprite's native size; it
// leaves hidden instances untouched (IsZoomed is false for them).
func (instance *Instance) ResetZoom() {
	if !instance.IsZoomed() {
		return
	}
	instance.destRect.W = float32(instance.parentSprite.srcRect.W)
	instance.destRect.H = float32(instance.parentSprite.srcRect.H)
}

// IsHidden reports whether the instance is invisible (zero-sized).
func (instance *Instance) IsHidden() bool {
	return instance.destRect.W == 0 || instance.destRect.H == 0
}

// GetBaseWitdth returns the width of an unzoomed instance.
// NOTE(review): the name carries a typo ("Witdth") but is kept for
// backward compatibility with existing callers.
func (instance Instance) GetBaseWitdth() float32 {
	return float32(instance.parentSprite.srcRect.W)
}

// GetBaseHeight returns the height of an unzoomed instance.
func (instance Instance) GetBaseHeight() float32 {
	return float32(instance.parentSprite.srcRect.H)
} | src/lib/graphic/sprite.go | 0.824921 | 0.550607 | sprite.go | starcoder |
package models
import (
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization"
)
// AttributeMapping models a directory-synchronization attribute mapping;
// it is (de)serialized through the Microsoft Kiota runtime.
type AttributeMapping struct {
    // Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
    additionalData map[string]interface{}
    // Default value to be used in case the source property was evaluated to null. Optional.
    defaultValue *string
    // For internal use only.
    exportMissingReferences *bool
    // Defines when this attribute should be exported to the target directory. Possible values are: FlowWhenChanged and FlowAlways. Default is FlowWhenChanged.
    flowBehavior *AttributeFlowBehavior
    // Defines when this attribute should be updated in the target directory. Possible values are: Always (default), ObjectAddOnly (only when new object is created), MultiValueAddOnly (only when the change is adding new values to a multi-valued attribute).
    flowType *AttributeFlowType
    // If higher than 0, this attribute will be used to perform an initial match of the objects between source and target directories. The synchronization engine will try to find the matching object using attribute with lowest value of matching priority first. If not found, the attribute with the next matching priority will be used, and so on a until match is found or no more matching attributes are left. Only attributes that are expected to have unique values, such as email, should be used as matching attributes.
    matchingPriority *int32
    // Defines how a value should be extracted (or transformed) from the source object.
    source AttributeMappingSourceable
    // Name of the attribute on the target object.
    targetAttributeName *string
}
// NewAttributeMapping instantiates a new attributeMapping and sets the default values.
func NewAttributeMapping()(*AttributeMapping) {
    m := new(AttributeMapping)
    m.SetAdditionalData(make(map[string]interface{}))
    return m
}
// CreateAttributeMappingFromDiscriminatorValue creates a new instance of the appropriate class based on discriminator value
func CreateAttributeMappingFromDiscriminatorValue(parseNode i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, error) {
    // No derived types to discriminate between here: the parse node is
    // ignored and a plain AttributeMapping is always returned.
    return NewAttributeMapping(), nil
}
// GetAdditionalData gets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
// A nil receiver is tolerated and yields nil, so accessors can be chained safely.
func (m *AttributeMapping) GetAdditionalData()(map[string]interface{}) {
    // Idiom fix: early return instead of else-after-return.
    if m == nil {
        return nil
    }
    return m.additionalData
}
// GetDefaultValue gets the defaultValue property value. Default value to be used in case the source property was evaluated to null. Optional.
func (m *AttributeMapping) GetDefaultValue()(*string) {
    if m == nil {
        return nil
    }
    return m.defaultValue
}
// GetExportMissingReferences gets the exportMissingReferences property value. For internal use only.
func (m *AttributeMapping) GetExportMissingReferences()(*bool) {
    if m == nil {
        return nil
    }
    return m.exportMissingReferences
}
// GetFieldDeserializers the deserialization information for the current model.
// Each entry maps a JSON property name to a closure that parses the value from
// the ParseNode and stores it on the receiver through the matching setter.
// A nil parsed value is skipped, so absent properties leave the model untouched.
func (m *AttributeMapping) GetFieldDeserializers()(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error)) {
    res := make(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error))
    // defaultValue: plain string property.
    res["defaultValue"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetStringValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetDefaultValue(val)
        }
        return nil
    }
    // exportMissingReferences: boolean property, internal use only.
    res["exportMissingReferences"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetBoolValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetExportMissingReferences(val)
        }
        return nil
    }
    // flowBehavior: enum, parsed via ParseAttributeFlowBehavior then asserted
    // back to the concrete enum pointer type.
    res["flowBehavior"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetEnumValue(ParseAttributeFlowBehavior)
        if err != nil {
            return err
        }
        if val != nil {
            m.SetFlowBehavior(val.(*AttributeFlowBehavior))
        }
        return nil
    }
    // flowType: enum, same pattern as flowBehavior.
    res["flowType"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetEnumValue(ParseAttributeFlowType)
        if err != nil {
            return err
        }
        if val != nil {
            m.SetFlowType(val.(*AttributeFlowType))
        }
        return nil
    }
    // matchingPriority: int32 property.
    res["matchingPriority"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetInt32Value()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetMatchingPriority(val)
        }
        return nil
    }
    // source: nested object, created through its own discriminator factory.
    res["source"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetObjectValue(CreateAttributeMappingSourceFromDiscriminatorValue)
        if err != nil {
            return err
        }
        if val != nil {
            m.SetSource(val.(AttributeMappingSourceable))
        }
        return nil
    }
    // targetAttributeName: plain string property.
    res["targetAttributeName"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetStringValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetTargetAttributeName(val)
        }
        return nil
    }
    return res
}
// GetFlowBehavior gets the flowBehavior property value. Defines when this attribute should be exported to the target directory. Possible values are: FlowWhenChanged and FlowAlways. Default is FlowWhenChanged.
func (m *AttributeMapping) GetFlowBehavior()(*AttributeFlowBehavior) {
    // Nil receivers are tolerated so chained accessor calls never panic.
    if m == nil {
        return nil
    }
    return m.flowBehavior
}
// GetFlowType gets the flowType property value. Defines when this attribute should be updated in the target directory. Possible values are: Always (default), ObjectAddOnly (only when new object is created), MultiValueAddOnly (only when the change is adding new values to a multi-valued attribute).
func (m *AttributeMapping) GetFlowType()(*AttributeFlowType) {
    // Nil receivers are tolerated so chained accessor calls never panic.
    if m == nil {
        return nil
    }
    return m.flowType
}
// GetMatchingPriority gets the matchingPriority property value. If higher than 0, this attribute will be used to perform an initial match of the objects between source and target directories. The synchronization engine will try to find the matching object using attribute with lowest value of matching priority first. If not found, the attribute with the next matching priority will be used, and so on until a match is found or no more matching attributes are left. Only attributes that are expected to have unique values, such as email, should be used as matching attributes.
func (m *AttributeMapping) GetMatchingPriority()(*int32) {
    // Nil receivers are tolerated so chained accessor calls never panic.
    if m == nil {
        return nil
    }
    return m.matchingPriority
}
// GetSource gets the source property value. Defines how a value should be extracted (or transformed) from the source object.
func (m *AttributeMapping) GetSource()(AttributeMappingSourceable) {
    // Nil receivers are tolerated so chained accessor calls never panic.
    if m == nil {
        return nil
    }
    return m.source
}
// GetTargetAttributeName gets the targetAttributeName property value. Name of the attribute on the target object.
func (m *AttributeMapping) GetTargetAttributeName()(*string) {
    // Nil receivers are tolerated so chained accessor calls never panic.
    if m == nil {
        return nil
    }
    return m.targetAttributeName
}
// Serialize serializes information the current object.
// Simple properties are written through their accessors; enum properties are
// written as their string names; the additional-data bag is appended last so
// unknown properties captured during deserialization round-trip unchanged.
func (m *AttributeMapping) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {
    {
        err := writer.WriteStringValue("defaultValue", m.GetDefaultValue())
        if err != nil {
            return err
        }
    }
    {
        err := writer.WriteBoolValue("exportMissingReferences", m.GetExportMissingReferences())
        if err != nil {
            return err
        }
    }
    // Enums are only written when set, converted to their string form first.
    if m.GetFlowBehavior() != nil {
        cast := (*m.GetFlowBehavior()).String()
        err := writer.WriteStringValue("flowBehavior", &cast)
        if err != nil {
            return err
        }
    }
    if m.GetFlowType() != nil {
        cast := (*m.GetFlowType()).String()
        err := writer.WriteStringValue("flowType", &cast)
        if err != nil {
            return err
        }
    }
    {
        err := writer.WriteInt32Value("matchingPriority", m.GetMatchingPriority())
        if err != nil {
            return err
        }
    }
    {
        err := writer.WriteObjectValue("source", m.GetSource())
        if err != nil {
            return err
        }
    }
    {
        err := writer.WriteStringValue("targetAttributeName", m.GetTargetAttributeName())
        if err != nil {
            return err
        }
    }
    // Unknown/extra properties are flushed after the declared ones.
    {
        err := writer.WriteAdditionalData(m.GetAdditionalData())
        if err != nil {
            return err
        }
    }
    return nil
}
// SetAdditionalData sets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
func (m *AttributeMapping) SetAdditionalData(value map[string]interface{})() {
    // Writing through a nil receiver is a silent no-op.
    if m == nil {
        return
    }
    m.additionalData = value
}
// SetDefaultValue sets the defaultValue property value. Default value to be used in case the source property was evaluated to null. Optional.
func (m *AttributeMapping) SetDefaultValue(value *string)() {
    // Writing through a nil receiver is a silent no-op.
    if m == nil {
        return
    }
    m.defaultValue = value
}
// SetExportMissingReferences sets the exportMissingReferences property value. For internal use only.
func (m *AttributeMapping) SetExportMissingReferences(value *bool)() {
    // Writing through a nil receiver is a silent no-op.
    if m == nil {
        return
    }
    m.exportMissingReferences = value
}
// SetFlowBehavior sets the flowBehavior property value. Defines when this attribute should be exported to the target directory. Possible values are: FlowWhenChanged and FlowAlways. Default is FlowWhenChanged.
func (m *AttributeMapping) SetFlowBehavior(value *AttributeFlowBehavior)() {
    // Writing through a nil receiver is a silent no-op.
    if m == nil {
        return
    }
    m.flowBehavior = value
}
// SetFlowType sets the flowType property value. Defines when this attribute should be updated in the target directory. Possible values are: Always (default), ObjectAddOnly (only when new object is created), MultiValueAddOnly (only when the change is adding new values to a multi-valued attribute).
func (m *AttributeMapping) SetFlowType(value *AttributeFlowType)() {
    // Writing through a nil receiver is a silent no-op.
    if m == nil {
        return
    }
    m.flowType = value
}
// SetMatchingPriority sets the matchingPriority property value. If higher than 0, this attribute will be used to perform an initial match of the objects between source and target directories. The synchronization engine will try to find the matching object using attribute with lowest value of matching priority first. If not found, the attribute with the next matching priority will be used, and so on a until match is found or no more matching attributes are left. Only attributes that are expected to have unique values, such as email, should be used as matching attributes.
func (m *AttributeMapping) SetMatchingPriority(value *int32)() {
    // Writing through a nil receiver is a silent no-op.
    if m == nil {
        return
    }
    m.matchingPriority = value
}
// SetSource sets the source property value. Defines how a value should be extracted (or transformed) from the source object.
func (m *AttributeMapping) SetSource(value AttributeMappingSourceable)() {
    // Writing through a nil receiver is a silent no-op.
    if m == nil {
        return
    }
    m.source = value
}
// SetTargetAttributeName sets the targetAttributeName property value. Name of the attribute on the target object.
func (m *AttributeMapping) SetTargetAttributeName(value *string)() {
if m != nil {
m.targetAttributeName = value
}
} | models/attribute_mapping.go | 0.869659 | 0.495545 | attribute_mapping.go | starcoder |
package openapi
import (
"encoding/json"
)
// RetentionPolicies struct for RetentionPolicies
type RetentionPolicies struct {
	// Retention window in minutes; nil means the property is unset and is
	// omitted from the JSON encoding.
	RetentionTimeInMinutes *int32 `json:"retentionTimeInMinutes,omitempty"`
	// Retention size limit in megabytes; nil means the property is unset and
	// is omitted from the JSON encoding.
	RetentionSizeInMB *int64 `json:"retentionSizeInMB,omitempty"`
}

// NewRetentionPolicies instantiates a new RetentionPolicies object
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed
func NewRetentionPolicies() *RetentionPolicies {
	this := RetentionPolicies{}
	return &this
}

// NewRetentionPoliciesWithDefaults instantiates a new RetentionPolicies object
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set
func NewRetentionPoliciesWithDefaults() *RetentionPolicies {
	this := RetentionPolicies{}
	return &this
}

// GetRetentionTimeInMinutes returns the RetentionTimeInMinutes field value if set, zero value otherwise.
func (o *RetentionPolicies) GetRetentionTimeInMinutes() int32 {
	if o == nil || o.RetentionTimeInMinutes == nil {
		var ret int32
		return ret
	}
	return *o.RetentionTimeInMinutes
}

// GetRetentionTimeInMinutesOk returns a tuple with the RetentionTimeInMinutes field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *RetentionPolicies) GetRetentionTimeInMinutesOk() (*int32, bool) {
	if o == nil || o.RetentionTimeInMinutes == nil {
		return nil, false
	}
	return o.RetentionTimeInMinutes, true
}

// HasRetentionTimeInMinutes returns a boolean if a field has been set.
func (o *RetentionPolicies) HasRetentionTimeInMinutes() bool {
	// Simplified from an if/return-true/return-false chain (staticcheck S1008).
	return o != nil && o.RetentionTimeInMinutes != nil
}

// SetRetentionTimeInMinutes gets a reference to the given int32 and assigns it to the RetentionTimeInMinutes field.
func (o *RetentionPolicies) SetRetentionTimeInMinutes(v int32) {
	o.RetentionTimeInMinutes = &v
}

// GetRetentionSizeInMB returns the RetentionSizeInMB field value if set, zero value otherwise.
func (o *RetentionPolicies) GetRetentionSizeInMB() int64 {
	if o == nil || o.RetentionSizeInMB == nil {
		var ret int64
		return ret
	}
	return *o.RetentionSizeInMB
}

// GetRetentionSizeInMBOk returns a tuple with the RetentionSizeInMB field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *RetentionPolicies) GetRetentionSizeInMBOk() (*int64, bool) {
	if o == nil || o.RetentionSizeInMB == nil {
		return nil, false
	}
	return o.RetentionSizeInMB, true
}

// HasRetentionSizeInMB returns a boolean if a field has been set.
func (o *RetentionPolicies) HasRetentionSizeInMB() bool {
	// Simplified from an if/return-true/return-false chain (staticcheck S1008).
	return o != nil && o.RetentionSizeInMB != nil
}

// SetRetentionSizeInMB gets a reference to the given int64 and assigns it to the RetentionSizeInMB field.
func (o *RetentionPolicies) SetRetentionSizeInMB(v int64) {
	o.RetentionSizeInMB = &v
}

// MarshalJSON serializes only the fields that have been set, matching the
// omitempty behavior of the struct tags.
func (o RetentionPolicies) MarshalJSON() ([]byte, error) {
	toSerialize := map[string]interface{}{}
	if o.RetentionTimeInMinutes != nil {
		toSerialize["retentionTimeInMinutes"] = o.RetentionTimeInMinutes
	}
	if o.RetentionSizeInMB != nil {
		toSerialize["retentionSizeInMB"] = o.RetentionSizeInMB
	}
	return json.Marshal(toSerialize)
}
type NullableRetentionPolicies struct {
value *RetentionPolicies
isSet bool
}
func (v NullableRetentionPolicies) Get() *RetentionPolicies {
return v.value
}
func (v *NullableRetentionPolicies) Set(val *RetentionPolicies) {
v.value = val
v.isSet = true
}
func (v NullableRetentionPolicies) IsSet() bool {
return v.isSet
}
func (v *NullableRetentionPolicies) Unset() {
v.value = nil
v.isSet = false
}
func NewNullableRetentionPolicies(val *RetentionPolicies) *NullableRetentionPolicies {
return &NullableRetentionPolicies{value: val, isSet: true}
}
func (v NullableRetentionPolicies) MarshalJSON() ([]byte, error) {
return json.Marshal(v.value)
}
func (v *NullableRetentionPolicies) UnmarshalJSON(src []byte) error {
v.isSet = true
return json.Unmarshal(src, &v.value)
} | openapi/model_retention_policies.go | 0.733261 | 0.560914 | model_retention_policies.go | starcoder |
package sw
import (
"math/big"
"github.com/consensys/gnark/frontend"
"github.com/consensys/gnark/std/algebra/fields"
"github.com/consensys/gurvy/utils"
)
// PairingContext contains useful info about the pairing
type PairingContext struct {
	AteLoop uint64 // stores the ate loop
	Extension fields.Extension // extension-field parameters forwarded to every fields arithmetic call
}
// LineEvalRes represents a sparse Fp12 Elmt (result of the line evaluation)
// Only three E2 coefficients are tracked; every other coordinate of the
// evaluated line is zero, which MulAssign exploits with sparse multiplications.
type LineEvalRes struct {
	r0, r1, r2 fields.E2
}
// LineEvalBLS377 computes f(P) where div(f) = (P)+(R)+(-(P+R))-3O, Q, R are on the twist and in the r-torsion (trace 0 subgroup)
// the result is pulled back like if it was computed on the original curve, so it's a Fp12Elmt, that is sparse,
// only 3 entries are non zero. The result is therefore stored in a custom type LineEvalRes representing a sparse element
func LineEvalBLS377(cs *frontend.ConstraintSystem, Q, R G2Jac, P G1Jac, result *LineEvalRes, ext fields.Extension) {
	// converts Q and R to projective coords
	// (Q and R are by-value copies, so mutating them below is local)
	Q.ToProj(cs, &Q, ext)
	R.ToProj(cs, &R, ext)
	// line eq: w^3*(QyRz-QzRy)x + w^2*(QzRx - QxRz)y + w^5*(QxRy-QyRx)
	// result.r1 = Px*(QyRz-QzRy)
	// result.r0 = Py*(QzRx - QxRz)
	// result.r2 = Pz*(QxRy-QyRx)
	// first the positive cross products...
	result.r1.Mul(cs, &Q.Y, &R.Z, ext)
	result.r0.Mul(cs, &Q.Z, &R.X, ext)
	result.r2.Mul(cs, &Q.X, &R.Y, ext)
	// ...then the negative ones, computed in-place into Q's (copied) coords
	Q.Z.Mul(cs, &Q.Z, &R.Y, ext)
	Q.X.Mul(cs, &Q.X, &R.Z, ext)
	Q.Y.Mul(cs, &Q.Y, &R.X, ext)
	result.r1.Sub(cs, &result.r1, &Q.Z)
	result.r0.Sub(cs, &result.r0, &Q.X)
	result.r2.Sub(cs, &result.r2, &Q.Y)
	// multiply P.Z by coeffs[2] in case P is infinity
	result.r0.MulByFp(cs, &result.r0, P.Y)
	result.r1.MulByFp(cs, &result.r1, P.X)
	result.r2.MulByFp(cs, &result.r2, P.Z)
}
// LineEvalAffineBLS377 computes f(P) where div(f) = (P)+(R)+(-(P+R))-3O, Q, R are on the twist and in the r-torsion (trace 0 subgroup)
// the result is pulled back like if it was computed on the original curve, so it's a Fp12Elmt, that is sparse,
// only 3 entries are non zero. The result is therefore stored in a custom type LineEvalRes representing a sparse element
func LineEvalAffineBLS377(cs *frontend.ConstraintSystem, Q, R G2Affine, P G1Affine, result *LineEvalRes, ext fields.Extension) {
	// line eq: w^3*(QyRz-QzRy)x + w^2*(QzRx - QxRz)y + w^5*(QxRy-QyRx)
	// result.r1 = Px*(QyRz-QzRy)
	// result.r0 = Py*(QzRx - QxRz)
	// result.r2 = Pz*(QxRy-QyRx)
	// here all the z coordinates are 1, so the Qz/Rz products collapse to
	// plain subtractions and only r2 needs full E2 multiplications.
	//result.r1.Mul(cs, &Q.Y, &R.Z, ext)
	result.r1.Sub(cs, &Q.Y, &R.Y)
	result.r0.Sub(cs, &R.X, &Q.X)
	result.r2.Mul(cs, &Q.X, &R.Y, ext)
	var tmp fields.E2
	tmp.Mul(cs, &Q.Y, &R.X, ext)
	result.r2.Sub(cs, &result.r2, &tmp)
	// multiply P.Z by coeffs[2] in case P is infinity
	// (affine P has no Z, so only the X/Y scalings remain)
	result.r0.MulByFp(cs, &result.r0, P.Y)
	result.r1.MulByFp(cs, &result.r1, P.X)
}
// MulAssign multiplies the result of a line evaluation to the current Fp12 accumulator
func (l *LineEvalRes) MulAssign(cs *frontend.ConstraintSystem, z *fields.E12, ext fields.Extension) {
	// Multiply z by each sparse coefficient at its basis position, then fold
	// the three partial products back into z.
	var byVW, byV, byV2W fields.E12
	byVW.MulByVW(cs, z, &l.r1, ext)
	byV.MulByV(cs, z, &l.r0, ext)
	byV2W.MulByV2W(cs, z, &l.r2, ext)
	z.Add(cs, &byVW, &byV)
	z.Add(cs, z, &byV2W)
}
// MillerLoop computes the miller loop
// P/Q are in Jacobian coordinates; res is both the accumulator and the return
// value. The loop walks the NAF digits of the ate loop counter, squaring the
// accumulator and multiplying in line evaluations at each step.
func MillerLoop(cs *frontend.ConstraintSystem, P G1Jac, Q G2Jac, res *fields.E12, pairingInfo PairingContext) *fields.E12 {
	// Decompose the ate loop counter in non-adjacent form so each iteration
	// needs at most one extra add/sub step after the doubling.
	var ateLoopNaf [64]int8
	var ateLoopBigInt big.Int
	ateLoopBigInt.SetUint64(pairingInfo.AteLoop)
	utils.NafDecomposition(&ateLoopBigInt, ateLoopNaf[:])
	res.SetOne(cs)
	// the line goes through QCur and QNext
	var QCur, QNext, QNextNeg G2Jac
	var QNeg G2Jac
	QCur = Q
	// Stores -Q
	QNeg.Neg(cs, &Q)
	var lEval LineEvalRes
	// Miller loop
	for i := len(ateLoopNaf) - 2; i >= 0; i-- {
		// doubling step: square the accumulator and absorb the tangent line
		QNext = QCur
		QNext.Double(cs, &QNext, pairingInfo.Extension)
		QNextNeg.Neg(cs, &QNext)
		res.Mul(cs, res, res, pairingInfo.Extension)
		// evaluates line though Qcur,2Qcur at P
		LineEvalBLS377(cs, QCur, QNextNeg, P, &lEval, pairingInfo.Extension)
		lEval.MulAssign(cs, res, pairingInfo.Extension)
		if ateLoopNaf[i] == 1 {
			// addition step for a +1 NAF digit:
			// evaluates line through 2Qcur, Q at P
			LineEvalBLS377(cs, QNext, Q, P, &lEval, pairingInfo.Extension)
			lEval.MulAssign(cs, res, pairingInfo.Extension)
			QNext.AddAssign(cs, &Q, pairingInfo.Extension)
		} else if ateLoopNaf[i] == -1 {
			// subtraction step for a -1 NAF digit:
			// evaluates line through 2Qcur, -Q at P
			LineEvalBLS377(cs, QNext, QNeg, P, &lEval, pairingInfo.Extension)
			lEval.MulAssign(cs, res, pairingInfo.Extension)
			QNext.AddAssign(cs, &QNeg, pairingInfo.Extension)
		}
		QCur = QNext
	}
	return res
}
// MillerLoopAffine computes the miller loop, with points in affine
// When neither Q nor P are the point at infinity
// Mirrors MillerLoop but uses the cheaper affine line evaluation; the affine
// form is only valid because the infinity cases are excluded by contract.
func MillerLoopAffine(cs *frontend.ConstraintSystem, P G1Affine, Q G2Affine, res *fields.E12, pairingInfo PairingContext) *fields.E12 {
	// Decompose the ate loop counter in non-adjacent form so each iteration
	// needs at most one extra add/sub step after the doubling.
	var ateLoopNaf [64]int8
	var ateLoopBigInt big.Int
	ateLoopBigInt.SetUint64(pairingInfo.AteLoop)
	utils.NafDecomposition(&ateLoopBigInt, ateLoopNaf[:])
	res.SetOne(cs)
	// the line goes through QCur and QNext
	var QCur, QNext, QNextNeg G2Affine
	var QNeg G2Affine
	QCur = Q
	// Stores -Q
	QNeg.Neg(cs, &Q)
	var lEval LineEvalRes
	// Miller loop
	for i := len(ateLoopNaf) - 2; i >= 0; i-- {
		// doubling step: square the accumulator and absorb the tangent line
		QNext = QCur
		QNext.Double(cs, &QNext, pairingInfo.Extension)
		QNextNeg.Neg(cs, &QNext)
		res.Mul(cs, res, res, pairingInfo.Extension)
		// evaluates line though Qcur,2Qcur at P
		LineEvalAffineBLS377(cs, QCur, QNextNeg, P, &lEval, pairingInfo.Extension)
		lEval.MulAssign(cs, res, pairingInfo.Extension)
		if ateLoopNaf[i] == 1 {
			// addition step for a +1 NAF digit:
			// evaluates line through 2Qcur, Q at P
			LineEvalAffineBLS377(cs, QNext, Q, P, &lEval, pairingInfo.Extension)
			lEval.MulAssign(cs, res, pairingInfo.Extension)
			QNext.AddAssign(cs, &Q, pairingInfo.Extension)
		} else if ateLoopNaf[i] == -1 {
			// subtraction step for a -1 NAF digit:
			// evaluates line through 2Qcur, -Q at P
			LineEvalAffineBLS377(cs, QNext, QNeg, P, &lEval, pairingInfo.Extension)
			lEval.MulAssign(cs, res, pairingInfo.Extension)
			QNext.AddAssign(cs, &QNeg, pairingInfo.Extension)
		}
		QCur = QNext
	}
	return res
}
package coinmarketcap
const Mapping = `[
{
"coin": 0,
"type": "coin",
"id": 1
},
{
"coin": 2,
"type": "coin",
"id": 2
},
{
"coin": 20000714,
"type": "token",
"token_id": "<KEY>",
"id": 2
},
{
"coin": 7,
"type": "coin",
"id": 3
},
{
"coin": 83,
"type": "coin",
"id": 4
},
{
"coin": 6,
"type": "coin",
"id": 5
},
{
"coin": 50,
"type": "coin",
"id": 6
},
{
"coin": 8,
"type": "coin",
"id": 8
},
{
"coin": 86,
"type": "coin",
"id": 13
},
{
"coin": 18,
"type": "coin",
"id": 18
},
{
"coin": 217,
"type": "coin",
"id": 37
},
{
"coin": 24,
"type": "coin",
"id": 42
},
{
"coin": 359,
"type": "coin",
"id": 45
},
{
"coin": 144,
"type": "coin",
"id": 52
},
{
"coin": 20000714,
"type": "token",
"token_id": "<KEY>",
"id": 52
},
{
"coin": 82,
"type": "coin",
"id": 53
},
{
"coin": 216,
"type": "coin",
"id": 64
},
{
"coin": 29,
"type": "coin",
"id": 66
},
{
"coin": 92,
"type": "coin",
"id": 67
},
{
"coin": 3,
"type": "coin",
"id": 74
},
{
"coin": 152,
"type": "coin",
"id": 77
},
{
"coin": 200,
"type": "coin",
"id": 83
},
{
"coin": 481,
"type": "coin",
"id": 89
},
{
"coin": 28,
"type": "coin",
"id": 99
},
{
"coin": 20,
"type": "coin",
"id": 109
},
{
"coin": 4,
"type": "coin",
"id": 118
},
{
"coin": 81,
"type": "coin",
"id": 122
},
{
"coin": 5,
"type": "coin",
"id": 131
},
{
"coin": 9,
"type": "coin",
"id": 132
},
{
"coin": 53,
"type": "coin",
"id": 145
},
{
"coin": 85,
"type": "coin",
"id": 148
},
{
"coin": 109,
"type": "coin",
"id": 151
},
{
"coin": 37,
"type": "coin",
"id": 161
},
{
"coin": 10,
"type": "coin",
"id": 170
},
{
"coin": 90,
"type": "coin",
"id": 182
},
{
"coin": 41,
"type": "coin",
"id": 201
},
{
"coin": 319,
"type": "coin",
"id": 212
},
{
"coin": 22,
"type": "coin",
"id": 213
},
{
"coin": 16,
"type": "coin",
"id": 215
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 217
},
{
"coin": 58,
"type": "coin",
"id": 233
},
{
"coin": 78,
"type": "coin",
"id": 234
},
{
"coin": 87,
"type": "coin",
"id": 254
},
{
"coin": 17,
"type": "coin",
"id": 258
},
{
"coin": 155,
"type": "coin",
"id": 268
},
{
"coin": 387,
"type": "coin",
"id": 278
},
{
"coin": 65,
"type": "coin",
"id": 293
},
{
"coin": 179,
"type": "coin",
"id": 298
},
{
"coin": 117,
"type": "coin",
"id": 313
},
{
"coin": 128,
"type": "coin",
"id": 328
},
{
"coin": 91,
"type": "coin",
"id": 366
},
{
"coin": 204,
"type": "coin",
"id": 372
},
{
"coin": 130,
"type": "coin",
"id": 377
},
{
"coin": 38,
"type": "coin",
"id": 389
},
{
"coin": 287,
"type": "coin",
"id": 405
},
{
"coin": 113,
"type": "coin",
"id": 416
},
{
"coin": 125,
"type": "coin",
"id": 448
},
{
"coin": 23,
"type": "coin",
"id": 460
},
{
"coin": 308,
"type": "coin",
"id": 463
},
{
"coin": 14,
"type": "coin",
"id": 470
},
{
"coin": 295,
"type": "coin",
"id": 495
},
{
"coin": 148,
"type": "coin",
"id": 512
},
{
"coin": 57,
"type": "coin",
"id": 541
},
{
"coin": 405,
"type": "coin",
"id": 551
},
{
"coin": 30,
"type": "coin",
"id": 573
},
{
"coin": 60,
"type": "token",
"token_id": "0x63f88A2298a5c4AEE3c216Aa6D926B184a4b2437",
"id": 576
},
{
"coin": 340,
"type": "coin",
"id": 584
},
{
"coin": 108,
"type": "coin",
"id": 588
},
{
"coin": 190,
"type": "coin",
"id": 633
},
{
"coin": 77,
"type": "coin",
"id": 693
},
{
"coin": 11,
"type": "coin",
"id": 699
},
{
"coin": 31,
"type": "coin",
"id": 706
},
{
"coin": 328,
"type": "coin",
"id": 707
},
{
"coin": 72,
"type": "coin",
"id": 720
},
{
"coin": 69,
"type": "coin",
"id": 760
},
{
"coin": 67,
"type": "coin",
"id": 789
},
{
"coin": 59,
"type": "coin",
"id": 799
},
{
"coin": 196,
"type": "coin",
"id": 815
},
{
"coin": 88,
"type": "coin",
"id": 819
},
{
"coin": 60,
"type": "token",
"token_id": "0xdAC17F958D2ee523a2206206994597C13D831ec7",
"id": 825
},
{
"coin": 195,
"type": "token",
"token_id": "<KEY>",
"id": 825
},
{
"coin": 10000714,
"type": "token",
"token_id": "0x55d398326f99059ff775485246999027b3197955",
"id": 825
},
{
"coin": 20000714,
"type": "token",
"token_id": "0x55d398326f99059fF775485246999027B3197955",
"id": 825
},
{
"coin": 84,
"type": "coin",
"id": 833
},
{
"coin": 43,
"type": "coin",
"id": 873
},
{
"coin": 60,
"type": "token",
"token_id": "0x4DF812F6064def1e5e029f1ca858777CC98D2D81",
"id": 895
},
{
"coin": 55,
"type": "coin",
"id": 911
},
{
"coin": 36,
"type": "coin",
"id": 934
},
{
"coin": 89,
"type": "coin",
"id": 945
},
{
"coin": 75,
"type": "coin",
"id": 960
},
{
"coin": 285,
"type": "coin",
"id": 977
},
{
"coin": 123,
"type": "coin",
"id": 990
},
{
"coin": 519,
"type": "coin",
"id": 993
},
{
"coin": 168,
"type": "coin",
"id": 1004
},
{
"coin": 468,
"type": "coin",
"id": 1008
},
{
"coin": 60,
"type": "token",
"token_id": "0xa83aF809975619477Af73B179e05e04A1CcEA953",
"id": 1022
},
{
"coin": 355,
"type": "coin",
"id": 1026
},
{
"coin": 60,
"type": "coin",
"id": 1027
},
{
"coin": 20000714,
"type": "token",
"token_id": "0x2170Ed0880ac9A755fd29B2688956BD959F933F8",
"id": 1027
},
{
"coin": 524,
"type": "coin",
"id": 1035
},
{
"coin": 49,
"type": "coin",
"id": 1044
},
{
"coin": 278,
"type": "coin",
"id": 1053
},
{
"coin": 40,
"type": "coin",
"id": 1070
},
{
"coin": 131,
"type": "coin",
"id": 1087
},
{
"coin": 60,
"type": "token",
"token_id": "0x1985365e9f78359a9B6AD760e32412f4a445E862",
"id": 1104
},
{
"coin": 79,
"type": "coin",
"id": 1135
},
{
"coin": 39,
"type": "coin",
"id": 1141
},
{
"coin": 42,
"type": "coin",
"id": 1168
},
{
"coin": 119,
"type": "coin",
"id": 1169
},
{
"coin": 134,
"type": "coin",
"id": 1214
},
{
"coin": 56,
"type": "coin",
"id": 1216
},
{
"coin": 60,
"type": "token",
"token_id": "0xE0B7927c4aF23765Cb51314A0E0521A9645F0E2A",
"id": 1229
},
{
"coin": 135,
"type": "coin",
"id": 1230
},
{
"coin": 180,
"type": "coin",
"id": 1266
},
{
"coin": 5741564,
"type": "coin",
"id": 1274
},
{
"coin": 22504,
"type": "coin",
"id": 1279
},
{
"coin": 7567736,
"type": "coin",
"id": 1281
},
{
"coin": 94,
"type": "coin",
"id": 1285
},
{
"coin": 1120,
"type": "coin",
"id": 1294
},
{
"coin": 140,
"type": "coin",
"id": 1298
},
{
"coin": 122,
"type": "coin",
"id": 1299
},
{
"coin": 286,
"type": "coin",
"id": 1308
},
{
"coin": 16754,
"type": "coin",
"id": 1320
},
{
"coin": 61,
"type": "coin",
"id": 1321
},
{
"coin": 105,
"type": "coin",
"id": 1343
},
{
"coin": 888,
"type": "coin",
"id": 1376
},
{
"coin": 218,
"type": "coin",
"id": 1381
},
{
"coin": 149,
"type": "coin",
"id": 1382
},
{
"coin": 103,
"type": "coin",
"id": 1389
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1392
},
{
"coin": 102,
"type": "coin",
"id": 1395
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1403
},
{
"coin": 60,
"type": "token",
"token_id": "0xaeC2E87E0A235266D9C5ADc9DEb4b2E29b54D009",
"id": 1409
},
{
"coin": 136,
"type": "coin",
"id": 1414
},
{
"coin": 133,
"type": "coin",
"id": 1437
},
{
"coin": 20000714,
"type": "token",
"token_id": "0x1Ba42e5193dfA8B03D15dd1B86a3113bbBEF8Eeb",
"id": 1437
},
{
"coin": 147,
"type": "coin",
"id": 1447
},
{
"coin": 60,
"type": "token",
"token_id": "0xa74476443119A942dE498590Fe1f2454d7D4aC0d",
"id": 1455
},
{
"coin": 66,
"type": "coin",
"id": 1464
},
{
"coin": 60,
"type": "token",
"token_id": "0xAbC430136A4dE71c9998242de8c1b4B97D2b9045",
"id": 1465
},
{
"coin": 197,
"type": "coin",
"id": 1466
},
{
"coin": 343,
"type": "coin",
"id": 1478
},
{
"coin": 60,
"type": "token",
"token_id": "0x5c543e7AE0A1104f78406C340E9C64FD9fCE5170",
"id": 1483
},
{
"coin": 330,
"type": "coin",
"id": 1496
},
{
"coin": 60,
"type": "token",
"token_id": "0x667088b212ce3d06a1b553a7221E1fD19000d9aF",
"id": 1500
},
{
"coin": 213,
"type": "coin",
"id": 1505
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1518
},
{
"coin": 20000714,
"type": "token",
"token_id": "<KEY>",
"id": 1518
},
{
"coin": 141,
"type": "coin",
"id": 1521
},
{
"coin": 167,
"type": "coin",
"id": 1522
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1552
},
{
"coin": 60,
"type": "token",
"token_id": "0x6531f133e6DeeBe7F2dcE5A0441aA7ef330B4e53",
"id": 1556
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1562
},
{
"coin": 165,
"type": "coin",
"id": 1567
},
{
"coin": 323,
"type": "coin",
"id": 1578
},
{
"coin": 111,
"type": "coin",
"id": 1586
},
{
"coin": 3381,
"type": "coin",
"id": 1587
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1592
},
{
"coin": 60,
"type": "token",
"token_id": "0x08711D3B02C8758F2FB3ab4e80228418a7F8e39c",
"id": 1596
},
{
"coin": 450,
"type": "coin",
"id": 1606
},
{
"coin": 369,
"type": "coin",
"id": 1609
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1616
},
{
"coin": 112,
"type": "coin",
"id": 1617
},
{
"coin": 8000,
"type": "coin",
"id": 1619
},
{
"coin": 60,
"type": "token",
"token_id": "0x607F4C5BB672230e8672085532f7e901544a7375",
"id": 1637
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1638
},
{
"coin": 160,
"type": "coin",
"id": 1654
},
{
"coin": 60,
"type": "token",
"token_id": "0xfa05A73FfE78ef8f1a739473e462c54bae6567D9",
"id": 1658
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1659
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1660
},
{
"coin": 60,
"type": "token",
"token_id": "0xcbCC0F036ED4788F63FC0fEE32873d6A7487b908",
"id": 1669
},
{
"coin": 60,
"type": "token",
"token_id": "0x1a95B271B0535D15fa49932Daba31BA612b52946",
"id": 1673
},
{
"coin": 60,
"type": "token",
"token_id": "0x2e071D2966Aa7D8dECB1005885bA1977D6038A65",
"id": 1677
},
{
"coin": 68,
"type": "coin",
"id": 1678
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1680
},
{
"coin": 2301,
"type": "coin",
"id": 1684
},
{
"coin": 414,
"type": "coin",
"id": 1694
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1697
},
{
"coin": 20000714,
"type": "token",
"token_id": "<KEY>",
"id": 1697
},
{
"coin": 121,
"type": "coin",
"id": 1698
},
{
"coin": 457,
"type": "coin",
"id": 1700
},
{
"coin": 2302,
"type": "coin",
"id": 1703
},
{
"coin": 714,
"type": "token",
"token_id": "EBST-783",
"id": 1704
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1708
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1710
},
{
"coin": 249,
"type": "coin",
"id": 1711
},
{
"coin": 238,
"type": "coin",
"id": 1712
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1715
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1721
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1722
},
{
"coin": 60,
"type": "token",
"token_id": "0x983F6d60db79ea8cA4eB9968C6aFf8cfA04B3c63",
"id": 1723
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1725
},
{
"coin": 27,
"type": "coin",
"id": 1726
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1727
},
{
"coin": 60,
"type": "token",
"token_id": "0x1776e1F26f98b1A5dF9cD347953a26dd3Cb46671",
"id": 1732
},
{
"coin": 124,
"type": "coin",
"id": 1736
},
{
"coin": 174,
"type": "coin",
"id": 1747
},
{
"coin": 2303,
"type": "coin",
"id": 1750
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1751
},
{
"coin": 120,
"type": "coin",
"id": 1755
},
{
"coin": 60,
"type": "token",
"token_id": "0x419D0d8BdD9aF5e606Ae2232ed285Aff190E711b",
"id": 1757
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1758
},
{
"coin": 60,
"type": "token",
"token_id": "0x744d70FDBE2Ba4CF95131626614a1763DF805B9E",
"id": 1759
},
{
"coin": 429,
"type": "coin",
"id": 1762
},
{
"coin": 194,
"type": "coin",
"id": 1765
},
{
"coin": 20000714,
"type": "token",
"token_id": "<KEY>",
"id": 1765
},
{
"coin": 60,
"type": "token",
"token_id": "0x4470BB87d77b963A013DB939BE332f927f2b992e",
"id": 1768
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1772
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1775
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1776
},
{
"coin": 714,
"type": "token",
"token_id": "WGR-D3D",
"id": 1779
},
{
"coin": 60,
"type": "token",
"token_id": "0x0AfFa06e7Fbe5bC9a764C979aA66E8256A631f02",
"id": 1784
},
{
"coin": 60,
"type": "token",
"token_id": "0xF4134146AF2d511Dd5EA8cDB1C4AC88C57D60404",
"id": 1786
},
{
"coin": 60,
"type": "token",
"token_id": "0x8727c112C712c4a03371AC87a74dD6aB104Af768",
"id": 1787
},
{
"coin": 60,
"type": "token",
"token_id": "0xF433089366899D83a9f26A773D59ec7eCF30355e",
"id": 1788
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1789
},
{
"coin": 435,
"type": "coin",
"id": 1803
},
{
"coin": 60,
"type": "token",
"token_id": "0x7C5A0CE9267ED19B22F8cae653F198e3E8daf098",
"id": 1807
},
{
"coin": 60,
"type": "token",
"token_id": "0xd26114cd6EE289AccF82350c8d8487fedB8A0C07",
"id": 1808
},
{
"coin": 60,
"type": "token",
"token_id": "0x41e5560054824eA6B0732E656E3Ad64E20e94E45",
"id": 1816
},
{
"coin": 60,
"type": "token",
"token_id": "0x5Af2Be193a6ABCa9c8817001F45744777Db30756",
"id": 1817
},
{
"coin": 44,
"type": "coin",
"id": 1826
},
{
"coin": 224,
"type": "coin",
"id": 1828
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1830
},
{
"coin": 145,
"type": "coin",
"id": 1831
},
{
"coin": 20000714,
"type": "token",
"token_id": "0x8fF795a6F4D97E7887C79beA79aba5cc76444aDf",
"id": 1831
},
{
"coin": 159,
"type": "coin",
"id": 1833
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1834
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1839
},
{
"coin": 714,
"type": "coin",
"id": 1839
},
{
"coin": 10000714,
"type": "token",
"token_id": "<KEY>",
"id": 1839
},
{
"coin": 20000714,
"type": "token",
"token_id": "<KEY>",
"id": 1839
},
{
"coin": 10000714,
"type": "token",
"token_id": "<KEY>",
"id": 1975
},
{
"coin": 10000714,
"type": "token",
"token_id": "0xe9e7cea3dedca5984780bafc599bd69add087d56",
"id": 4687
},
{
"coin": 10000714,
"type": "token",
"token_id": "<KEY>",
"id": 4023
},
{
"coin": 10000714,
"type": "coin",
"id": 1839
},
{
"coin": 20000714,
"type": "coin",
"id": 1839
},
{
"coin": 60,
"type": "token",
"token_id": "0xfcA47962D45ADFdfd1Ab2D972315dB4ce7CCf094",
"id": 1845
},
{
"coin": 60,
"type": "token",
"token_id": "0x701C244b988a513c945973dEFA05de933b23Fe1D",
"id": 1853
},
{
"coin": 60,
"type": "token",
"token_id": "0x0AbdAce70D3790235af448C88547603b945604ea",
"id": 1856
},
{
"coin": 60,
"type": "token",
"token_id": "0x006BeA43Baa3f7A6f765F14f10A1a1b08334EF45",
"id": 1861
},
{
"coin": 60,
"type": "token",
"token_id": "0x177d39AC676ED1C67A2b268AD7F1E58826E5B0af",
"id": 1864
},
{
"coin": 153,
"type": "coin",
"id": 1866
},
{
"coin": 60,
"type": "token",
"token_id": "0xEa1f346faF023F974Eb5adaf088BbCdf02d761F4",
"id": 1873
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1876
},
{
"coin": 499,
"type": "coin",
"id": 1877
},
{
"coin": 305,
"type": "coin",
"id": 1881
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1882
},
{
"coin": 311,
"type": "coin",
"id": 1883
},
{
"coin": 60,
"type": "token",
"token_id": "0x3597bfD533a99c9aa083587B074434E61Eb0A258",
"id": 1886
},
{
"coin": 60,
"type": "token",
"token_id": "0x7654915A1b82D6D2D0AFc37c52Af556eA8983c7E",
"id": 1888
},
{
"coin": 60,
"type": "token",
"token_id": "0xE41d2489571d322189246DaFA5ebDe1F4699F498",
"id": 1896
},
{
"coin": 60,
"type": "token",
"token_id": "0x5d60d8d7eF6d37E16EBABc324de3bE57f135e0BC",
"id": 1902
},
{
"coin": 60,
"type": "token",
"token_id": "0xa7f976C360ebBeD4465c2855684D1AAE5271eFa9",
"id": 1905
},
{
"coin": 2718,
"type": "coin",
"id": 1908
},
{
"coin": 60,
"type": "token",
"token_id": "0x07D9e49Ea402194bf48A8276dAfB16E4eD633317",
"id": 1912
},
{
"coin": 666,
"type": "coin",
"id": 1918
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1923
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1926
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1930
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1931
},
{
"coin": 60,
"type": "token",
"token_id": "0xe120c1ECBfdFeA7F0A8f0Ee30063491E8c26fedf",
"id": 1933
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1934
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1937
},
{
"coin": 60,
"type": "token",
"token_id": "0xaF4DcE16Da2877f8c9e00544c93B62Ac40631F16",
"id": 1947
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1948
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1949
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1950
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1954
},
{
"coin": 146,
"type": "coin",
"id": 1955
},
{
"coin": 195,
"type": "coin",
"id": 1958
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1961
},
{
"coin": 169,
"type": "coin",
"id": 1962
},
{
"coin": 60,
"type": "token",
"token_id": "0x4E0603e2A27A30480E5e3a4Fe548e29EF12F64bE",
"id": 1963
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1966
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1967
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1968
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1969
},
{
"coin": 60,
"type": "token",
"token_id": "0x226bb599a12C826476e3A771454697EA52E9E220",
"id": 1974
},
{
"coin": 60,
"type": "token",
"token_id": "0x514910771AF9Ca656af840dff83E8264EcF986CA",
"id": 1975
},
{
"coin": 20000714,
"type": "token",
"token_id": "<KEY>",
"id": 1975
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1976
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1982
},
{
"coin": 60,
"type": "token",
"token_id": "0xe8Ff5C9c75dEb346acAc493C463C8950Be03Dfba",
"id": 1983
},
{
"coin": 60,
"type": "token",
"token_id": "0x8D75959f1E61EC2571aa72798237101F084DE63a",
"id": 1984
},
{
"coin": 60,
"type": "token",
"token_id": "0xE814aeE960a85208C3dB542C53E7D4a6C8D5f60F",
"id": 1985
},
{
"coin": 60,
"type": "token",
"token_id": "0x3d1BA9be9f66B8ee101911bC36D3fB562eaC2244",
"id": 1991
},
{
"coin": 2017,
"type": "coin",
"id": 1993
},
{
"coin": 60,
"type": "token",
"token_id": "0x4156D3342D5c385a87D264F90653733592000581",
"id": 1996
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 1998
},
{
"coin": 1999,
"type": "coin",
"id": 2001
},
{
"coin": 232,
"type": "coin",
"id": 2002
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2006
},
{
"coin": 209,
"type": "coin",
"id": 2009
},
{
"coin": 1815,
"type": "coin",
"id": 2010
},
{
"coin": 20000714,
"type": "token",
"token_id": "<KEY>",
"id": 2010
},
{
"coin": 1729,
"type": "coin",
"id": 2011
},
{
"coin": 20000714,
"type": "token",
"token_id": "<KEY>",
"id": 2011
},
{
"coin": 60,
"type": "token",
"token_id": "0x83eEA00D838f92dEC4D1475697B9f4D3537b56E3",
"id": 2012
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2015
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2017
},
{
"coin": 60,
"type": "token",
"token_id": "0xE0C72452740414d861606a44cCd5eA7f96488278",
"id": 2018
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2019
},
{
"coin": 60,
"type": "token",
"token_id": "0xa8006C4ca56F24d6836727D106349320dB7fEF82",
"id": 2022
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2030
},
{
"coin": 60,
"type": "token",
"token_id": "0xDd6C68bb32462e01705011a4e2Ad1a60740f217F",
"id": 2031
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2032
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2034
},
{
"coin": 60,
"type": "token",
"token_id": "0xc42209aCcC14029c1012fB5680D95fBd6036E2a0",
"id": 2036
},
{
"coin": 177,
"type": "coin",
"id": 2041
},
{
"coin": 60,
"type": "token",
"token_id": "0xba2184520A1cC49a6159c57e61E1844E085615B6",
"id": 2042
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2043
},
{
"coin": 60,
"type": "token",
"token_id": "0xf0Ee6b27b759C9893Ce4f094b49ad28fd15A23e4",
"id": 2044
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2047
},
{
"coin": 60,
"type": "token",
"token_id": "0x5D21eF5f25a985380B65c8e943A0082fEDa0Db84",
"id": 2048
},
{
"coin": 60,
"type": "token",
"token_id": "0x2dAEE1AA61D60A252DC80564499A69802853583A",
"id": 2051
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2056
},
{
"coin": 60,
"type": "token",
"token_id": "0x27054b13b1B798B345b591a4d22e6562d47eA75a",
"id": 2058
},
{
"coin": 60,
"type": "token",
"token_id": "0x7d4b8Cce0591C9044a22ee543533b72E976E36C3",
"id": 2060
},
{
"coin": 714,
"type": "token",
"token_id": "<KEY>",
"id": 2061
},
{
"coin": 425,
"type": "coin",
"id": 2062
},
{
"coin": 60,
"type": "token",
"token_id": "0xfec0cF7fE078a500abf15F1284958F22049c2C7e",
"id": 2064
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2066
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2069
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2070
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2071
},
{
"coin": 60,
"type": "token",
"token_id": "0x28c8d01FF633eA9Cd8fc6a451D7457889E698de6",
"id": 2074
},
{
"coin": 60,
"type": "token",
"token_id": "0x539EfE69bCDd21a83eFD9122571a64CC25e0282b",
"id": 2076
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2078
},
{
"coin": 156,
"type": "coin",
"id": 2083
},
{
"coin": 60,
"type": "token",
"token_id": "0x039B5649A59967e3e936D7471f9c3700100Ee1ab",
"id": 2087
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2088
},
{
"coin": 60,
"type": "token",
"token_id": "0x705EE96c1c160842C92c1aeCfCFfccc9C412e3D9",
"id": 2089
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2090
},
{
"coin": 60,
"type": "token",
"token_id": "0xc324a2f6b05880503444451B8b27e6f9e63287Cb",
"id": 2091
},
{
"coin": 8964,
"type": "coin",
"id": 2092
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2093
},
{
"coin": 60,
"type": "token",
"token_id": "0x7728dFEF5aBd468669EB7f9b48A7f70a501eD29D",
"id": 2094
},
{
"coin": 60,
"type": "token",
"token_id": "0xF970b8E36e23F7fC3FD752EeA86f8Be8D83375A6",
"id": 2096
},
{
"coin": 74,
"type": "coin",
"id": 2099
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2100
},
{
"coin": 60,
"type": "token",
"token_id": "0x0A76aad21948eA1ef447D26DEe91a54370E151e0",
"id": 2101
},
{
"coin": 60,
"type": "token",
"token_id": "0x0aeF06DcCCC531e581f0440059E6FfCC206039EE",
"id": 2103
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2104
},
{
"coin": 164,
"type": "coin",
"id": 2105
},
{
"coin": 3003,
"type": "coin",
"id": 2107
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2110
},
{
"coin": 60,
"type": "token",
"token_id": "0xA9Aad2dC3a8315caeee5F458B1d8EDc31D8467BD",
"id": 2119
},
{
"coin": 60,
"type": "token",
"token_id": "0xEA38eAa3C86c8F9B751533Ba2E562deb9acDED40",
"id": 2120
},
{
"coin": 163,
"type": "coin",
"id": 2122
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2126
},
{
"coin": 60,
"type": "token",
"token_id": "0xeB7C20027172E5d143fB030d50f91Cece2D1485D",
"id": 2127
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2130
},
{
"coin": 60,
"type": "token",
"token_id": "0xB7c4A82936194FEE52a4E3d4cEC3415f74507532",
"id": 2131
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2132
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2134
},
{
"coin": 60,
"type": "token",
"token_id": "0x2ef52Ed7De8c5ce03a4eF0efbe9B7450F2D7Edc9",
"id": 2135
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2136
},
{
"coin": 415,
"type": "coin",
"id": 2137
},
{
"coin": 60,
"type": "token",
"token_id": "0x0Cf0Ee63788A0849fE5297F3407f701E122cC023",
"id": 2143
},
{
"coin": 60,
"type": "token",
"token_id": "0x44197A4c44D6A059297cAf6be4F7e172BD56Caaf",
"id": 2147
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2149
},
{
"coin": 60,
"type": "token",
"token_id": "0x5554e04e76533E1d14c52f05beEF6c9d329E1E30",
"id": 2151
},
{
"coin": 714,
"type": "token",
"token_id": "ARN-71B",
"id": 2153
},
{
"coin": 444,
"type": "coin",
"id": 2158
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2161
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2162
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2165
},
{
"coin": 60,
"type": "token",
"token_id": "0x999967E2Ec8A74B7c8E9dB19E039d920B31d39D0",
"id": 2166
},
{
"coin": 60,
"type": "token",
"token_id": "0x50Ee674689d75C0f88E8f83cfE8c4B69E8fd590D",
"id": 2172
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2178
},
{
"coin": 60,
"type": "token",
"token_id": "0x0371A82e4A9d0A4312f3ee2Ac9c6958512891372",
"id": 2180
},
{
"coin": 60,
"type": "token",
"token_id": "0x103c3A209da59d3E7C4A89307e66521e081CFDF0",
"id": 2181
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2184
},
{
"coin": 419,
"type": "coin",
"id": 2185
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2191
},
{
"coin": 176,
"type": "coin",
"id": 2200
},
{
"coin": 181,
"type": "coin",
"id": 2201
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2204
},
{
"coin": 60,
"type": "token",
"token_id": "0x82b0E50478eeaFde392D45D1259Ed1071B6fDa81",
"id": 2208
},
{
"coin": 60,
"type": "token",
"token_id": "0x99ea4dB9EE77ACD40B119BD1dC4E33e1C070b80d",
"id": 2212
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2213
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2219
},
{
"coin": 999,
"type": "coin",
"id": 2222
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2223
},
{
"coin": 214,
"type": "coin",
"id": 2230
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2231
},
{
"coin": 60,
"type": "token",
"token_id": "0xF7920B0768Ecb20A123fAc32311d07D193381d6f",
"id": 2235
},
{
"coin": 714,
"type": "token",
"token_id": "WISH-2D5",
"id": 2236
},
{
"coin": 60,
"type": "token",
"token_id": "0xb62d18DeA74045E822352CE4B3EE77319DC5ff2F",
"id": 2237
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2239
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2240
},
{
"coin": 60,
"type": "token",
"token_id": "0x679BADc551626e01B23CeecEFBc9B877EA18fc46",
"id": 2241
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2243
},
{
"coin": 60,
"type": "token",
"token_id": "0xB41422D5a1d5d5C73c229686935b40F881502785",
"id": 2244
},
{
"coin": 60,
"type": "token",
"token_id": "0xEC213F83defB583af3A000B1c0ada660b1902A0F",
"id": 2245
},
{
"coin": 1122,
"type": "coin",
"id": 2246
},
{
"coin": 60,
"type": "token",
"token_id": "0x1e797Ce986C3CFF4472F7D38d5C4aba55DfEFE40",
"id": 2247
},
{
"coin": 60,
"type": "token",
"token_id": "0x04F2E7221fdb1B52A68169B25793E51478fF0329",
"id": 2248
},
{
"coin": 60,
"type": "token",
"token_id": "0x74CEDa77281b339142A36817Fa5F9E29412bAb85",
"id": 2249
},
{
"coin": 60,
"type": "token",
"token_id": "0x5E6b6d9aBAd9093fdc861Ea1600eBa1b355Cd940",
"id": 2251
},
{
"coin": 60,
"type": "token",
"token_id": "0xCc34366E3842cA1BD36c1f324d15257960fCC801",
"id": 2256
},
{
"coin": 299,
"type": "coin",
"id": 2257
},
{
"coin": 60,
"type": "token",
"token_id": "0xBDC5bAC39Dbe132B1E030e898aE3830017D7d969",
"id": 2258
},
{
"coin": 60,
"type": "token",
"token_id": "0xF80D589b3Dbe130c270a69F1a69D050f268786Df",
"id": 2260
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2262
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2267
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2269
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2271
},
{
"coin": 60,
"type": "token",
"token_id": "0xD01DB73E047855Efb414e6202098C4Be4Cd2423B",
"id": 2273
},
{
"coin": 60,
"type": "token",
"token_id": "0x66186008C1050627F979d464eABb258860563dbE",
"id": 2274
},
{
"coin": 60,
"type": "token",
"token_id": "0x9041Fe5B3FDEA0f5e4afDC17e75180738D877A01",
"id": 2275
},
{
"coin": 60,
"type": "token",
"token_id": "0x2604FA406Be957E542BEb89E6754fCdE6815e83f",
"id": 2279
},
{
"coin": 461,
"type": "coin",
"id": 2280
},
{
"coin": 20000714,
"type": "token",
"token_id": "0x0D8Ce2A99Bb6e3B7Db580eD848240e4a0F9aE153",
"id": 2280
},
{
"coin": 1688,
"type": "coin",
"id": 2281
},
{
"coin": 8888,
"type": "coin",
"id": 2282
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2283
},
{
"coin": 60,
"type": "token",
"token_id": "0x8B1F49491477e0fB46a29fef53F1EA320D13c349",
"id": 2286
},
{
"coin": 60,
"type": "token",
"token_id": "0x5e3346444010135322268a4630d2ED5F8D09446c",
"id": 2287
},
{
"coin": 60,
"type": "token",
"token_id": "0x72aDadb447784dd7AB1F472467750fC485e4cb2d",
"id": 2288
},
{
"coin": 714,
"type": "token",
"token_id": "GTO-908",
"id": 2289
},
{
"coin": 60,
"type": "token",
"token_id": "0x6EC8a24CaBdc339A06a172F8223ea557055aDAa5",
"id": 2291
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2295
},
{
"coin": 60,
"type": "token",
"token_id": "0x2C4e8f2D746113d0696cE89B35F0d8bF88E0AEcA",
"id": 2296
},
{
"coin": 60,
"type": "token",
"token_id": "0xbE9375C6a420D2eEB258962efB95551A5b722803",
"id": 2297
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2298
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2299
},
{
"coin": 20000714,
"type": "token",
"token_id": "0xa3f020a5C92e15be13CAF0Ee5C95cF79585EeCC9",
"id": 2299
},
{
"coin": 60,
"type": "token",
"token_id": "0x72dD4b6bd852A3AA172Be4d6C5a6dbEc588cf131",
"id": 2305
},
{
"coin": 60,
"type": "token",
"token_id": "0x558EC3152e2eb2174905cd19AeA4e34A23DE9aD6",
"id": 2306
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2307
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2310
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2311
},
{
"coin": 60,
"type": "token",
"token_id": "0x68d57c9a1C35f63E2c83eE8e49A64e9d70528D25",
"id": 2313
},
{
"coin": 60,
"type": "token",
"token_id": "0x0Ebb614204E47c09B6C3FeB9AAeCad8EE060E23E",
"id": 2314
},
{
"coin": 172,
"type": "coin",
"id": 2315
},
{
"coin": 60,
"type": "token",
"token_id": "0xA823E6722006afe99E91c30FF5295052fe6b8E32",
"id": 2318
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2320
},
{
"coin": 348,
"type": "coin",
"id": 2321
},
{
"coin": 60,
"type": "token",
"token_id": "0xE477292f1B3268687A29376116B0ED27A9c76170",
"id": 2323
},
{
"coin": 60,
"type": "token",
"token_id": "0x0396340f16Bbec973280AB053efc3f208fA37795",
"id": 2324
},
{
"coin": 60,
"type": "token",
"token_id": "0x0AF44e2784637218dD1D32A322D44e603A8f0c6A",
"id": 2325
},
{
"coin": 60,
"type": "token",
"token_id": "0x7703C35CfFdC5CDa8D27aa3df2F9ba6964544b6e",
"id": 2330
},
{
"coin": 187,
"type": "coin",
"id": 2332
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2334
},
{
"coin": 998,
"type": "coin",
"id": 2335
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2336
},
{
"coin": 60,
"type": "token",
"token_id": "0xc27A2F05fa577a83BA0fDb4c38443c0718356501",
"id": 2337
},
{
"coin": 60,
"type": "token",
"token_id": "0x107c4504cd79C5d2696Ea0030a8dD4e92601B82e",
"id": 2340
},
{
"coin": 60,
"type": "token",
"token_id": "0x0bb217E40F8a5Cb79Adf04E1aAb60E5abd0dfC1e",
"id": 2341
},
{
"coin": 60,
"type": "token",
"token_id": "0xE2FB6529EF566a080e6d23dE0bd351311087D567",
"id": 2342
},
{
"coin": 714,
"type": "token",
"token_id": "CAN-677",
"id": 2343
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2344
},
{
"coin": 269,
"type": "coin",
"id": 2345
},
{
"coin": 99999,
"type": "coin",
"id": 2346
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2348
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2349
},
{
"coin": 60,
"type": "token",
"token_id": "0xe81D72D14B1516e68ac3190a46C93302Cc8eD60f",
"id": 2352
},
{
"coin": 60,
"type": "token",
"token_id": "0x8a854288a5976036A725879164Ca3e91d30c6A1B",
"id": 2354
},
{
"coin": 60,
"type": "token",
"token_id": "0x584B44853680ee34a0F337B712a8f66d816dF151",
"id": 2357
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2358
},
{
"coin": 1997,
"type": "coin",
"id": 2359
},
{
"coin": 60,
"type": "token",
"token_id": "0x6781a0F84c7E9e846DCb84A9a5bd49333067b104",
"id": 2363
},
{
"coin": 60,
"type": "token",
"token_id": "0x4824A7b64E3966B0133f4f4FFB1b9D6bEb75FFF7",
"id": 2364
},
{
"coin": 20000714,
"type": "token",
"token_id": "0xCA0a9Df6a8cAD800046C1DDc5755810718b65C44",
"id": 2364
},
{
"coin": 60,
"type": "token",
"token_id": "0x9B20DaBcec77f6289113E61893F7BEeFAEB1990a",
"id": 2366
},
{
"coin": 60,
"type": "token",
"token_id": "0x1063ce524265d5a3A624f4914acd573dD89ce988",
"id": 2367
},
{
"coin": 453,
"type": "coin",
"id": 2369
},
{
"coin": 9999,
"type": "coin",
"id": 2370
},
{
"coin": 60,
"type": "token",
"token_id": "0x16f812Be7FfF02cAF662B85d5d58a5da6572D4Df",
"id": 2371
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2372
},
{
"coin": 60,
"type": "token",
"token_id": "0x1961B3331969eD52770751fC718ef530838b6dEE",
"id": 2374
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2375
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2376
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2377
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2379
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2380
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2381
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2382
},
{
"coin": 315,
"type": "coin",
"id": 2383
},
{
"coin": 185,
"type": "coin",
"id": 2387
},
{
"coin": 60,
"type": "token",
"token_id": "0xa6a840E50bCaa50dA017b91A0D86B8b2d41156EE",
"id": 2391
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2394
},
{
"coin": 60,
"type": "token",
"token_id": "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2",
"id": 2396
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2398
},
{
"coin": 60,
"type": "token",
"token_id": "0xFF603F43946A3A28DF5E6A73172555D8C8b02386",
"id": 2400
},
{
"coin": 314,
"type": "coin",
"id": 2403
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2405
},
{
"coin": 291,
"type": "coin",
"id": 2405
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2406
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2407
},
{
"coin": 60,
"type": "token",
"token_id": "0x57838fF342f36A1EC18224981ea8715a4667fB3a",
"id": 2408
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2413
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2414
},
{
"coin": 60,
"type": "token",
"token_id": "0x1245ef80F4d9e02ED9425375e8F649B9221b31D8",
"id": 2415
},
{
"coin": 500,
"type": "coin",
"id": 2416
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2418
},
{
"coin": 122,
"type": "coin",
"id": 2419
},
{
"coin": 60,
"type": "token",
"token_id": "0x64CdF819d3E75Ac8eC217B3496d7cE167Be42e80",
"id": 2421
},
{
"coin": 60,
"type": "token",
"token_id": "0xCc13Fc627EFfd6E35D2D2706Ea3C4D7396c610ea",
"id": 2422
},
{
"coin": 60,
"type": "token",
"token_id": "0x8eB24319393716668D768dCEC29356ae9CfFe285",
"id": 2424
},
{
"coin": 60,
"type": "token",
"token_id": "0x2567c677473d110D75a8360C35309e63B1d52429",
"id": 2426
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2428
},
{
"coin": 60,
"type": "token",
"token_id": "0x9AF839687F6C94542ac5ece2e317dAAE355493A1",
"id": 2430
},
{
"coin": 60,
"type": "token",
"token_id": "0x8f136Cc8bEf1fEA4A7b71aa2301ff1A52F084384",
"id": 2432
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2434
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2436
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2437
},
{
"coin": 60,
"type": "token",
"token_id": "0xe75ad3aAB14E4B0dF8c5da4286608DaBb21Bd864",
"id": 2438
},
{
"coin": 668,
"type": "coin",
"id": 2439
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2440
},
{
"coin": 60,
"type": "token",
"token_id": "0x653430560bE843C4a3D143d0110e896c2Ab8ac0D",
"id": 2441
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2444
},
{
"coin": 60,
"type": "token",
"token_id": "0x69b148395Ce0015C13e36BFfBAd63f49EF874E03",
"id": 2446
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2447
},
{
"coin": 60,
"type": "token",
"token_id": "0xdA6cb58A0D0C01610a29c5A65c303e13e885887C",
"id": 2450
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2452
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2453
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2454
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2455
},
{
"coin": 714,
"type": "token",
"token_id": "TRUE-<PASSWORD>",
"id": 2457
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2458
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2459
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2461
},
{
"coin": 60,
"type": "token",
"token_id": "0x37E8789bB9996CaC9156cD5F5Fd32599E6b91289",
"id": 2462
},
{
"coin": 60,
"type": "token",
"token_id": "0x923108a439C4e8C2315c4f6521E5cE95B44e9B4c",
"id": 2464
},
{
"coin": 60,
"type": "token",
"token_id": "0x327682779bAB2BF4d1337e8974ab9dE8275A7Ca8",
"id": 2465
},
{
"coin": 60,
"type": "token",
"token_id": "0xC39E626A04C5971D770e319760D7926502975e47",
"id": 2466
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2467
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2468
},
{
"coin": 313,
"type": "coin",
"id": 2469
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2470
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2472
},
{
"coin": 60,
"type": "token",
"token_id": "0x2d0E95bd4795D7aCe0da3C0Ff7b706a5970eb9D3",
"id": 2473
},
{
"coin": 318,
"type": "coin",
"id": 2474
},
{
"coin": 60,
"type": "token",
"token_id": "0xf278c1CA969095ffddDED020290cf8B5C424AcE2",
"id": 2476
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2477
},
{
"coin": 60,
"type": "token",
"token_id": "0x3136eF851592aCf49CA4C825131E364170FA32b3",
"id": 2478
},
{
"coin": 714,
"type": "token",
"token_id": "EQL-586",
"id": 2479
},
{
"coin": 60,
"type": "token",
"token_id": "0x58c69ed6cd6887c0225D1FcCEcC055127843c69b",
"id": 2480
},
{
"coin": 714,
"type": "token",
"token_id": "CPC-FED",
"id": 2482
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2484
},
{
"coin": 60,
"type": "token",
"token_id": "0x8f7b0B40E27E357540F90f187d90CE06366aC5A5",
"id": 2488
},
{
"coin": 777,
"type": "coin",
"id": 2489
},
{
"coin": 60,
"type": "token",
"token_id": "0xb6EE9668771a79be7967ee29a63D4184F8097143",
"id": 2490
},
{
"coin": 2305,
"type": "coin",
"id": 2492
},
{
"coin": 60,
"type": "token",
"token_id": "0xaE73B38d1c9A8b274127ec30160a4927C4d71824",
"id": 2493
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2495
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2496
},
{
"coin": 60,
"type": "token",
"token_id": "0x41dBECc1cdC5517C6f76f6a6E836aDbEe2754DE3",
"id": 2497
},
{
"coin": 60,
"type": "token",
"token_id": "0xa5Fd1A791C4dfcaacC963D4F73c6Ae5824149eA7",
"id": 2498
},
{
"coin": 60,
"type": "token",
"token_id": "0xba9d4199faB4f26eFE3551D490E3821486f135Ba",
"id": 2499
},
{
"coin": 60,
"type": "token",
"token_id": "0xfd8971d5E8E1740cE2d0A84095fCA4De729d0c16",
"id": 2500
},
{
"coin": 60,
"type": "token",
"token_id": "0x2baac9330Cf9aC479D819195794d79AD0c7616e3",
"id": 2501
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2502
},
{
"coin": 60,
"type": "token",
"token_id": "0x2ccbFF3A042c68716Ed2a2Cb0c544A9f1d1935E1",
"id": 2503
},
{
"coin": 60,
"type": "token",
"token_id": "0x24dDFf6D8B8a42d835af3b440De91f3386554Aa4",
"id": 2504
},
{
"coin": 60,
"type": "token",
"token_id": "0x5732046A883704404F284Ce41FfADd5b007FD668",
"id": 2505
},
{
"coin": 60,
"type": "token",
"token_id": "0x3505F494c3f0fed0B594E01Fa41Dd3967645ca39",
"id": 2506
},
{
"coin": 60,
"type": "token",
"token_id": "0xe8A1Df958bE379045E2B46a31A98B93A2eCDfDeD",
"id": 2509
},
{
"coin": 60,
"type": "token",
"token_id": "0x8dB54ca569D3019A2ba126D03C37c44b5eF81EF6",
"id": 2510
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2511
},
{
"coin": 60,
"type": "token",
"token_id": "0x92e52a1A235d9A103D970901066CE910AAceFD37",
"id": 2512
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2517
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2518
},
{
"coin": 154,
"type": "coin",
"id": 2521
},
{
"coin": 60,
"type": "token",
"token_id": "0x9e3319636e2126e3c0bc9e3134AEC5e1508A46c7",
"id": 2524
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2526
},
{
"coin": 60,
"type": "token",
"token_id": "0x5adc961D6AC3f7062D2eA45FEFB8D8167d44b190",
"id": 2528
},
{
"coin": 714,
"type": "token",
"token_id": "CAS-167",
"id": 2529
},
{
"coin": 714,
"type": "token",
"token_id": "FSN-E14",
"id": 2530
},
{
"coin": 60,
"type": "token",
"token_id": "0x6425c6BE902d692AE2db752B3c268AFAdb099D3b",
"id": 2533
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2535
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2536
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2537
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2538
},
{
"coin": 60,
"type": "token",
"token_id": "0x408e41876cCCDC0F92210600ef50372656052a38",
"id": 2539
},
{
"coin": 192,
"type": "coin",
"id": 2540
},
{
"coin": 60,
"type": "token",
"token_id": "0x5c3a228510D246b78a3765C20221Cbf3082b44a4",
"id": 2541
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2544
},
{
"coin": 60,
"type": "token",
"token_id": "0xB98d4C97425d9908E66E53A6fDf673ACcA0BE986",
"id": 2545
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2546
},
{
"coin": 60,
"type": "token",
"token_id": "0x5c743a35E903F6c584514ec617ACEe0611Cf44f3",
"id": 2547
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2548
},
{
"coin": 178,
"type": "coin",
"id": 2548
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2549
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2551
},
{
"coin": 60,
"type": "token",
"token_id": "0xEda8B016efA8b1161208Cf041cD86972eeE0F31E",
"id": 2552
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2553
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2554
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2556
},
{
"coin": 60,
"type": "token",
"token_id": "0x4D8fc1453a0F359e99c9675954e656D80d996FbF",
"id": 2557
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2559
},
{
"coin": 60,
"type": "token",
"token_id": "0xC741f06082AA47F93729070aD0dD95E223Bda091",
"id": 2562
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2564
},
{
"coin": 60,
"type": "token",
"token_id": "0x9a005c9a89BD72a4Bd27721E7a09A3c11D2b03C4",
"id": 2565
},
{
"coin": 1024,
"type": "coin",
"id": 2566
},
{
"coin": 60,
"type": "token",
"token_id": "0xaBbBB6447B68ffD6141DA77C18c7B5876eD6c5ab",
"id": 2567
},
{
"coin": 60,
"type": "token",
"token_id": "0x0D262e5dC4A06a0F1c90cE79C7a60C09DfC884E4",
"id": 2568
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2569
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2572
},
{
"coin": 60,
"type": "token",
"token_id": "0xD49ff13661451313cA1553fd6954BD1d9b6E02b9",
"id": 2573
},
{
"coin": 183,
"type": "coin",
"id": 2575
},
{
"coin": 60,
"type": "token",
"token_id": "0xDD16eC0F66E54d453e6756713E533355989040E4",
"id": 2576
},
{
"coin": 175,
"type": "coin",
"id": 2577
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2578
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2579
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2580
},
{
"coin": 60,
"type": "token",
"token_id": "0xfD107B473AB90e8Fbd89872144a3DC92C40Fa8C9",
"id": 2582
},
{
"coin": 60,
"type": "token",
"token_id": "0x151202C9c18e495656f372281F493EB7698961D5",
"id": 2584
},
{
"coin": 60,
"type": "token",
"token_id": "0x1122B6a0E00DCe0563082b6e2953f3A943855c1F",
"id": 2585
},
{
"coin": 60,
"type": "token",
"token_id": "0xC011A72400E58ecD99Ee497CF89E3775d4bd732F",
"id": 2586
},
{
"coin": 60,
"type": "token",
"token_id": "0xC011a73ee8576Fb46F5E1c5751cA3B9Fe0af2a6F",
"id": 2586
},
{
"coin": 20000714,
"type": "token",
"token_id": "0x9Ac983826058b8a9C7Aa1C9171441191232E8404",
"id": 2586
},
{
"coin": 60,
"type": "token",
"token_id": "0xA4e8C3Ec456107eA67d3075bF9e3DF3A75823DB0",
"id": 2588
},
{
"coin": 60,
"type": "token",
"token_id": "0x07a58629AAF3e1A0d07D8f43114B76BD5EEe3B91",
"id": 2589
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2591
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2592
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2593
},
{
"coin": 60,
"type": "token",
"token_id": "0xFFE02ee4C69eDf1b340fCaD64fbd6b37a7b9e265",
"id": 2595
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2597
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2601
},
{
"coin": 60,
"type": "token",
"token_id": "0x28b5E12CcE51f15594B0b91d5b5AdaA70F684a02",
"id": 2602
},
{
"coin": 60,
"type": "token",
"token_id": "0xA15C7Ebe1f07CaF6bFF097D8a589fb8AC49Ae5B3",
"id": 2603
},
{
"coin": 222,
"type": "coin",
"id": 2604
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2605
},
{
"coin": 5718350,
"type": "coin",
"id": 2606
},
{
"coin": 60,
"type": "token",
"token_id": "0xCA0e7269600d353F70b14Ad118A49575455C0f2f",
"id": 2607
},
{
"coin": 714,
"type": "token",
"token_id": "MITH-C76",
"id": 2608
},
{
"coin": 60,
"type": "token",
"token_id": "0x4de2573e27E648607B50e1Cfff921A33E4A34405",
"id": 2609
},
{
"coin": 60,
"type": "token",
"token_id": "0x0F02e27745e3b6e9e1310d19469e2b5D7B5eC99A",
"id": 2610
},
{
"coin": 60,
"type": "token",
"token_id": "0x6888a16eA9792c15A4DCF2f6C623D055c8eDe792",
"id": 2611
},
{
"coin": 60,
"type": "token",
"token_id": "0x1FE70bE734e473e5721ea57C8B5B01e6Caa52686",
"id": 2612
},
{
"coin": 60,
"type": "token",
"token_id": "0x28dee01D53FED0Edf5f6E310BF8Ef9311513Ae40",
"id": 2614
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2615
},
{
"coin": 60,
"type": "token",
"token_id": "0x001F0aA5dA15585e5b2305DbaB2bac425ea71007",
"id": 2617
},
{
"coin": 60,
"type": "token",
"token_id": "0x355a458d555151D3B27F94227960Ade1504E526a",
"id": 2618
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2619
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2624
},
{
"coin": 60,
"type": "token",
"token_id": "0x23352036E911A22Cfc692B5E2E196692658ADED9",
"id": 2626
},
{
"coin": 265,
"type": "coin",
"id": 2627
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2628
},
{
"coin": 60,
"type": "token",
"token_id": "0x9E46A38F5DaaBe8683E10793b06749EEF7D733d1",
"id": 2630
},
{
"coin": 60,
"type": "token",
"token_id": "0xbf52F2ab39e26E0951d2a02b49B7702aBe30406a",
"id": 2631
},
{
"coin": 384,
"type": "coin",
"id": 2633
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2634
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2635
},
{
"coin": 60,
"type": "token",
"token_id": "0xBe428c3867F05deA2A89Fc76a102b544eaC7f772",
"id": 2642
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2643
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2644
},
{
"coin": 60,
"type": "token",
"token_id": "0x3543638eD4a9006E4840B105944271Bcea15605D",
"id": 2645
},
{
"coin": 60,
"type": "token",
"token_id": "0xE69a353b3152Dd7b706ff7dD40fe1d18b7802d31",
"id": 2646
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2650
},
{
"coin": 60,
"type": "token",
"token_id": "0xc12d099be31567add4e4e4d0D45691C3F58f5663",
"id": 2653
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2654
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2656
},
{
"coin": 60,
"type": "token",
"token_id": "0x6710c63432A2De02954fc0f851db07146a6c0312",
"id": 2658
},
{
"coin": 714,
"type": "token",
"token_id": "ADI-6BB",
"id": 2660
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2661
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2664
},
{
"coin": 60,
"type": "token",
"token_id": "0xd559f20296FF4895da39b5bd9ADd54b442596a61",
"id": 2667
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2669
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2670
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2672
},
{
"coin": 25718,
"type": "coin",
"id": 2673
},
{
"coin": 413,
"type": "coin",
"id": 2674
},
{
"coin": 60,
"type": "token",
"token_id": "0xE5Dada80Aa6477e85d09747f2842f7993D0Df71C",
"id": 2675
},
{
"coin": 60,
"type": "token",
"token_id": "0x13C2fab6354d3790D8ece4f0f1a3280b4A25aD96",
"id": 2676
},
{
"coin": 60,
"type": "token",
"token_id": "0xe7D3e4413E29ae35B0893140F4500965c74365e5",
"id": 2678
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2679
},
{
"coin": 60,
"type": "token",
"token_id": "0xd2Fa8f92Ea72AbB35dBD6DECa57173d22db2BA49",
"id": 2681
},
{
"coin": 60,
"type": "token",
"token_id": "0x6c6EE5e31d828De241282B9606C8e98Ea48526E2",
"id": 2682
},
{
"coin": 60,
"type": "token",
"token_id": "0x0947b0e6D821378805c9598291385CE7c791A6B2",
"id": 2686
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2687
},
{
"coin": 1919,
"type": "coin",
"id": 2688
},
{
"coin": 60,
"type": "token",
"token_id": "0xFc2C4D8f95002C14eD0a7aA65102Cac9e5953b5E",
"id": 2689
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2690
},
{
"coin": 60,
"type": "token",
"token_id": "0x53066cdDBc0099eb6c96785d9b3DF2AAeEDE5DA3",
"id": 2691
},
{
"coin": 714,
"type": "token",
"token_id": "NEXO-A84",
"id": 2694
},
{
"coin": 60,
"type": "token",
"token_id": "0xC343f099d3E41aA5C1b59470450e21E92E2d840b",
"id": 2695
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2696
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2698
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2699
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2700
},
{
"coin": 60,
"type": "token",
"token_id": "0x763186eB8d4856D536eD4478302971214FEbc6A9",
"id": 2703
},
{
"coin": 60,
"type": "token",
"token_id": "0xb0280743b44bF7db4B6bE482b2Ba7b75E5dA096C",
"id": 2704
},
{
"coin": 60,
"type": "token",
"token_id": "0x737F98AC8cA59f2C68aD658E3C3d8C8963E40a4c",
"id": 2705
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2707
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2708
},
{
"coin": 60,
"type": "token",
"token_id": "0x4a527d8fc13C5203AB24BA0944F4Cb14658D1Db6",
"id": 2709
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2711
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2712
},
{
"coin": 60,
"type": "token",
"token_id": "0x4Cd988AfBad37289BAAf53C13e98E2BD46aAEa8c",
"id": 2713
},
{
"coin": 561,
"type": "coin",
"id": 2714
},
{
"coin": 60,
"type": "token",
"token_id": "0x3abdfF32F76b42E7635bdb7e425f0231A5F3aB17",
"id": 2715
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2717
},
{
"coin": 60,
"type": "token",
"token_id": "0x61f33Da40594cEc1E3Dc900FaF99F861D01e2e7D",
"id": 2719
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2720
},
{
"coin": 60,
"type": "token",
"token_id": "0x1829aA045E21E0D59580024A951DB48096e01782",
"id": 2723
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2724
},
{
"coin": 60,
"type": "token",
"token_id": "0xd99b8A7fA48E25Cce83B81812220A3E03Bf64e5f",
"id": 2725
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2726
},
{
"coin": 60,
"type": "token",
"token_id": "0xE1Aee98495365fc179699C1bB3E761FA716beE62",
"id": 2727
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2728
},
{
"coin": 60,
"type": "token",
"token_id": "0x1c79ab32C66aCAa1e9E81952B8AAa581B43e54E7",
"id": 2729
},
{
"coin": 212,
"type": "coin",
"id": 2731
},
{
"coin": 60,
"type": "token",
"token_id": "0x1A0F2aB46EC630F9FD638029027b552aFA64b94c",
"id": 2732
},
{
"coin": 60,
"type": "token",
"token_id": "0x17e67d1CB4e349B9CA4Bc3e17C7DF2a397A7BB64",
"id": 2733
},
{
"coin": 60,
"type": "token",
"token_id": "0xf263292e14d9D8ECd55B58dAD1F1dF825a874b7c",
"id": 2734
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2735
},
{
"coin": 60,
"type": "token",
"token_id": "0x228ba514309FFDF03A81a205a6D040E429d6E80C",
"id": 2737
},
{
"coin": 60,
"type": "token",
"token_id": "0x4f3AfEC4E5a3F2A6a1A411DEF7D7dFe50eE057bF",
"id": 2739
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2741
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2742
},
{
"coin": 60,
"type": "token",
"token_id": "0x347C099f110Ca6761779329D2879957b606b6aCE",
"id": 2745
},
{
"coin": 60,
"type": "token",
"token_id": "0xF03045a4C8077e38f3B8e2Ed33b8aEE69edF869F",
"id": 2747
},
{
"coin": 714,
"type": "token",
"token_id": "LOKI-6A9",
"id": 2748
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2749
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2751
},
{
"coin": 60,
"type": "token",
"token_id": "0xc20464e0C373486d2B3335576e83a218b1618A5E",
"id": 2752
},
{
"coin": 60,
"type": "token",
"token_id": "0x4162178B78D6985480A308B2190EE5517460406D",
"id": 2753
},
{
"coin": 60,
"type": "token",
"token_id": "0x491C9A23DB85623EEd455a8EfDd6AbA9b911C5dF",
"id": 2754
},
{
"coin": 60,
"type": "token",
"token_id": "0x10bA8C420e912bF07BEdaC03Aa6908720db04e0c",
"id": 2755
},
{
"coin": 820,
"type": "coin",
"id": 2757
},
{
"coin": 60,
"type": "token",
"token_id": "0x8400D94A5cb0fa0D041a3788e395285d61c9ee5e",
"id": 2758
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2759
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2760
},
{
"coin": 60,
"type": "token",
"token_id": "0x9D86b1B2554ec410ecCFfBf111A6994910111340",
"id": 2762
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2763
},
{
"coin": 60,
"type": "token",
"token_id": "0x2859021eE7F2Cb10162E67F33Af2D22764B31aFf",
"id": 2764
},
{
"coin": 60,
"type": "token",
"token_id": "0x55296f69f40Ea6d20E478533C15A6B08B654E758",
"id": 2765
},
{
"coin": 60,
"type": "token",
"token_id": "0x88d50B466BE55222019D71F9E8fAe17f5f45FCA1",
"id": 2766
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2771
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2772
},
{
"coin": 2000,
"type": "coin",
"id": 2773
},
{
"coin": 60,
"type": "token",
"token_id": "0x1CCAA0F2a7210d76E1fDec740d5F323E2E1b1672",
"id": 2775
},
{
"coin": 714,
"type": "token",
"token_id": "AVA-645",
"id": 2776
},
{
"coin": 20000714,
"type": "token",
"token_id": "<KEY>",
"id": 2776
},
{
"coin": 304,
"type": "coin",
"id": 2777
},
{
"coin": 304,
"type": "coin",
"id": 2777
},
{
"coin": 20000714,
"type": "token",
"token_id": "<KEY>",
"id": 2777
},
{
"coin": 60,
"type": "token",
"token_id": "0x5dbe296F97B23C4A6AA6183D73e574D02bA5c719",
"id": 2779
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2780
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2825
},
{
"coin": 60,
"type": "token",
"token_id": "0xA9d2927d3a04309E008B6af6E2e282AE2952e7fD",
"id": 2826
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2828
},
{
"coin": 60,
"type": "token",
"token_id": "0xB1eeF147028E9f480DbC5ccaA3277D417D1b85F0",
"id": 2830
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2833
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2835
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2836
},
{
"coin": 60,
"type": "token",
"token_id": "0xB6eD7644C69416d67B522e20bC294A9a9B405B31",
"id": 2837
},
{
"coin": 60,
"type": "token",
"token_id": "0xEA26c4aC16D4a5A106820BC8AEE85fd0b7b2b664",
"id": 2840
},
{
"coin": 60,
"type": "token",
"token_id": "0xC80c5E40220172B36aDee2c951f26F2a577810C5",
"id": 2842
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2844
},
{
"coin": 60,
"type": "token",
"token_id": "0x0E8d6b471e332F140e7d9dbB99E5E3822F728DA6",
"id": 2847
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2848
},
{
"coin": 60,
"type": "token",
"token_id": "0xCDB7eCFd3403Eef3882c65B761ef9B5054890a47",
"id": 2849
},
{
"coin": 714,
"type": "token",
"token_id": "TM2-0C4",
"id": 2850
},
{
"coin": 60,
"type": "token",
"token_id": "0xAf8A215e81FAea7C180CE22b72483525121813BD",
"id": 2852
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2855
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2856
},
{
"coin": 60,
"type": "token",
"token_id": "0x75c5eE419331B6150879530D06f9Ba054755F1DA",
"id": 2857
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2858
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2859
},
{
"coin": 6060,
"type": "coin",
"id": 2861
},
{
"coin": 60,
"type": "token",
"token_id": "0x624d520BAB2E4aD83935Fa503fB130614374E850",
"id": 2862
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2863
},
{
"coin": 60,
"type": "token",
"token_id": "0xC86D054809623432210c107af2e3F619DcFbf652",
"id": 2866
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2867
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2868
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2869
},
{
"coin": 60,
"type": "token",
"token_id": "0x3c4bEa627039F0B7e7d21E34bB9C9FE962977518",
"id": 2871
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2872
},
{
"coin": 60,
"type": "token",
"token_id": "0xa3d58c4E56fedCae3a7c43A725aeE9A71F0ece4e",
"id": 2873
},
{
"coin": 60,
"type": "token",
"token_id": "0x9ab165D795019b6d8B3e971DdA91071421305e5a",
"id": 2874
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2878
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2879
},
{
"coin": 60,
"type": "token",
"token_id": "0x436F0F3a982074c4a05084485D421466a994FE53",
"id": 2880
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2882
},
{
"coin": 60,
"type": "token",
"token_id": "0x4AaC461C86aBfA71e9d00d9a2cde8d74E4E1aeEa",
"id": 2883
},
{
"coin": 60,
"type": "token",
"token_id": "0x1ed7AE1F0E2Fa4276DD7ddC786334a3dF81D50c0",
"id": 2884
},
{
"coin": 60,
"type": "token",
"token_id": "0x8e1b448EC7aDFc7Fa35FC2e885678bD323176E34",
"id": 2885
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2889
},
{
"coin": 60,
"type": "token",
"token_id": "0x8E5610ab5E39d26828167640EA29823fe1dD5843",
"id": 2890
},
{
"coin": 60,
"type": "token",
"token_id": "0x954b890704693af242613edEf1B603825afcD708",
"id": 2891
},
{
"coin": 60,
"type": "token",
"token_id": "0x6863bE0e7CF7ce860A574760e9020D519a8bDC47",
"id": 2893
},
{
"coin": 60,
"type": "token",
"token_id": "0xA86a0Da9D05d0771955DF05B44Ca120661aF16DE",
"id": 2894
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2895
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2896
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2898
},
{
"coin": 60,
"type": "token",
"token_id": "0x4f27053F32edA8Af84956437Bc00e5fFa7003287",
"id": 2899
},
{
"coin": 60,
"type": "token",
"token_id": "0x943ED852DadB5C3938ECdC6883718df8142DE4C8",
"id": 2901
},
{
"coin": 60,
"type": "token",
"token_id": "0xE3F4b4A5d91e5cB9435B947F090A319737036312",
"id": 2902
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2903
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2906
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2907
},
{
"coin": 60,
"type": "token",
"token_id": "0x2bBA3CF6DE6058cc1B4457Ce00deb359E2703d7F",
"id": 2908
},
{
"coin": 60,
"type": "token",
"token_id": "0xD9A12Cde03a86E800496469858De8581D3A5353d",
"id": 2910
},
{
"coin": 60,
"type": "token",
"token_id": "0xac2e58A06E6265F1Cf5084EE58da68e5d75b49CA",
"id": 2911
},
{
"coin": 410,
"type": "coin",
"id": 2912
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2913
},
{
"coin": 60,
"type": "token",
"token_id": "0x6A27348483D59150aE76eF4C0f3622A78B0cA698",
"id": 2914
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2915
},
{
"coin": 242,
"type": "coin",
"id": 2916
},
{
"coin": 60,
"type": "token",
"token_id": "0x4375E7aD8A01B8eC3Ed041399f62D9Cd120e0063",
"id": 2918
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2919
},
{
"coin": 60,
"type": "token",
"token_id": "0x83e2BE8d114F9661221384B3a50d24B96a5653F5",
"id": 2920
},
{
"coin": 60,
"type": "token",
"token_id": "0x64A60493D888728Cf42616e034a0dfEAe38EFCF0",
"id": 2921
},
{
"coin": 60,
"type": "token",
"token_id": "0x97AEB5066E1A590e868b511457BEb6FE99d329F5",
"id": 2922
},
{
"coin": 60,
"type": "token",
"token_id": "0x44449Fa4d607F807d1eD4a69ad942971728391C8",
"id": 2923
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2924
},
{
"coin": 60,
"type": "token",
"token_id": "0x1A66E09F7DccC10eAe46e27cfA6B8d44a50dF1E7",
"id": 2926
},
{
"coin": 60,
"type": "token",
"token_id": "0x57Ab1E02fEE23774580C119740129eAC7081e9D3",
"id": 2927
},
{
"coin": 60,
"type": "token",
"token_id": "0xF8e06E4e4A80287FDCa5b02dcCecAa9D0954840F",
"id": 2929
},
{
"coin": 60,
"type": "token",
"token_id": "0xF4FaEa455575354d2699BC209B0a65CA99F69982",
"id": 2932
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2933
},
{
"coin": 60,
"type": "token",
"token_id": "0x1410434b0346f5bE678d0FB554E5c7ab620f8f4a",
"id": 2934
},
{
"coin": 60,
"type": "token",
"token_id": "0xdfdc0D82d96F8fd40ca0CFB4A288955bECEc2088",
"id": 2936
},
{
"coin": 666666,
"type": "coin",
"id": 2937
},
{
"coin": 60,
"type": "token",
"token_id": "0x5c64031C62061865E5FD0F53d3CDaeF80f72E99D",
"id": 2938
},
{
"coin": 688,
"type": "coin",
"id": 2941
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2943
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2944
},
{
"coin": 60,
"type": "token",
"token_id": "0x63f584FA56E60e4D0fE8802b27C7e6E3b33E007f",
"id": 2945
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2946
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2947
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2949
},
{
"coin": 60,
"type": "token",
"token_id": "0x60C24407d01782C2175D32fe7C8921ed732371D1",
"id": 2950
},
{
"coin": 60,
"type": "token",
"token_id": "0xce5114d7fa8361F0c088EE26FA3A5446C4a1f50b",
"id": 2953
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2954
},
{
"coin": 60,
"type": "token",
"token_id": "0xD1E10C37A27d95D95720291b1Dc6f12F74C71443",
"id": 2955
},
{
"coin": 60,
"type": "token",
"token_id": "0x9d9223436dDD466FC247e9dbbD20207e640fEf58",
"id": 2957
},
{
"coin": 1984,
"type": "coin",
"id": 2958
},
{
"coin": 60,
"type": "token",
"token_id": "0xe3278DF3eB2085bA9B6899812A99a10f9CA5E0Df",
"id": 2960
},
{
"coin": 60,
"type": "token",
"token_id": "0x4A42d2c580f83dcE404aCad18dab26Db11a1750E",
"id": 2961
},
{
"coin": 60,
"type": "token",
"token_id": "0xFBc3c8Aad80B5934D134e2CCE065702FF254AD7D",
"id": 2962
},
{
"coin": 60,
"type": "token",
"token_id": "0xF03f8D65BaFA598611C3495124093c56e8F638f0",
"id": 2963
},
{
"coin": 60,
"type": "token",
"token_id": "0xd2946be786F35c3Cc402C29b323647aBda799071",
"id": 2965
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2966
},
{
"coin": 60,
"type": "token",
"token_id": "0x81705082eF9f0D660f07BE80093D46d826d48b25",
"id": 2969
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2970
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2972
},
{
"coin": 382,
"type": "coin",
"id": 2976
},
{
"coin": 60,
"type": "token",
"token_id": "0x47da42696A866CDC61A4C809A515500a242909C1",
"id": 2977
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2978
},
{
"coin": 60,
"type": "token",
"token_id": "0x95C9bD1f81CEe7391dA3EaC81693E60F3292c1E0",
"id": 2979
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2980
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2981
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2982
},
{
"coin": 60,
"type": "token",
"token_id": "0xF8C595D070d104377f58715ce2E6C93E49a87f3c",
"id": 2986
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2987
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2989
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 2990
},
{
"coin": 400,
"type": "coin",
"id": 2991
},
{
"coin": 201,
"type": "coin",
"id": 2994
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3001
},
{
"coin": 60,
"type": "token",
"token_id": "0xFc44EC51C80e35A87Bc2140299B1636eC83DFb04",
"id": 3004
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3010
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3011
},
{
"coin": 818,
"type": "token",
"token_id": "0x0000000000000000000000000000456E65726779",
"id": 3012
},
{
"coin": 60,
"type": "token",
"token_id": "0x8D5682941cE456900b12d47ac06a88b47C764CE1",
"id": 3014
},
{
"coin": 60,
"type": "token",
"token_id": "0x4a6058666cf1057eaC3CD3A5a614620547559fc9",
"id": 3015
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3016
},
{
"coin": 60,
"type": "token",
"token_id": "0xA984A92731C088F1eA4D53b71A2565a399F7D8D5",
"id": 3021
},
{
"coin": 60,
"type": "token",
"token_id": "0x554FFc77F4251a9fB3c0E3590a6a205f8d4e067D",
"id": 3022
},
{
"coin": 7562605,
"type": "coin",
"id": 3023
},
{
"coin": 60,
"type": "token",
"token_id": "0x840fe75ABfaDc0F2d54037829571B2782e919ce4",
"id": 3027
},
{
"coin": 19167,
"type": "coin",
"id": 3029
},
{
"coin": 60,
"type": "token",
"token_id": "0x0223fc70574214F65813fE336D870Ac47E147fAe",
"id": 3040
},
{
"coin": 60,
"type": "token",
"token_id": "0x34364BEe11607b1963d66BCA665FDE93fCA666a8",
"id": 3053
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3054
},
{
"coin": 60,
"type": "token",
"token_id": "0x31f3D9D1BeCE0c033fF78fA6DA60a6048F3E13c5",
"id": 3055
},
{
"coin": 60,
"type": "token",
"token_id": "0x9972A0F24194447E73a7e8b6CD26a52e02DDfAD5",
"id": 3056
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3060
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3061
},
{
"coin": 445,
"type": "coin",
"id": 3063
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3065
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3066
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3067
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3069
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3070
},
{
"coin": 303,
"type": "coin",
"id": 3071
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3073
},
{
"coin": 818,
"type": "coin",
"id": 3077
},
{
"coin": 60,
"type": "token",
"token_id": "0x4618519de4C304F3444ffa7f812dddC2971cc688",
"id": 3078
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3079
},
{
"coin": 327,
"type": "coin",
"id": 3080
},
{
"coin": 60,
"type": "token",
"token_id": "0x171D750d42d661B62C277a6B486ADb82348c3Eca",
"id": 3081
},
{
"coin": 60,
"type": "token",
"token_id": "0xC05d14442A510De4D3d71a3d316585aA0CE32b50",
"id": 3083
},
{
"coin": 60,
"type": "token",
"token_id": "0xFA456Cf55250A839088b27EE32A424d7DAcB54Ff",
"id": 3084
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3085
},
{
"coin": 60,
"type": "token",
"token_id": "0xfF5c25D2F40B47C4a37f989DE933E26562Ef0Ac0",
"id": 3086
},
{
"coin": 60,
"type": "token",
"token_id": "0x66BaD545596fb17a0B4ebDC003a85dEF10E8F6Ae",
"id": 3090
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3091
},
{
"coin": 60,
"type": "token",
"token_id": "0x245ef47D4d0505ECF3Ac463F4d81f41ADE8f1fd1",
"id": 3092
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3095
},
{
"coin": 714,
"type": "token",
"token_id": "<KEY>",
"id": 3096
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3097
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3101
},
{
"coin": 60,
"type": "token",
"token_id": "0x29536B7Ca7029b5cDDEB03c0451715615AcA35ba",
"id": 3110
},
{
"coin": 60,
"type": "token",
"token_id": "0x8E766F57F7d16Ca50B4A0b90b88f6468A09b0439",
"id": 3115
},
{
"coin": 60,
"type": "token",
"token_id": "0x17Aa18A4B64A55aBEd7FA543F2Ba4E91f2dcE482",
"id": 3116
},
{
"coin": 60,
"type": "token",
"token_id": "0x851017523AE205adc9195e7F97D029f4Cfe7794c",
"id": 3117
},
{
"coin": 60,
"type": "token",
"token_id": "0x170b275CEd089FffAEBFe927F445a350ED9160DC",
"id": 3120
},
{
"coin": 60,
"type": "token",
"token_id": "0x8a88f04e0c905054D2F33b26BB3A46D7091A039A",
"id": 3121
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3123
},
{
"coin": 60,
"type": "token",
"token_id": "0x60c68a87bE1E8a84144b543AAcfA77199cd3d024",
"id": 3127
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3128
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3131
},
{
"coin": 1987,
"type": "coin",
"id": 3132
},
{
"coin": 277,
"type": "coin",
"id": 3133
},
{
"coin": 60,
"type": "token",
"token_id": "0xf4065e4477e91C177DED71A7A6fb5ee07DC46BC9",
"id": 3135
},
{
"coin": 714,
"type": "token",
"token_id": "MEETONE-031",
"id": 3136
},
{
"coin": 60,
"type": "token",
"token_id": "0x1fC52f1ABade452Dd4674477D4711951700b3d27",
"id": 3138
},
{
"coin": 60,
"type": "token",
"token_id": "0x973e52691176d36453868D9d86572788d27041A9",
"id": 3139
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3140
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3141
},
{
"coin": 60,
"type": "token",
"token_id": "0x3e65E1eeFdE5Ea7ccfC9a9a1634AbE90f32262f8",
"id": 3142
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3144
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3146
},
{
"coin": 1397,
"type": "coin",
"id": 3147
},
{
"coin": 60,
"type": "token",
"token_id": "0xFEF3884b603C33EF8eD4183346E093A173C94da6",
"id": 3148
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3149
},
{
"coin": 200625,
"type": "coin",
"id": 3151
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3152
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3153
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3154
},
{
"coin": 60,
"type": "token",
"token_id": "0x4a220E6096B25EADb88358cb44068A3248254675",
"id": 3155
},
{
"coin": 60,
"type": "token",
"token_id": "0xf8b358b3397a8ea5464f8cc753645d42e14b79EA",
"id": 3156
},
{
"coin": 428,
"type": "coin",
"id": 3158
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3161
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3162
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3164
},
{
"coin": 404,
"type": "coin",
"id": 3166
},
{
"coin": 60,
"type": "token",
"token_id": "0x2AEC18c5500f21359CE1BEA5Dc1777344dF4C0Dc",
"id": 3168
},
{
"coin": 60,
"type": "token",
"token_id": "0x6059F55751603eAd7Dc6d280ad83A7B33D837C90",
"id": 3169
},
{
"coin": 60,
"type": "token",
"token_id": "0xE2492F8D2A2618d8709Ca99b1d8d75713Bd84089",
"id": 3171
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3174
},
{
"coin": 60,
"type": "token",
"token_id": "0xD7b3669C7d3E38aB5a441383D41F25E003e02148",
"id": 3177
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3179
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3181
},
{
"coin": 60,
"type": "token",
"token_id": "0x7995ab36bB307Afa6A683C24a25d90Dc1Ea83566",
"id": 3182
},
{
"coin": 60,
"type": "token",
"token_id": "0x4CcC3759eB48fAF1c6cfadaD2619E7038db6b212",
"id": 3188
},
{
"coin": 60,
"type": "token",
"token_id": "0x05D412CE18F24040bB3Fa45CF2C69e506586D8e8",
"id": 3189
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3193
},
{
"coin": 60,
"type": "token",
"token_id": "0xE8663A64A96169ff4d95b4299E7ae9a76b905B31",
"id": 3194
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3196
},
{
"coin": 60,
"type": "token",
"token_id": "0x016396044709EB3edc69C44f4d5Fa6996917E4e8",
"id": 3198
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3205
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3207
},
{
"coin": 60,
"type": "token",
"token_id": "0x5AB793E36070F0fac928EA15826b0c1Bc5365119",
"id": 3208
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3209
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3216
},
{
"coin": 1024,
"type": "token",
"token_id": "ong",
"id": 3217
},
{
"coin": 9797,
"type": "coin",
"id": 3218
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3219
},
{
"coin": 60,
"type": "token",
"token_id": "0xd82Df0ABD3f51425Eb15ef7580fDA55727875f14",
"id": 3220
},
{
"coin": 60,
"type": "token",
"token_id": "0xEf51c9377FeB29856E61625cAf9390bD0B67eA18",
"id": 3222
},
{
"coin": 60,
"type": "token",
"token_id": "0x76974C7B79dC8a6a109Fd71fd7cEb9E40eff5382",
"id": 3223
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3224
},
{
"coin": 1000,
"type": "coin",
"id": 3225
},
{
"coin": 60,
"type": "token",
"token_id": "0xca694eb79eF355eA0999485d211E68F39aE98493",
"id": 3227
},
{
"coin": 60,
"type": "token",
"token_id": "0x5301Eae39a4cBa1CC2A74E861fDed062cA3E3420",
"id": 3229
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3231
},
{
"coin": 60,
"type": "token",
"token_id": "0xBAE235823D7255D9D48635cEd4735227244Cd583",
"id": 3232
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3234
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3238
},
{
"coin": 31102,
"type": "coin",
"id": 3240
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3241
},
{
"coin": 800,
"type": "coin",
"id": 3242
},
{
"coin": 60,
"type": "token",
"token_id": "0x13119E34E140097a507B07a5564bDe1bC375D9e6",
"id": 3243
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3247
},
{
"coin": 60,
"type": "token",
"token_id": "0x4289c043A12392F1027307fB58272D8EBd853912",
"id": 3248
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3249
},
{
"coin": 60,
"type": "token",
"token_id": "0x613Fa2A6e6DAA70c659060E86bA1443D2679c9D7",
"id": 3251
},
{
"coin": 60,
"type": "token",
"token_id": "0x9064c91e51d7021A85AD96817e1432aBf6624470",
"id": 3252
},
{
"coin": 60,
"type": "token",
"token_id": "0x78c292D1445E6b9558bf42e8BC369271DeD062eA",
"id": 3255
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3256
},
{
"coin": 60,
"type": "token",
"token_id": "0x4AC00f287f36A6Aad655281fE1cA6798C9cb727b",
"id": 3257
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3258
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3259
},
{
"coin": 60,
"type": "token",
"token_id": "0x38c87AA89B2B8cD9B95b736e1Fa7b612EA972169",
"id": 3260
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3261
},
{
"coin": 60,
"type": "token",
"token_id": "0x749f35Ff65932E68267dd82F6CD85eeA735d700E",
"id": 3262
},
{
"coin": 447,
"type": "coin",
"id": 3263
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3264
},
{
"coin": 60,
"type": "token",
"token_id": "0x7C2E5b7ec572199D3841f6a38F7D4868BD0798f1",
"id": 3265
},
{
"coin": 60,
"type": "token",
"token_id": "0x1a2277C83930b7a64C3e3D5544Eaa8C4f946B1B7",
"id": 3270
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3271
},
{
"coin": 60,
"type": "token",
"token_id": "0xd42debE4eDc92Bd5a3FBb4243e1ecCf6d63A4A5d",
"id": 3274
},
{
"coin": 60,
"type": "token",
"token_id": "0x36B4B58DE030E93775E151a78D796039a11a2548",
"id": 3277
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3279
},
{
"coin": 60,
"type": "token",
"token_id": "0xD7394087E1DBBE477FE4F1CF373B9Ac9459565fF",
"id": 3280
},
{
"coin": 60,
"type": "token",
"token_id": "0x2ba6b1E4424e19816382d15937739959F7DA5fD8",
"id": 3286
},
{
"coin": 60,
"type": "token",
"token_id": "0xD938137E6d96c72E4a6085412aDa2daD78ff89c4",
"id": 3287
},
{
"coin": 526,
"type": "coin",
"id": 3295
},
{
"coin": 60,
"type": "token",
"token_id": "0x5D64D850c8368008aFB39224E92aD0DcEFf3CF38",
"id": 3296
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3297
},
{
"coin": 60,
"type": "token",
"token_id": "0xaF1250fa68D7DECD34fD75dE8742Bc03B29BD58e",
"id": 3301
},
{
"coin": 60,
"type": "token",
"token_id": "0xAAf37055188Feee4869dE63464937e683d61b2a1",
"id": 3302
},
{
"coin": 60,
"type": "token",
"token_id": "0x97Cb5Cc1b2e10cC56DC16ab9179f06dfEDBe41A2",
"id": 3304
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3305
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3306
},
{
"coin": 60,
"type": "token",
"token_id": "0x780116D91E5592E58a3b3c76A351571b39abCEc6",
"id": 3314
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3315
},
{
"coin": 60,
"type": "token",
"token_id": "0x8578530205CEcbe5DB83F7F29EcfEEC860C297C2",
"id": 3316
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3317
},
{
"coin": 60,
"type": "token",
"token_id": "0x755eb14D2fefF2939EB3026f5CaD9D03775b9fF4",
"id": 3321
},
{
"coin": 714,
"type": "token",
"token_id": "PYN-C37",
"id": 3323
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3325
},
{
"coin": 60,
"type": "token",
"token_id": "0xe11609b9a51CAF7d32A55896386aC52ED90e66F1",
"id": 3328
},
{
"coin": 60,
"type": "token",
"token_id": "0xcec38306558a31cdbb2a9d6285947C5b44A24f3e",
"id": 3329
},
{
"coin": 60,
"type": "token",
"token_id": "0x8E870D67F660D95d5be530380D0eC0bd388289E1",
"id": 3330
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3331
},
{
"coin": 525,
"type": "coin",
"id": 3332
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3336
},
{
"coin": 60,
"type": "token",
"token_id": "0x687BfC3E73f6af55F0CccA8450114D107E781a0e",
"id": 3337
},
{
"coin": 60,
"type": "token",
"token_id": "0xC72ED4445B3fe9f0863106E344E241530d338906",
"id": 3340
},
{
"coin": 220,
"type": "coin",
"id": 3343
},
{
"coin": 60,
"type": "token",
"token_id": "0xb052F8A33D8bb068414EaDE06AF6955199f9f010",
"id": 3344
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3345
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3347
},
{
"coin": 2001,
"type": "coin",
"id": 3348
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3349
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3351
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3352
},
{
"coin": 60,
"type": "token",
"token_id": "0xaD5Fe5B0B8eC8fF4565204990E4405B2Da117d8e",
"id": 3354
},
{
"coin": 60,
"type": "token",
"token_id": "0x10086399DD8c1e3De736724AF52587a2044c9fA2",
"id": 3356
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3357
},
{
"coin": 60,
"type": "token",
"token_id": "0x567300e14f8d67e1F6720a95291Dce2511a86723",
"id": 3358
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3359
},
{
"coin": 60,
"type": "token",
"token_id": "0x92A5B04D0ED5D94D7a193d1d334D3D16996f4E13",
"id": 3360
},
{
"coin": 344,
"type": "coin",
"id": 3362
},
{
"coin": 60,
"type": "token",
"token_id": "0x0ee11500E7CfE6124e3716cD7755A0F0f2116244",
"id": 3363
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3367
},
{
"coin": 60,
"type": "token",
"token_id": "0xdf1338FbAfe7aF1789151627B886781ba556eF9a",
"id": 3369
},
{
"coin": 60,
"type": "token",
"token_id": "0x14C926F2290044B647e1Bf2072e67B495eff1905",
"id": 3373
},
{
"coin": 60,
"type": "token",
"token_id": "0x89c6c856a6db3e46107163D0cDa7A7FF211BD655",
"id": 3376
},
{
"coin": 60,
"type": "token",
"token_id": "0x7CC62d8E80Be9bEa3947F3443aD136f50f75b505",
"id": 3383
},
{
"coin": 228,
"type": "coin",
"id": 3386
},
{
"coin": 342,
"type": "coin",
"id": 3387
},
{
"coin": 60,
"type": "token",
"token_id": "0x2F141Ce366a2462f02cEA3D12CF93E4DCa49e4Fd",
"id": 3388
},
{
"coin": 60,
"type": "token",
"token_id": "0xd07D9Fe2d2cc067015E2b4917D24933804f42cFA",
"id": 3389
},
{
"coin": 60,
"type": "token",
"token_id": "0xa0d440C6DA37892Dc06Ee7930B2eedE0634FD681",
"id": 3393
},
{
"coin": 60,
"type": "token",
"token_id": "0x3918C42F14F2eB1168365F911f63E540E5A306b5",
"id": 3397
},
{
"coin": 60,
"type": "token",
"token_id": "0x81E74a3eA4BaB2277aA3b941E9D9F37B08Ac5374",
"id": 3402
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3404
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3406
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3408
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3408
},
{
"coin": 60,
"type": "token",
"token_id": "0x7528E3040376EdD5DB8263Db2F5bd1beD91467FB",
"id": 3412
},
{
"coin": 60,
"type": "token",
"token_id": "0x1966d718A565566e8E202792658D7b5Ff4ECe469",
"id": 3413
},
{
"coin": 60,
"type": "token",
"token_id": "0xe7E4279b80D319EDe2889855135A22021baf0907",
"id": 3414
},
{
"coin": 60,
"type": "token",
"token_id": "0xD45247c07379d94904E0A87b4481F0a1DDfa0C64",
"id": 3415
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3417
},
{
"coin": 916,
"type": "coin",
"id": 3418
},
{
"coin": 60,
"type": "token",
"token_id": "0x7C84e62859D0715eb77d1b1C4154Ecd6aBB21BEC",
"id": 3422
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3423
},
{
"coin": 60,
"type": "token",
"token_id": "0x72c9Fb7ED19D3ce51cea5C56B3e023cd918baaDf",
"id": 3427
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3429
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3431
},
{
"coin": 714,
"type": "token",
"token_id": "RPD-9E0",
"id": 3432
},
{
"coin": 60,
"type": "token",
"token_id": "0x0F612a09eAd55Bb81b6534e80ed5A21Bf0a27B16",
"id": 3433
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3435
},
{
"coin": 367,
"type": "coin",
"id": 3437
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3438
},
{
"coin": 301,
"type": "coin",
"id": 3441
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3446
},
{
"coin": 1620,
"type": "coin",
"id": 3447
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3448
},
{
"coin": 1313114,
"type": "coin",
"id": 3452
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3454
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3458
},
{
"coin": 60,
"type": "token",
"token_id": "0xC38f1fb49acDf2f1213CAf3319F6Eb3ea2cB7527",
"id": 3460
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3461
},
{
"coin": 60,
"type": "token",
"token_id": "0x419B8ED155180A8c9C64145e76DaD49c0A4Efb97",
"id": 3465
},
{
"coin": 60,
"type": "token",
"token_id": "0xd4a293aE8bB9E0BE12E99eB19d48239e8c83a136",
"id": 3466
},
{
"coin": 226,
"type": "coin",
"id": 3467
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3469
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3470
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3471
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3472
},
{
"coin": 60,
"type": "token",
"token_id": "0xcA2796F9F61dc7b238Aab043971e49c6164DF375",
"id": 3474
},
{
"coin": 60,
"type": "token",
"token_id": "0xe1A178B681BD05964d3e3Ed33AE731577d9d96dD",
"id": 3475
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3476
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3479
},
{
"coin": 424,
"type": "coin",
"id": 3482
},
{
"coin": 60,
"type": "token",
"token_id": "0x0ea984e789302B7B612147E4e4144e64f21425Eb",
"id": 3484
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3485
},
{
"coin": 60,
"type": "token",
"token_id": "0x49614661737EfBFC6a102efaeefDc8E197f7CC0e",
"id": 3489
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3491
},
{
"coin": 60,
"type": "token",
"token_id": "0x922aC473A3cC241fD3a0049Ed14536452D58D73c",
"id": 3492
},
{
"coin": 284,
"type": "coin",
"id": 3497
},
{
"coin": 60,
"type": "token",
"token_id": "0xca00bC15f67Ebea4b20DfaAa847CAcE113cc5501",
"id": 3498
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3499
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3501
},
{
"coin": 60,
"type": "token",
"token_id": "0xeaf61FC150CD5c3BeA75744e830D916E60EA5A9F",
"id": 3505
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3506
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3509
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3512
},
{
"coin": 714,
"type": "token",
"token_id": "FTM-A64",
"id": 3513
},
{
"coin": 715,
"type": "coin",
"id": 3514
},
{
"coin": 60,
"type": "token",
"token_id": "0x497bAEF294c11a5f0f5Bea3f2AdB3073DB448B56",
"id": 3515
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3517
},
{
"coin": 60,
"type": "token",
"token_id": "0x77C07555aF5ffdC946Fb47ce15EA68620E4e7170",
"id": 3519
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3580
},
{
"coin": 60,
"type": "token",
"token_id": "0x93ED3FBe21207Ec2E8f2d3c3de6e058Cb73Bc04d",
"id": 3581
},
{
"coin": 60,
"type": "token",
"token_id": "0x737fA0372c8D001904Ae6aCAf0552d4015F9c947",
"id": 3582
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3583
},
{
"coin": 60,
"type": "token",
"token_id": "0xa838be6E4b760E6061D4732D6B9F11Bf578f9A76",
"id": 3584
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3585
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3587
},
{
"coin": 60,
"type": "token",
"token_id": "0x340eF83Ec8560892168D4062720F030460468656",
"id": 3589
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3592
},
{
"coin": 60,
"type": "token",
"token_id": "0xFE76BE9cEC465ed3219a9972c21655D57d21aec6",
"id": 3595
},
{
"coin": 60,
"type": "token",
"token_id": "0xDAC4AE188AcE3C8985765eDc6C9B4739D4845DdC",
"id": 3597
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3598
},
{
"coin": 464,
"type": "coin",
"id": 3599
},
{
"coin": 60,
"type": "token",
"token_id": "0x174aFE7A032b5A33a3270a9f6C30746E25708532",
"id": 3600
},
{
"coin": 236,
"type": "coin",
"id": 3602
},
{
"coin": 60,
"type": "token",
"token_id": "0x4D807509aECe24C0fa5A102b6a3B059Ec6E14392",
"id": 3603
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3607
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3608
},
{
"coin": 60,
"type": "token",
"token_id": "0xED494c9e2F8E34e53BDD0EA9B4d80305cb15C5c2",
"id": 3609
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3610
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3612
},
{
"coin": 60,
"type": "token",
"token_id": "0x3E1d5A855aD9D948373aE68e4fe1f094612b1322",
"id": 3615
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3616
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3619
},
{
"coin": 714,
"type": "token",
"token_id": "ATP-38C",
"id": 3620
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3621
},
{
"coin": 229,
"type": "coin",
"id": 3622
},
{
"coin": 60,
"type": "token",
"token_id": "0xC28e931814725BbEB9e670676FaBBCb694Fe7DF2",
"id": 3625
},
{
"coin": 137,
"type": "coin",
"id": 3626
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3628
},
{
"coin": 60,
"type": "token",
"token_id": "0x4946Fcea7C692606e8908002e55A582af44AC121",
"id": 3631
},
{
"coin": 60,
"type": "token",
"token_id": "0x77599D2C6DB170224243e255e6669280F11F1473",
"id": 3632
},
{
"coin": 195,
"type": "token",
"token_id": "<KEY>",
"id": 3633
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3634
},
{
"coin": 60,
"type": "token",
"token_id": "0xA0b73E1Ff0B80914AB6fe0444E65848C4C34450b",
"id": 3635
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3636
},
{
"coin": 714,
"type": "token",
"token_id": "AERGO-46B",
"id": 3637
},
{
"coin": 60,
"type": "token",
"token_id": "0x70c621f949b6556c4545707a2d5d73A776b98359",
"id": 3638
},
{
"coin": 60,
"type": "token",
"token_id": "0x47e67BA66b0699500f18A53F94E2b9dB3D47437e",
"id": 3639
},
{
"coin": 60,
"type": "token",
"token_id": "0x58b6A8A3302369DAEc383334672404Ee733aB239",
"id": 3640
},
{
"coin": 60,
"type": "token",
"token_id": "0xd947b0ceab2A8885866B9A04A06AE99DE852a3d4",
"id": 3642
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3643
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3644
},
{
"coin": 60,
"type": "token",
"token_id": "0xd5f788ca0de8f17cBDe1D1E35aA8F005A87fa00b",
"id": 3645
},
{
"coin": 60,
"type": "token",
"token_id": "0x04A020325024F130988782bd5276e53595e8d16E",
"id": 3646
},
{
"coin": 60,
"type": "token",
"token_id": "0x722F2f3EaC7e9597C73a593f7CF3de33Fbfc3308",
"id": 3648
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3649
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3650
},
{
"coin": 714,
"type": "token",
"token_id": "LAMB-46C",
"id": 3657
},
{
"coin": 60,
"type": "token",
"token_id": "0x56325d180Ec3878A9028AfC7B0EDCEe7486Cc9df",
"id": 3658
},
{
"coin": 60,
"type": "token",
"token_id": "0x86E44543164D9b97B14ef7f6f3aB7bA670CAB346",
"id": 3659
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3662
},
{
"coin": 60,
"type": "token",
"token_id": "0x8b79656FC38a04044E495e22fAD747126ca305C4",
"id": 3664
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3666
},
{
"coin": 60,
"type": "token",
"token_id": "0xaD22f63404f7305e4713CcBd4F296f34770513f4",
"id": 3667
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3669
},
{
"coin": 60,
"type": "token",
"token_id": "0xacACa5b8805636608e14C64b0bFFfC2Deb2C6cEc",
"id": 3670
},
{
"coin": 385,
"type": "coin",
"id": 3672
},
{
"coin": 60,
"type": "token",
"token_id": "0xcca0c9c383076649604eE31b20248BC04FdF61cA",
"id": 3673
},
{
"coin": 714,
"type": "token",
"token_id": "RNO-14E",
"id": 3679
},
{
"coin": 60,
"type": "token",
"token_id": "0xD38dE88687172BDE440755b5237987e4A87c23A7",
"id": 3683
},
{
"coin": 60,
"type": "token",
"token_id": "0x5acD19b9c91e596b1f062f18e3D02da7eD8D1e50",
"id": 3685
},
{
"coin": 60,
"type": "token",
"token_id": "0x6400B5522f8D448C0803e6245436DD1c81dF09ce",
"id": 3686
},
{
"coin": 60,
"type": "token",
"token_id": "0x06e0feB0D74106c7adA8497754074D222Ec6BCDf",
"id": 3687
},
{
"coin": 60,
"type": "token",
"token_id": "0x0775C81A273B355e6a5b76e240BF708701F00279",
"id": 3690
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3691
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3692
},
{
"coin": 60,
"type": "token",
"token_id": "0x018d7D179350f1Bb9853D04982820E37ccE13a92",
"id": 3694
},
{
"coin": 60,
"type": "token",
"token_id": "0xE99A894a69d7c2e3C92E61B64C505A6a57d2bC07",
"id": 3695
},
{
"coin": 7825267,
"type": "coin",
"id": 3698
},
{
"coin": 1533,
"type": "coin",
"id": 3702
},
{
"coin": 360,
"type": "coin",
"id": 3704
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3707
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3712
},
{
"coin": 714,
"type": "token",
"token_id": "LTO-BDF",
"id": 3714
},
{
"coin": 60,
"type": "token",
"token_id": "0x3C6A7aB47B5F058Be0e7C7fE1A4b7925B8aCA40e",
"id": 3715
},
{
"coin": 488,
"type": "coin",
"id": 3716
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3717
},
{
"coin": 195,
"type": "token",
"token_id": "1002000",
"id": 3718
},
{
"coin": 60,
"type": "token",
"token_id": "0xA4Bdb11dc0a2bEC88d24A3aa1E6Bb17201112eBe",
"id": 3719
},
{
"coin": 60,
"type": "token",
"token_id": "0xa66Daa57432024023DB65477BA87D4E7F5f95213",
"id": 3721
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3722
},
{
"coin": 60,
"type": "token",
"token_id": "0x446C9033E7516D820cc9a2ce2d0B7328b579406F",
"id": 3724
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3727
},
{
"coin": 60,
"type": "token",
"token_id": "0xaff84e86d72EDb971341a6A66eb2dA209446FA14",
"id": 3730
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3731
},
{
"coin": 422,
"type": "coin",
"id": 3732
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3733
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3734
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3735
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3737
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3738
},
{
"coin": 60,
"type": "token",
"token_id": "0x4983F767b1Bc44328E434729dDabea0a064cA1aC",
"id": 3739
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3741
},
{
"coin": 60,
"type": "token",
"token_id": "0x07eF9E82721AC16809D24DAfBE1792Ce01654DB4",
"id": 3742
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3743
},
{
"coin": 60,
"type": "token",
"token_id": "0x15A664416E42766A6cC0a1221d9C088548a6E731",
"id": 3744
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3745
},
{
"coin": 60,
"type": "token",
"token_id": "0x796E47B85A0d759F300f1de96A3583004235D4D8",
"id": 3746
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3748
},
{
"coin": 398,
"type": "coin",
"id": 3750
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3753
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3754
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3755
},
{
"coin": 60,
"type": "token",
"token_id": "0x1d464Ac5e046e5fE280c9588eDF8eB681b07008F",
"id": 3757
},
{
"coin": 60,
"type": "token",
"token_id": "0x21D5A14e625d767Ce6b7A167491C2d18e0785fDa",
"id": 3759
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3760
},
{
"coin": 60,
"type": "token",
"token_id": "0x0F72714B35a366285Df85886A2eE174601292A17",
"id": 3762
},
{
"coin": 60,
"type": "token",
"token_id": "0x0a2D9370cF74Da3FD3dF5d764e394Ca8205C50B6",
"id": 3764
},
{
"coin": 60,
"type": "token",
"token_id": "0x2eC95B8edA549B79a1248335A39d299d00Ed314C",
"id": 3766
},
{
"coin": 60,
"type": "token",
"token_id": "0x1864cE27E9F7517047933CaAE530674e8C70b8A7",
"id": 3768
},
{
"coin": 60,
"type": "token",
"token_id": "0x6fE355c62C6faf6946cE888fFABa9fD12355ae27",
"id": 3769
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3770
},
{
"coin": 522,
"type": "coin",
"id": 3772
},
{
"coin": 60,
"type": "token",
"token_id": "0xaea46A60368A7bD060eec7DF8CBa43b7EF41Ad85",
"id": 3773
},
{
"coin": 60,
"type": "token",
"token_id": "0x9f0f1Be08591AB7d990faf910B38ed5D60e4D5Bf",
"id": 3774
},
{
"coin": 60,
"type": "token",
"token_id": "0x919D3a363776B1ceec9352610c82dfaf80Edc32d",
"id": 3776
},
{
"coin": 60,
"type": "token",
"token_id": "0x01cC4151fe5f00EfB8dF2F90ff833725d3a482a3",
"id": 3777
},
{
"coin": 60,
"type": "token",
"token_id": "0x5c872500c00565505F3624AB435c222E558E9ff8",
"id": 3779
},
{
"coin": 60,
"type": "token",
"token_id": "0x4b7aD3a56810032782Afce12d7d27122bDb96efF",
"id": 3780
},
{
"coin": 60,
"type": "token",
"token_id": "0xB31C219959E06f9aFBeB36b388a4BaD13E802725",
"id": 3782
},
{
"coin": 714,
"type": "token",
"token_id": "ANKR-E97",
"id": 3783
},
{
"coin": 20000714,
"type": "token",
"token_id": "<KEY>",
"id": 3783
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3784
},
{
"coin": 60,
"type": "token",
"token_id": "0xa957045A12D270e2eE0dcA9A3340c340e05d4670",
"id": 3785
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3787
},
{
"coin": 60,
"type": "token",
"token_id": "0xF29226914595052a04F5AFbe6410D0C3eD707548",
"id": 3788
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3789
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3790
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3791
},
{
"coin": 60,
"type": "token",
"token_id": "0x30680AC0a8A993088223925265fD7a76bEb87E7F",
"id": 3792
},
{
"coin": 402,
"type": "coin",
"id": 3793
},
{
"coin": 118,
"type": "coin",
"id": 3794
},
{
"coin": 20000714,
"type": "token",
"token_id": "0x0Eb3a705fc54725037CC9e008bDede697f62F335",
"id": 3794
},
{
"coin": 60,
"type": "token",
"token_id": "0xE5B826Ca2Ca02F09c1725e9bd98d9a8874C30532",
"id": 3795
},
{
"coin": 60,
"type": "token",
"token_id": "0x420167D87d35c3A249b32Ef6225872fBD9aB85D2",
"id": 3796
},
{
"coin": 60,
"type": "token",
"token_id": "0x57C09A8de0b0F471F8567609777aDdFfb5c46a08",
"id": 3797
},
{
"coin": 225,
"type": "coin",
"id": 3798
},
{
"coin": 60,
"type": "token",
"token_id": "0x1C1C14A6B5074905Ce5d367B0A7E098b58EbFD47",
"id": 3800
},
{
"coin": 60,
"type": "token",
"token_id": "0x26fb86579e371c7AEdc461b2DdEF0A8628c93d3B",
"id": 3801
},
{
"coin": 60,
"type": "token",
"token_id": "0xc15A399c4eA7815fE36857C9E290EE452A5D6B21",
"id": 3805
},
{
"coin": 60,
"type": "token",
"token_id": "0x9B39A0B97319a9bd5fed217c1dB7b030453bac91",
"id": 3806
},
{
"coin": 60,
"type": "token",
"token_id": "0x8A732BC91c33c167F868E0af7e6f31e0776d0f71",
"id": 3807
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3808
},
{
"coin": 60,
"type": "token",
"token_id": "0x0A913beaD80F321E7Ac35285Ee10d9d922659cB7",
"id": 3809
},
{
"coin": 60,
"type": "token",
"token_id": "0xa96F31F1C187c28980176C3A27ba7069f48abDE4",
"id": 3810
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3811
},
{
"coin": 60,
"type": "token",
"token_id": "0x4a57E687b9126435a9B19E4A802113e266AdeBde",
"id": 3812
},
{
"coin": 60,
"type": "token",
"token_id": "0x4946583c5b86E01cCD30c71a05617D06E3E73060",
"id": 3813
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3814
},
{
"coin": 714,
"type": "token",
"token_id": "XBASE-CD2",
"id": 3815
},
{
"coin": 60,
"type": "token",
"token_id": "0xdF1D6405df92d981a2fB3ce68F6A03baC6C0E41F",
"id": 3816
},
{
"coin": 60,
"type": "token",
"token_id": "0xA00425D3e2D3E9FF74F3e112B4D3A7978d7D88c2",
"id": 3818
},
{
"coin": 60,
"type": "token",
"token_id": "0xA0008F510fE9eE696E7E320C9e5cbf61E27791Ee",
"id": 3819
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3820
},
{
"coin": 500,
"type": "token",
"token_id": "tfuel",
"id": 3822
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3823
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3824
},
{
"coin": 60,
"type": "token",
"token_id": "0xdcD85914b8aE28c1E62f1C488E1D968D5aaFfE2b",
"id": 3826
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3828
},
{
"coin": 698,
"type": "coin",
"id": 3830
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3832
},
{
"coin": 60,
"type": "token",
"token_id": "0xd35833f9255FB28cC6b91aCB8A66Ba6429D6Ef5A",
"id": 3833
},
{
"coin": 60,
"type": "token",
"token_id": "0x4571f3a386d1bd18E25d70d117e7067FA0Bd9D08",
"id": 3834
},
{
"coin": 60,
"type": "token",
"token_id": "0xff56Cc6b1E6dEd347aA0B7676C85AB0B3D08B0FA",
"id": 3835
},
{
"coin": 60,
"type": "token",
"token_id": "0xA809d363A66c576A2a814CDBfEFC107C600A55f0",
"id": 3838
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3840
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3842
},
{
"coin": 60,
"type": "token",
"token_id": "0x1822126fEedb4C7d61EecdBE3682FE61e91383d6",
"id": 3844
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3845
},
{
"coin": 20000714,
"type": "token",
"token_id": "0x3f515f0a8e93F2E2f891ceeB3Db4e62e202d7110",
"id": 3845
},
{
"coin": 60,
"type": "token",
"token_id": "0x1367D4a67C1719B58C7e05dF8768226Fa768279a",
"id": 3848
},
{
"coin": 60,
"type": "token",
"token_id": "0xF4FE95603881D0e07954fD7605E0e9a916e42C44",
"id": 3849
},
{
"coin": 60,
"type": "token",
"token_id": "0x5137A403Dd25e48DE528912a4aF62881e625D801",
"id": 3852
},
{
"coin": 714,
"type": "token",
"token_id": "MTV-4C6",
"id": 3853
},
{
"coin": 5555,
"type": "coin",
"id": 3854
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3856
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3858
},
{
"coin": 60,
"type": "token",
"token_id": "0x6F919D67967a97EA36195A2346d9244E60FE0dDB",
"id": 3860
},
{
"coin": 60,
"type": "token",
"token_id": "0x00E150D741Eda1d49d341189CAE4c08a73a49C95",
"id": 3861
},
{
"coin": 60,
"type": "token",
"token_id": "0x8716Fc5Da009D3A208f0178b637a50F4ef42400F",
"id": 3863
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3864
},
{
"coin": 60,
"type": "token",
"token_id": "0xd28cFec79dB8d0A225767D06140aee280718AB7E",
"id": 3865
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3866
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3867
},
{
"coin": 60,
"type": "token",
"token_id": "0xFFc63b9146967A1ba33066fB057EE3722221aCf0",
"id": 3869
},
{
"coin": 714,
"type": "token",
"token_id": "LIT-<PASSWORD>",
"id": 3870
},
{
"coin": 714,
"type": "token",
"token_id": "NEW-<PASSWORD>",
"id": 3871
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3872
},
{
"coin": 60,
"type": "token",
"token_id": "0xEF19F4E48830093Ce5bC8b3Ff7f903A0AE3E9Fa1",
"id": 3873
},
{
"coin": 60,
"type": "token",
"token_id": "0x297E4e5e59Ad72B1B0A2fd446929e76117be0E0a",
"id": 3875
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3876
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3881
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3883
},
{
"coin": 60,
"type": "token",
"token_id": "0x8c15Ef5b4B21951d50E53E4fbdA8298FFAD25057",
"id": 3884
},
{
"coin": 60,
"type": "token",
"token_id": "0x056dD20b01799E9C1952c7c9a5ff4409a6110085",
"id": 3885
},
{
"coin": 60,
"type": "token",
"token_id": "0x2d71983E810B9e95258966B9c164C4d61a829bA9",
"id": 3886
},
{
"coin": 60,
"type": "token",
"token_id": "0x19cA83a13b4C4BE43FA82c5E415E16f1D86f57F7",
"id": 3888
},
{
"coin": 60,
"type": "token",
"token_id": "0x90D46A9636B973f18186541d1B04ed3621a49Cb0",
"id": 3889
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3890
},
{
"coin": 714,
"type": "token",
"token_id": "NOW-<PASSWORD>",
"id": 3893
},
{
"coin": 60,
"type": "token",
"token_id": "0xc3e2de0b661cF58F66BdE8E896905399ded58af5",
"id": 3895
},
{
"coin": 60,
"type": "token",
"token_id": "0xCFAc2916Ec118a0252A7766C513eE7c71b384b5E",
"id": 3896
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3897
},
{
"coin": 4242,
"type": "coin",
"id": 3898
},
{
"coin": 60,
"type": "token",
"token_id": "0x56e0B2C7694E6e10391E870774daA45cf6583486",
"id": 3905
},
{
"coin": 230,
"type": "coin",
"id": 3907
},
{
"coin": 60,
"type": "token",
"token_id": "0x7AFeBBB46fDb47ed17b22ed075Cde2447694fB9e",
"id": 3911
},
{
"coin": 60,
"type": "token",
"token_id": "0xdD94842C15abfe4c9bAFE4222adE02896Beb064c",
"id": 3912
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3916
},
{
"coin": 60,
"type": "token",
"token_id": "0x7865af71cf0b288b4E7F654f4F7851EB46a2B7F8",
"id": 3917
},
{
"coin": 19165,
"type": "coin",
"id": 3918
},
{
"coin": 60,
"type": "token",
"token_id": "0x10c71515602429C19d53011EA7040B87a4894838",
"id": 3920
},
{
"coin": 60,
"type": "token",
"token_id": "0x5c62Da804298D5972a323C80B539B8E7517a0dDe",
"id": 3923
},
{
"coin": 714,
"type": "token",
"token_id": "DREP-7D2",
"id": 3924
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3925
},
{
"coin": 60,
"type": "token",
"token_id": "0xd36E9F8F194A47B10aF16C7656a68EBa1DFe88e4",
"id": 3927
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3928
},
{
"coin": 60,
"type": "token",
"token_id": "0x9D8bE94D0612170cE533AC4d7B43cc3cd91E5a1A",
"id": 3929
},
{
"coin": 1001,
"type": "coin",
"id": 3930
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3931
},
{
"coin": 60,
"type": "token",
"token_id": "0x0F237D5eA7876E0e2906034D98FDB20D43666ad4",
"id": 3932
},
{
"coin": 335,
"type": "coin",
"id": 3933
},
{
"coin": 60,
"type": "token",
"token_id": "0x6c3BE406174349cfa4501654313d97e6a31072e1",
"id": 3934
},
{
"coin": 60,
"type": "token",
"token_id": "0x0488401c3F535193Fa8Df029d9fFe615A06E74E6",
"id": 3935
},
{
"coin": 60,
"type": "token",
"token_id": "0x247551F2EB3362E222c742E9c788B8957D9BC87e",
"id": 3936
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3937
},
{
"coin": 714,
"type": "token",
"token_id": "MZK-2C7",
"id": 3938
},
{
"coin": 60,
"type": "token",
"token_id": "0x9B1E1FC958B83e801d1342F9f9BA7dA3A55bA1eF",
"id": 3939
},
{
"coin": 60,
"type": "token",
"token_id": "0xcecede5A20645EAc6ca2032eeEb1063572D63c29",
"id": 3940
},
{
"coin": 60,
"type": "token",
"token_id": "0xE54B3458C47E44C37a267E7C633AFEF88287C294",
"id": 3944
},
{
"coin": 60,
"type": "token",
"token_id": "0x799a4202c12ca952cB311598a024C80eD371a41e",
"id": 3945
},
{
"coin": 1023,
"type": "coin",
"id": 3945
},
{
"coin": 60,
"type": "token",
"token_id": "0x115eC79F1de567eC68B7AE7eDA501b406626478e",
"id": 3946
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3947
},
{
"coin": 60,
"type": "token",
"token_id": "0xEe9E5eFF401ee921b138490d00CA8D1F13f67A72",
"id": 3949
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3953
},
{
"coin": 60,
"type": "token",
"token_id": "0xa960d2bA7000d58773E7fa5754DeC3Bb40A069D5",
"id": 3954
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3956
},
{
"coin": 60,
"type": "token",
"token_id": "0x2AF5D2aD76741191D15Dfe7bF6aC92d4Bd912Ca3",
"id": 3957
},
{
"coin": 60,
"type": "token",
"token_id": "0xAcCe88F5A63A5e65DB9AA7303720bE16b556E751",
"id": 3960
},
{
"coin": 714,
"type": "token",
"token_id": "VDX-A17",
"id": 3962
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3963
},
{
"coin": 60,
"type": "token",
"token_id": "0x8762db106B2c2A0bccB3A80d1Ed41273552616E8",
"id": 3964
},
{
"coin": 60,
"type": "token",
"token_id": "0xE02784175C3BE0DEa7CC0F284041b64503639E66",
"id": 3965
},
{
"coin": 5249354,
"type": "coin",
"id": 3966
},
{
"coin": 60,
"type": "token",
"token_id": "0xBA14b245d449965BdBeB630ebe135B569474F5b1",
"id": 3967
},
{
"coin": 60,
"type": "token",
"token_id": "0x6aB4A7d75B0A42B6Bc83E852daB9E121F9C610Aa",
"id": 3968
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3970
},
{
"coin": 357,
"type": "coin",
"id": 3973
},
{
"coin": 431,
"type": "coin",
"id": 3974
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3975
},
{
"coin": 426,
"type": "coin",
"id": 3976
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3977
},
{
"coin": 60,
"type": "token",
"token_id": "0x915044526758533dfB918ecEb6e44bc21632060D",
"id": 3978
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3980
},
{
"coin": 60,
"type": "token",
"token_id": "0x25200235cA7113C2541E70dE737c41f5e9AcD1F6",
"id": 3983
},
{
"coin": 60,
"type": "token",
"token_id": "0xB9843e5dE0f37d1e22C8075e5814e13565FE7C22",
"id": 3984
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3985
},
{
"coin": 522,
"type": "coin",
"id": 3986
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3988
},
{
"coin": 60,
"type": "token",
"token_id": "0xa8EdA9D4Aee0eb882F8752C6bA7e16d9233C9Ad2",
"id": 3989
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3990
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3991
},
{
"coin": 714,
"type": "token",
"token_id": "COTI-CBB",
"id": 3992
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3994
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3995
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 3996
},
{
"coin": 254,
"type": "coin",
"id": 3997
},
{
"coin": 60,
"type": "token",
"token_id": "0x838d8e11B160deC88Fe62BF0f743FB7000941e13",
"id": 3998
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4000
},
{
"coin": 60,
"type": "token",
"token_id": "0x3810A4Ddf41E586Fa0dbA1463A7951B748cEcFca",
"id": 4001
},
{
"coin": 60,
"type": "token",
"token_id": "0x182A603541a4483c308475147D621bbB4E2587c6",
"id": 4004
},
{
"coin": 60,
"type": "token",
"token_id": "0x3EB55D5B22Ee0f9B03D59B4994C5AE7fe811bE92",
"id": 4005
},
{
"coin": 60,
"type": "token",
"token_id": "0xDe7D85157d9714EADf595045CC12Ca4A5f3E2aDb",
"id": 4006
},
{
"coin": 60,
"type": "token",
"token_id": "0x1519AFf03b3E23722511D2576c769A77Baf09580",
"id": 4007
},
{
"coin": 294,
"type": "coin",
"id": 4008
},
{
"coin": 60,
"type": "token",
"token_id": "0x971d048E737619884f2df75e31c7Eb6412392328",
"id": 4010
},
{
"coin": 60,
"type": "token",
"token_id": "0x1B80eeeaDcC590f305945BCc258cFa770Bbe1890",
"id": 4011
},
{
"coin": 60,
"type": "token",
"token_id": "0xae353DaEed8DCc7a9a12027F7e070c0A50B7b6A4",
"id": 4012
},
{
"coin": 451,
"type": "coin",
"id": 4018
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4020
},
{
"coin": 714,
"type": "token",
"token_id": "<KEY>",
"id": 4023
},
{
"coin": 20000714,
"type": "token",
"token_id": "<KEY>",
"id": 4023
},
{
"coin": 714,
"type": "token",
"token_id": "R<PASSWORD>",
"id": 4024
},
{
"coin": 339,
"type": "coin",
"id": 4027
},
{
"coin": 283,
"type": "coin",
"id": 4030
},
{
"coin": 60,
"type": "token",
"token_id": "0xa249DE6948022783765Fee4850d7b85E43118FCc",
"id": 4034
},
{
"coin": 714,
"type": "token",
"token_id": "HNST-3C9",
"id": 4035
},
{
"coin": 60,
"type": "token",
"token_id": "0x589891a198195061Cb8ad1a75357A3b7DbaDD7Bc",
"id": 4036
},
{
"coin": 714,
"type": "token",
"token_id": "COS-<PASSWORD>",
"id": 4036
},
{
"coin": 714,
"type": "token",
"token_id": "MBL-2D2",
"id": 4038
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4039
},
{
"coin": 60,
"type": "token",
"token_id": "0x11eeF04c884E24d9B7B4760e7476D06ddF797f36",
"id": 4041
},
{
"coin": 60,
"type": "token",
"token_id": "0x6E605c269E0C92e70BEeB85486f1fC550f9380BD",
"id": 4045
},
{
"coin": 60,
"type": "token",
"token_id": "0x09Ccd2DA5dCDd0510268d4979e792381337138b8",
"id": 4047
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4048
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4049
},
{
"coin": 60,
"type": "token",
"token_id": "0x1BeEF31946fbbb40B877a72E4ae04a8D1A5Cee06",
"id": 4051
},
{
"coin": 60,
"type": "token",
"token_id": "0x3166C570935a7D8554c8f4eA792ff965D2EFe1f2",
"id": 4053
},
{
"coin": 195,
"type": "token",
"token_id": "<KEY>",
"id": 4054
},
{
"coin": 60,
"type": "token",
"token_id": "0xD46bA6D942050d489DBd938a2C909A5d5039A161",
"id": 4056
},
{
"coin": 60,
"type": "token",
"token_id": "0x95a41fB80ca70306e9Ecf4e51ceA31bD18379C18",
"id": 4057
},
{
"coin": 10000,
"type": "coin",
"id": 4058
},
{
"coin": 60,
"type": "token",
"token_id": "0x72955eCFf76E48F2C8AbCCe11d54e5734D6f3657",
"id": 4060
},
{
"coin": 60,
"type": "token",
"token_id": "0x186a33d4dBcd700086A26188DcB74E69bE463665",
"id": 4063
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4064
},
{
"coin": 714,
"type": "token",
"token_id": "CHZ-ECD",
"id": 4066
},
{
"coin": 60,
"type": "token",
"token_id": "0x6393E822874728f8Afa7e1C9944E417D37CA5878",
"id": 4067
},
{
"coin": 60,
"type": "token",
"token_id": "0x8515cD0f00aD81996d24b9A9C35121a3b759D6Cd",
"id": 4069
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4071
},
{
"coin": 714,
"type": "token",
"token_id": "SPIN-9DD",
"id": 4073
},
{
"coin": 60,
"type": "token",
"token_id": "0xB4272071eCAdd69d933AdcD19cA99fe80664fc08",
"id": 4075
},
{
"coin": 60,
"type": "token",
"token_id": "0xe0c6CE3e73029F201e5C0Bedb97F67572A93711C",
"id": 4076
},
{
"coin": 714,
"type": "token",
"token_id": "SLV-986",
"id": 4079
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4084
},
{
"coin": 714,
"type": "token",
"token_id": "ERD-D06",
"id": 4086
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4088
},
{
"coin": 714,
"type": "token",
"token_id": "BLINK-9C6",
"id": 4089
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4091
},
{
"coin": 714,
"type": "token",
"token_id": "DUSK-45E",
"id": 4092
},
{
"coin": 60,
"type": "token",
"token_id": "0xff8Be4B22CeDC440591dcB1E641EB2a0dd9d25A5",
"id": 4093
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4094
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4096
},
{
"coin": 424242,
"type": "coin",
"id": 4097
},
{
"coin": 60,
"type": "token",
"token_id": "0x2467AA6B5A2351416fD4C3DeF8462d841feeecEC",
"id": 4100
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4103
},
{
"coin": 60,
"type": "token",
"token_id": "0x187D1018E8ef879BE4194d6eD7590987463eAD85",
"id": 4104
},
{
"coin": 60,
"type": "token",
"token_id": "0x44E2ca91ceA1147f1B503e669f06CD11FB0C5490",
"id": 4105
},
{
"coin": 60,
"type": "token",
"token_id": "0x6020Da0F7c1857dBE4431Ec92A15cC318D933eAa",
"id": 4108
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4109
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4113
},
{
"coin": 60,
"type": "token",
"token_id": "0x34D6A0F5C2f5D0082141fE73d93B9dd00ca7CE11",
"id": 4114
},
{
"coin": 714,
"type": "token",
"token_id": "PVT-554",
"id": 4115
},
{
"coin": 60,
"type": "token",
"token_id": "0xd31695a1d35E489252CE57b129FD4b1B05E6AcaC",
"id": 4116
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4118
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4120
},
{
"coin": 489,
"type": "coin",
"id": 4122
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4124
},
{
"coin": 60,
"type": "token",
"token_id": "0x36151737B45017234E9570Cf9a1cAc97138953C2",
"id": 4125
},
{
"coin": 60,
"type": "token",
"token_id": "0xDB7Eab9bA6be88B869F738f6DEeBa96d49Fe13fd",
"id": 4128
},
{
"coin": 354,
"type": "coin",
"id": 4129
},
{
"coin": 354,
"type": "coin",
"id": 4129
},
{
"coin": 60,
"type": "token",
"token_id": "0x5EdC1a266E8b2c5E8086d373725dF0690af7e3Ea",
"id": 4133
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4134
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4138
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4139
},
{
"coin": 60,
"type": "token",
"token_id": "0x225927F8fa71d16EE07968B8746364D1d9F839bD",
"id": 4141
},
{
"coin": 714,
"type": "token",
"token_id": "<KEY>",
"id": 4142
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4144
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4154
},
{
"coin": 388,
"type": "coin",
"id": 4156
},
{
"coin": 714,
"type": "token",
"token_id": "RUNE-B1A",
"id": 4157
},
{
"coin": 347,
"type": "coin",
"id": 4160
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4162
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4167
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4168
},
{
"coin": 330,
"type": "coin",
"id": 4172
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4173
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4175
},
{
"coin": 60,
"type": "token",
"token_id": "0x386cABc0b14A507A4e024DEA15554342865B20DE",
"id": 4176
},
{
"coin": 363,
"type": "coin",
"id": 4178
},
{
"coin": 714,
"type": "token",
"token_id": "GMAT-FC8",
"id": 4182
},
{
"coin": 714,
"type": "token",
"token_id": "<KEY>",
"id": 4186
},
{
"coin": 60,
"type": "token",
"token_id": "0x2E2E0a28f6585e895DD646a363BAE29B77B88a31",
"id": 4187
},
{
"coin": 60,
"type": "token",
"token_id": "0x40AdFc7c23c22Cc06f94F199a4750D7196F46fbe",
"id": 4188
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4189
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4190
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4191
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4193
},
{
"coin": 60,
"type": "token",
"token_id": "0x5e3845A1d78DB544613EdbE43Dc1Ea497266d3b8",
"id": 4194
},
{
"coin": 714,
"type": "token",
"token_id": "FTT-F11",
"id": 4195
},
{
"coin": 714,
"type": "token",
"token_id": "PLG-D8D",
"id": 4196
},
{
"coin": 714,
"type": "token",
"token_id": "SHR-DB6",
"id": 4197
},
{
"coin": 60,
"type": "token",
"token_id": "0x03B155AF3F4459193A276395dD76e357BB472DA1",
"id": 4199
},
{
"coin": 239,
"type": "coin",
"id": 4200
},
{
"coin": 60,
"type": "token",
"token_id": "0xA829F97373069ee5d23175e4105dF8fD49238Be7",
"id": 4202
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4205
},
{
"coin": 195,
"type": "token",
"token_id": "<KEY>",
"id": 4206
},
{
"coin": 60,
"type": "token",
"token_id": "0x026e62dDEd1a6aD07D93D39f96b9eabd59665e0d",
"id": 4207
},
{
"coin": 60,
"type": "token",
"token_id": "0x07597255910a51509CA469568B048F2597E72504",
"id": 4213
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4214
},
{
"coin": 60,
"type": "token",
"token_id": "0x35b08722AA26bE119c1608029CcbC976ac5C1082",
"id": 4215
},
{
"coin": 60,
"type": "token",
"token_id": "0x746DdA2ea243400D5a63e0700F190aB79f06489e",
"id": 4217
},
{
"coin": 60,
"type": "token",
"token_id": "0xDe4C5a791913838027a2185709E98c5C6027EA63",
"id": 4220
},
{
"coin": 714,
"type": "token",
"token_id": "MC<PASSWORD>69",
"id": 4224
},
{
"coin": 60,
"type": "token",
"token_id": "0xE5CAeF4Af8780E59Df925470b050Fb23C43CA68C",
"id": 4228
},
{
"coin": 60,
"type": "token",
"token_id": "0xeBF4CA5319F406602EEFf68da16261f1216011B5",
"id": 4229
},
{
"coin": 60,
"type": "token",
"token_id": "0x4Eeea7B48b9C3ac8F70a9c932A8B1E8a5CB624c7",
"id": 4233
},
{
"coin": 714,
"type": "token",
"token_id": "EVT-49B",
"id": 4238
},
{
"coin": 714,
"type": "token",
"token_id": "<KEY>",
"id": 4239
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4240
},
{
"coin": 60,
"type": "token",
"token_id": "0x307d45Afbb7E84F82ef3D251A6bb0F00Edf632E4",
"id": 4242
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4243
},
{
"coin": 60,
"type": "token",
"token_id": "0x6b4689E4514957699eDeB2Ee91C947F18E439806",
"id": 4250
},
{
"coin": 60,
"type": "token",
"token_id": "0x6CbEDEc4F1ac9D874987D2769596544E0d9161ab",
"id": 4251
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4252
},
{
"coin": 714,
"type": "token",
"token_id": "CBM-4B2",
"id": 4253
},
{
"coin": 60,
"type": "token",
"token_id": "0x6CE21e5f5383c95691d243879A86A6025E0870c0",
"id": 4254
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4255
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4257
},
{
"coin": 60,
"type": "token",
"token_id": "0xD536bBd5414A8C2beEd82a63737B9327D2FA35a6",
"id": 4260
},
{
"coin": 60,
"type": "token",
"token_id": "0x01C0987E88F778DF6640787226bc96354E1a9766",
"id": 4262
},
{
"coin": 19169,
"type": "coin",
"id": 4264
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4266
},
{
"coin": 60,
"type": "token",
"token_id": "0xE66747a101bFF2dBA3697199DCcE5b743b454759",
"id": 4269
},
{
"coin": 60,
"type": "token",
"token_id": "0xD91a6162F146EF85922d9A15eE6eB14A00344586",
"id": 4273
},
{
"coin": 60,
"type": "token",
"token_id": "0x0C6f5F7D555E7518f6841a79436BD2b1Eef03381",
"id": 4275
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4276
},
{
"coin": 60,
"type": "token",
"token_id": "0x8CE9137d39326AD0cD6491fb5CC0CbA0e089b6A9",
"id": 4279
},
{
"coin": 20000714,
"type": "token",
"token_id": "<KEY>",
"id": 4279
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4280
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4281
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4283
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4284
},
{
"coin": 60,
"type": "token",
"token_id": "0x6556D2EC4D96Da39CF75cbE50D58fae90079800a",
"id": 4285
},
{
"coin": 377,
"type": "coin",
"id": 4286
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4287
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4291
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4293
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4296
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4298
},
{
"coin": 60,
"type": "token",
"token_id": "0x0c963A1B52Eb97C5e457c7D76696F8b95c3087eD",
"id": 4299
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4300
},
{
"coin": 60,
"type": "token",
"token_id": "0x5E3002dff591C5e75Bb9DEdae268049742E6b13a",
"id": 4301
},
{
"coin": 60,
"type": "token",
"token_id": "0x26946adA5eCb57f3A1F91605050Ce45c482C9Eb1",
"id": 4306
},
{
"coin": 60,
"type": "token",
"token_id": "0x2730d6FdC86C95a74253BefFaA8306B40feDecbb",
"id": 4307
},
{
"coin": 370,
"type": "coin",
"id": 4315
},
{
"coin": 60,
"type": "token",
"token_id": "0x9388F54FA978Aa9e24395a8b69033304ECcea4df",
"id": 4325
},
{
"coin": 60,
"type": "token",
"token_id": "0xac4D22e40bf0B8eF4750a99ED4E935B99A42685E",
"id": 4359
},
{
"coin": 60,
"type": "token",
"token_id": "0x12683Dc9eEc95a5F742D40206e73319E6b9d8A91",
"id": 4360
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4361
},
{
"coin": 60,
"type": "token",
"token_id": "0x5d285F735998F36631F678FF41fb56A10A4d0429",
"id": 4366
},
{
"coin": 1901,
"type": "coin",
"id": 4384
},
{
"coin": 60,
"type": "token",
"token_id": "0xC87F95aA269DD300D9F1cE49d8E1FD8119A10456",
"id": 4427
},
{
"coin": 60,
"type": "token",
"token_id": "0x00fC270C9cc13e878Ab5363D00354bebF6f05C15",
"id": 4430
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4431
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4441
},
{
"coin": 714,
"type": "token",
"token_id": "<KEY>",
"id": 4451
},
{
"coin": 60,
"type": "token",
"token_id": "0x593114f03A0A575aece9ED675e52Ed68D2172B8c",
"id": 4452
},
{
"coin": 714,
"type": "token",
"token_id": "ECO-083",
"id": 4466
},
{
"coin": 60,
"type": "token",
"token_id": "0x65cCD72c0813CE6f2703593B633202a0F3Ca6a0c",
"id": 4467
},
{
"coin": 60,
"type": "token",
"token_id": "0x8D8129963291740dDDd917ab01af18c7aed4BA58",
"id": 4469
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4490
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4493
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4494
},
{
"coin": 195,
"type": "token",
"token_id": "<KEY>",
"id": 4495
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4496
},
{
"coin": 60,
"type": "token",
"token_id": "0x60715E436c37444E29772c0D26a98Ae1E8E1A989",
"id": 4508
},
{
"coin": 438,
"type": "coin",
"id": 4512
},
{
"coin": 60,
"type": "token",
"token_id": "0xa0B207103F764A920b4AF9e691F5bd956DE14DED",
"id": 4518
},
{
"coin": 60,
"type": "token",
"token_id": "0x8E30ea2329D95802Fd804f4291220b0e2F579812",
"id": 4520
},
{
"coin": 60,
"type": "token",
"token_id": "0xa101E27f06A97985B925E244111b61560Ecd97DB",
"id": 4534
},
{
"coin": 497,
"type": "coin",
"id": 4542
},
{
"coin": 1926,
"type": "coin",
"id": 4545
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4563
},
{
"coin": 60,
"type": "token",
"token_id": "0xB9EefC4b0d472A44be93970254Df4f4016569d27",
"id": 4566
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4567
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4577
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4586
},
{
"coin": 60,
"type": "token",
"token_id": "0xA1248c718d52752b2cC257eeb0eBa900408dAeB8",
"id": 4588
},
{
"coin": 60,
"type": "token",
"token_id": "0x5c5887E55bBe41472AcDBA5FAe989788C6f7ab59",
"id": 4595
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4620
},
{
"coin": 60,
"type": "token",
"token_id": "0xeb269732ab75A6fD61Ea60b06fE994cD32a83549",
"id": 4621
},
{
"coin": 60,
"type": "token",
"token_id": "0xb8c6ad2586bB71d518C2aaf510Efe91f82022F58",
"id": 4623
},
{
"coin": 517,
"type": "coin",
"id": 4630
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4633
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4644
},
{
"coin": 60,
"type": "token",
"token_id": "0x827D53c8170aF52625f414bde00326Fc8A085E86",
"id": 4646
},
{
"coin": 60,
"type": "token",
"token_id": "0x0C3eF32f802967DB75B9D49fE1e76620151cCB81",
"id": 4674
},
{
"coin": 60,
"type": "token",
"token_id": "0x7DE2d123042994737105802D2abD0A10a7BdE276",
"id": 4676
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4677
},
{
"coin": 60,
"type": "token",
"token_id": "0x301C755bA0fcA00B1923768Fffb3Df7f4E63aF31",
"id": 4678
},
{
"coin": 60,
"type": "token",
"token_id": "0xBA11D00c5f74255f56a5E366F4F77f5A186d7f55",
"id": 4679
},
{
"coin": 20000714,
"type": "token",
"token_id": "<KEY>",
"id": 4679
},
{
"coin": 60,
"type": "token",
"token_id": "0x2396FBC0e2E3AE4B7206EbDb5706e2a5920349CB",
"id": 4681
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4682
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4685
},
{
"coin": 714,
"type": "token",
"token_id": "BUSD-BD1",
"id": 4687
},
{
"coin": 20000714,
"type": "token",
"token_id": "0xe9e7CEA3DedcA5984780Bafc599bD69ADd087D56",
"id": 4687
},
{
"coin": 60,
"type": "token",
"token_id": "0x60c87297A1fEaDC3C25993FfcadC54e99971e307",
"id": 4693
},
{
"coin": 60,
"type": "token",
"token_id": "0xcD8544DefeDEc7c6b60b5a4232320365b1B21fCc",
"id": 4694
},
{
"coin": 60,
"type": "token",
"token_id": "0x12fD19DAC0Fab61bEd5e0F09091B470C452D4d61",
"id": 4695
},
{
"coin": 161,
"type": "coin",
"id": 4696
},
{
"coin": 60,
"type": "token",
"token_id": "0x3277dd536471a3cBEB0c9486aCad494C95A31E73",
"id": 4701
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4703
},
{
"coin": 60,
"type": "token",
"token_id": "0x45804880De22913dAFE09f4980848ECE6EcbAf78",
"id": 4705
},
{
"coin": 60,
"type": "token",
"token_id": "0x278a83B64C3e3E1139f8E8A52D96360cA3c69A3D",
"id": 4707
},
{
"coin": 60,
"type": "token",
"token_id": "0x93065b5C7Eb63333b8E57a73012D25f687895785",
"id": 4708
},
{
"coin": 60,
"type": "token",
"token_id": "0x8c4E7f814d40f8929F9112C5D09016F923d34472",
"id": 4709
},
{
"coin": 60,
"type": "token",
"token_id": "0x00059AE69c1622A7542EdC15E8d17b060fE307b6",
"id": 4712
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4714
},
{
"coin": 60,
"type": "token",
"token_id": "0xa00a4d5786a6E955e9539d01D78Bf68f3271C050",
"id": 4746
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4748
},
{
"coin": 60,
"type": "token",
"token_id": "0xF784682C82526e245F50975190EF0fff4E4fC077",
"id": 4754
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4757
},
{
"coin": 60,
"type": "token",
"token_id": "0x431ad2ff6a9C365805eBaD47Ee021148d6f7DBe0",
"id": 4758
},
{
"coin": 372,
"type": "coin",
"id": 4762
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4765
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4766
},
{
"coin": 2018,
"type": "coin",
"id": 4769
},
{
"coin": 60,
"type": "token",
"token_id": "0xc2058F5D9736E8df8Ba03ca3582B7CD6aC613658",
"id": 4771
},
{
"coin": 60,
"type": "token",
"token_id": "0xdA4129919F964a3A526D3182Bb03E6449e5a8872",
"id": 4774
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4775
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4777
},
{
"coin": 60,
"type": "token",
"token_id": "0xdF574c24545E5FfEcb9a659c229253D4111d87e1",
"id": 4779
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4785
},
{
"coin": 520,
"type": "coin",
"id": 4787
},
{
"coin": 714,
"type": "token",
"token_id": "V<PASSWORD>",
"id": 4792
},
{
"coin": 60,
"type": "token",
"token_id": "0x37F74e99794853777a10ea1dc08a64C86958F06a",
"id": 4793
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4794
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4797
},
{
"coin": 60,
"type": "token",
"token_id": "0xbDEC45952B5E234EdDC2981B43eeD360826D5087",
"id": 4800
},
{
"coin": 60,
"type": "token",
"token_id": "0xA31B1767e09f842ECFd4bc471Fe44F830E3891AA",
"id": 4804
},
{
"coin": 714,
"type": "token",
"token_id": "<KEY>",
"id": 4805
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4806
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4808
},
{
"coin": 60,
"type": "token",
"token_id": "0xb7e77aEbBe0687d2EfF24Cc90c41A3b6eA74bdAB",
"id": 4809
},
{
"coin": 60,
"type": "token",
"token_id": "0x86FADb80d8D2cff3C3680819E4da99C10232Ba0F",
"id": 4815
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4818
},
{
"coin": 60,
"type": "token",
"token_id": "0x9556f8ee795D991fF371F547162D5efB2769425F",
"id": 4819
},
{
"coin": 60,
"type": "token",
"token_id": "0x37D6E7F287200C740012747d2A79295cAeD2DB35",
"id": 4820
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4822
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4824
},
{
"coin": 60,
"type": "token",
"token_id": "0xe0b9BcD54bF8A730EA5d3f1fFCe0885E911a502c",
"id": 4826
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4831
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4835
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4841
},
{
"coin": 60,
"type": "token",
"token_id": "0x5e040aC72140F0617bC24aB7134c0C6eCae0e965",
"id": 4842
},
{
"coin": 459,
"type": "coin",
"id": 4846
},
{
"coin": 714,
"type": "token",
"token_id": "KAVA-10C",
"id": 4846
},
{
"coin": 20000714,
"type": "token",
"token_id": "0x5F88AB06e8dfe89DF127B2430Bba4Af600866035",
"id": 4846
},
{
"coin": 5757,
"type": "coin",
"id": 4847
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4850
},
{
"coin": 60,
"type": "token",
"token_id": "0xeF1344bDf80BEf3Ff4428d8bECEC3eea4A2cF574",
"id": 4860
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4862
},
{
"coin": 195,
"type": "token",
"token_id": "1002413",
"id": 4867
},
{
"coin": 60,
"type": "token",
"token_id": "0xbACA8D824f471a6b20fdbac25E9e8943B9cD743B",
"id": 4868
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4870
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4871
},
{
"coin": 60,
"type": "token",
"token_id": "0xD27D76A1bA55ce5C0291CCd04feBBe793D22ebF4",
"id": 4872
},
{
"coin": 60,
"type": "token",
"token_id": "0xade7B5f4a421d81DDaD8Ce86f77A0EfE8921E9CC",
"id": 4885
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4901
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4902
},
{
"coin": 60,
"type": "token",
"token_id": "0x193408cA0576B73156Ed42A2EA7D6fD3f6507162",
"id": 4903
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4910
},
{
"coin": 60,
"type": "token",
"token_id": "0x41ad4093349C8A60DE591A3C37dcd184558EaAe3",
"id": 4914
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4917
},
{
"coin": 195,
"type": "token",
"token_id": "<KEY>",
"id": 4918
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4920
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4927
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4928
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4929
},
{
"coin": 60,
"type": "token",
"token_id": "0x5978708d6ccE1CC9640Eed47422D64c91BbD5171",
"id": 4936
},
{
"coin": 60,
"type": "token",
"token_id": "0xfE4455fd433Ed3CA025ec7c43cb8686eD89826CD",
"id": 4937
},
{
"coin": 60,
"type": "token",
"token_id": "0xF70d160102cF7a22c1E432d6928a9d625Db91170",
"id": 4940
},
{
"coin": 60,
"type": "token",
"token_id": "0x2AA4a3E8bB72BE68a31c9c3C98CA7BeC723C6222",
"id": 4941
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4942
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4943
},
{
"coin": 60,
"type": "token",
"token_id": "0x6B175474E89094C44Da98b954EedeAC495271d0F",
"id": 4943
},
{
"coin": 20000714,
"type": "token",
"token_id": "0x1AF3F329e8BE154074D8769D1FFa4eE058B1DBc3",
"id": 4943
},
{
"coin": 60,
"type": "token",
"token_id": "0x0Ba45A8b5d5575935B8158a88C631E9F9C95a2e5",
"id": 4944
},
{
"coin": 60,
"type": "token",
"token_id": "0x3DB99ab08006aeFcC9600972eCA8C202396B4300",
"id": 4946
},
{
"coin": 309,
"type": "coin",
"id": 4948
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4949
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4950
},
{
"coin": 60,
"type": "token",
"token_id": "0xE1bAD922F84b198A08292FB600319300ae32471b",
"id": 4953
},
{
"coin": 60,
"type": "token",
"token_id": "0x49229C3902d49BE6443E01C0251b02780397ab1A",
"id": 4956
},
{
"coin": 491,
"type": "coin",
"id": 4957
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4958
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4975
},
{
"coin": 417,
"type": "coin",
"id": 4978
},
{
"coin": 195,
"type": "token",
"token_id": "1000001",
"id": 4980
},
{
"coin": 195,
"type": "token",
"token_id": "<KEY>",
"id": 4980
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4981
},
{
"coin": 60,
"type": "token",
"token_id": "0x3be6e7bF2cD8E1a0A95597E72ca6D3709bBeFF76",
"id": 4983
},
{
"coin": 60,
"type": "token",
"token_id": "0xB119Ce94D098C18fe380904c24e358bd887F00BE",
"id": 4984
},
{
"coin": 60,
"type": "token",
"token_id": "0xB8E2e2101eD11e9138803cd3e06e16dd19910647",
"id": 4985
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 4989
},
{
"coin": 714,
"type": "token",
"token_id": "XIO-B05",
"id": 4997
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5001
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5005
},
{
"coin": 714,
"type": "token",
"token_id": "TROY-9B8",
"id": 5007
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5008
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5009
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5010
},
{
"coin": 60,
"type": "token",
"token_id": "0x9d5686EaDeA7327F5a0c4820dcA90457A0E88763",
"id": 5011
},
{
"coin": 60,
"type": "token",
"token_id": "0x2b591e99afE9f32eAA6214f7B7629768c40Eeb39",
"id": 5015
},
{
"coin": 60,
"type": "token",
"token_id": "0xB67718b98d52318240c52E71A898335da4A28c42",
"id": 5016
},
{
"coin": 60,
"type": "token",
"token_id": "0xBbe761EA1447A20b75aA485b7BCad4837415d7D7",
"id": 5019
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5025
},
{
"coin": 60,
"type": "token",
"token_id": "0x4575f41308EC1483f3d399aa9a2826d74Da13Deb",
"id": 5026
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5033
},
{
"coin": 434,
"type": "coin",
"id": 5034
},
{
"coin": 60,
"type": "token",
"token_id": "0x4c14114C107D6374EC31981F5F6Cc27A13e22F9a",
"id": 5039
},
{
"coin": 60,
"type": "token",
"token_id": "0x4eE6E959d460dE47DfE58E5E6fBAB330Ce8484b6",
"id": 5045
},
{
"coin": 60,
"type": "token",
"token_id": "0x0E22734e078d6e399BCeE40a549DB591C4EA46cB",
"id": 5046
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5047
},
{
"coin": 60,
"type": "token",
"token_id": "0xB020eD54651831878E5C967e0953A900786178f9",
"id": 5050
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5052
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5054
},
{
"coin": 60,
"type": "token",
"token_id": "0x8F8e787989BC652eeA01A6C88a19f0f379BDF4FD",
"id": 5057
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5058
},
{
"coin": 60,
"type": "token",
"token_id": "0x786001c9c5CA6E502dEB8a8a72480d2147891f32",
"id": 5062
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5064
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5065
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5067
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5069
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5070
},
{
"coin": 60,
"type": "token",
"token_id": "0x6E5a43DB10b04701385A34afb670E404bC7Ea597",
"id": 5072
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5074
},
{
"coin": 60,
"type": "token",
"token_id": "0x73Cee8348b9bDd48c64E13452b8a6fbc81630573",
"id": 5075
},
{
"coin": 60,
"type": "token",
"token_id": "0x45F2aB0ca2116b2e1a70BF5e13293947b25d0272",
"id": 5076
},
{
"coin": 60,
"type": "token",
"token_id": "0xB4a677B0E363c3815d46326954a4E4d2B1ACe357",
"id": 5078
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5079
},
{
"coin": 60,
"type": "token",
"token_id": "0x13339fD07934CD674269726EdF3B5ccEE9DD93de",
"id": 5083
},
{
"coin": 60,
"type": "token",
"token_id": "0xaDA62f7CCd6af6cAcff04ACCBC4f56f3D4FFd4Ef",
"id": 5084
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5086
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5087
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5090
},
{
"coin": 60,
"type": "token",
"token_id": "0x063b98a414EAA1D4a5D4fC235a22db1427199024",
"id": 5093
},
{
"coin": 60,
"type": "token",
"token_id": "0x261638EC8ee8100484130EBD2fEBfDAdC0D8742a",
"id": 5094
},
{
"coin": 60,
"type": "token",
"token_id": "0x0e8e874bb30a5F254f5144EaAE4564C7F73fAbeD",
"id": 5097
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5102
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5105
},
{
"coin": 60,
"type": "token",
"token_id": "0xBdBB0Ee6144544eC814d417B0ad41f16fC8B858E",
"id": 5107
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5113
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5114
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5117
},
{
"coin": 60,
"type": "token",
"token_id": "0xa90C43e0d6c92b8e6171a829beB38Be28a0Ad073",
"id": 5124
},
{
"coin": 1027,
"type": "coin",
"id": 5125
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5126
},
{
"coin": 346,
"type": "coin",
"id": 5127
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5135
},
{
"coin": 356,
"type": "coin",
"id": 5137
},
{
"coin": 60,
"type": "token",
"token_id": "0xD0Bd12A8D5EBCA1E2FA46dA59F1993EC51C3d75c",
"id": 5154
},
{
"coin": 380,
"type": "coin",
"id": 5155
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5156
},
{
"coin": 714,
"type": "token",
"token_id": "BULL-<PASSWORD>",
"id": 5157
},
{
"coin": 714,
"type": "token",
"token_id": "BE<PASSWORD>4C",
"id": 5158
},
{
"coin": 714,
"type": "token",
"token_id": "WR<PASSWORD>",
"id": 5161
},
{
"coin": 60,
"type": "token",
"token_id": "0x79C5a1Ae586322A07BfB60be36E1b31CE8C84A1e",
"id": 5165
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5169
},
{
"coin": 60,
"type": "token",
"token_id": "0xc962ad021a69D457564e985738C719aE3f79B707",
"id": 5170
},
{
"coin": 520,
"type": "coin",
"id": 5175
},
{
"coin": 60,
"type": "token",
"token_id": "0x4922a015c4407F87432B179bb209e125432E4a2A",
"id": 5176
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5179
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5180
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5182
},
{
"coin": 60,
"type": "token",
"token_id": "0x7BD6a4E7DB3A34c485A8DD02b30B6565e3bbC633",
"id": 5185
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5186
},
{
"coin": 60,
"type": "token",
"token_id": "0x8A9C67fee641579dEbA04928c4BC45F66e26343A",
"id": 5187
},
{
"coin": 60,
"type": "token",
"token_id": "0x1fff4Dd33105054E853955C6d0dBa82859C01Cff",
"id": 5193
},
{
"coin": 476,
"type": "coin",
"id": 5200
},
{
"coin": 60,
"type": "token",
"token_id": "0xAFFCDd96531bCd66faED95FC61e443D08F79eFEf",
"id": 5203
},
{
"coin": 60,
"type": "token",
"token_id": "0x688fF43c3c19e4714f0BeB76df8Ee394207Ab411",
"id": 5204
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5208
},
{
"coin": 714,
"type": "token",
"token_id": "ETHBEAR-B2B",
"id": 5216
},
{
"coin": 714,
"type": "token",
"token_id": "ETHBULL-D33",
"id": 5217
},
{
"coin": 60,
"type": "token",
"token_id": "0x70da48f4B7e83c386ef983D4CEF4e58c2c09D8Ac",
"id": 5220
},
{
"coin": 5353,
"type": "coin",
"id": 5221
},
{
"coin": 60,
"type": "token",
"token_id": "0x02b1669bC9EE893edAFf3cADfD326A294d643f99",
"id": 5232
},
{
"coin": 515,
"type": "coin",
"id": 5234
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5243
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5247
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5251
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5253
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5255
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5258
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5260
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5262
},
{
"coin": 60,
"type": "token",
"token_id": "0x5d3a536E4D6DbD6114cc1Ead35777bAB948E3643",
"id": 5263
},
{
"coin": 60,
"type": "token",
"token_id": "0xF5DCe57282A584D2746FaF1593d3121Fcac444dC",
"id": 5264
},
{
"coin": 60,
"type": "token",
"token_id": "0x39AA39c021dfbaE8faC545936693aC917d5E7563",
"id": 5265
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5267
},
{
"coin": 246,
"type": "coin",
"id": 5268
},
{
"coin": 485,
"type": "coin",
"id": 5270
},
{
"coin": 60,
"type": "token",
"token_id": "0x7B68D272EDa2185ea2F9283F241b1c44C51e712A",
"id": 5272
},
{
"coin": 523,
"type": "coin",
"id": 5274
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5276
},
{
"coin": 60,
"type": "token",
"token_id": "0x179E31FB25E433441a2839389A7b8EC9c4654b7B",
"id": 5277
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5280
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5285
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5288
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5289
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5292
},
{
"coin": 60,
"type": "token",
"token_id": "0x6FeBdFC0A9d9502C45343fCE0dF08828dEF44795",
"id": 5294
},
{
"coin": 60,
"type": "token",
"token_id": "0x9D1a62c2AD99019768b9126fdA004a9952853F6E",
"id": 5295
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5298
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5299
},
{
"coin": 60,
"type": "token",
"token_id": "0xae746520FfDB15d0505e32f1d6e9a2b4ab866572",
"id": 5304
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5310
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5313
},
{
"coin": 60,
"type": "token",
"token_id": "0x44A67C8570a61A28bAfd0035042f2F0A73a64428",
"id": 5322
},
{
"coin": 60,
"type": "token",
"token_id": "0x3cC5EB07E0e1227613F1DF58f38b549823d11cB9",
"id": 5323
},
{
"coin": 60,
"type": "token",
"token_id": "0x70debcDAB2Ef20bE3d1dBFf6a845E9cCb6E46930",
"id": 5325
},
{
"coin": 60,
"type": "token",
"token_id": "0xa982B2e19e90b2D9F7948e9C1b65D119F1CE88D6",
"id": 5328
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5330
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5332
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5334
},
{
"coin": 60,
"type": "token",
"token_id": "0xB1A30851E3f7d841b231B086479608e17198363A",
"id": 5336
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5339
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5347
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5350
},
{
"coin": 60,
"type": "token",
"token_id": "0x633eE3fbE5ffc05bD44Ecd8240732fF9ef9Dee1d",
"id": 5354
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5355
},
{
"coin": 60,
"type": "token",
"token_id": "0xF0e6019c0F16d31294937b3334229909349e00f4",
"id": 5358
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5363
},
{
"coin": 60,
"type": "token",
"token_id": "0xA8b0279aCc96efEBD09955AD8240B2d30B53055e",
"id": 5364
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5366
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5367
},
{
"coin": 60,
"type": "token",
"token_id": "0xab5c04BBE42667610a2Da07aC98ea9FA6e4a9514",
"id": 5373
},
{
"coin": 60,
"type": "token",
"token_id": "0xc21dBEE65D62770953035f0434C532d578a666c9",
"id": 5376
},
{
"coin": 60,
"type": "token",
"token_id": "0xc175E77b04F2341517334Ea3Ed0b198A01A97383",
"id": 5378
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5379
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5380
},
{
"coin": 60,
"type": "token",
"token_id": "0x2781246fe707bB15CeE3e5ea354e2154a2877B16",
"id": 5382
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5383
},
{
"coin": 60,
"type": "token",
"token_id": "0xbcC7026CdE024c6a97d2DF6e40613217d946279C",
"id": 5384
},
{
"coin": 60,
"type": "token",
"token_id": "0x19B58d95929586Ad750893CAad43E77aa6e8Ce9E",
"id": 5388
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5389
},
{
"coin": 60,
"type": "token",
"token_id": "0xB8e2E9dDdC504688cfe925b9acE8017D48d3e095",
"id": 5390
},
{
"coin": 60,
"type": "token",
"token_id": "0x7A5E6ca9d335e343D1Ed12239F67248E056AFE2f",
"id": 5391
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5394
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5397
},
{
"coin": 60,
"type": "token",
"token_id": "0x4dF76A9DaB9bb8310e4Ad3dc4336a8e26ed24EBB",
"id": 5398
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5399
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5401
},
{
"coin": 60,
"type": "token",
"token_id": "0x45Af324F53a8D7DA1752DAd74ADc1748126D7978",
"id": 5402
},
{
"coin": 202,
"type": "coin",
"id": 5403
},
{
"coin": 195,
"type": "token",
"token_id": "<KEY>",
"id": 5407
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5409
},
{
"coin": 60,
"type": "token",
"token_id": "0xFE2786D7D1cCAb8B015f6Ef7392F67d778f8d8D7",
"id": 5410
},
{
"coin": 195,
"type": "token",
"token_id": "<KEY>",
"id": 5411
},
{
"coin": 714,
"type": "token",
"token_id": "XRPBULL-E7C",
"id": 5412
},
{
"coin": 714,
"type": "token",
"token_id": "XRPBEAR-00B",
"id": 5413
},
{
"coin": 714,
"type": "token",
"token_id": "EOSBULL-F0D",
"id": 5414
},
{
"coin": 714,
"type": "token",
"token_id": "EOS<PASSWORD>",
"id": 5415
},
{
"coin": 195,
"type": "token",
"token_id": "<KEY>",
"id": 5416
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5421
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5422
},
{
"coin": 60,
"type": "token",
"token_id": "0x3aFfCCa64c2A6f4e3B6Bd9c64CD2C969EFd1ECBe",
"id": 5423
},
{
"coin": 714,
"type": "token",
"token_id": "<KEY>",
"id": 5424
},
{
"coin": 60,
"type": "token",
"token_id": "0x27201232579491Ce9b116Ac6F37D354Cc723A2f3",
"id": 5425
},
{
"coin": 501,
"type": "coin",
"id": 5426
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5428
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5429
},
{
"coin": 60,
"type": "token",
"token_id": "0xe96F2c381E267a96C29bbB8ab05AB7d3527b45Ab",
"id": 5430
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5431
},
{
"coin": 1111,
"type": "coin",
"id": 5432
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5433
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5434
},
{
"coin": 60,
"type": "token",
"token_id": "0x7A8Ca2f815A260660158a38C34ca321A3605eCFE",
"id": 5437
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5441
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5444
},
{
"coin": 60,
"type": "token",
"token_id": "0x9cB1AEaFcc8A9406632C5B084246Ea72f62d37b6",
"id": 5445
},
{
"coin": 195,
"type": "token",
"token_id": "<KEY>",
"id": 5446
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5453
},
{
"coin": 60,
"type": "token",
"token_id": "0xCe49c3c92b33a1653F34811a9d7e34502bF12B89",
"id": 5459
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5461
},
{
"coin": 60,
"type": "token",
"token_id": "0xDB61354E9cf2217a29770E9811832B360a8DAad3",
"id": 5462
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5463
},
{
"coin": 60,
"type": "token",
"token_id": "0xFD25676Fc2c4421778B18Ec7Ab86E7C5701DF187",
"id": 5465
},
{
"coin": 60,
"type": "token",
"token_id": "0xa9fC65Da36064cE545e87690e06f5de10C52C690",
"id": 5466
},
{
"coin": 60,
"type": "token",
"token_id": "0x42726d074BBa68Ccc15200442B72Afa2D495A783",
"id": 5468
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5471
},
{
"coin": 60,
"type": "token",
"token_id": "0xDaab5E695bb0E8Ce8384ee56BA38fA8290618e52",
"id": 5473
},
{
"coin": 531,
"type": "coin",
"id": 5475
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5478
},
{
"coin": 60,
"type": "token",
"token_id": "0x013A06558f07d9E6F9A00c95a33f3a0E0255176b",
"id": 5480
},
{
"coin": 60,
"type": "token",
"token_id": "0xF25c91C87e0B1fd9B4064Af0F427157AaB0193A7",
"id": 5481
},
{
"coin": 714,
"type": "token",
"token_id": "<KEY>",
"id": 5483
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5486
},
{
"coin": 195,
"type": "token",
"token_id": "<KEY>",
"id": 5488
},
{
"coin": 60,
"type": "token",
"token_id": "0x80bD0cc689c206e3F642919244c4251c7Ef19852",
"id": 5489
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5509
},
{
"coin": 51,
"type": "coin",
"id": 5511
},
{
"coin": 60,
"type": "token",
"token_id": "0x58002A6B6E659A16dE9F02F529B10536E307b0d9",
"id": 5513
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5523
},
{
"coin": 60,
"type": "token",
"token_id": "0x08399ab5eBBE96870B289754A7bD21E7EC8c6FCb",
"id": 5527
},
{
"coin": 60,
"type": "token",
"token_id": "0x627e2Ee3dbDA546e168eaAFF25A2C5212E4A95a0",
"id": 5534
},
{
"coin": 509,
"type": "coin",
"id": 5541
},
{
"coin": 60,
"type": "token",
"token_id": "0xCaBeC58a571979f9fE825885fcb8F7A93892eaB0",
"id": 5546
},
{
"coin": 297,
"type": "coin",
"id": 5548
},
{
"coin": 195,
"type": "token",
"token_id": "<KEY>",
"id": 5563
},
{
"coin": 60,
"type": "token",
"token_id": "0xcDd0A6B15B49A9eb3Ce011CCE22FAc2ccf09ecE6",
"id": 5564
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5566
},
{
"coin": 52752,
"type": "coin",
"id": 5567
},
{
"coin": 60,
"type": "token",
"token_id": "0x91E84EC6101547C1FA39Dd565dd8b020E3c20CF2",
"id": 5569
},
{
"coin": 60,
"type": "token",
"token_id": "0xCa208BfD69ae6D2667f1FCbE681BAe12767c0078",
"id": 5572
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5577
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5580
},
{
"coin": 60,
"type": "token",
"token_id": "0xE48972fCd82a274411c01834e2f031D4377Fa2c0",
"id": 5587
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5589
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5590
},
{
"coin": 60,
"type": "token",
"token_id": "0xC7e43A1c8E118aA2965F5EAbe0e718D83DB7A63C",
"id": 5594
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5597
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5600
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5601
},
{
"coin": 195,
"type": "token",
"token_id": "1002984",
"id": 5610
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5611
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5614
},
{
"coin": 60,
"type": "token",
"token_id": "0x310DA5e1E61cD9d6ECed092F085941089267E71E",
"id": 5615
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5616
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5617
},
{
"coin": 60,
"type": "token",
"token_id": "0x580c8520dEDA0a441522AEAe0f9F7A5f29629aFa",
"id": 5618
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5619
},
{
"coin": 60,
"type": "token",
"token_id": "0x412D397DDCa07D753E3E0C61e367fb1b474B3E7D",
"id": 5620
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5623
},
{
"coin": 60,
"type": "token",
"token_id": "0xA8b919680258d369114910511cc87595aec0be6D",
"id": 5625
},
{
"coin": 60,
"type": "token",
"token_id": "0x95E40E065AFB3059dcabe4aaf404c1F92756603a",
"id": 5626
},
{
"coin": 60,
"type": "token",
"token_id": "0x3F694635Ab69b1b5E7f001dE26892CC63ba94bad",
"id": 5628
},
{
"coin": 60,
"type": "token",
"token_id": "0x6Ec47a178A9d50d4ec4683003d8324f19Ca35382",
"id": 5629
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5631
},
{
"coin": 472,
"type": "coin",
"id": 5632
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5635
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5636
},
{
"coin": 60,
"type": "token",
"token_id": "0x8f179114235842978D8917e08721541072C46584",
"id": 5640
},
{
"coin": 60,
"type": "token",
"token_id": "0x675Ce995953136814cb05aaAA5d02327E7Dc8c93",
"id": 5644
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5648
},
{
"coin": 60,
"type": "token",
"token_id": "0xcBe79cEca09092648995B2CCdf91cA5ECD1EdEc9",
"id": 5652
},
{
"coin": 60,
"type": "token",
"token_id": "0x4a73E60ADBb8575500Ffc6AaEa6128954011C8Af",
"id": 5653
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5656
},
{
"coin": 60,
"type": "token",
"token_id": "0x4Ba6dDd7b89ed838FEd25d208D4f644106E34279",
"id": 5657
},
{
"coin": 60,
"type": "token",
"token_id": "0x10e1E953DDBa597011f8bFA806aB0cC3415a622b",
"id": 5658
},
{
"coin": 60,
"type": "token",
"token_id": "0x0E7f79E89BA8C4a13431129fB2db0d4f444B5B9A",
"id": 5659
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5660
},
{
"coin": 485,
"type": "coin",
"id": 5661
},
{
"coin": 60,
"type": "token",
"token_id": "0x045Eb7e34e94B28C7A3641BC5e1A1F61f225Af9F",
"id": 5663
},
{
"coin": 60,
"type": "token",
"token_id": "0x46b4a7d906F1A943b7744Df23625E63726d79035",
"id": 5666
},
{
"coin": 60,
"type": "token",
"token_id": "0xd2727e4259BBa31955FD582986334AEA4fBA8C52",
"id": 5671
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5673
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5674
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5677
},
{
"coin": 60,
"type": "token",
"token_id": "0x60571E95E12c78CbA5223042692908f0649435a5",
"id": 5679
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5688
},
{
"coin": 60,
"type": "token",
"token_id": "0x6De037ef9aD2725EB40118Bb1702EBb27e4Aeb24",
"id": 5690
},
{
"coin": 60,
"type": "token",
"token_id": "0xc00e94Cb662C3520282E6f5717214004A7f26888",
"id": 5692
},
{
"coin": 20000714,
"type": "token",
"token_id": "0x52CE071Bd9b1C4B00A0b92D298c512478CaD67e8",
"id": 5692
},
{
"coin": 60,
"type": "token",
"token_id": "0x256845e721C0c46d54E6afBD4FA3B52CB72353EA",
"id": 5694
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5697
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5702
},
{
"coin": 60,
"type": "token",
"token_id": "0x4D6b9f281AF31916a0f16D1cEA2ec7384851EAAb",
"id": 5703
},
{
"coin": 60,
"type": "token",
"token_id": "0xD6F0Bb2A45110f819e908a915237D652Ac7c5AA8",
"id": 5704
},
{
"coin": 60,
"type": "token",
"token_id": "0x0000000000004946c0e9F43F4Dee607b0eF1fA1c",
"id": 5709
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5711
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5715
},
{
"coin": 60,
"type": "token",
"token_id": "0xF5238462E7235c7B62811567E63Dd17d12C2EAA0",
"id": 5719
},
{
"coin": 60,
"type": "token",
"token_id": "0xeF7A985E4FF9B5DcCD6eDdF58577486887288711",
"id": 5722
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5725
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5728
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5729
},
{
"coin": 60,
"type": "token",
"token_id": "0x5547136b913b68881596275ACe01e9A589c5b16B",
"id": 5732
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5733
},
{
"coin": 60,
"type": "token",
"token_id": "0x06A8F2BCc622ac55d596ea02CE5bb5f318F485e9",
"id": 5734
},
{
"coin": 60,
"type": "token",
"token_id": "0x7e5F9F248e84EF0B1f63586323e92a0d91B15568",
"id": 5736
},
{
"coin": 60,
"type": "token",
"token_id": "0x1148661869D30e095FF4AA48Aa8b5EadedC75f2A",
"id": 5737
},
{
"coin": 60,
"type": "token",
"token_id": "0xEF9c8a1b3cE9055266E1CE20b98a4c882F0e5c78",
"id": 5738
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5739
},
{
"coin": 60,
"type": "token",
"token_id": "0xEd91879919B71bB6905f23af0A68d231EcF87b14",
"id": 5741
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5747
},
{
"coin": 60,
"type": "token",
"token_id": "0xa3BeD4E1c75D00fa6f4E5E6922DB7261B5E9AcD2",
"id": 5748
},
{
"coin": 60,
"type": "token",
"token_id": "0xfE18be6b3Bd88A2D2A7f928d00292E7a9963CfC6",
"id": 5764
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5765
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5767
},
{
"coin": 60,
"type": "token",
"token_id": "0x36a2422a863D5B950882190Ff5433E513413343a",
"id": 5768
},
{
"coin": 60,
"type": "token",
"token_id": "0xe36E2D3c7c34281FA3bC737950a68571736880A1",
"id": 5769
},
{
"coin": 195,
"type": "token",
"token_id": "<KEY>",
"id": 5775
},
{
"coin": 60,
"type": "token",
"token_id": "0xEB4C2781e4ebA804CE9a9803C67d0893436bB27D",
"id": 5777
},
{
"coin": 60,
"type": "token",
"token_id": "0xEED736b2b809550D89A941C2005dE93588c628e2",
"id": 5778
},
{
"coin": 60,
"type": "token",
"token_id": "0xF0FAC7104aAC544e4a7CE1A55ADF2B5a25c65bD1",
"id": 5780
},
{
"coin": 60,
"type": "token",
"token_id": "0x6F4ee03Ca6c942c9397D2ba5f8F83eA58f918f47",
"id": 5781
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5783
},
{
"coin": 60,
"type": "token",
"token_id": "0x5c250ff9b993C6991cC4A3cC543716e53b478018",
"id": 5785
},
{
"coin": 714,
"type": "token",
"token_id": "LYFE-6AB",
"id": 5786
},
{
"coin": 60,
"type": "token",
"token_id": "0xc1eEcf1f4AF8EB9a2a19f6C26B434aA96ce859e1",
"id": 5789
},
{
"coin": 60,
"type": "token",
"token_id": "0x36c85687eeDAe01C50eB7d04d74C0ec74f930c54",
"id": 5790
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5792
},
{
"coin": 60,
"type": "token",
"token_id": "0xc59cb23295e2DEEB66bd090ACB6B02BE8d30A11F",
"id": 5793
},
{
"coin": 60,
"type": "token",
"token_id": "0x89Ab32156e46F46D02ade3FEcbe5Fc4243B9AAeD",
"id": 5794
},
{
"coin": 60,
"type": "token",
"token_id": "0x58A3520D738B268c2353ECeE518A1AD8e28E4AE5",
"id": 5797
},
{
"coin": 60,
"type": "token",
"token_id": "0x9469D013805bFfB7D3DEBe5E7839237e535ec483",
"id": 5798
},
{
"coin": 60,
"type": "token",
"token_id": "0x0a9d68886a0D7Db83a30ec00d62512483e5Ad437",
"id": 5800
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5802
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5803
},
{
"coin": 60,
"type": "token",
"token_id": "0xBfE03707aDb75b478Add9A01978057803F480E44",
"id": 5806
},
{
"coin": 60,
"type": "token",
"token_id": "0x61bFC979EA8160Ede9b862798B7833a97baFa02a",
"id": 5807
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5810
},
{
"coin": 60,
"type": "token",
"token_id": "0xEc1a718D1A6F8F8d94eCEc6fe91465697bb2b88C",
"id": 5811
},
{
"coin": 60,
"type": "token",
"token_id": "0x5AB55ec290BeacAE98f54c3eB70860460B167C3C",
"id": 5814
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5816
},
{
"coin": 60,
"type": "token",
"token_id": "0xd6bD97a26232bA02172Ff86b055d5D7bE789335B",
"id": 5818
},
{
"coin": 60,
"type": "token",
"token_id": "0xa7C71d444bf9aF4bfEd2adE75595d7512Eb4DD39",
"id": 5819
},
{
"coin": 997,
"type": "coin",
"id": 5823
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5824
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5826
},
{
"coin": 60,
"type": "token",
"token_id": "0xCC4304A31d09258b0029eA7FE63d032f52e44EFe",
"id": 5829
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5830
},
{
"coin": 60,
"type": "token",
"token_id": "0xeEEE2a622330E6d2036691e983DEe87330588603",
"id": 5833
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5834
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5835
},
{
"coin": 60,
"type": "token",
"token_id": "0x89551b940e2A8ED8eCcF509935bAc9213fE30584",
"id": 5837
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5839
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5840
},
{
"coin": 60,
"type": "token",
"token_id": "0x04abEdA201850aC0124161F037Efd70c74ddC74C",
"id": 5841
},
{
"coin": 60,
"type": "token",
"token_id": "0xb4058411967D5046f3510943103805be61f0600E",
"id": 5843
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5844
},
{
"coin": 60,
"type": "token",
"token_id": "0x283669123bd83dA2536bB534e20512101c18E5D8",
"id": 5850
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5852
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5853
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5854
},
{
"coin": 60,
"type": "token",
"token_id": "0xD1ef9a7310D0806855C672288EF5a1BAB62ceF33",
"id": 5855
},
{
"coin": 60,
"type": "token",
"token_id": "0x6b785a0322126826d8226d77e173d75DAfb84d11",
"id": 5856
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5857
},
{
"coin": 60,
"type": "token",
"token_id": "0x63120ccd7b415743e8753AfD167F5AD4A1732C43",
"id": 5858
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5860
},
{
"coin": 60,
"type": "token",
"token_id": "0xeABACD844A196D7Faf3CE596edeBF9900341B420",
"id": 5861
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5862
},
{
"coin": 60,
"type": "token",
"token_id": "0x6A22e5e94388464181578Aa7A6B869e00fE27846",
"id": 5863
},
{
"coin": 60,
"type": "token",
"token_id": "0x0bc529c00C6401aEF6D220BE8C6Ea1667F6Ad93e",
"id": 5864
},
{
"coin": 20000714,
"type": "token",
"token_id": "<KEY>",
"id": 5864
},
{
"coin": 235,
"type": "coin",
"id": 5865
},
{
"coin": 60,
"type": "token",
"token_id": "0x26CE25148832C04f3d7F26F32478a9fe55197166",
"id": 5866
},
{
"coin": 60,
"type": "token",
"token_id": "0xa7DE087329BFcda5639247F96140f9DAbe3DeED1",
"id": 5868
},
{
"coin": 60,
"type": "token",
"token_id": "0x43afc9058a3DeBF37eaDf99138e449cE8a480A8a",
"id": 5869
},
{
"coin": 60,
"type": "token",
"token_id": "0x1C09EF4493465569f6d704A5CC4f9864BCD2E56a",
"id": 5870
},
{
"coin": 60,
"type": "token",
"token_id": "0xDc5864eDe28BD4405aa04d93E05A0531797D9D59",
"id": 5871
},
{
"coin": 60,
"type": "token",
"token_id": "0x7105eC15995A97496eC25de36CF7eEc47b703375",
"id": 5875
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5876
},
{
"coin": 60,
"type": "token",
"token_id": "0xFca59Cd816aB1eaD66534D82bc21E7515cE441CF",
"id": 5877
},
{
"coin": 60,
"type": "token",
"token_id": "0xB72B31907C1C95F3650b64b2469e08EdACeE5e8F",
"id": 5879
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5880
},
{
"coin": 60,
"type": "token",
"token_id": "0x48fa42D579CE56Cc2797E4d678AD7345A27999B9",
"id": 5881
},
{
"coin": 60,
"type": "token",
"token_id": "0xd3CDc4e75750DC1e59F8342200742B6B29490e70",
"id": 5884
},
{
"coin": 60,
"type": "token",
"token_id": "0xc4199fB6FFDb30A829614becA030f9042f1c3992",
"id": 5889
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5890
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5894
},
{
"coin": 60,
"type": "token",
"token_id": "0x995dE3D961b40Ec6CDee0009059D48768ccbdD48",
"id": 5901
},
{
"coin": 60,
"type": "token",
"token_id": "0x194524355F26aF663468d4996f207A918C73E013",
"id": 5902
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5903
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5904
},
{
"coin": 60,
"type": "token",
"token_id": "0x4f56221252d117f35E2f6Ab937A3F77CAd38934D",
"id": 5907
},
{
"coin": 60,
"type": "token",
"token_id": "0x5dc60C4D5e75D22588FA17fFEB90A63E535efCE0",
"id": 5908
},
{
"coin": 60,
"type": "token",
"token_id": "0xE09394F8BA642430eD448CA20f342EC7aa1Ba2E1",
"id": 5910
},
{
"coin": 60,
"type": "token",
"token_id": "0xFF44b5719f0B77A9951636fc5e69d3a1fc9E7d73",
"id": 5912
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5914
},
{
"coin": 60,
"type": "token",
"token_id": "0x5C84bc60a796534bfeC3439Af0E6dB616A966335",
"id": 5915
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5916
},
{
"coin": 60,
"type": "token",
"token_id": "0xaa8d0e9A26853D51613ca75729CDE2564913BCfb",
"id": 5919
},
{
"coin": 60,
"type": "token",
"token_id": "0xE7FB3559358A99df54466d0350e4aD6DC7093da3",
"id": 5920
},
{
"coin": 60,
"type": "token",
"token_id": "0x4Ba012f6e411a1bE55b98E9E62C3A4ceb16eC88B",
"id": 5921
},
{
"coin": 714,
"type": "token",
"token_id": "SWINGBY-888",
"id": 5922
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5923
},
{
"coin": 60,
"type": "token",
"token_id": "0x536381a8628dBcC8C70aC9A30A7258442eAb4c92",
"id": 5924
},
{
"coin": 60,
"type": "token",
"token_id": "0x9F284E1337A815fe77D2Ff4aE46544645B20c5ff",
"id": 5931
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5933
},
{
"coin": 60,
"type": "token",
"token_id": "0x20Bcae16A8bA95d8E8363E265de4eCFc36eC5cd9",
"id": 5934
},
{
"coin": 60,
"type": "token",
"token_id": "0x08AD83D779BDf2BBE1ad9cc0f78aa0D24AB97802",
"id": 5936
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5939
},
{
"coin": 60,
"type": "token",
"token_id": "0x42382F39e7C9F1ADD5fa5f0c6e24aa62f50be3b3",
"id": 5940
},
{
"coin": 60,
"type": "token",
"token_id": "0x4161725D019690a3E0de50f6bE67b07a86A9fAe1",
"id": 5947
},
{
"coin": 60,
"type": "token",
"token_id": "0xf29e46887FFAE92f1ff87DfE39713875Da541373",
"id": 5948
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5949
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5950
},
{
"coin": 60,
"type": "token",
"token_id": "0xF29992D7b589A0A6bD2de7Be29a97A6EB73EaF85",
"id": 5952
},
{
"coin": 60,
"type": "token",
"token_id": "0x4e352cF164E64ADCBad318C3a1e222E9EBa4Ce42",
"id": 5956
},
{
"coin": 60,
"type": "token",
"token_id": "0xa1d0E215a23d7030842FC67cE582a6aFa3CCaB83",
"id": 5957
},
{
"coin": 20000714,
"type": "token",
"token_id": "<KEY>",
"id": 5957
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5958
},
{
"coin": 195,
"type": "token",
"token_id": "<KEY>",
"id": 5963
},
{
"coin": 714,
"type": "token",
"token_id": "TWT-8C2",
"id": 5964
},
{
"coin": 20000714,
"type": "token",
"token_id": "0x4B0F1812e5Df2A09796481Ff14017e6005508003",
"id": 5964
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5966
},
{
"coin": 60,
"type": "token",
"token_id": "0x1453Dbb8A29551ADe11D89825CA812e05317EAEB",
"id": 5971
},
{
"coin": 60,
"type": "token",
"token_id": "0x57700244B20f84799a31c6C96DadFF373ca9D6c5",
"id": 5972
},
{
"coin": 60,
"type": "token",
"token_id": "0x456AE45c0CE901E2e7c99c0718031cEc0A7A59Ff",
"id": 5973
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5985
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5986
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5987
},
{
"coin": 60,
"type": "token",
"token_id": "0x5cAf454Ba92e6F2c929DF14667Ee360eD9fD5b26",
"id": 5990
},
{
"coin": 60,
"type": "token",
"token_id": "0x95aD61b0a150d79219dCF64E1E6Cc01f0B64C4cE",
"id": 5994
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5995
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5996
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5997
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 5998
},
{
"coin": 60,
"type": "token",
"token_id": "0xbBa1DA8aFF34eBEF5Ce4d68312B33ef629548b06",
"id": 6002
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6005
},
{
"coin": 60,
"type": "token",
"token_id": "0x271220FbEFD584A6b0A6ad457721C076321646a1",
"id": 6016
},
{
"coin": 60,
"type": "token",
"token_id": "0x49f941FA7f5731fe302068d79c8604C24C5E7196",
"id": 6020
},
{
"coin": 60,
"type": "token",
"token_id": "0xE277aC35F9D327A670c1A3F3eeC80a83022431e4",
"id": 6022
},
{
"coin": 60,
"type": "token",
"token_id": "0x265Ba42daF2D20F3F358a7361D9f69Cb4E28F0E6",
"id": 6023
},
{
"coin": 60,
"type": "token",
"token_id": "0x0d4b4DA5fb1a7d55E85f8e22f728701cEB6E44C9",
"id": 6025
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6026
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6027
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6031
},
{
"coin": 60,
"type": "token",
"token_id": "0xab55bDEF7057B76482914e79f037999f4eBb6bF1",
"id": 6033
},
{
"coin": 60,
"type": "token",
"token_id": "0x83aD87C988aC0c6277C0c6234Cc8108b20bB5d9B",
"id": 6037
},
{
"coin": 60,
"type": "token",
"token_id": "0x9a1bf361798Ef6538cCB8137EA900C4D4B48CA3D",
"id": 6039
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6040
},
{
"coin": 195,
"type": "token",
"token_id": "<KEY>",
"id": 6051
},
{
"coin": 60,
"type": "token",
"token_id": "0x2565ae0385659badCada1031DB704442E1b69982",
"id": 6069
},
{
"coin": 60,
"type": "token",
"token_id": "0x584936357D68f5143F12e2e64F0089dB93814dAd",
"id": 6074
},
{
"coin": 714,
"type": "token",
"token_id": "GIV-94E",
"id": 6075
},
{
"coin": 60,
"type": "token",
"token_id": "0xbeea2890775889c7723E5c0B80527976803b5A99",
"id": 6076
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6077
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6078
},
{
"coin": 60,
"type": "token",
"token_id": "0x43dE1145Cd22f0a9Cc99e51c205e6e81161Df6B9",
"id": 6079
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6080
},
{
"coin": 60,
"type": "token",
"token_id": "0x3335f16AF9008bFd32f1eE6C2Be5d4f84FA0b9da",
"id": 6082
},
{
"coin": 60,
"type": "token",
"token_id": "0x974c98Bc2e82FA18de92B7e697A1D9BD25682e80",
"id": 6083
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6084
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6085
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6087
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6088
},
{
"coin": 60,
"type": "token",
"token_id": "0xa38920C00D1a5303dB538A3Ea08da7a779e1F751",
"id": 6090
},
{
"coin": 60,
"type": "token",
"token_id": "0x057FB10e3fec001a40e6B75D3a30B99e23e54107",
"id": 6091
},
{
"coin": 60,
"type": "token",
"token_id": "0x486A72811ae65C4C814Ba929d6da35497d21296f",
"id": 6095
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6097
},
{
"coin": 60,
"type": "token",
"token_id": "0xA340f0937a8c00DB11C83Cc16CEC12310160F0b6",
"id": 6099
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6100
},
{
"coin": 60,
"type": "token",
"token_id": "0x975769557510167d25BEed6E32806537173E292C",
"id": 6104
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6109
},
{
"coin": 60,
"type": "token",
"token_id": "0x7e1A6Fb26702Ecb0439A641C5c285F7eec430419",
"id": 6110
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6112
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6113
},
{
"coin": 60,
"type": "token",
"token_id": "0x5f75112bBB4E1aF516fBE3e21528C63DA2B6a1A5",
"id": 6115
},
{
"coin": 60,
"type": "token",
"token_id": "0x6D6506E6F438edE269877a0A720026559110B7d5",
"id": 6116
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6120
},
{
"coin": 60,
"type": "token",
"token_id": "0xAC8Ea871e2d5F4Be618905F36f73c760f8cFDC8E",
"id": 6126
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6128
},
{
"coin": 60,
"type": "token",
"token_id": "0x9ea463Ec4cE9E9E5bc9cFd0187C4Ac3a70DD951D",
"id": 6129
},
{
"coin": 60,
"type": "token",
"token_id": "0xeF0fDA1d4bd73DDC2f93A4e46E2E5aDBC2D668f4",
"id": 6130
},
{
"coin": 60,
"type": "token",
"token_id": "0x54e8371C1EC43e58fB53D4ef4eD463C17Ba8a6bE",
"id": 6133
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6134
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6138
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6139
},
{
"coin": 60,
"type": "token",
"token_id": "0x542156d51D10Db5acCB99f9Db7e7C91B74E80a2c",
"id": 6140
},
{
"coin": 60,
"type": "token",
"token_id": "0x09E4BDFb273245063eF5E800D891eFF7d04f9B83",
"id": 6141
},
{
"coin": 60,
"type": "token",
"token_id": "0xf0B0A13d908253D954BA031a425dFd54f94a2e3D",
"id": 6142
},
{
"coin": 60,
"type": "token",
"token_id": "0x93E01899c10532d76C0E864537a1D26433dBbDdB",
"id": 6143
},
{
"coin": 60,
"type": "token",
"token_id": "0x136faE4333EA36A24bb751E2d505D6ca4Fd9f00b",
"id": 6144
},
{
"coin": 60,
"type": "token",
"token_id": "0xCee1d3c3A02267e37E6B373060F79d5d7b9e1669",
"id": 6145
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6152
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6155
},
{
"coin": 60,
"type": "token",
"token_id": "0xC0F9bD5Fa5698B6505F643900FFA515Ea5dF54A9",
"id": 6156
},
{
"coin": 60,
"type": "token",
"token_id": "0x8933ea1Ce67B946BdF2436cE860fFBb53Ce814d2",
"id": 6158
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6159
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6179
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6180
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6184
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6188
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6189
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6190
},
{
"coin": 60,
"type": "token",
"token_id": "0x261EfCdD24CeA98652B9700800a13DfBca4103fF",
"id": 6191
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6192
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6193
},
{
"coin": 20000714,
"type": "token",
"token_id": "<KEY>",
"id": 6193
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6194
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6195
},
{
"coin": 60,
"type": "token",
"token_id": "0xE55CC44C0Cf9CEDE2d68f9432cBeeAfA6357ed92",
"id": 6198
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6199
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6200
},
{
"coin": 60,
"type": "token",
"token_id": "0xa2B0fDe6D710e201d0d608e924A484d1A5fEd57c",
"id": 6201
},
{
"coin": 60,
"type": "token",
"token_id": "0x013AE307648f529aa72c5767A334DDd37aaB43c3",
"id": 6204
},
{
"coin": 60,
"type": "token",
"token_id": "0x78A5B382B9A83Fe042A4F7eB2399d563FDa931C3",
"id": 6214
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6215
},
{
"coin": 60,
"type": "token",
"token_id": "0x0f71B8De197A1C84d31de0F1fA7926c365F052B3",
"id": 6218
},
{
"coin": 13107,
"type": "coin",
"id": 6224
},
{
"coin": 60,
"type": "token",
"token_id": "0x98d8d146e644171Cd47fF8588987B7bdeEF72A87",
"id": 6225
},
{
"coin": 195,
"type": "token",
"token_id": "1000317",
"id": 6226
},
{
"coin": 195,
"type": "token",
"token_id": "<KEY>",
"id": 6230
},
{
"coin": 60,
"type": "token",
"token_id": "0xABe580E7ee158dA464b51ee1a83Ac0289622e6be",
"id": 6236
},
{
"coin": 60,
"type": "token",
"token_id": "0xd32641191578Ea9b208125dDD4EC5E7B84FcaB4C",
"id": 6237
},
{
"coin": 60,
"type": "token",
"token_id": "0xc9287623832668432099CEF2FfDEF3CeD14f4315",
"id": 6238
},
{
"coin": 60,
"type": "token",
"token_id": "0x31CbF205e26Ba63296FdBD254a6b1bE3ED28CE47",
"id": 6240
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6243
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6245
},
{
"coin": 60,
"type": "token",
"token_id": "0xE7750c38c9a10D877650C0D99d1717bB28A5C42e",
"id": 6249
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6250
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6252
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6253
},
{
"coin": 60,
"type": "token",
"token_id": "0xCd475371E39c0d94e82FCCc9dD0ea710D0dc0C0B",
"id": 6254
},
{
"coin": 60,
"type": "token",
"token_id": "0x8cEa63f6383c1C13633F179F1af70ef75701a979",
"id": 6255
},
{
"coin": 60,
"type": "token",
"token_id": "0x22Ef3753e3658e81D5A0d0889ce078178Fe66595",
"id": 6256
},
{
"coin": 60,
"type": "token",
"token_id": "0x9b53E429B0baDd98ef7F01F03702986c516a5715",
"id": 6261
},
{
"coin": 60,
"type": "token",
"token_id": "0xEb7355C2f217b3485a591332Fe13C8c5A76A581D",
"id": 6262
},
{
"coin": 195,
"type": "token",
"token_id": "1001757",
"id": 6263
},
{
"coin": 195,
"type": "token",
"token_id": "1002361",
"id": 6264
},
{
"coin": 195,
"type": "token",
"token_id": "<KEY>",
"id": 6265
},
{
"coin": 60,
"type": "token",
"token_id": "0xf8aD7dFe656188A23e89da09506Adf7ad9290D5d",
"id": 6283
},
{
"coin": 195,
"type": "token",
"token_id": "<KEY>",
"id": 6297
},
{
"coin": 60,
"type": "token",
"token_id": "0x89E3aC6Dd69C15e9223BE7649025d6F68Dab1d6a",
"id": 6298
},
{
"coin": 60,
"type": "token",
"token_id": "0xd5dC8921A5c58FB0ebA6db6b40Eab40283Dc3C01",
"id": 6315
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6321
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6323
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6326
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6330
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6365
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6368
},
{
"coin": 60,
"type": "token",
"token_id": "0x0452aeD878805514e28Fb5BD0B56Bef92176E32A",
"id": 6388
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6390
},
{
"coin": 60,
"type": "token",
"token_id": "0x3004Cf8B4e28d60f4E305DF25a57Cd5faF37b8d5",
"id": 6391
},
{
"coin": 60,
"type": "token",
"token_id": "0xBA9ECaa4d6f22D3a69C41daA0584ac0e2418925e",
"id": 6392
},
{
"coin": 60,
"type": "token",
"token_id": "0x87F14E9460ceCb789F1B125b2E3e353Ff8ed6fcd",
"id": 6393
},
{
"coin": 60,
"type": "token",
"token_id": "0x5c89736e9454200141B80C37Eb28eaCECA2cE8Cb",
"id": 6395
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6397
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6403
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6406
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6407
},
{
"coin": 60,
"type": "token",
"token_id": "0xEeEeeeeEe2aF8D0e1940679860398308e0eF24d6",
"id": 6409
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6410
},
{
"coin": 60,
"type": "token",
"token_id": "0x54Ad74EdeAB48e09ccC43eE324f2603071dAD72b",
"id": 6414
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6418
},
{
"coin": 60,
"type": "token",
"token_id": "0xc626C9e2247c8A0C863ad6DaA97ed939E12786DE",
"id": 6419
},
{
"coin": 60,
"type": "token",
"token_id": "0x58379bF685ea2e251AE63d3eE26269c7c3848A17",
"id": 6420
},
{
"coin": 60,
"type": "token",
"token_id": "0x9d3e0892D11f19f5181d4a4C5d04187a9e0d7032",
"id": 6423
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6425
},
{
"coin": 60,
"type": "token",
"token_id": "0x10a34bbE9B3C5AD536cA23D5EefA81CA448e92fF",
"id": 6426
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6430
},
{
"coin": 454,
"type": "coin",
"id": 6436
},
{
"coin": 60,
"type": "token",
"token_id": "0x7a9716685F852ee268Feb86Dffa562D214cC13dB",
"id": 6442
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6444
},
{
"coin": 60,
"type": "token",
"token_id": "0x22f098F08c4eda4bE4ad6B4ba59866F3E98CEF92",
"id": 6448
},
{
"coin": 176,
"type": "coin",
"id": 6450
},
{
"coin": 60,
"type": "token",
"token_id": "0x40F8b7A82b6355D26546D363ce9C12Ce104cf0Ce",
"id": 6457
},
{
"coin": 60,
"type": "token",
"token_id": "0xC3b2140aC3E95EdFea220681EeCA127fC81E4929",
"id": 6461
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6465
},
{
"coin": 60,
"type": "token",
"token_id": "0x19747816A030fECDa3394C6062CDF6b9B4dB0E0b",
"id": 6466
},
{
"coin": 60,
"type": "token",
"token_id": "0x93eCD2ecDFb91aB2fEe28A8779A6adfe2851cda6",
"id": 6467
},
{
"coin": 60,
"type": "token",
"token_id": "0x3C6ff50c9Ec362efa359317009428d52115fe643",
"id": 6468
},
{
"coin": 60,
"type": "token",
"token_id": "0x201a59270DD85Da26615A37bba3EED8665153abb",
"id": 6471
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6473
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6478
},
{
"coin": 60,
"type": "token",
"token_id": "0x14cA41Eecd7D81D5D13098586C0d2314EBa285bE",
"id": 6483
},
{
"coin": 383,
"type": "coin",
"id": 6484
},
{
"coin": 60,
"type": "token",
"token_id": "0x64E65D352f6A2949463B3a7595911B61BBaFc63E",
"id": 6489
},
{
"coin": 195,
"type": "token",
"token_id": "1000451",
"id": 6495
},
{
"coin": 195,
"type": "token",
"token_id": "<KEY>",
"id": 6495
},
{
"coin": 60,
"type": "token",
"token_id": "0x8DB90E3e7D04C875a51997092f9178FCac9DefdB",
"id": 6496
},
{
"coin": 195,
"type": "token",
"token_id": "<KEY>",
"id": 6501
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6502
},
{
"coin": 60,
"type": "token",
"token_id": "0xE17f017475a709De58E976081eB916081ff4c9d5",
"id": 6503
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6504
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6506
},
{
"coin": 195,
"type": "token",
"token_id": "1000226",
"id": 6508
},
{
"coin": 60,
"type": "token",
"token_id": "0x990f341946A3fdB507aE7e52d17851B87168017c",
"id": 6511
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6519
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6521
},
{
"coin": 20036,
"type": "coin",
"id": 6524
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6525
},
{
"coin": 60,
"type": "token",
"token_id": "0x8a845Fc339CeB022A695281554890429a34DF120",
"id": 6526
},
{
"coin": 60,
"type": "token",
"token_id": "0xe64b47931f28f89Cc7A0C6965Ecf89EaDB4975f5",
"id": 6527
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6529
},
{
"coin": 113,
"type": "coin",
"id": 6530
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6531
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6532
},
{
"coin": 397,
"type": "coin",
"id": 6535
},
{
"coin": 20000714,
"type": "token",
"token_id": "0x1Fa4a73a3F0133f0025378af00236f3aBDEE5D63",
"id": 6535
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6536
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6538
},
{
"coin": 60,
"type": "token",
"token_id": "0x0e2298E3B3390e3b945a5456fBf59eCc3f55DA16",
"id": 6539
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6541
},
{
"coin": 195,
"type": "token",
"token_id": "1003120",
"id": 6547
},
{
"coin": 60,
"type": "token",
"token_id": "0x7461C43bb1E96863233D72A09191008ee9217Ee8",
"id": 6549
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6551
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6552
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6556
},
{
"coin": 60,
"type": "token",
"token_id": "0xC581399Be631f060B64Df49742626670906FDeA9",
"id": 6565
},
{
"coin": 60,
"type": "token",
"token_id": "0x95172ccBe8344fecD73D0a30F54123652981BD6F",
"id": 6566
},
{
"coin": 60,
"type": "token",
"token_id": "0x1A5F9352Af8aF974bFC03399e3767DF6370d82e4",
"id": 6567
},
{
"coin": 60,
"type": "token",
"token_id": "0x29428639d889fa989405ee9baF3Ba088E6994eDC",
"id": 6570
},
{
"coin": 60,
"type": "token",
"token_id": "0xee98A5c3FD8c9063C5D8777758d3901a88df957b",
"id": 6571
},
{
"coin": 60,
"type": "token",
"token_id": "0xa462d0E6Bb788c7807B1B1C96992CE1f7069E195",
"id": 6572
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6575
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6577
},
{
"coin": 60,
"type": "token",
"token_id": "0x073aF3f70516380654Ba7C5812c4Ab0255F081Bc",
"id": 6578
},
{
"coin": 60,
"type": "token",
"token_id": "0xEb4E33dd1D97407ADD2aFcDcD5dd17851b1695d0",
"id": 6580
},
{
"coin": 60,
"type": "token",
"token_id": "0xc75F15AdA581219c95485c578E124df3985e4CE0",
"id": 6582
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6583
},
{
"coin": 60,
"type": "token",
"token_id": "0x499f434458F62a1e76974fCe5eFcE9DD6B31D4f2",
"id": 6586
},
{
"coin": 60,
"type": "token",
"token_id": "0x3408B204d67BA2dBcA13b9C50e8a45701d8a1cA6",
"id": 6589
},
{
"coin": 60,
"type": "token",
"token_id": "0x54C9EA2E9C9E8eD865Db4A4ce6711C2a0d5063Ba",
"id": 6592
},
{
"coin": 60,
"type": "token",
"token_id": "0x1b980e05943dE3dB3a459C72325338d327B6F5a9",
"id": 6593
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6594
},
{
"coin": 60,
"type": "token",
"token_id": "0xD5525D397898e5502075Ea5E830d8914f6F0affe",
"id": 6597
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6598
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6604
},
{
"coin": 60,
"type": "token",
"token_id": "0x7777770f8A6632ff043c8833310e245EBa9209E6",
"id": 6605
},
{
"coin": 60,
"type": "token",
"token_id": "0x2cAd4991f62fc6Fcd8EC219f37E7DE52B688B75A",
"id": 6606
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6607
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6608
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6612
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6613
},
{
"coin": 60,
"type": "token",
"token_id": "0x6A7Ef4998eB9d0f706238756949F311a59E05745",
"id": 6621
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6622
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6624
},
{
"coin": 93,
"type": "coin",
"id": 6627
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6631
},
{
"coin": 195,
"type": "token",
"token_id": "<KEY>",
"id": 6632
},
{
"coin": 60,
"type": "token",
"token_id": "0x646707246D7d5C2a86d7206f41CA8199ea9CED69",
"id": 6633
},
{
"coin": 60,
"type": "token",
"token_id": "0x08A2E41FB99A7599725190B9C970Ad3893fa33CF",
"id": 6634
},
{
"coin": 354,
"type": "coin",
"id": 6636
},
{
"coin": 20000714,
"type": "token",
"token_id": "0x7083609fCE4d1d8Dc0C979AAb8c869Ea2C873402",
"id": 6636
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6638
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6650
},
{
"coin": 20000714,
"type": "token",
"token_id": "<KEY>",
"id": 6651
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6654
},
{
"coin": 60,
"type": "token",
"token_id": "0xAba8cAc6866B83Ae4eec97DD07ED254282f6aD8A",
"id": 6657
},
{
"coin": 195,
"type": "token",
"token_id": "<KEY>",
"id": 6659
},
{
"coin": 195,
"type": "token",
"token_id": "<KEY>",
"id": 6660
},
{
"coin": 195,
"type": "token",
"token_id": "<KEY>",
"id": 6661
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6664
},
{
"coin": 60,
"type": "token",
"token_id": "0xaE697F994Fc5eBC000F8e22EbFfeE04612f98A0d",
"id": 6665
},
{
"coin": 60,
"type": "token",
"token_id": "0xb7ba8461664dE526A3ae44189727DFC768625902",
"id": 6666
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6667
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6811
},
{
"coin": 508,
"type": "coin",
"id": 6892
},
{
"coin": 60,
"type": "token",
"token_id": "0xA91ac63D040dEB1b7A5E4d4134aD23eb0ba07e14",
"id": 6928
},
{
"coin": 20000714,
"type": "token",
"token_id": "0x8443f091997f06a61670B735ED92734F5628692F",
"id": 6928
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 6937
},
{
"coin": 20000714,
"type": "token",
"token_id": "<KEY>",
"id": 6937
},
{
"coin": 195,
"type": "token",
"token_id": "<KEY>",
"id": 6990
},
{
"coin": 20000714,
"type": "token",
"token_id": "0xE02dF9e3e622DeBdD69fb838bB799E3F168902c5",
"id": 7064
},
{
"coin": 20000714,
"type": "token",
"token_id": "<KEY>",
"id": 7083
},
{
"coin": 20000714,
"type": "token",
"token_id": "<KEY>",
"id": 7158
},
{
"coin": 20000714,
"type": "token",
"token_id": "0x0E09FaBB73Bd3Ade0a17ECC321fD13a19e81cE82",
"id": 7186
},
{
"coin": 20000714,
"type": "token",
"token_id": "0x009cF7bC57584b7998236eff51b98A168DceA9B0",
"id": 7186
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 7200
},
{
"coin": 60,
"type": "token",
"token_id": "0xe28b3B32B6c345A34Ff64674606124Dd5Aceca30",
"id": 7226
},
{
"coin": 20000714,
"type": "token",
"token_id": "0xa2B726B1145A4773F68593CF171187d8EBe4d495",
"id": 7226
},
{
"coin": 20000714,
"type": "token",
"token_id": "0xa1faa113cbE53436Df28FF0aEe54275c13B40975",
"id": 7232
},
{
"coin": 60,
"type": "token",
"token_id": "0x62359Ed7505Efc61FF1D56fEF82158CcaffA23D7",
"id": 7242
},
{
"coin": 60,
"type": "token",
"token_id": "0x7Fc66500c84A76Ad7e9c93437bFc5Ac33E2DDaE9",
"id": 7278
},
{
"coin": 20000714,
"type": "token",
"token_id": "<KEY>",
"id": 7288
},
{
"coin": 60,
"type": "token",
"token_id": "<KEY>",
"id": 7358
}
]` | services/markets/coinmarketcap/mapping.go | 0.587825 | 0.546738 | mapping.go | starcoder |
package geom
import (
"math"
)
// yAxis is the world-space "up" direction used by LookMatrix to derive the
// camera's right and up basis vectors.
var yAxis = Dir{0, 1, 0}
// Mtx handles matrix data and operations
// Column-major (as in math and Direct3D)
// https://fgiesen.wordpress.com/2012/02/12/row-major-vs-column-major-row-vectors-vs-column-vectors/
type Mtx struct {
	el  [4][4]float64 // elements indexed as el[column][row]
	inv *Mtx          // lazily computed inverse, cached by Inverse (nil until first requested)
}
// NewMat constructs a new matrix from row-major arguments (a1..a4 is the
// first row), storing them internally in column-major order.
func NewMat(a1, a2, a3, a4, b1, b2, b3, b4, c1, c2, c3, c4, d1, d2, d3, d4 float64) *Mtx {
	return &Mtx{
		el: [4][4]float64{
			{a1, b1, c1, d1},
			{a2, b2, c2, d2},
			{a3, b3, c3, d3},
			{a4, b4, c4, d4},
		},
	}
}
// Identity creates a new identity matrix.
func Identity() *Mtx {
	m := &Mtx{}
	for i := 0; i < 4; i++ {
		m.el[i][i] = 1
	}
	return m
}
// LookMatrix creates a matrix looking from the eye position `o` towards `to`.
// http://www.cs.virginia.edu/~gfx/courses/1999/intro.fall99.html/lookat.html
// https://www.3dgep.com/understanding-the-view-matrix/#Look_At_Camera
// http://www.codinglabs.net/article_world_view_projection_matrix.aspx
// https://fgiesen.wordpress.com/2012/02/12/row-major-vs-column-major-row-vectors-vs-column-vectors/
func LookMatrix(o Vec, to Vec) *Mtx {
	// NOTE(review): the error results of Unit and Cross are discarded;
	// presumably they signal degenerate input (o == to, or a view direction
	// parallel to yAxis) — confirm callers never pass those.
	f, _ := o.Minus(to).Unit() // forward (points from the target back towards the eye)
	r, _ := yAxis.Cross(f)     // right
	u, _ := f.Cross(r)         // up
	// Orientation matrix whose basis vectors are right/up/forward.
	orient := NewMat(
		r.X, u.X, f.X, 0,
		r.Y, u.Y, f.Y, 0,
		r.Z, u.Z, f.Z, 0,
		0, 0, 0, 1,
	)
	// Translate to the eye position, then orient.
	return Shift(o).Mult(orient)
}
// Shift creates a new translation matrix that moves points by v.
func Shift(v Vec) *Mtx {
	m := Identity()
	// The translation lives in the fourth column (column-major storage).
	m.el[3][0] = v.X
	m.el[3][1] = v.Y
	m.el[3][2] = v.Z
	return m
}
// Scale creates a new scaling matrix with per-axis factors taken from v.
func Scale(v Vec) *Mtx {
	m := Identity()
	// The scale factors live on the main diagonal.
	m.el[0][0] = v.X
	m.el[1][1] = v.Y
	m.el[2][2] = v.Z
	return m
}
// Rotate creates a rotation matrix from an angle-axis Vector representation:
// the direction of v is the rotation axis and its length is the angle in radians.
// http://www.euclideanspace.com/maths/geometry/rotations/conversions/angleToMatrix/
func Rotate(v Vec) *Mtx {
	a := v.Len() // rotation angle, encoded as the vector's magnitude
	c := math.Cos(a)
	s := math.Sin(a)
	t := 1 - c
	// NOTE(review): the Unit error is discarded; presumably a zero-length v
	// (no rotation) is never passed — confirm, since n would be undefined.
	n, _ := v.Unit()
	x, y, z := n.X, n.Y, n.Z
	// Rodrigues' rotation formula in matrix form.
	return NewMat(
		t*x*x+c, t*x*y-z*s, t*x*z+y*s, 0,
		t*x*y+z*s, t*y*y+c, t*y*z-x*s, 0,
		t*x*z-y*s, t*y*z+x*s, t*z*z+c, 0,
		0, 0, 0, 1,
	)
}
// Tangent creates a matrix that translates from world space to tangent space
// and a corresponding matrix that translates from tangent space to world space.
// The rotation maps the surface normal onto the Up axis.
func Tangent(normal Dir) (to, from *Mtx) {
	angle := math.Acos(normal.Dot(Up))
	axis, ok := normal.Cross(Up)
	if !ok {
		// Cross product is degenerate when normal is parallel to Up, in
		// which case no rotation is needed.
		// NOTE(review): this branch also triggers when normal == -Up
		// (angle == pi), where identity looks incorrect (a 180-degree
		// rotation would be needed) — confirm that case cannot occur.
		return Identity(), Identity()
	}
	angleAxis := axis.Scaled(angle)
	m := Rotate(angleAxis)
	return m, m.Inverse()
}
// Mult multiplies by another matrix4 and returns the product a*b.
func (a *Mtx) Mult(b *Mtx) *Mtx {
	var out Mtx
	for col := 0; col < 4; col++ {
		for row := 0; row < 4; row++ {
			var sum float64
			for k := 0; k < 4; k++ {
				sum += a.el[k][row] * b.el[col][k]
			}
			out.el[col][row] = sum
		}
	}
	return &out
}
// Equals tests whether two Matrices have exactly equal element values.
func (a *Mtx) Equals(b *Mtx) bool {
	for i, col := range a.el {
		for j, v := range col {
			if v != b.el[i][j] {
				return false
			}
		}
	}
	return true
}
// MultPoint multiplies this matrix4 by a vector, including translation
// (the point is treated as having a homogeneous w of 1; the final matrix
// row is assumed to be [0,0,0,1]).
func (a *Mtx) MultPoint(v Vec) Vec {
	e := &a.el
	return Vec{
		X: v.X*e[0][0] + v.Y*e[1][0] + v.Z*e[2][0] + e[3][0],
		Y: v.X*e[0][1] + v.Y*e[1][1] + v.Z*e[2][1] + e[3][1],
		Z: v.X*e[0][2] + v.Y*e[1][2] + v.Z*e[2][2] + e[3][2],
	}
}
// MultDist multiplies this matrix4 by a vector, excluding translation
// (appropriate for offsets/distances, which must not be shifted).
func (a *Mtx) MultDist(v Vec) Vec {
	e := &a.el
	return Vec{
		X: v.X*e[0][0] + v.Y*e[1][0] + v.Z*e[2][0],
		Y: v.X*e[0][1] + v.Y*e[1][1] + v.Z*e[2][1],
		Z: v.X*e[0][2] + v.Y*e[1][2] + v.Z*e[2][2],
	}
}
// MultDir multiplies this matrix4 by a direction, renormalizing the result
// (translation is excluded, and renormalization compensates for any scaling).
// NOTE(review): the Unit error is discarded; presumably the transformed
// vector is never zero-length — confirm for singular/degenerate matrices.
func (a *Mtx) MultDir(v Dir) (result Dir) {
	dir, _ := a.MultDist(Vec(v)).Unit()
	return dir
}
// MultRay multiplies this matrix by a ray: the origin is transformed as a
// point (with translation) and the direction as a direction (without).
// https://gamedev.stackexchange.com/questions/72440/the-correct-way-to-transform-a-ray-with-a-matrix
func (a *Mtx) MultRay(r *Ray) *Ray {
	return NewRay(a.MultPoint(r.Origin), a.MultDir(r.Dir))
}
// Inverse returns the inverse of this matrix.
// https://www.gamedev.net/forums/topic/648190-algorithm-for-4x4-matrix-inverse/
// https://stackoverflow.com/questions/1148309/inverting-a-4x4-matrix
//
// The result is memoized: both matrices cache each other as inverses, so
// repeated calls are cheap. NOTE(review): the cache write below is not
// synchronized — presumably matrices are not shared across goroutines;
// confirm before concurrent use. A singular matrix (zero determinant)
// produces Inf/NaN entries rather than an error.
func (a *Mtx) Inverse() *Mtx {
	if a.inv != nil {
		return a.inv
	}
	i := Identity()
	e := a.el
	// Cofactor expansion: each assignment below computes one entry of the
	// adjugate (transposed cofactor matrix) of e.
	i.el[0][0] = e[1][1]*e[2][2]*e[3][3] - e[1][1]*e[2][3]*e[3][2] - e[2][1]*e[1][2]*e[3][3] + e[2][1]*e[1][3]*e[3][2] + e[3][1]*e[1][2]*e[2][3] - e[3][1]*e[1][3]*e[2][2]
	i.el[1][0] = e[1][0]*e[2][3]*e[3][2] - e[1][0]*e[2][2]*e[3][3] + e[2][0]*e[1][2]*e[3][3] - e[2][0]*e[1][3]*e[3][2] - e[3][0]*e[1][2]*e[2][3] + e[3][0]*e[1][3]*e[2][2]
	i.el[2][0] = e[1][0]*e[2][1]*e[3][3] - e[1][0]*e[2][3]*e[3][1] - e[2][0]*e[1][1]*e[3][3] + e[2][0]*e[1][3]*e[3][1] + e[3][0]*e[1][1]*e[2][3] - e[3][0]*e[1][3]*e[2][1]
	i.el[3][0] = e[1][0]*e[2][2]*e[3][1] - e[1][0]*e[2][1]*e[3][2] + e[2][0]*e[1][1]*e[3][2] - e[2][0]*e[1][2]*e[3][1] - e[3][0]*e[1][1]*e[2][2] + e[3][0]*e[1][2]*e[2][1]
	i.el[0][1] = e[0][1]*e[2][3]*e[3][2] - e[0][1]*e[2][2]*e[3][3] + e[2][1]*e[0][2]*e[3][3] - e[2][1]*e[0][3]*e[3][2] - e[3][1]*e[0][2]*e[2][3] + e[3][1]*e[0][3]*e[2][2]
	i.el[1][1] = e[0][0]*e[2][2]*e[3][3] - e[0][0]*e[2][3]*e[3][2] - e[2][0]*e[0][2]*e[3][3] + e[2][0]*e[0][3]*e[3][2] + e[3][0]*e[0][2]*e[2][3] - e[3][0]*e[0][3]*e[2][2]
	i.el[2][1] = e[0][0]*e[2][3]*e[3][1] - e[0][0]*e[2][1]*e[3][3] + e[2][0]*e[0][1]*e[3][3] - e[2][0]*e[0][3]*e[3][1] - e[3][0]*e[0][1]*e[2][3] + e[3][0]*e[0][3]*e[2][1]
	i.el[3][1] = e[0][0]*e[2][1]*e[3][2] - e[0][0]*e[2][2]*e[3][1] - e[2][0]*e[0][1]*e[3][2] + e[2][0]*e[0][2]*e[3][1] + e[3][0]*e[0][1]*e[2][2] - e[3][0]*e[0][2]*e[2][1]
	i.el[0][2] = e[0][1]*e[1][2]*e[3][3] - e[0][1]*e[1][3]*e[3][2] - e[1][1]*e[0][2]*e[3][3] + e[1][1]*e[0][3]*e[3][2] + e[3][1]*e[0][2]*e[1][3] - e[3][1]*e[0][3]*e[1][2]
	i.el[1][2] = e[0][0]*e[1][3]*e[3][2] - e[0][0]*e[1][2]*e[3][3] + e[1][0]*e[0][2]*e[3][3] - e[1][0]*e[0][3]*e[3][2] - e[3][0]*e[0][2]*e[1][3] + e[3][0]*e[0][3]*e[1][2]
	i.el[2][2] = e[0][0]*e[1][1]*e[3][3] - e[0][0]*e[1][3]*e[3][1] - e[1][0]*e[0][1]*e[3][3] + e[1][0]*e[0][3]*e[3][1] + e[3][0]*e[0][1]*e[1][3] - e[3][0]*e[0][3]*e[1][1]
	i.el[3][2] = e[0][0]*e[1][2]*e[3][1] - e[0][0]*e[1][1]*e[3][2] + e[1][0]*e[0][1]*e[3][2] - e[1][0]*e[0][2]*e[3][1] - e[3][0]*e[0][1]*e[1][2] + e[3][0]*e[0][2]*e[1][1]
	i.el[0][3] = e[0][1]*e[1][3]*e[2][2] - e[0][1]*e[1][2]*e[2][3] + e[1][1]*e[0][2]*e[2][3] - e[1][1]*e[0][3]*e[2][2] - e[2][1]*e[0][2]*e[1][3] + e[2][1]*e[0][3]*e[1][2]
	i.el[1][3] = e[0][0]*e[1][2]*e[2][3] - e[0][0]*e[1][3]*e[2][2] - e[1][0]*e[0][2]*e[2][3] + e[1][0]*e[0][3]*e[2][2] + e[2][0]*e[0][2]*e[1][3] - e[2][0]*e[0][3]*e[1][2]
	i.el[2][3] = e[0][0]*e[1][3]*e[2][1] - e[0][0]*e[1][1]*e[2][3] + e[1][0]*e[0][1]*e[2][3] - e[1][0]*e[0][3]*e[2][1] - e[2][0]*e[0][1]*e[1][3] + e[2][0]*e[0][3]*e[1][1]
	i.el[3][3] = e[0][0]*e[1][1]*e[2][2] - e[0][0]*e[1][2]*e[2][1] - e[1][0]*e[0][1]*e[2][2] + e[1][0]*e[0][2]*e[2][1] + e[2][0]*e[0][1]*e[1][2] - e[2][0]*e[0][2]*e[1][1]
	// det holds the *reciprocal* of the determinant, computed by expanding
	// the first column of e against the first column of cofactors.
	det := 1.0 / (e[0][0]*i.el[0][0] + e[0][1]*i.el[1][0] + e[0][2]*i.el[2][0] + e[0][3]*i.el[3][0])
	for j := 0; j < 4; j++ {
		for k := 0; k < 4; k++ {
			i.el[j][k] *= det
		}
	}
	// Cache both directions: a's inverse is i and i's inverse is a.
	a.inv, i.inv = i, a
	return i
}
// At returns the element at the given (col, row) position.
// Note the indices are 1-based, not 0-based: At(1, 1) is the top-left entry.
func (a *Mtx) At(col, row int) float64 {
	return a.el[col-1][row-1]
}
func (a *Mtx) Transpose() *Mtx {
m := &Mtx{}
for col := 0; col < 4; col++ {
for row := 0; row < 4; row++ {
m.el[row][col] = a.el[col][row]
}
}
return m
} | pkg/geom/mtx.go | 0.827967 | 0.549036 | mtx.go | starcoder |
package eaopt
import (
"errors"
"math"
"math/rand"
)
// An oesPoint is a point that belongs to an OES instance.
type oesPoint struct {
	x     []float64 // current position, sampled around the OES center
	noise []float64 // standard-normal draws used to produce x (kept for the gradient estimate)
	oes   *OES      // owning optimizer; provides F, Mu and Sigma
}
// Evaluate simply returns the value of the point's current position.
// The error is always nil; the signature satisfies the Genome interface.
func (p *oesPoint) Evaluate() (float64, error) { return p.oes.F(p.x), nil }
// Mutate samples a fresh position around the current center, recording the
// normal noise used so the gradient can be estimated later.
func (p *oesPoint) Mutate(rng *rand.Rand) {
	for i := range p.oes.Mu {
		eps := rng.NormFloat64()
		p.noise[i] = eps
		p.x[i] = p.oes.Mu[i] + eps*p.oes.Sigma
	}
}
// Crossover doesn't do anything: OES relies purely on mutation (resampling
// around the center), and the GA is configured with ModMutationOnly.
func (p *oesPoint) Crossover(q Genome, rng *rand.Rand) {}
// Clone returns a deep copy of the point (the position and noise slices are
// copied; the owning OES is shared, not copied).
func (p oesPoint) Clone() Genome {
	return &oesPoint{
		x:     copyFloat64s(p.x),
		noise: copyFloat64s(p.noise),
		oes:   p.oes,
	}
}
// OES implements a simple version of the evolution strategy proposed by OpenAI.
// Reference: https://arxiv.org/abs/1703.03864
type OES struct {
	Sigma        float64                 // standard deviation of the sampling noise
	LearningRate float64                 // step size used when moving the center
	Mu           []float64               // current center of the search distribution
	F            func([]float64) float64 // function being minimized
	GA           *GA                     // underlying genetic algorithm driving the iterations
}
// newPoint returns a fresh oesPoint sampled around the current center.
// NOTE(review): the value receiver means each point stores a pointer to a
// *copy* of the OES; this appears to work because Mu is a slice whose
// backing array is shared with the original and is updated in place —
// confirm that Sigma and LearningRate are never mutated after construction.
func (oes OES) newPoint(rng *rand.Rand) Genome {
	var p = &oesPoint{
		x:     make([]float64, len(oes.Mu)),
		noise: make([]float64, len(oes.Mu)),
		oes:   &oes,
	}
	p.Mutate(rng)
	return p
}
// NewOES instantiates and returns a OES instance after having checked for
// input errors.
//
// nPoints is the population size (at least 3), nSteps the number of
// generations, sigma the standard deviation of the sampling noise and lr the
// learning rate used to move the center. If rng is nil a fresh source is used.
func NewOES(nPoints, nSteps uint, sigma, lr float64, parallel bool, rng *rand.Rand) (*OES, error) {
	// Check inputs
	if nPoints < 3 {
		return nil, errors.New("nPoints should be at least 3")
	}
	if lr <= 0 {
		return nil, errors.New("lr should be positive")
	}
	if sigma <= 0 {
		return nil, errors.New("sigma should be positive")
	}
	if rng == nil {
		rng = newRand()
	}
	// Instantiate a GA that drives the sampling/evaluation loop
	var ga, err = GAConfig{
		NPops:        1,
		PopSize:      nPoints,
		NGenerations: nSteps,
		HofSize:      1,
		Model: ModMutationOnly{
			Strict: false,
		},
		ParallelEval: parallel,
		RNG:          rand.New(rand.NewSource(rng.Int63())),
	}.NewGA()
	if err != nil {
		return nil, err
	}
	var oes = &OES{
		Sigma:        sigma,
		LearningRate: lr,
		GA:           ga,
	}
	// After each generation, estimate the natural gradient from the sampled
	// noise and the standardized fitnesses, then move the center.
	oes.GA.Callback = func(ga *GA) {
		// Retrieve the fitnesses
		indis := ga.Populations[0].Individuals
		fs := indis.getFitnesses()
		// Standardize the fitnesses
		m, s := meanFloat64s(fs), math.Sqrt(varianceFloat64s(fs))
		if s == 0 {
			// All fitnesses are identical: the gradient estimate is zero and
			// standardizing would divide by zero, so skip this update.
			return
		}
		for i, f := range fs {
			fs[i] = (f - m) / s
		}
		// Compute the natural gradient, one component per dimension.
		// BUG FIX: the gradient was previously collapsed into a single
		// scalar, which applied the identical update to every coordinate of
		// Mu; the OpenAI-ES estimator is per-parameter: g_j = sum_i f_i*eps_ij.
		grad := make([]float64, len(oes.Mu))
		for i, f := range fs {
			for j, eta := range indis[i].Genome.(*oesPoint).noise {
				grad[j] += f * eta
			}
		}
		// Move the central position (minimization, hence the subtraction)
		for i := range oes.Mu {
			oes.Mu[i] -= oes.LearningRate * grad[i] / (oes.Sigma * float64(len(fs)))
		}
	}
	return oes, nil
}
// NewDefaultOES calls NewOES with default values: 100 points, 30 steps,
// a sigma of 1, a learning rate of 0.1, sequential evaluation and a fresh
// random number generator.
func NewDefaultOES() (*OES, error) {
	return NewOES(100, 30, 1, 0.1, false, nil)
}
// Minimize finds the minimum of a given real-valued function.
//
// f is the function to minimize and x the starting center of the search
// distribution; the slice x is updated in place as the center moves. It
// returns the best observed position, its function value, and any error
// from the underlying genetic algorithm run.
func (oes *OES) Minimize(f func([]float64) float64, x []float64) ([]float64, float64, error) {
	// Set the function to minimize so that the points can access it
	oes.F = f
	oes.Mu = x
	// Run the genetic algorithm
	var err = oes.GA.Minimize(oes.newPoint)
	// Return the best obtained vector along with the associated function value
	var best = oes.GA.HallOfFame[0]
	return best.Genome.(*oesPoint).x, best.Fitness, err
}
package ast
// Package ast implements the Abstract Syntax Tree that represents the parsed
// source code before being passed on to the interpreter for evaluation.
import (
"bytes"
"fmt"
"strings"
"github.com/prologic/monkey-lang/token"
)
// Node defines an interface for all nodes in the AST.
type Node interface {
	// TokenLiteral returns the literal value of the token the node is
	// associated with; used mainly for debugging and testing.
	TokenLiteral() string
	// String renders the node back to (pseudo) source code.
	String() string
}

// Statement defines the interface for all statement nodes.
type Statement interface {
	Node
	// statementNode is a marker method distinguishing statements from
	// expressions at compile time.
	statementNode()
}

// Expression defines the interface for all expression nodes.
type Expression interface {
	Node
	// expressionNode is a marker method distinguishing expressions from
	// statements at compile time.
	expressionNode()
}
// Program is the root node. All programs consist of a slice of Statement(s)
type Program struct {
	Statements []Statement
}

// TokenLiteral returns the literal of the first statement's token, or the
// empty string for an empty program.
func (p *Program) TokenLiteral() string {
	if len(p.Statements) == 0 {
		return ""
	}
	return p.Statements[0].TokenLiteral()
}

// String returns a stringified version of the AST for debugging.
func (p *Program) String() string {
	var sb strings.Builder
	for _, stmt := range p.Statements {
		sb.WriteString(stmt.String())
	}
	return sb.String()
}
// Comment represents a source-code comment.
type Comment struct {
	Token token.Token // the token.COMMENT token
	Value string
}

func (c *Comment) statementNode() {}

// TokenLiteral prints the literal value of the token associated with this node
func (c *Comment) TokenLiteral() string { return c.Token.Literal }

// String returns a stringified version of the AST for debugging
func (c *Comment) String() string {
	return c.TokenLiteral() + " " + c.Value
}
// ReturnStatement represents the `return` statement node.
type ReturnStatement struct {
	Token       token.Token // the 'return' token
	ReturnValue Expression  // may be nil for a bare return
}

func (rs *ReturnStatement) statementNode() {}

// TokenLiteral prints the literal value of the token associated with this node
func (rs *ReturnStatement) TokenLiteral() string { return rs.Token.Literal }

// String returns a stringified version of the AST for debugging
func (rs *ReturnStatement) String() string {
	var sb strings.Builder
	sb.WriteString(rs.TokenLiteral())
	sb.WriteString(" ")
	if rs.ReturnValue != nil {
		sb.WriteString(rs.ReturnValue.String())
	}
	sb.WriteString(";")
	return sb.String()
}
// ExpressionStatement represents an expression used as a statement and
// wraps the underlying expression.
type ExpressionStatement struct {
	Token      token.Token // the first token of the expression
	Expression Expression
}

func (es *ExpressionStatement) statementNode() {}

// TokenLiteral prints the literal value of the token associated with this node
func (es *ExpressionStatement) TokenLiteral() string { return es.Token.Literal }

// String returns a stringified version of the AST for debugging
func (es *ExpressionStatement) String() string {
	if es.Expression == nil {
		return ""
	}
	return es.Expression.String()
}
// BlockStatement represents a brace-delimited block holding zero or more
// statements.
type BlockStatement struct {
	Token      token.Token // the { token
	Statements []Statement
}

func (bs *BlockStatement) statementNode() {}

// TokenLiteral prints the literal value of the token associated with this node
func (bs *BlockStatement) TokenLiteral() string { return bs.Token.Literal }

// String returns a stringified version of the AST for debugging
func (bs *BlockStatement) String() string {
	var sb strings.Builder
	for _, stmt := range bs.Statements {
		sb.WriteString(stmt.String())
	}
	return sb.String()
}
// Identifier represents an identifier and holds the name of the identifier
type Identifier struct {
	Token token.Token // the token.IDENT token
	Value string      // the identifier's name as written in the source
}

func (i *Identifier) expressionNode() {}

// TokenLiteral prints the literal value of the token associated with this node
func (i *Identifier) TokenLiteral() string { return i.Token.Literal }

// String returns a stringified version of the AST for debugging
func (i *Identifier) String() string { return i.Value }
// Null represents a null value
type Null struct {
	Token token.Token // the token for the null literal
}

func (n *Null) expressionNode() {}

// TokenLiteral prints the literal value of the token associated with this node
func (n *Null) TokenLiteral() string { return n.Token.Literal }

// String returns a stringified version of the AST for debugging
func (n *Null) String() string { return n.Token.Literal }
// Boolean represents a boolean value and holds the underlying boolean value
type Boolean struct {
	Token token.Token // the true/false token
	Value bool        // the parsed boolean value
}

func (b *Boolean) expressionNode() {}

// TokenLiteral prints the literal value of the token associated with this node
func (b *Boolean) TokenLiteral() string { return b.Token.Literal }

// String returns a stringified version of the AST for debugging
func (b *Boolean) String() string { return b.Token.Literal }
// IntegerLiteral represents a literal integer and holds an integer value
type IntegerLiteral struct {
	Token token.Token // the integer token
	Value int64       // the parsed integer value
}

func (il *IntegerLiteral) expressionNode() {}

// TokenLiteral prints the literal value of the token associated with this node
func (il *IntegerLiteral) TokenLiteral() string { return il.Token.Literal }

// String returns a stringified version of the AST for debugging
func (il *IntegerLiteral) String() string { return il.Token.Literal }
// StringLiteral represents a literal string and holds a string value
type StringLiteral struct {
	Token token.Token // the string token
	Value string      // the string's contents (without quotes)
}

func (sl *StringLiteral) expressionNode() {}

// TokenLiteral prints the literal value of the token associated with this node
func (sl *StringLiteral) TokenLiteral() string { return sl.Token.Literal }

// String returns a stringified version of the AST for debugging
func (sl *StringLiteral) String() string { return sl.Token.Literal }
// PrefixExpression represents a prefix expression and holds the operator
// as well as the right-hand side expression.
type PrefixExpression struct {
	Token    token.Token // The prefix token, e.g. !
	Operator string
	Right    Expression
}

func (pe *PrefixExpression) expressionNode() {}

// TokenLiteral prints the literal value of the token associated with this node
func (pe *PrefixExpression) TokenLiteral() string { return pe.Token.Literal }

// String returns a stringified version of the AST for debugging, e.g. "(!x)".
func (pe *PrefixExpression) String() string {
	return "(" + pe.Operator + pe.Right.String() + ")"
}
// InfixExpression represents an infix expression and holds the left-hand
// expression, operator and right-hand expression.
type InfixExpression struct {
	Token    token.Token // The operator token, e.g. +
	Left     Expression
	Operator string
	Right    Expression
}

func (ie *InfixExpression) expressionNode() {}

// TokenLiteral prints the literal value of the token associated with this node
func (ie *InfixExpression) TokenLiteral() string { return ie.Token.Literal }

// String returns a stringified version of the AST for debugging, e.g. "(a + b)".
func (ie *InfixExpression) String() string {
	return "(" + ie.Left.String() + " " + ie.Operator + " " + ie.Right.String() + ")"
}
// IfExpression represents an `if` expression and holds the condition,
// consequence and (optional) alternative blocks.
type IfExpression struct {
	Token       token.Token // The 'if' token
	Condition   Expression
	Consequence *BlockStatement
	Alternative *BlockStatement // nil when there is no else branch
}

func (ie *IfExpression) expressionNode() {}

// TokenLiteral prints the literal value of the token associated with this node
func (ie *IfExpression) TokenLiteral() string { return ie.Token.Literal }

// String returns a stringified version of the AST for debugging
func (ie *IfExpression) String() string {
	s := "if" + ie.Condition.String() + " " + ie.Consequence.String()
	if ie.Alternative != nil {
		s += "else " + ie.Alternative.String()
	}
	return s
}
// WhileExpression represents a `while` expression and holds the condition
// and the loop body.
type WhileExpression struct {
	Token       token.Token // The 'while' token
	Condition   Expression
	Consequence *BlockStatement
}

func (we *WhileExpression) expressionNode() {}

// TokenLiteral prints the literal value of the token associated with this node
func (we *WhileExpression) TokenLiteral() string { return we.Token.Literal }

// String returns a stringified version of the AST for debugging
func (we *WhileExpression) String() string {
	return "while" + we.Condition.String() + " " + we.Consequence.String()
}
// ImportExpression represents an `import` expression and holds the name
// of the module being imported.
type ImportExpression struct {
	Token token.Token // The 'import' token
	Name  Expression
}

func (ie *ImportExpression) expressionNode() {}

// TokenLiteral prints the literal value of the token associated with this node
func (ie *ImportExpression) TokenLiteral() string { return ie.Token.Literal }

// String returns a stringified version of the AST for debugging,
// e.g. `import("name")`.
func (ie *ImportExpression) String() string {
	var sb strings.Builder
	sb.WriteString(ie.TokenLiteral())
	sb.WriteString("(")
	fmt.Fprintf(&sb, "\"%s\"", ie.Name)
	sb.WriteString(")")
	return sb.String()
}
// FunctionLiteral represents a literal function and holds the function's
// name, formal parameters and body.
type FunctionLiteral struct {
	Token      token.Token // The 'fn' token
	Name       string
	Parameters []*Identifier
	Body       *BlockStatement
}

func (fl *FunctionLiteral) expressionNode() {}

// TokenLiteral prints the literal value of the token associated with this node
func (fl *FunctionLiteral) TokenLiteral() string { return fl.Token.Literal }

// String returns a stringified version of the AST for debugging
func (fl *FunctionLiteral) String() string {
	params := make([]string, 0, len(fl.Parameters))
	for _, p := range fl.Parameters {
		params = append(params, p.String())
	}
	return fmt.Sprintf("%s %s(%s) %s",
		fl.TokenLiteral(), fl.Name, strings.Join(params, ", "), fl.Body.String())
}
// CallExpression represents a call expression and holds the callee (an
// identifier or function literal) plus the argument expressions.
type CallExpression struct {
	Token     token.Token // The '(' token
	Function  Expression  // Identifier or FunctionLiteral
	Arguments []Expression
}

func (ce *CallExpression) expressionNode() {}

// TokenLiteral prints the literal value of the token associated with this node
func (ce *CallExpression) TokenLiteral() string { return ce.Token.Literal }

// String returns a stringified version of the AST for debugging
func (ce *CallExpression) String() string {
	args := make([]string, 0, len(ce.Arguments))
	for _, arg := range ce.Arguments {
		args = append(args, arg.String())
	}
	return ce.Function.String() + "(" + strings.Join(args, ", ") + ")"
}
// ArrayLiteral represents an array literal and holds its element expressions.
type ArrayLiteral struct {
	Token    token.Token // the '[' token
	Elements []Expression
}

func (al *ArrayLiteral) expressionNode() {}

// TokenLiteral prints the literal value of the token associated with this node
func (al *ArrayLiteral) TokenLiteral() string { return al.Token.Literal }

// String returns a stringified version of the AST for debugging
func (al *ArrayLiteral) String() string {
	elems := make([]string, 0, len(al.Elements))
	for _, el := range al.Elements {
		elems = append(elems, el.String())
	}
	return "[" + strings.Join(elems, ", ") + "]"
}
// BindExpression represents a binding expression of the form:
// x := 1
type BindExpression struct {
	Token token.Token // The := token
	Left Expression
	Value Expression
}

// expressionNode marks BindExpression as an expression AST node.
func (be *BindExpression) expressionNode() {}

// TokenLiteral prints the literal value of the token associated with this node
func (be *BindExpression) TokenLiteral() string { return be.Token.Literal }

// String returns a stringified version of the AST for debugging.
// Output is left-operator-value with no surrounding whitespace.
func (be *BindExpression) String() string {
	return be.Left.String() + be.TokenLiteral() + be.Value.String()
}
// AssignmentExpression represents an assignment expression of the form:
// x = 1 or xs[1] = 2
type AssignmentExpression struct {
	Token token.Token // The = token
	Left Expression
	Value Expression
}

// expressionNode marks AssignmentExpression as an expression AST node.
func (ae *AssignmentExpression) expressionNode() {}

// TokenLiteral prints the literal value of the token associated with this node
func (ae *AssignmentExpression) TokenLiteral() string { return ae.Token.Literal }

// String returns a stringified version of the AST for debugging.
// Output is left-operator-value with no surrounding whitespace.
func (ae *AssignmentExpression) String() string {
	return ae.Left.String() + ae.TokenLiteral() + ae.Value.String()
}
// IndexExpression represents an index operator expression, e.g: xs[2]
// and holds the left expression and index expression
type IndexExpression struct {
	Token token.Token // The [ token
	Left Expression
	Index Expression
}

// expressionNode marks IndexExpression as an expression AST node.
func (ie *IndexExpression) expressionNode() {}

// TokenLiteral prints the literal value of the token associated with this node
func (ie *IndexExpression) TokenLiteral() string { return ie.Token.Literal }

// String returns a stringified version of the AST for debugging, wrapping
// the whole expression in parentheses, e.g. "(xs[2])".
func (ie *IndexExpression) String() string {
	return "(" + ie.Left.String() + "[" + ie.Index.String() + "])"
}
// HashLiteral represents a hash map or dictionary literal, a set of
// key/value pairs.
type HashLiteral struct {
Token token.Token // the '{' token
Pairs map[Expression]Expression
}
func (hl *HashLiteral) expressionNode() {}
// TokenLiteral prints the literal value of the token associated with this node
func (hl *HashLiteral) TokenLiteral() string { return hl.Token.Literal }
// String returns a stringified version of the AST for debugging
func (hl *HashLiteral) String() string {
var out bytes.Buffer
pairs := []string{}
for key, value := range hl.Pairs {
pairs = append(pairs, key.String()+":"+value.String())
}
out.WriteString("{")
out.WriteString(strings.Join(pairs, ", "))
out.WriteString("}")
return out.String()
} | ast/ast.go | 0.862091 | 0.518302 | ast.go | starcoder |
package slices
import (
// "constraints"
"golang.org/x/exp/constraints"
"golang.org/x/exp/slices"
)
// Sort returns a sorted copy of `v` according to the comparison function
// `less`. The original slice is not modified.
func Sort[T any](v []T, less func(a, b T) bool) []T {
	r := make([]T, len(v))
	copy(r, v)
	slices.SortFunc(r, less)
	return r
}
// StableSort returns a sorted copy of `v` according to the comparison function
// `less`, preserving the original order of elements that compare equal. The
// original slice is not modified.
func StableSort[T any](v []T, less func(a, b T) bool) []T {
	r := make([]T, len(v))
	copy(r, v)
	slices.SortStableFunc(r, less)
	return r
}
// Uniq returns a copy of `v` where only the first occurrence of a value is
// preserved. The input must be a slice of comparable values, but does not have
// to be sorted.
func Uniq[T comparable](v []T) []T {
	seen := make(map[T]struct{}, len(v))
	var r []T
	for _, a := range v {
		if _, dup := seen[a]; dup {
			continue
		}
		seen[a] = struct{}{}
		r = append(r, a)
	}
	return r
}
// Min returns the minimum value of `v`. The type of `v` must be a slice with a
// value type that defines a strict ordered relationship. If `v` is empty, the
// function return the zero value of the underlying value type.
func Min[T constraints.Ordered](v []T) T {
	var min T
	if len(v) == 0 {
		return min
	}
	min = v[0]
	for _, a := range v[1:] {
		if a < min {
			min = a
		}
	}
	return min
}
// Max returns the maximum value of `v`. The type of `v` must be a slice with a
// value type that defines a strict ordered relationship. If `v` is empty, the
// function return the zero value of the underlying value type.
func Max[T constraints.Ordered](v []T) T {
	var max T
	if len(v) == 0 {
		return max
	}
	max = v[0]
	for _, a := range v[1:] {
		if a > max {
			max = a
		}
	}
	return max
}
// MinMax returns both the minimum and maximum value of `v`. The type of `v`
// must be a slice with a value type that defines a strict ordered relationship.
// If `v` is empty, the function return a pair of zero values for the underlying
// value type.
func MinMax[T constraints.Ordered](v []T) (T, T) {
	var min, max T
	if len(v) == 0 {
		return min, max
	}
	min, max = v[0], v[0]
	for _, a := range v[1:] {
		if a < min {
			min = a
		}
		if a > max {
			max = a
		}
	}
	return min, max
}
// ---
// SortBy returns a sorted copy of `v` to the natural sort order of the result
// of invoking `f` on each element. The original slice is not modified.
func SortBy[T any, U constraints.Ordered](v []T, f func(a T) U) []T {
	r := make([]T, len(v))
	copy(r, v)
	slices.SortFunc(r, func(a, b T) bool { return f(a) < f(b) })
	return r
}
// StableSortBy returns a sorted copy of `v` according to the natural sort order
// of the result of invoking `f` on each element, preserving the original order
// of elements that compare equal. The original slice is not modified.
func StableSortBy[T any, U constraints.Ordered](v []T, f func(a T) U) []T {
	r := make([]T, len(v))
	copy(r, v)
	slices.SortStableFunc(r, func(a, b T) bool { return f(a) < f(b) })
	return r
}
// UniqBy returns a copy of `v` where only the first occurrence of a value is
// preserved. Values are considered equal if the results of invoking `f` on them
// are equal. The input does not have to be sorted.
func UniqBy[T any, U comparable](v []T, f func(a T) U) []T {
	seen := make(map[U]struct{}, len(v))
	var r []T
	for _, a := range v {
		key := f(a)
		if _, dup := seen[key]; dup {
			continue
		}
		seen[key] = struct{}{}
		r = append(r, a)
	}
	return r
}
// MinBy returns the first element of `v` for which the result of invoking `f`
// yields the smallest value. If `v` is empty, the zero value of the underlying
// value type is returned.
func MinBy[T any, U constraints.Ordered](v []T, f func(a T) U) T {
	var min T
	if len(v) == 0 {
		return min
	}
	min = v[0]
	minv := f(v[0])
	for _, a := range v[1:] {
		if aa := f(a); aa < minv {
			min, minv = a, aa
		}
	}
	return min
}
// MaxBy returns the first element of `v` for which the result of invoking `f`
// yields the largest value. If `v` is empty, the zero value of the underlying
// value type is returned.
func MaxBy[T any, U constraints.Ordered](v []T, f func(a T) U) T {
	var max T
	if len(v) == 0 {
		return max
	}
	max = v[0]
	maxv := f(v[0])
	for _, a := range v[1:] {
		if aa := f(a); aa > maxv {
			max, maxv = a, aa
		}
	}
	return max
}
// MinMaxBy returns the first elements of `v` for which the result of invoking
// `f` yields the smallest and the largest values. If `v` is empty, zero values
// of the underlying value type are returned.
func MinMaxBy[T any, U constraints.Ordered](v []T, f func(a T) U) (T, T) {
var min, max T
var minv, maxv U
for i, a := range v {
var aa = f(a)
if i == 0 {
min = a
max = a
minv = aa
maxv = aa
}
if aa < minv {
min = a
minv = aa
}
if aa > maxv {
max = a
maxv = aa
}
}
return min, max
} | pkg/slices/ordered.go | 0.838614 | 0.583826 | ordered.go | starcoder |
package input
import (
"context"
"fmt"
"sync"
"sync/atomic"
"time"
"github.com/Jeffail/benthos/v3/lib/bloblang/x/mapping"
"github.com/Jeffail/benthos/v3/lib/input/reader"
"github.com/Jeffail/benthos/v3/lib/log"
"github.com/Jeffail/benthos/v3/lib/message"
"github.com/Jeffail/benthos/v3/lib/metrics"
"github.com/Jeffail/benthos/v3/lib/types"
"github.com/Jeffail/benthos/v3/lib/x/docs"
)
//------------------------------------------------------------------------------
// init registers the "bloblang" input type in the global Constructors table,
// wiring its constructor together with the documentation and field specs
// consumed by the config docs generator.
func init() {
	Constructors[TypeBloblang] = TypeSpec{
		constructor: func(conf Config, mgr types.Manager, log log.Modular, stats metrics.Type) (Type, error) {
			// Build the reader from the bloblang-specific config section and
			// wrap it in the generic async reader implementation.
			b, err := newBloblang(conf.Bloblang)
			if err != nil {
				return nil, err
			}
			return NewAsyncReader(TypeBloblang, true, b, log, stats)
		},
		Summary: `
BETA: This input is currently in a BETA stage and is therefore subject to
breaking configuration changes outside of major version releases.
Generates messages at a given interval using a [Bloblang](/docs/guides/bloblang/about)
mapping executed without a context. This allows you to generate messages for
testing your pipeline configs.`,
		FieldSpecs: docs.FieldSpecs{
			docs.FieldCommon(
				"mapping", "A [bloblang](/docs/guides/bloblang/about) mapping to use for generating messages.",
				`root = "hello world"`,
				`root = {"test":"message","id":uuid_v4()}`,
			),
			docs.FieldCommon("interval", "The time interval at which messages should be generated."),
			docs.FieldCommon("count", "An optional number of messages to generate, if set above 0 the specified number of messages is generated and then the input will shut down."),
		},
		Footnotes: `
## Examples
You can use Bloblang to generate payloads of differing structure at random:
` + "```yaml" + `
input:
  bloblang:
    mapping: |
      root = if random_int() % 2 == 0 {
        {
          "type": "foo",
          "foo": "is yummy"
        }
      } else {
        {
          "type": "bar",
          "bar": "is gross"
        }
      }
` + "```" + ``,
	}
}
//------------------------------------------------------------------------------
// BloblangConfig contains configuration for the Bloblang input type.
type BloblangConfig struct {
	// Mapping is the Bloblang mapping executed to produce each message.
	Mapping string `json:"mapping" yaml:"mapping"`
	// Interval is a duration string (e.g. "1s") controlling how often a
	// message is generated.
	Interval string `json:"interval" yaml:"interval"`
	// Count, when above zero, limits how many messages are generated before
	// the input shuts down; zero or below means unlimited.
	Count int `json:"count" yaml:"count"`
}

// NewBloblangConfig creates a new BloblangConfig with default values.
func NewBloblangConfig() BloblangConfig {
	return BloblangConfig{
		Mapping: "",
		Interval: "1s",
		Count: 0,
	}
}
// Bloblang executes a bloblang mapping with an empty context each time this
// input is read from. An interval period must be specified that determines how
// often a message is generated.
type Bloblang struct {
	// remaining counts how many more messages may be produced; a negative
	// value means unlimited. Accessed atomically.
	remaining int32

	// timerDuration is the period between generated messages.
	timerDuration time.Duration
	// exec is the compiled Bloblang mapping.
	exec *mapping.Executor

	// timerMut guards timer, which is nil until ConnectWithContext has been
	// called and again after CloseAsync.
	timerMut sync.Mutex
	timer *time.Ticker
}
// newBloblang creates a new bloblang input reader type from its config,
// parsing the interval duration and compiling the mapping up front.
func newBloblang(conf BloblangConfig) (*Bloblang, error) {
	interval, err := time.ParseDuration(conf.Interval)
	if err != nil {
		return nil, fmt.Errorf("failed to parse interval: %w", err)
	}

	exec, err := mapping.NewExecutor(conf.Mapping)
	if err != nil {
		return nil, fmt.Errorf("failed to parse bloblang mapping: %w", err)
	}

	// A non-positive count is normalised to -1, meaning unlimited.
	remaining := int32(conf.Count)
	if remaining < 1 {
		remaining = -1
	}

	return &Bloblang{
		timerDuration: interval,
		exec:          exec,
		remaining:     remaining,
	}, nil
}
// ConnectWithContext establishes a Bloblang reader by starting the ticker
// that paces message generation. Calling it again is a no-op while a ticker
// is already running.
func (b *Bloblang) ConnectWithContext(ctx context.Context) error {
	b.timerMut.Lock()
	defer b.timerMut.Unlock()
	if b.timer == nil {
		b.timer = time.NewTicker(b.timerDuration)
	}
	return nil
}
// ReadWithContext a new bloblang generated message.
//
// It waits for the next ticker interval (or context cancellation), then
// executes the mapping against an empty message. When a positive count was
// configured, types.ErrTypeClosed is returned once the budget is exhausted.
func (b *Bloblang) ReadWithContext(ctx context.Context) (types.Message, reader.AsyncAckFn, error) {
	b.timerMut.Lock()
	timer := b.timer
	b.timerMut.Unlock()
	if timer == nil {
		return nil, nil, types.ErrNotConnected
	}
	// Decrement the remaining-message budget; a negative value means
	// unlimited and is left untouched.
	if atomic.LoadInt32(&b.remaining) >= 0 {
		if atomic.AddInt32(&b.remaining, -1) < 0 {
			return nil, nil, types.ErrTypeClosed
		}
	}
	select {
	case _, open := <-timer.C:
		if !open {
			return nil, nil, types.ErrNotConnected
		}
	case <-ctx.Done():
		return nil, nil, types.ErrTimeout
	}
	p, err := b.exec.MapPart(0, message.New(nil))
	if err != nil {
		return nil, nil, err
	}
	// A nil part means the mapping produced no output for this tick;
	// presumably ErrTimeout causes the caller to retry — TODO confirm.
	if p == nil {
		return nil, nil, types.ErrTimeout
	}
	msg := message.New(nil)
	msg.Append(p)
	// Acknowledgements are a no-op: there is no upstream source to commit to.
	return msg, func(context.Context, types.Response) error { return nil }, nil
}
// CloseAsync shuts down the bloblang reader. It is safe to call multiple
// times and before ConnectWithContext.
func (b *Bloblang) CloseAsync() {
	b.timerMut.Lock()
	if b.timer != nil {
		// Guarding on nil fixes a nil-pointer panic in the original code,
		// which called b.timer.Stop() unconditionally and therefore crashed
		// when CloseAsync ran twice or before a successful connect.
		b.timer.Stop()
		b.timer = nil
	}
	b.timerMut.Unlock()
}
// WaitForClose blocks until the bloblang input has closed down.
// NOTE(review): this implementation returns immediately and ignores the
// timeout; shutdown appears to complete synchronously in CloseAsync.
func (b *Bloblang) WaitForClose(timeout time.Duration) error {
	return nil
}
package trie
import (
"bytes"
"encoding/binary"
"encoding/hex"
"fmt"
"math/big"
)
// ReadVarint reads a variable length number in big endian byte order
func ReadVarint(reader *bytes.Reader) (ret uint64) {
if reader.Len() == 8 {
var num uint64
binary.Read(reader, binary.BigEndian, &num)
ret = uint64(num)
} else if reader.Len() == 4 {
var num uint32
binary.Read(reader, binary.BigEndian, &num)
ret = uint64(num)
} else if reader.Len() == 2 {
var num uint16
binary.Read(reader, binary.BigEndian, &num)
ret = uint64(num)
} else {
var num uint8
binary.Read(reader, binary.BigEndian, &num)
ret = uint64(num)
}
return ret
}
// BinaryLength returns the true binary length of the given number: 0 for 0,
// 1 for 1..255, 2 for 256..65535, and so on. Negative inputs are measured
// on their unsigned two's-complement bit pattern; the original recursive
// version never terminated for them because an arithmetic right shift of a
// negative int is sign-extended and never reaches zero.
func BinaryLength(num int) int {
	length := 0
	for u := uint(num); u != 0; u >>= 8 {
		length++
	}
	return length
}
// CopyBytes returns an exact copy of the provided bytes. The result never
// aliases the input; a nil input yields an empty (non-nil) slice.
func CopyBytes(b []byte) []byte {
	out := make([]byte, len(b))
	copy(out, b)
	return out
}
// IsHex reports whether str looks like a 0x-prefixed hexadecimal string:
// at least four characters, even length, starting with "0x".
func IsHex(str string) bool {
	if len(str) < 4 || len(str)%2 != 0 {
		return false
	}
	return str[0] == '0' && str[1] == 'x'
}
// Bytes2Hex encodes d as a lowercase hexadecimal string.
func Bytes2Hex(d []byte) string {
	return hex.EncodeToString(d)
}

// Hex2Bytes decodes a hexadecimal string into bytes. The decode error is
// discarded, so malformed input yields a partial (possibly empty) result.
func Hex2Bytes(str string) []byte {
	h, _ := hex.DecodeString(str)
	return h
}
// Database is the key/value storage interface required by the trie.
type Database interface {
	// Put stores value under key.
	Put(key []byte, value []byte)
	// Get retrieves the value stored under key.
	Get(key []byte) ([]byte, error)
	//GetKeys() []*Key
	// Delete removes the value stored under key.
	Delete(key []byte) error
	// LastKnownTD returns the last known total difficulty bytes.
	LastKnownTD() []byte
	// Close releases the underlying store.
	Close()
	// Print dumps the store contents for debugging.
	Print()
}
// Decode TODO: Use a bytes.Buffer instead of a raw byte slice.
//
// Decode reads one RLP-style item from data starting at pos and returns the
// decoded value (a single byte, a []byte, or a []interface{} of nested
// items) together with the position just past the item.
func Decode(data []byte, pos uint64) (interface{}, uint64) {
	var slice []interface{}
	char := int(data[pos])
	switch {
	case char <= 0x7f:
		// A single byte in [0x00, 0x7f] is its own encoding.
		return data[pos], pos + 1
	case char <= 0xb7:
		// Short byte string: the length is encoded in the tag byte itself.
		b := uint64(data[pos]) - 0x80
		return data[pos+1 : pos+1+b], pos + 1 + b
	case char <= 0xbf:
		// Long byte string: the tag gives the width of the big-endian
		// length field that follows.
		b := uint64(data[pos]) - 0xb7
		b2 := ReadVarint(bytes.NewReader(data[pos+1 : pos+1+b]))
		return data[pos+1+b : pos+1+b+b2], pos + 1 + b + b2
	case char <= 0xf7:
		// Short list: total payload size is encoded in the tag byte.
		b := uint64(data[pos]) - 0xc0
		prevPos := pos
		pos++
		for i := uint64(0); i < b; {
			var obj interface{}
			// Get the next item in the data list and append it
			obj, prevPos = Decode(data, pos)
			slice = append(slice, obj)
			// Increment i by the amount bytes read in the previous
			// read
			i += (prevPos - pos)
			pos = prevPos
		}
		return slice, pos
	case char <= 0xff:
		// Long list: the tag gives the width of the length field, which in
		// turn gives the total payload size in bytes.
		l := uint64(data[pos]) - 0xf7
		b := ReadVarint(bytes.NewReader(data[pos+1 : pos+1+l]))
		pos = pos + l + 1
		prevPos := b
		for i := uint64(0); i < uint64(b); {
			var obj interface{}
			obj, prevPos = Decode(data, pos)
			slice = append(slice, obj)
			i += (prevPos - pos)
			pos = prevPos
		}
		return slice, pos
	default:
		// Unreachable: a byte value is always <= 0xff.
		panic(fmt.Sprintf("byte not supported: %q", char))
	}
}
// Encode serialises the given object into an RLP-style byte representation.
// Supported inputs: *Value (encoded via its raw value), all Go integer
// types (normalised through *big.Int), *big.Int, []byte, string, and
// []interface{} (encoded as a list). nil encodes as the empty-list marker
// 0xc0; unsupported types fall through the switch and yield an empty slice.
func Encode(object interface{}) []byte {
	var buff bytes.Buffer
	if object != nil {
		switch t := object.(type) {
		case *Value:
			buff.Write(Encode(t.Raw()))
		case int:
			buff.Write(Encode(big.NewInt(int64(t))))
		case uint:
			buff.Write(Encode(big.NewInt(int64(t))))
		case int8:
			buff.Write(Encode(big.NewInt(int64(t))))
		case int16:
			buff.Write(Encode(big.NewInt(int64(t))))
		case int32:
			buff.Write(Encode(big.NewInt(int64(t))))
		case int64:
			buff.Write(Encode(big.NewInt(t)))
		case uint16:
			buff.Write(Encode(big.NewInt(int64(t))))
		case uint32:
			buff.Write(Encode(big.NewInt(int64(t))))
		case uint64:
			// NOTE(review): values above MaxInt64 wrap negative in this
			// cast before the big.Int conversion — confirm intended.
			buff.Write(Encode(big.NewInt(int64(t))))
		case byte:
			buff.Write(Encode(big.NewInt(int64(t))))
		case *big.Int:
			if t == nil {
				// A nil *big.Int is treated like nil: empty-list marker.
				buff.WriteByte(0xc0)
			} else {
				buff.Write(Encode(t.Bytes()))
			}
		case []byte:
			if len(t) == 1 && t[0] <= 0x7f {
				// A single byte in [0x00, 0x7f] is its own encoding.
				buff.Write(t)
			} else if len(t) < 56 {
				// Short string: length fits in the tag byte (0x80 + len).
				buff.WriteByte(byte(len(t) + 0x80))
				buff.Write(t)
			} else {
				// Long string: 0xb7 + width-of-length, then the length,
				// then the payload.
				b := big.NewInt(int64(len(t)))
				buff.WriteByte(byte(len(b.Bytes()) + 0xb7))
				buff.Write(b.Bytes())
				buff.Write(t)
			}
		case string:
			buff.Write(Encode([]byte(t)))
		case []interface{}:
			// Inline function for writing the slice header
			WriteSliceHeader := func(length int) {
				if length < 56 {
					// Short list: 0xc0 + total payload length.
					buff.WriteByte(byte(length + 0xc0))
				} else {
					// Long list: 0xf7 + width-of-length, then the length.
					b := big.NewInt(int64(length))
					buff.WriteByte(byte(len(b.Bytes()) + 0xf7))
					buff.Write(b.Bytes())
				}
			}
			// Encode the items first so the header can carry their size.
			var b bytes.Buffer
			for _, val := range t {
				b.Write(Encode(val))
			}
			WriteSliceHeader(len(b.Bytes()))
			buff.Write(b.Bytes())
		}
	} else {
		// Empty list for nil
		buff.WriteByte(0xc0)
	}
	return buff.Bytes()
}
package topic
// source is the leaf Expr: it resolves to the set stored under a single
// topic key.
type source struct {
	key string
}

// condAnd combines a source expression with target expressions using set
// intersection (see condAnd.Exec).
type condAnd struct {
	source Expr
	targets []Expr
}

// condOr combines a source expression with target expressions using set
// union (see condOr.Exec).
type condOr struct {
	source Expr
	targets []Expr
}

// condNot combines a source expression with target expressions using set
// difference (see condNot.Exec).
type condNot struct {
	source Expr
	targets []Expr
}
// And returns an Expr combining source with all targets via intersection.
// Nested condAnd expressions on either side are flattened into one level.
func And(source Expr, targets ...Expr) Expr {
	if len(targets) == 0 {
		return source
	}
	var cond condAnd
	switch s := source.(type) {
	case condAnd:
		cond = s
	default:
		cond = condAnd{
			source: source,
		}
	}
	for _, target := range targets {
		switch t := target.(type) {
		case condAnd:
			// Flatten the nested expression by appending its source and
			// targets. The original code assigned the nested slice directly
			// to cond.targets, discarding every target accumulated in
			// earlier loop iterations.
			cond.targets = append(cond.targets, t.source)
			cond.targets = append(cond.targets, t.targets...)
		default:
			cond.targets = append(cond.targets, t)
		}
	}
	return cond
}
// Or returns an Expr combining source with all targets via union.
// Nested condOr expressions on either side are flattened into one level.
func Or(source Expr, targets ...Expr) Expr {
	if len(targets) == 0 {
		return source
	}
	var cond condOr
	switch s := source.(type) {
	case condOr:
		cond = s
	default:
		cond = condOr{
			source: source,
		}
	}
	for _, target := range targets {
		switch t := target.(type) {
		case condOr:
			// Flatten the nested expression by appending its source and
			// targets. The original code assigned the nested slice directly
			// to cond.targets, discarding every target accumulated in
			// earlier loop iterations.
			cond.targets = append(cond.targets, t.source)
			cond.targets = append(cond.targets, t.targets...)
		default:
			cond.targets = append(cond.targets, t)
		}
	}
	return cond
}
// Not returns an Expr combining source with all targets via set difference.
// Nested condNot expressions on either side are flattened into one level.
func Not(source Expr, targets ...Expr) Expr {
	if len(targets) == 0 {
		return source
	}
	var cond condNot
	switch s := source.(type) {
	case condNot:
		cond = s
	default:
		cond = condNot{
			source: source,
		}
	}
	for _, target := range targets {
		switch t := target.(type) {
		case condNot:
			// Flatten the nested expression by appending its source and
			// targets. The original code assigned the nested slice directly
			// to cond.targets, discarding every target accumulated in
			// earlier loop iterations.
			cond.targets = append(cond.targets, t.source)
			cond.targets = append(cond.targets, t.targets...)
		default:
			cond.targets = append(cond.targets, t)
		}
	}
	return cond
}
// Source returns Expr from topic.
func Source(key string) Expr {
	return source{key}
}

// And intersects this source with the given targets.
func (e source) And(targets ...Expr) Expr {
	return And(e, targets...)
}

// Or unions this source with the given targets.
func (e source) Or(targets ...Expr) Expr {
	return Or(e, targets...)
}

// Not subtracts the given targets from this source.
func (e source) Not(targets ...Expr) Expr {
	return Not(e, targets...)
}

// Exec resolves the source key against the pipeline and returns the
// resulting session key.
func (e source) Exec(p Pipeline) string {
	return p.Source(e.key)
}
// And intersects this expression with the given targets.
func (e condAnd) And(targets ...Expr) Expr { return And(e, targets...) }

// Or unions this expression with the given targets.
func (e condAnd) Or(targets ...Expr) Expr { return Or(e, targets...) }

// Not subtracts the given targets from this expression.
func (e condAnd) Not(targets ...Expr) Expr { return Not(e, targets...) }

// Exec evaluates the source and all targets, intersects their keys into a
// fresh session key via pipe.Inter, and returns that key.
func (e condAnd) Exec(pipe Pipeline) string {
	keys := []string{e.source.Exec(pipe)}
	for _, target := range e.targets {
		keys = append(keys, target.Exec(pipe))
	}
	dest := pipe.Session()
	pipe.Inter(dest, keys...)
	return dest
}
// And intersects this expression with the given targets.
func (e condOr) And(targets ...Expr) Expr { return And(e, targets...) }

// Or unions this expression with the given targets.
func (e condOr) Or(targets ...Expr) Expr { return Or(e, targets...) }

// Not subtracts the given targets from this expression.
func (e condOr) Not(targets ...Expr) Expr { return Not(e, targets...) }

// Exec evaluates the source and all targets, unions their keys into a
// fresh session key via pipe.Union, and returns that key.
func (e condOr) Exec(pipe Pipeline) string {
	keys := []string{e.source.Exec(pipe)}
	for _, target := range e.targets {
		keys = append(keys, target.Exec(pipe))
	}
	dest := pipe.Session()
	pipe.Union(dest, keys...)
	return dest
}
func (e condNot) And(targets ...Expr) Expr {
return And(e, targets...)
}
func (e condNot) Or(targets ...Expr) Expr {
return Or(e, targets...)
}
func (e condNot) Not(targets ...Expr) Expr {
return Not(e, targets...)
}
func (e condNot) Exec(pipe Pipeline) string {
keys := make([]string, 0, len(e.targets)+1)
keys = append(keys, e.source.Exec(pipe))
for _, target := range e.targets {
keys = append(keys, target.Exec(pipe))
}
dest := pipe.Session()
pipe.Diff(dest, keys...)
return dest
} | cond.go | 0.632503 | 0.456591 | cond.go | starcoder |
package utils
import (
"sync"
"github.com/rcrowley/go-metrics"
)
// UniformSample is a metric sample.
//
// NOTE(review): despite the name, Update keeps the most recent
// reservoirSize values in a circular buffer rather than a uniform random
// reservoir, so statistics are computed over a sliding window of recent
// values — confirm this is the intended semantics.
type UniformSample struct {
	count int64 // total number of values ever recorded
	mutex sync.Mutex // guards count and values
	reservoirSize int // maximum number of retained values
	values []int64 // retained values, at most reservoirSize of them
}

// NewUniformSample constructs a new uniform sample with the given reservoir
// size.
func NewUniformSample(reservoirSize int) metrics.Sample {
	return &UniformSample{
		reservoirSize: reservoirSize,
		values: make([]int64, 0, reservoirSize),
	}
}
// Clear clears all samples.
func (s *UniformSample) Clear() {
	s.mutex.Lock()
	defer s.mutex.Unlock()
	s.count = 0
	s.values = make([]int64, 0, s.reservoirSize)
}

// Count returns the number of samples recorded, which may exceed the
// reservoir size.
func (s *UniformSample) Count() int64 {
	s.mutex.Lock()
	defer s.mutex.Unlock()
	return s.count
}

// Max returns the maximum value in the sample, which may not be the maximum
// value ever to be part of the sample.
func (s *UniformSample) Max() int64 {
	s.mutex.Lock()
	defer s.mutex.Unlock()
	return metrics.SampleMax(s.values)
}

// Mean returns the mean of the values in the sample.
func (s *UniformSample) Mean() float64 {
	s.mutex.Lock()
	defer s.mutex.Unlock()
	return metrics.SampleMean(s.values)
}

// Min returns the minimum value in the sample, which may not be the minimum
// value ever to be part of the sample.
func (s *UniformSample) Min() int64 {
	s.mutex.Lock()
	defer s.mutex.Unlock()
	return metrics.SampleMin(s.values)
}

// Percentile returns an arbitrary percentile of values in the sample.
func (s *UniformSample) Percentile(p float64) float64 {
	s.mutex.Lock()
	defer s.mutex.Unlock()
	return metrics.SamplePercentile(s.values, p)
}

// Percentiles returns a slice of arbitrary percentiles of values in the
// sample.
func (s *UniformSample) Percentiles(ps []float64) []float64 {
	s.mutex.Lock()
	defer s.mutex.Unlock()
	return metrics.SamplePercentiles(s.values, ps)
}

// Size returns the size of the sample, which is at most the reservoir size.
func (s *UniformSample) Size() int {
	s.mutex.Lock()
	defer s.mutex.Unlock()
	return len(s.values)
}

// Snapshot returns a read-only copy of the sample.
// The values slice is copied so later updates cannot mutate the snapshot.
func (s *UniformSample) Snapshot() metrics.Sample {
	s.mutex.Lock()
	defer s.mutex.Unlock()
	values := make([]int64, len(s.values))
	copy(values, s.values)
	return metrics.NewSampleSnapshot(s.count, values)
}

// StdDev returns the standard deviation of the values in the sample.
func (s *UniformSample) StdDev() float64 {
	s.mutex.Lock()
	defer s.mutex.Unlock()
	return metrics.SampleStdDev(s.values)
}

// Sum returns the sum of the values in the sample.
func (s *UniformSample) Sum() int64 {
	s.mutex.Lock()
	defer s.mutex.Unlock()
	return metrics.SampleSum(s.values)
}
// Update samples a new value. Until the reservoir is full, values are
// appended; afterwards the oldest retained value is overwritten, keeping a
// sliding window of the most recent reservoirSize values.
func (s *UniformSample) Update(v int64) {
	s.mutex.Lock()
	defer s.mutex.Unlock()
	s.count++
	if len(s.values) < s.reservoirSize {
		s.values = append(s.values, v)
		return
	}
	// The n-th sample (1-based count) belongs in slot (n-1) % size. The
	// original used count % size after the increment, which skipped slot 0
	// for a full extra cycle and overwrote the second-oldest value instead
	// of the oldest.
	idx := (s.count - 1) % int64(s.reservoirSize)
	s.values[idx] = v
}
// Values returns a copy of the values in the sample; the result does not
// alias the internal buffer.
func (s *UniformSample) Values() []int64 {
	s.mutex.Lock()
	defer s.mutex.Unlock()
	out := make([]int64, len(s.values))
	copy(out, s.values)
	return out
}
// Variance returns the variance of the values in the sample.
func (s *UniformSample) Variance() float64 {
	s.mutex.Lock()
	defer s.mutex.Unlock()
	return metrics.SampleVariance(s.values)
}
package runner
import "io"
// Job is the struct that represents a Job to be executed in a Driver. Similar
// to the Runner it has an underlying io.Writer that will have progress of the
// Job execution written to it. Each Job will belong to a Stage, and will be
// executed in the order that Job was added to the Stage.
type Job struct {
io.Writer
errs []error // errors that occurred during Job execution.
canFail bool // canFail denotes if the job can fail, this is set when a
// Job is added to a Stage.
// Stage is the name of the Stage to which the Job belongs.
Stage string
// Name is the name of the Job.
Name string
// Commands is the list of the commands that should be executed in the
// Driver when the Job is executed.
Commands []string
// Artifacts is the Passthrough that denotes how Artifacts in the Driver
// should map to the host.
Artifacts Passthrough
// Status is the Status of the Job once it has completed execution.
Status Status
}
// jobStore is the struct that holds the jobs for a Stage. Jobs are stored in a
// map where the key is the Job's name. Order is maintained via a slice of the
// Job names. The field curr is an integer that points to the position in the
// order slice.
type jobStore struct {
	order []string // Job names in insertion order.
	curr int // index into order of the next Job to hand out.
	jobs map[string]*Job // Jobs keyed by name.
}
// isZero reports whether the Job is entirely unset, i.e. equivalent to the
// zero value of the struct.
// NOTE(review): Artifacts is only checked via its Values field being nil —
// confirm that is sufficient to identify a zero Passthrough.
func (j Job) isZero() bool {
	return j.Writer == nil &&
		len(j.errs) == 0 &&
		!j.canFail &&
		j.Stage == "" &&
		j.Name == "" &&
		len(j.Commands) == 0 &&
		j.Artifacts.Values == nil &&
		j.Status == Status(0)
}
// Failed marks the Job as failed, recording the given error when non-nil.
// The only errors that should be passed to this method should be errors
// pertaining to the functionality of the driver executing the job. Jobs
// flagged as canFail end up as PassedWithFailures instead of Failed.
func (j *Job) Failed(err error) {
	if err != nil {
		j.errs = append(j.errs, err)
	}
	j.Status = Failed
	if j.canFail {
		j.Status = PassedWithFailures
	}
}
func (j jobStore) len() int { return len(j.jobs) }
// next returns the next Job in the jobStore to be executed. This will
// increment the underlying curr field. If there is no Job to be executed
// then a false value is returned for the bool value.
func (s *jobStore) next() (*Job, bool) {
if s.curr >= len(s.order) {
return nil, false
}
j, ok := s.jobs[s.order[s.curr]]
s.curr++
return j, ok
}
func (s *jobStore) get(name string) (*Job, bool) {
j, ok := s.jobs[name]
return j, ok
}
func (s *jobStore) put(j *Job) {
if s.order == nil {
s.order = make([]string, 0)
}
if s.jobs == nil {
s.jobs = make(map[string]*Job)
}
s.order = append(s.order, j.Name)
s.jobs[j.Name] = j
} | runner/job.go | 0.663233 | 0.515559 | job.go | starcoder |
package physics
import (
// anonymous import for png decoder
_ "image/png"
"os"
"github.com/hajimehoshi/ebiten/v2"
"github.com/jtbonhomme/asteboids/internal/vector"
)
const (
	// defaultMaxVelocity is the default cap on agent velocity magnitude.
	defaultMaxVelocity float64 = 3.5
)

// Agent type identifiers used to tag physical bodies.
const (
	StarshipAgent string = "starship"
	AsteroidAgent string = "asteroid"
	RubbleAgent string = "rubble"
	BulletAgent string = "bullet"
	BoidAgent string = "boid"
)
// Position represents the coordinates (X, Y) of a physical body.
type Position struct {
	X float64
	Y float64
}
// Size represents the height (H) and width (W) of a physical body.
type Size struct {
	H float64
	W float64
}
// Physic is the interface implemented by every physical body in the game.
type Physic interface {
	// Draw draws the agent on screen.
	Draw(*ebiten.Image)
	// Update proceeds the agent state.
	Update()
	// Init initializes the physic body.
	Init(vector.Vector2D)
	// ID displays physic body unique ID.
	ID() string
	// String displays physic body information as a string.
	String() string
	// Intersect returns true if the physical body collide another one.
	// Collision is computed based on Axis-Aligned Bounding Boxes.
	// https://developer.mozilla.org/en-US/docs/Games/Techniques/2D_collision_detection
	Intersect(Physic) bool
	// IntersectMultiple checks if multiple physical bodies are colliding with the first
	IntersectMultiple(map[string]Physic) (string, bool)
	// Position returns physical body position.
	Position() vector.Vector2D
	// Dimension returns physical body dimension.
	Dimension() Size
	// Type returns physical body agent type as a string.
	Type() string
	// Explode proceeds the agent explosion and termination.
	Explode()
	// Velocity returns physical body velocity.
	Velocity() vector.Vector2D
	// Dump write out internal agent's state.
	Dump(*os.File) error
}
// AgentRegister is a function to register an agent.
type AgentRegister func(Physic)

// AgentUnregister is a function to unregister an agent.
type AgentUnregister func(string, string)

// TODO: change the two float64 parameters to a Position.
// AgentVision is a function used by agents to "see" around them.
type AgentVision func(float64, float64) []Physic
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.