code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1 value |
|---|---|---|---|---|---|
package schema
// GitoliteSchemaJSON is the content of the file "gitolite.schema.json": a
// draft-07 JSON Schema describing the site configuration for a connection to
// a Gitolite code host. It is embedded as a Go constant so the schema ships
// with the binary; literal backtick characters inside the schema are spliced
// in via string concatenation because Go raw string literals cannot contain
// backticks.
const GitoliteSchemaJSON = `{
"$schema": "http://json-schema.org/draft-07/schema#",
"$id": "gitolite.schema.json#",
"title": "GitoliteConnection",
"description": "Configuration for a connection to Gitolite.",
"allowComments": true,
"type": "object",
"additionalProperties": false,
"required": ["prefix", "host"],
"properties": {
"prefix": {
"description": "Repository name prefix that will map to this Gitolite host. This should likely end with a trailing slash. E.g., \"gitolite.example.com/\".\n\nIt is important that the Sourcegraph repository name generated with this prefix be unique to this code host. If different code hosts generate repository names that collide, Sourcegraph's behavior is undefined.",
"not": {
"type": "string",
"pattern": "example\\.com"
},
"type": "string",
"examples": ["gitolite.example.com/"]
},
"host": {
"description": "Gitolite host that stores the repositories (e.g., git<EMAIL>, ssh://git@gitolite.example.com:2222/).",
"not": {
"type": "string",
"pattern": "example\\.com"
},
"type": "string",
"examples": ["git<EMAIL>", "ssh://git@gitolite.example.com:2222/"]
},
"blacklist": {
"description": "Regular expression to filter repositories from auto-discovery, so they will not get cloned automatically.",
"type": "string",
"format": "regex"
},
"exclude": {
"description": "A list of repositories to never mirror from this Gitolite instance. Supports excluding by exact name ({\"name\": \"foo\"}).",
"type": "array",
"minItems": 1,
"items": {
"type": "object",
"title": "ExcludedGitoliteRepo",
"additionalProperties": false,
"anyOf": [{ "required": ["name"] }],
"properties": {
"name": {
"description": "The name of a Gitolite repo (\"my-repo\") to exclude from mirroring.",
"type": "string",
"minLength": 1
}
}
},
"examples": [[{ "name": "myrepo" }]]
},
"phabricatorMetadataCommand": {
"description": "This is DEPRECATED. Use the ` + "`" + `phabricator` + "`" + ` field instead.",
"type": "string"
},
"phabricator": {
"description": "Phabricator instance that integrates with this Gitolite instance",
"type": "object",
"required": ["url", "callsignCommand"],
"additionalProperties": false,
"properties": {
"url": {
"description": "URL of the Phabricator instance that integrates with this Gitolite instance. This should be set ",
"type": "string",
"format": "uri"
},
"callsignCommand": {
"description": " Bash command that prints out the Phabricator callsign for a Gitolite repository. This will be run with environment variable $REPO set to the name of the repository and used to obtain the Phabricator metadata for a Gitolite repository. (Note: this requires ` + "`" + `bash` + "`" + ` to be installed.)",
"type": "string"
}
}
}
}
}
` | schema/gitolite_stringdata.go | 0.766294 | 0.423637 | gitolite_stringdata.go | starcoder |
package datadog
import (
"encoding/json"
"fmt"
)
// LogsGroupByHistogram Used to perform a histogram computation (only for measure facets). Note: At most 100 buckets are allowed, the number of buckets is (max - min)/interval.
type LogsGroupByHistogram struct {
// The bin size of the histogram buckets
Interval float64 `json:"interval"`
// The maximum value for the measure used in the histogram (values greater than this one are filtered out)
Max float64 `json:"max"`
// The minimum value for the measure used in the histogram (values smaller than this one are filtered out)
Min float64 `json:"min"`
// UnparsedObject contains the raw value of the object if there was an error when deserializing into the struct
UnparsedObject map[string]interface{} `json:-`
}
// NewLogsGroupByHistogram instantiates a new LogsGroupByHistogram object.
// It makes sure all properties required by the API are set; the argument
// list will change if the set of required properties changes.
func NewLogsGroupByHistogram(interval float64, max float64, min float64) *LogsGroupByHistogram {
	return &LogsGroupByHistogram{
		Interval: interval,
		Max:      max,
		Min:      min,
	}
}

// NewLogsGroupByHistogramWithDefaults instantiates a new LogsGroupByHistogram
// object with only default values assigned; it does not guarantee that
// properties required by the API are set.
func NewLogsGroupByHistogramWithDefaults() *LogsGroupByHistogram {
	return &LogsGroupByHistogram{}
}
// GetInterval returns the Interval field value.
// A nil receiver yields the zero value.
func (o *LogsGroupByHistogram) GetInterval() float64 {
	if o == nil {
		return 0
	}
	return o.Interval
}

// GetIntervalOk returns a pointer to the Interval field value and a boolean
// indicating whether the value is available (false for a nil receiver).
func (o *LogsGroupByHistogram) GetIntervalOk() (*float64, bool) {
	if o != nil {
		return &o.Interval, true
	}
	return nil, false
}

// SetInterval sets the Interval field value.
func (o *LogsGroupByHistogram) SetInterval(v float64) {
	o.Interval = v
}

// GetMax returns the Max field value.
// A nil receiver yields the zero value.
func (o *LogsGroupByHistogram) GetMax() float64 {
	if o == nil {
		return 0
	}
	return o.Max
}

// GetMaxOk returns a pointer to the Max field value and a boolean indicating
// whether the value is available (false for a nil receiver).
func (o *LogsGroupByHistogram) GetMaxOk() (*float64, bool) {
	if o != nil {
		return &o.Max, true
	}
	return nil, false
}

// SetMax sets the Max field value.
func (o *LogsGroupByHistogram) SetMax(v float64) {
	o.Max = v
}

// GetMin returns the Min field value.
// A nil receiver yields the zero value.
func (o *LogsGroupByHistogram) GetMin() float64 {
	if o == nil {
		return 0
	}
	return o.Min
}

// GetMinOk returns a pointer to the Min field value and a boolean indicating
// whether the value is available (false for a nil receiver).
func (o *LogsGroupByHistogram) GetMinOk() (*float64, bool) {
	if o != nil {
		return &o.Min, true
	}
	return nil, false
}

// SetMin sets the Min field value.
func (o *LogsGroupByHistogram) SetMin(v float64) {
	o.Min = v
}
// MarshalJSON serializes the histogram. When a raw unparsed object was
// captured during a failed deserialization, that raw value is re-emitted
// verbatim; otherwise the three (always required) typed fields are
// serialized. The generated `if true { ... }` wrappers around each field
// were dead scaffolding and have been removed.
func (o LogsGroupByHistogram) MarshalJSON() ([]byte, error) {
	if o.UnparsedObject != nil {
		return json.Marshal(o.UnparsedObject)
	}
	return json.Marshal(map[string]interface{}{
		"interval": o.Interval,
		"max":      o.Max,
		"min":      o.Min,
	})
}
func (o *LogsGroupByHistogram) UnmarshalJSON(bytes []byte) (err error) {
raw := map[string]interface{}{}
required := struct {
Interval *float64 `json:"interval"`
Max *float64 `json:"max"`
Min *float64 `json:"min"`
}{}
all := struct {
Interval float64 `json:"interval"`
Max float64 `json:"max"`
Min float64 `json:"min"`
}{}
err = json.Unmarshal(bytes, &required)
if err != nil {
return err
}
if required.Interval == nil {
return fmt.Errorf("Required field interval missing")
}
if required.Max == nil {
return fmt.Errorf("Required field max missing")
}
if required.Min == nil {
return fmt.Errorf("Required field min missing")
}
err = json.Unmarshal(bytes, &all)
if err != nil {
err = json.Unmarshal(bytes, &raw)
if err != nil {
return err
}
o.UnparsedObject = raw
return nil
}
o.Interval = all.Interval
o.Max = all.Max
o.Min = all.Min
return nil
} | api/v2/datadog/model_logs_group_by_histogram.go | 0.748352 | 0.475118 | model_logs_group_by_histogram.go | starcoder |
package transform
import (
"fmt"
"github.com/RoaringBitmap/roaring/roaring64"
"github.com/golang/protobuf/proto"
pbbstream "github.com/streamingfast/pbgo/sf/bstream/v1"
)
// blockIndex is a generic index recording at which block heights certain
// keys exist.
type blockIndex struct {
	// kv maps each key of interest to the bitmap of block numbers where it
	// was seen (populated via add, serialized via marshal/unmarshal)
	kv map[string]*roaring64.Bitmap
	// lowBlockNum is the lower (inclusive) bound of the current index
	lowBlockNum uint64
	// indexSize is the distance between upper and lower bounds of this BlockIndex
	// thus, the index's exclusive upper bound is determined with lowBlockNum + indexSize
	indexSize uint64
}
// NewBlockIndex initializes and returns a new blockIndex covering the block
// range [lowBlockNum, lowBlockNum+indexSize).
func NewBlockIndex(lowBlockNum, indexSize uint64) *blockIndex {
	idx := new(blockIndex)
	idx.lowBlockNum = lowBlockNum
	idx.indexSize = indexSize
	idx.kv = map[string]*roaring64.Bitmap{}
	return idx
}

// Get returns the bitmap stored under key, or nil when the key is unknown.
func (i *blockIndex) Get(key string) *roaring64.Bitmap {
	return i.kv[key]
}
// marshal serializes the index's key/bitmap map into its protocol buffer
// representation.
func (i *blockIndex) marshal() ([]byte, error) {
	out := &pbbstream.GenericBlockIndex{}
	for key, bitmap := range i.kv {
		serialized, err := bitmap.ToBytes()
		if err != nil {
			return nil, err
		}
		entry := &pbbstream.KeyToBitmap{
			Key:    []byte(key),
			Bitmap: serialized,
		}
		out.Kv = append(out.Kv, entry)
	}
	return proto.Marshal(out)
}
// unmarshal populates the index from the protocol buffer representation
// produced by marshal. A nil kv map is lazily allocated, so unmarshal may be
// called on a zero-valued blockIndex. Fixes the "coudln't" typo in the
// bitmap error message and wraps underlying errors with %w so callers can
// inspect them via errors.Is/As.
func (i *blockIndex) unmarshal(in []byte) error {
	pbIndex := &pbbstream.GenericBlockIndex{}
	if i.kv == nil {
		i.kv = make(map[string]*roaring64.Bitmap)
	}
	if err := proto.Unmarshal(in, pbIndex); err != nil {
		return fmt.Errorf("couldn't unmarshal GenericBlockIndex: %w", err)
	}
	for _, data := range pbIndex.Kv {
		r64 := roaring64.NewBitmap()
		if err := r64.UnmarshalBinary(data.Bitmap); err != nil {
			return fmt.Errorf("couldn't unmarshal kv bitmap: %w", err)
		}
		i.kv[string(data.Key)] = r64
	}
	return nil
}
// add will append the given blockNum to the bitmap identified by the given key
func (i *blockIndex) add(key string, blocknum uint64) {
bitmap, ok := i.kv[key]
if !ok {
i.kv[key] = roaring64.BitmapOf(blocknum)
return
}
bitmap.Add(blocknum)
} | transform/block_index.go | 0.670177 | 0.529081 | block_index.go | starcoder |
package core
import (
"fmt"
b3 "github.com/magicsea/behavior3go"
"github.com/magicsea/behavior3go/config"
)
/**
* The BehaviorTree class, as the name implies, represents the Behavior Tree
* structure.
*
* There are two ways to construct a Behavior Tree: by manually setting the
* root node, or by loading it from a data structure (which can be loaded
* from a JSON). Both methods are shown in the examples below and better
* explained in the user guide.
*
* The tick method must be called periodically, in order to send the tick
* signal to all nodes in the tree, starting from the root. The method
* `BehaviorTree.tick` receives a target object and a blackboard as
* parameters. The target object can be anything: a game agent, a system, a
* DOM object, etc. This target is not used by any piece of Behavior3JS,
* i.e., the target object will only be used by custom nodes.
*
* The blackboard is obligatory and must be an instance of `Blackboard`. This
* requirement is necessary due to the fact that neither `BehaviorTree` or
* any node will store the execution variables in its own object (e.g., the
* BT does not store the target, information about opened nodes or number of
* times the tree was called). But because of this, you only need a single
* tree instance to control multiple (maybe hundreds) objects.
*
* Manual construction of a Behavior Tree
* --------------------------------------
*
* var tree = new b3.BehaviorTree();
*
* tree.root = new b3.Sequence({children:[
* new b3.Priority({children:[
* new MyCustomNode(),
* new MyCustomNode()
* ]}),
* ...
* ]});
*
*
* Loading a Behavior Tree from data structure
* -------------------------------------------
*
* var tree = new b3.BehaviorTree();
*
* tree.load({
* 'title' : 'Behavior Tree title'
* 'description' : 'My description'
* 'root' : 'node-id-1'
* 'nodes' : {
* 'node-id-1' : {
* 'name' : 'Priority', // this is the node type
* 'title' : 'Root Node',
* 'description' : 'Description',
* 'children' : ['node-id-2', 'node-id-3'],
* },
* ...
* }
* })
*
*
* @module b3
* @class BehaviorTree
**/
type BehaviorTree struct {
	/**
	 * The tree id, must be unique. By default, created with `b3.createUUID`.
	 * @property {String} id
	 * @readOnly
	 **/
	id string
	/**
	 * The tree title.
	 * @property {String} title
	 * @readonly
	 **/
	title string
	/**
	 * Description of the tree.
	 * @property {String} description
	 * @readonly
	 **/
	description string
	/**
	 * A dictionary with (key-value) properties. Useful to define custom
	 * variables in the visual editor.
	 *
	 * @property {Object} properties
	 * @readonly
	 **/
	properties map[string]interface{}
	/**
	 * The reference to the root node. Must be an instance of `b3.BaseNode`.
	 * @property {BaseNode} root
	 **/
	root IBaseNode
	/**
	 * The reference to the debug instance, propagated to every Tick.
	 * @property {Object} debug
	 **/
	debug interface{}
	// dumpInfo retains the raw configuration passed to Load so that dump
	// can return it verbatim.
	dumpInfo *config.BTTreeCfg
}
// NewBeTree allocates a BehaviorTree and initializes it with default values.
func NewBeTree() *BehaviorTree {
	t := &BehaviorTree{}
	t.Initialize()
	return t
}

// Initialize resets the tree to its default state: a fresh UUID, default
// title and description, an empty properties map, and no root or debug
// object. It acts as the constructor.
func (this *BehaviorTree) Initialize() {
	this.id = b3.CreateUUID()
	this.title = "The behavior tree"
	this.description = "Default description"
	this.properties = map[string]interface{}{}
	this.root = nil
	this.debug = nil
}

// GetID returns the unique identifier of this tree.
func (this *BehaviorTree) GetID() string {
	return this.id
}

// SetDebug stores the debug object that Tick attaches to every tick.
func (this *BehaviorTree) SetDebug(debug interface{}) {
	this.debug = debug
}
// Load populates this tree from a data structure (typically parsed from a
// Behavior3 JSON export). Node implementations are resolved by name:
// extMaps (custom, user-registered nodes) is consulted first, then maps
// (built-in nodes). Load panics when a node name cannot be resolved.
//
// Loading happens in two passes: first every node is instantiated and
// initialized from its spec, then composite/decorator nodes are wired to
// their children and the root reference is set.
func (this *BehaviorTree) Load(data *config.BTTreeCfg, maps *b3.RegisterStructMaps, extMaps *b3.RegisterStructMaps) {
	this.title = data.Title
	this.description = data.Description
	this.properties = data.Properties
	// Keep the raw config around so dump() can return it.
	this.dumpInfo = data
	nodes := make(map[string]IBaseNode)
	// Pass 1: create the node list (without connections between them).
	for id, s := range data.Nodes {
		// NOTE(review): spec points at the loop variable s; before Go 1.22
		// that variable is reused across iterations, so if node.Initialize
		// retains the pointer, every node would end up sharing the final
		// spec — confirm Initialize copies what it needs.
		spec := &s
		var node IBaseNode
		if extMaps != nil && extMaps.CheckElem(spec.Name) {
			// Look for the name in custom nodes first.
			if tnode, err := extMaps.New(spec.Name); err == nil {
				node = tnode.(IBaseNode)
			}
		} else {
			if tnode, err2 := maps.New(spec.Name); err2 == nil {
				node = tnode.(IBaseNode)
			} else {
				fmt.Println("new ", spec.Name, " err:", err2)
			}
		}
		if node == nil {
			// Invalid node name: abort loading.
			panic("BehaviorTree.load: Invalid node name:" + spec.Name + ",title:" + spec.Title)
		}
		node.Ctor()
		node.Initialize(spec)
		node.SetBaseNodeWorker(node.(IBaseWorker))
		nodes[id] = node
	}
	// Pass 2: connect the nodes now that all of them exist.
	for id, spec := range data.Nodes {
		node := nodes[id]
		if node.GetCategory() == b3.COMPOSITE && spec.Children != nil {
			for i := 0; i < len(spec.Children); i++ {
				var cid = spec.Children[i]
				comp := node.(IComposite)
				comp.AddChild(nodes[cid])
			}
		} else if node.GetCategory() == b3.DECORATOR && len(spec.Child) > 0 {
			dec := node.(IDecorator)
			dec.SetChild(nodes[spec.Child])
		}
	}
	this.root = nodes[data.Root]
}
// dump returns the raw configuration this tree was loaded from (captured by
// Load into dumpInfo).
//
// Note: this method does not record current node parameters, so the result
// may not round-trip through Load exactly.
func (this *BehaviorTree) dump() *config.BTTreeCfg {
	return this.dumpInfo
}
// Tick propagates the tick signal through the tree, starting from the root,
// and returns the status produced by the root node.
//
// target may be any object meaningful to custom nodes; the tree itself does
// not interpret it. blackboard is mandatory and stores all execution state
// (e.g. the list of nodes left open by the previous tick), which is what
// allows a single tree instance to drive many agents. Tick panics when
// blackboard is nil.
//
// Nodes that were open after the previous tick but were not visited again
// during this tick are closed automatically, deepest first.
func (this *BehaviorTree) Tick(target interface{}, blackboard *Blackboard) b3.Status {
	if blackboard == nil {
		panic("The blackboard parameter is obligatory and must be an instance of b3.Blackboard")
	}

	/* CREATE A TICK OBJECT */
	var tick = NewTick()
	tick.debug = this.debug
	tick.target = target
	tick.Blackboard = blackboard
	tick.tree = this

	/* TICK NODE */
	var state = this.root._execute(tick)

	/* CLOSE NODES FROM LAST TICK, IF NEEDED */
	var lastOpenNodes = blackboard._getTreeData(this.id).OpenNodes
	// Snapshot the nodes opened during this tick. The previous code called
	// copy(tick._openNodes, currOpenNodes) with swapped arguments and a nil
	// source, which copied nothing: currOpenNodes stayed empty, so every
	// previously open node was closed on each tick.
	currOpenNodes := make([]IBaseNode, len(tick._openNodes))
	copy(currOpenNodes, tick._openNodes)

	// Do not close nodes that are still open in this tick: find where the
	// previous and current open-node lists diverge.
	var start = 0
	for i := 0; i < b3.MinInt(len(lastOpenNodes), len(currOpenNodes)); i++ {
		start = i + 1
		if lastOpenNodes[i] != currOpenNodes[i] {
			break
		}
	}

	// Close the no-longer-open nodes, deepest first.
	for i := len(lastOpenNodes) - 1; i >= start; i-- {
		lastOpenNodes[i]._close(tick)
	}

	/* POPULATE BLACKBOARD */
	blackboard._getTreeData(this.id).OpenNodes = currOpenNodes
	blackboard.SetTree("nodeCount", tick._nodeCount, this.id)

	return state
}
func (this *BehaviorTree) Print() {
printNode(this.root, 0)
}
func printNode(root IBaseNode, blk int) {
//fmt.Println("new node:", root.Name, " children:", len(root.Children), " child:", root.Child)
for i := 0; i < blk; i++ {
fmt.Print(" ") //缩进
}
//fmt.Println("|—<", root.Name, ">") //打印"|—<id>"形式
fmt.Print("|—", root.GetTitle())
if root.GetCategory() == b3.DECORATOR {
dec := root.(IDecorator)
if dec.GetChild() != nil {
//fmt.Print("=>")
printNode(dec.GetChild(), blk+3)
}
}
fmt.Println("")
if root.GetCategory() == b3.COMPOSITE {
comp := root.(IComposite)
if comp.GetChildCount() > 0 {
for i := 0; i < comp.GetChildCount(); i++ {
printNode(comp.GetChild(i), blk+3)
}
}
}
} | lab051/lab001/vendor/github.com/magicsea/behavior3go/core/BehaviorTree.go | 0.79053 | 0.510313 | BehaviorTree.go | starcoder |
package analyzer
import (
"github.com/Sndav/go-mysql-server/sql"
"github.com/Sndav/go-mysql-server/sql/expression"
"github.com/Sndav/go-mysql-server/sql/plan"
)
// applyIndexesForSubqueryComparisons converts a `Filter(id = (SELECT ...),
// Child)` or a `Filter(id in (SELECT ...), Child)` to be iterated lookups on
// the Child instead. This analysis phase is currently very concrete. It only
// applies when:
// 1. There is a single `=` or `IN` expression in the Filter.
// 2. The Subquery is on the right hand side of the expression.
// 3. The left hand side is a GetField expression against the Child.
// 4. The Child is a *plan.ResolvedTable.
// 5. The referenced field in the Child is indexed.
func applyIndexesForSubqueryComparisons(ctx *sql.Context, a *Analyzer, n sql.Node, scope *Scope) (sql.Node, error) {
	// Table aliases are needed to normalize the filter expression before
	// matching it against index expressions.
	aliases, err := getTableAliases(n, scope)
	if err != nil {
		return nil, err
	}
	return plan.TransformUp(n, func(node sql.Node) (sql.Node, error) {
		switch node := node.(type) {
		case *plan.Filter:
			// Try the two supported filter shapes; the helper returns nil
			// when any precondition fails, leaving the node untouched.
			var replacement sql.Node
			if eq, isEqual := node.Expression.(*expression.Equals); isEqual {
				replacement = getIndexedInSubqueryFilter(ctx, a, eq.Left(), eq.Right(), node, true, scope, aliases)
			} else if is, isInSubquery := node.Expression.(*plan.InSubquery); isInSubquery {
				replacement = getIndexedInSubqueryFilter(ctx, a, is.Left, is.Right, node, false, scope, aliases)
			}
			if replacement != nil {
				return replacement, nil
			}
		}
		return node, nil
	})
}
// getIndexedInSubqueryFilter checks the preconditions listed on
// applyIndexesForSubqueryComparisons and, when they all hold, returns an
// IndexedInSubqueryFilter plan node replacing the given Filter; otherwise it
// returns nil. equals distinguishes `=` (true) from `IN` (false).
func getIndexedInSubqueryFilter(ctx *sql.Context, a *Analyzer, left, right sql.Expression, node *plan.Filter, equals bool, scope *Scope, tableAliases TableAliases) sql.Node {
	gf, isGetField := left.(*expression.GetField)
	subq, isSubquery := right.(*plan.Subquery)
	rt, isResolved := node.Child.(*plan.ResolvedTable)
	if !isGetField || !isSubquery || !isResolved {
		return nil
	}
	// The rewrite is only safe when the subquery does not reference columns
	// of the child row (i.e. it is not correlated with the filtered table).
	referencesChildRow := nodeHasGetFieldReferenceBetween(subq.Query, len(scope.Schema()), len(scope.Schema())+len(node.Child.Schema()))
	if referencesChildRow {
		return nil
	}
	indexes, err := getIndexesForNode(ctx, a, rt)
	if err != nil {
		return nil
	}
	// NOTE(review): the deferred release runs before callers use the
	// returned plan, yet idx is embedded in the IndexedTableAccess below —
	// confirm releaseUsedIndexes does not invalidate indexes still
	// referenced by the plan.
	defer indexes.releaseUsedIndexes()
	idx := indexes.IndexByExpression(ctx, ctx.GetCurrentDatabase(), normalizeExpressions(ctx, tableAliases, gf)...)
	if idx == nil {
		return nil
	}
	// Rebase the key expression to field index 0 — presumably the runtime
	// lookup row contains only the key value; confirm against
	// IndexedTableAccess semantics.
	keyExpr := gf.WithIndex(0)
	ita := plan.NewIndexedTableAccess(rt, idx, []sql.Expression{keyExpr})
	return plan.NewIndexedInSubqueryFilter(subq, ita, len(node.Child.Schema()), gf, equals)
}
// nodeHasGetFieldReferenceBetween returns `true` if the given sql.Node has a
// GetField expression anywhere within the tree that references an index in the
// range [low, high).
func nodeHasGetFieldReferenceBetween(n sql.Node, low, high int) bool {
var found bool
plan.Inspect(n, func(n sql.Node) bool {
if er, ok := n.(sql.Expressioner); ok {
for _, e := range er.Expressions() {
if expressionHasGetFieldReferenceBetween(e, low, high) {
found = true
return false
}
}
}
// TODO: Descend into *plan.Subquery?
_, ok := n.(*plan.IndexedInSubqueryFilter)
return !ok
})
return found
}
// expressionHasGetFieldReferenceBetween returns `true` if the given sql.Expression
// has a GetField expression within it that references an index in the range
// [low, high).
func expressionHasGetFieldReferenceBetween(e sql.Expression, low, high int) bool {
var found bool
sql.Inspect(e, func(e sql.Expression) bool {
if gf, ok := e.(*expression.GetField); ok {
if gf.Index() >= low && gf.Index() < high {
found = true
return false
}
}
return true
})
return found
} | sql/analyzer/apply_indexes_for_subquery_comparisons.go | 0.590071 | 0.402862 | apply_indexes_for_subquery_comparisons.go | starcoder |
package ai
import (
"math"
"math/rand"
// anonymous import for png decoder
"image/color"
_ "image/png"
"github.com/hajimehoshi/ebiten/v2"
"github.com/jtbonhomme/asteboids/internal/physics"
"github.com/jtbonhomme/asteboids/internal/vector"
"github.com/sirupsen/logrus"
)
const (
	// boidMaxForce caps the magnitude of each steering force (see the Limit
	// calls in seek/separate/align).
	boidMaxForce float64 = 0.3
	// boidMaxVelocity caps the boid speed and scales desired velocities.
	boidMaxVelocity float64 = 3.0
	// Weights applied to the three flocking rules when composing the
	// acceleration in Update.
	separationFactor float64 = 1.9
	cohesionFactor float64 = 1.5
	alignmentFactor float64 = 1.3
)

// Boid is a PhysicalBody agent.
// It represents a single autonomous agent of the flocking simulation.
type Boid struct {
	physics.Body
}
// rayVertices builds the three vertices of a triangle at the given screen
// coordinates, fully white and with zeroed texture coordinates.
func rayVertices(x1, y1, x2, y2, x3, y3 float64) []ebiten.Vertex {
	points := [3][2]float64{{x1, y1}, {x2, y2}, {x3, y3}}
	vertices := make([]ebiten.Vertex, 0, 3)
	for _, p := range points {
		vertices = append(vertices, ebiten.Vertex{
			DstX: float32(p[0]), DstY: float32(p[1]),
			ColorR: 1, ColorG: 1, ColorB: 1, ColorA: 1,
		})
	}
	return vertices
}
// NewBoid creates a new Boid (PhysicalBody agent) at position (x, y) with a
// random initial orientation and a velocity of boidMaxVelocity along that
// orientation. The boid is rendered as a small 10x10 arrow-shaped polygon;
// vision is the callback used each tick to query nearby agents.
func NewBoid(
	log *logrus.Logger,
	x, y,
	screenWidth, screenHeight float64,
	boidImage *ebiten.Image,
	vision physics.AgentVision,
	debug bool) *Boid {
	b := Boid{}
	b.AgentType = physics.BoidAgent
	// Random orientation: a multiple of pi/32 in [0, 2*pi).
	b.Orientation = math.Pi / 32 * float64(rand.Intn(64))
	// Start at full speed along the initial orientation.
	b.Init(vector.Vector2D{
		X: boidMaxVelocity * math.Cos(b.Orientation),
		Y: boidMaxVelocity * math.Sin(b.Orientation),
	})
	b.LimitVelocity(boidMaxVelocity)
	b.Log = log
	b.Move(vector.Vector2D{
		X: x,
		Y: y,
	})
	b.PhysicWidth = 10
	b.PhysicHeight = 10
	b.ScreenWidth = screenWidth
	b.ScreenHeight = screenHeight
	// Build the boid sprite: two triangles (vertex index triples {0,1,2}
	// and {3,1,2}) forming an arrow, textured from a solid-colored source.
	// NOTE(review): the boidImage parameter is never used in this function —
	// confirm whether it is intentionally ignored.
	emptyImage := ebiten.NewImage(10, 10)
	emptyImage.Fill(color.RGBA{100, 100, 200, 255})
	op := &ebiten.DrawTrianglesOptions{}
	op.Address = ebiten.AddressRepeat
	op.CompositeMode = ebiten.CompositeModeSourceOut
	b.Image = ebiten.NewImage(10, 10)
	b.Image.DrawTriangles(
		[]ebiten.Vertex{
			{DstX: float32(0), DstY: float32(0), SrcX: 0, SrcY: 0, ColorR: 1, ColorG: 1, ColorB: 1, ColorA: 1},
			{DstX: float32(10), DstY: float32(5), SrcX: 0, SrcY: 0, ColorR: 1, ColorG: 1, ColorB: 1, ColorA: 1},
			{DstX: float32(5), DstY: float32(5), SrcX: 0, SrcY: 0, ColorR: 1, ColorG: 1, ColorB: 1, ColorA: 1},
			{DstX: float32(0), DstY: float32(10), SrcX: 0, SrcY: 0, ColorR: 1, ColorG: 1, ColorB: 1, ColorA: 1},
		},
		[]uint16{0, 1, 2, 3, 1, 2},
		emptyImage,
		op,
	)
	b.Vision = vision
	b.Debug = debug
	return &b
}
// Update advances the boid by one simulation step; it is called every tick
// (1/60 [s] by default). The three weighted flocking forces (cohesion,
// separation, alignment) are combined into an acceleration, then velocity,
// orientation and position are integrated.
func (b *Boid) Update() {
	neighbors := b.Vision(b.Position().X, b.Position().Y)

	acceleration := vector.Vector2D{}

	c := b.cohesion(neighbors)
	c.Multiply(cohesionFactor)
	acceleration.Add(c)

	s := b.separate(neighbors)
	s.Multiply(separationFactor)
	acceleration.Add(s)

	al := b.align(neighbors)
	al.Multiply(alignmentFactor)
	acceleration.Add(al)

	b.Accelerate(acceleration)
	b.UpdateVelocity()
	b.UpdateOrientation()
	b.UpdatePosition()
}

// Draw renders the boid (deferred to the embedded Body) along with the
// debug links to nearby boids; called every frame (typically 1/60 [s] on a
// 60Hz display).
func (b *Boid) Draw(screen *ebiten.Image) {
	defer b.Body.Draw(screen)
	neighbors := b.Vision(b.Position().X, b.Position().Y)
	b.LinkAgents(screen, neighbors, []string{physics.BoidAgent})
}
// seek computes a steering force relative to the given target, capped at
// boidMaxForce.
//
// NOTE(review): the classic Reynolds formulation is
//   desired = normalize(target - position) * maxSpeed; steer = desired - velocity.
// This code computes desired = normalize(position - target) * maxSpeed and
// steer = velocity - desired, i.e. velocity + normalize(target - position) *
// maxSpeed, which is not the classic steering vector — confirm the sign
// convention is intentional.
func (b *Boid) seek(target vector.Vector2D) vector.Vector2D {
	desired := b.Position()
	desired.Subtract(target)
	desired.Normalize()
	desired.Multiply(boidMaxVelocity)
	steer := b.Velocity()
	steer.Subtract(desired)
	steer.Limit(boidMaxForce)
	return steer
}
// cohesion returns the force imposed by flocking cohesion rule.
func (b *Boid) cohesion(agents []physics.Physic) vector.Vector2D {
result := vector.Vector2D{
X: 0,
Y: 0,
}
var nBoids float64 = 0.0
for _, agent := range agents {
if agent.Type() == physics.BoidAgent && agent.ID() != b.ID() {
nBoids++
result.Add(agent.Position())
}
}
if nBoids > 0 {
result.Divide(nBoids)
result = b.seek(result)
}
return result
}
// separate returns the force imposed by flocking separation rule.
func (b *Boid) separate(agents []physics.Physic) vector.Vector2D {
result := vector.Vector2D{
X: 0,
Y: 0,
}
var nBoids float64 = 0.0
for _, agent := range agents {
if /*agent.Type() == physics.BoidAgent && */ agent.ID() != b.ID() {
nBoids++
d := b.Position().Distance(agent.Position())
diff := b.Position()
diff.Subtract(agent.Position())
diff.Normalize()
diff.Divide(d)
result.Add(diff)
}
}
if nBoids > 0 {
result.Divide(nBoids)
result.Normalize()
result.Multiply(boidMaxVelocity)
result.Subtract(b.Velocity())
result.Limit(boidMaxForce)
}
return result
}
// align returns the force imposed by flocking alignment rule.
func (b *Boid) align(agents []physics.Physic) vector.Vector2D {
result := vector.Vector2D{
X: 0,
Y: 0,
}
var nBoids float64 = 0.0
for _, agent := range agents {
if agent.Type() == physics.BoidAgent && agent.ID() != b.ID() {
nBoids++
result.Add(agent.Velocity())
}
}
if nBoids > 0 {
result.Divide(nBoids)
result.Multiply(boidMaxVelocity)
result.Subtract(b.Velocity())
result.Limit(boidMaxForce)
}
return result
} | internal/ai/boid.go | 0.699357 | 0.620392 | boid.go | starcoder |
package enumerate
import (
"sync"
"golang.org/x/exp/constraints"
. "github.com/noxer/nox/dot"
"github.com/noxer/nox/set"
"github.com/noxer/nox/tuple"
)
// enumMapper lazily applies fn to every element produced by src.
type enumMapper[T, S any] struct {
	src Enumerable[T]
	fn  func(T) S
}

// Next advances the underlying enumerable.
func (m *enumMapper[T, S]) Next() bool {
	return m.src.Next()
}

// Value returns the mapped current element.
func (m *enumMapper[T, S]) Value() S {
	return m.fn(m.src.Value())
}

// Map applies a function f to every element of the enumerable e.
func Map[T, S any](e Enumerable[T], f func(T) S) Enumerable[S] {
	return &enumMapper[T, S]{src: e, fn: f}
}

// enumFilter lazily yields only the elements of src for which pred is true.
type enumFilter[T any] struct {
	src  Enumerable[T]
	pred func(T) bool
}

// Next advances src until an element passes the predicate or src is drained.
func (f *enumFilter[T]) Next() bool {
	for {
		if !f.src.Next() {
			return false
		}
		if f.pred(f.src.Value()) {
			return true
		}
	}
}

// Value returns the current (already filtered) element.
func (f *enumFilter[T]) Value() T {
	return f.src.Value()
}

// Filter lets only the elements pass that f returns true for.
func Filter[T any](e Enumerable[T], f func(T) bool) Enumerable[T] {
	return &enumFilter[T]{src: e, pred: f}
}

// enumZipper pairs up the elements of two enumerables.
type enumZipper[T, S any] struct {
	left  Enumerable[T]
	right Enumerable[S]
}

// Next advances both sides; it stops as soon as either side is exhausted.
func (z *enumZipper[T, S]) Next() bool {
	return z.left.Next() && z.right.Next()
}

// Value returns the current pair of elements.
func (z *enumZipper[T, S]) Value() tuple.T2[T, S] {
	return tuple.T2[T, S]{A: z.left.Value(), B: z.right.Value()}
}

// Zip takes two enumerables and returns an enumerable of tuples of them.
func Zip[T, S any](a Enumerable[T], b Enumerable[S]) Enumerable[tuple.T2[T, S]] {
	return &enumZipper[T, S]{left: a, right: b}
}
// Sum reads all values from an enumerable and returns their total.
func Sum[T Number](e Enumerable[T]) T {
	var total T
	for e.Next() {
		total += e.Value()
	}
	return total
}

// Max reads all values from an enumerable and returns the biggest one, or a
// failure Optional when the enumerable is empty.
func Max[T constraints.Ordered](e Enumerable[T]) Optional[T] {
	if !e.Next() {
		return Failure[T]()
	}
	best := e.Value()
	for e.Next() {
		if v := e.Value(); v > best {
			best = v
		}
	}
	return Success(best)
}

// Min reads all values from an enumerable and returns the smallest one, or a
// failure Optional when the enumerable is empty.
func Min[T constraints.Ordered](e Enumerable[T]) Optional[T] {
	if !e.Next() {
		return Failure[T]()
	}
	best := e.Value()
	for e.Next() {
		if v := e.Value(); v < best {
			best = v
		}
	}
	return Success(best)
}

// Count drains the enumerable and returns the number of values it produced.
func Count[T any](e Enumerable[T]) int {
	n := 0
	for e.Next() {
		n++
	}
	return n
}
// Drain reads and discards all remaining values from an enumerable.
func Drain[T any](e Enumerable[T]) {
	for e.Next() {
	}
}

// Consume advances the enumerable by up to n values, stopping early when the
// enumerable is exhausted.
func Consume[T any](e Enumerable[T], n int) {
	for i := 0; i < n && e.Next(); i++ {
	}
}

// Histogram reads all values from an enumerable and returns a map from each
// distinct value to the number of times it occurred.
func Histogram[T comparable, C Number](e Enumerable[T]) map[T]C {
	counts := map[T]C{}
	for e.Next() {
		counts[e.Value()]++
	}
	return counts
}

// Unique reads all values from an enumerable and returns the set of distinct
// values.
func Unique[T comparable](e Enumerable[T]) set.Set[T] {
	seen := make(set.Set[T])
	for e.Next() {
		seen.Put(e.Value())
	}
	return seen
}
type enumMutex[T any] struct {
m sync.RWMutex
e Enumerable[T]
}
func (e *enumMutex[T]) Next() bool {
e.m.Lock()
defer e.m.Unlock()
return e.e.Next()
}
func (e *enumMutex[T]) Value() T {
e.m.RLock()
defer e.m.RUnlock()
return e.e.Value()
}
// Synchronize makes reading the enumerable thread safe.
func Synchronize[T any](e Enumerable[T]) Enumerable[T] {
if _, ok := e.(*enumMutex[T]); ok {
// don't wrap an existing synchronized mutex
return e
}
return &enumMutex[T]{e: e}
} | enumerate/enumerate.go | 0.730001 | 0.46642 | enumerate.go | starcoder |
package assert
import (
"testing"
"github.com/ppapapetrou76/go-testing/internal/pkg/values"
"github.com/ppapapetrou76/go-testing/types"
)
// SliceOpt is a configuration option used to customize an assertable slice
// at construction time (see ThatSlice).
type SliceOpt func(*AssertableStringSlice)

// AssertableStringSlice is the implementation of AssertableSlice for string slices
type AssertableStringSlice struct {
	t *testing.T
	actual types.Containable
	// customMessage is intended to prefix assertion error messages.
	// NOTE(review): none of the assertion methods in this file reference it —
	// confirm it is consumed elsewhere (e.g. by the shouldXxx helpers).
	customMessage string
}

// WithCustomMessage provides a custom message to be added before the assertion error message
func WithCustomMessage(customMessage string) SliceOpt {
	return func(c *AssertableStringSlice) {
		c.customMessage = customMessage
	}
}
// ThatSlice returns a proper assertable structure based on the slice type.
// Only string slices are currently supported; any other type fails the test
// fatally and returns nil.
func ThatSlice(t *testing.T, actual interface{}, opts ...SliceOpt) AssertableSlice {
	if _, ok := actual.([]string); !ok {
		t.Fatalf("only string slice type is supported but got %T type", actual)
		return nil
	}
	assertable := &AssertableStringSlice{
		t:      t,
		actual: values.NewStringSliceValue(actual),
	}
	for _, opt := range opts {
		opt(assertable)
	}
	return assertable
}
// IsEqualTo asserts if the expected slice is equal to the assertable slice value
// It errors the tests if the compared values (actual VS expected) are not equal
func (a AssertableStringSlice) IsEqualTo(expected interface{}) AssertableSlice {
if !a.actual.IsEqualTo(expected) {
a.t.Error(shouldBeEqual(a.actual, expected))
}
return a
}
// IsNotEqualTo asserts that the assertable slice differs from the expected
// value, erroring the test otherwise.
func (a AssertableStringSlice) IsNotEqualTo(expected interface{}) AssertableSlice {
	if !a.actual.IsEqualTo(expected) {
		return a
	}
	a.t.Error(shouldNotBeEqual(a.actual, expected))
	return a
}
// HasSize asserts that the assertable slice has the given length,
// erroring the test otherwise.
func (a AssertableStringSlice) HasSize(size int) AssertableSlice {
	if a.actual.HasSize(size) {
		return a
	}
	a.t.Error(shouldHaveSize(a.actual, size))
	return a
}
// IsEmpty asserts that the assertable slice is empty, erroring the test
// otherwise.
func (a AssertableStringSlice) IsEmpty() AssertableSlice {
	if !a.actual.IsNotEmpty() {
		return a
	}
	a.t.Error(shouldBeEmpty(a.actual))
	return a
}
// IsNotEmpty asserts that the assertable slice is not empty, erroring the
// test otherwise.
func (a AssertableStringSlice) IsNotEmpty() AssertableSlice {
	if !a.actual.IsEmpty() {
		return a
	}
	a.t.Error(shouldNotBeEmpty(a.actual))
	return a
}
// Contains asserts that the assertable slice contains the given
// element(s), erroring the test otherwise.
func (a AssertableStringSlice) Contains(elements interface{}) AssertableSlice {
	if !a.actual.DoesNotContain(elements) {
		return a
	}
	a.t.Error(shouldContain(a.actual, elements))
	return a
}
// ContainsOnly asserts that the assertable slice contains only the given
// element(s), erroring the test otherwise.
func (a AssertableStringSlice) ContainsOnly(elements interface{}) AssertableSlice {
	if a.actual.ContainsOnly(elements) {
		return a
	}
	a.t.Error(shouldContainOnly(a.actual, elements))
	return a
}
// DoesNotContain asserts if the assertable string slice does not contain the given element
// It errors the test if it contains it/them
func (a AssertableStringSlice) DoesNotContain(elements interface{}) AssertableSlice {
if a.actual.Contains(elements) {
a.t.Error(shouldNotContain(a.actual, elements))
}
return a
} | assert/string_slice.go | 0.854415 | 0.659559 | string_slice.go | starcoder |
package digit
// LinkRelation represents a relationship as defined in RFC 5988 (Web Linking).
type LinkRelation struct {
	RelationName string // registered relation name, e.g. "self" or "next"
	Description  string // human-readable description from the registry
	Reference    string // specification that registered the relation
}
// LinkRelations returns an array of all link relations defined in RFC5988
func LinkRelations() []LinkRelation {
return []LinkRelation{
{
RelationName: "alternate",
Description: "Designates a substitute for the link's context.",
Reference: "[W3C.REC-html401-19991224]",
}, {
RelationName: "appendix",
Description: "Refers to an appendix.",
Reference: "[W3C.REC-html401-19991224]",
}, {
RelationName: "bookmark",
Description: "Refers to a bookmark or entry point.",
Reference: "[W3C.REC-html401-19991224]",
}, {
RelationName: "chapter",
Description: "Refers to a chapter in a collection of resources.",
Reference: "[W3C.REC-html401-19991224]",
}, {
RelationName: "contents",
Description: "Refers to a table of contents.",
Reference: "[W3C.REC-html401-19991224]",
}, {
RelationName: "copyright",
Description: "Refers to a copyright statement that applies to the link's context.",
Reference: "[W3C.REC-html401-19991224]",
}, {
RelationName: "current",
Description: "Refers to a resource containing the most recent item(s) in a collection of resources.",
Reference: "[RFC5005]",
}, {
RelationName: "describedby",
Description: "Refers to a resource providing information about the link's context.",
Reference: "http://www.w3.org/TR/powder-dr/#assoc-linking>",
}, {
RelationName: "edit",
Description: "Refers to a resource that can be used to edit the link's context.",
Reference: "[RFC5023]",
}, {
RelationName: "edit-media",
Description: "Refers to a resource that can be used to edit media associated with the link's context.",
Reference: "[RFC5023]",
}, {
RelationName: "enclosure",
Description: "Identifies a related resource that is potentially large and might require special handling.",
Reference: "[RFC4287]",
}, {
RelationName: "first",
Description: "An IRI that refers to the furthest preceding resource in a series of resources.",
Reference: "[RFC5988]",
}, {
RelationName: "glossary",
Description: "Refers to a glossary of terms.",
Reference: "[W3C.REC-html401-19991224]",
}, {
RelationName: "help",
Description: "Refers to a resource offering help (more information, links to other sources information, etc.)",
Reference: "[W3C.REC-html401-19991224]",
}, {
RelationName: "hub",
Description: "Refers to a hub that enables registration for notification of updates to the context.",
Reference: "<http://pubsubhubbub.googlecode.com/> <http://pubsubhubbub.googlecode.com/svn/trunk/pubsubhubbub-core-0.3.html>",
}, {
RelationName: "index",
Description: "Refers to an index.",
Reference: "[W3C.REC-html401-19991224]",
}, {
RelationName: "last",
Description: "An IRI that refers to the furthest following resource in a series of resources.",
Reference: "[RFC5988]",
}, {
RelationName: "latest-version",
Description: "Points to a resource containing the latest (e.g., current) version of the context.",
Reference: "[RFC5829]",
}, {
RelationName: "license",
Description: "Refers to a license associated with the link's context.",
Reference: "[RFC4946]",
}, {
RelationName: "next",
Description: "Refers to the next resource in a ordered series of resources.",
Reference: "[W3C.REC-html401-19991224]",
}, {
RelationName: "next-archive",
Description: "Refers to the immediately following archive resource.",
Reference: "[RFC5005]",
}, {
RelationName: "payment",
Description: "indicates a resource where payment is accepted.",
Reference: "[RFC5988]",
}, {
RelationName: "prev",
Description: "Refers to the previous resource in an ordered series of resources. Synonym for \"previous\".",
Reference: "[W3C.REC-html401-19991224]",
}, {
RelationName: "predecessor-version",
Description: "Points to a resource containing the predecessor version in the version history.",
Reference: "[RFC5829]",
}, {
RelationName: "previous",
Description: "Refers to the previous resource in an ordered series of resources. Synonym for \"prev\".",
Reference: "[W3C.REC-html401-19991224]",
}, {
RelationName: "prev-archive",
Description: "Refers to the immediately preceding archive resource.",
Reference: "[RFC5005]",
}, {
RelationName: "related",
Description: "Identifies a related resource.",
Reference: "[RFC4287]",
}, {
RelationName: "replies",
Description: "Identifies a resource that is a reply to the context of the link.",
Reference: "[RFC4685]",
}, {
RelationName: "section",
Description: "Refers to a section in a collection of resources.",
Reference: "[W3C.REC-html401-19991224]",
}, {
RelationName: "self",
Description: "Conveys an identifier for the link's context.",
Reference: "[RFC4287]",
}, {
RelationName: "service",
Description: "Indicates a URI that can be used to retrieve a service document.",
Reference: "[RFC5023]",
}, {
RelationName: "start",
Description: "Refers to the first resource in a collection of resources.",
Reference: "[W3C.REC-html401-19991224]",
}, {
RelationName: "stylesheet",
Description: "Refers to an external style sheet.",
Reference: "[W3C.REC-html401-19991224]",
}, {
RelationName: "subsection",
Description: "Refers to a resource serving as a subsection in a collection of resources.",
Reference: "[W3C.REC-html401-19991224]",
}, {
RelationName: "successor-version",
Description: "Points to a resource containing the successor version in the version history.",
Reference: "[RFC5829]",
}, {
RelationName: "up",
Description: "Refers to a parent document in a hierarchy of documents.",
Reference: "[RFC5988]",
}, {
RelationName: "version-history",
Description: "Points to a resource containing the version history for the context.",
Reference: "[RFC5829]",
}, {
RelationName: "via",
Description: "Identifies a resource that is the source of the information in the link's context.",
Reference: "[RFC4287]",
}, {
RelationName: "working-copy",
Description: "Points to a working copy for this resource.",
Reference: "[RFC5829]",
}, {
RelationName: "working-copy-of",
Description: "Points to the versioned resource from which this working copy was obtained.",
Reference: "[RFC5829]",
},
}
} | linkRelation.go | 0.76105 | 0.533337 | linkRelation.go | starcoder |
// An implementation of Fortune's algorithm to get Voronoi edges for a set of
// points.
package voronoi
import (
"container/heap"
"fmt"
"math"
)
// Point is a 2D point with float64 coordinates.
type Point struct {
	X float64
	Y float64
}
// Pt builds a *Point from the given coordinates.
func Pt(x float64, y float64) *Point {
	return &Point{x, y}
}
type Vertices []*Point
// Edge is a (possibly half-open) Voronoi edge.
type Edge struct {
	Start     *Point
	End       *Point  // nil until the edge is terminated or finished
	Direction *Point  // direction the edge grows in from Start
	Left      *Point  // site on the left of the edge
	Right     *Point  // site on the right of the edge
	F         float64 // slope of the line containing the edge
	G         float64 // y-intercept of that line (y = F*x + G)
	Neighbor  *Edge   // twin half-edge growing the opposite way, if any
}
// Ed builds an edge with only its start and end points set.
func Ed(x1 float64, y1 float64, x2 float64, y2 float64) *Edge {
	e := new(Edge)
	e.Start = Pt(x1, y1)
	e.End = Pt(x2, y2)
	return e
}
// NewEdge creates a half-edge starting at s separating site a (left) from
// site b (right). F and G hold the slope and intercept of the supporting
// line; Direction points along the edge away from Start.
func NewEdge(s *Point, a *Point, b *Point) *Edge {
	// End and Neighbor intentionally start as their zero value (nil).
	e := &Edge{
		Start: s,
		Left:  a,
		Right: b,
	}
	e.F = (b.X - a.X) / (a.Y - b.Y)
	e.G = s.Y - e.F*s.X
	// e.F is already a float64; the original wrapped it in a redundant
	// float64(...) conversion before IsInf.
	switch {
	case math.IsInf(e.F, -1):
		e.Direction = Pt(1, 0)
	case math.IsInf(e.F, 1):
		e.Direction = Pt(-1, 0)
	default:
		e.Direction = Pt(b.Y-a.Y, -(b.X - a.X))
	}
	return e
}
type Edges []*Edge
// Event is a sweep-line event: a site event (IsPlace true) or a circle
// event that removes an arc.
type Event struct {
	Point   *Point
	IsPlace bool      // true for site events, false for circle events
	Y       float64   // queue priority: the event point's y coordinate
	Arch    *Parabola // for circle events, the arc that will vanish
}
// NewEvent wraps p as a site event (place true) or a circle event.
func NewEvent(p *Point, place bool) *Event {
	// Arch stays nil; circle events set it after construction.
	return &Event{
		Point:   p,
		IsPlace: place,
		Y:       p.Y,
	}
}
// EventList is a flat list of events supporting lookup and removal.
type EventList []*Event

// Find returns the index of e in l, or -1 when absent.
func (l EventList) Find(e *Event) int {
	for i, candidate := range l {
		if candidate == e {
			return i
		}
	}
	return -1
}
// Last returns the final event in the list, or nil when the list is empty.
func (l EventList) Last() *Event {
	if n := len(l); n > 0 {
		return l[n-1]
	}
	return nil
}
// Remove deletes e from the list, reporting whether it was present.
func (l *EventList) Remove(e *Event) bool {
	i := l.Find(e)
	if i < 0 {
		return false
	}
	*l = append((*l)[:i], (*l)[i+1:]...)
	return true
}
// EventQueue is a priority queue of events for container/heap, ordered by
// descending Y so the highest (earliest-reached) event pops first.
type EventQueue []*Event

func (q EventQueue) Len() int {
	return len(q)
}

func (q EventQueue) Less(i int, j int) bool {
	// Sorted by Y descending.
	return q[i].Y > q[j].Y
}

func (q EventQueue) Swap(i int, j int) {
	q[i], q[j] = q[j], q[i]
}
// Push implements heap.Interface by appending x to the queue; heap.Push
// restores the heap invariant afterwards.
func (q *EventQueue) Push(x interface{}) {
	// append already grows the backing array as needed; the manual
	// doubling-and-copy the original performed was redundant.
	*q = append(*q, x.(*Event))
}
// Pop implements heap.Interface by removing and returning the last element.
func (q *EventQueue) Pop() interface{} {
	a := *q
	n := len(a)
	event := a[n-1]
	a[n-1] = nil // drop the reference so the backing array doesn't retain it
	*q = a[:n-1]
	return event
}
// Parabola is a node of the beach-line binary tree. Leaves represent arcs
// (Site set); inner nodes represent breakpoints (Edge set).
type Parabola struct {
	IsLeaf bool
	Site   *Point // focus of the arc (leaves only)
	Edge   *Edge  // edge traced by the breakpoint (inner nodes only)
	Event  *Event // pending circle event that would remove this arc
	Parent *Parabola
	left   *Parabola
	right  *Parabola
}
// NewParabola creates an empty (inner) parabola node.
func NewParabola() *Parabola {
	// The zero value of every field is exactly the desired initial state.
	return &Parabola{}
}

// NewLeafParabola creates a leaf node representing the arc of site s.
func NewLeafParabola(s *Point) *Parabola {
	return &Parabola{
		Site:   s,
		IsLeaf: true,
	}
}
// Left returns the left child.
func (p *Parabola) Left() *Parabola {
	return p.left
}

// Right returns the right child.
func (p *Parabola) Right() *Parabola {
	return p.right
}

// SetLeft installs c as the left child and fixes its parent pointer.
func (p *Parabola) SetLeft(c *Parabola) {
	p.left = c
	c.Parent = p
}

// SetRight installs c as the right child and fixes its parent pointer.
func (p *Parabola) SetRight(c *Parabola) {
	p.right = c
	c.Parent = p
}

// GetLeft returns the arc immediately to the left of p on the beach line.
func (p *Parabola) GetLeft() *Parabola {
	return p.GetLeftParent().GetLeftChild()
}

// GetRight returns the arc immediately to the right of p on the beach line.
func (p *Parabola) GetRight() *Parabola {
	return p.GetRightParent().GetRightChild()
}
// Print dumps the parabola's fields to stdout for debugging.
func (p *Parabola) Print() {
	// The original printed &p — the address of the local pointer variable,
	// which is meaningless to a reader; print the node's own address.
	fmt.Printf("Parabola: %p\n", p)
	fmt.Printf(" Site: %v\n", p.Site)
	fmt.Printf(" IsLeaf: %v\n", p.IsLeaf)
	fmt.Printf(" Event: %v\n", p.Event)
	fmt.Printf(" Edge: %v\n", p.Edge)
	fmt.Printf(" Parent: %v\n", p.Parent)
	fmt.Printf(" Left: %v\n", p.left)
	fmt.Printf(" Right: %v\n", p.right)
}
// GetLeftParent climbs to the first ancestor reached from its right
// subtree (the breakpoint to the left of p), or nil at the far left of
// the beach line.
func (p *Parabola) GetLeftParent() *Parabola {
	par := p.Parent
	plast := p
	for par.Left() == plast {
		if par.Parent == nil {
			return nil
		}
		plast = par
		par = par.Parent
	}
	return par
}

// GetRightParent climbs to the first ancestor reached from its left
// subtree (the breakpoint to the right of p), or nil at the far right of
// the beach line.
func (p *Parabola) GetRightParent() *Parabola {
	par := p.Parent
	plast := p
	for par.Right() == plast {
		if par.Parent == nil {
			return nil
		}
		plast = par
		par = par.Parent
	}
	return par
}
// GetLeftChild returns the rightmost leaf of p's left subtree — the arc
// just left of breakpoint p — or nil when p is nil.
func (p *Parabola) GetLeftChild() *Parabola {
	if p == nil {
		return nil
	}
	par := p.Left()
	for !par.IsLeaf {
		par = par.Right()
	}
	return par
}

// GetRightChild returns the leftmost leaf of p's right subtree — the arc
// just right of breakpoint p — or nil when p is nil.
func (p *Parabola) GetRightChild() *Parabola {
	if p == nil {
		return nil
	}
	par := p.Right()
	for !par.IsLeaf {
		par = par.Left()
	}
	return par
}
// Voronoi holds all state for one run of Fortune's algorithm.
type Voronoi struct {
	Edges    Edges     // output edges
	Vertices Vertices
	Places   *Vertices // input sites
	Width    float64   // bounding box width
	Height   float64   // bounding box height
	Root     *Parabola // beach-line tree root
	Y        float64   // current sweep-line position
	del      EventList // circle events invalidated before being popped
	points   Vertices  // every point allocated during the run
	queue    EventQueue
}
// GetEdges runs Fortune's sweep-line algorithm over the given sites and
// returns the Voronoi edges, extended to a w×h bounding box.
func (v *Voronoi) GetEdges(places *Vertices, w float64, h float64) Edges {
	v.Places = places
	v.Width = w
	v.Height = h
	v.Root = nil
	v.Edges = make(Edges, 0, 0)
	v.points = make(Vertices, 0, 0)
	// Seed the priority queue with one site event per input point.
	v.queue = make(EventQueue, 0, len(*places)+1)
	for _, p := range *places {
		heap.Push(&v.queue, NewEvent(p, true))
	}
	v.del = make(EventList, 0, 0)
	var e *Event
	for v.queue.Len() > 0 {
		e = heap.Pop(&v.queue).(*Event)
		v.Y = e.Point.Y // advance the sweep line
		// Skip circle events invalidated since they were queued.
		if i := v.del.Find(e); i != -1 {
			v.del.Remove(e)
			continue
		}
		if e.IsPlace {
			v.insertParabola(e.Point)
		} else {
			v.removeParabola(e)
		}
	}
	// Give still-open edges an end point on/near the bounding box.
	v.finishEdge(v.Root)
	// Stitch each pair of twin half-edges into a single edge.
	for _, e := range v.Edges {
		if e.Neighbor != nil {
			e.Start = e.Neighbor.End
			e.Neighbor = nil
		}
	}
	return v.Edges
}
// insertParabola handles a site event: a new arc for site p splits the arc
// directly above it on the beach line.
func (v *Voronoi) insertParabola(p *Point) {
	// First site: the tree is a single arc.
	if v.Root == nil {
		v.Root = NewLeafParabola(p)
		return
	}
	// Degenerate case: the first two sites are at (nearly) the same height.
	if v.Root.IsLeaf && v.Root.Site.Y-p.Y < 1 {
		fp := v.Root.Site
		v.Root.IsLeaf = false
		v.Root.SetLeft(NewLeafParabola(fp))
		v.Root.SetRight(NewLeafParabola(p))
		s := Pt((p.X+fp.X)/2.0, v.Height)
		v.points = append(v.points, s)
		if p.X > fp.X {
			v.Root.Edge = NewEdge(s, fp, p)
		} else {
			v.Root.Edge = NewEdge(s, p, fp)
		}
		v.Edges = append(v.Edges, v.Root.Edge)
		return
	}
	// The arc directly above the new site gets split.
	par := v.getParabolaByX(p.X)
	// Splitting invalidates any circle event queued for that arc.
	if par.Event != nil {
		v.del = append(v.del, par.Event)
		par.Event = nil
	}
	// Two twin half-edges start at the point on the old arc above p.
	start := Pt(p.X, v.getY(par.Site, p.X))
	v.points = append(v.points, start)
	el := NewEdge(start, par.Site, p)
	er := NewEdge(start, p, par.Site)
	el.Neighbor = er
	v.Edges = append(v.Edges, el)
	// Replace the old leaf with a subtree of three arcs: old | new | old.
	par.Edge = er
	par.IsLeaf = false
	p0 := NewLeafParabola(par.Site)
	p1 := NewLeafParabola(p)
	p2 := NewLeafParabola(par.Site)
	par.SetRight(p2)
	par.SetLeft(NewParabola())
	par.Left().Edge = el
	par.Left().SetLeft(p0)
	par.Left().SetRight(p1)
	// The two outer arcs may now converge with their neighbours.
	v.checkCircle(p0)
	v.checkCircle(p2)
}
// removeParabola handles a circle event: arc e.Arch has shrunk to a point,
// producing a Voronoi vertex where its two bounding breakpoint edges meet.
func (v *Voronoi) removeParabola(e *Event) {
	var (
		p1 = e.Arch
		xl = p1.GetLeftParent()
		xr = p1.GetRightParent()
		p0 = xl.GetLeftChild()
		p2 = xr.GetRightChild()
	)
	// Circle events pending on the neighbouring arcs are no longer valid.
	if p0.Event != nil {
		v.del = append(EventList{p0.Event}, v.del...)
		p0.Event = nil
	}
	if p2.Event != nil {
		v.del = append(EventList{p2.Event}, v.del...)
		p2.Event = nil
	}
	// The vanishing arc's apex is the new Voronoi vertex.
	p := Pt(e.Point.X, v.getY(p1.Site, e.Point.X))
	v.points = append(v.points, p)
	// Both breakpoint edges terminate at the vertex.
	xl.Edge.End = p
	xr.Edge.End = p
	// Whichever of xl/xr is higher in the tree survives and traces the new
	// edge between the two now-adjacent arcs.
	var (
		higher *Parabola
		par    *Parabola = p1
	)
	for par != v.Root {
		par = par.Parent
		if par == xl {
			higher = xl
		}
		if par == xr {
			higher = xr
		}
	}
	higher.Edge = NewEdge(p, p0.Site, p2.Site)
	v.Edges = append(v.Edges, higher.Edge)
	// Splice the vanished leaf and its parent breakpoint out of the tree.
	gparent := p1.Parent.Parent
	if p1.Parent.Left() == p1 {
		if gparent.Left() == p1.Parent {
			gparent.SetLeft(p1.Parent.Right())
		}
		if gparent.Right() == p1.Parent {
			gparent.SetRight(p1.Parent.Right())
		}
	} else {
		if gparent.Left() == p1.Parent {
			gparent.SetLeft(p1.Parent.Left())
		}
		if gparent.Right() == p1.Parent {
			gparent.SetRight(p1.Parent.Left())
		}
	}
	p1.Parent = nil
	v.checkCircle(p0)
	v.checkCircle(p2)
}
// getEdgeIntersection intersects the supporting lines of two edges,
// returning nil when the intersection lies behind either edge's direction
// of growth. Edges with infinite slope (vertical) are special-cased.
func (v *Voronoi) getEdgeIntersection(a *Edge, b *Edge) *Point {
	var (
		x = (b.G - a.G) / (a.F - b.F)
		y = a.F*x + a.G
	)
	if math.IsInf(float64(b.F), 0) {
		x = b.Start.X
		y = a.F*x + a.G
	}
	if math.IsInf(float64(a.F), 0) {
		x = a.Start.X
		y = b.F*x + b.G
	}
	// Reject intersections "behind" either half-edge's start.
	if (x-a.Start.X)/a.Direction.X < 0 {
		return nil
	}
	if (y-a.Start.Y)/a.Direction.Y < 0 {
		return nil
	}
	if (x-b.Start.X)/b.Direction.X < 0 {
		return nil
	}
	if (y-b.Start.Y)/b.Direction.Y < 0 {
		return nil
	}
	p := Pt(x, y)
	v.points = append(v.points, p)
	return p
}
// checkCircle tests whether arc b's bounding breakpoints converge; if so
// it queues a circle event at the lowest point of the circumcircle of the
// three involved sites.
func (v *Voronoi) checkCircle(b *Parabola) {
	var (
		lp = b.GetLeftParent()
		rp = b.GetRightParent()
		a  = lp.GetLeftChild()
		c  = rp.GetRightChild()
	)
	if a == nil || c == nil || a.Site == c.Site {
		return
	}
	s := v.getEdgeIntersection(lp.Edge, rp.Edge)
	if s == nil {
		return
	}
	// Distance from the circle center to one of the sites = circle radius.
	var (
		dx = a.Site.X - s.X
		dy = a.Site.Y - s.Y
		d  = float64(math.Sqrt(float64((dx * dx) + (dy * dy))))
	)
	// Only events below the current sweep line are still reachable.
	if s.Y-d >= v.Y {
		return
	}
	e := NewEvent(Pt(s.X, s.Y-d), false)
	v.points = append(v.points, e.Point)
	b.Event = e
	e.Arch = b
	heap.Push(&v.queue, e)
}
// getParabolaByX descends the beach-line tree to the arc above x at the
// current sweep position.
func (v *Voronoi) getParabolaByX(xx float64) *Parabola {
	par := v.Root
	var x float64 = 0.0
	for !par.IsLeaf {
		x = v.getXOfEdge(par, v.Y) // breakpoint x at this inner node
		if x > xx {
			par = par.Left()
		} else {
			par = par.Right()
		}
	}
	return par
}
// getY evaluates, at horizontal position x, the parabola with focus p and
// directrix at the current sweep-line position v.Y.
func (v *Voronoi) getY(p *Point, x float64) float64 {
	var (
		dp = 2 * (p.Y - v.Y)
		a1 = 1 / dp
		b1 = -2 * p.X / dp
		c1 = v.Y + dp/4 + p.X*p.X/dp
	)
	return a1*x*x + b1*x + c1
}
// finishEdge recursively assigns end points to every still-open edge in
// the beach-line tree, extending them slightly past the bounding box.
func (v *Voronoi) finishEdge(n *Parabola) {
	if n.IsLeaf {
		return
	}
	// Pick a far x coordinate in the direction the edge grows.
	var mx float64
	if n.Edge.Direction.X > 0.0 {
		if v.Width > n.Edge.Start.X+10 {
			mx = v.Width
		} else {
			mx = n.Edge.Start.X + 10
		}
	} else {
		if 0.0 < n.Edge.Start.X-10 {
			mx = 0.0
		} else {
			mx = n.Edge.Start.X - 10
		}
	}
	// Vertical edges (infinite slope) end at the top/bottom of the box.
	var end *Point
	if math.IsInf(float64(n.Edge.F), 1) {
		end = Pt(mx, v.Height)
	} else if math.IsInf(float64(n.Edge.F), -1) {
		end = Pt(mx, 0)
	} else {
		end = Pt(mx, mx*n.Edge.F+n.Edge.G)
	}
	n.Edge.End = end
	v.points = append(v.points, end)
	v.finishEdge(n.Left())
	v.finishEdge(n.Right())
}
func (v *Voronoi) getXOfEdge(par *Parabola, y float64) float64 {
var (
left = par.GetLeftChild()
right = par.GetRightChild()
p = left.Site
r = right.Site
dp = 2.0 * (p.Y - y)
a1 = 1.0 / dp
b1 = -2.0 * p.X / dp
c1 = y + dp/4 + p.X*p.X/dp
)
dp = 2.0 * (r.Y - y)
var (
a2 = 1.0 / dp
b2 = -2.0 * r.X / dp
c2 = v.Y + dp/4 + r.X*r.X/dp
a = a1 - a2
b = b1 - b2
c = c1 - c2
disc = b*b - 4*a*c
x1 = (-b + float64(math.Sqrt(float64(disc)))) / (2 * a)
x2 = (-b - float64(math.Sqrt(float64(disc)))) / (2 * a)
)
var ry float64
if p.Y < r.Y {
if x1 > x2 {
ry = x1
} else {
ry = x2
}
} else {
if x1 < x2 {
ry = x1
} else {
ry = x2
}
}
return ry
} | voronoi.go | 0.685739 | 0.582075 | voronoi.go | starcoder |
package main
import "errors"
/* relationship labels.
*region* is the reference of a course grained physical location.
*vpc* is an abstract reference to a group of resources.
*az* is a medium grained reference to an isolated location (DC/floor/whatever).
*subnet* is an abstract reference to a fixed range pool of IP addresses.
*instance* is a single guest VM which is located in an az and associated with a vpc.
*elb* is a logical group of hosts that provide loadbalancing for one or more instances.
An elb is located in an az and associated with a vpc.
(region) -[hosts]-> (vpc)
(region) <-[hosted_by]- (vpc)
(region) -[houses]-> (az)
(region) <-[housed_by]- (az)
(vpc) -[allocates_network]-> (subnet)
(vpc) <-[network_allocated_by]- (subnet)
(az) -[hosts_network]-> (subnet)
(az) <-[network_hosted_by]- (subnet)
(subnet) -[ip_allocated_to_instance]-> (instance)
(subnet) <-[instance_ip_allocated_from]- (instance)
(subnet) -[homes]-> (elb)
(subnet) <-[homed_in]- (elb)
(elb) -[proxies]-> (instance)
(elb) <-[proxied_by]- (instance)
===
(az) -[provisions_elb]-> (elb)
(az) <-[elb_provisioned_in]- (elb)
(az) -[provisions_instance]-> (instance)
(az) <-[instance_provisioned_in]- (instance)
acl, route, gw
*/
// NodeNotFound is returned when a node id is not present in the graph.
var NodeNotFound = errors.New("node not found")

// NeighboursNotFound is returned when a node has no recorded edges.
var NeighboursNotFound = errors.New("neighbours not found")
// Identity is a string-typed identifier.
type Identity string

// Type classifies a node.
type Type uint

// Relationship labels a directed edge.
type Relationship string

// NodeRef is a reference to a Node.
type NodeRef *Node

// Neighbours is the list of edges leaving a single node.
type Neighbours []*Edge

// InitialNeighbourCapacity is the starting capacity of a node's edge list.
const InitialNeighbourCapacity = 4
// Node is a graph vertex with an id, a type tag, and an arbitrary payload.
type Node struct {
	Id    string
	Type  Type
	Value interface{}
}
// EdgeList contains all the relationships between nodes.
type EdgeList struct {
	EdgeCount int                   // total number of edges added
	Edges     map[string]Neighbours // outgoing edges keyed by source node id
}
// Len returns the number of edges in the list.
func (el *EdgeList) Len() int {
	return el.EdgeCount
}
// AddNeighbour records a directed, labelled edge from one node to another.
func (el *EdgeList) AddNeighbour(from NodeRef, rel Relationship, to NodeRef) {
	el.EdgeCount++
	edge := &Edge{From: from, Relationship: rel, To: to}
	if existing, ok := el.Edges[from.Id]; ok {
		el.Edges[from.Id] = append(existing, edge)
		return
	}
	neighbours := make(Neighbours, 0, InitialNeighbourCapacity)
	el.Edges[from.Id] = append(neighbours, edge)
}
// GetNeighbours returns the edges leaving the node with the given id, or
// NeighboursNotFound when none have been recorded.
func (el *EdgeList) GetNeighbours(id string) (Neighbours, error) {
	if neighbours, ok := el.Edges[id]; ok {
		return neighbours, nil
	}
	return nil, NeighboursNotFound
}
// RelationshipFilterFunc reports whether an edge matches a filter.
type RelationshipFilterFunc func(r *Edge) bool

// GetNeighboursBy returns every edge that satisfies all of the given
// filters. This could get expensive on a large graph.
func (el *EdgeList) GetNeighboursBy(filters ...RelationshipFilterFunc) (n Neighbours) {
	for _, neighbours := range el.Edges {
	candidates:
		for _, neighbour := range neighbours {
			for _, fn := range filters {
				if !fn(neighbour) {
					continue candidates
				}
			}
			n = append(n, neighbour)
		}
	}
	return n
}
// NodeFilterFunc reports whether a node matches a filter.
type NodeFilterFunc func(n NodeRef) bool

// ByType builds a filter matching nodes of the given type.
func ByType(t Type) NodeFilterFunc {
	return func(n NodeRef) bool { return n.Type == t }
}
// NodeList contains all of the nodes, keyed by Node.Id.
type NodeList map[string]NodeRef

// AddNode creates a node, stores it under id, and returns it.
func (nl NodeList) AddNode(id string, t Type, v interface{}) *Node {
	node := &Node{Id: id, Type: t, Value: v}
	nl[id] = node
	return node
}
// GetNode looks up a node by id, returning NodeNotFound when absent.
func (nl NodeList) GetNode(id string) (NodeRef, error) {
	if node, ok := nl[id]; ok {
		return node, nil
	}
	return nil, NodeNotFound
}
// GetNodes returns every node that satisfies all of the given filters.
func (nl NodeList) GetNodes(filters ...NodeFilterFunc) (nodes []NodeRef) {
	nodes = make([]NodeRef, 0, 16)
candidates:
	for _, n := range nl {
		for _, fn := range filters {
			if !fn(n) {
				continue candidates
			}
		}
		nodes = append(nodes, n)
	}
	return nodes
}
// Len returns the number of nodes in the list.
func (nl NodeList) Len() int {
	return len(nl)
}

// Edge is a directed, labelled relationship between two nodes.
type Edge struct {
	From         NodeRef
	Relationship Relationship
	To           NodeRef
}

// Graph combines node storage with the edge list.
type Graph struct {
	NodeList
	*EdgeList
}
// NewGraph
func NewGraph() (g *Graph) {
return &Graph{
NewNodeList(),
NewEdgeList(),
}
}
// NewEdgeList
func NewEdgeList() (el *EdgeList) {
return &EdgeList{
Edges: make(map[string]Neighbours),
}
}
// NewNodeList
func NewNodeList() (nl NodeList) {
return make(NodeList)
} | graph.go | 0.585338 | 0.462473 | graph.go | starcoder |
package util
import (
"bytes"
"fmt"
"io"
"io/ioutil"
)
const (
	// Once initialized, capacity will never drop below this number.
	initialGrowableByteSliceCapacity = 1024
)
/*
GrowableByteSlice is used to hold file data in memory.
Slightly different and simpler behavior than bytes.Buffer.
Wraps a byte slice, and can grow it preserving existing data,
truncate it larger or smaller.
Indices are specified in int64s, not ints. Currently the slice is implemented
as a single underlying byte slice, so math.IntMax is the maximum length.
*/
type GrowableByteSlice struct {
	data []byte // backing storage; len(data) is the logical size
}
// Compile-time checks that GrowableByteSlice satisfies the io interfaces.
var (
	// Read interfaces.
	_ io.ReaderAt = &GrowableByteSlice{}
	_ io.WriterTo = &GrowableByteSlice{}
	// Write interfaces.
	_ io.WriterAt = &GrowableByteSlice{}
	_ io.ReaderFrom = &GrowableByteSlice{}
)
// String returns a copy of the buffer contents as a string.
func (s *GrowableByteSlice) String() string {
	return string(s.data)
}

// GoString returns a debug representation showing length and capacity.
func (s *GrowableByteSlice) GoString() string {
	return fmt.Sprintf("GrowableSlice(len=%d,cap=%d)", len(s.data), cap(s.data))
}
// Resize changes the len of the slice, re-allocating if necessary, to be newLen.
// If the new len is larger, the "new" bytes at the end of the buffer will always be zeroed out.
func (s *GrowableByteSlice) Resize(newLen64 int64) {
	newLen := int(newLen64)
	switch {
	case newLen < 0:
		panic("newLen must be >= 0")
	case newLen < len(s.data):
		s.shrink(newLen)
	case newLen == len(s.data):
		// Already the requested size.
		return
	case newLen <= cap(s.data):
		// Growing within capacity: re-slice; make() zeroed these bytes and
		// shrink zeroes freed tails, so they are still zero.
		s.data = s.data[:newLen]
	default: // newLen > cap(s.data)
		s.reallocate(newLen)
	}
}
// shrink reduces the slice to newLen bytes, reallocating when most of the
// capacity would otherwise be wasted, and zeroing the dropped tail so the
// stale data cannot reappear if the slice later regrows in place.
func (s *GrowableByteSlice) shrink(newLen int) {
	if newLen >= len(s.data) {
		panic("cannot shrink larger")
	}
	if len(s.data) < cap(s.data)/3 {
		// Reallocate to avoid leaking memory.
		s.reallocate(newLen)
		return
	}
	// Only shrinking a little: reuse the array but zero the dropped tail.
	// BUG FIX: the original iterated `for i := range s.data[newLen:]` while
	// writing `s.data[i] = 0`, zeroing the kept prefix instead of the tail
	// (destroying retained data and leaving the stale bytes intact).
	tail := s.data[newLen:]
	for i := range tail {
		tail[i] = 0
	}
	s.data = s.data[:newLen]
}
// reallocate replaces the underlying array with a fresh one of length
// newLen and capacity 2*newLen (but at least the initial capacity),
// copying over whatever fits.
func (s *GrowableByteSlice) reallocate(newLen int) {
	capacity := 2 * newLen
	if capacity < initialGrowableByteSliceCapacity {
		capacity = initialGrowableByteSliceCapacity
	}
	grown := make([]byte, newLen, capacity)
	copy(grown, s.data)
	s.data = grown
}
// ReadAt implements the io.ReaderAt interface.
func (s *GrowableByteSlice) ReadAt(buf []byte, off int64) (n int, err error) {
	if off >= int64(len(s.data)) {
		return 0, io.EOF
	}
	// Don't use Slice because we don't want to grow the slice.
	n = copy(buf, s.data[off:])
	if n+int(off) == len(s.data) {
		// The copy reached the end of our data, so report EOF together
		// with the bytes read, as io.ReaderAt permits.
		err = io.EOF
	}
	return
}
// WriteAt implements the io.WriterAt interface. Writing past the current
// end grows the buffer to fit (see slice).
func (s *GrowableByteSlice) WriteAt(data []byte, off int64) (int, error) {
	end := off + int64(len(data))
	copy(s.slice(off, end), data)
	return len(data), nil
}
// slice returns the bytes in s.data[start:end], growing the buffer first
// when end exceeds the current length. Panics when start > end.
func (s *GrowableByteSlice) slice(start, end int64) []byte {
	if start > end {
		panic(fmt.Sprintf("start(%d) > end(%d)", start, end))
	}
	if end > int64(len(s.data)) {
		// end is already an int64; the original's int64(end) conversion
		// was redundant.
		s.Resize(end)
	}
	return s.data[start:end]
}
// WriteTo writes the entire buffer contents to w.
func (s *GrowableByteSlice) WriteTo(w io.Writer) (int64, error) {
	return io.Copy(w, bytes.NewReader(s.data))
}

// ReadFrom replaces the buffer contents with everything read from r.
func (s *GrowableByteSlice) ReadFrom(r io.Reader) (int64, error) {
	data, err := ioutil.ReadAll(r)
	s.data = data
	return int64(len(data)), err
}
func (s *GrowableByteSlice) Len() int64 {
return int64(len(s.data))
} | internal/util/growable_byte_slice.go | 0.667256 | 0.464112 | growable_byte_slice.go | starcoder |
// Package ristretto allows simple and abstracted operations in the Ristretto255 group
package ristretto
import (
"github.com/gtank/ristretto255"
"github.com/bytemare/crypto/group/internal"
)
const canonicalEncodingLength = 32
// Scalar implements the Scalar interface for Ristretto255 group scalars.
type Scalar struct {
	scalar *ristretto255.Scalar // the wrapped ristretto255 scalar value
}
// Random sets the current scalar to a new random scalar and returns it.
// The scalar is derived from uniformly random bytes via FromUniformBytes.
func (s *Scalar) Random() internal.Scalar {
	random := internal.RandomBytes(ristrettoInputLength)
	s.scalar.FromUniformBytes(random)
	return s
}
// Add returns the sum of the scalars, and does not change the receiver.
// A nil argument is treated as zero: the receiver itself is returned.
func (s *Scalar) Add(scalar internal.Scalar) internal.Scalar {
	if scalar == nil {
		return s
	}
	sc, ok := scalar.(*Scalar)
	if !ok {
		// The argument belongs to a different group implementation.
		panic(internal.ErrCastScalar)
	}
	return &Scalar{scalar: ristretto255.NewScalar().Add(s.scalar, sc.scalar)}
}
// Sub returns the difference between the scalars, and does not change the
// receiver. A nil argument is treated as zero: the receiver is returned.
func (s *Scalar) Sub(scalar internal.Scalar) internal.Scalar {
	if scalar == nil {
		return s
	}
	sc, ok := scalar.(*Scalar)
	if !ok {
		// Use the shared sentinel, matching Add, instead of the ad-hoc
		// string the original panicked with here.
		panic(internal.ErrCastScalar)
	}
	return &Scalar{scalar: ristretto255.NewScalar().Subtract(s.scalar, sc.scalar)}
}
// Mult returns the multiplication of the scalars, and does not change the
// receiver. Unlike Add/Sub, a nil argument panics.
func (s *Scalar) Mult(scalar internal.Scalar) internal.Scalar {
	if scalar == nil {
		panic("multiplying scalar with nil element")
	}
	sc, ok := scalar.(*Scalar)
	if !ok {
		// Use the shared sentinel, matching Add, instead of the ad-hoc
		// string the original panicked with here.
		panic(internal.ErrCastScalar)
	}
	return &Scalar{scalar: ristretto255.NewScalar().Multiply(s.scalar, sc.scalar)}
}
// Invert returns the scalar's modular inverse ( 1 / scalar ).
func (s *Scalar) Invert() internal.Scalar {
	inv := ristretto255.NewScalar().Invert(s.scalar)
	return &Scalar{scalar: inv}
}

// Copy returns an independent copy of the Scalar.
func (s *Scalar) Copy() internal.Scalar {
	dup := ristretto255.NewScalar()
	dup.Add(dup, s.scalar) // dup starts at zero, so 0 + s yields a copy
	return &Scalar{scalar: dup}
}
// Decode decodes the input, sets the current scalar to its value, and
// returns the receiver; the receiver is untouched on error.
func (s *Scalar) Decode(in []byte) (internal.Scalar, error) {
	decoded, err := decodeScalar(in)
	if err != nil {
		return nil, err
	}
	s.scalar = decoded
	return s, nil
}

// Bytes returns the canonical byte encoding of the scalar.
func (s *Scalar) Bytes() []byte {
	return s.scalar.Encode(nil)
}
func decodeScalar(scalar []byte) (*ristretto255.Scalar, error) {
if len(scalar) == 0 {
return nil, internal.ErrParamNilScalar
}
if len(scalar) != canonicalEncodingLength {
return nil, internal.ErrParamScalarLength
}
s := ristretto255.NewScalar()
if err := s.Decode(scalar); err != nil {
return nil, err
}
return s, nil
} | group/ristretto/scalar.go | 0.876125 | 0.494019 | scalar.go | starcoder |
package output
import (
"github.com/Jeffail/benthos/v3/internal/docs"
"github.com/Jeffail/benthos/v3/lib/log"
"github.com/Jeffail/benthos/v3/lib/message/batch"
"github.com/Jeffail/benthos/v3/lib/metrics"
"github.com/Jeffail/benthos/v3/lib/output/writer"
"github.com/Jeffail/benthos/v3/lib/types"
"github.com/Jeffail/benthos/v3/lib/util/kafka/sasl"
"github.com/Jeffail/benthos/v3/lib/util/retries"
"github.com/Jeffail/benthos/v3/lib/util/tls"
)
//------------------------------------------------------------------------------
// init registers the kafka output type, its documentation, and its
// configuration field specs in the global Constructors table.
func init() {
	Constructors[TypeKafka] = TypeSpec{
		constructor: NewKafka,
		Summary: `
The kafka output type writes a batch of messages to Kafka brokers and waits for
acknowledgement before propagating it back to the input.`,
		Description: `
The config field ` + "`ack_replicas`" + ` determines whether we wait for
acknowledgement from all replicas or just a single broker.

Both the ` + "`key` and `topic`" + ` fields can be dynamically set using
function interpolations described [here](/docs/configuration/interpolation#bloblang-queries).
When sending batched messages these interpolations are performed per message
part.`,
		sanitiseConfigFunc: func(conf Config) (interface{}, error) {
			return sanitiseWithBatch(conf.Kafka, conf.Kafka.Batching)
		},
		Async:   true,
		Batches: true,
		FieldSpecs: append(docs.FieldSpecs{
			docs.FieldDeprecated("round_robin_partitions"),
			docs.FieldCommon("addresses", "A list of broker addresses to connect to. If an item of the list contains commas it will be expanded into multiple addresses.", []string{"localhost:9092"}, []string{"localhost:9041,localhost:9042"}, []string{"localhost:9041", "localhost:9042"}),
			tls.FieldSpec(),
			sasl.FieldSpec(),
			docs.FieldCommon("topic", "The topic to publish messages to.").SupportsInterpolation(false),
			docs.FieldCommon("client_id", "An identifier for the client connection."),
			docs.FieldCommon("key", "The key to publish messages with.").SupportsInterpolation(false),
			docs.FieldCommon("partitioner", "The partitioning algorithm to use.").HasOptions("fnv1a_hash", "murmur2_hash", "random", "round_robin"),
			docs.FieldCommon("compression", "The compression algorithm to use.").HasOptions("none", "snappy", "lz4", "gzip"),
			docs.FieldCommon("static_headers", "An optional map of static headers that should be added to messages in addition to metadata.", map[string]string{"first-static-header": "value-1", "second-static-header": "value-2"}),
			docs.FieldCommon("max_in_flight", "The maximum number of parallel message batches to have in flight at any given time."),
			docs.FieldAdvanced("ack_replicas", "Ensure that messages have been copied across all replicas before acknowledging receipt."),
			docs.FieldAdvanced("max_msg_bytes", "The maximum size in bytes of messages sent to the target topic."),
			docs.FieldAdvanced("timeout", "The maximum period of time to wait for message sends before abandoning the request and retrying."),
			docs.FieldAdvanced("target_version", "The version of the Kafka protocol to use."),
			batch.FieldSpec(),
		}, retries.FieldSpecs()...),
		Categories: []Category{
			CategoryServices,
		},
	}
}
//------------------------------------------------------------------------------
// NewKafka creates a new Kafka output type. A synchronous writer is used
// when max_in_flight is 1, otherwise an asynchronous writer with that many
// in-flight batches; either way the result is wrapped in a batcher built
// from the batching config.
func NewKafka(conf Config, mgr types.Manager, log log.Modular, stats metrics.Type) (Type, error) {
	k, err := writer.NewKafka(conf.Kafka, mgr, log, stats)
	if err != nil {
		return nil, err
	}
	var w Type
	if conf.Kafka.MaxInFlight == 1 {
		w, err = NewWriter(
			TypeKafka, k, log, stats,
		)
	} else {
		w, err = NewAsyncWriter(
			TypeKafka, conf.Kafka.MaxInFlight, k, log, stats,
		)
	}
	if err != nil {
		return nil, err
	}
	return newBatcherFromConf(conf.Kafka.Batching, w, mgr, log, stats)
}
//------------------------------------------------------------------------------
package compiler
import (
"fmt"
"regexp"
"github.com/influxdata/flux/ast"
"github.com/influxdata/flux/semantic"
"github.com/influxdata/flux/values"
"github.com/pkg/errors"
)
// Func is a compiled function that can be evaluated against an input object.
type Func interface {
	Type() semantic.Type
	Eval(input values.Object) (values.Value, error)
}

// Evaluator evaluates a single compiled node within a scope.
type Evaluator interface {
	Type() semantic.Type
	Eval(scope Scope) (values.Value, error)
}
type compiledFn struct {
root Evaluator
fnType semantic.Type
inputScope Scope
}
func (c compiledFn) validate(input values.Object) error {
sig := c.fnType.FunctionSignature()
properties := input.Type().Properties()
if len(properties) != len(sig.Parameters) {
return errors.New("mismatched parameters and properties")
}
for k, v := range sig.Parameters {
if !values.AssignableTo(properties[k], v) {
return fmt.Errorf("parameter %q has the wrong type, expected %v got %v", k, v, properties[k])
}
}
return nil
}
func (c compiledFn) buildScope(input values.Object) error {
if err := c.validate(input); err != nil {
return err
}
input.Range(func(k string, v values.Value) {
c.inputScope[k] = v
})
return nil
}
func (c compiledFn) Type() semantic.Type {
return c.fnType.FunctionSignature().Return
}
func (c compiledFn) Eval(input values.Object) (values.Value, error) {
if err := c.buildScope(input); err != nil {
return nil, err
}
return eval(c.root, c.inputScope)
}
// Scope is a mapping from identifier names to their current runtime
// values during evaluation.
type Scope map[string]values.Value

// Type returns the type of the value bound to name.
// It panics (nil dereference) if name has no binding.
func (s Scope) Type(name string) semantic.Type {
	return s[name].Type()
}

// Set binds name to v, overwriting any previous binding.
func (s Scope) Set(name string, v values.Value) {
	s[name] = v
}

// Get returns the value bound to name.
// It panics if there is no binding: evaluators are compiled from
// type-checked semantic graphs, so a missing name indicates a compiler bug.
func (s Scope) Get(name string) values.Value {
	v := s[name]
	if v == nil {
		panic("attempting to access non-existent value")
	}
	return v
}

// Copy returns a shallow copy of the scope; the values themselves are
// shared between the original and the copy.
func (s Scope) Copy() Scope {
	n := make(Scope, len(s))
	for k, v := range s {
		n[k] = v
	}
	return n
}
// eval evaluates e within scope and verifies that the produced value's
// nature matches the evaluator's declared type (CheckKind panics on a
// mismatch, which would indicate a compiler bug).
func eval(e Evaluator, scope Scope) (values.Value, error) {
	v, err := e.Eval(scope)
	if err != nil {
		return nil, err
	}
	values.CheckKind(e.Type().Nature(), v.Type().Nature())
	return v, nil
}
// blockEvaluator evaluates a sequence of statements; the block's value is
// the value of the last statement evaluated.
type blockEvaluator struct {
	t     semantic.Type
	body  []Evaluator
	value values.Value // value of the most recently evaluated statement
}

func (e *blockEvaluator) Type() semantic.Type {
	return e.t
}

// Eval evaluates each statement of the body in order, returning the last
// value produced.
// NOTE(review): an empty body leaves e.value nil and would panic inside
// CheckKind — presumably the compiler never emits empty blocks; confirm.
func (e *blockEvaluator) Eval(scope Scope) (values.Value, error) {
	var err error
	for _, b := range e.body {
		e.value, err = eval(b, scope)
		if err != nil {
			return nil, err
		}
	}
	values.CheckKind(e.value.Type().Nature(), e.Type().Nature())
	return e.value, nil
}

// returnEvaluator marks a return statement; it delegates entirely to the
// embedded evaluator of the returned expression.
type returnEvaluator struct {
	Evaluator
}

// declarationEvaluator evaluates a variable declaration, binding the
// initializer's value to the identifier in the current scope.
type declarationEvaluator struct {
	t    semantic.Type
	id   string
	init Evaluator
}

func (e *declarationEvaluator) Type() semantic.Type {
	return e.t
}

// Eval evaluates the initializer, stores the result under e.id in the
// scope and also returns it.
func (e *declarationEvaluator) Eval(scope Scope) (values.Value, error) {
	v, err := eval(e.init, scope)
	if err != nil {
		return nil, err
	}
	scope.Set(e.id, v)
	return v, nil
}

// objEvaluator evaluates an object literal, optionally extending an
// existing object referenced via "with".
type objEvaluator struct {
	t          semantic.Type
	with       *identifierEvaluator
	properties map[string]Evaluator
}

func (e *objEvaluator) Type() semantic.Type {
	return e.t
}

// Eval builds the object: first copying the "with" object's properties
// (when present), then evaluating and setting each declared property, so
// declared properties override copied ones.
func (e *objEvaluator) Eval(scope Scope) (values.Value, error) {
	obj := values.NewObject()
	if e.with != nil {
		with, err := e.with.Eval(scope)
		if err != nil {
			return nil, err
		}
		with.Object().Range(func(name string, v values.Value) {
			obj.Set(name, v)
		})
	}
	for k, node := range e.properties {
		v, err := eval(node, scope)
		if err != nil {
			return nil, err
		}
		obj.Set(k, v)
	}
	return obj, nil
}

// arrayEvaluator evaluates an array literal element by element.
type arrayEvaluator struct {
	t     semantic.Type
	array []Evaluator
}

func (e *arrayEvaluator) Type() semantic.Type {
	return e.t
}

// Eval evaluates each element in order and appends it to a new array of
// the declared element type.
func (e *arrayEvaluator) Eval(scope Scope) (values.Value, error) {
	arr := values.NewArray(e.t.ElementType())
	for _, ev := range e.array {
		v, err := eval(ev, scope)
		if err != nil {
			return nil, err
		}
		arr.Append(v)
	}
	return arr, nil
}

// logicalEvaluator evaluates "and"/"or" with short-circuit semantics: the
// right operand is only evaluated when the left side does not already
// decide the outcome.
type logicalEvaluator struct {
	t           semantic.Type
	operator    ast.LogicalOperatorKind
	left, right Evaluator
}

func (e *logicalEvaluator) Type() semantic.Type {
	return e.t
}

func (e *logicalEvaluator) Eval(scope Scope) (values.Value, error) {
	l, err := e.left.Eval(scope)
	if err != nil {
		return nil, err
	}
	values.CheckKind(l.Type().Nature(), e.t.Nature())
	switch e.operator {
	case ast.AndOperator:
		// false (or null) AND anything => false, without evaluating the right side.
		if l.IsNull() || !l.Bool() {
			return values.NewBool(false), nil
		}
	case ast.OrOperator:
		// true OR anything => true, without evaluating the right side.
		if !l.IsNull() && l.Bool() {
			return values.NewBool(true), nil
		}
	default:
		panic(fmt.Errorf("unknown logical operator %v", e.operator))
	}
	r, err := e.right.Eval(scope)
	if err != nil {
		return nil, err
	}
	return r, nil
}
// conditionalEvaluator evaluates an if/then/else expression.
type conditionalEvaluator struct {
	t          semantic.Type
	test       Evaluator
	consequent Evaluator
	alternate  Evaluator
}

func (e *conditionalEvaluator) Type() semantic.Type {
	return e.t
}

// Eval evaluates the test expression, then evaluates exactly one of the
// two branches according to its boolean result.
func (e *conditionalEvaluator) Eval(scope Scope) (values.Value, error) {
	cond, err := eval(e.test, scope)
	if err != nil {
		return nil, err
	}
	if cond.Bool() {
		return eval(e.consequent, scope)
	}
	return eval(e.alternate, scope)
}
// binaryEvaluator evaluates a binary expression by delegating to a binary
// function resolved ahead of time for the operand types.
type binaryEvaluator struct {
	t           semantic.Type
	left, right Evaluator
	f           values.BinaryFunction
}

func (e *binaryEvaluator) Type() semantic.Type {
	return e.t
}

// Eval evaluates both operands (left first) and applies the binary function.
func (e *binaryEvaluator) Eval(scope Scope) (values.Value, error) {
	l, err := eval(e.left, scope)
	if err != nil {
		return nil, err
	}
	r, err := eval(e.right, scope)
	if err != nil {
		return nil, err
	}
	return e.f(l, r)
}

// unaryEvaluator evaluates a unary expression (exists, +, -, not).
type unaryEvaluator struct {
	t    semantic.Type
	node Evaluator
	op   ast.OperatorKind
}

func (e *unaryEvaluator) Type() semantic.Type {
	return e.t
}

// Eval evaluates the operand and applies the operator:
//   - "exists" yields false for null and true otherwise;
//   - null propagates unchanged through the remaining operators;
//   - unary plus is the identity;
//   - subtraction/not negate ints, floats and durations, and invert bools.
func (e *unaryEvaluator) Eval(scope Scope) (values.Value, error) {
	v, err := e.node.Eval(scope)
	if err != nil {
		return nil, err
	}
	ret, err := func(v values.Value) (values.Value, error) {
		if e.op == ast.ExistsOperator {
			return values.NewBool(!v.IsNull()), nil
		}
		// If the value is null, return it immediately.
		if v.IsNull() {
			return v, nil
		}
		switch e.op {
		case ast.AdditionOperator:
			// Do nothing.
			return v, nil
		case ast.SubtractionOperator, ast.NotOperator:
			// Fallthrough to below.
		default:
			return nil, fmt.Errorf("unknown unary operator: %s", e.op)
		}
		// The subtraction operator falls through to here.
		switch v.Type().Nature() {
		case semantic.Int:
			return values.NewInt(-v.Int()), nil
		case semantic.Float:
			return values.NewFloat(-v.Float()), nil
		case semantic.Bool:
			return values.NewBool(!v.Bool()), nil
		case semantic.Duration:
			return values.NewDuration(-v.Duration()), nil
		default:
			panic(values.UnexpectedKind(e.t.Nature(), v.Type().Nature()))
		}
	}(v)
	if err != nil {
		return nil, err
	}
	values.CheckKind(ret.Type().Nature(), e.t.Nature())
	return ret, nil
}

// The evaluators below wrap literal values: Eval simply boxes the
// precomputed Go value into the corresponding runtime value.

// integerEvaluator is an integer literal.
type integerEvaluator struct {
	t semantic.Type
	i int64
}

func (e *integerEvaluator) Type() semantic.Type {
	return e.t
}

func (e *integerEvaluator) Eval(scope Scope) (values.Value, error) {
	return values.NewInt(e.i), nil
}

// stringEvaluator is a string literal.
type stringEvaluator struct {
	t semantic.Type
	s string
}

func (e *stringEvaluator) Type() semantic.Type {
	return e.t
}

func (e *stringEvaluator) Eval(scope Scope) (values.Value, error) {
	return values.NewString(e.s), nil
}

// regexpEvaluator is a regular-expression literal.
type regexpEvaluator struct {
	t semantic.Type
	r *regexp.Regexp
}

func (e *regexpEvaluator) Type() semantic.Type {
	return e.t
}

func (e *regexpEvaluator) Eval(scope Scope) (values.Value, error) {
	return values.NewRegexp(e.r), nil
}

// booleanEvaluator is a boolean literal.
type booleanEvaluator struct {
	t semantic.Type
	b bool
}

func (e *booleanEvaluator) Type() semantic.Type {
	return e.t
}

func (e *booleanEvaluator) Eval(scope Scope) (values.Value, error) {
	return values.NewBool(e.b), nil
}

// floatEvaluator is a floating-point literal.
type floatEvaluator struct {
	t semantic.Type
	f float64
}

func (e *floatEvaluator) Type() semantic.Type {
	return e.t
}

func (e *floatEvaluator) Eval(scope Scope) (values.Value, error) {
	return values.NewFloat(e.f), nil
}

// timeEvaluator is a time literal.
type timeEvaluator struct {
	t    semantic.Type
	time values.Time
}

func (e *timeEvaluator) Type() semantic.Type {
	return e.t
}

func (e *timeEvaluator) Eval(scope Scope) (values.Value, error) {
	return values.NewTime(e.time), nil
}

// durationEvaluator is a duration literal.
type durationEvaluator struct {
	t        semantic.Type
	duration values.Duration
}

func (e *durationEvaluator) Type() semantic.Type {
	return e.t
}

func (e *durationEvaluator) Eval(scope Scope) (values.Value, error) {
	return values.NewDuration(e.duration), nil
}
// identifierEvaluator resolves a name from the current scope.
type identifierEvaluator struct {
	t    semantic.Type
	name string
}

func (e *identifierEvaluator) Type() semantic.Type {
	return e.t
}

// Eval looks the name up in the scope (panicking via Scope.Get when it is
// missing) and checks the value's nature against the declared type.
func (e *identifierEvaluator) Eval(scope Scope) (values.Value, error) {
	v := scope.Get(e.name)
	values.CheckKind(v.Type().Nature(), e.t.Nature())
	return v, nil
}

// valueEvaluator wraps an already-computed value.
type valueEvaluator struct {
	value values.Value
}

func (e *valueEvaluator) Type() semantic.Type {
	return e.value.Type()
}

func (e *valueEvaluator) Eval(scope Scope) (values.Value, error) {
	return e.value, nil
}

// memberEvaluator evaluates a property access on an object expression.
type memberEvaluator struct {
	t        semantic.Type
	object   Evaluator
	property string
}

func (e *memberEvaluator) Type() semantic.Type {
	return e.t
}

// Eval evaluates the object and retrieves the named property.
// NOTE(review): the Get "ok" result is discarded, so a missing property
// flows through as whatever Get returns in that case — confirm intended.
func (e *memberEvaluator) Eval(scope Scope) (values.Value, error) {
	o, err := e.object.Eval(scope)
	if err != nil {
		return nil, err
	}
	v, _ := o.Object().Get(e.property)
	values.CheckKind(v.Type().Nature(), e.t.Nature())
	return v, nil
}

// arrayIndexEvaluator evaluates an array subscript expression.
type arrayIndexEvaluator struct {
	t     semantic.Type
	array Evaluator
	index Evaluator
}

func (e *arrayIndexEvaluator) Type() semantic.Type {
	return e.t
}

// Eval evaluates the array and the index expressions, then performs the
// lookup; the index is interpreted as an integer.
func (e *arrayIndexEvaluator) Eval(scope Scope) (values.Value, error) {
	a, err := e.array.Eval(scope)
	if err != nil {
		return nil, err
	}
	i, err := e.index.Eval(scope)
	if err != nil {
		return nil, err
	}
	return a.Array().Get(int(i.Int())), nil
}

// callEvaluator evaluates a function call expression.
type callEvaluator struct {
	t      semantic.Type
	callee Evaluator
	args   Evaluator
}

func (e *callEvaluator) Type() semantic.Type {
	return e.t
}

// Eval evaluates the argument object and the callee, then invokes the
// resulting function value with the arguments.
func (e *callEvaluator) Eval(scope Scope) (values.Value, error) {
	args, err := e.args.Eval(scope)
	if err != nil {
		return nil, err
	}
	f, err := e.callee.Eval(scope)
	if err != nil {
		return nil, err
	}
	return f.Function().Call(args.Object())
}

// functionEvaluator evaluates a function literal, producing a closure
// that captures the current scope.
type functionEvaluator struct {
	t      semantic.Type
	body   Evaluator
	params []functionParam
}

func (e *functionEvaluator) Type() semantic.Type {
	return e.t
}

func (e *functionEvaluator) Eval(scope Scope) (values.Value, error) {
	return &functionValue{
		t:      e.t,
		body:   e.body,
		params: e.params,
		scope:  scope,
	}, nil
}
// functionValue is the runtime representation of a function: a compiled
// body, its parameter list and the scope captured at definition time.
type functionValue struct {
	t      semantic.Type
	body   Evaluator
	params []functionParam
	scope  Scope
}

// functionParam describes one declared parameter and its optional default.
type functionParam struct {
	Key     string
	Default Evaluator
	Type    semantic.Type
}

func (f *functionValue) HasSideEffect() bool {
	return false
}

// Call binds the provided arguments (falling back to parameter defaults,
// which are evaluated in the captured scope) into a copy of the captured
// scope, then evaluates the body.
func (f *functionValue) Call(args values.Object) (values.Value, error) {
	scope := f.scope.Copy()
	for _, p := range f.params {
		a, ok := args.Get(p.Key)
		if !ok && p.Default != nil {
			v, err := eval(p.Default, f.scope)
			if err != nil {
				return nil, err
			}
			a = v
		}
		scope.Set(p.Key, a)
	}
	return eval(f.body, scope)
}

func (f *functionValue) Type() semantic.Type         { return f.t }
func (f *functionValue) PolyType() semantic.PolyType { return f.t.PolyType() }
func (f *functionValue) IsNull() bool                { return false }

// The accessors below satisfy the values.Value interface; requesting any
// nature other than Function is a kind error and panics.
func (f *functionValue) Str() string {
	panic(values.UnexpectedKind(semantic.Function, semantic.String))
}
func (f *functionValue) Int() int64 {
	panic(values.UnexpectedKind(semantic.Function, semantic.Int))
}
func (f *functionValue) UInt() uint64 {
	panic(values.UnexpectedKind(semantic.Function, semantic.UInt))
}
func (f *functionValue) Float() float64 {
	panic(values.UnexpectedKind(semantic.Function, semantic.Float))
}
func (f *functionValue) Bool() bool {
	panic(values.UnexpectedKind(semantic.Function, semantic.Bool))
}
func (f *functionValue) Time() values.Time {
	panic(values.UnexpectedKind(semantic.Function, semantic.Time))
}
func (f *functionValue) Duration() values.Duration {
	panic(values.UnexpectedKind(semantic.Function, semantic.Duration))
}
func (f *functionValue) Regexp() *regexp.Regexp {
	panic(values.UnexpectedKind(semantic.Function, semantic.Regexp))
}
func (f *functionValue) Array() values.Array {
	panic(values.UnexpectedKind(semantic.Function, semantic.Array))
}
func (f *functionValue) Object() values.Object {
	panic(values.UnexpectedKind(semantic.Function, semantic.Object))
}
func (f *functionValue) Function() values.Function {
	return f
}

// Equal reports identity equality: two function values are equal only if
// they have the same type and are the same pointer.
func (f *functionValue) Equal(rhs values.Value) bool {
	if f.Type() != rhs.Type() {
		return false
	}
	v, ok := rhs.(*functionValue)
	return ok && (f == v)
}
// noopEvaluator is a placeholder evaluator whose type is Nil.
type noopEvaluator struct{}

func (noopEvaluator) Type() semantic.Type {
	return semantic.Nil
}
func (noopEvaluator) Eval(scope Scope) (values.Value, error) {
return values.Null, nil
} | compiler/runtime.go | 0.682997 | 0.440289 | runtime.go | starcoder |
package processor
import (
"encoding/json"
"fmt"
"strconv"
"time"
"github.com/Jeffail/benthos/lib/log"
"github.com/Jeffail/benthos/lib/metrics"
"github.com/Jeffail/benthos/lib/types"
"github.com/Jeffail/benthos/lib/util/text"
"github.com/opentracing/opentracing-go"
)
//------------------------------------------------------------------------------
// init registers the "number" processor constructor and its user-facing
// documentation in the global Constructors registry.
func init() {
	Constructors[TypeNumber] = TypeSpec{
		constructor: NewNumber,
		description: `
Parses message contents into a 64-bit floating point number and performs an
operator on it. In order to execute this processor on a sub field of a document
use it with the ` + "[`process_field`](#process_field)" + ` processor.

The value field can either be a number or a string type. If it is a string type
then this processor will interpolate functions within it, you can find a list of
functions [here](../config_interpolation.md#functions).

For example, if we wanted to subtract the current unix timestamp from the field
'foo' of a JSON document ` + "`{\"foo\":1561219142}`" + ` we could use the
following config:

` + "``` yaml" + `
process_field:
  path: foo
  result_type: float
  processors:
  - number:
      operator: subtract
      value: "${!timestamp_unix}"
` + "```" + `

Value interpolations are resolved once per message batch, in order to resolve it
for each message of the batch place it within a
` + "[`for_each`](#for_each)" + ` processor.

### Operators

#### ` + "`add`" + `

Adds a value.

#### ` + "`subtract`" + `

Subtracts a value.`,
	}
}
//------------------------------------------------------------------------------
// NumberConfig contains configuration fields for the Number processor.
type NumberConfig struct {
	Parts    []int       `json:"parts" yaml:"parts"`       // indexes of message parts to process; empty means all parts
	Operator string      `json:"operator" yaml:"operator"` // operation to apply: "add" or "subtract"
	Value    interface{} `json:"value" yaml:"value"`       // operand: a number, or a string (optionally with interpolation functions)
}

// NewNumberConfig returns a NumberConfig with default values.
func NewNumberConfig() NumberConfig {
	return NumberConfig{
		Parts:    []int{},
		Operator: "add",
		Value:    0,
	}
}
//------------------------------------------------------------------------------
// numberOperator combines a message's numeric content with the configured
// operand value, producing the new content.
type numberOperator func(content, value float64) float64

// newNumberAddOperator returns an operator that adds the operand to the
// content.
func newNumberAddOperator() numberOperator {
	return func(c, v float64) float64 { return c + v }
}

// newNumberSubtractOperator returns an operator that subtracts the
// operand from the content.
func newNumberSubtractOperator() numberOperator {
	return func(c, v float64) float64 { return c - v }
}

// getNumberOperator resolves an operator name from configuration into its
// implementation, erroring on unknown names.
func getNumberOperator(opStr string) (numberOperator, error) {
	switch opStr {
	case "add":
		return newNumberAddOperator(), nil
	case "subtract":
		return newNumberSubtractOperator(), nil
	default:
		return nil, fmt.Errorf("operator not recognised: %v", opStr)
	}
}
//------------------------------------------------------------------------------
// Number is a processor that performs number based operations on payloads.
type Number struct {
	parts []int // indexes of message parts to operate on; empty means all
	interpolatedValue *text.InterpolatedString // non-nil when the operand contains interpolation functions
	value             float64                  // static operand, used when interpolatedValue is nil
	operator          numberOperator
	conf  Config
	log   log.Modular
	stats metrics.Type
	mCount     metrics.StatCounter
	mErr       metrics.StatCounter
	mSent      metrics.StatCounter
	mBatchSent metrics.StatCounter
}

// NewNumber returns a Number processor.
//
// The configured value may be numeric (used directly) or a string: string
// values containing interpolation functions are resolved per batch at
// process time, otherwise the string is parsed as a float up front.
func NewNumber(
	conf Config, mgr types.Manager, log log.Modular, stats metrics.Type,
) (Type, error) {
	n := &Number{
		parts: conf.Number.Parts,
		conf:  conf,
		log:   log,
		stats: stats,
		mCount:     stats.GetCounter("count"),
		mErr:       stats.GetCounter("error"),
		mSent:      stats.GetCounter("sent"),
		mBatchSent: stats.GetCounter("batch.sent"),
	}
	var err error
	switch t := conf.Number.Value.(type) {
	case string:
		if text.ContainsFunctionVariables([]byte(t)) {
			n.interpolatedValue = text.NewInterpolatedString(t)
		} else {
			n.value, err = strconv.ParseFloat(t, 64)
		}
	case float64:
		n.value = t
	case int:
		n.value = float64(t)
	case json.Number:
		n.value, err = t.Float64()
	default:
		err = fmt.Errorf("value type '%T' not allowed", t)
	}
	if err != nil {
		return nil, fmt.Errorf("failed to parse value: %v", err)
	}
	if n.operator, err = getNumberOperator(conf.Number.Operator); err != nil {
		return nil, err
	}
	return n, nil
}
//------------------------------------------------------------------------------
// ProcessMessage applies the processor to a message, either creating >0
// resulting messages or a response to be sent back to the message source.
//
// The operand is resolved once per batch: if the interpolated value fails
// to parse as a float, the error is flagged on every part and the batch is
// passed through unmodified.
func (n *Number) ProcessMessage(msg types.Message) ([]types.Message, types.Response) {
	n.mCount.Incr(1)
	newMsg := msg.Copy()
	value := n.value
	if n.interpolatedValue != nil {
		interpStr := n.interpolatedValue.Get(msg)
		var err error
		if value, err = strconv.ParseFloat(interpStr, 64); err != nil {
			n.log.Errorf("Failed to parse interpolated value '%v' into float: %v\n", interpStr, err)
			newMsg.Iter(func(i int, p types.Part) error {
				FlagErr(p, err)
				return nil
			})
			n.mBatchSent.Incr(1)
			n.mSent.Incr(int64(newMsg.Len()))
			return []types.Message{newMsg}, nil
		}
	}
	// Per part: parse the content as a float, apply the operator, and
	// write the result back; parse failures flag the individual part.
	proc := func(index int, span opentracing.Span, part types.Part) error {
		data, err := strconv.ParseFloat(string(part.Get()), 64)
		if err != nil {
			n.mErr.Incr(1)
			n.log.Debugf("Failed to parse content into float: %v\n", err)
			return err
		}
		data = n.operator(data, value)
		part.Set([]byte(strconv.FormatFloat(data, 'f', -1, 64)))
		return nil
	}
	IteratePartsWithSpan(TypeNumber, n.parts, newMsg, proc)
	n.mBatchSent.Incr(1)
	n.mSent.Incr(int64(newMsg.Len()))
	return []types.Message{newMsg}, nil
}

// CloseAsync shuts down the processor and stops processing requests.
// The Number processor holds no background resources, so this is a no-op.
func (n *Number) CloseAsync() {
}

// WaitForClose blocks until the processor has closed down.
func (n *Number) WaitForClose(timeout time.Duration) error {
	return nil
}
//------------------------------------------------------------------------------ | lib/processor/number.go | 0.815012 | 0.718138 | number.go | starcoder |
package result
import "github.com/medmouine/gomad/maybe"
/*
Result aims at abstracting all logic related to operations susceptible to failures, such as external API calls, etc.
It offers constructors and methods to safely manipulate the result in case of success and handle errors gracefully in case of failure.
*/
type Result[T any] interface {
	// WithDefault substitutes a fallback value when the result is an error.
	WithDefault(T) Result[T]
	// Ok returns the success value; it panics on an error result.
	Ok() T
	// IsOk reports whether the result holds a success value.
	IsOk() bool
	// IfOk runs f with the value when the result is a success.
	IfOk(f func(T)) Result[T]
	// Map transforms the success value; error results pass through.
	Map(func(T) T) Result[T]
	// Or returns the success value, or the given fallback on error.
	Or(T) T
	// Err returns the error; it panics on a success result.
	Err() error
	// IsErr reports whether the result holds an error.
	IsErr() bool
	// IfErr runs f with the error when the result is a failure.
	IfErr(f func(error)) Result[T]
	// MapErr transforms the error; success results pass through.
	MapErr(func(error) error) Result[T]
	// Maybe converts the result to a Maybe, discarding the error detail.
	Maybe() maybe.Maybe[T]
}

// result is the sole Result implementation; exactly one of val/err is set.
type result[T any] struct {
	Result[T]
	val *T
	err error
}
/*
Ok creates a new Result from a valid value.
*/
func Ok[T any](val T) Result[T] {
	return result[T]{val: &val}
}

/*
Err creates a new Result from an invalid value (error).
*/
func Err[T any](err error) Result[T] {
	return result[T]{err: err}
}

/*
FromMaybe creates a new Result from a Maybe instance.
An empty Maybe becomes an error result carrying err; otherwise the wrapped
value becomes a success result.
*/
func FromMaybe[T any](m maybe.Maybe[T], err error) Result[T] {
	if m.IsNil() {
		return Err[T](err)
	}
	return Ok(m.Unwrap())
}

/*
Of creates a new Result from a possibly valid value and an error.
A non-nil error wins: the value is discarded and an error result returned.
*/
func Of[T any](val T, err error) Result[T] {
	if err != nil {
		return Err[T](err)
	}
	return Ok(val)
}
// Ok returns the success value. It panics when called on an error result;
// guard with IsOk, or use Or/WithDefault for a non-panicking alternative.
func (r result[T]) Ok() T {
	if r.IsOk() {
		return *r.val
	}
	panic(any("result.Ok() called on Err() result"))
}

// Or returns the success value, or val when the result is an error.
func (r result[T]) Or(val T) T {
	if r.IsOk() {
		return r.Ok()
	}
	return val
}

// Err returns the error. It panics when called on a success result.
func (r result[T]) Err() error {
	if r.IsErr() {
		return r.err
	}
	panic(any("result.Err() called on Ok() result"))
}

// WithDefault turns an error result into a success holding val; success
// results are returned unchanged.
func (r result[T]) WithDefault(val T) Result[T] {
	if r.IsOk() {
		return r
	}
	return Ok(val)
}

// Maybe converts the result to a Maybe, discarding the error detail.
func (r result[T]) Maybe() maybe.Maybe[T] {
	if r.IsErr() {
		return maybe.None[T]()
	}
	return maybe.Just[T](r.Ok())
}
// MapErr applies f to the error of a failure result; success results pass
// through unchanged.
func (r result[T]) MapErr(f func(error) error) Result[T] {
	if r.IsErr() {
		return Err[T](f(r.Err()))
	}
	return r
}

// Map applies f to the value of a success result; error results pass
// through unchanged.
func (r result[T]) Map(f func(T) T) Result[T] {
	if r.IsOk() {
		return Ok(f(r.Ok()))
	}
	return r
}

// IfErr invokes f with the error when the result is a failure, for side
// effects, and returns the result for chaining.
func (r result[T]) IfErr(f func(error)) Result[T] {
	if r.IsErr() {
		f(r.err)
	}
	return r
}

// IfOk invokes f with the value when the result is a success, for side
// effects, and returns the result for chaining.
func (r result[T]) IfOk(f func(T)) Result[T] {
	if r.IsOk() {
		f(r.Ok())
	}
	return r
}

// IsOk reports whether the result holds a success value.
func (r result[T]) IsOk() bool {
	return !r.IsErr()
}
func (r result[T]) IsErr() bool {
return r.err != nil
} | result/result.go | 0.586523 | 0.487124 | result.go | starcoder |
package enigma
import (
"fmt"
)
// RotorConfig contains full configuration of a rotor.
type RotorConfig = struct {
	Model         RotorModel
	WheelPosition byte // initial letter visible in the rotor window
	RingPosition  int  // 1-based ring setting (Ringstellung)
}

// rotor models a single Enigma rotor: its wiring in both signal
// directions, its turnover notches, and its current wheel/ring positions.
type rotor struct {
	model                RotorModel
	wiringMapIn          map[int]int // In = first pass through the rotors (from the plugboard to the reflector)
	wiringMapOut         map[int]int // Out = second pass (from the reflector to the plugboard)
	notchPositions       []int
	initialWheelPosition byte // necessary for rotor reset
	wheelPosition        int
	ringPosition         int
}
// newRotor builds a rotor from its model definition, precomputing the
// forward (in) and reverse (out) wiring maps and the notch indexes.
// It panics on an unknown model or malformed wiring/notch data, since
// these come from static model definitions rather than user input.
func newRotor(rotorModel RotorModel) rotor {
	if !rotorModel.exists() {
		panic(fmt.Errorf("unsupported rotor model"))
	}
	wiring := rotorModel.getWiring()
	if !Alphabet.isValidWiring(wiring) {
		panic(fmt.Errorf("invalid rotor wiring %s", wiring))
	}
	// Convert notch letters into alphabet indexes.
	notchPositions := make([]int, len(rotorModel.getNotchPositions()))
	for i, notchPositionByte := range rotorModel.getNotchPositions() {
		notchPositionInt, ok := Alphabet.charToInt(notchPositionByte)
		if !ok {
			panic(fmt.Errorf("invalid notch position %s", string(notchPositionByte)))
		}
		notchPositions[i] = notchPositionInt
	}
	// Build both direction maps from the single wiring string: position i
	// wires to letter wiring[i] on the way in, and back again on the way out.
	in := make(map[int]int, Alphabet.getSize())
	out := make(map[int]int, Alphabet.getSize())
	for i, letter := range wiring {
		letterIndex, ok := Alphabet.charToInt(byte(letter))
		if !ok {
			panic(fmt.Errorf("unsupported wiring letter %s", string(letter))) // should not happen, we already checked the wiring validity
		}
		in[i] = letterIndex
		out[letterIndex] = i
	}
	return rotor{
		model:                rotorModel,
		wiringMapIn:          in,
		wiringMapOut:         out,
		notchPositions:       notchPositions,
		initialWheelPosition: Alphabet.intToChar(0),
		wheelPosition:        0, // start on the first position by default
		ringPosition:         1,
	}
}
// setWheelPosition sets the rotor's wheel to the given letter and records
// it as the initial position used by reset.
func (r *rotor) setWheelPosition(letter byte) error {
	index, ok := Alphabet.charToInt(letter)
	if !ok {
		return fmt.Errorf("unsupported rotor wheel position \"%s\"", string(letter))
	}
	r.wheelPosition = index
	r.initialWheelPosition = letter
	return nil
}

// getWheelPosition returns the current wheel position as an alphabet index.
func (r *rotor) getWheelPosition() int {
	return r.wheelPosition
}

// setRingPosition sets the 1-based ring setting, validating its range.
func (r *rotor) setRingPosition(position int) error {
	if position < 1 || position > Alphabet.getSize() {
		return fmt.Errorf("invalid ring position %d, must be a number between 1 and %d", position, Alphabet.getSize())
	}
	r.ringPosition = position
	return nil
}

// reset returns the wheel to its recorded initial position. A failure is
// impossible for a position previously accepted, hence the panic.
func (r *rotor) reset() {
	if err := r.setWheelPosition(r.initialWheelPosition); err != nil {
		panic(fmt.Errorf("failed to reset rotor %s: %w", r.model, err))
	}
}
// translateIn maps a signal travelling from the plugboard towards the
// reflector through this rotor's wiring.
func (r *rotor) translateIn(input int) int {
	return r.translate(input, r.wiringMapIn)
}

// translateOut maps a signal on the return path, from the reflector back
// towards the plugboard.
func (r *rotor) translateOut(input int) int {
	return r.translate(input, r.wiringMapOut)
}

// translate applies the wiring map while compensating for the rotor's
// current rotation and ring setting: the contact index is shifted before
// the lookup and shifted back afterwards.
func (r *rotor) translate(input int, translateMap map[int]int) int {
	shiftSize := r.wheelPosition - r.ringPosition + 1
	rotatedInput := shift(input, shiftSize) // shift according to the wheel and ring rotation
	rotatedOutput := translateMap[rotatedInput] // translate
	return shift(rotatedOutput, -shiftSize) // shift back
}

// rotate advances the wheel by one step, wrapping around the alphabet.
func (r *rotor) rotate() {
	r.wheelPosition = (r.wheelPosition + 1) % Alphabet.getSize()
}
func (r *rotor) shouldRotateNext() bool {
for _, notchPosition := range r.notchPositions {
if r.wheelPosition == notchPosition {
return true // double-stepping - we are about to cross a notch in the next step, next rotor should be rotated too then
}
}
return false
} | rotor.go | 0.720073 | 0.452294 | rotor.go | starcoder |
package main
import "reflect"
// dirs enumerates the four orthogonal neighbour offsets.
var dirs = []struct{ x, y int }{{-1, 0}, {1, 0}, {0, -1}, {0, 1}}

// colorBorder_dfs recolors the border of the connected component of
// grid[row][col] (4-directional connectivity over cells of the same
// original color) to color, via depth-first search. Interior cells of the
// component keep their color. The grid is modified in place and returned.
func colorBorder_dfs(grid [][]int, row, col, color int) [][]int {
	m, n := len(grid), len(grid[0])
	type point struct{ x, y int }
	borders := []point{}
	originalColor := grid[row][col]
	vis := make([][]bool, m)
	for i := range vis {
		vis[i] = make([]bool, n)
	}
	var dfs func(int, int)
	dfs = func(x, y int) {
		vis[x][y] = true
		isBorder := false
		for _, dir := range dirs {
			nx, ny := x+dir.x, y+dir.y
			if !(0 <= nx && nx < m && 0 <= ny && ny < n && grid[nx][ny] == originalColor) {
				// Neighbour is off-grid or differently colored: (x, y) is on the border.
				isBorder = true
			} else if !vis[nx][ny] {
				// dfs marks the cell visited on entry, so no need to pre-mark it here
				// (the original redundantly set vis[nx][ny] before recursing).
				dfs(nx, ny)
			}
		}
		if isBorder {
			borders = append(borders, point{x, y})
		}
	}
	dfs(row, col)
	// Recolor only after the traversal so the search still sees originalColor.
	for _, p := range borders {
		grid[p.x][p.y] = color
	}
	return grid
}
// colorBorder_bfs recolors the border of the connected component of
// grid[row][col] (4-directional, same original color) to color, using a
// breadth-first traversal. The grid is modified in place and returned.
func colorBorder_bfs(grid [][]int, row, col, color int) [][]int {
	type point struct{ x, y int }
	rows, cols := len(grid), len(grid[0])
	src := grid[row][col]
	offsets := [4]point{{-1, 0}, {1, 0}, {0, -1}, {0, 1}}
	seen := make([][]bool, rows)
	for r := range seen {
		seen[r] = make([]bool, cols)
	}
	var edge []point
	queue := []point{{row, col}}
	seen[row][col] = true
	for len(queue) > 0 {
		cur := queue[0]
		queue = queue[1:]
		onEdge := false
		for _, d := range offsets {
			nx, ny := cur.x+d.x, cur.y+d.y
			inside := nx >= 0 && nx < rows && ny >= 0 && ny < cols && grid[nx][ny] == src
			if !inside {
				// Off-grid or differently colored neighbour: cur is a border cell.
				onEdge = true
				continue
			}
			if !seen[nx][ny] {
				seen[nx][ny] = true
				queue = append(queue, point{nx, ny})
			}
		}
		if onEdge {
			edge = append(edge, cur)
		}
	}
	// Recolor after the traversal so the search always sees the source color.
	for _, p := range edge {
		grid[p.x][p.y] = color
	}
	return grid
}
func main() {
assert := func(a, b [][]int) {
if !reflect.DeepEqual(a, b) {
panic("Not Passed")
}
}
assert(colorBorder_dfs([][]int{{1, 1}, {1, 2}}, 0, 0, 3), [][]int{{3, 3}, {3, 2}})
assert(colorBorder_bfs([][]int{{1, 1}, {1, 2}}, 0, 0, 3), [][]int{{3, 3}, {3, 2}})
assert(colorBorder_dfs([][]int{{1, 2, 2}, {2, 3, 2}}, 0, 1, 3), [][]int{{1, 3, 3}, {2, 3, 3}})
assert(colorBorder_bfs([][]int{{1, 2, 2}, {2, 3, 2}}, 0, 1, 3), [][]int{{1, 3, 3}, {2, 3, 3}})
assert(colorBorder_dfs([][]int{{1, 1, 1}, {1, 1, 1}, {1, 1, 1}}, 1, 1, 2), [][]int{{2, 2, 2}, {2, 1, 2}, {2, 2, 2}})
assert(colorBorder_bfs([][]int{{1, 1, 1}, {1, 1, 1}, {1, 1, 1}}, 1, 1, 2), [][]int{{2, 2, 2}, {2, 1, 2}, {2, 2, 2}})
} | 1034_color_border/color_border.go | 0.510741 | 0.424472 | color_border.go | starcoder |
package main
import (
"github.com/nsf/termbox-go"
"math"
"time"
)
// Rendering and torus geometry constants.
const framedelay = 20 // milliseconds between frames
const thetaspacing = 0.02 // angular step around the torus cross-section
const phispacing = 0.02 // angular step around the center of revolution
const R1 = 0.9 // radius of the cross-sectional circle
const R2 = 1.8 // distance from torus center to cross-section center
const K2 = 8.0 // distance from the viewer to the torus

// Screen holds the square character framebuffer and rendering state.
type Screen struct {
	dim int // side length of the square framebuffer
	lum24 [][]int // per-cell luminance in 1..24 (' ' used as the empty sentinel)
	data [][]byte // per-cell ASCII luminance character
	zoom float64 // target K1 scale to zoom back in to (set by main)
	transition int // 0 = steady, 1 = zooming out, 2 = zooming in (driven by main)
}
// newZBuffer allocates a d-by-d depth buffer initialised to zero.
func newZBuffer(d int) *[][]float64 {
	rows := make([][]float64, d)
	for r := range rows {
		rows[r] = make([]float64, d)
	}
	return &rows
}

// newScreen allocates a Screen with d-by-d character and luminance
// buffers; zoom and transition start at their zero values.
func newScreen(d int) *Screen {
	data := make([][]byte, d)
	lum := make([][]int, d)
	for i := 0; i < d; i++ {
		data[i] = make([]byte, d)
		lum[i] = make([]int, d)
	}
	return &Screen{
		dim:        d,
		lum24:      lum,
		data:       data,
		zoom:       0,
		transition: 0,
	}
}
// render flushes the framebuffer to the terminal using the given
// rendermode, then clears the framebuffer for the next frame.
// Modes: 1 = greyscale background cells, 2 = 216-color glyphs,
// 3 = 216-color background cells, anything else = greyscale glyphs.
func (screen *Screen) render(rendermode int) {
	switch rendermode {
	case 1:
		termbox.SetOutputMode(termbox.OutputGrayscale)
	case 2:
		termbox.SetOutputMode(termbox.Output216)
	case 3:
		termbox.SetOutputMode(termbox.Output216)
	default:
		termbox.SetOutputMode(termbox.OutputGrayscale)
	}
	for x, _ := range screen.data {
		for y, _ := range screen.data[x] {
			switch rendermode {
			case 1:
				screen.draw(x, y, ' ', screen.lum24[x][y], screen.lum24[x][y])
			case 2:
				screen.draw(x, y, rune(screen.data[x][y]), screen.lum24[x][y]/3+1, screen.lum24[x][y]/4)
			case 3:
				screen.draw(x, y, ' ', 0, screen.lum24[x][y])
			default:
				screen.draw(x, y, rune(screen.data[x][y]), screen.lum24[x][y], 0)
			}
		}
	}
	screen.clear()
}

// draw paints one logical pixel as two adjacent terminal cells (cells are
// roughly twice as tall as wide, so doubling x keeps the aspect ratio).
func (screen *Screen) draw(x, y int, char rune, fg, bg int) {
	termbox.SetCell(x*2, y, char, termbox.Attribute(fg), termbox.Attribute(bg))
	termbox.SetCell(x*2-1, y, char, termbox.Attribute(fg), termbox.Attribute(bg))
}

// clear blanks cells that were already empty and resets the remainder to
// the ' ' sentinel for the next frame.
// NOTE(review): the first branch compares the int luminance against the
// rune ' ' (32); this relies on ' ' doubling as the empty sentinel value.
func (screen *Screen) clear() {
	for x, _ := range screen.data {
		for y, _ := range screen.data[x] {
			if screen.lum24[x][y] == ' ' {
				screen.draw(x, y, ' ', 0, 0)
			} else {
				screen.data[x][y] = ' '
				screen.lum24[x][y] = ' '
			}
		}
	}
}
// computeFrame rasterizes one frame of the torus — rotated by angles A
// and B and projected with scale K1 — into the screen's data and lum24
// buffers. A fresh z-buffer ensures only the nearest surface point wins
// each cell.
func (screen *Screen) computeFrame(A, B, K1 float64) {
	// precompute sines and cosines of A and B
	cosA := math.Cos(A)
	sinA := math.Sin(A)
	cosB := math.Cos(B)
	sinB := math.Sin(B)
	zbuffer := newZBuffer(screen.dim)
	// theta goes around the cross-sectional circle of a torus
	for theta := 0.0; theta < 2.0*math.Pi; theta += thetaspacing {
		// precompute sines and cosines of theta
		costheta := math.Cos(theta)
		sintheta := math.Sin(theta)
		// phi goes around the center of revolution of a torus
		for phi := 0.0; phi < 2.0*math.Pi; phi += phispacing {
			// precompute sines and cosines of phi
			cosphi := math.Cos(phi)
			sinphi := math.Sin(phi)
			// the x,y coordinate of the circle, before revolving (factored out of the above equations)
			circlex := R2 + R1*costheta
			circley := R1 * sintheta
			// final 3D (x,y,z) coordinate after rotations, directly from our math above
			x := circlex*(cosB*cosphi+sinA*sinB*sinphi) - circley*cosA*sinB
			y := circlex*(sinB*cosphi-sinA*cosB*sinphi) + circley*cosA*cosB
			z := K2 + cosA*circlex*sinphi + circley*sinA
			ooz := 1 / z // "one over z"
			// x and y projection. note that y is negated here, because y goes up in
			// 3D space but down on 2D displays.
			xp := int(float64(screen.dim)/2.0 + K1*ooz*x)
			yp := int(float64(screen.dim)/2.0 - K1*ooz*y)
			// calculate luminance. ugly, but correct.
			L := cosphi*costheta*sinB - cosA*costheta*sinphi - sinA*sintheta +
				cosB*(cosA*sintheta-costheta*sinA*sinphi)
			// L ranges from -sqrt(2) to +sqrt(2). If it's < 0, the surface is
			// pointing away from us, so we won't bother trying to plot it.
			if L > 0 {
				// test against the z-buffer. larger 1/z means the pixel is closer to
				// the viewer than what's already plotted.
				if ooz > (*zbuffer)[yp][xp] {
					(*zbuffer)[yp][xp] = ooz
					asciiIndex := int(L * 8.0) // this brings L into the range 0..11 (8*sqrt(2) = 11.3)
					lum24 := int((L * 16.0) + 1) // this brings L into the range 1..24 (16*sqrt(2) + 1 = 23.6)
					// now we lookup the character corresponding to the luminance and plot it in our output:
					screen.data[yp][xp] = ".,-~:;=!*#$@"[asciiIndex]
					screen.lum24[yp][xp] = lum24
				}
			}
		}
	}
}
func main() {
err := termbox.Init()
if err != nil {
panic(err)
}
defer termbox.Close()
eventQueue := make(chan termbox.Event)
go func() {
for {
eventQueue <- termbox.PollEvent()
}
}()
w, h := termbox.Size()
dim := int(math.Min(float64(w), float64(h)))
screen := newScreen(dim)
rendermode := int(0)
termbox.SetOutputMode(termbox.OutputGrayscale)
// Calculate K1 based on screen size: the maximum x-distance occurs roughly at
// the edge of the torus, which is at x=R1+R2, z=0. we want that to be
// displaced 3/8ths of the width of the screen, which is 3/4th of the way from
// the center to the side of the screen.
//screen.dim*3/8 = K1*(R1+R2)/(K2+0)
//screen.dim*K2*3/(8*(R1+R2)) = K1
A, B, K1 := 1.0, 1.0, float64(screen.dim)*K2*3.0/(8.0*(R1+R2))
// start zoomed out
screen.zoom = K1
K1 = 0
screen.transition = 2
loop:
for {
select {
case ev := <-eventQueue:
if ev.Type == termbox.EventKey && ev.Key == termbox.KeyEsc {
break loop
}
if ev.Type == termbox.EventKey && ev.Key == termbox.KeyEnter {
screen.transition = 1
}
default:
// rotate the torus
A += 0.07
B += 0.03
// zoom out if transitioning to new rendermode
if screen.transition == 1 {
K1 -= 2
}
// all the way zoomed out, switch rendermodes
if K1 < 0 {
screen.transition = 2
rendermode++
K1++
if rendermode == 4 {
rendermode = 0
}
}
// zoom back in
if screen.transition == 2 {
if K1 < screen.zoom {
K1 += 1.5
}
if K1 >= screen.zoom {
K1 = screen.zoom
screen.transition = 0
}
}
// draw all the things
screen.computeFrame(A, B, K1)
screen.render(rendermode)
termbox.Flush()
time.Sleep(framedelay * time.Millisecond)
}
}
} | donut.go | 0.588771 | 0.414721 | donut.go | starcoder |
package common
import (
"bytes"
"io"
)
var (
	// ErrIOEOF is returned when a read would run past the written bytes.
	ErrIOEOF = io.EOF
)

// Buffer is a growable byte buffer with little-endian integer accessors.
// pos is the write cursor (the number of valid bytes), seek is the read
// cursor, and cap tracks the allocated size consulted by extend.
type Buffer struct {
	pos  int
	seek int
	cap  int
	buf  []byte
}

// NewBuffer creates a new buffer with the given initial capacity.
func NewBuffer(cap int) *Buffer {
	return &Buffer{pos: 0,
		cap: cap,
		buf: make([]byte, cap),
	}
}

// ReadBuffer wraps b for reading: the read cursor starts at the beginning
// and the write cursor at len(b).
func ReadBuffer(b []byte) *Buffer {
	return &Buffer{
		buf: b,
		pos: len(b),
	}
}

// Reset replaces the buffer contents with data and rewinds the read cursor.
// It also resynchronizes cap with the new backing slice: previously a
// stale, larger cap from an earlier allocation made extend skip growth,
// so a subsequent write indexed past len(buf) and panicked.
func (b *Buffer) Reset(data []byte) {
	b.buf = data
	b.pos = len(data)
	b.cap = len(data)
	b.seek = 0
}

// Datas returns the written bytes of the buffer.
func (b *Buffer) Datas() []byte {
	return b.buf[:b.pos]
}

// Length returns the number of valid (written) bytes.
func (b *Buffer) Length() int {
	return b.pos
}

// Seek returns the current read position.
func (b *Buffer) Seek() int {
	return b.seek
}

// extend guarantees room for n more bytes at the write cursor, doubling
// the required size on growth to amortize future appends.
func (b *Buffer) extend(n int) {
	if (b.pos + n) > b.cap {
		// allocate double what's needed, for future growth
		b.cap = (b.pos + n) * 2
		t := make([]byte, b.cap)
		copy(t, b.buf)
		b.buf = t
	}
}

// WriteU8 appends a single byte.
func (b *Buffer) WriteU8(v uint8) {
	b.extend(1)
	b.buf[b.pos] = v
	b.pos++
}

// ReadU8 reads one byte, returning ErrIOEOF when exhausted.
func (b *Buffer) ReadU8() (v uint8, err error) {
	if (b.seek + 1) > b.pos {
		err = ErrIOEOF
		return
	}
	v = b.buf[b.seek]
	b.seek++
	return
}

// WriteU16 appends v in little-endian byte order.
func (b *Buffer) WriteU16(v uint16) {
	b.extend(2)
	b.buf[b.pos] = byte(v)
	b.buf[b.pos+1] = byte(v >> 8)
	b.pos += 2
}

// ReadU16 reads a little-endian uint16, returning ErrIOEOF when exhausted.
func (b *Buffer) ReadU16() (v uint16, err error) {
	if (b.seek + 2) > b.pos {
		err = ErrIOEOF
		return
	}
	v = uint16(b.buf[b.seek]) |
		uint16(b.buf[b.seek+1])<<8
	b.seek += 2
	return
}

// WriteU24 appends the low 24 bits of v in little-endian byte order.
func (b *Buffer) WriteU24(v uint32) {
	b.extend(3)
	b.buf[b.pos] = byte(v)
	b.buf[b.pos+1] = byte(v >> 8)
	b.buf[b.pos+2] = byte(v >> 16)
	b.pos += 3
}

// ReadU24 reads a little-endian 3-byte unsigned integer.
func (b *Buffer) ReadU24() (v uint32, err error) {
	if (b.seek + 3) > b.pos {
		err = ErrIOEOF
		return
	}
	v = uint32(b.buf[b.seek]) |
		uint32(b.buf[b.seek+1])<<8 |
		uint32(b.buf[b.seek+2])<<16
	b.seek += 3
	return
}

// WriteU32 appends v in little-endian byte order.
func (b *Buffer) WriteU32(v uint32) {
	b.extend(4)
	b.buf[b.pos] = byte(v)
	b.buf[b.pos+1] = byte(v >> 8)
	b.buf[b.pos+2] = byte(v >> 16)
	b.buf[b.pos+3] = byte(v >> 24)
	b.pos += 4
}

// ReadU32 reads a little-endian uint32, returning ErrIOEOF when exhausted.
func (b *Buffer) ReadU32() (v uint32, err error) {
	if (b.seek + 4) > b.pos {
		err = ErrIOEOF
		return
	}
	v = uint32(b.buf[b.seek]) |
		uint32(b.buf[b.seek+1])<<8 |
		uint32(b.buf[b.seek+2])<<16 |
		uint32(b.buf[b.seek+3])<<24
	b.seek += 4
	return
}

// WriteU64 appends v in little-endian byte order.
func (b *Buffer) WriteU64(v uint64) {
	b.extend(8)
	b.buf[b.pos] = byte(v)
	b.buf[b.pos+1] = byte(v >> 8)
	b.buf[b.pos+2] = byte(v >> 16)
	b.buf[b.pos+3] = byte(v >> 24)
	b.buf[b.pos+4] = byte(v >> 32)
	b.buf[b.pos+5] = byte(v >> 40)
	b.buf[b.pos+6] = byte(v >> 48)
	b.buf[b.pos+7] = byte(v >> 56)
	b.pos += 8
}
// ReadU64 used to read uint64.
func (b *Buffer) ReadU64() (v uint64, err error) {
if (b.seek + 8) > b.pos {
err = ErrIOEOF
return
}
v = uint64(b.buf[b.seek]) |
uint64(b.buf[b.seek+1])<<8 |
uint64(b.buf[b.seek+2])<<16 |
uint64(b.buf[b.seek+3])<<24 |
uint64(b.buf[b.seek+4])<<32 |
uint64(b.buf[b.seek+5])<<40 |
uint64(b.buf[b.seek+6])<<48 |
uint64(b.buf[b.seek+7])<<56
b.seek += 8
return
}
// WriteLenEncode used to write variable length.
// https://dev.mysql.com/doc/internals/en/integer.html#length-encoded-integer
func (b *Buffer) WriteLenEncode(v uint64) {
	// The cases are evaluated in order, so each lower bound is implied by
	// the previous case having failed.
	switch {
	case v < 251:
		b.WriteU8(uint8(v))
	case v < (1 << 16):
		// 2-byte integer, prefixed with 0xfc.
		b.WriteU8(0xfc)
		b.WriteU16(uint16(v))
	case v < (1 << 24):
		// 3-byte integer, prefixed with 0xfd.
		b.WriteU8(0xfd)
		b.WriteU24(uint32(v))
	default:
		// 8-byte integer, prefixed with 0xfe.
		b.WriteU8(0xfe)
		b.WriteU64(v)
	}
}
// WriteLenEncodeNUL used to write NUL>
// 0xfb is represents a NULL in a ProtocolText::ResultsetRow
func (b *Buffer) WriteLenEncodeNUL() {
b.WriteU8(0xfb)
}
// ReadLenEncode used to read variable length.
//
// The first byte selects the width of the integer that follows:
//
//	0xfb          -> NULL marker (returns ^uint64(0) as a sentinel)
//	0xfc          -> 2-byte integer
//	0xfd          -> 3-byte integer
//	0xfe          -> 8-byte integer
//	other (<0xfb) -> the byte itself is the value
func (b *Buffer) ReadLenEncode() (v uint64, err error) {
	var u8 uint8
	var u16 uint16
	var u24 uint32
	if u8, err = b.ReadU8(); err != nil {
		return
	}
	switch u8 {
	case 0xfb:
		// nil value
		// we set the length to maxuint64.
		v = ^uint64(0)
		return
	case 0xfc:
		if u16, err = b.ReadU16(); err != nil {
			return
		}
		v = uint64(u16)
		return
	case 0xfd:
		if u24, err = b.ReadU24(); err != nil {
			return
		}
		v = uint64(u24)
		return
	case 0xfe:
		if v, err = b.ReadU64(); err != nil {
			return
		}
		return
	default:
		// Single-byte value in the range 0x00..0xfa.
		return uint64(u8), nil
	}
}
// WriteLenEncodeString used to write variable string.
func (b *Buffer) WriteLenEncodeString(s string) {
l := len(s)
b.WriteLenEncode(uint64(l))
b.WriteString(s)
}
// ReadLenEncodeString used to read variable string.
func (b *Buffer) ReadLenEncodeString() (s string, err error) {
var l uint64
if l, err = b.ReadLenEncode(); err != nil {
return
}
if s, err = b.ReadString(int(l)); err != nil {
return
}
return
}
// WriteLenEncodeBytes used to write variable bytes.
func (b *Buffer) WriteLenEncodeBytes(v []byte) {
l := len(v)
b.WriteLenEncode(uint64(l))
b.WriteBytes(v)
}
// ReadLenEncodeBytes used to read variable bytes.
func (b *Buffer) ReadLenEncodeBytes() (v []byte, err error) {
var l uint64
if l, err = b.ReadLenEncode(); err != nil {
return
}
// nil value.
if l == ^uint64(0) {
return
}
if l == 0 {
return []byte{}, nil
}
if v, err = b.ReadBytes(int(l)); err != nil {
return
}
return
}
// WriteEOF used to write EOF.
func (b *Buffer) WriteEOF(n int) {
b.extend(n)
for i := 0; i < n; i++ {
b.buf[b.pos] = 0xfe
b.pos++
}
}
// ReadEOF used to read EOF.
func (b *Buffer) ReadEOF(n int) (err error) {
return b.ReadZero(n)
}
// WriteZero used to write zero.
func (b *Buffer) WriteZero(n int) {
b.extend(n)
for i := 0; i < n; i++ {
b.buf[b.pos] = 0
b.pos++
}
}
// ReadZero used to read zero.
func (b *Buffer) ReadZero(n int) (err error) {
if (b.seek + n) > b.pos {
err = ErrIOEOF
return
}
b.seek += n
return
}
// WriteString used to write string.
func (b *Buffer) WriteString(s string) {
n := len(s)
b.extend(n)
copy(b.buf[b.pos:], s)
b.pos += n
}
// ReadString used to read string.
func (b *Buffer) ReadString(n int) (s string, err error) {
if (b.seek + n) > b.pos {
err = ErrIOEOF
return
}
s = string(b.buf[b.seek:(b.seek + n)])
b.seek += n
return
}
// ReadStringNUL reads until the first NUL in the buffer
// returning a string containing the data up to and not including the NUL
func (b *Buffer) ReadStringNUL() (s string, err error) {
var v []byte
if v, err = b.readBytesWithToken(0x00); err != nil {
return
}
s = string(v)
return
}
// ReadStringEOF reads until the first EOF in the buffer
// returning a string containing the data up to and not including the EOF
func (b *Buffer) ReadStringEOF() (s string, err error) {
var v []byte
if v, err = b.readBytesWithToken(0xfe); err != nil {
return
}
s = string(v)
return
}
// ReadBytesNUL reads until the first NUL in the buffer
// returning a byte slice containing the data up to and not including the NUL
func (b *Buffer) ReadBytesNUL() (v []byte, err error) {
return b.readBytesWithToken(0x00)
}
// ReadBytesEOF reads until the first EOF in the buffer
// returning a byte slice containing the data up to and not including the EOF
func (b *Buffer) ReadBytesEOF() (v []byte, err error) {
return b.readBytesWithToken(0xfe)
}
// readBytesWithToken scans forward from the read cursor for the first
// occurrence of token within the readable region and consumes through it,
// returning the bytes before the token (token excluded). On a miss the
// read cursor is exhausted and ErrIOEOF is returned.
//
// Fixes over the previous version: the search is bounded by b.pos instead
// of len(b.buf), so stale bytes beyond the written region can no longer
// match; the end index is only computed after the hit is confirmed; and a
// failed search leaves seek at b.pos (the end of valid data) rather than
// at len(b.buf).
func (b *Buffer) readBytesWithToken(token uint8) (v []byte, err error) {
	i := bytes.IndexByte(b.buf[b.seek:b.pos], token)
	if i < 0 {
		b.seek = b.pos
		err = ErrIOEOF
		return
	}
	end := b.seek + i + 1
	v = b.buf[b.seek : end-1]
	b.seek = end
	return
}
// WriteBytes used to write bytes.
func (b *Buffer) WriteBytes(bs []byte) {
	n := len(bs)
	b.extend(n)
	copy(b.buf[b.pos:], bs)
	b.pos += n
}

// ReadBytes used to read bytes.
// The returned slice aliases the internal buffer (no copy is made), so it
// is only valid until the buffer is reset or rewritten. n == 0 yields a
// nil slice.
func (b *Buffer) ReadBytes(n int) (v []byte, err error) {
	if n == 0 {
		return nil, nil
	}
	if (b.seek + n) > b.pos {
		err = ErrIOEOF
		return
	}
	v = b.buf[b.seek:(b.seek + n)]
	b.seek += n
	return
} | sqlparser/depends/common/buffer.go | 0.566738 | 0.410166 | buffer.go | starcoder |
package mapstructure
import (
"strconv"
)
//PathPart is interface for different kinds of FieldPath elements.
type PathPart interface {
getDelimiter() string
String() string
}
//InStructPathPart is FieldPath element that represents field name in structure.
type InStructPathPart struct {
val string
}
func (p InStructPathPart) getDelimiter() string {
return "."
}
func (p InStructPathPart) String() string {
return p.val
}
func (p InStructPathPart) Value() string {
return p.val
}
//InMapPathPart is FieldPath element that represents key in map.
type InMapPathPart struct {
val string
}
func (p InMapPathPart) getDelimiter() string {
return ""
}
func (p InMapPathPart) String() string {
return "[" + p.val + "]"
}
func (p InMapPathPart) Value() string {
return p.val
}
//InSlicePathPart is FieldPath element that represents index in slice or array.
type InSlicePathPart struct {
val int
}
func (p InSlicePathPart) getDelimiter() string {
return ""
}
func (p InSlicePathPart) String() string {
return "[" + strconv.Itoa(p.val) + "]"
}
func (p InSlicePathPart) Value() int {
return p.val
}
//FieldPath represents path to a field in nested structure.
type FieldPath struct {
	parts []PathPart
}

//addStruct returns a new FieldPath extended with a struct-field segment.
//The receiver is left untouched (appendPart copies the slice).
func (f FieldPath) addStruct(part string) FieldPath {
	return FieldPath{
		parts: appendPart(f.parts, InStructPathPart{val: part}),
	}
}

//addMap returns a new FieldPath extended with a map-key segment.
func (f FieldPath) addMap(part string) FieldPath {
	return FieldPath{
		parts: appendPart(f.parts, InMapPathPart{val: part}),
	}
}

//addSlice returns a new FieldPath extended with a slice/array-index segment.
func (f FieldPath) addSlice(part int) FieldPath {
	return FieldPath{
		parts: appendPart(f.parts, InSlicePathPart{val: part}),
	}
}

//notEmpty reports whether the path contains at least one segment.
func (f FieldPath) notEmpty() bool {
	return len(f.parts) > 0
}

//newFieldPath creates an empty FieldPath.
func newFieldPath() FieldPath {
	return FieldPath{
		parts: make([]PathPart, 0),
	}
}

//Parts returns the path segments in order.
func (f FieldPath) Parts() []PathPart {
	return f.parts
}
//String renders the path, e.g. "foo.bar[key][0]".
//The first segment never carries a delimiter; every following segment
//supplies its own ("." for struct fields, none for map keys and slice
//indexes, whose String form is already bracketed).
func (f FieldPath) String() string {
	if len(f.parts) == 0 {
		return ""
	}
	out := f.parts[0].String()
	for _, part := range f.parts[1:] {
		out += part.getDelimiter() + part.String()
	}
	return out
}
//appendPart appends PathPart to a PathPart slice with guarantee of slice immutability.
//A fresh backing array is always allocated, so callers still holding the
//original slice never observe the appended element.
func appendPart(parts []PathPart, part PathPart) []PathPart {
	p := make([]PathPart, len(parts))
	copy(p, parts)
	return append(p, part)
} | field_path.go | 0.730097 | 0.431524 | field_path.go | starcoder |
package main
import (
"fmt"
"io/ioutil"
"strings"
)
/**hasBit returns true if the bit at position pos in the
binary representation of the number n is set to 1.

The previous implementation tested `n & (1 << pos) > 0`, which is wrong
for pos == 63 on 64-bit ints: when the sign bit is set the masked value
is negative and the comparison returned false. Comparing against zero is
correct for every bit position.*/
func hasBit(n int, pos int) bool {
	return n&(1<<pos) != 0
}
/**clearBit returns n with the bit at position pos cleared
(i.e. n minus 2^pos when that bit was set).*/
func clearBit(n int, pos int) int {
	// AND NOT clears exactly the selected bit.
	return n &^ (1 << pos)
}
/**txt2state converts a string representation of a Game of Life
 * grid into an integer representation.
 *
 * Reading the grid left-to-right, top-to-bottom (newlines stripped),
 * cell k maps to bit k of the result: the bit is 1 when the cell holds
 * a bug ("#") and 0 otherwise.
 *
 * Example: a 5x5 grid whose only bug sits in the top-left corner sets
 * bit 0 (value 1); a bug only in the bottom-right corner sets bit 24
 * (value 16777216).
 */
func txt2state(data string) int {
	state := 0
	for i, c := range strings.ReplaceAll(data, "\n", "") {
		if c == '#' {
			state |= 1 << i
		}
	}
	return state
}
/**getNextState calculates the state of the board after a single
 * iteration of the following algorithm:
 *
 * - A bug dies (becoming an empty space) unless there is exactly
 * one bug adjacent to it.
 *
 * - An empty space becomes infested with a bug
 * if exactly one or two bugs are adjacent to it.
 */
func getNextState(state int) int {
	width := 5
	n := state // next state
	// Neighbor counts are always read from the original state while
	// updates are written to n, so every cell changes simultaneously.
	for y := 0; y < width; y++ {
		for x := 0; x < width; x++ {
			neighbors := 0
			/* left */
			if x > 0 { // leftmost row has no left neighbors
				if hasBit(state, y*width+(x-1)) {
					neighbors++
				}
			}
			/* right */
			if x < 4 { // rightmost row has no right neighbors
				if hasBit(state, y*width+(x+1)) {
					neighbors++
				}
			}
			/* up */
			if y > 0 { // top row has no top neighbors
				if hasBit(state, (y-1)*width+x) {
					neighbors++
				}
			}
			/* down */
			if y < 4 { // bottom row has no neighbors below it
				if hasBit(state, (y+1)*width+x) {
					neighbors++
				}
			}
			pos := y*width + x
			if hasBit(state, pos) && neighbors != 1 {
				/*A bug dies (becoming an empty space) unless
				there is exactly one bug adjacent to it.*/
				n = clearBit(n, pos)
			} else if !hasBit(state, pos) && (neighbors == 1 || neighbors == 2) {
				/*An empty space becomes infested with a bug
				if exactly one or two bugs are adjacent to it.*/
				n |= (1 << pos)
			}
		}
	}
	return n
}
/**
 * pos2bitIndex maps grid coordinates (x, y) on a 5-wide grid to the
 * index of the associated bit in the integer encoding (see txt2state).
 */
func pos2bitIndex(x, y int) int {
	const width = 5
	return width*y + x
}
/**getNextStateRecursive calculates the state of the board after a single
* iteration of the following algorithm:
*
* - A bug dies (becoming an empty space) unless there is exactly
* one bug adjacent to it.
*
* - An empty space becomes infested with a bug
* if exactly one or two bugs are adjacent to it.
*
* The center cell inside the grid represented by `state` contains
* within it another grid (`inner`). Similarly, `state` itself is
* also housed inside another grid (`outer`). This recursive structure
* can be visualized as follows:
*
* | | | |
* 1 | 2 | 3 | 4 | 5
* | | | |
* -----+-----+---------+-----+-----
* | | | |
* 6 | 7 | 8 | 9 | 10
* | | | |
* -----+-----+---------+-----+-----
* | |A|B|C|D|E| |
* | |-+-+-+-+-| |
* | |F|G|H|I|J| |
* | |-+-+-+-+-| |
* 11 | 12 |K|L|?|N|O| 14 | 15
* | |-+-+-+-+-| |
* | |P|Q|R|S|T| |
* | |-+-+-+-+-| |
* | |U|V|W|X|Y| |
* -----+-----+---------+-----+-----
* | | | |
* 16 | 17 | 18 | 19 | 20
* | | | |
* -----+-----+---------+-----+-----
* | | | |
* 21 | 22 | 23 | 24 | 25
* | | | |
*
* Some examples of how neighboring cells are calculated in this space:
* - Cell 8 has five neighbors (5, 3, 9, A, B, C, D, E).
* - Cell K has four neighbors (F, L, P, 12)
* - Cell E has four neighbors (D, J, 8, 14)
* - Cell T has four neighbors (O, S, Y, 14)
*/
func getNextStateRecursive(state int, outer int, inner int) int {
width := 5
n := state // next state
for y := 0; y < width; y++ {
for x := 0; x < width; x++ {
if y == 2 && x == 2 {
// center cell is home to the recursive inner grid
// so it will never hold a bug
continue
}
neighbors := 0
/* left */
if x == 0 {
// Cells A, F, K, P, U have cell 12 as their left-hand neighbor
if hasBit(outer, pos2bitIndex(1, 2)) {
neighbors++
}
} else if x == 3 && y == 2 {
// Cell 14's left-hand neighbors are E, J, O, T, Y
for i := 0; i < width; i++ {
if hasBit(inner, pos2bitIndex(4, i)) {
neighbors++
}
}
} else { // all other squares are calculated normally
if hasBit(state, y*width+(x-1)) {
neighbors++
}
}
/* right */
if x == 4 {
// cells E, J, O, T, Y all have cell 14 as their right-hand neighbor
if hasBit(outer, pos2bitIndex(3, 2)) {
neighbors++
}
} else if x == 1 && y == 2 {
// cell 12 has cells A, F, K, P, U as its right-hand neighbors
for i := 0; i < width; i++ {
if hasBit(inner, pos2bitIndex(0, i)) {
neighbors++
}
}
} else { // all other squares are calculated normally
if hasBit(state, y*width+(x+1)) {
neighbors++
}
}
/* up */
if y == 0 {
// Cells A, B, C, D, and E all have Cell 8 as their upper neighbor
if hasBit(outer, pos2bitIndex(2, 1)) {
neighbors++
}
} else if y == 3 && x == 2 {
// Cell 18 has Cells U, V, W, X, Y as its upper neighbors
for i := 0; i < width; i++ {
if hasBit(inner, pos2bitIndex(i, 4)) {
neighbors++
}
}
} else {
// all other squares are calculated normally
if hasBit(state, (y-1)*width+x) {
neighbors++
}
}
/* down */
if y == 4 {
// Cells U, V, W, X, Y all have Cell 18 as their lower neighbor
if hasBit(outer, pos2bitIndex(2, 3)) {
neighbors++
}
} else if y == 1 && x == 2 {
// Cell 8 has A, B, C, D, E as its lower neighbors
for i := 0; i < width; i++ {
if hasBit(inner, pos2bitIndex(i, 0)) {
neighbors++
}
}
} else {
// all other squares are calculated normally
if hasBit(state, (y+1)*width+x) {
neighbors++
}
}
pos := pos2bitIndex(x, y)
if hasBit(state, pos) && neighbors != 1 {
/*A bug dies (becoming an empty space) unless
there is exactly one bug adjacent to it.*/
n = clearBit(n, pos)
} else if !hasBit(state, pos) && (neighbors == 1 || neighbors == 2) {
/*An empty space becomes infested with a bug
if exactly one or two bugs are adjacent to it.*/
n |= (1 << pos)
}
}
}
return n
}
// printState renders state to stdout as a 5x5 grid: '#' for a bug, '.'
// for an empty cell, and '?' for the recursive center cell (index 12).
func printState(state int) {
	const width = 5
	for i := 0; i < width*width; i++ {
		switch {
		case i == 12:
			fmt.Print("?")
		case hasBit(state, i):
			fmt.Print("#")
		default:
			fmt.Print(".")
		}
		// End the row after every fifth cell.
		if i%width == width-1 {
			fmt.Print("\n")
		}
	}
	fmt.Print("\n")
}
// countBugs returns the number of bugs (set bits) among the 25 grid
// cells encoded in state.
func countBugs(state int) int {
	total := 0
	for i := 0; i < 25; i++ {
		if state&(1<<i) != 0 {
			total++
		}
	}
	return total
}
// part1 repeatedly evolves the grid until a state repeats, then prints
// that state's biodiversity rating (which is the state integer itself).
func part1(initialState int) {
	seen := map[int]bool{}
	state := initialState
	for !seen[state] {
		seen[state] = true
		state = getNextState(state)
	}
	fmt.Println("[Part 1] The biodiversity rating of the first state to be repeated is:", state)
}
func part2(initialState int) {
// number of iterations
n := 200
// every two iterations we will reach a new inner grid, and a new outer grid.
// Therefore the amount of space needed in the array is:
// = 1 for the original, starting grid
// + n/2 outer recursive grids
// + n/2 inner recursive grids
length := n + 1
var gridsA []int = make([]int, length)
var gridsB []int = make([]int, length) // store intermediate values here
gridsA[length/2] = initialState
a := &gridsA
b := &gridsB
var outer int
var inner int
for timestep := 0; timestep < n; timestep++ {
for i := 0; i < length; i++ {
if i == 0 {
outer = 0
} else {
outer = (*a)[i-1]
}
if i == length-1 {
inner = 0
} else {
inner = (*a)[i+1]
}
(*b)[i] = getNextStateRecursive((*a)[i], outer, inner)
}
// swap pointers
a, b = b, a
}
ttl := 0
for _, s := range *a {
if s != 0 {
ttl += countBugs(s)
}
}
fmt.Println("[Part 2] The total number of bugs after", n, "iterations is", ttl)
}
// main reads the initial grid from input.txt, parses it into the integer
// encoding, and solves both puzzle parts from the same starting state.
func main() {
	data, err := ioutil.ReadFile("input.txt")
	if err != nil {
		panic("Can't read input file.")
	}
	state := txt2state(string(data))
	part1(state)
	part2(state)
} | puzzle24/main.go | 0.800146 | 0.608623 | main.go | starcoder |
package creator
import (
"errors"
"fmt"
"strconv"
"github.com/pzduniak/unipdf/common"
"github.com/pzduniak/unipdf/model"
)
// Chapter is used to arrange multiple drawables (paragraphs, images, etc) into a single section.
// The concept is the same as a book or a report chapter.
type Chapter struct {
// The number of the chapter.
number int
// The title of the chapter.
title string
// The heading paragraph of the chapter.
heading *Paragraph
// The content components of the chapter.
contents []Drawable
// The number of subchapters the chapter has.
subchapters int
// Show chapter numbering
showNumbering bool
// Include in TOC.
includeInTOC bool
// Positioning: relative / absolute.
positioning positioning
// Absolute coordinates (when in absolute mode).
xPos, yPos float64
// Margins to be applied around the block when drawing on Page.
margins margins
// Reference to the parent chapter the current chapter belongs to.
parent *Chapter
// Reference to the TOC of the creator.
toc *TOC
// Reference to the outline of the creator.
outline *model.Outline
// The item of the chapter in the outline.
outlineItem *model.OutlineItem
// The level of the chapter in the chapters hierarchy.
level uint
}
// newChapter creates a new chapter with the specified title as the heading.
func newChapter(parent *Chapter, toc *TOC, outline *model.Outline, title string, number int, style TextStyle) *Chapter {
var level uint = 1
if parent != nil {
level = parent.level + 1
}
chapter := &Chapter{
number: number,
title: title,
showNumbering: true,
includeInTOC: true,
parent: parent,
toc: toc,
outline: outline,
contents: []Drawable{},
level: level,
}
p := newParagraph(chapter.headingText(), style)
p.SetFont(style.Font)
p.SetFontSize(style.FontSize)
chapter.heading = p
return chapter
}
// NewSubchapter creates a new child chapter with the specified title.
func (chap *Chapter) NewSubchapter(title string) *Chapter {
style := newTextStyle(chap.heading.textFont)
style.FontSize = 14
chap.subchapters++
subchapter := newChapter(chap, chap.toc, chap.outline, title, chap.subchapters, style)
chap.Add(subchapter)
return subchapter
}
// SetShowNumbering sets a flag to indicate whether or not to show chapter numbers as part of title.
func (chap *Chapter) SetShowNumbering(show bool) {
chap.showNumbering = show
chap.heading.SetText(chap.headingText())
}
// SetIncludeInTOC sets a flag to indicate whether or not to include in tOC.
func (chap *Chapter) SetIncludeInTOC(includeInTOC bool) {
chap.includeInTOC = includeInTOC
}
// GetHeading returns the chapter heading paragraph. Used to give access to address style: font, sizing etc.
func (chap *Chapter) GetHeading() *Paragraph {
return chap.heading
}
// SetMargins sets the Chapter margins: left, right, top, bottom.
// Typically not needed as the creator's page margins are used.
func (chap *Chapter) SetMargins(left, right, top, bottom float64) {
chap.margins.left = left
chap.margins.right = right
chap.margins.top = top
chap.margins.bottom = bottom
}
// GetMargins returns the Chapter's margin: left, right, top, bottom.
func (chap *Chapter) GetMargins() (float64, float64, float64, float64) {
return chap.margins.left, chap.margins.right, chap.margins.top, chap.margins.bottom
}
// Add adds a new Drawable to the chapter.
// Only the listed drawable types are accepted; adding the chapter to
// itself is rejected to prevent self-referential content.
func (chap *Chapter) Add(d Drawable) error {
	if Drawable(chap) == d {
		common.Log.Debug("ERROR: Cannot add itself")
		// NOTE(review): the message text ("range check error") looks
		// unrelated to the condition; kept as-is in case callers match it.
		return errors.New("range check error")
	}
	switch d.(type) {
	case *Paragraph, *StyledParagraph, *Image, *Block, *Table, *PageBreak, *Chapter:
		chap.contents = append(chap.contents, d)
	default:
		common.Log.Debug("Unsupported: %T", d)
		return errors.New("type check error")
	}
	return nil
}
// headingNumber returns the chapter heading number based on the chapter
// hierarchy and the showNumbering property.
// Numbers are built root-first with trailing dots, e.g. "2.1." for the
// first subchapter of chapter 2. The empty string is returned when
// numbering is disabled or the chapter number is 0.
func (chap *Chapter) headingNumber() string {
	var chapNumber string
	if chap.showNumbering {
		if chap.number != 0 {
			chapNumber = strconv.Itoa(chap.number) + "."
		}
		// Recursively prepend the parent's number to form the full
		// hierarchical prefix.
		if chap.parent != nil {
			parentChapNumber := chap.parent.headingNumber()
			if parentChapNumber != "" {
				chapNumber = parentChapNumber + chapNumber
			}
		}
	}
	return chapNumber
}
// headingText returns the chapter heading text content: the title,
// prefixed by the hierarchical chapter number when numbering is enabled.
func (chap *Chapter) headingText() string {
	if number := chap.headingNumber(); number != "" {
		return fmt.Sprintf("%s %s", number, chap.title)
	}
	return chap.title
}
// GeneratePageBlocks generate the Page blocks. Multiple blocks are generated if the contents wrap
// over multiple pages.
func (chap *Chapter) GeneratePageBlocks(ctx DrawContext) ([]*Block, DrawContext, error) {
origCtx := ctx
if chap.positioning.isRelative() {
// Update context.
ctx.X += chap.margins.left
ctx.Y += chap.margins.top
ctx.Width -= chap.margins.left + chap.margins.right
ctx.Height -= chap.margins.top
}
blocks, c, err := chap.heading.GeneratePageBlocks(ctx)
if err != nil {
return blocks, ctx, err
}
ctx = c
// Generate chapter title and number.
posX := ctx.X
posY := ctx.Y - chap.heading.Height()
page := int64(ctx.Page)
chapNumber := chap.headingNumber()
chapTitle := chap.headingText()
// Add to TOC.
if chap.includeInTOC {
line := chap.toc.Add(chapNumber, chap.title, strconv.FormatInt(page, 10), chap.level)
if chap.toc.showLinks {
line.SetLink(page, posX, posY)
}
}
// Add to outline.
if chap.outlineItem == nil {
chap.outlineItem = model.NewOutlineItem(
chapTitle,
model.NewOutlineDest(page-1, posX, posY),
)
if chap.parent != nil {
chap.parent.outlineItem.Add(chap.outlineItem)
} else {
chap.outline.Add(chap.outlineItem)
}
} else {
outlineDest := &chap.outlineItem.Dest
outlineDest.Page = page - 1
outlineDest.X = posX
outlineDest.Y = posY
}
for _, d := range chap.contents {
newBlocks, c, err := d.GeneratePageBlocks(ctx)
if err != nil {
return blocks, ctx, err
}
if len(newBlocks) < 1 {
continue
}
// The first block is always appended to the last..
blocks[len(blocks)-1].mergeBlocks(newBlocks[0])
blocks = append(blocks, newBlocks[1:]...)
ctx = c
}
if chap.positioning.isRelative() {
// Move back X to same start of line.
ctx.X = origCtx.X
}
if chap.positioning.isAbsolute() {
// If absolute: return original context.
return blocks, origCtx, nil
}
return blocks, ctx, nil
} | bot/vendor/github.com/pzduniak/unipdf/creator/chapters.go | 0.713831 | 0.441492 | chapters.go | starcoder |
package fastrlp
import (
"bytes"
"encoding/binary"
"fmt"
"math/big"
"sync"
)
// bufPool to convert int to bytes
var bufPool = sync.Pool{
New: func() interface{} {
buf := make([]byte, 8)
return &buf
},
}
type cache struct {
buf [8]byte
vs []Value
size uint64
indx uint64
}
func (c *cache) reset() {
c.vs = c.vs[:0]
c.size = 0
c.indx = 0
}
func (c *cache) getValue() *Value {
if cap(c.vs) > len(c.vs) {
c.vs = c.vs[:len(c.vs)+1]
} else {
c.vs = append(c.vs, Value{})
}
return &c.vs[len(c.vs)-1]
}
// Type represents an RLP type.
type Type int

const (
	// TypeArray is an RLP array value.
	TypeArray Type = iota
	// TypeBytes is an RLP bytes value.
	TypeBytes
	// TypeNull is an RLP bytes null (0x80)
	TypeNull
	// TypeArrayNull is an RLP array null (0xC0)
	TypeArrayNull
)

// String returns the string representation of the type.
// It panics on a value outside the declared constants, since that
// indicates a programming error.
func (t Type) String() string {
	var name string
	switch t {
	case TypeArray:
		name = "array"
	case TypeBytes:
		name = "bytes"
	case TypeNull:
		name = "null"
	case TypeArrayNull:
		name = "null-array"
	default:
		panic(fmt.Errorf("BUG: unknown Value type: %d", t))
	}
	return name
}
// Value is an RLP value
type Value struct {
// t is the type of the value, either Bytes or Array
t Type
// a are the list of objects for the type array
a []*Value
// b is the bytes content of the bytes type
b []byte
// l is the length of the value
l uint64
// i is the starting index in the bytes input buffer
i uint64
}
// GetString returns string value.
func (v *Value) GetString() (string, error) {
if v.t != TypeBytes {
return "", errNoBytes()
}
return string(v.b), nil
}
// GetElems returns the elements of an array.
func (v *Value) GetElems() ([]*Value, error) {
if v.t != TypeArray {
return nil, errNoArray()
}
return v.a, nil
}
// GetBigInt returns big.int value.
func (v *Value) GetBigInt(b *big.Int) error {
if v.t != TypeBytes {
return errNoBytes()
}
b.SetBytes(v.b)
return nil
}
// GetBool returns bool value.
// 0x01 decodes as true; the null encoding decodes as false; anything
// else is an error.
func (v *Value) GetBool() (bool, error) {
	if v.t != TypeBytes {
		return false, errNoBytes()
	}
	if bytes.Equal(v.b, valueTrue.b) {
		return true, nil
	}
	// valueFalse aliases valueNull, whose b field is nil; bytes.Equal
	// treats nil and empty slices as equal, so an empty bytes value also
	// decodes as false here.
	if bytes.Equal(v.b, valueFalse.b) {
		return false, nil
	}
	return false, fmt.Errorf("not a valid bool")
}
// Raw returns the raw bytes
func (v *Value) Raw() []byte {
return v.b
}
// Bytes returns the raw bytes.
func (v *Value) Bytes() ([]byte, error) {
if v.t != TypeBytes {
return nil, errNoBytes()
}
return v.b, nil
}
// GetBytes returns bytes to dst.
func (v *Value) GetBytes(dst []byte, bits ...int) ([]byte, error) {
if v.t != TypeBytes {
return nil, errNoBytes()
}
if len(bits) > 0 {
if len(v.b) != bits[0] {
return nil, fmt.Errorf("bad length, expected %d but found %d", bits[0], len(v.b))
}
}
dst = append(dst[:0], v.b...)
return dst, nil
}
// GetAddr returns bytes of size 20.
func (v *Value) GetAddr(buf []byte) error {
_, err := v.GetBytes(buf, 20)
return err
}
// GetHash returns bytes of size 32.
func (v *Value) GetHash(buf []byte) error {
_, err := v.GetBytes(buf, 32)
return err
}
// GetByte returns a byte
func (v *Value) GetByte() (byte, error) {
if v.t != TypeBytes {
return 0, errNoBytes()
}
if len(v.b) != 1 {
return 0, fmt.Errorf("bad length, expected 1 but found %d", len(v.b))
}
return byte(v.b[0]), nil
}
// GetUint64 returns uint64.
func (v *Value) GetUint64() (uint64, error) {
if v.t != TypeBytes {
return 0, errNoBytes()
}
if len(v.b) > 8 {
return 0, fmt.Errorf("bytes %d too long for uint64", len(v.b))
}
buf := bufPool.Get().(*[]byte)
num := readUint(v.b, *buf)
bufPool.Put(buf)
return num, nil
}
// Type returns the type of the value
func (v *Value) Type() Type {
return v.t
}
// Get returns the item at index i in the array, or nil when i is out of
// range.
//
// The previous bounds check used `i > len(v.a)`, which let i == len(v.a)
// through and panicked with an index-out-of-range on v.a[i]; the index
// must be strictly less than the length. Negative indexes are rejected
// for the same reason.
func (v *Value) Get(i int) *Value {
	if i < 0 || i >= len(v.a) {
		return nil
	}
	return v.a[i]
}
// Elems returns the number of elements if its an array
func (v *Value) Elems() int {
return len(v.a)
}
// Len returns the raw size of the value
func (v *Value) Len() uint64 {
if v.t == TypeArray {
return v.l + intsize(v.l)
}
return v.l
}
func (v *Value) fullLen() uint64 {
// null
if v.t == TypeNull || v.t == TypeArrayNull {
return 1
}
// bytes
size := v.l
if v.t == TypeBytes {
if size == 1 && v.b[0] <= 0x7F {
return 1
} else if size < 56 {
return 1 + size
} else {
return 1 + intsize(size) + size
}
}
// array
if size < 56 {
return 1 + size
}
return 1 + intsize(size) + size
}
// Set sets a value in the array
func (v *Value) Set(vv *Value) {
if v == nil || v.t != TypeArray {
return
}
v.l += vv.fullLen()
v.a = append(v.a, vv)
}
func (v *Value) marshalLongSize(dst []byte) []byte {
return v.marshalSize(dst, 0xC0, 0xF7)
}
func (v *Value) marshalShortSize(dst []byte) []byte {
return v.marshalSize(dst, 0x80, 0xB7)
}
func (v *Value) marshalSize(dst []byte, short, long byte) []byte {
if v.l < 56 {
return append(dst, short+byte(v.l))
}
intSize := intsize(v.l)
buf := bufPool.Get().(*[]byte)
binary.BigEndian.PutUint64((*buf)[:], uint64(v.l))
dst = append(dst, long+byte(intSize))
dst = append(dst, (*buf)[8-intSize:]...)
bufPool.Put(buf)
return dst
}
// MarshalTo appends marshaled v to dst and returns the result.
func (v *Value) MarshalTo(dst []byte) []byte {
switch v.t {
case TypeBytes:
if len(v.b) == 1 && v.b[0] <= 0x7F {
// single element
return append(dst, v.b...)
}
dst = v.marshalShortSize(dst)
return append(dst, v.b...)
case TypeArray:
dst = v.marshalLongSize(dst)
for _, vv := range v.a {
dst = vv.MarshalTo(dst)
}
return dst
case TypeNull:
return append(dst, []byte{0x80}...)
case TypeArrayNull:
return append(dst, []byte{0xC0}...)
default:
panic(fmt.Errorf("BUG: unexpected Value type: %d", v.t))
}
}
var (
valueArrayNull = &Value{t: TypeArrayNull, l: 1}
valueNull = &Value{t: TypeNull, l: 1}
valueFalse = valueNull
valueTrue = &Value{t: TypeBytes, b: []byte{0x1}, l: 1}
)
// intsize reports how many bytes (1..8) are needed to represent val.
func intsize(val uint64) uint64 {
	size := uint64(1)
	// Grow the width while val does not fit in size bytes; cap at 8 so
	// the shift never reaches 64.
	for size < 8 && val >= 1<<(8*size) {
		size++
	}
	return size
}
// errNoBytes builds the error returned when a bytes accessor is called
// on a Value that is not of type bytes.
func errNoBytes() error {
	return fmt.Errorf("value is not of type bytes")
}

// errNoArray builds the error returned when an array accessor is called
// on a Value that is not of type array.
func errNoArray() error {
	return fmt.Errorf("value is not of type array")
} | vendor/github.com/umbracle/fastrlp/encode.go | 0.670824 | 0.466481 | encode.go | starcoder |
package update
import (
"encoding/json"
"testing"
"github.com/nlopes/slack"
"github.com/stretchr/testify/assert"
v1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime/schema"
"github.com/infracloudio/botkube/pkg/config"
"github.com/infracloudio/botkube/pkg/notify"
"github.com/infracloudio/botkube/pkg/utils"
"github.com/infracloudio/botkube/test/e2e/env"
testutils "github.com/infracloudio/botkube/test/e2e/utils"
)
// context embeds the end-to-end TestEnv so the test methods defined on it
// can reach the shared test environment.
type context struct {
	*env.TestEnv
}
// testUpdateResource runs table-driven update-event scenarios: each case
// creates a pod, patches it, computes the field diff, and asserts on the
// notifications captured by the Slack and webhook test sinks.
func (c *context) testUpdateResource(t *testing.T) {
	// Test cases (map iteration order is random; cases must be independent)
	tests := map[string]testutils.UpdateObjects{
		"update resource when IncludeDiff is set to false": {
			// Diff message should not be generated in Attachment if IncludeDiff field is false
			GVR:       schema.GroupVersionResource{Group: "", Version: "v1", Resource: "pods"},
			Kind:      "Pod",
			Namespace: "test",
			Specs:     &v1.Pod{ObjectMeta: metav1.ObjectMeta{Name: "test-pod-update-diff-false"}, Spec: v1.PodSpec{Containers: []v1.Container{{Name: "test-pod-container", Image: "tomcat:9.0.34"}}}},
			ExpectedSlackMessage: testutils.SlackMessage{
				Attachments: []slack.Attachment{{Color: "warning", Title: "v1/pods updated", Fields: []slack.AttachmentField{{Value: "Pod *test/test-pod-update-diff-false* has been updated in *test-cluster-1* cluster\n", Short: false}}, Footer: "BotKube"}},
			},
			Patch: []byte(`{
"apiVersion": "v1",
"kind": "Pod",
"metadata": {
"name": "test-pod-update-diff-false",
"namespace": "test"
},
"spec": {
"containers": [
{
"name": "test-pod-container",
"image": "tomcat:8.0"
}
]
}
}
`),
			UpdateSetting: config.UpdateSetting{Fields: []string{"spec.containers[*].image"}, IncludeDiff: false},
			Diff:          "spec.containers[*].image:\n\t-: tomcat:9.0.34\n\t+: tomcat:8.0\n",
			ExpectedWebhookPayload: testutils.WebhookPayload{
				EventMeta:   notify.EventMeta{Kind: "Pod", Name: "test-pod-update-diff-false", Namespace: "test", Cluster: "test-cluster-1"},
				EventStatus: notify.EventStatus{Type: "update", Level: "warn", Reason: "", Error: "", Messages: []string(nil)},
				Summary:     "Pod *test/test-pod-update-diff-false* has been updated in *test-cluster-1* cluster\n",
			},
		},
		"create and update pod in configured namespace": {
			// Diff message generated in Attachment if IncludeDiff field is true
			GVR:       schema.GroupVersionResource{Group: "", Version: "v1", Resource: "pods"},
			Kind:      "Pod",
			Namespace: "test",
			Specs:     &v1.Pod{ObjectMeta: metav1.ObjectMeta{Name: "test-pod-update"}, Spec: v1.PodSpec{Containers: []v1.Container{{Name: "test-pod-container", Image: "tomcat:9.0.34"}}}},
			// NOTE(review): the expected Slack text shows "\t : tomcat:8.0" (space)
			// while Diff below carries "\t+:"; presumably the notifier rewrites the
			// "+" marker when rendering — confirm against the Slack notifier code.
			ExpectedSlackMessage: testutils.SlackMessage{
				Attachments: []slack.Attachment{{Color: "warning", Title: "v1/pods updated", Fields: []slack.AttachmentField{{Value: "Pod *test/test-pod-update* has been updated in *test-cluster-1* cluster\n```\nspec.containers[*].image:\n\t-: tomcat:9.0.34\n\t : tomcat:8.0\n\n```", Short: false}}, Footer: "BotKube"}},
			},
			Patch: []byte(`{
"apiVersion": "v1",
"kind": "Pod",
"metadata": {
"name": "test-pod-update",
"namespace": "test"
},
"spec": {
"containers": [
{
"name": "test-pod-container",
"image": "tomcat:8.0"
}
]
}
}
`),
			UpdateSetting: config.UpdateSetting{Fields: []string{"spec.containers[*].image"}, IncludeDiff: true},
			Diff:          "spec.containers[*].image:\n\t-: tomcat:9.0.34\n\t+: tomcat:8.0\n",
			ExpectedWebhookPayload: testutils.WebhookPayload{
				EventMeta:   notify.EventMeta{Kind: "Pod", Name: "test-pod-update", Namespace: "test", Cluster: "test-cluster-1"},
				EventStatus: notify.EventStatus{Type: "update", Level: "warn", Reason: "", Error: "", Messages: []string{"spec.containers[*].image:\n\t-: tomcat:9.0.34\n\t+: tomcat:8.0\n"}},
				Summary:     "Pod *test/test-pod-update* has been updated in *test-cluster-1* cluster\n```\nspec.containers[*].image:\n\t-: tomcat:9.0.34\n\t+: tomcat:8.0\n\n```",
			},
		},
	}
	for name, test := range tests {
		t.Run(name, func(t *testing.T) {
			resource := utils.GVRToString(test.GVR)
			// checking if update operation is true (allowed for "all" namespaces
			// or for the specific namespace under test)
			isAllowed := utils.AllowedEventKindsMap[utils.EventKind{
				Resource:  resource,
				Namespace: "all",
				EventType: config.UpdateEvent}] ||
				utils.AllowedEventKindsMap[utils.EventKind{
					Resource:  resource,
					Namespace: test.Namespace,
					EventType: config.UpdateEvent}]
			assert.Equal(t, isAllowed, true)
			// modifying the update setting value as per testcases
			utils.AllowedUpdateEventsMap[utils.KindNS{Resource: "v1/pods", Namespace: "all"}] = test.UpdateSetting
			// getting the updated and old object
			oldObj, newObj := testutils.UpdateResource(t, test)
			updateMsg := utils.Diff(oldObj.Object, newObj.Object, test.UpdateSetting)
			assert.Equal(t, test.Diff, updateMsg)
			// Inject an event into the fake client.
			if c.TestEnv.Config.Communications.Slack.Enabled {
				// Get last seen slack message
				lastSeenMsg := c.GetLastSeenSlackMessage()
				// Convert text message into Slack message structure
				m := slack.Message{}
				err := json.Unmarshal([]byte(*lastSeenMsg), &m)
				assert.NoError(t, err, "message should decode properly")
				assert.Equal(t, c.Config.Communications.Slack.Channel, m.Channel)
				// Timestamp varies per run; blank it before comparing attachments.
				if len(m.Attachments) != 0 {
					m.Attachments[0].Ts = ""
				}
				assert.Equal(t, test.ExpectedSlackMessage.Attachments, m.Attachments)
			}
			if c.TestEnv.Config.Communications.Webhook.Enabled {
				// Get last seen webhook payload
				lastSeenPayload := c.GetLastReceivedPayload()
				t.Logf("LastSeenPayload :%#v", lastSeenPayload)
				assert.Equal(t, test.ExpectedWebhookPayload.EventMeta, lastSeenPayload.EventMeta)
				assert.Equal(t, test.ExpectedWebhookPayload.EventStatus, lastSeenPayload.EventStatus)
				assert.Equal(t, test.ExpectedWebhookPayload.Summary, lastSeenPayload.Summary)
			}
		})
	}
}
// Run registers the update-notification sub-tests with the Go test runner.
func (c *context) Run(t *testing.T) {
	t.Run("update resource", c.testUpdateResource)
	// NOTE(review): "SKip" in the method name below is a typo; kept because the
	// method is defined with that spelling in this file.
	t.Run("skip update event", c.testSKipUpdateEvent)
	t.Run("skip update event for wrong setting", c.testSkipWrongSetting)
}
// E2ETests returns the update-notification test suite bound to the given
// test environment. (The suite exercised here covers update events; see
// testUpdateResource and friends above.)
func E2ETests(testEnv *env.TestEnv) env.E2ETest {
	return &context{
		testEnv,
	}
}
// testSKipUpdateEvent verifies that update events are skipped for namespaces
// and resources that are not configured. It temporarily rewires the global
// AllowedEventKindsMap and restores it before returning.
// (Method name keeps the historical "SKip" typo; Run above references it.)
func (c *context) testSKipUpdateEvent(t *testing.T) {
	// Modifying AllowedEventKindsMap configure dummy namespace for update event and ignore all
	utils.AllowedEventKindsMap[utils.EventKind{Resource: "v1/pods", Namespace: "dummy", EventType: "update"}] = true
	delete(utils.AllowedEventKindsMap, utils.EventKind{Resource: "v1/pods", Namespace: "all", EventType: "update"})
	// reset to original test config
	defer delete(utils.AllowedEventKindsMap, utils.EventKind{Resource: "v1/pods", Namespace: "dummy", EventType: "update"})
	// test scenarios
	tests := map[string]testutils.UpdateObjects{
		"skip update event for namespaces not configured": {
			// update operation not allowed for Pod in test namespace so event should be skipped
			GVR:       schema.GroupVersionResource{Group: "", Version: "v1", Resource: "pods"},
			Kind:      "Pod",
			Namespace: "test",
			Specs:     &v1.Pod{ObjectMeta: metav1.ObjectMeta{Name: "test-pod-update"}, Spec: v1.PodSpec{Containers: []v1.Container{{Name: "test-pod-container", Image: "tomcat:9.0.34"}}}},
		},
		"skip update event for resources not added": {
			// update operation not allowed for namespaces in test_config so event should be skipped
			GVR:   schema.GroupVersionResource{Group: "", Version: "v1", Resource: "namespaces"},
			Kind:  "Namespace",
			Specs: &v1.Namespace{ObjectMeta: metav1.ObjectMeta{Name: "abc"}},
		},
	}
	for name, test := range tests {
		t.Run(name, func(t *testing.T) {
			resource := utils.GVRToString(test.GVR)
			// checking that the update operation is NOT allowed for this case
			isAllowed := utils.CheckOperationAllowed(utils.AllowedEventKindsMap, test.Namespace, resource, config.UpdateEvent)
			assert.Equal(t, isAllowed, false)
		})
	}
	// Resetting original configuration as per test_config
	utils.AllowedEventKindsMap[utils.EventKind{Resource: "v1/pods", Namespace: "all", EventType: "update"}] = true
}
// testSkipWrongSetting verifies that an update event configured with a
// non-existent diff field path produces an empty diff (the error path of
// utils.Diff) instead of a notification message.
func (c *context) testSkipWrongSetting(t *testing.T) {
	// test scenarios
	tests := map[string]testutils.UpdateObjects{
		"skip update event for wrong updateSettings value": {
			// update event given with wrong value of updateSettings which doesn't exist would be skipped
			GVR:       schema.GroupVersionResource{Group: "", Version: "v1", Resource: "pods"},
			Kind:      "Pod",
			Namespace: "test",
			Specs:     &v1.Pod{ObjectMeta: metav1.ObjectMeta{Name: "test-pod-update-skip"}, Spec: v1.PodSpec{Containers: []v1.Container{{Name: "test-pod-container", Image: "tomcat:9.0.34"}}}},
			Patch: []byte(`{
"apiVersion": "v1",
"kind": "Pod",
"metadata": {
"name": "test-pod-update-skip",
"namespace": "test"
},
"spec": {
"containers": [
{
"name": "test-pod-container",
"image": "tomcat:8.0"
}
]
}
}
`),
			// adding wrong field
			UpdateSetting: config.UpdateSetting{Fields: []string{"spec.invalid"}, IncludeDiff: true},
			// diff calculated should be empty because of error
			Diff: "",
		},
	}
	for name, test := range tests {
		t.Run(name, func(t *testing.T) {
			resource := utils.GVRToString(test.GVR)
			// checking if update operation is true
			isAllowed := utils.CheckOperationAllowed(utils.AllowedEventKindsMap, test.Namespace, resource, config.UpdateEvent)
			assert.Equal(t, isAllowed, true)
			// modifying the update setting value as per testcases
			utils.AllowedUpdateEventsMap[utils.KindNS{Resource: "v1/pods", Namespace: "all"}] = test.UpdateSetting
			// getting the updated and old object
			oldObj, newObj := testutils.UpdateResource(t, test)
			updateMsg := utils.Diff(oldObj.Object, newObj.Object, test.UpdateSetting)
			assert.Equal(t, test.Diff, updateMsg)
		})
	}
}
package backendbase
import (
"fmt"
"math"
)
// Vec is a two-dimensional vector stored as [x, y].
type Vec [2]float64

// String renders the vector as "[x,y]" with %f (six decimal places).
func (v Vec) String() string {
	return fmt.Sprintf("[%f,%f]", v[0], v[1])
}

// Add returns the component-wise sum of v and o.
func (v Vec) Add(o Vec) Vec {
	sum := v
	sum[0] += o[0]
	sum[1] += o[1]
	return sum
}

// Sub returns the component-wise difference v - o.
func (v Vec) Sub(o Vec) Vec {
	diff := v
	diff[0] -= o[0]
	diff[1] -= o[1]
	return diff
}

// Mul returns the component-wise (Hadamard) product of v and o.
func (v Vec) Mul(o Vec) Vec {
	prod := v
	prod[0] *= o[0]
	prod[1] *= o[1]
	return prod
}

// Mulf returns v scaled by the scalar f.
func (v Vec) Mulf(f float64) Vec {
	scaled := v
	scaled[0] *= f
	scaled[1] *= f
	return scaled
}
// MulMat applies the affine matrix m to v, including m's translation part
// (m[4], m[5]).
func (v Vec) MulMat(m Mat) Vec {
	return Vec{
		m[0]*v[0] + m[2]*v[1] + m[4],
		m[1]*v[0] + m[3]*v[1] + m[5]}
}

// MulMat2 applies the purely linear 2×2 matrix m to v (no translation).
func (v Vec) MulMat2(m Mat2) Vec {
	return Vec{m[0]*v[0] + m[2]*v[1], m[1]*v[0] + m[3]*v[1]}
}
// Div returns the component-wise quotient v / o.
func (v Vec) Div(o Vec) Vec {
	q := v
	q[0] /= o[0]
	q[1] /= o[1]
	return q
}

// Divf returns v with both components divided by the scalar f.
func (v Vec) Divf(f float64) Vec {
	q := v
	q[0] /= f
	q[1] /= f
	return q
}

// Dot returns the dot product of v and o.
func (v Vec) Dot(o Vec) float64 {
	return v[0]*o[0] + v[1]*o[1]
}

// Len returns the Euclidean length of v.
func (v Vec) Len() float64 {
	return math.Sqrt(v[0]*v[0] + v[1]*v[1])
}

// LenSqr returns the squared Euclidean length of v (avoids the sqrt).
func (v Vec) LenSqr() float64 {
	return v[0]*v[0] + v[1]*v[1]
}

// Norm returns v scaled to unit length. The scaling multiplies by the
// reciprocal of the length, preserving the original rounding behavior.
func (v Vec) Norm() Vec {
	return v.Mulf(1.0 / v.Len())
}

// Atan2 returns the angle of v measured from the positive x axis.
func (v Vec) Atan2() float64 {
	return math.Atan2(v[1], v[0])
}

// Angle returns pi/2 minus the x-axis angle, i.e. the angle measured from
// the positive y axis (clockwise positive toward +x).
func (v Vec) Angle() float64 {
	return math.Pi*0.5 - math.Atan2(v[1], v[0])
}

// AngleTo returns the unsigned angle between v and o.
func (v Vec) AngleTo(o Vec) float64 {
	return math.Acos(v.Norm().Dot(o.Norm()))
}
// Mat is a 2D affine transform stored column-major as 6 values:
// [a b c d e f] representing the 3×3 matrix [a c e; b d f; 0 0 1].
type Mat [6]float64

// String renders the transform as the full 3×3 matrix it represents,
// including the implicit [0 0 1] bottom row.
func (m *Mat) String() string {
	return fmt.Sprintf("[%f,%f,0,\n %f,%f,0,\n %f,%f,1,]", m[0], m[2], m[4], m[1], m[3], m[5])
}

// MatIdentity is the identity transform.
var MatIdentity = Mat{
	1, 0,
	0, 1,
	0, 0}
// MatTranslate returns the affine transform that translates by v.
func MatTranslate(v Vec) Mat {
	return Mat{
		1, 0,
		0, 1,
		v[0], v[1]}
}

// MatScale returns the affine transform that scales by v per axis.
func MatScale(v Vec) Mat {
	return Mat{
		v[0], 0,
		0, v[1],
		0, 0}
}
// MatRotate returns the affine transform for a rotation by the given angle
// in radians.
func MatRotate(radians float64) Mat {
	s, c := math.Sincos(radians)
	return Mat{
		c, s,
		-s, c,
		0, 0}
}

// Mul returns the composition of m with m2 (m applied first, then m2),
// combining both linear parts and translations.
func (m Mat) Mul(m2 Mat) Mat {
	return Mat{
		m[0]*m2[0] + m[1]*m2[2],
		m[0]*m2[1] + m[1]*m2[3],
		m[2]*m2[0] + m[3]*m2[2],
		m[2]*m2[1] + m[3]*m2[3],
		m[4]*m2[0] + m[5]*m2[2] + m2[4],
		m[4]*m2[1] + m[5]*m2[3] + m2[5]}
}

// Invert returns the inverse affine transform of m. The linear part must be
// non-singular (m[0]*m[3] != m[2]*m[1]); a singular matrix yields ±Inf/NaN
// entries.
func (m Mat) Invert() Mat {
	// invDet is the reciprocal of the determinant of the 2×2 linear part.
	// (Previously this local was named "identity", which was misleading.)
	invDet := 1.0 / (m[0]*m[3] - m[2]*m[1])
	return Mat{
		m[3] * invDet,
		-m[1] * invDet,
		-m[2] * invDet,
		m[0] * invDet,
		(m[2]*m[5] - m[3]*m[4]) * invDet,
		(m[1]*m[4] - m[0]*m[5]) * invDet,
	}
}
// Mat2 is the purely linear 2×2 part of a transform, stored column-major.
type Mat2 [4]float64

// Mat2 extracts the linear 2×2 part of m, dropping the translation.
func (m Mat) Mat2() Mat2 {
	return Mat2{m[0], m[1], m[2], m[3]}
}

// String renders the 2×2 matrix row by row.
func (m *Mat2) String() string {
	return fmt.Sprintf("[%f,%f,\n %f,%f]", m[0], m[2], m[1], m[3])
}
package raylib
//#include "raylib.h"
//#include <stdlib.h>
import "C"
import "unsafe"
import "math"
//Matrix A representation of a 4 x 4 matrix
// Matrix is a 4×4 float32 matrix whose memory layout mirrors raylib's C
// Matrix struct (16 contiguous float32s), so values can be reinterpreted
// across the cgo boundary without copying field by field.
type Matrix struct {
	M0  float32
	M1  float32
	M2  float32
	M3  float32
	M4  float32
	M5  float32
	M6  float32
	M7  float32
	M8  float32
	M9  float32
	M10 float32
	M11 float32
	M12 float32
	M13 float32
	M14 float32
	M15 float32
}

// newMatrixFromPointer reinterprets a pointer to a C Matrix as a Go Matrix
// value (relies on the identical memory layout noted above).
func newMatrixFromPointer(ptr unsafe.Pointer) Matrix { return *(*Matrix)(ptr) }

// cptr returns a C-compatible pointer to m for passing into raylib calls.
func (m *Matrix) cptr() *C.Matrix {
	return (*C.Matrix)(unsafe.Pointer(m))
}
// NewMatrixFromQuaternion creates a new rotation matrix from a quaternion.
// Each product term is divided by the quaternion's squared length, so a
// non-unit quaternion is implicitly normalized; a zero quaternion divides
// by zero and yields NaN entries.
func NewMatrixFromQuaternion(q Quaternion) Matrix {
	x := q.X
	y := q.Y
	z := q.Z
	w := q.W
	x2 := x + x
	y2 := y + y
	z2 := z + z
	lengthSquared := q.SqrLength()
	xx := x * x2 / lengthSquared
	xy := x * y2 / lengthSquared
	xz := x * z2 / lengthSquared
	yy := y * y2 / lengthSquared
	yz := y * z2 / lengthSquared
	zz := z * z2 / lengthSquared
	wx := w * x2 / lengthSquared
	wy := w * y2 / lengthSquared
	wz := w * z2 / lengthSquared
	return Matrix{
		M0:  1.0 - (yy + zz),
		M1:  xy - wz,
		M2:  xz + wy,
		M3:  0.0,
		M4:  xy + wz,
		M5:  1.0 - (xx + zz),
		M6:  yz - wx,
		M7:  0.0,
		M8:  xz - wy,
		M9:  yz + wx,
		M10: 1.0 - (xx + yy),
		M11: 0.0,
		M12: 0.0,
		M13: 0.0,
		M14: 0.0,
		M15: 1.0,
	}
}

// NewMatrixIdentity creates an identity matrix.
func NewMatrixIdentity() Matrix {
	return Matrix{
		M0: 1, M1: 0, M2: 0, M3: 0,
		M4: 0, M5: 1, M6: 0, M7: 0,
		M8: 0, M9: 0, M10: 1, M11: 0,
		M12: 0, M13: 0, M14: 0, M15: 1,
	}
}

// NewMatrixTranslate creates a translation matrix; the offsets are stored in
// the last column (M3, M7, M11).
func NewMatrixTranslate(x, y, z float32) Matrix {
	return Matrix{
		M0: 1, M1: 0, M2: 0, M3: x,
		M4: 0, M5: 1, M6: 0, M7: y,
		M8: 0, M9: 0, M10: 1, M11: z,
		M12: 0, M13: 0, M14: 0, M15: 1,
	}
}

// NewMatrixRotate creates a rotation matrix around the given axis by the
// given angle in radians. A non-unit axis is normalized first; a zero axis
// is left untouched (length 0 short-circuits the normalization).
func NewMatrixRotate(axis Vector3, radians float32) Matrix {
	x := axis.X
	y := axis.Y
	z := axis.Z
	length := axis.Length()
	if length != 1 && length != 0 {
		length = 1 / length
		x *= length
		y *= length
		z *= length
	}
	sinres := float32(math.Sin(float64(radians)))
	cosres := float32(math.Cos(float64(radians)))
	t := 1 - cosres
	return Matrix{
		M0:  x*x*t + cosres,
		M1:  y*x*t + z*sinres,
		M2:  z*x*t - y*sinres,
		M3:  0,
		M4:  x*y*t - z*sinres,
		M5:  y*y*t + cosres,
		M6:  z*y*t + x*sinres,
		M7:  0,
		M8:  x*z*t + y*sinres,
		M9:  y*z*t - x*sinres,
		M10: z*z*t + cosres,
		M11: 0,
		M12: 0,
		M13: 0,
		M14: 0,
		M15: 1,
	}
}

// ToQuaternion turns the rotation matrix into a Quaternion. Alias of NewQuaternionFromMatrix.
func (m Matrix) ToQuaternion() Quaternion { return NewQuaternionFromMatrix(m) }

// Trace returns the sum of the values along the main diagonal.
func (m Matrix) Trace() float32 {
	return m.M0 + m.M5 + m.M10 + m.M15
}
// Detrimant returns the determinant of the matrix.
//
// Deprecated: the name is a misspelling of "determinant", kept for backward
// compatibility. Use Determinant instead.
func (m Matrix) Detrimant() float32 {
	// Cache the matrix values (speed optimization)
	a00 := m.M0
	a01 := m.M1
	a02 := m.M2
	a03 := m.M3
	a10 := m.M4
	a11 := m.M5
	a12 := m.M6
	a13 := m.M7
	a20 := m.M8
	a21 := m.M9
	a22 := m.M10
	a23 := m.M11
	a30 := m.M12
	a31 := m.M13
	a32 := m.M14
	a33 := m.M15
	return a30*a21*a12*a03 - a20*a31*a12*a03 - a30*a11*a22*a03 + a10*a31*a22*a03 +
		a20*a11*a32*a03 - a10*a21*a32*a03 - a30*a21*a02*a13 + a20*a31*a02*a13 +
		a30*a01*a22*a13 - a00*a31*a22*a13 - a20*a01*a32*a13 + a00*a21*a32*a13 +
		a30*a11*a02*a23 - a10*a31*a02*a23 - a30*a01*a12*a23 + a00*a31*a12*a23 +
		a10*a01*a32*a23 - a00*a11*a32*a23 - a20*a11*a02*a33 + a10*a21*a02*a33 +
		a20*a01*a12*a33 - a00*a21*a12*a33 - a10*a01*a22*a33 + a00*a11*a22*a33
}

// Determinant returns the determinant of the matrix. It is the correctly
// spelled, backward-compatible alias for Detrimant.
func (m Matrix) Determinant() float32 {
	return m.Detrimant()
}
// Transpose returns the transpose of the matrix (rows and columns swapped).
func (m Matrix) Transpose() Matrix {
	return Matrix{
		M0:  m.M0,
		M1:  m.M4,
		M2:  m.M8,
		M3:  m.M12,
		M4:  m.M1,
		M5:  m.M5,
		M6:  m.M9,
		M7:  m.M13,
		M8:  m.M2,
		M9:  m.M6,
		M10: m.M10,
		M11: m.M14,
		M12: m.M3,
		M13: m.M7,
		M14: m.M11,
		M15: m.M15,
	}
}

// Invert returns the inverse of the matrix using the cofactor/adjugate
// method. A singular matrix (determinant 0) produces ±Inf/NaN entries.
func (m Matrix) Invert() Matrix {
	// Cache the matrix values (speed optimization).
	a00 := m.M0
	a01 := m.M1
	a02 := m.M2
	a03 := m.M3
	a10 := m.M4
	a11 := m.M5
	a12 := m.M6
	a13 := m.M7
	a20 := m.M8
	a21 := m.M9
	a22 := m.M10
	a23 := m.M11
	a30 := m.M12
	a31 := m.M13
	a32 := m.M14
	a33 := m.M15
	// 2×2 sub-determinants shared by several cofactors below.
	b00 := a00*a11 - a01*a10
	b01 := a00*a12 - a02*a10
	b02 := a00*a13 - a03*a10
	b03 := a01*a12 - a02*a11
	b04 := a01*a13 - a03*a11
	b05 := a02*a13 - a03*a12
	b06 := a20*a31 - a21*a30
	b07 := a20*a32 - a22*a30
	b08 := a20*a33 - a23*a30
	b09 := a21*a32 - a22*a31
	b10 := a21*a33 - a23*a31
	b11 := a22*a33 - a23*a32
	// Calculate the invert determinant (inlined to avoid double-caching)
	invDet := 1 / (b00*b11 - b01*b10 + b02*b09 + b03*b08 - b04*b07 + b05*b06)
	return Matrix{
		M0:  (a11*b11 - a12*b10 + a13*b09) * invDet,
		M1:  (-a01*b11 + a02*b10 - a03*b09) * invDet,
		M2:  (a31*b05 - a32*b04 + a33*b03) * invDet,
		M3:  (-a21*b05 + a22*b04 - a23*b03) * invDet,
		M4:  (-a10*b11 + a12*b08 - a13*b07) * invDet,
		M5:  (a00*b11 - a02*b08 + a03*b07) * invDet,
		M6:  (-a30*b05 + a32*b02 - a33*b01) * invDet,
		M7:  (a20*b05 - a22*b02 + a23*b01) * invDet,
		M8:  (a10*b10 - a11*b08 + a13*b06) * invDet,
		M9:  (-a00*b10 + a01*b08 - a03*b06) * invDet,
		M10: (a30*b04 - a31*b02 + a33*b00) * invDet,
		M11: (-a20*b04 + a21*b02 - a23*b00) * invDet,
		M12: (-a10*b09 + a11*b07 - a12*b06) * invDet,
		M13: (a00*b09 - a01*b07 + a02*b06) * invDet,
		M14: (-a30*b03 + a31*b01 - a32*b00) * invDet,
		M15: (a20*b03 - a21*b01 + a22*b00) * invDet,
	}
}
// Normalize divides every component of the matrix by its determinant.
// A determinant of 0 yields ±Inf/NaN entries.
func (m Matrix) Normalize() Matrix {
	det := m.Detrimant()
	return Matrix{
		M0:  m.M0 / det,
		M1:  m.M1 / det,
		M2:  m.M2 / det,
		M3:  m.M3 / det,
		M4:  m.M4 / det,
		M5:  m.M5 / det,
		M6:  m.M6 / det,
		M7:  m.M7 / det,
		M8:  m.M8 / det,
		M9:  m.M9 / det,
		M10: m.M10 / det,
		M11: m.M11 / det,
		M12: m.M12 / det,
		M13: m.M13 / det,
		M14: m.M14 / det,
		M15: m.M15 / det,
	}
}

// Add returns the component-wise sum of the two matrices.
func (m Matrix) Add(right Matrix) Matrix {
	return Matrix{
		M0:  m.M0 + right.M0,
		M1:  m.M1 + right.M1,
		M2:  m.M2 + right.M2,
		M3:  m.M3 + right.M3,
		M4:  m.M4 + right.M4,
		M5:  m.M5 + right.M5,
		M6:  m.M6 + right.M6,
		M7:  m.M7 + right.M7,
		M8:  m.M8 + right.M8,
		M9:  m.M9 + right.M9,
		M10: m.M10 + right.M10,
		M11: m.M11 + right.M11,
		M12: m.M12 + right.M12,
		M13: m.M13 + right.M13,
		M14: m.M14 + right.M14,
		M15: m.M15 + right.M15,
	}
}

// Subtract returns the component-wise difference m - right.
func (m Matrix) Subtract(right Matrix) Matrix {
	return Matrix{
		M0:  m.M0 - right.M0,
		M1:  m.M1 - right.M1,
		M2:  m.M2 - right.M2,
		M3:  m.M3 - right.M3,
		M4:  m.M4 - right.M4,
		M5:  m.M5 - right.M5,
		M6:  m.M6 - right.M6,
		M7:  m.M7 - right.M7,
		M8:  m.M8 - right.M8,
		M9:  m.M9 - right.M9,
		M10: m.M10 - right.M10,
		M11: m.M11 - right.M11,
		M12: m.M12 - right.M12,
		M13: m.M13 - right.M13,
		M14: m.M14 - right.M14,
		M15: m.M15 - right.M15,
	}
}
// NewMatrixRotateXYZ builds a combined xyz-rotation matrix from per-axis
// angles in radians. Note that each angle is negated before use.
func NewMatrixRotateXYZ(radians Vector3) Matrix {
	cosz := float32(math.Cos(float64(-radians.Z)))
	sinz := float32(math.Sin(float64(-radians.Z)))
	cosy := float32(math.Cos(float64(-radians.Y)))
	siny := float32(math.Sin(float64(-radians.Y)))
	cosx := float32(math.Cos(float64(-radians.X)))
	sinx := float32(math.Sin(float64(-radians.X)))
	result := NewMatrixIdentity()
	result.M0 = cosz * cosy
	result.M4 = (cosz * siny * sinx) - (sinz * cosx)
	result.M8 = (cosz * siny * cosx) + (sinz * sinx)
	result.M1 = sinz * cosy
	result.M5 = (sinz * siny * sinx) + (cosz * cosx)
	result.M9 = (sinz * siny * cosx) - (cosz * sinx)
	result.M2 = -siny
	result.M6 = cosy * sinx
	result.M10 = cosy * cosx
	return result
}

// NewMatrixRotateX creates a rotation matrix around the x axis (radians).
func NewMatrixRotateX(radians float32) Matrix {
	result := NewMatrixIdentity()
	cosres := float32(math.Cos(float64(radians)))
	sinres := float32(math.Sin(float64(radians)))
	result.M5 = cosres
	result.M6 = -sinres
	result.M9 = sinres
	result.M10 = cosres
	return result
}

// NewMatrixRotateY creates a rotation matrix around the y axis (radians).
func NewMatrixRotateY(radians float32) Matrix {
	result := NewMatrixIdentity()
	cosres := float32(math.Cos(float64(radians)))
	sinres := float32(math.Sin(float64(radians)))
	result.M0 = cosres
	result.M2 = sinres
	result.M8 = -sinres
	result.M10 = cosres
	return result
}

// NewMatrixRotateZ creates a rotation matrix around the z axis (radians).
func NewMatrixRotateZ(radians float32) Matrix {
	result := NewMatrixIdentity()
	cosres := float32(math.Cos(float64(radians)))
	sinres := float32(math.Sin(float64(radians)))
	result.M0 = cosres
	result.M1 = -sinres
	result.M4 = sinres
	result.M5 = cosres
	return result
}

// NewMatrixScale creates a scaling matrix with per-axis factors on the
// diagonal.
func NewMatrixScale(scale Vector3) Matrix {
	return Matrix{
		M0: scale.X, M1: 0, M2: 0, M3: 0,
		M4: 0, M5: scale.Y, M6: 0, M7: 0,
		M8: 0, M9: 0, M10: scale.Z, M11: 0,
		M12: 0, M13: 0, M14: 0, M15: 1,
	}
}
// Multiply returns the matrix product of m and right. Matrix multiplication
// is not commutative, so order matters.
func (m Matrix) Multiply(right Matrix) Matrix {
	return Matrix{
		M0:  m.M0*right.M0 + m.M1*right.M4 + m.M2*right.M8 + m.M3*right.M12,
		M1:  m.M0*right.M1 + m.M1*right.M5 + m.M2*right.M9 + m.M3*right.M13,
		M2:  m.M0*right.M2 + m.M1*right.M6 + m.M2*right.M10 + m.M3*right.M14,
		M3:  m.M0*right.M3 + m.M1*right.M7 + m.M2*right.M11 + m.M3*right.M15,
		M4:  m.M4*right.M0 + m.M5*right.M4 + m.M6*right.M8 + m.M7*right.M12,
		M5:  m.M4*right.M1 + m.M5*right.M5 + m.M6*right.M9 + m.M7*right.M13,
		M6:  m.M4*right.M2 + m.M5*right.M6 + m.M6*right.M10 + m.M7*right.M14,
		M7:  m.M4*right.M3 + m.M5*right.M7 + m.M6*right.M11 + m.M7*right.M15,
		M8:  m.M8*right.M0 + m.M9*right.M4 + m.M10*right.M8 + m.M11*right.M12,
		M9:  m.M8*right.M1 + m.M9*right.M5 + m.M10*right.M9 + m.M11*right.M13,
		M10: m.M8*right.M2 + m.M9*right.M6 + m.M10*right.M10 + m.M11*right.M14,
		M11: m.M8*right.M3 + m.M9*right.M7 + m.M10*right.M11 + m.M11*right.M15,
		M12: m.M12*right.M0 + m.M13*right.M4 + m.M14*right.M8 + m.M15*right.M12,
		M13: m.M12*right.M1 + m.M13*right.M5 + m.M14*right.M9 + m.M15*right.M13,
		M14: m.M12*right.M2 + m.M13*right.M6 + m.M14*right.M10 + m.M15*right.M14,
		M15: m.M12*right.M3 + m.M13*right.M7 + m.M14*right.M11 + m.M15*right.M15,
	}
}
// NewMatrixFrustum creates a perspective projection matrix from the six
// clipping-plane distances.
func NewMatrixFrustum(left, right, bottom, top, near, far float64) Matrix {
	rl := (right - left)
	tb := (top - bottom)
	fn := (far - near)
	return Matrix{
		M0:  float32((near * 2) / rl),
		M1:  0,
		M2:  0,
		M3:  0,
		M4:  0,
		M5:  float32((near * 2) / tb),
		M6:  0,
		M7:  0,
		M8:  float32((right + left) / rl),
		M9:  float32((top + bottom) / tb),
		M10: float32(-(far + near) / fn),
		M11: -1,
		M12: 0,
		M13: 0,
		M14: float32(-(far * near * 2) / fn),
		M15: 0,
	}
}

// NewMatrixPerspective creates a perspective projection matrix from a
// vertical field of view (radians) and aspect ratio by deriving the frustum
// extents at the near plane.
func NewMatrixPerspective(fovy, aspect, near, far float64) Matrix {
	top := near * math.Tan(fovy*0.5)
	right := top * aspect
	return NewMatrixFrustum(-right, right, -top, top, near, far)
}

// NewMatrixOrtho creates an orthographic projection matrix.
func NewMatrixOrtho(left, right, bottom, top, near, far float64) Matrix {
	rl := (right - left)
	tb := (top - bottom)
	fn := (far - near)
	return Matrix{
		M0:  float32(2 / rl),
		M1:  0,
		M2:  0,
		M3:  0,
		M4:  0,
		M5:  float32(2 / tb),
		M6:  0,
		M7:  0,
		M8:  0,
		M9:  0,
		M10: float32(-2 / fn),
		M11: 0,
		M12: float32(-(left + right) / rl),
		M13: float32(-(top + bottom) / tb),
		M14: float32(-(far + near) / fn),
		M15: 1,
	}
}
// NewMatrixLookAt creates a matrix oriented from eye toward target with the
// given up vector. The basis vectors x/y/z fill the first three rows.
//
// NOTE(review): the translation is stored raw in M12–M14 and is neither
// negated nor dotted with the basis axes, and it sits in a different slot
// than NewMatrixTranslate uses (M3/M7/M11). Verify this matches the intended
// view-matrix convention before relying on it as a camera transform.
func NewMatrixLookAt(eye, target, up Vector3) Matrix {
	z := eye.Subtract(target).Normalize()
	x := up.CrossProduct(z).Normalize()
	y := z.CrossProduct(x).Normalize()
	return Matrix{
		M0:  x.X,
		M1:  x.Y,
		M2:  x.Z,
		M3:  0,
		M4:  y.X,
		M5:  y.Y,
		M6:  y.Z,
		M7:  0,
		M8:  z.X,
		M9:  z.Y,
		M10: z.Z,
		M11: 0,
		M12: eye.X,
		M13: eye.Y,
		M14: eye.Z,
		M15: 1,
	}
}

// Decompose flattens the matrix into a 16-element slice in M0..M15 order.
func (m Matrix) Decompose() []float32 {
	return []float32{
		m.M0, m.M1, m.M2, m.M3, m.M4, m.M5, m.M6, m.M7, m.M8, m.M9,
		m.M10, m.M11, m.M12, m.M13, m.M14, m.M15,
	}
}
package cards
import (
"encoding/json"
"fmt"
"math/rand"
"reflect"
"time"
)
// Card information is originally defined in a JSON file - however, in order to make this simpler for the user (and so they
// don't have to worry about having specific files in the right place), we can bundle the contents of the `cards.json` file into
// our program at compile time. As a result, our game can be distributed as a single binary, instead of multiple files that have
// to be in specific locations relative to each other.
// There is currently no built in way to do this in Go, so we can use a tall called `go-bindata` to generate some Go code that
// contains our file and some helper functions to access it. Because this is a command line program, we can define the command
// that needs to be run here in order to generate our code file using a `go:generate` directive.
// In order to run this command, you can use the `go generate <pkgname>` command - for example, to generate the code for this
// package, you could run `go generate github.com/codemicro/cs-toptrumps/internal/cards`.
//go:generate go-bindata -pkg cards cards.json
// Card models a single Top Trumps playing card. The `readable` struct tags
// give each comparable attribute a human-facing label consumed by the
// reflection helpers below; only integer attributes carry the tag.
type Card struct {
	Name       string
	NumEngines int `readable:"Number of engines"` // <- this weird string thing is called a struct tag
	MaxPax     int `readable:"Maximum passenger count"`
	Range      int `readable:"Range"`
	Cost       int `readable:"Cost when new"`
}
// GetReadableNames returns the human-readable label of every Card attribute
// that carries a non-empty `readable` struct tag, in field declaration
// order.
func (c Card) GetReadableNames() []string {
	var names []string
	cardType := reflect.TypeOf(c)
	for i := 0; i < cardType.NumField(); i++ {
		if label := cardType.Field(i).Tag.Get("readable"); label != "" {
			names = append(names, label)
		}
	}
	return names
}
// GetValueByReadable takes the readable name of an attribute and returns the
// value of that attribute, if it exists. Only integer fields are supported;
// unlike the previous version (which panicked via a failed type assertion),
// a `readable`-tagged field of any other kind safely yields 0. A name that
// matches no attribute also yields 0.
func (c Card) GetValueByReadable(readable string) int {
	ct := reflect.TypeOf(c)
	cv := reflect.ValueOf(c)
	for i := 0; i < ct.NumField(); i++ {
		if ct.Field(i).Tag.Get("readable") != readable {
			continue
		}
		vField := cv.Field(i)
		// Guard the kind so a mistagged non-integer field cannot crash the game.
		if vField.Kind() == reflect.Int {
			return int(vField.Int())
		}
		return 0
	}
	return 0
}
var (
	// AllCards holds every card loaded from cards.json and is never mutated.
	AllCards []Card
	// AvailCards is the draw pool; Deal removes cards from it in place.
	AvailCards []Card
)

// init runs automagically on package initialisation: it loads the bundled
// cards.json and seeds the draw pool.
func init() {
	// Load all card info from cards.json, which is a bundled file
	fCont := MustAsset("cards.json") // MustAsset is a function from the generated code.
	err := json.Unmarshal(fCont, &AllCards)
	if err != nil {
		fmt.Println("Unable to load cards.json. Is the format correct?")
		panic(err)
	}
	// Copy into a fresh backing array. Previously AvailCards = AllCards shared
	// the same backing array, so Deal's in-place element removal shifted and
	// corrupted the contents of AllCards as well.
	AvailCards = make([]Card, len(AllCards))
	copy(AvailCards, AllCards)
}
// Deal will select n cards from the deck of available cards at random, remove
// them from that deck, and return them in a new mini-deck.
// NOTE(review): a fresh rand.Rand is seeded from the clock on every call;
// two calls within the same nanosecond would draw identical sequences —
// consider a package-level source. Also note the removal below mutates
// AvailCards' backing array in place.
func Deal(n int) (deck []Card) {
	r := rand.New(rand.NewSource(time.Now().UnixNano()))
	for i := 0; i < n; i += 1 {
		chosenIndex := r.Intn(len(AvailCards))
		deck = append(deck, AvailCards[chosenIndex])
		AvailCards = append(AvailCards[:chosenIndex], AvailCards[chosenIndex+1:]...) // Remove chosen card from available deck
		// The `...` syntax means the items of that slice/array are used as arguments to the `append` function.
	}
	return
}
// SplitCards will create n decks of even size and return those. Cards
// returned are removed from the deck of all available cards.
// NOTE(review): n is assumed to be >= 1; n == 0 would panic on the modulo
// below once the size check passes.
func SplitCards(n int) (decks [][]Card) {
	numCards := len(AvailCards)
	// If there are less cards than there are decks to create, that's never going to work.
	// In the context of this program, this is only ever going to be caused by a programming error and not by anything that a
	// user inputs. Because of this, we don't need to go to all the hassle of properly handling an error, and can instead just
	// call panic and quit.
	if numCards < n {
		panic(fmt.Errorf("there are not enough available cards (have: %d) in order to create %d new deck(s)", numCards, n))
	}
	// While the number of cards in the deck doesn't divide evenly by the number of required decks, reduce the size of the pool
	// of cards to select from
	for numCards%n != 0 {
		numCards -= 1
	}
	cardsPerDeck := numCards / n
	for i := 0; i < n; i += 1 {
		decks = append(decks, Deal(cardsPerDeck))
	}
	return
}
package main
import (
"fmt"
"image"
"image/color"
"image/png"
"log"
"os"
)
// max returns the larger of a and b.
func max(a int, b int) int {
	if a < b {
		return b
	}
	return a
}

// min returns the smaller of a and b.
func min(a int, b int) int {
	if b < a {
		return b
	}
	return a
}
// Each getter below returns a square convolution kernel together with the
// bias (added after scaling) and factor (multiplied into each channel sum)
// that applyFilter expects.
// getIdentityFilter returns a 3×3 kernel that leaves the image unchanged.
func getIdentityFilter() ([][]float64, float64, float64) {
	return [][]float64{
			{0, 0, 0},
			{0, 1, 0},
			{0, 0, 0},
		},
		0.0, //bias
		1.0 //factor
}

// getEdgeFilter1 is a diagonal edge-detection kernel.
func getEdgeFilter1() ([][]float64, float64, float64) {
	return [][]float64{
			{1, 0, -1},
			{0, 0, 0},
			{-1, 0, 1},
		},
		0.0, //bias
		1.0 //factor
}

// getEdgeFilter2 is a 4-neighbour Laplacian edge-detection kernel.
func getEdgeFilter2() ([][]float64, float64, float64) {
	return [][]float64{
			{0, 1, 0},
			{1, -4, 1},
			{0, 1, 0},
		},
		0.0, //bias
		1.0 //factor
}

// getEdgeFilter3 is an 8-neighbour Laplacian edge-detection kernel.
func getEdgeFilter3() ([][]float64, float64, float64) {
	return [][]float64{
			{-1, -1, -1},
			{-1, 8, -1},
			{-1, -1, -1},
		},
		0.0, //bias
		1.0 //factor
}

// getEdgeFilter4 is a 5×5 diagonal edge-detection kernel.
func getEdgeFilter4() ([][]float64, float64, float64) {
	return [][]float64{
			{-1, 0, 0, 0, 0},
			{0, -2, 0, 0, 0},
			{0, 0, 6, 0, 0},
			{0, 0, 0, -2, 0},
			{0, 0, 0, 0, -1},
		},
		0.0, //bias
		1.0 //factor
}

// getEmboss is a 3×3 emboss kernel brightened by a bias of 50.
func getEmboss() ([][]float64, float64, float64) {
	return [][]float64{
			{-1, -1, 0},
			{-1, 0, 1},
			{0, 1, 1},
		},
		50.0, //bias
		1.0 //factor
}

// getEmboss2 is a stronger 5×5 emboss kernel.
func getEmboss2() ([][]float64, float64, float64) {
	return [][]float64{
			{-1, -1, -1, -1, 0},
			{-1, -1, -1, 0, 1},
			{-1, -1, 0, 1, 1},
			{-1, 0, 1, 1, 1},
			{0, 1, 1, 1, 1},
		},
		0.0, //bias
		1.0 //factor
}

// getBlur2 is a 5×5 cross-shaped box blur normalized by its weight sum.
// NOTE(review): the 13 taps sum to 13, not 16 — the 1/16 factor darkens the
// result slightly; confirm whether that is intended.
func getBlur2() ([][]float64, float64, float64) {
	return [][]float64{
			{0, 0, 1, 0, 0},
			{0, 1, 1, 1, 0},
			{1, 1, 1, 1, 1},
			{0, 1, 1, 1, 0},
			{0, 0, 1, 0, 0},
		},
		0.0, //bias
		1.0 / 16 //factor
}

// getBlur is a normalized 3×3 Gaussian blur with a small brightening bias.
func getBlur() ([][]float64, float64, float64) {
	return [][]float64{
			{0.0625, 0.125, 0.0625},
			{0.125, 0.25, 0.125},
			{0.0625, 0.125, 0.0625},
		},
		8.0, //bias
		1 //factor
}

// getExcessiveEdge is an aggressive edge-detection kernel.
func getExcessiveEdge() ([][]float64, float64, float64) {
	return [][]float64{
			{1, 1, 1},
			{1, -7, 1},
			{1, 1, 1},
		},
		1.0, //bias
		1.0 //factor
}

// getEdge is a horizontal-gradient (Prewitt-like) kernel with a mid-grey bias.
func getEdge() ([][]float64, float64, float64) {
	return [][]float64{
			{-1, -1, -1},
			{0, 0, 0},
			{1, 1, 1},
		},
		100.0, //bias
		1.0 //factor
}

// getSharpen is a 5×5 sharpening kernel scaled by 1/4.
func getSharpen() ([][]float64, float64, float64) {
	return [][]float64{
			{-1, -1, -1, -1, -1},
			{-1, 2, 2, 2, -1},
			{-1, 2, 8, 2, -1},
			{-1, 2, 2, 2, -1},
			{-1, -1, -1, -1, -1},
		},
		1.0, //bias
		1.0 / 4 //factor
}
// filter is the signature shared by every kernel getter above:
// it yields (kernel, bias, factor).
type filter func() ([][]float64, float64, float64)

// main applies each registered filter to lena.png and writes one PNG per
// filter as output_<index>.png.
func main() {
	m, w, h := getImageArray("lena.png")
	f := []filter{
		getIdentityFilter,
		getEdgeFilter1,
		getEdgeFilter2,
		getEdgeFilter3,
		getEdgeFilter4,
		getEmboss,
		getEmboss2,
		getBlur,
		getBlur2,
		getEdge,
		getExcessiveEdge,
		getSharpen,
	}
	for idx, method := range f {
		filter, bias, factor := method()
		fmt.Println(filter, bias, factor)
		stride := len(filter)
		result := applyFilter(w, h, m, filter, stride, factor, bias)
		outfile := fmt.Sprintf("output_%d.png", idx)
		// Write and close each file within the iteration. The previous
		// version deferred Close inside the loop (keeping every handle open
		// until main returned) and ignored both Create and Encode errors.
		if err := writePNG(outfile, result); err != nil {
			log.Fatal(err)
		}
	}
}

// writePNG encodes img into a freshly created file called name, closing the
// file before returning.
func writePNG(name string, img image.Image) error {
	out, err := os.Create(name)
	if err != nil {
		return err
	}
	defer out.Close()
	return png.Encode(out, img)
}
// getImageArray opens and decodes the PNG at imagePath, returning the image
// together with its width and height. Any open or decode failure terminates
// the program via log.Fatal.
func getImageArray(imagePath string) (image.Image, int, int) {
	image.RegisterFormat("png", "png", png.Decode, png.DecodeConfig)
	reader, err := os.Open(imagePath)
	if err != nil {
		log.Fatal(err)
	}
	defer reader.Close()
	m, _, err := image.Decode(reader)
	if err != nil {
		log.Fatal(err)
	}
	bounds := m.Bounds()
	w, h := bounds.Max.X, bounds.Max.Y
	return m, w, h
}
func applyFilter(w int, h int, m image.Image, filter [][]float64, stride int, factor float64, bias float64) *image.RGBA {
result := image.NewRGBA(image.Rect(0, 0, w, h))
for x := 0; x < w; x++ {
for y := 0; y < h; y++ {
var red float64 = 0.0
var green float64 = 0.0
var blue float64 = 0.0
for filterY := 0; filterY < stride; filterY++ {
for filterX := 0; filterX < stride; filterX++ {
imageX := (x - stride/2 + filterX + w) % w
imageY := (y - stride/2 + filterY + h) % h
r, g, b, _ := m.At(imageX, imageY).RGBA()
red += (float64(r) / 257) * filter[filterY][filterX]
green += (float64(g) / 257) * filter[filterY][filterX]
blue += (float64(b) / 257) * filter[filterY][filterX]
}
}
_r := min(max(int(factor*red+bias), 0), 255)
_g := min(max(int(factor*green+bias), 0), 255)
_b := min(max(int(factor*blue+bias), 0), 255)
c := color.RGBA{
uint8(_r),
uint8(_g),
uint8(_b),
255,
}
result.Set(x, y, c)
}
}
return result
} | filter.go | 0.625667 | 0.513668 | filter.go | starcoder |
package unit
import (
"bytes"
"fmt"
"reflect"
"runtime"
"strings"
"testing"
)
// Run discovers every "Test"-prefixed method on the fixture type via
// reflection and executes each one as a subtest. A fresh fixture instance is
// created per test method, its embedded Fixture field is injected, and
// optional Setup/Teardown methods are invoked around the test.
// NOTE(review): fixture is assumed to be a pointer to a struct that embeds
// *Fixture in a field named "Fixture"; any other shape panics in reflection.
func Run(fixture interface{}, t *testing.T) {
	fixtureType := reflect.TypeOf(fixture)
	for x := 0; x < fixtureType.NumMethod(); x++ {
		testMethodName := fixtureType.Method(x).Name
		if strings.HasPrefix(testMethodName, "Test") {
			t.Run(testMethodName, func(t *testing.T) {
				instance := reflect.New(fixtureType.Elem())
				innerFixture := newFixture(t, testing.Verbose())
				field := instance.Elem().FieldByName("Fixture")
				field.Set(reflect.ValueOf(innerFixture))
				// Finalize flushes the buffered log and recovers panics even if
				// the test method below panics.
				defer innerFixture.Finalize()
				if setup := instance.MethodByName("Setup"); setup.IsValid() {
					setup.Call(nil)
				}
				instance.MethodByName(testMethodName).Call(nil)
				if teardown := instance.MethodByName("Teardown"); teardown.IsValid() {
					teardown.Call(nil)
				}
			})
		}
	}
}
// Fixture wraps a *testing.T with a buffered log that is only emitted when
// the test fails or verbose mode is on (see Finalize).
type Fixture struct {
	t       *testing.T    // the test this fixture reports to
	log     *bytes.Buffer // output buffered until Finalize decides to emit it
	verbose bool          // when true, the log is emitted even on success
}

// newFixture builds a Fixture around t with an empty log buffer.
func newFixture(t *testing.T, verbose bool) *Fixture {
	return &Fixture{t: t, verbose: verbose, log: &bytes.Buffer{}}
}
func (this *Fixture) So(actual interface{}, assert assertion, expected ...interface{}) bool {
failure := assert(actual, expected...)
failed := len(failure) > 0
if failed {
this.fail(failure)
}
return !failed
}
func (this *Fixture) fail(failure string) {
this.t.Fail()
this.Print(failure)
}
// Assert tests a boolean which, if not true, marks the current test case as failed and
// prints the provided message (or a default message when none is given).
func (this *Fixture) Assert(condition bool, messages ...string) bool {
	if !condition {
		if len(messages) == 0 {
			messages = append(messages, "Expected condition to be true, was false instead.")
		}
		this.fail(strings.Join(messages, ", "))
	}
	return condition
}

// AssertEqual fails unless expected == actual (shallow comparison).
func (this *Fixture) AssertEqual(expected, actual interface{}) bool {
	return this.Assert(expected == actual, fmt.Sprintf(comparisonFormat, fmt.Sprint(expected), fmt.Sprint(actual)))
}

// AssertSprintEqual compares the fmt.Sprint renderings of both values.
func (this *Fixture) AssertSprintEqual(expected, actual interface{}) bool {
	return this.AssertEqual(fmt.Sprint(expected), fmt.Sprint(actual))
}

// AssertSprintfEqual compares both values rendered with the given format.
func (this *Fixture) AssertSprintfEqual(expected, actual interface{}, format string) bool {
	return this.AssertEqual(fmt.Sprintf(format, expected), fmt.Sprintf(format, actual))
}

// AssertDeepEqual fails unless the two values are reflect.DeepEqual.
func (this *Fixture) AssertDeepEqual(expected, actual interface{}) bool {
	return this.Assert(reflect.DeepEqual(expected, actual),
		fmt.Sprintf(comparisonFormat, fmt.Sprintf("%#v", expected), fmt.Sprintf("%#v", actual)))
}

// comparisonFormat is the two-line expected/actual template used above.
const comparisonFormat = "Expected: [%s]\nActual: [%s]"
// Error and Errorf mark the test failed and record the formatted message.
func (this *Fixture) Error(args ...interface{})            { this.fail(fmt.Sprint(args...)) }
func (this *Fixture) Errorf(f string, args ...interface{}) { this.fail(fmt.Sprintf(f, args...)) }

// Print, Printf, Println, and Write append to the buffered log (not to the
// test output directly); Finalize decides whether the buffer is logged.
func (this *Fixture) Print(a ...interface{})                 { fmt.Fprint(this.log, a...) }
func (this *Fixture) Printf(format string, a ...interface{}) { fmt.Fprintf(this.log, format, a...) }
func (this *Fixture) Println(a ...interface{})               { fmt.Fprintln(this.log, a...) }
func (this *Fixture) Write(p []byte) (int, error)            { return this.log.Write(p) }

// Failed and Name delegate to the underlying *testing.T.
func (this *Fixture) Failed() bool { return this.t.Failed() }
func (this *Fixture) Name() string { return this.t.Name() }
// Finalize recovers from any panic raised during the test and flushes the
// buffered log to t.Log when the test failed or verbose mode is on.
// It must be invoked via defer (as the test runner above does) so that
// recover() can intercept a panic from the test body.
func (this *Fixture) Finalize() {
	if r := recover(); r != nil {
		this.recoverPanic(r)
	}
	if this.t.Failed() || (this.verbose && this.log.Len() > 0) {
		this.t.Log("\n" + strings.TrimSpace(this.log.String()) + "\n")
	}
}
// recoverPanic records a recovered panic value plus its stack trace in the
// fixture log and marks the test as failed.
func (this *Fixture) recoverPanic(r interface{}) {
	this.Println("PANIC:", r)
	buffer := make([]byte, 1024*16)
	// runtime.Stack returns the number of bytes it wrote; only that prefix
	// is valid. The original printed the entire 16 KiB buffer, appending
	// thousands of NUL bytes (strings.TrimSpace does not strip NULs).
	n := runtime.Stack(buffer, false)
	this.Println(strings.TrimSpace(string(buffer[:n])))
	this.t.Fail()
}
// assertion is a copy of github.com/smartystreets/assertions.assertion.
// It returns the empty string when the assertion holds, or a description
// of the failure otherwise. (Trailing extraction residue removed from the
// original line so the file parses.)
type assertion func(actual interface{}, expected ...interface{}) string
package cmd
import (
"fmt"
"github.com/aurumbot/flags"
"github.com/aurumbot/lib/dat"
f "github.com/aurumbot/lib/foundation"
dsg "github.com/bwmarrin/discordgo"
"strings"
)
// init registers the "botcfg" command, which lets server administrators
// tune low-level bot configuration (prefix, admin roles, blacklists).
func init() {
	Cmd["botcfg"] = &f.Command{
		Name: "Bot configuration tool",
		Help: `Info: Allows administrators to set lower level configuration values for the bot.
Options:
**prefix <-s> <prefix>** : set the default prefix for the bot
**admins <-a|-r|-l> [role ID...]** : add or remove a botadmin role, which gives users with the role authorization to all bot abilities. Multiple items can be modified, separated by spaces.
**blchans <-a|-r|-l> [channelID]** : add or remove a channel to the blacklist. Blacklisted channels will never have the bot respond to commands (overwritted by admin permissions). Multiple items can be modified, separated by spaces.
**blroles <-a|-r|-l> [roleID]** : add or remove a role to the blacklist. Users with blacklisted roles will never have the bot respond to their commands. (overwritten by admin permissions) Multiple items can be modified, separated by spaces.
**Usage : ` + f.Config.Prefix + `botcfg <flag> <value [args...]>
	` + f.Config.Prefix + `botcfg admins -a 452901410065874954 485528736276414505
Powered by Aurum at https://github.com/aurumbot/core`,
		Perms:   dsg.PermissionAdministrator, // admin-only command
		Version: "v1.0.0β",
		Action:  botcfg,
	}
}
// botcfg dispatches the "botcfg" command: it parses the message flags and
// routes the first unflagged token (prefix/admins/blchans/blroles) to the
// matching sub-handler, replying in the originating channel.
func botcfg(session *dsg.Session, message *dsg.Message) {
	// Require at least one argument after the command name.
	if len(strings.Split(message.Content, " ")) <= 1 {
		session.ChannelMessageSend(message.ChannelID, fmt.Sprintf("You need to provide a valid operator. Please use `%vhelp botcfg` for info.", f.Config.Prefix))
		return
	}
	flagsParsed := flags.Parse(message.Content)
	for i := range flagsParsed {
		// "--unflagged" carries the bare operator word (e.g. "admins").
		if flagsParsed[i].Name == "--unflagged" {
			switch flagsParsed[i].Value {
			case "prefix":
				session.ChannelMessageSend(message.ChannelID, prefix(flagsParsed))
			case "admins":
				session.ChannelMessageSend(message.ChannelID, admins(flagsParsed))
			case "blchans":
				session.ChannelMessageSend(message.ChannelID, blchans(flagsParsed))
			case "blroles":
				session.ChannelMessageSend(message.ChannelID, blroles(flagsParsed))
			default:
				session.ChannelMessageSend(message.ChannelID, fmt.Sprintf("You need to provide a valid operator. Please use `%vhelp botcfg` for info.", f.Config.Prefix))
			}
		}
	}
}
// prefix handles the "prefix" operator: -s <value> sets the bot's command
// prefix and persists the configuration to aurum/preferences.json.
// It returns the user-facing status message.
func prefix(flgs []*flags.Flag) string {
	for _, flg := range flgs {
		// Replaced the original `switch true { case ... }` with a plain
		// guard, and the no-argument fmt.Sprintf calls with literals.
		if flg.Name != "-s" {
			continue
		}
		f.Config.Prefix = flg.Value
		if err := dat.Save("aurum/preferences.json", f.Config); err != nil {
			dat.Log.Println(err)
			return fmt.Sprintf("Encountered error trying to save changes:\n```%v```", err)
		}
		return fmt.Sprintf("Prefix successfully changed to **%v**.", f.Config.Prefix)
	}
	return "Unable to change prefix. Are you using correct syntax?"
}
// admins handles the "admins" operator:
//
//	-a <id...>  append bot-admin role IDs
//	-r <id...>  remove matching role IDs
//	-l          list the configured role IDs
//
// Changes are persisted to aurum/preferences.json. The returned string is
// the user-facing status message.
func admins(flgs []*flags.Flag) string {
	for i := range flgs {
		switch flgs[i].Name {
		case "-a":
			f.Config.Admins = append(f.Config.Admins, strings.Split(flgs[i].Value, " ")...)
			if err := dat.Save("aurum/preferences.json", f.Config); err != nil {
				dat.Log.Println(err)
				return fmt.Sprintf("Encountered error trying to save changes:\n```%v```", err)
			}
			return "Successfully added admin roles."
		case "-r":
			// Build a set of IDs to drop, then filter in place. The original
			// swap-removed elements while ranging over the same slice, which
			// skipped the element swapped into the current slot and could
			// index past the shortened slice (out-of-range panic) when
			// removing more than one ID.
			remove := make(map[string]struct{})
			for _, role := range strings.Split(flgs[i].Value, " ") {
				remove[role] = struct{}{}
			}
			kept := f.Config.Admins[:0]
			for _, role := range f.Config.Admins {
				if _, drop := remove[role]; !drop {
					kept = append(kept, role)
				}
			}
			f.Config.Admins = kept
			if err := dat.Save("aurum/preferences.json", f.Config); err != nil {
				dat.Log.Println(err)
				return fmt.Sprintf("Encountered error trying to save changes:\n```%v```", err)
			}
			return "Successfully removed given admin roles."
		case "-l":
			// Note: closing ** added so the Discord bold markup is balanced.
			msg := "**Administrator Role IDs:**\n"
			for _, role := range f.Config.Admins {
				msg += fmt.Sprintf("\n- %v", role)
			}
			return msg
		}
	}
	return "Unable to complete task. Are you using correct syntax?"
}
// blroles is a stub for the "blroles" (blacklisted roles) operator.
// TODO(review): not implemented — it currently just echoes the first
// flag's name instead of modifying the role blacklist.
func blroles(flgs []*flags.Flag) string {
	return flgs[0].Name
}
func blchans(flgs []*flags.Flag) string {
return flgs[0].Name
} | handler/botcfg.go | 0.573201 | 0.410225 | botcfg.go | starcoder |
package model
// TypeExpr provides a type name that may be rewritten to use a package name.
import (
"fmt"
"go/ast"
)
// TypeExpr is a type expression plus the position at which a package
// qualifier can be spliced in (e.g. "[]*MyType" -> "[]*pkg.MyType").
type TypeExpr struct {
	Expr     string // The unqualified type expression, e.g. "[]*MyType"
	PkgName  string // The default package idenifier
	pkgIndex int    // The index where the package identifier should be inserted.
	Valid    bool   // false when the expression could not be generated
}
// NewTypeExprFromData builds a TypeExpr directly from its component fields.
func NewTypeExprFromData(expr, pkgName string, pkgIndex int, valid bool) TypeExpr {
	return TypeExpr{
		Expr:     expr,
		PkgName:  pkgName,
		pkgIndex: pkgIndex,
		Valid:    valid,
	}
}
// NewTypeExprFromAst returns the syntactic expression for referencing the
// given AST type in Go, qualified with pkgName where needed. Unsupported
// expressions yield a TypeExpr with Valid == false whose Expr holds an
// error message.
func NewTypeExprFromAst(pkgName string, expr ast.Expr) TypeExpr {
	// Renamed from "error", which shadowed the predeclared error type.
	errMsg := ""
	switch t := expr.(type) {
	case *ast.Ident:
		if IsBuiltinType(t.Name) {
			pkgName = "" // builtins are never package-qualified
		}
		return TypeExpr{t.Name, pkgName, 0, true}
	case *ast.SelectorExpr:
		// Already-qualified name (pkg.Type): resolve the package part first.
		e := NewTypeExprFromAst(pkgName, t.X)
		return NewTypeExprFromData(t.Sel.Name, e.Expr, 0, e.Valid)
	case *ast.StarExpr:
		e := NewTypeExprFromAst(pkgName, t.X)
		return NewTypeExprFromData("*"+e.Expr, e.PkgName, e.pkgIndex+1, e.Valid)
	case *ast.ArrayType:
		e := NewTypeExprFromAst(pkgName, t.Elt)
		return NewTypeExprFromData("[]"+e.Expr, e.PkgName, e.pkgIndex+2, e.Valid)
	case *ast.MapType:
		// Only maps keyed by builtin types are supported.
		if identKey, ok := t.Key.(*ast.Ident); ok && IsBuiltinType(identKey.Name) {
			e := NewTypeExprFromAst(pkgName, t.Value)
			return NewTypeExprFromData("map["+identKey.Name+"]"+e.Expr, e.PkgName, e.pkgIndex+len("map["+identKey.Name+"]"), e.Valid)
		}
		errMsg = fmt.Sprintf("Failed to generate name for Map field :%v. Make sure the field name is valid.", t.Key)
	case *ast.Ellipsis:
		// Variadic ...T is rendered as a slice []T.
		e := NewTypeExprFromAst(pkgName, t.Elt)
		return NewTypeExprFromData("[]"+e.Expr, e.PkgName, e.pkgIndex+2, e.Valid)
	default:
		errMsg = fmt.Sprintf("Failed to generate name for field: %v Package: %v. Make sure the field name is valid.", expr, pkgName)
	}
	return NewTypeExprFromData(errMsg, "", 0, false)
}
// TypeName returns the fully-qualified type name for this expression.
// The caller may optionally specify a package name to override the default;
// an empty package yields the unqualified expression.
func (e TypeExpr) TypeName(pkgOverride string) string {
	pkg := FirstNonEmpty(pkgOverride, e.PkgName)
	if pkg == "" {
		return e.Expr
	}
	prefix, suffix := e.Expr[:e.pkgIndex], e.Expr[e.pkgIndex:]
	return prefix + pkg + "." + suffix
}
// builtInTypes is the set of Go predeclared type names, used as a
// membership set (struct{} values carry no storage).
var builtInTypes = map[string]struct{}{
	"bool":       {},
	"byte":       {},
	"complex128": {},
	"complex64":  {},
	"error":      {},
	"float32":    {},
	"float64":    {},
	"int":        {},
	"int16":      {},
	"int32":      {},
	"int64":      {},
	"int8":       {},
	"rune":       {},
	"string":     {},
	"uint":       {},
	"uint16":     {},
	"uint32":     {},
	"uint64":     {},
	"uint8":      {},
	"uintptr":    {},
}
// IsBuiltinType reports whether name is one of Go's predeclared types
// (see builtInTypes above).
func IsBuiltinType(name string) bool {
	_, ok := builtInTypes[name]
	return ok
}
// FirstNonEmpty returns the first string in strs that is not empty, or ""
// when every argument is empty (or no arguments are given).
func FirstNonEmpty(strs ...string) string {
	for _, s := range strs {
		if s != "" {
			return s
		}
	}
	return ""
}
package privacy
import (
"crypto/elliptic"
"encoding/json"
"math/big"
"errors"
"github.com/incognitochain/incognito-chain/common"
"github.com/incognitochain/incognito-chain/common/base58"
)
// Sentinel errors returned by the point-decompression and inversion paths.
var InvalidXCoordErr = errors.New("X is not an abscissa of a point on the elliptic curve")
var IsNotAnEllipticPointErr = errors.New("the point is not an elliptic point on P256 curve")

// The NIST curve P-256 will be used in the whole protocol
// https://csrc.nist.gov/publications/detail/fips/186/3/archive/2009-06-25
var Curve = elliptic.P256()
// EllipticPoint represents a point (X, y) on the elliptic curve.
// Coordinates are kept unexported; use GetX/GetY/Set to access them.
type EllipticPoint struct {
	x, y *big.Int
}
// GetX returns the point's X coordinate (the returned *big.Int is shared,
// not a copy).
func (ellipticPoint EllipticPoint) GetX() *big.Int {
	return ellipticPoint.x
}

// GetY returns the point's Y coordinate (the returned *big.Int is shared,
// not a copy).
func (ellipticPoint EllipticPoint) GetY() *big.Int {
	return ellipticPoint.y
}
// Zero resets the receiver to the point (0, 0), allocating fresh big.Ints.
func (point *EllipticPoint) Zero() {
	point.x = big.NewInt(0)
	point.y = big.NewInt(0)
}
// UnmarshalJSON decodes a JSON string containing a base58-check encoded,
// compressed point and decompresses it into the receiver.
// Unlike the original, errors from json.Unmarshal and Decompress are now
// propagated instead of being silently dropped (which could leave the
// point in an invalid state without any signal to the caller).
func (point *EllipticPoint) UnmarshalJSON(data []byte) error {
	dataStr := ""
	if err := json.Unmarshal(data, &dataStr); err != nil {
		return err
	}
	temp, _, err := base58.Base58Check{}.Decode(dataStr)
	if err != nil {
		return err
	}
	return point.Decompress(temp)
}
// MarshalJSON compresses the point to bytes, base58-check encodes them,
// and marshals the resulting string as JSON (inverse of UnmarshalJSON).
func (point EllipticPoint) MarshalJSON() ([]byte, error) {
	data := point.Compress()
	temp := base58.Base58Check{}.Encode(data, common.ZeroByte)
	return json.Marshal(temp)
}
// computeYCoord derives point.y from point.x using the P-256 curve
// equation y^2 = x^3 - 3x + B (mod p). Returns InvalidXCoordErr when x is
// not the abscissa of any curve point. Statement order matters: the
// candidate root is verified by squaring before y is assigned.
func (point *EllipticPoint) computeYCoord() error {
	// Y = +-sqrt(x^3 - 3*x + B)
	xCube := new(big.Int).Exp(point.x, big.NewInt(3), Curve.Params().P)
	xCube.Add(xCube, Curve.Params().B)
	xCube.Sub(xCube, new(big.Int).Mul(point.x, big.NewInt(3)))
	xCube.Mod(xCube, Curve.Params().P)

	// compute sqrt(x^3 - 3*x + B) mod p by exponentiation with (p+1)/4
	// (valid because p ≡ 3 mod 4 for P-256)
	// https://bitcointalk.org/index.php?topic=162805.msg1712294#msg1712294
	tmpY := new(big.Int).Exp(xCube, padd1Div4(Curve.Params().P), Curve.Params().P)

	// check if y is a square root of x^3 - 3*x + B.
	ySquared := new(big.Int).Mul(tmpY, tmpY)
	ySquared.Mod(ySquared, Curve.Params().P)

	// check if (X, Y) is a point on the curve
	if ySquared.Cmp(xCube) != 0 {
		return InvalidXCoordErr
	}
	point.y = tmpY
	return nil
}
// inverse returns the additive inverse of the point: (x, p-y) mod p.
// It fails with IsNotAnEllipticPointErr when the receiver is not on the
// curve. The receiver is not modified.
func (point EllipticPoint) inverse() (*EllipticPoint, error) {
	// check if point is on the curve
	if !Curve.IsOnCurve(point.x, point.y) {
		return nil, IsNotAnEllipticPointErr
	}

	resPoint := new(EllipticPoint)
	resPoint.Zero()

	// the inverse of the point (x, y) mod P is the point (x, -y) mod P
	resPoint.x.Set(point.x)
	resPoint.y.Sub(Curve.Params().P, point.y)
	resPoint.y.Mod(resPoint.y, Curve.Params().P)

	return resPoint, nil
}
// randomize rejection-samples a random abscissa until it yields a curve
// point that also passes IsSafe (on-curve and not of order two).
func (point *EllipticPoint) randomize() {
	for {
		point.x = RandScalar()
		err := point.computeYCoord()
		if (err == nil) && (point.IsSafe()) {
			break
		}
	}
}
// IsSafe reports whether the point lies on the curve and does not have
// order two (doubling it must not give the zero point (0, 0)).
func (point EllipticPoint) IsSafe() bool {
	if !Curve.IsOnCurve(point.x, point.y) {
		return false
	}

	// A point of order two doubles to the identity.
	var doublePoint EllipticPoint
	doublePoint.x, doublePoint.y = Curve.Double(point.x, point.y)

	zero := new(EllipticPoint)
	zero.Zero()

	return !doublePoint.IsEqual(zero)
}
// Compress encodes the point into CompressedEllipticPointSize (33) bytes:
// one format byte (pointCompressed, with its low bit carrying the parity
// of Y) followed by X left-padded to common.BigIntSize bytes.
// Returns nil when the point is not on the curve.
func (point EllipticPoint) Compress() []byte {
	if Curve.IsOnCurve(point.x, point.y) {
		b := make([]byte, 0, CompressedEllipticPointSize)
		format := pointCompressed
		// Low bit of the format byte records whether Y is odd.
		if isOdd(point.y) {
			format |= 0x1
		}
		b = append(b, format)
		return paddedAppend(common.BigIntSize, b, point.x.Bytes())
	}
	return nil
}
// Decompress decodes a byte array created by Compress back into the
// receiver: byte 0 is the format (its low bit is the parity of Y), bytes
// 1..32 are the big-endian X coordinate, from which Y is recomputed.
func (point *EllipticPoint) Decompress(compressPointBytes []byte) error {
	// Validate the length up front: the original indexed bytes [0] and
	// [1:33] unconditionally and panicked on short input.
	if len(compressPointBytes) < 33 {
		return errors.New("invalid length of compressed compressPoint bytes")
	}
	format := compressPointBytes[0]
	yBit := (format & 0x1) == 0x1
	format &= ^byte(0x1)

	if format != pointCompressed {
		return errors.New("invalid magic in compressed compressPoint bytes")
	}

	point.x = new(big.Int).SetBytes(compressPointBytes[1:33])
	err := point.computeYCoord()
	if err != nil {
		return err
	}

	// computeYCoord yields one of the two square roots; flip to the other
	// one when its parity does not match the encoded parity bit.
	if yBit != isOdd(point.y) {
		point.y.Sub(Curve.Params().P, point.y)
	}
	return nil
}
// Hash derives a new elliptic point from the receiver and an index by
// repeatedly hashing X||index until the digest is the abscissa of a safe
// curve point (rejection sampling, mirroring randomize).
func (point EllipticPoint) Hash(index int64) *EllipticPoint {
	res := new(EllipticPoint)
	res.Zero()

	// Seed: X left-padded to a fixed width, followed by the index bytes
	// (a single zero byte when index == 0, since big.NewInt(0).Bytes()
	// would be empty).
	tmp := common.AddPaddingBigInt(point.x, common.BigIntSize)
	if index == 0 {
		tmp = append(tmp, byte(0))
	} else {
		tmp = append(tmp, big.NewInt(index).Bytes()...)
	}

	for {
		tmp = common.HashB(tmp)
		res.x.SetBytes(tmp)
		err := res.computeYCoord()
		if (err == nil) && (res.IsSafe()) {
			break
		}
	}
	return res
}
// Set copies the coordinates x and y into the receiver, lazily allocating
// its big.Int fields on first use.
func (point *EllipticPoint) Set(x, y *big.Int) {
	if point.x == nil {
		point.x = new(big.Int)
	}
	point.x.Set(x)
	if point.y == nil {
		point.y = new(big.Int)
	}
	point.y.Set(y)
}
// Add returns the curve sum of the receiver and targetPoint as a new
// point; neither operand is modified.
func (point EllipticPoint) Add(targetPoint *EllipticPoint) *EllipticPoint {
	res := new(EllipticPoint)
	res.x, res.y = Curve.Add(point.x, point.y, targetPoint.x, targetPoint.y)
	return res
}
// Sub returns point - targetPoint, implemented as point + (-targetPoint).
// It fails when targetPoint is not on the curve (see inverse).
func (point EllipticPoint) Sub(targetPoint *EllipticPoint) (*EllipticPoint, error) {
	invPoint, err := targetPoint.inverse()
	if err != nil {
		return nil, err
	}

	res := point.Add(invPoint)
	return res, nil
}
// IsEqual reports whether the receiver and p have identical coordinates.
func (point EllipticPoint) IsEqual(p *EllipticPoint) bool {
	if point.x.Cmp(p.x) != 0 {
		return false
	}
	return point.y.Cmp(p.y) == 0
}
// ScalarMult returns factor*P for factor in Z_N and the receiver P in
// E(Z_P), as a new point.
func (point EllipticPoint) ScalarMult(factor *big.Int) *EllipticPoint {
	res := new(EllipticPoint)
	res.Zero()
	res.x, res.y = Curve.ScalarMult(point.x, point.y, factor.Bytes())
	return res
}
// Derive returns a pseudo-random elliptic curve point P = F(seed, derivator), where
// F is a pseudo-random function defined by F(x, y) = 1/(x + y)*G, where x, y are integers,
// seed and derivator are integers of size at least 32 bytes,
// G is a generating point of the group of points of an elliptic curve.
func (point EllipticPoint) Derive(seed, derivator *big.Int) *EllipticPoint {
// point must be on the curve
if !point.IsSafe() {
return nil
}
res := point.ScalarMult(new(big.Int).ModInverse(new(big.Int).Add(seed, derivator), Curve.Params().N))
return res
} | privacy/ec.go | 0.774839 | 0.505066 | ec.go | starcoder |
package rpc
import (
"fmt"
"math"
"time"
"github.com/ebay/beam/api"
"github.com/ebay/beam/logentry"
)
// KGObjectFromAPI returns a new KGObject instance that is equivilent to the
// supplied API representation of a KGObject. The rpc.KGObject encapsulates
// a binary encoding of the KGObject. A nil Value maps to the zero KGObject;
// an unrecognized Value type is a programmer error and panics.
func KGObjectFromAPI(from api.KGObject) KGObject {
	if from.Value == nil {
		return KGObject{}
	}
	switch t := from.Value.(type) {
	case *api.KGObject_AString:
		return AString(t.AString, from.LangID)
	case *api.KGObject_AFloat64:
		return AFloat64(t.AFloat64, from.UnitID)
	case *api.KGObject_AInt64:
		return AInt64(t.AInt64, from.UnitID)
	case *api.KGObject_ATimestamp:
		return ATimestamp(t.ATimestamp.Value, logentry.TimestampPrecision(t.ATimestamp.Precision), from.UnitID)
	case *api.KGObject_ABool:
		return ABool(t.ABool, from.UnitID)
	case *api.KGObject_AKID:
		return AKID(t.AKID)
	default:
		panic(fmt.Sprintf("KGObjectFromAPI encountered a KGObject with an unexpected type %T/%v", from.Value, from.Value))
	}
}
// AString returns a new KGObject instance containing the supplied string and
// language ID. Encoding: type byte, the raw string bytes, a zero terminator,
// then the language ID written via appendUInt64 with width 19 (hence the
// 1+len(s)+20 size hint).
func AString(s string, langID uint64) KGObject {
	b := new(kgObjectBuilder)
	b.resetAndWriteType(KtString, 1+len(s)+20)
	b.buff.WriteString(s)
	b.buff.WriteByte(0)
	appendUInt64(&b.buff, 19, langID)
	return KGObject{b.buff.String()}
}
// AFloat64 returns a new KGObject instance containing the supplied float and
// Units ID. The IEEE-754 bits are transformed before writing: negative
// values have all bits inverted, non-negative values have only the sign bit
// flipped — presumably so the byte encoding sorts in numeric order (the
// standard order-preserving float trick); confirm against readers.
func AFloat64(fv float64, unitID uint64) KGObject {
	b := new(kgObjectBuilder)
	b.resetAndWriteType(KtFloat64, 1+19+8)
	appendUInt64(&b.buff, 19, unitID)
	u := math.Float64bits(fv)
	if fv < 0 {
		u = u ^ maskAllBits
	} else {
		u = u ^ maskMsbOnly
	}
	b.writeUInt64(u)
	return KGObject{b.buff.String()}
}
// AInt64 returns a new KGObject instance containing the supplied int and
// Units ID. The sign bit is flipped (XOR with maskMsbOnly) before writing —
// presumably so negative values order below positive ones byte-wise;
// confirm against readers.
func AInt64(v int64, unitID uint64) KGObject {
	b := new(kgObjectBuilder)
	b.resetAndWriteType(KtInt64, 1+19+8)
	appendUInt64(&b.buff, 19, unitID)
	b.writeUInt64(uint64(v) ^ maskMsbOnly)
	return KGObject{b.buff.String()}
}
// The ATimestampX helpers below each build a UTC time.Time from the given
// components (unspecified components default to their zero value) and tag
// it with the matching precision before delegating to ATimestamp.

// ATimestampY returns a new KGObject instance containing a Timestamp for the specified year and Units ID.
func ATimestampY(year int, unitID uint64) KGObject {
	return ATimestamp(time.Date(year, time.January, 1, 0, 0, 0, 0, time.UTC), logentry.Year, unitID)
}

// ATimestampYM returns a new KGObject instance containing a Timestamp for the specified year, month and Units ID.
func ATimestampYM(year int, month int, unitID uint64) KGObject {
	return ATimestamp(time.Date(year, time.Month(month), 1, 0, 0, 0, 0, time.UTC), logentry.Month, unitID)
}

// ATimestampYMD returns a new KGObject instance containing a Timestamp for the specified year, month, day and Units ID.
func ATimestampYMD(year int, month int, day int, unitID uint64) KGObject {
	return ATimestamp(time.Date(year, time.Month(month), day, 0, 0, 0, 0, time.UTC), logentry.Day, unitID)
}

// ATimestampYMDH returns a new KGObject instance containing a Timestamp for the specified year, month, day, hour and Units ID.
func ATimestampYMDH(year, month, day, hour int, unitID uint64) KGObject {
	return ATimestamp(time.Date(year, time.Month(month), day, hour, 0, 0, 0, time.UTC), logentry.Hour, unitID)
}

// ATimestampYMDHM returns a new KGObject instance containing a Timestamp for the specified year, month, day, hour, minutes and Units ID.
func ATimestampYMDHM(year, month, day, hour, minute int, unitID uint64) KGObject {
	return ATimestamp(time.Date(year, time.Month(month), day, hour, minute, 0, 0, time.UTC), logentry.Minute, unitID)
}

// ATimestampYMDHMS returns a new KGObject instance containing a Timestamp for the specified year, month, day, hour, minutes, seconds and Units ID.
func ATimestampYMDHMS(year, month, day, hour, minute, second int, unitID uint64) KGObject {
	return ATimestamp(time.Date(year, time.Month(month), day, hour, minute, second, 0, time.UTC), logentry.Second, unitID)
}

// ATimestampYMDHMSN returns a new KGObject instance containing a Timestamp for the specified year, month, day, hour, minutes, seconds, nanoseonds, and Units ID.
func ATimestampYMDHMSN(year, month, day, hour, minute, second, nsec int, unitID uint64) KGObject {
	return ATimestamp(time.Date(year, time.Month(month), day, hour, minute, second, nsec, time.UTC), logentry.Nanosecond, unitID)
}
// ATimestamp returns a new KGObject instance containing a Timestamp for the
// supplied dateTime, precision and Units ID. Components finer than the
// given precision are normalized to their zero value before encoding, so
// two times that agree up to the precision encode identically.
// Encoding: type byte, unit ID (19-byte appendUInt64), then year (2 bytes),
// month, day, hour, minute, second (1 byte each), nanoseconds (4 bytes),
// and the precision (1 byte).
func ATimestamp(v time.Time, p logentry.TimestampPrecision, unitID uint64) KGObject {
	b := new(kgObjectBuilder)
	b.resetAndWriteType(KtTimestamp, 1+19+12)
	appendUInt64(&b.buff, 19, unitID)
	t := v.UTC()
	// Defaults for components below the requested precision.
	y, mo, d, h, mi, s, n := 0, time.January, 1, 0, 0, 0, 0
	if p >= logentry.Year {
		y = t.Year()
	}
	if p >= logentry.Month {
		mo = t.Month()
	}
	if p >= logentry.Day {
		d = t.Day()
	}
	if p >= logentry.Hour {
		h = t.Hour()
	}
	if p >= logentry.Minute {
		mi = t.Minute()
	}
	if p >= logentry.Second {
		s = t.Second()
	}
	if p >= logentry.Nanosecond {
		n = t.Nanosecond()
	}
	b.writeUInt16(y)
	b.writeUInt8(int(mo))
	b.writeUInt8(d)
	b.writeUInt8(h)
	b.writeUInt8(mi)
	b.writeUInt8(s)
	b.writeUInt32(n)
	b.writeUInt8(int(p))
	return KGObject{b.buff.String()}
}
// ABool returns a new KGObject instance containing a Boolean value and
// Units ID. Encoding: type byte, unit ID (19-byte appendUInt64), then a
// single byte (1 for true, 0 for false).
func ABool(v bool, unitID uint64) KGObject {
	b := new(kgObjectBuilder)
	b.resetAndWriteType(KtBool, 1+19+1)
	appendUInt64(&b.buff, 19, unitID)
	if v {
		b.buff.WriteByte(1)
	} else {
		b.buff.WriteByte(0)
	}
	return KGObject{b.buff.String()}
}
// AKID returns an new KGObject intance containing a KID value.
func AKID(kid uint64) KGObject {
b := new(kgObjectBuilder)
b.resetAndWriteType(KtKID, 1+8)
b.writeUInt64(kid)
return KGObject{b.buff.String()}
} | src/github.com/ebay/beam/rpc/kgobject_new.go | 0.79158 | 0.532425 | kgobject_new.go | starcoder |
package mmap
import (
"math"
"github.com/alexeymaximov/go-bio/segment"
"github.com/alexeymaximov/go-bio/transaction"
)
// MaxInt is the maximum platform dependent signed integer
// (all bits set except the sign bit).
const MaxInt = int(^uint(0) >> 1)
// Mode is a mapping mode controlling the access protection and the
// visibility of writes to other processes.
type Mode int

const (
	// Share this mapping and allow the read-only access.
	ModeReadOnly Mode = iota

	// Share this mapping.
	// Updates to the mapping are visible to other processes
	// mapping the same region, and are carried through to the underlying file.
	// To precisely control when updates are carried through to the underlying file
	// requires the use of Mapping.Sync.
	ModeReadWrite

	// Create a private copy-on-write mapping.
	// Updates to the mapping are not visible to other processes
	// mapping the same region, and are not carried through to the underlying file.
	// It is unspecified whether changes made to the file are visible in the mapped region.
	ModeWriteCopy
)
// Flag is a mapping flag; flags are bit values intended to be OR-combined.
type Flag int

const (
	// Mapped memory pages may be executed.
	FlagExecutable Flag = 1 << iota
)
// generic is a cross-platform parts of a mapping; its fields are accessed
// by the Mapping methods below, so Mapping presumably embeds it (the
// platform-specific part is defined elsewhere).
type generic struct {
	// writable specifies whether the mapped memory pages may be written.
	writable bool

	// executable specifies whether the mapped memory pages may be executed.
	executable bool

	// address specifies the pointer to the mapped memory.
	address uintptr

	// memory specifies the byte slice which wraps the mapped memory.
	// A nil memory marks the mapping as closed (see ReadAt/WriteAt/Begin).
	memory []byte

	// segment specifies the lazily initialized data segment on top of the mapped memory.
	segment *segment.Segment
}
// Writable returns true if the mapped memory pages may be written.
func (m *Mapping) Writable() bool {
	return m.writable
}

// Executable returns true if the mapped memory pages may be executed.
func (m *Mapping) Executable() bool {
	return m.executable
}

// Address returns the pointer to the mapped memory.
func (m *Mapping) Address() uintptr {
	return m.address
}

// Length returns the mapped memory length in bytes.
func (m *Mapping) Length() uintptr {
	return uintptr(len(m.memory))
}

// Memory returns the byte slice which wraps the mapped memory.
// The slice aliases the mapping; writes through it affect the mapping.
func (m *Mapping) Memory() []byte {
	return m.memory
}

// Segment returns the data segment on top of the mapped memory, creating
// it on first use. NOTE(review): the lazy initialization is unsynchronized,
// so concurrent first calls would race — confirm callers serialize access.
func (m *Mapping) Segment() *segment.Segment {
	if m.segment == nil {
		m.segment = segment.New(0, m.memory)
	}
	return m.segment
}
// access checks given offset and length to match the available bounds
// and returns ErrOutOfBounds error at the access violation.
// The middle comparison guards against int64 overflow of offset+length.
func (m *Mapping) access(offset int64, length int) error {
	if offset < 0 || offset > math.MaxInt64-int64(length) || offset+int64(length) > int64(len(m.memory)) {
		return ErrOutOfBounds
	}
	return nil
}
// ReadAt reads len(buf) bytes at the given offset from start of the mapped memory from the mapped memory.
// If the given offset is out of the available bounds or there are not enough bytes to read
// the ErrOutOfBounds error will be returned; a closed mapping yields ErrClosed.
// Otherwise len(buf) will be returned with no errors.
// ReadAt implements the io.ReaderAt interface.
func (m *Mapping) ReadAt(buf []byte, offset int64) (int, error) {
	if m.memory == nil {
		return 0, ErrClosed
	}
	if err := m.access(offset, len(buf)); err != nil {
		return 0, err
	}
	return copy(buf, m.memory[offset:]), nil
}
// WriteAt writes len(buf) bytes at the given offset from start of the mapped memory into the mapped memory.
// If the given offset is out of the available bounds or there are not enough space to write all given bytes
// the ErrOutOfBounds error will be returned; a closed mapping yields ErrClosed
// and a read-only mapping yields ErrReadOnly.
// Otherwise len(buf) will be returned with no errors.
// WriteAt implements the io.WriterAt interface.
func (m *Mapping) WriteAt(buf []byte, offset int64) (int, error) {
	if m.memory == nil {
		return 0, ErrClosed
	}
	if !m.writable {
		return 0, ErrReadOnly
	}
	if err := m.access(offset, len(buf)); err != nil {
		return 0, err
	}
	return copy(m.memory[offset:], buf), nil
}
// Begin starts and returns a new transaction.
func (m *Mapping) Begin(offset int64, length uintptr) (*transaction.Tx, error) {
if m.memory == nil {
return nil, ErrClosed
}
if !m.writable {
return nil, ErrReadOnly
}
return transaction.Begin(m.memory, offset, length)
} | mmap/mmap.go | 0.76986 | 0.528168 | mmap.go | starcoder |
package types
// An object that defines a message that contains text formatted using Amazon
// Pinpoint Voice Instructions markup.
type CallInstructionsMessageType struct {

	// The message text to deliver, formatted using Amazon Pinpoint Voice
	// Instructions markup. (The generated comment previously described the
	// language field by mistake.)
	Text *string
}

// An object that contains information about an event destination that sends data
// to Amazon CloudWatch Logs.
type CloudWatchLogsDestination struct {

	// The Amazon Resource Name (ARN) of an Amazon Identity and Access Management (IAM)
	// role that is able to write event data to an Amazon CloudWatch destination.
	IamRoleArn *string

	// The name of the Amazon CloudWatch Log Group that you want to record events in.
	LogGroupArn *string
}

// An object that defines an event destination.
type EventDestination struct {

	// An object that contains information about an event destination that sends data
	// to Amazon CloudWatch Logs.
	CloudWatchLogsDestination *CloudWatchLogsDestination

	// Indicates whether or not the event destination is enabled. If the event
	// destination is enabled, then Amazon Pinpoint sends response data to the
	// specified event destination.
	Enabled bool

	// An object that contains information about an event destination that sends data
	// to Amazon Kinesis Data Firehose.
	KinesisFirehoseDestination *KinesisFirehoseDestination

	// An array of EventDestination objects. Each EventDestination object includes ARNs
	// and other information that define an event destination.
	MatchingEventTypes []EventType

	// A name that identifies the event destination configuration.
	Name *string

	// An object that contains information about an event destination that sends data
	// to Amazon SNS.
	SnsDestination *SnsDestination
}

// An object that defines a single event destination. This is the writable
// counterpart of EventDestination (it carries no Name field).
type EventDestinationDefinition struct {

	// An object that contains information about an event destination that sends data
	// to Amazon CloudWatch Logs.
	CloudWatchLogsDestination *CloudWatchLogsDestination

	// Indicates whether or not the event destination is enabled. If the event
	// destination is enabled, then Amazon Pinpoint sends response data to the
	// specified event destination.
	Enabled bool

	// An object that contains information about an event destination that sends data
	// to Amazon Kinesis Data Firehose.
	KinesisFirehoseDestination *KinesisFirehoseDestination

	// An array of EventDestination objects. Each EventDestination object includes ARNs
	// and other information that define an event destination.
	MatchingEventTypes []EventType

	// An object that contains information about an event destination that sends data
	// to Amazon SNS.
	SnsDestination *SnsDestination
}
// An object that contains information about an event destination that sends data
// to Amazon Kinesis Data Firehose.
type KinesisFirehoseDestination struct {

	// The Amazon Resource Name (ARN) of an IAM role that can write data to an Amazon
	// Kinesis Data Firehose stream.
	DeliveryStreamArn *string

	// The Amazon Resource Name (ARN) of the Amazon Kinesis Data Firehose destination
	// that you want to use in the event destination.
	IamRoleArn *string
}

// An object that defines a message that contains unformatted text.
type PlainTextMessageType struct {

	// The language to use when delivering the message. For a complete list of
	// supported languages, see the Amazon Polly Developer Guide.
	LanguageCode *string

	// The plain (not SSML-formatted) text to deliver to the recipient.
	Text *string

	// The name of the voice that you want to use to deliver the message. For a
	// complete list of supported voices, see the Amazon Polly Developer Guide.
	VoiceId *string
}

// An object that contains information about an event destination that sends data
// to Amazon SNS.
type SnsDestination struct {

	// The Amazon Resource Name (ARN) of the Amazon SNS topic that you want to publish
	// events to.
	TopicArn *string
}

// An object that defines a message that contains SSML-formatted text.
type SSMLMessageType struct {

	// The language to use when delivering the message. For a complete list of
	// supported languages, see the Amazon Polly Developer Guide.
	LanguageCode *string

	// The SSML-formatted text to deliver to the recipient.
	Text *string

	// The name of the voice that you want to use to deliver the message. For a
	// complete list of supported voices, see the Amazon Polly Developer Guide.
	VoiceId *string
}
// An object that contains a voice message and information about the recipient that
// you want to send it to.
type VoiceMessageContent struct {
// An object that defines a message that contains text formatted using Amazon
// Pinpoint Voice Instructions markup.
CallInstructionsMessage *CallInstructionsMessageType
// An object that defines a message that contains unformatted text.
PlainTextMessage *PlainTextMessageType
// An object that defines a message that contains SSML-formatted text.
SSMLMessage *SSMLMessageType
} | service/pinpointsmsvoice/types/types.go | 0.716119 | 0.565659 | types.go | starcoder |
package bitfield
import "encoding/hex"
// Bitfield provides operations for reading and manipulating bits in group of bytes.
// Bit 0 is the most significant bit of the first byte; unused bits in the
// last byte are kept cleared.
type Bitfield struct {
	b      []byte // backing storage, (length+7)/8 bytes
	length uint32 // number of addressable bits
}
// New creates a new empty Bitfield of length bits, all cleared.
func New(length uint32) *Bitfield {
	nBytes := (length + 7) / 8
	return &Bitfield{b: make([]byte, nBytes), length: length}
}
// NewBytes returns a new Bitfield from bytes.
// Bytes in b are not copied (the Bitfield aliases b). Unused bits in last
// byte are cleared in place. Panics if b is not big enough to hold
// "length" bits.
func NewBytes(b []byte, length uint32) *Bitfield {
	nBytes, nLastBits := calcSize(length)
	if uint32(len(b)) < nBytes {
		panic("not enough bytes in slice for specified length")
	}
	// Zero the trailing bits of the last byte so Hex/Count stay consistent.
	if nLastBits != 0 {
		b[len(b)-1] &= ^(0xff >> nLastBits)
	}
	return &Bitfield{b[:nBytes], length}
}
// calcSize calculates the number of bytes that is required to store length bits
// and the number of valid bits in last byte (0 means the last byte is full).
func calcSize(length uint32) (nBytes, nLastBits uint32) {
	nBytes, nLastBits = divMod32(length, 8)
	lastByteIncomplete := nLastBits != 0
	if lastByteIncomplete {
		nBytes++ // round up for the partial trailing byte
	}
	return
}
// Bytes returns bytes in b. If you modify the returned slice the bits in b are modified too.
func (b *Bitfield) Bytes() []byte { return b.b }

// Len returns the number of bits as given to New.
func (b *Bitfield) Len() uint32 { return b.length }

// Hex returns bytes as string. If not all the bits in last byte are used, they encode as not set.
func (b *Bitfield) Hex() string { return hex.EncodeToString(b.b) }
// Set sets bit i to 1. Bit 0 is the most significant bit of the first
// byte. Panics if i >= b.Len().
func (b *Bitfield) Set(i uint32) {
	b.checkIndex(i)
	byteIndex, bitIndex := divMod32(i, 8)
	b.b[byteIndex] |= 1 << (7 - bitIndex)
}
// SetTo sets bit i to the given value. Panics if i >= b.Len().
func (b *Bitfield) SetTo(i uint32, value bool) {
	b.checkIndex(i)
	if !value {
		b.Clear(i)
		return
	}
	b.Set(i)
}
// Clear turns off bit i. Bit 0 is the most significant bit.
// Panics if i >= b.Len().
func (b *Bitfield) Clear(i uint32) {
	b.checkIndex(i)
	byteIdx, bitIdx := i/8, i%8
	// AND NOT clears the target bit and leaves the rest untouched.
	b.b[byteIdx] &^= 1 << (7 - bitIdx)
}
// FirstSet returns the index of the first set bit at or after start.
// The boolean result is false when no set bit exists in that range.
func (b *Bitfield) FirstSet(start uint32) (uint32, bool) {
	for i, n := start, b.length; i < n; i++ {
		if b.Test(i) {
			return i, true
		}
	}
	return 0, false
}
// FirstClear returns the index of the first unset bit at or after start.
// The boolean result is false when every bit in that range is set.
func (b *Bitfield) FirstClear(start uint32) (uint32, bool) {
	for i, n := start, b.length; i < n; i++ {
		if !b.Test(i) {
			return i, true
		}
	}
	return 0, false
}
// ClearAll clears every bit in the bitfield.
func (b *Bitfield) ClearAll() {
	for i := 0; i < len(b.b); i++ {
		b.b[i] = 0
	}
}
// Test reports whether bit i is set. Bit 0 is the most significant bit.
// Panics if i >= b.Len().
func (b *Bitfield) Test(i uint32) bool {
	b.checkIndex(i)
	byteIdx, bitIdx := i/8, i%8
	mask := byte(1) << (7 - bitIdx)
	return b.b[byteIdx]&mask != 0
}
// countCache[v] is the number of set bits (population count) of the byte v,
// precomputed so Count can sum a byte at a time.
var countCache = [256]byte{
	0, 1, 1, 2, 1, 2, 2, 3, 1, 2, 2, 3, 2, 3, 3, 4,
	1, 2, 2, 3, 2, 3, 3, 4, 2, 3, 3, 4, 3, 4, 4, 5,
	1, 2, 2, 3, 2, 3, 3, 4, 2, 3, 3, 4, 3, 4, 4, 5,
	2, 3, 3, 4, 3, 4, 4, 5, 3, 4, 4, 5, 4, 5, 5, 6,
	1, 2, 2, 3, 2, 3, 3, 4, 2, 3, 3, 4, 3, 4, 4, 5,
	2, 3, 3, 4, 3, 4, 4, 5, 3, 4, 4, 5, 4, 5, 5, 6,
	2, 3, 3, 4, 3, 4, 4, 5, 3, 4, 4, 5, 4, 5, 5, 6,
	3, 4, 4, 5, 4, 5, 5, 6, 4, 5, 5, 6, 5, 6, 6, 7,
	1, 2, 2, 3, 2, 3, 3, 4, 2, 3, 3, 4, 3, 4, 4, 5,
	2, 3, 3, 4, 3, 4, 4, 5, 3, 4, 4, 5, 4, 5, 5, 6,
	2, 3, 3, 4, 3, 4, 4, 5, 3, 4, 4, 5, 4, 5, 5, 6,
	3, 4, 4, 5, 4, 5, 5, 6, 4, 5, 5, 6, 5, 6, 6, 7,
	2, 3, 3, 4, 3, 4, 4, 5, 3, 4, 4, 5, 4, 5, 5, 6,
	3, 4, 4, 5, 4, 5, 5, 6, 4, 5, 5, 6, 5, 6, 6, 7,
	3, 4, 4, 5, 4, 5, 5, 6, 4, 5, 5, 6, 5, 6, 6, 7,
	4, 5, 5, 6, 5, 6, 6, 7, 5, 6, 6, 7, 6, 7, 7, 8,
}
// Count returns the number of set bits in the bitfield. Unused bits in the
// last byte are kept zero by construction, so a per-byte popcount suffices.
func (b *Bitfield) Count() uint32 {
	var total uint32
	for i := 0; i < len(b.b); i++ {
		total += uint32(countCache[b.b[i]])
	}
	return total
}
// All reports whether every bit in the bitfield is set.
func (b *Bitfield) All() bool { return b.Count() == b.length }
// checkIndex panics when i lies outside [0, b.Len()).
func (b *Bitfield) checkIndex(i uint32) {
	if i < b.Len() {
		return
	}
	panic("index out of bound")
}
func divMod32(a, b uint32) (uint32, uint32) { return a / b, a % b } | vendor/github.com/cenk/bitfield/bitfield.go | 0.794982 | 0.560373 | bitfield.go | starcoder |
package main
import (
"log"
"math/rand"
"time"
. "github.com/jakecoffman/cp"
"github.com/jakecoffman/cp/examples"
)
var (
	// tankBody is the tank's dynamic body; tankControlBody is a kinematic
	// body the tank is jointed to and steered through (see main and update).
	tankBody, tankControlBody *Body
)

const (
	// Screen dimensions and their halves, in simulation units.
	width   = 640
	height  = 480
	hwidth  = width / 2
	hheight = height / 2
)
// main sets up a bordered space full of boxes plus a joint-controlled tank,
// then hands the simulation loop to the examples runner.
func main() {
	log.SetFlags(log.LstdFlags | log.Lshortfile)
	rand.Seed(time.Now().Unix())

	space := NewSpace()
	//space.Iterations = 10
	// Let idle bodies fall asleep after half a second.
	space.SleepTimeThreshold = 0.5

	// Endpoint pairs for the four border segments of the screen.
	sides := []Vector{
		{-hwidth, -hheight}, {-hwidth, hheight},
		{hwidth, -hheight}, {hwidth, hheight},
		{-hwidth, -hheight}, {hwidth, -hheight},
		{-hwidth, hheight}, {hwidth, hheight},
	}

	// Each consecutive pair of points forms one static wall segment.
	for i := 0; i < len(sides); i += 2 {
		var seg *Shape
		seg = space.AddShape(NewSegment(space.StaticBody, sides[i], sides[i+1], 0))
		seg.SetElasticity(1)
		seg.SetFriction(1)
		seg.SetFilter(examples.NotGrabbableFilter)
	}

	// Scatter boxes; joints against the static body emulate surface friction.
	for i := 0; i < 50; i++ {
		body := addBox(space, 20, 1)

		pivot := space.AddConstraint(NewPivotJoint2(space.StaticBody, body, Vector{}, Vector{}))
		pivot.SetMaxBias(0)       // disable joint correction
		pivot.SetMaxForce(1000.0) // emulate linear friction

		gear := space.AddConstraint(NewGearJoint(space.StaticBody, body, 0.0, 1.0))
		gear.SetMaxBias(0)
		gear.SetMaxForce(5000.0) // emulate angular friction
	}

	// We joint the tank to the control body and control the tank indirectly by modifying the control body.
	tankControlBody = space.AddBody(NewKinematicBody())
	tankBody = addBox(space, 30, 10)

	pivot := space.AddConstraint(NewPivotJoint2(tankControlBody, tankBody, Vector{}, Vector{}))
	pivot.SetMaxBias(0)
	pivot.SetMaxForce(10000)

	gear := space.AddConstraint(NewGearJoint(tankControlBody, tankBody, 0.0, 1.0))
	gear.SetErrorBias(0) // attempt to fully correct the joint each step
	gear.SetMaxBias(1.2)
	gear.SetMaxForce(50000)

	examples.Main(space, 1.0/60.0, update, examples.DefaultDraw)
}
// addBox adds a size x size dynamic box of the given mass to the space at a
// random position that keeps the whole box on screen, and returns its body.
func addBox(space *Space, size, mass float64) *Body {
	// Half-diagonal of the box, used as the spawn margin from the edges.
	radius := (&Vector{size, size}).Length()

	body := space.AddBody(NewBody(mass, MomentForBox(mass, size, size)))
	body.SetPosition(Vector{rand.Float64()*(width-2*radius) - (hwidth - radius), rand.Float64()*(height-2*radius) - (hheight - radius)})

	shape := space.AddShape(NewBox(body, size, size, 0))
	shape.SetElasticity(0)
	shape.SetFriction(0.7)
	return body
}
// update is the per-frame callback: it points the tank at the mouse, drives
// the kinematic control body toward it, and steps the simulation.
func update(space *Space, dt float64) {
	// turn the control body based on the angle relative to the actual body
	mouseDelta := examples.Mouse.Sub(tankBody.Position())
	turn := tankBody.Rotation().Unrotate(mouseDelta).ToAngle()
	tankControlBody.SetAngle(tankBody.Angle() - turn)

	// drive the tank towards the mouse
	if examples.Mouse.Near(tankBody.Position(), 30.0) {
		tankControlBody.SetVelocityVector(Vector{}) // stop
	} else {
		var direction float64
		// Move forward when the mouse is ahead of the tank, backward otherwise.
		if mouseDelta.Dot(tankBody.Rotation()) > 0.0 {
			direction = 1.0
		} else {
			direction = -1.0
		}
		tankControlBody.SetVelocityVector(tankBody.Rotation().Rotate(Vector{30.0 * direction, 0.0}))
	}
	space.Step(dt)
}
package operator
import (
"fmt"
"math"
"github.com/chewxy/math32"
"gorgonia.org/gorgonia/internal/exprgraph"
)
// Scalar unary operator implementations, wrapping math/math32 functions (or
// the package-local _xxx helpers) as sf64UnaryOperator/sf32UnaryOperator.
var (
	/* float64 */

	// non differentiable
	absf64   = sf64UnaryOperator(math.Abs)
	signf64  = sf64UnaryOperator(_signf64)
	ceilf64  = sf64UnaryOperator(math.Ceil)
	floorf64 = sf64UnaryOperator(math.Floor)

	// differentiable
	sinf64         = sf64UnaryOperator(math.Sin)
	cosf64         = sf64UnaryOperator(math.Cos)
	expf64         = sf64UnaryOperator(math.Exp)
	lnf64          = sf64UnaryOperator(math.Log)
	log2f64        = sf64UnaryOperator(math.Log2)
	negf64         = sf64UnaryOperator(_negf64)
	squaref64      = sf64UnaryOperator(_squaref64)
	sqrtf64        = sf64UnaryOperator(math.Sqrt)
	inversef64     = sf64UnaryOperator(_inversef64)
	inverseSqrtf64 = sf64UnaryOperator(_inverseSqrtf64)

	// activation functions
	cubef64    = sf64UnaryOperator(_cubef64)
	tanhf64    = sf64UnaryOperator(_tanhf64)
	sigmoidf64 = sf64UnaryOperator(_sigmoidf64)

	// numerical stabilization optimization
	log1pf64    = sf64UnaryOperator(math.Log1p)
	expm1f64    = sf64UnaryOperator(math.Expm1)
	softplusf64 = sf64UnaryOperator(_softplusf64)

	// softplus isn't necessarily only a numerical stabilization op
	// (you can use it elsewhere), but I included it under numerical optimization

	/* Float32 */

	// non differentiable
	absf32   = sf32UnaryOperator(math32.Abs)
	signf32  = sf32UnaryOperator(_signf32)
	ceilf32  = sf32UnaryOperator(math32.Ceil)
	floorf32 = sf32UnaryOperator(math32.Floor)

	// start differentiable
	sinf32         = sf32UnaryOperator(math32.Sin)
	cosf32         = sf32UnaryOperator(math32.Cos)
	expf32         = sf32UnaryOperator(math32.Exp)
	lnf32          = sf32UnaryOperator(math32.Log)
	log2f32        = sf32UnaryOperator(math32.Log2)
	negf32         = sf32UnaryOperator(_negf32)
	squaref32      = sf32UnaryOperator(_squaref32)
	sqrtf32        = sf32UnaryOperator(math32.Sqrt)
	inversef32     = sf32UnaryOperator(_inversef32)
	inverseSqrtf32 = sf32UnaryOperator(_inverseSqrtf32)

	// typically used in activation functions
	cubef32    = sf32UnaryOperator(_cubef32)
	tanhf32    = sf32UnaryOperator(_tanhf32)
	sigmoidf32 = sf32UnaryOperator(_sigmoidf32)

	// numerical stabilization optimization
	log1pf32    = sf32UnaryOperator(math32.Log1p)
	expm1f32    = sf32UnaryOperator(math32.Expm1)
	softplusf32 = sf32UnaryOperator(_softplusf32)
)
// ʘUnaryOperatorType enumerates the supported pointwise unary operators.
// Its values index the parallel tables below (ʘUnaryOpStrs,
// ʘUnaryOpDifferentiable, ʘUnaryOpDiffExprs, ʘUnaryOpDiffFns, and the
// sf64/sf32 operator arrays), so all of them must stay in the same order.
type ʘUnaryOperatorType byte

const (
	absOpType ʘUnaryOperatorType = iota
	signOpType
	ceilOpType
	floorOpType

	// start differentiable
	sinOpType
	cosOpType
	expOpType
	lnOpType
	log2OpType
	negOpType
	squareOpType
	sqrtOpType
	inverseOpType     // multiplicative inverse
	inverseSqrtOpType // 1/sqrt(x)

	// typically used in activation functions
	cubeOpType
	tanhOpType
	sigmoidOpType

	// optimization related
	log1pOpType
	expm1OpType
	softplusOpType

	maxʘUnaryOperator // delimits end of all possible unary ops
)
// String implements fmt.Stringer, returning the operator's short name, or a
// diagnostic string for out-of-range values.
func (u ʘUnaryOperatorType) String() string {
	if u < maxʘUnaryOperator {
		return ʘUnaryOpStrs[u]
	}
	return fmt.Sprintf("UNSUPPORTED UNARY OPERATOR (%d); max: %d", u, maxʘUnaryOperator)
}
// ʘUnaryOpStrs is the string representation for a unaryOpType.
// It is indexed by ʘUnaryOperatorType and should be held constant.
var ʘUnaryOpStrs = [maxʘUnaryOperator]string{
	"abs", "sign", "ceil", "floor",
	"sin", "cos", "exp",
	"ln", "log2", "neg", "square", "sqrt",

	"inv", "invSqrt",

	"cube", "tanh", "sigmoid",

	"log1p", "expm1", "softplus",
}

// ʘUnaryOpDifferentiable is the array of whether a unary operator is differentiable.
// It is indexed by ʘUnaryOperatorType and should be held constant.
var ʘUnaryOpDifferentiable = [maxʘUnaryOperator]bool{
	true, false, false, false,
	true, true, true,
	true, true, true, true, true,
	true, true,
	true, true, true,
	true, true, true,
}

// ʘUnaryOpDiffExprs maps each operator to the function that builds its
// symbolic derivative expression; non-differentiable ops get
// nondiffUnaryOpExpr.
var ʘUnaryOpDiffExprs = [maxʘUnaryOperator]func(x, y, gradY *exprgraph.Node) (*exprgraph.Node, error){
	absDiffExpr, nondiffUnaryOpExpr, nondiffUnaryOpExpr, nondiffUnaryOpExpr,
	sinDiffExpr, cosDiffExpr, expDiffExpr,
	lnDiffExpr, log2DiffExpr, negDiffExpr, squareDiffExpr, sqrtDiffExpr,
	inverseDiffExpr, inverseSqrtDiffExpr, cubeDiffExpr, tanhDiffExpr, sigmoidDiffExpr,
	log1pDiffExpr, expm1DiffExpr, softplusDiffExpr,
}

// ʘUnaryOpDiffFns maps each operator to its in-place differentiation
// function; non-differentiable ops get nondiffUnaryOp.
var ʘUnaryOpDiffFns = [maxʘUnaryOperator]func(x, y *exprgraph.Node) error{
	absDiff, nondiffUnaryOp, nondiffUnaryOp, nondiffUnaryOp,
	sinDiff, cosDiff, expDiff,
	lnDiff, log2Diff, negDiff, squareDiff, sqrtDiff,
	inverseDiff, inverseSqrtDiff, cubeDiff, tanhDiff, sigmoidDiff,
	log1pDiff, expm1Diff, softplusDiff,
}
// sf64UnaryOperators maps each ʘUnaryOperatorType to its float64 scalar
// implementation; order must match the ʘUnaryOperatorType constants.
var sf64UnaryOperators = [maxʘUnaryOperator]*sf64UnaryOperator{
	&absf64,
	&signf64,
	&ceilf64,
	&floorf64,
	&sinf64,
	&cosf64,
	&expf64,
	&lnf64,
	&log2f64,
	&negf64,
	&squaref64,
	&sqrtf64,
	&inversef64,
	&inverseSqrtf64,
	&cubef64,
	&tanhf64,
	&sigmoidf64,
	&log1pf64,
	&expm1f64,
	&softplusf64,
}

// sf32UnaryOperators maps each ʘUnaryOperatorType to its float32 scalar
// implementation; order must match the ʘUnaryOperatorType constants.
var sf32UnaryOperators = [maxʘUnaryOperator]*sf32UnaryOperator{
	&absf32,
	&signf32,
	&ceilf32,
	&floorf32,
	&sinf32,
	&cosf32,
	&expf32,
	&lnf32,
	&log2f32,
	&negf32,
	&squaref32,
	&sqrtf32,
	&inversef32,
	&inverseSqrtf32,
	&cubef32,
	&tanhf32,
	&sigmoidf32,
	&log1pf32,
	&expm1f32,
	&softplusf32,
}
package main
import (
"fmt"
"log"
torch "github.com/wangkuiyi/gotorch"
nn "github.com/wangkuiyi/gotorch/nn"
F "github.com/wangkuiyi/gotorch/nn/functional"
"github.com/wangkuiyi/gotorch/nn/initializer"
"github.com/wangkuiyi/gotorch/vision/datasets"
"github.com/wangkuiyi/gotorch/vision/transforms"
)
var device torch.Device
// generator builds the DCGAN generator: a stack of transposed convolutions
// with batch norm and ReLU that expands an nz-channel 1x1 noise tensor into
// a single-channel image whose values are squashed to [-1, 1] by tanh.
func generator(nz int64) *nn.SequentialModule {
	return nn.Sequential(
		nn.ConvTranspose2d(nz, 256, 4, 1, 0, 0, 1, false, 1, "zero"),
		nn.BatchNorm2d(256, 1e-5, 0.1, true, true),
		nn.Functional(torch.Relu),
		nn.ConvTranspose2d(256, 128, 3, 2, 1, 0, 1, false, 1, "zero"),
		nn.BatchNorm2d(128, 1e-5, 0.1, true, true),
		nn.Functional(torch.Relu),
		nn.ConvTranspose2d(128, 64, 4, 2, 1, 0, 1, false, 1, "zero"),
		nn.BatchNorm2d(64, 1e-5, 0.1, true, true),
		nn.Functional(torch.Relu),
		nn.ConvTranspose2d(64, 1, 4, 2, 1, 0, 1, false, 1, "zero"),
		nn.Functional(torch.Tanh),
	)
}
// discriminator builds the DCGAN discriminator: strided convolutions with
// LeakyReLU(0.2) and batch norm that reduce a single-channel image to a
// one-channel score, squashed to (0, 1) by a final sigmoid.
func discriminator() *nn.SequentialModule {
	return nn.Sequential(
		nn.Conv2d(1, 64, 4, 2, 1, 1, 1, false, "zeros"),
		nn.Functional(func(in torch.Tensor) torch.Tensor { return torch.LeakyRelu(in, 0.2) }),
		nn.Conv2d(64, 128, 4, 2, 1, 1, 1, false, "zeros"),
		nn.BatchNorm2d(128, 1e-5, 0.1, true, true),
		nn.Functional(func(in torch.Tensor) torch.Tensor { return torch.LeakyRelu(in, 0.2) }),
		nn.Conv2d(128, 256, 4, 2, 1, 1, 1, false, "zeros"),
		nn.BatchNorm2d(256, 1e-5, 0.1, true, true),
		nn.Functional(func(in torch.Tensor) torch.Tensor { return torch.LeakyRelu(in, 0.2) }),
		nn.Conv2d(256, 1, 3, 1, 0, 1, 1, false, "zeros"),
		nn.Functional(torch.Sigmoid),
	)
}
// main trains a DCGAN on MNIST: each step updates the discriminator on a
// real batch and a generated batch, then updates the generator to fool the
// discriminator, periodically saving sample images as checkpoints.
func main() {
	if torch.IsCUDAAvailable() {
		log.Println("CUDA is valid")
		device = torch.NewDevice("cuda")
	} else {
		log.Println("No CUDA found; CPU only")
		device = torch.NewDevice("cpu")
	}

	mnist := datasets.MNIST("",
		[]transforms.Transform{transforms.Normalize(0.5, 0.5)})

	nz := int64(100) // size of the generator's latent noise vector
	lr := 0.0002

	netG := generator(nz)
	netG.To(device)
	netD := discriminator()
	netD.To(device)

	optimizerD := torch.Adam(lr, 0.5, 0.5, 0.0)
	optimizerD.AddParameters(netD.Parameters())

	optimizerG := torch.Adam(lr, 0.5, 0.5, 0.0)
	optimizerG.AddParameters(netG.Parameters())

	epochs := 30
	checkpointStep := 1000
	checkpointCount := 1
	batchSize := int64(64)
	i := 0
	for epoch := 0; epoch < epochs; epoch++ {
		trainLoader := datasets.NewMNISTLoader(mnist, batchSize)
		for trainLoader.Scan() {
			// (1) update D network
			// train with real: labels smoothed into [0.8, 1.0]
			optimizerD.ZeroGrad()
			batch := trainLoader.Batch()
			data := batch.Data.CopyTo(device)
			label := torch.Empty([]int64{batch.Data.Shape()[0]}, false).CopyTo(device)
			initializer.Uniform(&label, 0.8, 1.0)

			output := netD.Forward(data).(torch.Tensor).View([]int64{-1, 1}).Squeeze(1)
			errDReal := F.BinaryCrossEntropy(output, label, torch.Tensor{}, "mean")
			errDReal.Backward()

			// train with fake: detach so G's weights are not updated here
			noise := torch.RandN([]int64{batch.Data.Shape()[0], nz, 1, 1}, false).CopyTo(device)
			fake := netG.Forward(noise).(torch.Tensor)
			initializer.Zeros(&label)
			output = netD.Forward(fake.Detach()).(torch.Tensor).View([]int64{-1, 1}).Squeeze(1)
			errDFake := F.BinaryCrossEntropy(output, label, torch.Tensor{}, "mean")
			errDFake.Backward()
			errD := errDReal.Item() + errDFake.Item()
			optimizerD.Step()

			// (2) update G network: labels are all ones ("real") so the
			// loss pushes G toward samples D classifies as real.
			optimizerG.ZeroGrad()
			initializer.Ones(&label)
			output = netD.Forward(fake).(torch.Tensor).View([]int64{-1, 1}).Squeeze(1)
			errG := F.BinaryCrossEntropy(output, label, torch.Tensor{}, "mean")
			errG.Backward()
			optimizerG.Step()

			fmt.Printf("[%d/%d][%d] D_Loss: %f G_Loss: %f\n",
				epoch, epochs, i, errD, errG.Item())

			// Periodically save a batch of generated samples.
			if i%checkpointStep == 0 {
				samples := netG.Forward(torch.RandN([]int64{10, nz, 1, 1}, false).CopyTo(device)).(torch.Tensor)
				ckName := fmt.Sprintf("dcgan-sample-%d.pt", checkpointCount)
				samples.Detach().Save(ckName)
				checkpointCount++
			}
			i++
		}
		trainLoader.Close()
	}
	mnist.Close()
	torch.FinishGC()
}
package utils
import (
"bytes"
"encoding/gob"
"fmt"
"github.com/pkg/errors"
"strings"
)
// DefaultWidth is the default character width used for screen dumps.
const DefaultWidth = 51

// boxLineOverheat is the per-line overhead (one border character on each
// side) consumed when a dump is drawn inside a box.
const boxLineOverheat = 1 + 1
// Dump dumps a 46 char wide hex string
func BoxedDump(name string, data []byte) string {
// we substract the 2 lines at the side
dumpWidth := DefaultWidth - boxLineOverheat
return string(BoxString(name, DumpFixedWidth(data, dumpWidth), DefaultWidth))
}
// Dump renders data as a hex dump DefaultWidth characters wide.
func Dump(data []byte) string {
	return DumpFixedWidth(data, DefaultWidth)
}
// BoxedDumpFixedWidth renders data as a hex dump inside a box of the given
// total character width, titled with name.
func BoxedDumpFixedWidth(name string, data []byte, charWidth int) string {
	// subtract the two border characters at the sides
	dumpWidth := charWidth - 1 - 1
	return string(BoxString(name, DumpFixedWidth(data, dumpWidth), charWidth))
}
// DumpAnything gob-encodes the given value and renders the encoding as a
// hex dump. Panics if the value cannot be gob-encoded.
func DumpAnything(anything interface{}) string {
	convertedBytes, err := toBytes(anything)
	if err != nil {
		panic(err)
	}
	return DumpFixedWidth(convertedBytes, 1)
}
// DumpFixedWidth renders data as a classic hex dump: each row shows a 3-digit
// byte offset, the bytes in hex, and a quoted printable-ASCII rendering.
// Widths below the minimum are widened so at least one byte fits per row.
func DumpFixedWidth(data []byte, charWidth int) string {
	if charWidth <= 0 {
		panic("charWidth needs to be greater than 0")
	}
	hexString := ""
	// 3 digits index plus one blank
	const indexWidth = 3 + 1
	// 2 hex digits + 2 blanks
	const byteWidth = 2 + 2
	// rendered strings are quoted, adding 2 chars
	const stringRenderOverheat = 2
	const minWidth = indexWidth + byteWidth + stringRenderOverheat + 1
	if charWidth < minWidth {
		charWidth = minWidth + 6
	}

	// Formula to calculate max bytes per row: each byte costs its hex cells
	// plus one character in the string column.
	maxBytesPerRow := ((charWidth - indexWidth - stringRenderOverheat) / (byteWidth + 1)) - 1
	for byteIndex, rowIndex := 0, 0; byteIndex < len(data); byteIndex, rowIndex = byteIndex+maxBytesPerRow, rowIndex+1 {
		hexString += fmt.Sprintf("%03d 0x: ", byteIndex)
		for columnIndex := 0; columnIndex < maxBytesPerRow; columnIndex++ {
			absoluteIndex := byteIndex + columnIndex
			if absoluteIndex < len(data) {
				hexString += fmt.Sprintf("%02x  ", data[absoluteIndex])
			} else {
				// align with empty byte representation
				hexString += strings.Repeat(" ", byteWidth)
			}
		}
		endIndex := byteIndex + maxBytesPerRow
		if endIndex >= len(data) {
			endIndex = len(data)
		}
		stringRepresentation := maskString(data[byteIndex:endIndex])
		if len([]rune(stringRepresentation)) < maxBytesPerRow {
			// pad the final partial row so the closing quotes line up
			stringRepresentation += strings.Repeat(" ", (maxBytesPerRow-len([]rune(stringRepresentation)))%maxBytesPerRow)
		}
		hexString += fmt.Sprintf("'%s'\n", stringRepresentation)
	}
	// remove last newline
	return hexString[:len(hexString)-1]
}
// maskString renders data as printable ASCII: bytes outside the printable
// range (32..126) are shown as '.'. The input slice is not modified —
// callers (e.g. DumpFixedWidth) pass sub-slices of the caller's own data,
// and the original in-place rewrite destructively altered that data.
func maskString(data []byte) string {
	masked := make([]byte, len(data))
	for i, c := range data {
		if c < 32 || c > 126 {
			masked[i] = '.'
		} else {
			masked[i] = c
		}
	}
	return string(masked)
}
func toBytes(anything interface{}) ([]byte, error) {
var buffer bytes.Buffer
err := gob.NewEncoder(&buffer).Encode(anything)
if err != nil {
return nil, errors.Wrap(err, "error encoding datatype")
}
return buffer.Bytes(), nil
} | plc4go/internal/plc4go/spi/utils/hex.go | 0.603114 | 0.443239 | hex.go | starcoder |
package pgsql
import (
"database/sql"
"database/sql/driver"
)
// HStoreArrayFromStringMapSlice returns a driver.Valuer that produces a PostgreSQL hstore[] from the given Go []map[string]string.
func HStoreArrayFromStringMapSlice(val []map[string]string) driver.Valuer {
	return hstoreArrayFromStringMapSlice{val: val}
}

// HStoreArrayToStringMapSlice returns an sql.Scanner that converts a PostgreSQL hstore[] into a Go []map[string]string and sets it to val.
func HStoreArrayToStringMapSlice(val *[]map[string]string) sql.Scanner {
	return hstoreArrayToStringMapSlice{val: val}
}

// HStoreArrayFromStringPtrMapSlice returns a driver.Valuer that produces a PostgreSQL hstore[] from the given Go []map[string]*string.
// Nil pointer values encode as hstore NULL values.
func HStoreArrayFromStringPtrMapSlice(val []map[string]*string) driver.Valuer {
	return hstoreArrayFromStringPtrMapSlice{val: val}
}

// HStoreArrayToStringPtrMapSlice returns an sql.Scanner that converts a PostgreSQL hstore[] into a Go []map[string]*string and sets it to val.
// Hstore NULL values decode as nil pointers.
func HStoreArrayToStringPtrMapSlice(val *[]map[string]*string) sql.Scanner {
	return hstoreArrayToStringPtrMapSlice{val: val}
}

// HStoreArrayFromNullStringMapSlice returns a driver.Valuer that produces a PostgreSQL hstore[] from the given Go []map[string]sql.NullString.
func HStoreArrayFromNullStringMapSlice(val []map[string]sql.NullString) driver.Valuer {
	return hstoreArrayFromNullStringMapSlice{val: val}
}

// HStoreArrayToNullStringMapSlice returns an sql.Scanner that converts a PostgreSQL hstore[] into a Go []map[string]sql.NullString and sets it to val.
func HStoreArrayToNullStringMapSlice(val *[]map[string]sql.NullString) sql.Scanner {
	return hstoreArrayToNullStringMapSlice{val: val}
}
// hstoreArrayFromStringMapSlice adapts []map[string]string to driver.Valuer.
type hstoreArrayFromStringMapSlice struct {
	val []map[string]string
}

// Value produces a PostgreSQL hstore[] literal. A nil slice maps to SQL
// NULL, a nil map element to a NULL array element, and each map to a
// double-quoted hstore literal of key=>value pairs.
func (v hstoreArrayFromStringMapSlice) Value() (driver.Value, error) {
	if v.val == nil {
		return nil, nil
	} else if len(v.val) == 0 {
		return []byte{'{', '}'}, nil
	}

	out := []byte{'{'}
	for _, m := range v.val {
		if m == nil {
			out = append(out, 'N', 'U', 'L', 'L', ',')
			continue
		}

		i, size := 0, 0
		pairs := make([][]byte, len(m))
		for key, val := range m {
			// len(`\"\"=>\"\"`) == 10
			pair := make([]byte, 0, 10+len(key)+len(val))
			pair = pgAppendQuote2(pair, []byte(key))
			pair = append(pair, '=', '>')
			pair = pgAppendQuote2(pair, []byte(val))

			pairs[i] = pair
			i += 1
			size += len(pair) + 1
		}

		var hstore []byte
		if size == 0 {
			// empty map: encode as an empty quoted hstore literal
			hstore = []byte{'"', '"'}
		} else {
			hstore = make([]byte, 1, size+1)
			hstore[0] = '"'
			for _, pair := range pairs {
				hstore = append(hstore, pair...)
				hstore = append(hstore, ',')
			}
			// replace the trailing "," with the closing quote
			hstore[len(hstore)-1] = '"'
		}

		out = append(out, hstore...)
		out = append(out, ',')
	}
	out[len(out)-1] = '}' // replace last "," with "}"
	return out, nil
}
// hstoreArrayToStringMapSlice adapts *[]map[string]string to sql.Scanner.
type hstoreArrayToStringMapSlice struct {
	val *[]map[string]string
}

// Scan decodes a PostgreSQL hstore[] into *v.val. SQL NULL leaves the
// destination untouched; NULL array elements become nil maps; NULL hstore
// values decode as the empty string (string cannot represent NULL).
func (v hstoreArrayToStringMapSlice) Scan(src interface{}) error {
	data, err := srcbytes(src)
	if err != nil {
		return err
	} else if data == nil {
		return nil
	}

	elems := pgparsehstorearr(data)
	hashes := make([]map[string]string, len(elems))
	for i := 0; i < len(elems); i++ {
		pairs := elems[i]
		if pairs == nil {
			// NULL array element: keep the nil map
			continue
		}

		hash := make(map[string]string)
		for j := 0; j < len(pairs); j++ {
			if value := pairs[j][1]; value != nil {
				hash[string(pairs[j][0])] = string(value)
			} else {
				hash[string(pairs[j][0])] = ""
			}
		}
		hashes[i] = hash
	}
	*v.val = hashes
	return nil
}
// hstoreArrayFromStringPtrMapSlice adapts []map[string]*string to driver.Valuer.
type hstoreArrayFromStringPtrMapSlice struct {
	val []map[string]*string
}

// Value produces a PostgreSQL hstore[] literal. A nil slice maps to SQL
// NULL, a nil map element to a NULL array element, and nil *string values
// to hstore NULLs.
func (v hstoreArrayFromStringPtrMapSlice) Value() (driver.Value, error) {
	if v.val == nil {
		return nil, nil
	} else if len(v.val) == 0 {
		return []byte{'{', '}'}, nil
	}

	out := []byte{'{'}
	for _, m := range v.val {
		if m == nil {
			out = append(out, 'N', 'U', 'L', 'L', ',')
			continue
		}

		i, size := 0, 0
		pairs := make([][]byte, len(m))
		for key, val := range m {
			if val == nil {
				// nil pointer: emit key=>NULL
				// len(`\"\"=>NULL`) == 10
				pair := make([]byte, 0, 10+len(key))
				pair = pgAppendQuote2(pair, []byte(key))
				pair = append(pair, '=', '>', 'N', 'U', 'L', 'L')

				pairs[i] = pair
				i += 1
				size += len(pair) + 1
				continue
			}

			// len(`\"\"=>\"\"`) == 10
			pair := make([]byte, 0, 10+len(key)+len(*val))
			pair = pgAppendQuote2(pair, []byte(key))
			pair = append(pair, '=', '>')
			pair = pgAppendQuote2(pair, []byte(*val))

			pairs[i] = pair
			i += 1
			size += len(pair) + 1
		}

		var hstore []byte
		if size == 0 {
			// empty map: encode as an empty quoted hstore literal
			hstore = []byte{'"', '"'}
		} else {
			hstore = make([]byte, 1, size+1)
			hstore[0] = '"'
			for _, pair := range pairs {
				hstore = append(hstore, pair...)
				hstore = append(hstore, ',')
			}
			// replace the trailing "," with the closing quote
			hstore[len(hstore)-1] = '"'
		}

		out = append(out, hstore...)
		out = append(out, ',')
	}
	out[len(out)-1] = '}' // replace last "," with "}"
	return out, nil
}
// hstoreArrayToStringPtrMapSlice adapts *[]map[string]*string to sql.Scanner.
type hstoreArrayToStringPtrMapSlice struct {
	val *[]map[string]*string
}

// Scan decodes a PostgreSQL hstore[] into *v.val. SQL NULL leaves the
// destination untouched; NULL array elements become nil maps; NULL hstore
// values decode as nil pointers.
func (v hstoreArrayToStringPtrMapSlice) Scan(src interface{}) error {
	data, err := srcbytes(src)
	if err != nil {
		return err
	} else if data == nil {
		return nil
	}

	elems := pgparsehstorearr(data)
	hashes := make([]map[string]*string, len(elems))
	for i := 0; i < len(elems); i++ {
		pairs := elems[i]
		if pairs == nil {
			// NULL array element: keep the nil map
			continue
		}

		hash := make(map[string]*string)
		for j := 0; j < len(pairs); j++ {
			if value := pairs[j][1]; value != nil {
				str := string(value)
				hash[string(pairs[j][0])] = &str
			} else {
				hash[string(pairs[j][0])] = nil
			}
		}
		hashes[i] = hash
	}
	*v.val = hashes
	return nil
}
// hstoreArrayFromNullStringMapSlice adapts []map[string]sql.NullString to driver.Valuer.
type hstoreArrayFromNullStringMapSlice struct {
	val []map[string]sql.NullString
}

// Value produces a PostgreSQL hstore[] literal. A nil slice maps to SQL
// NULL, a nil map element to a NULL array element, and invalid NullStrings
// to hstore NULLs.
func (v hstoreArrayFromNullStringMapSlice) Value() (driver.Value, error) {
	if v.val == nil {
		return nil, nil
	} else if len(v.val) == 0 {
		return []byte{'{', '}'}, nil
	}

	out := []byte{'{'}
	for _, m := range v.val {
		if m == nil {
			out = append(out, 'N', 'U', 'L', 'L', ',')
			continue
		}

		i, size := 0, 0
		pairs := make([][]byte, len(m))
		for key, val := range m {
			if !val.Valid {
				// invalid NullString: emit key=>NULL
				// len(`\"\"=>NULL`) == 10
				pair := make([]byte, 0, 10+len(key))
				pair = pgAppendQuote2(pair, []byte(key))
				pair = append(pair, '=', '>', 'N', 'U', 'L', 'L')

				pairs[i] = pair
				i += 1
				size += len(pair) + 1
				continue
			}

			// len(`\"\"=>\"\"`) == 10
			pair := make([]byte, 0, 10+len(key)+len(val.String))
			pair = pgAppendQuote2(pair, []byte(key))
			pair = append(pair, '=', '>')
			pair = pgAppendQuote2(pair, []byte(val.String))

			pairs[i] = pair
			i += 1
			size += len(pair) + 1
		}

		var hstore []byte
		if size == 0 {
			// empty map: encode as an empty quoted hstore literal
			hstore = []byte{'"', '"'}
		} else {
			hstore = make([]byte, 1, size+1)
			hstore[0] = '"'
			for _, pair := range pairs {
				hstore = append(hstore, pair...)
				hstore = append(hstore, ',')
			}
			// replace the trailing "," with the closing quote
			hstore[len(hstore)-1] = '"'
		}

		out = append(out, hstore...)
		out = append(out, ',')
	}
	out[len(out)-1] = '}' // replace last "," with "}"
	return out, nil
}
// hstoreArrayToNullStringMapSlice adapts *[]map[string]sql.NullString to sql.Scanner.
type hstoreArrayToNullStringMapSlice struct {
	val *[]map[string]sql.NullString
}

// Scan decodes a PostgreSQL hstore[] into *v.val. SQL NULL leaves the
// destination untouched; NULL array elements become nil maps; NULL hstore
// values decode as invalid NullStrings.
func (v hstoreArrayToNullStringMapSlice) Scan(src interface{}) error {
	data, err := srcbytes(src)
	if err != nil {
		return err
	} else if data == nil {
		return nil
	}

	elems := pgparsehstorearr(data)
	hashes := make([]map[string]sql.NullString, len(elems))
	for i := 0; i < len(elems); i++ {
		pairs := elems[i]
		if pairs == nil {
			// NULL array element: keep the nil map
			continue
		}

		hash := make(map[string]sql.NullString)
		for j := 0; j < len(pairs); j++ {
			if value := pairs[j][1]; value != nil {
				hash[string(pairs[j][0])] = sql.NullString{String: string(value), Valid: true}
			} else {
				hash[string(pairs[j][0])] = sql.NullString{String: "", Valid: false}
			}
		}
		hashes[i] = hash
	}
	*v.val = hashes
	return nil
}
package examples
import (
"io"
"os"
"path"
"github.com/kva3umoda/goecharts"
"github.com/kva3umoda/goecharts/model"
"github.com/kva3umoda/goecharts/render"
)
// lineBasicLineChart builds a single-series line chart of values over
// weekday categories.
func lineBasicLineChart() *goecharts.Charts {
	weeks := []string{"Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"}
	value := []int32{150, 230, 224, 218, 135, 147, 260}

	charts := goecharts.NewCharts("Basic Line Chart")
	charts.Dataset().
		AddDataColumnString("weekday", weeks).
		AddDataColumnInt("value", value)

	charts.AddChart2D("", model.AxisTypeCategory, model.AxisTypeValue).
		AddSeriesLine("line", "weekday", "value")
	return charts
}
// lineSmoothedLineChart is the basic line chart with curve smoothing enabled.
func lineSmoothedLineChart() *goecharts.Charts {
	weeks := []string{"Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"}
	value := []int32{150, 230, 224, 218, 135, 147, 260}

	charts := goecharts.NewCharts("Smoothed Line Chart")
	charts.Dataset().
		AddDataColumnString("weekday", weeks).
		AddDataColumnInt("value", value)

	charts.AddChart2D("", model.AxisTypeCategory, model.AxisTypeValue).
		AddSeriesLine("line", "weekday", "value").
		Smooth(true)
	return charts
}
// lineBasicAreaChart builds a single-series line chart with the area under
// the line filled.
func lineBasicAreaChart() *goecharts.Charts {
	weeks := []string{"Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"}
	value := []int32{150, 230, 224, 218, 135, 147, 260}

	charts := goecharts.NewCharts("Basic Area Chart")
	charts.Dataset().
		AddDataColumnString("weekday", weeks).
		AddDataColumnInt("value", value)

	chart2d := charts.AddChart2D("", model.AxisTypeCategory, model.AxisTypeValue)
	chart2d.YAxis().BoundaryGap(true)
	chart2d.AddSeriesLine("line", "weekday", "value").
		Area(true)
	return charts
}
// lineStackedLineChart builds a five-series stacked line chart with a
// crosshair-style axis tooltip.
func lineStackedLineChart() *goecharts.Charts {
	weeks := []string{"Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"}
	valueEmail := []int32{120, 132, 101, 134, 90, 230, 210}
	valueUnionAds := []int32{220, 182, 191, 234, 290, 330, 310}
	valueVideoAds := []int32{150, 232, 201, 154, 190, 330, 410}
	valueDirect := []int32{320, 332, 301, 334, 390, 330, 320}
	valueSearchEngine := []int32{820, 932, 901, 934, 1290, 1330, 1320}

	charts := goecharts.NewCharts("Stacked Line Chart").
		ToolTip("axis", model.PointerTypeCross)
	charts.Dataset().
		AddDataColumnString("weekday", weeks).
		AddDataColumnInt("email", valueEmail).
		AddDataColumnInt("unionAds", valueUnionAds).
		AddDataColumnInt("videoAds", valueVideoAds).
		AddDataColumnInt("direct", valueDirect).
		AddDataColumnInt("searchEngine", valueSearchEngine)

	// NOTE: the tooltip is configured once in the constructor chain above;
	// the original repeated the identical ToolTip call here redundantly.
	chart2d := charts.AddChart2D("", model.AxisTypeCategory, model.AxisTypeValue)
	chart2d.AddSeriesLine("email", "weekday", "email").Stack(true)
	chart2d.AddSeriesLine("unionAds", "weekday", "unionAds").Stack(true)
	chart2d.AddSeriesLine("videoAds", "weekday", "videoAds").Stack(true)
	chart2d.AddSeriesLine("direct", "weekday", "direct").Stack(true)
	chart2d.AddSeriesLine("searchEngine", "weekday", "searchEngine").Stack(true)
	return charts
}
// lineStackedAreaChart builds a five-series stacked area chart where
// hovering a series highlights it and blurs the rest of the coordinate
// system; the last series additionally shows value labels.
func lineStackedAreaChart() *goecharts.Charts {
	weeks := []string{"Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"}
	valueEmail := []int32{120, 132, 101, 134, 90, 230, 210}
	valueUnionAds := []int32{220, 182, 191, 234, 290, 330, 310}
	valueVideoAds := []int32{150, 232, 201, 154, 190, 330, 410}
	valueDirect := []int32{320, 332, 301, 334, 390, 330, 320}
	valueSearchEngine := []int32{820, 932, 901, 934, 1290, 1330, 1320}

	charts := goecharts.NewCharts("Stacked Area Chart").
		ToolTip("axis", model.PointerTypeCross)
	charts.Dataset().
		AddDataColumnString("weekday", weeks).
		AddDataColumnInt("email", valueEmail).
		AddDataColumnInt("unionAds", valueUnionAds).
		AddDataColumnInt("videoAds", valueVideoAds).
		AddDataColumnInt("direct", valueDirect).
		AddDataColumnInt("searchEngine", valueSearchEngine)

	// NOTE: the tooltip is configured once in the constructor chain above;
	// the original repeated the identical ToolTip call here redundantly.
	chart2d := charts.AddChart2D("", model.AxisTypeCategory, model.AxisTypeValue)
	chart2d.AddSeriesLine("email", "weekday", "email").
		Stack(true).
		Area(true).
		EmphasisScale(true).
		EmphasisFocus(model.FocusSeries).
		EmphasisBlurScope(model.BlurScopeCoordinateSystem)
	chart2d.AddSeriesLine("unionAds", "weekday", "unionAds").
		Stack(true).
		Area(true).
		EmphasisScale(true).
		EmphasisFocus(model.FocusSeries).
		EmphasisBlurScope(model.BlurScopeCoordinateSystem)
	chart2d.AddSeriesLine("videoAds", "weekday", "videoAds").
		Stack(true).
		Area(true).
		EmphasisScale(true).
		EmphasisFocus(model.FocusSeries).
		EmphasisBlurScope(model.BlurScopeCoordinateSystem)
	chart2d.AddSeriesLine("direct", "weekday", "direct").
		Stack(true).
		Area(true).
		EmphasisScale(true).
		EmphasisFocus(model.FocusSeries).
		EmphasisBlurScope(model.BlurScopeCoordinateSystem)
	chart2d.AddSeriesLine("searchEngine", "weekday", "searchEngine").
		Stack(true).
		Area(true).
		EmphasisScale(true).
		EmphasisFocus(model.FocusSeries).
		EmphasisBlurScope(model.BlurScopeCoordinateSystem).
		LabelShow(true).
		LabelPosition(model.PositionTop)
	return charts
}
// lineLineWithMarkLines builds a line chart decorated with statistical mark
// lines (max/min/median/average), a vertical and a horizontal line, and a
// point-to-point mark line.
func lineLineWithMarkLines() *goecharts.Charts {
	dataX := []string{"A", "B", "C", "D", "E", "F"}
	dataY := []float64{0.3, 1.4, 1.2, 1, 0.6}

	charts := goecharts.NewCharts("Line With Mark Lines").
		ToolTip("axis", model.PointerTypeCross)

	charts.Dataset().
		AddDataColumnString("x", dataX).
		AddDataColumnFloat("y", dataY)

	chart2d := charts.AddChart2D("", model.AxisTypeCategory, model.AxisTypeValue)
	chart2d.XAxis().BoundaryGap(true)
	chart2d.YAxis().Max(2)
	series := chart2d.AddSeriesLine("l", "x", "y")
	series.MarkLines().
		AddSpecial("max", model.MarkLineTypeMax).
		AddSpecial("min", model.MarkLineTypeMin).
		AddSpecial("median", model.MarkLineTypeMedian).
		AddSpecial("average", model.MarkLineTypeAverage).
		AddVertical("vertical", "B").
		AddHorizontal("horizontal", 1.8).
		AddByPoints("points", "B", 0.3, model.SymbolCircle, "F", 1.7, model.SymbolArrow)
	return charts
}
type LineExamples struct{}
func (LineExamples) Examples(workdir string) {
page := render.NewPage()
page.AddCharts(
lineBasicLineChart(),
lineSmoothedLineChart(),
lineBasicAreaChart(),
lineStackedLineChart(),
lineStackedAreaChart(),
lineLineWithMarkLines(),
)
f, err := os.Create(path.Join(workdir, "line.html"))
if err != nil {
panic(err)
}
page.Render(io.MultiWriter(f))
} | examples/examples/line.go | 0.654011 | 0.440168 | line.go | starcoder |
This example program shows how the `finder` and `property` packages can
be used to navigate a vSphere inventory structure using govmomi.
*/
package main
import (
"flag"
"fmt"
"net/url"
"os"
"strings"
"text/tabwriter"
"github.com/vmware/govmomi"
"github.com/vmware/govmomi/find"
"github.com/vmware/govmomi/property"
"github.com/vmware/govmomi/vim25/mo"
"github.com/vmware/govmomi/vim25/types"
"golang.org/x/net/context"
)
// GetEnvString returns string from environment variable.
func GetEnvString(v string, def string) string {
r := os.Getenv(v)
if r == "" {
return def
}
return r
}
// GetEnvBool returns a boolean parsed from environment variable v, or def
// when the variable is unset or empty. Values starting with "t", "y" or "1"
// (case-insensitive) count as true; everything else is false.
func GetEnvBool(v string, def bool) bool {
	r := os.Getenv(v)
	if r == "" {
		return def
	}
	first := strings.ToLower(r[0:1])
	return first == "t" || first == "y" || first == "1"
}
// Humanize converts a byte count into a short human-readable string,
// e.g. 2048 -> "2.0KB". Values of a petabyte or more render as "a lot".
func Humanize(v int64) string {
	const (
		KB = int64(1) << 10
		MB = KB << 10
		GB = MB << 10
		TB = GB << 10
		PB = TB << 10
	)
	switch {
	case v < KB:
		return fmt.Sprintf("%dB", v)
	case v < MB:
		return fmt.Sprintf("%.1fKB", float32(v)/float32(KB))
	case v < GB:
		return fmt.Sprintf("%.1fMB", float32(v)/float32(MB))
	case v < TB:
		return fmt.Sprintf("%.1fGB", float32(v)/float32(GB))
	case v < PB:
		return fmt.Sprintf("%.1fTB", float32(v)/float32(TB))
	default:
		return "a lot"
	}
}
// Command-line flags; defaults are taken from the corresponding
// environment variables when set.
var urlVar = "GOVMOMI_URL"
var urlDescription = fmt.Sprintf("ESX or vCenter URL [%s]", urlVar)
var urlFlag = flag.String("url", GetEnvString(urlVar, "https://username:password@host/sdk"), urlDescription)

var insecureVar = "GOVMOMI_INSECURE"
var insecureDescription = fmt.Sprintf("Don't verify the server's certificate chain [%s]", insecureVar)
var insecureFlag = flag.Bool("insecure", GetEnvBool(insecureVar, false), insecureDescription)
func exit(err error) {
fmt.Fprintf(os.Stderr, "Error: %s\n", err)
os.Exit(1)
}
// main connects to an ESX/vCenter endpoint, finds all datastores in the
// default datacenter, fetches their "summary" property in one round trip,
// and prints a table of name/type/capacity/free space.
func main() {
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()
	flag.Parse()
	// Parse URL from string
	u, err := url.Parse(*urlFlag)
	if err != nil {
		exit(err)
	}
	// Connect and log in to ESX or vCenter
	c, err := govmomi.NewClient(ctx, u, *insecureFlag)
	if err != nil {
		exit(err)
	}
	f := find.NewFinder(c.Client, true)
	// Find one and only datacenter
	dc, err := f.DefaultDatacenter(ctx)
	if err != nil {
		exit(err)
	}
	// Make future calls local to this datacenter
	f.SetDatacenter(dc)
	// Find datastores in datacenter
	dss, err := f.DatastoreList(ctx, "*")
	if err != nil {
		exit(err)
	}
	pc := property.DefaultCollector(c.Client)
	// Convert datastores into list of references
	var refs []types.ManagedObjectReference
	for _, ds := range dss {
		refs = append(refs, ds.Reference())
	}
	// Retrieve summary property for all datastores in a single call
	var dst []mo.Datastore
	err = pc.Retrieve(ctx, refs, []string{"summary"}, &dst)
	if err != nil {
		exit(err)
	}
	// Print summary per datastore as an aligned table
	tw := tabwriter.NewWriter(os.Stdout, 2, 0, 2, ' ', 0)
	fmt.Fprintf(tw, "Name:\tType:\tCapacity:\tFree:\n")
	for _, ds := range dst {
		fmt.Fprintf(tw, "%s\t", ds.Summary.Name)
		fmt.Fprintf(tw, "%s\t", ds.Summary.Type)
		fmt.Fprintf(tw, "%s\t", Humanize(ds.Summary.Capacity))
		fmt.Fprintf(tw, "%s\t", Humanize(ds.Summary.FreeSpace))
		fmt.Fprintf(tw, "\n")
	}
	tw.Flush()
}
package plan
import (
"github.com/Sndav/go-mysql-server/sql"
"github.com/Sndav/go-mysql-server/sql/expression"
)
// TransformUp applies a transformation function to the given tree from the
// bottom up. Opaque nodes are not descended into; they are handed to f as-is.
func TransformUp(node sql.Node, f sql.TransformNodeFunc) (sql.Node, error) {
	if o, ok := node.(sql.OpaqueNode); ok && o.Opaque() {
		return f(node)
	}

	children := node.Children()
	if len(children) == 0 {
		return f(node)
	}

	transformed := make([]sql.Node, len(children))
	for i, child := range children {
		newChild, err := TransformUp(child, f)
		if err != nil {
			return nil, err
		}
		transformed[i] = newChild
	}

	updated, err := node.WithChildren(transformed...)
	if err != nil {
		return nil, err
	}
	return f(updated)
}
// TransformNodeWithParentFunc is an analog to sql.TransformNodeFunc that also includes the parent of the node being
// transformed. The parent is for inspection only, and cannot be altered.
type TransformNodeWithParentFunc func(n sql.Node, parent sql.Node, childNum int) (sql.Node, error)

// TransformUpWithParent applies a transformation function to the given tree from the bottom up, with the additional
// context of the parent node of the node under inspection. The root node is
// passed to f with a nil parent and childNum -1.
func TransformUpWithParent(node sql.Node, f TransformNodeWithParentFunc) (sql.Node, error) {
	return transformUpWithParent(node, nil, -1, f)
}
// transformUpWithParent is the internal implementation of
// TransformUpWithParent that threads the parent node and child index through
// the recursion. Opaque nodes are handed to f without being descended into.
func transformUpWithParent(node sql.Node, parent sql.Node, childNum int, f TransformNodeWithParentFunc) (sql.Node, error) {
	if o, ok := node.(sql.OpaqueNode); ok && o.Opaque() {
		return f(node, parent, childNum)
	}

	children := node.Children()
	if len(children) == 0 {
		return f(node, parent, childNum)
	}

	rewritten := make([]sql.Node, len(children))
	for i, child := range children {
		newChild, err := transformUpWithParent(child, node, i, f)
		if err != nil {
			return nil, err
		}
		rewritten[i] = newChild
	}

	updated, err := node.WithChildren(rewritten...)
	if err != nil {
		return nil, err
	}
	return f(updated, parent, childNum)
}
// ChildSelector is a func that returns whether the child of a parent node
// should be walked as part of a transformation. If not, that child and its
// portion of the subtree is skipped.
type ChildSelector func(parent sql.Node, child sql.Node, childNum int) bool

// TransformUpWithSelector works like TransformUp, but allows the caller to
// decide which children of a node are walked.
func TransformUpWithSelector(node sql.Node, selector ChildSelector, f sql.TransformNodeFunc) (sql.Node, error) {
	if o, ok := node.(sql.OpaqueNode); ok && o.Opaque() {
		return f(node)
	}

	children := node.Children()
	if len(children) == 0 {
		return f(node)
	}

	newChildren := make([]sql.Node, len(children))
	for i, child := range children {
		// Children rejected by the selector are carried over untouched.
		newChildren[i] = child
		if !selector(node, child, i) {
			continue
		}
		walked, err := TransformUpWithSelector(child, selector, f)
		if err != nil {
			return nil, err
		}
		newChildren[i] = walked
	}

	updated, err := node.WithChildren(newChildren...)
	if err != nil {
		return nil, err
	}
	return f(updated)
}
// TransformExpressionsUpWithNode applies a transformation function to all
// expressions on the given tree from the bottom up, passing the enclosing
// node to f. (The previous comment named the wrong function.)
func TransformExpressionsUpWithNode(ctx *sql.Context, node sql.Node, f expression.TransformExprWithNodeFunc) (sql.Node, error) {
	if o, ok := node.(sql.OpaqueNode); ok && o.Opaque() {
		return TransformExpressionsWithNode(ctx, node, f)
	}
	children := node.Children()
	if len(children) == 0 {
		return TransformExpressionsWithNode(ctx, node, f)
	}
	newChildren := make([]sql.Node, len(children))
	for i, c := range children {
		c, err := TransformExpressionsUpWithNode(ctx, c, f)
		if err != nil {
			return nil, err
		}
		newChildren[i] = c
	}
	node, err := node.WithChildren(newChildren...)
	if err != nil {
		return nil, err
	}
	return TransformExpressionsWithNode(ctx, node, f)
}
// TransformExpressionsUp applies a transformation function to all expressions
// on the given tree from the bottom up. Opaque nodes have their own
// expressions transformed but are not descended into.
func TransformExpressionsUp(ctx *sql.Context, node sql.Node, f sql.TransformExprFunc) (sql.Node, error) {
	if o, ok := node.(sql.OpaqueNode); ok && o.Opaque() {
		return TransformExpressions(ctx, node, f)
	}

	children := node.Children()
	if len(children) == 0 {
		return TransformExpressions(ctx, node, f)
	}

	rewritten := make([]sql.Node, len(children))
	for i, child := range children {
		newChild, err := TransformExpressionsUp(ctx, child, f)
		if err != nil {
			return nil, err
		}
		rewritten[i] = newChild
	}

	updated, err := node.WithChildren(rewritten...)
	if err != nil {
		return nil, err
	}
	return TransformExpressions(ctx, updated, f)
}
// TransformExpressions applies a transformation function to all expressions
// on the given node (non-recursively). Nodes without expressions are
// returned unchanged.
func TransformExpressions(ctx *sql.Context, node sql.Node, f sql.TransformExprFunc) (sql.Node, error) {
	expressioner, ok := node.(sql.Expressioner)
	if !ok {
		return node, nil
	}

	exprs := expressioner.Expressions()
	if len(exprs) == 0 {
		return node, nil
	}

	transformed := make([]sql.Expression, len(exprs))
	for i, expr := range exprs {
		newExpr, err := expression.TransformUp(ctx, expr, f)
		if err != nil {
			return nil, err
		}
		transformed[i] = newExpr
	}
	return expressioner.WithExpressions(transformed...)
}
// TransformExpressionsWithNode applies a transformation function to all
// expressions on the given node (non-recursively), passing the node itself
// to f. (The previous comment named the wrong function.)
func TransformExpressionsWithNode(ctx *sql.Context, n sql.Node, f expression.TransformExprWithNodeFunc) (sql.Node, error) {
	e, ok := n.(sql.Expressioner)
	if !ok {
		return n, nil
	}
	exprs := e.Expressions()
	if len(exprs) == 0 {
		return n, nil
	}
	newExprs := make([]sql.Expression, len(exprs))
	for i, e := range exprs {
		e, err := expression.TransformUpWithNode(ctx, n, e, f)
		if err != nil {
			return nil, err
		}
		newExprs[i] = e
	}
	return e.WithExpressions(newExprs...)
}
package db
import "time"
type TimePeriod struct {
Start time.Time
End time.Time
}
func (tp TimePeriod) Includes(u time.Time) bool {
return u.Sub(tp.Start) >= 0 && tp.End.Sub(u) > 0
}
func (tp TimePeriod) Equal(o TimePeriod) bool {
return tp.Start.Equal(o.Start) && tp.End.Equal(o.End)
}
type TimeOfDayPeriod TimePeriod
func (tp TimeOfDayPeriod) Equal(o TimeOfDayPeriod) bool {
return TimePeriod(tp).Equal(TimePeriod(o))
}
func (d TimeOfDayPeriod) String() string {
return d.Start.Format(time.Kitchen) + " - " + d.End.Format(time.Kitchen)
}
type DatePeriod TimePeriod
const DateFormat = "1/2/06"
func (tp DatePeriod) Equal(o DatePeriod) bool {
return TimePeriod(tp).Equal(TimePeriod(o))
}
func (d DatePeriod) String() string {
return d.Start.Format(DateFormat) + " - " + d.End.Format(DateFormat)
}
// Calendar answers, for any instant, whether the schedule is "on" and the
// surrounding period for which that answer holds.
type Calendar interface {
	RuleAt(t time.Time) (isOn bool, period TimePeriod)
}

// timeClock is the concrete Calendar implementation: school-day hours on
// school days, vacation hours on weekends and holidays.
type timeClock struct {
	location *time.Location
	schoolDayHours TimeOfDayPeriod // Monday hours
	vacationHours TimeOfDayPeriod // Saturday hours
	holidays []DatePeriod
}

// This configuration information should be in a database someday, but for now
// it's convenient to set it at compile time.

// TimeOfDayPeriodConfig holds a pair of clock times in Kitchen format,
// e.g. "3:04PM".
type TimeOfDayPeriodConfig struct {
	StartTime string
	EndTime string
}

// DateRangeConfig holds a pair of short dates in DateFormat, e.g. "1/2/06".
type DateRangeConfig struct {
	StartDay string
	// This is inclusive
	EndDay string
}

// CalendarConfig is the full compile-time configuration for a timeClock.
type CalendarConfig struct {
	Location string
	SchoolDayHours TimeOfDayPeriodConfig
	VacationHours TimeOfDayPeriodConfig
	Holidays []DateRangeConfig
}
// NewCalendar builds a Calendar from the given configuration.
//
// On failure it returns a true nil interface. The original assigned the
// (nil) *timeClock from ParseTimeClock into the Calendar result, so callers
// comparing the interface against nil saw a non-nil value (typed-nil trap).
func NewCalendar(cc *CalendarConfig) (Calendar, error) {
	tc, err := ParseTimeClock(cc)
	if err != nil {
		return nil, err
	}
	return tc, nil
}
// ParseTimeClock parses every part of a CalendarConfig — location, the two
// daily windows, and the holiday list — into a ready-to-use timeClock.
func ParseTimeClock(cc *CalendarConfig) (*timeClock, error) {
	location, err := time.LoadLocation(cc.Location)
	if err != nil {
		return nil, err
	}
	schoolDayHours, err := ParseTimeOfDayPeriod(cc.SchoolDayHours)
	if err != nil {
		return nil, err
	}
	vacationHours, err := ParseTimeOfDayPeriod(cc.VacationHours)
	if err != nil {
		return nil, err
	}
	holidays, err := parseHolidays(cc.Holidays, location)
	if err != nil {
		return nil, err
	}
	return &timeClock{location, schoolDayHours, vacationHours, holidays}, nil
}
// ParseTimeOfDay parses a clock time in Kitchen format, e.g. "3:04PM".
func ParseTimeOfDay(tod string) (time.Time, error) {
	return time.Parse(time.Kitchen, tod)
}
// ParseTimeOfDayPeriod parses the start and end clock times of a config
// entry into a TimeOfDayPeriod.
func ParseTimeOfDayPeriod(todc TimeOfDayPeriodConfig) (TimeOfDayPeriod, error) {
	start, err := ParseTimeOfDay(todc.StartTime)
	if err != nil {
		return TimeOfDayPeriod{}, err
	}
	end, err := ParseTimeOfDay(todc.EndTime)
	return TimeOfDayPeriod{Start: start, End: end}, err
}
func beginningOfPreviousDay(t time.Time) time.Time {
year, month, day := t.Date()
return time.Date(year, month, day-1, 0, 0, 0, 0, t.Location())
}
func beginningOfNextDay(t time.Time) time.Time {
year, month, day := t.Date()
return time.Date(year, month, day+1, 0, 0, 0, 0, t.Location())
}
// parseHolidays converts each configured date range into a DatePeriod,
// stopping at the first parse failure.
func parseHolidays(hc []DateRangeConfig, location *time.Location) ([]DatePeriod, error) {
	var holidays []DatePeriod
	for _, h := range hc {
		holiday, err := ParseDateRange(h, location)
		if err != nil {
			return holidays, err
		}
		holidays = append(holidays, holiday)
	}
	return holidays, nil
}
// ParseDate parses a short date (layout DateFormat, e.g. "1/2/06") as
// midnight in the given location.
func ParseDate(date string, location *time.Location) (t time.Time, err error) {
	return time.ParseInLocation(DateFormat, date, location)
}
// ParseDateRange parses a configured start/end day pair into a DatePeriod.
// The configured end day is inclusive, so the resulting half-open period
// extends to the beginning of the following day.
func ParseDateRange(dr DateRangeConfig, location *time.Location) (DatePeriod, error) {
	start, err := ParseDate(dr.StartDay, location)
	if err != nil {
		return DatePeriod{}, err
	}
	end, err := ParseDate(dr.EndDay, location)
	if err != nil {
		return DatePeriod{}, err
	}
	return DatePeriod{start, beginningOfNextDay(end)}, nil
}
// RuleAt implements Calendar: it reports whether t falls inside the active
// ("on") window of its day, and returns the containing on- or off-period.
func (tc *timeClock) RuleAt(t time.Time) (isOn bool, period TimePeriod) {
	activeTime := tc.activeTimeFor(t)
	if activeTime.Includes(t) {
		isOn, period = true, activeTime
		return
	}
	// t is outside today's active window: the off-period stretches from the
	// end of the previous active window to the start of the next one.
	if t.Before(activeTime.Start) {
		prevDayEnd := tc.endTimeFor(beginningOfPreviousDay(t))
		return false, TimePeriod{prevDayEnd, activeTime.Start}
	} else {
		nextDayStart := tc.startTimeFor(beginningOfNextDay(t))
		return false, TimePeriod{activeTime.End, nextDayStart}
	}
}

// activeTimeFor returns the active window for the calendar day containing t.
func (tc *timeClock) activeTimeFor(t time.Time) TimePeriod {
	return TimePeriod{tc.startTimeFor(t), tc.endTimeFor(t)}
}

// isSchoolDay reports whether t falls on a weekday that is not a holiday.
func (tc *timeClock) isSchoolDay(t time.Time) bool {
	if tc.isHoliday(t) {
		return false
	}
	weekDay := t.Weekday()
	return weekDay >= time.Monday && weekDay <= time.Friday
}

// isSchoolNight reports whether the day after t is a school day.
func (tc *timeClock) isSchoolNight(t time.Time) bool {
	return tc.isSchoolDay(beginningOfNextDay(t))
}

// startTimeOfDayFor picks the window start clock time for t's day type.
func (tc *timeClock) startTimeOfDayFor(t time.Time) time.Time {
	return tc.activeHoursForDayType(tc.isSchoolDay(t)).Start
}

// endTimeOfDayFor picks the window end clock time. Note it keys off whether
// tonight is a school night, not whether today is a school day.
func (tc *timeClock) endTimeOfDayFor(t time.Time) time.Time {
	return tc.activeHoursForDayType(tc.isSchoolNight(t)).End
}

// startTimeFor combines t's date with the day-type start clock time.
func (tc *timeClock) startTimeFor(t time.Time) time.Time {
	return tc.mergeDateAndTimeOfDay(t, tc.startTimeOfDayFor(t))
}

// endTimeFor combines t's date with the day-type end clock time.
func (tc *timeClock) endTimeFor(t time.Time) time.Time {
	return tc.mergeDateAndTimeOfDay(t, tc.endTimeOfDayFor(t))
}

// isHoliday reports whether t falls inside any configured holiday period.
func (tc *timeClock) isHoliday(t time.Time) bool {
	for _, h := range tc.holidays {
		includes := TimePeriod(h).Includes(t)
		if includes {
			return true
		}
	}
	return false
}
// activeHoursForDayType returns the active window for a day: school-day
// hours on school days, vacation hours otherwise.
//
// The original had an unreachable trailing `return` (both branches already
// return) and an unused named result; both are removed here.
func (tc *timeClock) activeHoursForDayType(isSchoolDay bool) TimeOfDayPeriod {
	if isSchoolDay {
		return tc.schoolDayHours
	}
	return tc.vacationHours
}
// mergeDateAndTimeOfDay builds an instant in tc.location from date's
// year/month/day and timeOfDay's hour/minute/second.
func (tc *timeClock) mergeDateAndTimeOfDay(date time.Time, timeOfDay time.Time) time.Time {
	year, month, day := date.Date()
	hour, minute, second := timeOfDay.Hour(), timeOfDay.Minute(), timeOfDay.Second()
	return time.Date(year, month, day, hour, minute, second, 0, tc.location)
}
package pos
import "github.com/chewxy/lingo"
type perceptron struct {
// weights map[feature]*[lingo.MAXTAG]float64 // it's a pointer to a static array because map values are immutable, and cannot be edited
weightsSF map[singleFeature]*[lingo.MAXTAG]float64
weightsTF map[tupleFeature]*[lingo.MAXTAG]float64
totals map[fctuple]float64
steps map[fctuple]float64
instancesSeen float64
}
// feature-class tuple is a tuple that contains a feature and a class. This makes calculation of the averaging easier
type fctuple struct {
feature
lingo.POSTag
}
func newPerceptron() *perceptron {
return &perceptron{
// weights: make(map[feature]*[lingo.MAXTAG]float64),
weightsSF: make(map[singleFeature]*[lingo.MAXTAG]float64),
weightsTF: make(map[tupleFeature]*[lingo.MAXTAG]float64),
totals: make(map[fctuple]float64),
steps: make(map[fctuple]float64),
}
}
// updateWeightsSF sets the weight for one (single feature, tag) pair to
// weight+value, first banking the old weight's contribution to the running
// average for the instances during which it was in effect.
func (p *perceptron) updateWeightsSF(f singleFeature, tag lingo.POSTag, weight, value float64) {
	tuple := fctuple{f, tag}
	// The old weight has been live since steps[tuple]; credit it for that
	// many instances before it is overwritten below.
	p.totals[tuple] += (p.instancesSeen - p.steps[tuple]) * weight
	p.steps[tuple] = p.instancesSeen
	if _, ok := p.weightsSF[f]; !ok {
		p.weightsSF[f] = new([lingo.MAXTAG]float64)
	}
	p.weightsSF[f][tag] = weight + value
}

// updateWeightsTF is the tuple-feature analog of updateWeightsSF.
func (p *perceptron) updateWeightsTF(f tupleFeature, tag lingo.POSTag, weight, value float64) {
	tuple := fctuple{f, tag}
	p.totals[tuple] += (p.instancesSeen - p.steps[tuple]) * weight
	p.steps[tuple] = p.instancesSeen
	if _, ok := p.weightsTF[f]; !ok {
		p.weightsTF[f] = new([lingo.MAXTAG]float64)
	}
	p.weightsTF[f][tag] = weight + value
}

// update performs one training step: when the guess is wrong, every active
// feature has the true tag rewarded (+1) and the guessed tag penalized (-1).
func (p *perceptron) update(guess, truth lingo.POSTag, sf sfFeatures, tf tfFeatures) {
	p.instancesSeen++
	if truth == guess {
		return
	}
	for _, f := range sf {
		var truthValue float64
		var guessValue float64
		if weights, ok := p.weightsSF[f]; ok {
			truthValue = weights[truth]
			guessValue = weights[guess]
		}
		p.updateWeightsSF(f, truth, truthValue, 1)
		p.updateWeightsSF(f, guess, guessValue, -1)
	}
	for _, f := range tf {
		var truthValue float64
		var guessValue float64
		if weights, ok := p.weightsTF[f]; ok {
			truthValue = weights[truth]
			guessValue = weights[guess]
		}
		p.updateWeightsTF(f, truth, truthValue, 1)
		p.updateWeightsTF(f, guess, guessValue, -1)
	}
}
// predict sums the per-tag weights of every active feature and returns the
// highest-scoring POS tag.
func (p *perceptron) predict(sf sfFeatures, tf tfFeatures) lingo.POSTag {
	var scores [lingo.MAXTAG]float64
	accumulate := func(weights *[lingo.MAXTAG]float64) {
		for tag, w := range weights {
			scores[tag] += w
		}
	}
	for _, f := range sf {
		if weights, ok := p.weightsSF[f]; ok {
			accumulate(weights)
		}
	}
	for _, f := range tf {
		if weights, ok := p.weightsTF[f]; ok {
			accumulate(weights)
		}
	}
	return maxScore(&scores)
}
// average replaces every weight with its time-average over training: the
// banked total plus credit for the instances since the weight last changed,
// divided by the number of instances seen. Called once after training.
func (p *perceptron) average() {
	for f, weights := range p.weightsSF {
		for c, weight := range weights {
			tuple := fctuple{f, lingo.POSTag(c)}
			total := p.totals[tuple]
			// Credit the current weight for the instances since it was set.
			total += (p.instancesSeen - p.steps[tuple]) * weight
			avg := total / p.instancesSeen
			weights[c] = avg
		}
	}
	for f, weights := range p.weightsTF {
		for c, weight := range weights {
			tuple := fctuple{f, lingo.POSTag(c)}
			total := p.totals[tuple]
			total += (p.instancesSeen - p.steps[tuple]) * weight
			avg := total / p.instancesSeen
			weights[c] = avg
		}
	}
}
package manta
import (
"math"
)
// fieldDecoder reads one field value from the bit stream.
type fieldDecoder func(*reader) interface{}

// fieldFactory builds a decoder specialized by a field's metadata
// (bit count, encoder name, value range).
type fieldFactory func(*field) fieldDecoder

// fieldTypeFactories maps base types whose decoding depends on per-field
// metadata to the factory that builds their decoder.
var fieldTypeFactories = map[string]fieldFactory{
	"float32": floatFactory,
	"CNetworkedQuantizedFloat": quantizedFactory,
	"Vector": vectorFactory(3),
	"Vector2D": vectorFactory(2),
	"Vector4D": vectorFactory(4),
	"uint64": unsigned64Factory,
	"QAngle": qangleFactory,
	"CHandle": unsignedFactory,
	"CStrongHandle": unsigned64Factory,
	"CEntityHandle": unsignedFactory,
}

// fieldNameDecoders overrides decoding for specific variable names
// (currently empty).
var fieldNameDecoders = map[string]fieldDecoder{}

// fieldTypeDecoders maps base types with fixed, metadata-independent
// decoding to their decoder.
var fieldTypeDecoders = map[string]fieldDecoder{
	"bool": booleanDecoder,
	"char": stringDecoder,
	"color32": unsignedDecoder,
	"int16": signedDecoder,
	"int32": signedDecoder,
	"int64": signedDecoder,
	"int8": signedDecoder,
	"uint16": unsignedDecoder,
	"uint32": unsignedDecoder,
	"uint8": unsignedDecoder,
	"CBodyComponent": componentDecoder,
	"CGameSceneNodeHandle": unsignedDecoder,
	"Color": unsignedDecoder,
	"CPhysicsComponent": componentDecoder,
	"CRenderComponent": componentDecoder,
	"CUtlString": stringDecoder,
	"CUtlStringToken": unsignedDecoder,
	"CUtlSymbolLarge": stringDecoder,
}
// unsignedFactory ignores field metadata and always decodes a varuint32.
func unsignedFactory(f *field) fieldDecoder {
	return unsignedDecoder
}

// unsigned64Factory picks fixed 64-bit little-endian decoding for the
// "fixed64" encoder, varuint64 otherwise.
func unsigned64Factory(f *field) fieldDecoder {
	switch f.encoder {
	case "fixed64":
		return fixed64Decoder
	}
	return unsigned64Decoder
}

// floatFactory selects the float decoder: special encoders first, a raw
// 32-bit read when no usable bit count is present, otherwise a quantized
// decoder built from the field's range metadata.
func floatFactory(f *field) fieldDecoder {
	switch f.encoder {
	case "coord":
		return floatCoordDecoder
	case "simtime":
		return simulationTimeDecoder
	case "runetime":
		return runeTimeDecoder
	}
	if f.bitCount == nil || (*f.bitCount <= 0 || *f.bitCount >= 32) {
		return noscaleDecoder
	}
	return quantizedFactory(f)
}

// quantizedFactory builds a quantized-float decoder from the field's
// bit count, flags, and low/high range.
func quantizedFactory(f *field) fieldDecoder {
	qfd := newQuantizedFloatDecoder(f.bitCount, f.encodeFlags, f.lowValue, f.highValue)
	return func(r *reader) interface{} {
		return qfd.decode(r)
	}
}

// vectorFactory returns a factory for an n-component float vector. A
// 3-component vector with the "normal" encoder uses the packed 3-bit-normal
// representation instead of per-component floats.
func vectorFactory(n int) fieldFactory {
	return func(f *field) fieldDecoder {
		if n == 3 && f.encoder == "normal" {
			return vectorNormalDecoder
		}
		d := floatFactory(f)
		return func(r *reader) interface{} {
			x := make([]float32, n)
			for i := 0; i < n; i++ {
				x[i] = d(r).(float32)
			}
			return x
		}
	}
}
// vectorNormalDecoder reads a packed normal vector.
func vectorNormalDecoder(r *reader) interface{} {
	return r.read3BitNormal()
}

// fixed64Decoder reads a full 64-bit little-endian value.
func fixed64Decoder(r *reader) interface{} {
	return r.readLeUint64()
}

// handleDecoder reads an entity handle as a varuint32.
func handleDecoder(r *reader) interface{} {
	return r.readVarUint32()
}

// booleanDecoder reads a single-bit boolean.
func booleanDecoder(r *reader) interface{} {
	return r.readBoolean()
}

// stringDecoder reads a null-terminated string.
func stringDecoder(r *reader) interface{} {
	return r.readString()
}

// defaultDecoder is the fallback used when no decoder is registered: varuint32.
func defaultDecoder(r *reader) interface{} {
	return r.readVarUint32()
}

// signedDecoder reads a zig-zag varint32.
func signedDecoder(r *reader) interface{} {
	return r.readVarInt32()
}

// floatCoordDecoder reads a float in the "coord" encoding.
func floatCoordDecoder(r *reader) interface{} {
	return r.readBits(32)
}

// noscaleDecoder reads a raw 32-bit float with no quantization.
func noscaleDecoder(r *reader) interface{} {
	return math.Float32frombits(r.readBits(32))
}

// runeTimeDecoder reads a 4-bit value reinterpreted as float bits.
func runeTimeDecoder(r *reader) interface{} {
	return math.Float32frombits(r.readBits(4))
}

// simulationTimeDecoder reads a tick count and scales it by 1/30
// (ticks-to-seconds).
func simulationTimeDecoder(r *reader) interface{} {
	return float32(r.readVarUint32()) * (1.0 / 30)
}

// qangleFactory builds a decoder for a QAngle (pitch/yaw/roll triple).
// "qangle_pitch_yaw" reads only two fixed-width angles (roll is zero); a
// non-zero bit count reads all three at that width; otherwise each component
// is preceded by a presence bit and read in "coord" encoding when present.
func qangleFactory(f *field) fieldDecoder {
	if f.encoder == "qangle_pitch_yaw" {
		n := uint32(*f.bitCount)
		return func(r *reader) interface{} {
			return []float32{
				r.readAngle(n),
				r.readAngle(n),
				0.0,
			}
		}
	}
	if f.bitCount != nil && *f.bitCount != 0 {
		n := uint32(*f.bitCount)
		return func(r *reader) interface{} {
			return []float32{
				r.readAngle(n),
				r.readAngle(n),
				r.readAngle(n),
			}
		}
	}
	return func(r *reader) interface{} {
		ret := make([]float32, 3)
		// One presence bit per component, then a coord for each set bit.
		rX := r.readBoolean()
		rY := r.readBoolean()
		rZ := r.readBoolean()
		if rX {
			ret[0] = r.readCoord()
		}
		if rY {
			ret[1] = r.readCoord()
		}
		if rZ {
			ret[2] = r.readCoord()
		}
		return ret
	}
}

// vector2Decoder reads two raw floats.
func vector2Decoder(r *reader) interface{} {
	return []float32{r.readFloat(), r.readFloat()}
}

// unsignedDecoder reads a varuint32 widened to uint64.
func unsignedDecoder(r *reader) interface{} {
	return uint64(r.readVarUint32())
}

// unsigned64Decoder reads a varuint64.
func unsigned64Decoder(r *reader) interface{} {
	return r.readVarUint64()
}

// componentDecoder reads a single presence bit.
func componentDecoder(r *reader) interface{} {
	return r.readBits(1)
}
func findDecoder(f *field) fieldDecoder {
if v, ok := fieldTypeFactories[f.fieldType.baseType]; ok {
return v(f)
}
if v, ok := fieldNameDecoders[f.varName]; ok {
return v
}
if v, ok := fieldTypeDecoders[f.fieldType.baseType]; ok {
return v
}
return defaultDecoder
}
func findDecoderByBaseType(baseType string) fieldDecoder {
if v, ok := fieldTypeDecoders[baseType]; ok {
return v
}
return defaultDecoder
} | field_decoder.go | 0.595845 | 0.416381 | field_decoder.go | starcoder |
package merkle
import (
"errors"
"fmt"
"math"
"strings"
"github.com/ravdin/programmingbitcoin/util"
)
// Tree represents a merkle tree with a traversal cursor.
type Tree struct {
	Total        int        // number of leaf hashes
	MaxDepth     int        // depth of the tree; the root sits at depth 0
	Nodes        [][][]byte // hash per node, indexed [depth][index]
	CurrentDepth int        // cursor position used while populating
	CurrentIndex int
}

// NewTree initializes a merkle tree sized to hold total leaf hashes.
func NewTree(total int) *Tree {
	depth := int(math.Ceil(math.Log2(float64(total))))
	nodes := make([][][]byte, depth+1)
	// Level i holds ceil(total / 2^(depth-i)) nodes: one at the root,
	// total at the leaf level.
	for i := range nodes {
		width := int(math.Ceil(float64(total) / math.Pow(2, float64(depth-i))))
		nodes[i] = make([][]byte, width)
	}
	return &Tree{
		Total:    total,
		MaxDepth: depth,
		Nodes:    nodes,
	}
}
// String renders the tree one level per line. Unset hashes print as "None",
// known hashes as their first four bytes, and the node under the cursor is
// wrapped in asterisks.
func (tree *Tree) String() string {
	levels := make([]string, len(tree.Nodes))
	for depth, level := range tree.Nodes {
		rendered := make([]string, len(level))
		for index, h := range level {
			switch {
			case len(h) == 0:
				rendered[index] = "None"
			case depth == tree.CurrentDepth && index == tree.CurrentIndex:
				rendered[index] = fmt.Sprintf("*%x.*", h[:4])
			default:
				rendered[index] = fmt.Sprintf("%x...", h[:4])
			}
		}
		levels[depth] = strings.Join(rendered, ", ")
	}
	return strings.Join(levels, "\n")
}
// PopulateTree fills in the tree by walking it depth-first with the cursor,
// consuming one flag bit per visited internal node (and per leaf) and one
// hash whenever a subtree's hash is given directly. It finishes when the
// root hash is known, and errors if any hashes or set flag bits are left
// unconsumed.
func (tree *Tree) PopulateTree(flagBits []byte, hashes [][]byte) error {
	for len(tree.Root()) == 0 {
		if tree.isLeaf() {
			// Leaves always take the next hash; their flag bit is skipped.
			flagBits = flagBits[1:]
			tree.setCurrentNode(hashes[0])
			hashes = hashes[1:]
			tree.up()
			continue
		}
		leftHash := tree.getLeftNode()
		if len(leftHash) == 0 {
			// Flag bit 0: this subtree's hash is supplied directly.
			// Flag bit 1: descend and compute it from the children.
			flagBit := flagBits[0]
			flagBits = flagBits[1:]
			if flagBit == 0 {
				tree.setCurrentNode(hashes[0])
				hashes = hashes[1:]
				tree.up()
			} else {
				tree.left()
			}
		} else if tree.rightExists() {
			rightHash := tree.getRightNode()
			if len(rightHash) == 0 {
				tree.right()
			} else {
				tree.setCurrentNode(util.MerkleParent(leftHash, rightHash))
				tree.up()
			}
		} else {
			// Odd level: a missing right sibling is replaced by the left.
			tree.setCurrentNode(util.MerkleParent(leftHash, leftHash))
			tree.up()
		}
	}
	if len(hashes) != 0 {
		return fmt.Errorf("hashes not all consumed %d", len(hashes))
	}
	for _, flagBit := range flagBits {
		if flagBit != 0 {
			return errors.New("flagBits not all consumed")
		}
	}
	return nil
}
// up moves the cursor to the parent node.
func (tree *Tree) up() {
	tree.CurrentDepth--
	tree.CurrentIndex >>= 1
}

// left moves the cursor to the left child.
func (tree *Tree) left() {
	tree.CurrentDepth++
	tree.CurrentIndex <<= 1
}

// right moves the cursor to the right child.
func (tree *Tree) right() {
	tree.CurrentDepth++
	tree.CurrentIndex <<= 1
	tree.CurrentIndex++
}

// Root returns the root node.
func (tree *Tree) Root() []byte {
	return tree.Nodes[0][0]
}
// setCurrentNode stores value at the cursor position.
func (tree *Tree) setCurrentNode(value []byte) {
	tree.Nodes[tree.CurrentDepth][tree.CurrentIndex] = value
}

// getCurrentNode returns the hash at the cursor position.
func (tree *Tree) getCurrentNode() []byte {
	return tree.Nodes[tree.CurrentDepth][tree.CurrentIndex]
}

// getLeftNode returns the cursor node's left child hash.
func (tree *Tree) getLeftNode() []byte {
	return tree.Nodes[tree.CurrentDepth+1][tree.CurrentIndex*2]
}

// getRightNode returns the cursor node's right child hash.
func (tree *Tree) getRightNode() []byte {
	return tree.Nodes[tree.CurrentDepth+1][tree.CurrentIndex*2+1]
}

// isLeaf reports whether the cursor is at the leaf level.
func (tree *Tree) isLeaf() bool {
	return tree.CurrentDepth == tree.MaxDepth
}

// rightExists reports whether the cursor node has a right child.
func (tree *Tree) rightExists() bool {
	return len(tree.Nodes[tree.CurrentDepth+1]) > tree.CurrentIndex*2+1
}
package buffer
// View is a slice of a buffer, with convenience methods.
type View []byte

// NewView allocates a new buffer and returns an initialized view covering
// the whole buffer.
func NewView(size int) View {
	v := make(View, size)
	return v
}

// CapLength irreversibly reduces the length of the visible section of the
// buffer to the value specified. The slice capacity is clamped as well, so
// the excluded region can never be re-exposed by expanding the view — that
// could leak uninitialized data.
func (v *View) CapLength(length int) {
	*v = (*v)[0:length:length]
}

// TrimFront removes the first "count" bytes from the visible section of the
// buffer.
func (v *View) TrimFront(count int) {
	*v = (*v)[count:len(*v)]
}
// ToVectorisedView wraps the View in a single-element VectorisedView, using
// the caller-supplied one-element array as backing storage to avoid an
// allocation.
func (v *View) ToVectorisedView(views [1]View) VectorisedView {
	views[0] = *v
	return NewVectorisedView(views[:], len(*v))
}
// VectorisedView is a vectorised version of View using non-contiguous memory.
// It supports all the convenience methods supported by View.
type VectorisedView struct {
	views []View
	size int
}

// NewVectorisedView creates a new vectorised view from an already-allocated
// slice of View and sets its size.
func NewVectorisedView(views []View, size int) VectorisedView {
	return VectorisedView{views: views, size: size}
}

// Views returns the slice containing all the views.
func (vv *VectorisedView) Views() []View {
	return vv.views
}

// SetSize unsafely sets the size of the VectorisedView; it does not verify
// that the views actually hold that many bytes.
func (vv *VectorisedView) SetSize(size int) {
	vv.size = size
}

// SetViews unsafely sets the views of the VectorisedView; the size is not
// recomputed.
func (vv *VectorisedView) SetViews(views []View) {
	vv.views = views
}
// First returns the first view of the vectorised view.
// It panics if the vectorised view is empty.
func (vv *VectorisedView) First() View {
	if len(vv.views) == 0 {
		panic("vview is empty")
	}
	return vv.views[0]
}

// TrimFront removes the first "count" bytes of the vectorised view: whole
// leading views are dropped and the first remaining view is trimmed.
func (vv *VectorisedView) TrimFront(count int) {
	for count > 0 && len(vv.views) > 0 {
		if count < len(vv.views[0]) {
			vv.size -= count
			vv.views[0].TrimFront(count)
			return
		}
		count -= len(vv.views[0])
		vv.RemoveFirst()
	}
}
// RemoveFirst removes the first view of the vectorised view and shrinks its
// size accordingly. It is a no-op on an empty view.
func (vv *VectorisedView) RemoveFirst() {
	if len(vv.views) == 0 {
		return
	}
	vv.size -= len(vv.views[0])
	vv.views = vv.views[1:]
}

// ToView returns a single contiguous view containing the concatenated
// content of the vectorised view.
func (vv *VectorisedView) ToView() View {
	v := make([]byte, vv.size)
	// u walks forward through v as each view is copied in.
	u := v
	for i := range vv.views {
		n := copy(u, vv.views[i])
		u = u[n:]
	}
	return v
}
// Size returns the size in bytes of the entire content stored in the vectorised view
func (vv *VectorisedView) Size() int {
return vv.size
}
// Clone returns a clone of this VectorisedView
// If the buffer argument is large enough to contain all the Views of this VectorisedView
// the method will avoid allocations and use the buffer to store the Views of the clone
func (vv *VectorisedView) Clone(buffer []View) VectorisedView {
var views []View
if len(buffer) >= len(vv.views) {
views = buffer[:len(vv.views)]
} else {
views = make([]View, len(vv.views))
}
for i, v := range vv.views {
views[i] = v
}
return VectorisedView{views: views, size: vv.size}
} | buffer/view.go | 0.89058 | 0.729447 | view.go | starcoder |
package dh
import (
"crypto/rand"
"encoding/hex"
curve "gitlab.com/yawning/x448.git"
)
// dhlen448 defines the DHLEN for x448.
const dhlen448 = 56
// publicKey448 implements the PublicKey interface for X448.
type publicKey448 struct {
	raw [dhlen448]byte
}

// Bytes turns the underlying bytes array into a slice.
func (pk *publicKey448) Bytes() []byte {
	return pk.raw[:]
}

// loadBytes copies the input into the fixed dhlen448-byte array, rejecting
// input of any other length.
func (pk *publicKey448) loadBytes(data []byte) error {
	if len(data) != dhlen448 {
		return errMismatchedKey("public", dhlen448, len(data))
	}
	copy(pk.raw[:], data)
	return nil
}

// Hex returns the public key in hexstring.
func (pk *publicKey448) Hex() string {
	return hex.EncodeToString(pk.Bytes())
}
// privateKey448 implements the PrivateKey interface for X448, keeping its
// corresponding public key alongside the scalar.
type privateKey448 struct {
	raw [dhlen448]byte
	pub *publicKey448
}

// Bytes turns the underlying bytes array into a slice.
func (pk *privateKey448) Bytes() []byte {
	return pk.raw[:]
}

// DH performs a Diffie-Hellman calculation between the private key in the
// key pair and the public key. pub must be exactly dhlen448 bytes.
func (pk *privateKey448) DH(pub []byte) ([]byte, error) {
	var pubKey publicKey448
	// validate public key
	if err := pubKey.loadBytes(pub); err != nil {
		return nil, err
	}
	var shared [dhlen448]byte
	curve.ScalarMult(&shared, &pk.raw, &pubKey.raw)
	return shared[:], nil
}

// PubKey returns the corresponding public key.
func (pk *privateKey448) PubKey() PublicKey {
	return pk.pub
}

// update writes the secret scalar into the private key and recomputes the
// public key from it.
func (pk *privateKey448) update(data []byte) {
	copy(pk.raw[:], data[:dhlen448])
	// calcuate the public key
	curve.ScalarBaseMult(&pk.pub.raw, &pk.raw)
}
// curve448 implements the DH interface (aka "X448").
type curve448 struct {
	DHLEN int
}

// GenerateKeyPair creates a key pair from entropy. If entropy is nil, the
// private key is drawn from crypto/rand. If entropy is supplied it must be
// at least dhlen448 (56) bytes — only the first 56 are used. The original
// sliced entropy[:dhlen448] unconditionally and panicked on shorter input;
// that is now reported as an error instead.
func (c *curve448) GenerateKeyPair(entropy []byte) (PrivateKey, error) {
	secret := make([]byte, dhlen448)
	if entropy != nil {
		if len(entropy) < dhlen448 {
			return nil, errMismatchedKey("entropy", dhlen448, len(entropy))
		}
		// entropy is given, use it to create the private key.
		copy(secret, entropy[:dhlen448])
	} else {
		// no entropy given, use the default rand.Read.
		if _, err := rand.Read(secret); err != nil {
			return nil, err
		}
	}
	// set the raw data for both private and public keys.
	priv := &privateKey448{pub: &publicKey448{}}
	priv.update(secret)
	return priv, nil
}
// LoadPrivateKey uses the data provided to create a new private key. data
// must be exactly dhlen448 bytes.
func (c *curve448) LoadPrivateKey(data []byte) (PrivateKey, error) {
	if len(data) != dhlen448 {
		return nil, errMismatchedKey("private", dhlen448, len(data))
	}
	priv := &privateKey448{pub: &publicKey448{}}
	priv.update(data)
	return priv, nil
}

// LoadPublicKey uses the data provided to create a new public key. data must
// be exactly dhlen448 bytes.
func (c *curve448) LoadPublicKey(data []byte) (PublicKey, error) {
	pub := &publicKey448{}
	if err := pub.loadBytes(data); err != nil {
		return nil, err
	}
	return pub, nil
}
// Size returns the DHLEN.
func (c *curve448) Size() int {
	return c.DHLEN
}

// String returns the curve's registered name.
func (c *curve448) String() string {
	return "448"
}

// newCurve448 constructs the X448 curve with its 56-byte DHLEN.
func newCurve448() Curve {
	return &curve448{DHLEN: dhlen448}
}

// init registers the curve under the name "448".
func init() {
	Register("448", newCurve448)
}
package google
import (
"fmt"
"github.com/infracost/infracost/internal/schema"
"github.com/shopspring/decimal"
)
// NetworkEgressUsage carries the common identification fields shared by the
// egress usage resources below.
type NetworkEgressUsage struct {
	Address string
	Region string
	PrefixName string
}

// StorageBucketNetworkEgressUsage holds monthly egress amounts (GB) per
// destination group for a Cloud Storage bucket.
type StorageBucketNetworkEgressUsage struct {
	SameContinent *float64 `infracost_usage:"same_continent"`
	Asia *float64 `infracost_usage:"asia"`
	Worldwide *float64 `infracost_usage:"worldwide"`
	China *float64 `infracost_usage:"china"`
	Australia *float64 `infracost_usage:"australia"`
	NetworkEgressUsage
}

// StorageBucketNetworkEgressUsageSchema declares the usage-file keys backing
// the struct above, all defaulting to 0.
var StorageBucketNetworkEgressUsageSchema = []*schema.UsageItem{
	{ValueType: schema.Float64, DefaultValue: 0, Key: "same_continent"},
	{ValueType: schema.Float64, DefaultValue: 0, Key: "asia"},
	{ValueType: schema.Float64, DefaultValue: 0, Key: "worldwide"},
	{ValueType: schema.Float64, DefaultValue: 0, Key: "china"},
	{ValueType: schema.Float64, DefaultValue: 0, Key: "australia"},
}
// BuildResource assembles the cost components for Cloud Storage network
// egress: a flat same-continent component plus tiered components for each
// non-local destination group.
func (r *StorageBucketNetworkEgressUsage) BuildResource() *schema.Resource {
	// One entry per destination group, mapping the display region to the
	// pricing-API description and the usage-file key holding the quantity.
	regionsData := []*egressRegionData{
		{
			gRegion: fmt.Sprintf("%s to worldwide excluding Asia, Australia", r.PrefixName),
			apiDescription: "Download Worldwide Destinations (excluding Asia & Australia)",
			usageKey: "worldwide",
		},
		{
			gRegion: fmt.Sprintf("%s to Asia excluding China, but including Hong Kong", r.PrefixName),
			apiDescription: "Download APAC",
			usageKey: "asia",
		},
		{
			gRegion: fmt.Sprintf("%s to China excluding Hong Kong", r.PrefixName),
			apiDescription: "Download China",
			usageKey: "china",
		},
		{
			gRegion: fmt.Sprintf("%s to Australia", r.PrefixName),
			apiDescription: "Download Australia",
			usageKey: "australia",
		},
	}
	// Pricing tiers: first 1TB, next 9TB, then unlimited (0 marks the open
	// final tier).
	usageFiltersData := []*egressRegionUsageFilterData{
		{
			usageName: "first 1TB",
			usageNumber: 1024,
		},
		{
			usageName: "next 9TB",
			usageNumber: 10240,
		},
		{
			usageName: "over 10TB",
			usageNumber: 0,
		},
	}
	serviceName := "Cloud Storage"
	resource := &schema.Resource{
		Name: r.Address,
		CostComponents: []*schema.CostComponent{},
	}
	// Same continent
	var quantity *decimal.Decimal
	if r.SameContinent != nil {
		quantity = decimalPtr(decimal.NewFromFloat(*r.SameContinent))
	}
	resource.CostComponents = append(resource.CostComponents, &schema.CostComponent{
		Name: fmt.Sprintf("%s in same continent", r.PrefixName),
		Unit: "GB",
		UnitMultiplier: decimal.NewFromInt(1),
		MonthlyQuantity: quantity,
		ProductFilter: &schema.ProductFilter{
			VendorName: strPtr("gcp"),
			Region: strPtr("global"),
			// NOTE(review): filters on the literal service/description rather
			// than serviceName — presumably intentional; confirm the
			// Europe-specific description applies to all continents.
			Service: strPtr("Cloud Storage"),
			AttributeFilters: []*schema.AttributeFilter{
				{Key: "description", Value: strPtr("Networking Traffic Egress GCP Inter Region within Europe")},
			},
		},
	})
	// Expand each destination group into per-tier cost components.
	for _, regData := range regionsData {
		usageKey := regData.usageKey
		usage := GetFloatFieldValueByUsageTag(usageKey, *r)
		newCostComponents := egressStepPricingHelper(usage, usageFiltersData, regData, "", serviceName)
		resource.CostComponents = append(resource.CostComponents, newCostComponents...)
	}
	return resource
}
// ContainerRegistryNetworkEgressUsage models usage-based network egress
// for Container Registry; it is priced through Cloud Storage SKUs
// (see BuildResource), mirroring StorageBucketNetworkEgressUsage.
type ContainerRegistryNetworkEgressUsage struct {
    SameContinent *float64 `infracost_usage:"same_continent"`
    Asia          *float64 `infracost_usage:"asia"`
    Worldwide     *float64 `infracost_usage:"worldwide"`
    China         *float64 `infracost_usage:"china"`
    Australia     *float64 `infracost_usage:"australia"`
    NetworkEgressUsage
}

// ContainerRegistryNetworkEgressUsageSchema describes the accepted
// usage-file keys; all values default to 0.
var ContainerRegistryNetworkEgressUsageSchema = []*schema.UsageItem{
    {ValueType: schema.Float64, DefaultValue: 0, Key: "same_continent"},
    {ValueType: schema.Float64, DefaultValue: 0, Key: "asia"},
    {ValueType: schema.Float64, DefaultValue: 0, Key: "worldwide"},
    {ValueType: schema.Float64, DefaultValue: 0, Key: "china"},
    {ValueType: schema.Float64, DefaultValue: 0, Key: "australia"},
}
// BuildResource assembles the schema.Resource for Container Registry
// network egress. The structure and SKUs are identical to
// StorageBucketNetworkEgressUsage.BuildResource: one flat same-continent
// component plus tiered components per destination group.
func (r *ContainerRegistryNetworkEgressUsage) BuildResource() *schema.Resource {
    // Destination groups matched against the Cloud Storage "Download ..." SKUs.
    regionsData := []*egressRegionData{
        {
            gRegion:        fmt.Sprintf("%s to worldwide excluding Asia, Australia", r.PrefixName),
            apiDescription: "Download Worldwide Destinations (excluding Asia & Australia)",
            usageKey:       "worldwide",
        },
        {
            gRegion:        fmt.Sprintf("%s to Asia excluding China, but including Hong Kong", r.PrefixName),
            apiDescription: "Download APAC",
            usageKey:       "asia",
        },
        {
            gRegion:        fmt.Sprintf("%s to China excluding Hong Kong", r.PrefixName),
            apiDescription: "Download China",
            usageKey:       "china",
        },
        {
            gRegion:        fmt.Sprintf("%s to Australia", r.PrefixName),
            apiDescription: "Download Australia",
            usageKey:       "australia",
        },
    }
    // Pricing tiers: first 1 TiB, next 9 TiB, then everything above 10 TiB.
    usageFiltersData := []*egressRegionUsageFilterData{
        {
            usageName:   "first 1TB",
            usageNumber: 1024,
        },
        {
            usageName:   "next 9TB",
            usageNumber: 10240,
        },
        {
            usageName:   "over 10TB",
            usageNumber: 0,
        },
    }
    serviceName := "Cloud Storage"
    resource := &schema.Resource{
        Name:           r.Address,
        CostComponents: []*schema.CostComponent{},
    }
    // Same continent
    var quantity *decimal.Decimal
    if r.SameContinent != nil {
        quantity = decimalPtr(decimal.NewFromFloat(*r.SameContinent))
    }
    resource.CostComponents = append(resource.CostComponents, &schema.CostComponent{
        Name:            fmt.Sprintf("%s in same continent", r.PrefixName),
        Unit:            "GB",
        UnitMultiplier:  decimal.NewFromInt(1),
        MonthlyQuantity: quantity,
        ProductFilter: &schema.ProductFilter{
            VendorName: strPtr("gcp"),
            Region:     strPtr("global"),
            Service:    strPtr("Cloud Storage"),
            AttributeFilters: []*schema.AttributeFilter{
                // NOTE(review): the intra-Europe SKU appears to be used as a
                // representative rate for all same-continent traffic — confirm.
                {Key: "description", Value: strPtr("Networking Traffic Egress GCP Inter Region within Europe")},
            },
        },
    })
    // Tiered components for every destination group above.
    for _, regData := range regionsData {
        usageKey := regData.usageKey
        usage := GetFloatFieldValueByUsageTag(usageKey, *r)
        newCostComponents := egressStepPricingHelper(usage, usageFiltersData, regData, "", serviceName)
        resource.CostComponents = append(resource.CostComponents, newCostComponents...)
    }
    return resource
}
// ComputeVPNGatewayNetworkEgressUsage models inter-region egress through
// a Cloud VPN gateway, broken down by destination area
// (all amounts are monthly GB read from the usage file).
type ComputeVPNGatewayNetworkEgressUsage struct {
    SameRegion   *float64 `infracost_usage:"same_region"`
    USOrCanada   *float64 `infracost_usage:"us_or_canada"`
    Europe       *float64 `infracost_usage:"europe"`
    Asia         *float64 `infracost_usage:"asia"`
    SouthAmerica *float64 `infracost_usage:"south_america"`
    Oceania      *float64 `infracost_usage:"oceania"`
    Worldwide    *float64 `infracost_usage:"worldwide"`
    NetworkEgressUsage
}

// ComputeVPNGatewayNetworkEgressUsageSchema describes the accepted
// usage-file keys; all values default to 0.
var ComputeVPNGatewayNetworkEgressUsageSchema = []*schema.UsageItem{
    {ValueType: schema.Float64, DefaultValue: 0, Key: "same_region"},
    {ValueType: schema.Float64, DefaultValue: 0, Key: "us_or_canada"},
    {ValueType: schema.Float64, DefaultValue: 0, Key: "europe"},
    {ValueType: schema.Float64, DefaultValue: 0, Key: "asia"},
    {ValueType: schema.Float64, DefaultValue: 0, Key: "south_america"},
    {ValueType: schema.Float64, DefaultValue: 0, Key: "oceania"},
    {ValueType: schema.Float64, DefaultValue: 0, Key: "worldwide"},
}
// BuildResource assembles one cost component per destination area for
// Cloud VPN inter-region egress. Each entry pins a representative
// fixedRegion for the price lookup because the SKUs are region-specific.
func (r *ComputeVPNGatewayNetworkEgressUsage) BuildResource() *schema.Resource {
    regionsData := []*egressRegionData{
        {
            gRegion: fmt.Sprintf("%s within the same region", r.PrefixName),
            // There is no same region option in APIs, so we always take this price in us-central1 region.
            apiDescription: "Network Vpn Inter Region Egress from Americas to Americas",
            usageKey:       "same_region",
            fixedRegion:    "us-central1",
        },
        {
            gRegion:        fmt.Sprintf("%s within the US or Canada", r.PrefixName),
            apiDescription: "Network Vpn Inter Region Egress from Americas to Montreal",
            usageKey:       "us_or_canada",
            fixedRegion:    "us-central1",
        },
        {
            gRegion:        fmt.Sprintf("%s within Europe", r.PrefixName),
            apiDescription: "Network Vpn Inter Region Egress from EMEA to EMEA",
            usageKey:       "europe",
            fixedRegion:    "europe-west1",
        },
        {
            gRegion:        fmt.Sprintf("%s within Asia", r.PrefixName),
            apiDescription: "Network Vpn Inter Region Egress from Japan to Seoul",
            usageKey:       "asia",
            fixedRegion:    "asia-northeast1",
        },
        {
            gRegion:        fmt.Sprintf("%s within South America", r.PrefixName),
            apiDescription: "Network Vpn Inter Region Egress from Sao Paulo to Sao Paulo",
            usageKey:       "south_america",
            fixedRegion:    "southamerica-east1",
        },
        {
            gRegion:        fmt.Sprintf("%s to/from Indonesia and Oceania", r.PrefixName),
            apiDescription: "Network Vpn Inter Region Egress from Sydney to Jakarta",
            usageKey:       "oceania",
            fixedRegion:    "australia-southeast1",
        },
        {
            gRegion:        fmt.Sprintf("%s between continents (excludes Oceania)", r.PrefixName),
            apiDescription: "Network Vpn Inter Region Egress from Finland to Singapore",
            usageKey:       "worldwide",
            fixedRegion:    "europe-north1",
        },
    }
    // Flat-rate pricing: a single unbounded tier (usageNumber == 0).
    usageFiltersData := []*egressRegionUsageFilterData{
        {
            usageNumber: 0,
        },
    }
    defaultAPIRegionName := r.Region
    serviceName := "Compute Engine"
    resource := &schema.Resource{
        Name:           r.Address,
        CostComponents: []*schema.CostComponent{},
    }
    for _, regData := range regionsData {
        usageKey := regData.usageKey
        usage := GetFloatFieldValueByUsageTag(usageKey, *r)
        newCostComponents := egressStepPricingHelper(usage, usageFiltersData, regData, defaultAPIRegionName, serviceName)
        resource.CostComponents = append(resource.CostComponents, newCostComponents...)
    }
    return resource
}
// ComputeExternalVPNGatewayNetworkEgressUsage models internet egress
// through an external (non-Google) VPN gateway, broken down by
// destination group (all amounts are monthly GB from the usage file).
type ComputeExternalVPNGatewayNetworkEgressUsage struct {
    Asia      *float64 `infracost_usage:"asia"`
    Worldwide *float64 `infracost_usage:"worldwide"`
    China     *float64 `infracost_usage:"china"`
    Australia *float64 `infracost_usage:"australia"`
    NetworkEgressUsage
}

// ComputeExternalVPNGatewayNetworkEgressUsageSchema describes the
// accepted usage-file keys; all values default to 0.
var ComputeExternalVPNGatewayNetworkEgressUsageSchema = []*schema.UsageItem{
    {ValueType: schema.Float64, DefaultValue: 0, Key: "asia"},
    {ValueType: schema.Float64, DefaultValue: 0, Key: "worldwide"},
    {ValueType: schema.Float64, DefaultValue: 0, Key: "china"},
    {ValueType: schema.Float64, DefaultValue: 0, Key: "australia"},
}
// BuildResource assembles tiered cost components for external VPN
// gateway internet egress. SKU descriptions are matched by regex since
// the source region varies.
//
// NOTE(review): the struct declares an "asia" usage key, but no region
// entry below consumes it — confirm whether an APAC group is missing.
func (r *ComputeExternalVPNGatewayNetworkEgressUsage) BuildResource() *schema.Resource {
    regionsData := []*egressRegionData{
        {
            gRegion: fmt.Sprintf("%s to worldwide excluding China, Australia but including Hong Kong", r.PrefixName),
            // There is no worldwide option in APIs, so we take a random region.
            apiDescriptionRegex: "/Vpn Internet Egress .* to Americas/",
            usageKey:            "worldwide",
        },
        {
            gRegion:             fmt.Sprintf("%s to China excluding Hong Kong", r.PrefixName),
            apiDescriptionRegex: "/Vpn Internet Egress .* to China/",
            usageKey:            "china",
        },
        {
            gRegion:             fmt.Sprintf("%s to Australia", r.PrefixName),
            apiDescriptionRegex: "/Vpn Internet Egress .* to Australia/",
            usageKey:            "australia",
        },
    }
    // Pricing tiers: first 1 TiB, next 9 TiB, then everything above 10 TiB.
    usageFiltersData := []*egressRegionUsageFilterData{
        {
            usageName:   "first 1TB",
            usageNumber: 1024,
        },
        {
            usageName:   "next 9TB",
            usageNumber: 10240,
        },
        {
            usageName:   "over 10TB",
            usageNumber: 0,
        },
    }
    defaultAPIRegionName := r.Region
    serviceName := "Compute Engine"
    resource := &schema.Resource{
        Name:           r.Address,
        CostComponents: []*schema.CostComponent{},
    }
    for _, regData := range regionsData {
        usageKey := regData.usageKey
        usage := GetFloatFieldValueByUsageTag(usageKey, *r)
        newCostComponents := egressStepPricingHelper(usage, usageFiltersData, regData, defaultAPIRegionName, serviceName)
        resource.CostComponents = append(resource.CostComponents, newCostComponents...)
    }
    return resource
}
// egressRegionData describes one destination group for egress pricing.
type egressRegionData struct {
    gRegion             string // gRegion is the name used in pricing pages that is more human friendly.
    apiDescription      string // exact SKU description matched in the pricing API
    apiDescriptionRegex string // regex alternative to apiDescription; takes precedence when non-empty
    usageKey            string // usage-file key holding the monthly GB for this group
    fixedRegion         string // fixedRegion is the region used in pricing API.
}

// egressRegionUsageFilterData describes one pricing tier: an optional
// label and the tier's upper bound in GB (0 means unbounded/last tier).
type egressRegionUsageFilterData struct {
    usageNumber float64
    usageName   string
}
// egressStepPricingHelper builds one cost component per pricing tier for
// a destination group. usage is the total monthly GB; usageFiltersData
// lists tier upper bounds in ascending order with 0 marking the final,
// unbounded tier. The first tier's component is always emitted (even with
// a nil quantity) so a price is shown; later tiers are emitted only when
// usage reaches them.
func egressStepPricingHelper(usage float64, usageFiltersData []*egressRegionUsageFilterData, regData *egressRegionData, defaultAPIRegionName, serviceName string) []*schema.CostComponent {
    costComponents := make([]*schema.CostComponent, 0)
    // TODO: Reformat to use tier helpers.
    var used float64
    var lastEndUsageAmount float64
    for idx, usageFilter := range usageFiltersData {
        usageName := usageFilter.usageName
        endUsageAmount := usageFilter.usageNumber
        var quantity *decimal.Decimal
        if endUsageAmount != 0 && usage >= endUsageAmount {
            // Usage fills this whole tier. The tier size is the distance from
            // the previous tier's upper bound. (The original computed
            // `endUsageAmount - used`, which happens to agree for the first
            // two tiers but is wrong for any third bounded tier.)
            used = endUsageAmount - lastEndUsageAmount
            lastEndUsageAmount = endUsageAmount
            quantity = decimalPtr(decimal.NewFromFloat(used))
        } else if usage > lastEndUsageAmount {
            // Usage ends inside this tier: charge only the remainder.
            used = usage - lastEndUsageAmount
            lastEndUsageAmount = endUsageAmount
            quantity = decimalPtr(decimal.NewFromFloat(used))
        }
        // Price filter bound: empty string selects the unbounded tier.
        // (Renamed from `usageFilter`, which shadowed the loop variable.)
        var endUsageFilter string
        if endUsageAmount != 0 {
            endUsageFilter = fmt.Sprint(endUsageAmount)
        } else {
            endUsageFilter = ""
        }
        if quantity == nil && idx > 0 {
            continue
        }
        // A fixed representative region overrides the resource's own region.
        var apiRegion *string
        if regData.fixedRegion != "" {
            apiRegion = strPtr(regData.fixedRegion)
        } else if defaultAPIRegionName != "" {
            apiRegion = strPtr(defaultAPIRegionName)
        }
        var name string
        if usageName != "" {
            name = fmt.Sprintf("%v (%v)", regData.gRegion, usageName)
        } else {
            name = fmt.Sprintf("%v", regData.gRegion)
        }
        // Match the SKU by regex when provided, otherwise by exact description.
        attributeFilters := make([]*schema.AttributeFilter, 0)
        if regData.apiDescriptionRegex != "" {
            attributeFilters = append(attributeFilters, &schema.AttributeFilter{Key: "description", ValueRegex: strPtr(regData.apiDescriptionRegex)})
        } else {
            attributeFilters = append(attributeFilters, &schema.AttributeFilter{Key: "description", Value: strPtr(regData.apiDescription)})
        }
        costComponents = append(costComponents, &schema.CostComponent{
            Name:            name,
            Unit:            "GB",
            UnitMultiplier:  decimal.NewFromInt(1),
            MonthlyQuantity: quantity,
            ProductFilter: &schema.ProductFilter{
                Region:           apiRegion,
                VendorName:       strPtr("gcp"),
                Service:          strPtr(serviceName),
                AttributeFilters: attributeFilters,
            },
            PriceFilter: &schema.PriceFilter{
                EndUsageAmount: strPtr(endUsageFilter),
            },
        })
    }
    return costComponents
} | internal/resources/google/network_egress_internal.go | 0.516595 | 0.440289 | network_egress_internal.go | starcoder |
package parsers
import (
"fmt"
"sort"
"strings"
)
// IntRange is a representation of a range that has a lower and upper bound (Min, Max).
// Min and Max are elements of the range, i.e. both bounds are inclusive.
type IntRange struct {
    Min int
    Max int
}

// String renders the range as "[Min:Max]".
func (ir *IntRange) String() string {
    return fmt.Sprintf("[%v:%v]", ir.Min, ir.Max)
}

// Contains reports whether i lies within the inclusive bounds.
func (ir *IntRange) Contains(i int) bool {
    return i >= ir.Min && i <= ir.Max
}

// Below reports whether the entire range lies below i.
func (ir *IntRange) Below(i int) bool {
    return i > ir.Max
}

// Above reports whether the entire range lies above i.
func (ir *IntRange) Above(i int) bool {
    return ir.Min > i
}
// sorting

// byIntRange orders ranges ascending by Min, with Max as the tiebreaker.
type byIntRange []IntRange

// Len implements sort.Interface.
func (a byIntRange) Len() int { return len(a) }

// Swap implements sort.Interface.
func (a byIntRange) Swap(i, j int) { a[i], a[j] = a[j], a[i] }

// Less implements sort.Interface.
func (a byIntRange) Less(i, j int) bool {
    if a[i].Min != a[j].Min {
        return a[i].Min < a[j].Min
    }
    return a[i].Max < a[j].Max
}
// DistinctRangeListInt holds a sorted list of pairwise disjoint integer ranges.
type DistinctRangeListInt struct {
    r []IntRange
}

// Contains reports whether i lies inside one of the stored ranges.
func (d *DistinctRangeListInt) Contains(i int) bool {
    return binarySearchRangeInt(d.r, i)
}

// String joins the textual form of all ranges with ", ".
func (d *DistinctRangeListInt) String() string {
    parts := make([]string, len(d.r))
    for idx, rng := range d.r {
        parts[idx] = rng.String()
    }
    return strings.Join(parts, ", ")
}
// NewDistinctRangeListInt builds a DistinctRangeListInt from a flat list
// of (min, max) pairs. Pairs given in reversed order are normalized,
// overlapping ranges are merged, and the result is kept sorted. It
// panics when an odd number of values is supplied.
func NewDistinctRangeListInt(minMaxRanges ...int) DistinctRangeListInt {
    if len(minMaxRanges)%2 != 0 {
        panic(fmt.Errorf("passed parameter list 'minMaxRanges' must contain an even number of parameters"))
    }
    // Normalize each pair into an IntRange with Min <= Max.
    ranges := make([]IntRange, 0, len(minMaxRanges)/2)
    for i := 0; i+1 < len(minMaxRanges); i += 2 {
        lo, hi := minMaxRanges[i], minMaxRanges[i+1]
        if hi < lo {
            lo, hi = hi, lo
        }
        ranges = append(ranges, IntRange{Min: lo, Max: hi})
    }
    sort.Sort(byIntRange(ranges))
    // Sweep the sorted ranges, merging any range that overlaps the last
    // emitted one; non-overlapping ranges start a new entry.
    merged := make([]IntRange, 0, len(ranges)/2)
    for _, cur := range ranges {
        if len(merged) == 0 {
            merged = append(merged, cur)
            continue
        }
        last := &merged[len(merged)-1]
        if last.Max >= cur.Min {
            if cur.Max > last.Max {
                last.Max = cur.Max
            }
            continue
        }
        merged = append(merged, cur)
    }
    return DistinctRangeListInt{merged}
}
// binarySearchRangeInt requires a sorted list of ranges
func binarySearchRangeInt(a []IntRange, x int) bool {
start := 0
end := len(a) - 1
for start <= end {
mid := (start + end) / 2
if a[mid].Contains(x) {
return true
} else if a[mid].Below(x) {
start = mid + 1
} else if a[mid].Above(x) {
end = mid - 1
}
}
return false
} | parsers/helpers_ranges.go | 0.684264 | 0.499695 | helpers_ranges.go | starcoder |
package graph
import (
"bytes"
"errors"
"fmt"
"log"
"math/rand"
"strconv"
"time"
)
// Graph holds the parameters for random graph generation: how many nodes
// and how many edges the generated graph should contain.
type Graph struct {
    nodes int
    edges int
}

// New creates a Graph with the given node and edge counts.
func New(numberOfNodes, numberOfEdges int) Graph {
    return Graph{nodes: numberOfNodes, edges: numberOfEdges}
}

// NumberOfNodes returns the configured node count.
func (graph Graph) NumberOfNodes() int { return graph.nodes }

// NumberOfEdges returns the configured edge count.
func (graph Graph) NumberOfEdges() int { return graph.edges }

// SetNumberOfNodes updates the node count. Negative values are not
// validated here; the generator functions reject them.
func (graph *Graph) SetNumberOfNodes(numberOfNodes int) {
    graph.nodes = numberOfNodes
}

// SetNumberOfEdges updates the edge count. Negative values are not
// validated here; the generator functions reject them.
func (graph *Graph) SetNumberOfEdges(numberOfEdges int) {
    graph.edges = numberOfEdges
}

// Clear resets both counts to zero.
func (graph *Graph) Clear() {
    graph.nodes = 0
    graph.edges = 0
}

// String renders the parameters as "{ nodes: N, edges: M }".
func (graph Graph) String() string {
    nodes := strconv.Itoa(graph.nodes)
    edges := strconv.Itoa(graph.edges)
    return "{ nodes: " + nodes + ", edges: " + edges + " }"
}
// UndirectedGraph generates a random undirected graph with the
// configured node and edge counts, returned as an adjacency matrix
// (adjacencyMatrix[i][j] == 1 means an edge; entries are mirrored).
// An error is returned for zero or negative node/edge counts.
//
// NOTE(review): if graph.edges exceeds the maximum possible number of
// distinct edges (nodes*(nodes-1)/2), the trailing fill loop can never
// reach the requested count and will not terminate.
func (graph Graph) UndirectedGraph() (adjacencyMatrix [][]int, err error) {
    if graph.nodes == 0 || graph.edges == 0 {
        return nil, errors.New("Nodes and edges must not be 0.")
    }
    if graph.nodes < 0 {
        // Fixed error text: previously read "Node graph can be generated".
        return nil, errors.New("No graph can be generated with a negative amount of nodes.")
    }
    if graph.edges < 0 {
        return nil, errors.New("No graph can be generated with a negative amount of edges.")
    }
    actualNumberOfEdges := 0
    randomObject := rand.New(rand.NewSource(time.Now().UnixNano()))
    var destinationNode int
    adjacencyMatrix = make([][]int, graph.nodes)
    // The matrix must be fully allocated up front because each undirected
    // edge is written in both directions (i->j and j->i).
    for index := range adjacencyMatrix {
        adjacencyMatrix[index] = make([]int, graph.nodes)
    }
    // First pass: connect every node (except node 0) to a random earlier node.
    for index := range adjacencyMatrix {
        if index == 0 {
            continue
        }
        destinationNode = randomObject.Intn(index)
        if _, err := graph.isEdgePossible(index, destinationNode, adjacencyMatrix); err != nil {
            //log.Println(err)
            continue
        }
        if err := graph.addUndirectedEdge(index, destinationNode, adjacencyMatrix); err != nil {
            log.Fatalln(err)
        }
        actualNumberOfEdges++
    }
    // Second pass: add random edges until the requested count is reached.
    for actualNumberOfEdges < graph.edges {
        sourceNode := randomObject.Intn(graph.nodes)
        for destinationNode := range adjacencyMatrix[sourceNode] {
            if possible, _ := graph.isEdgePossible(sourceNode, destinationNode, adjacencyMatrix); possible {
                if err := graph.addUndirectedEdge(sourceNode, destinationNode, adjacencyMatrix); err != nil {
                    log.Fatalln(err)
                }
                actualNumberOfEdges++
            }
        }
    }
    return
}
// UndirectedGraphAsDotLanguageString renders a freshly generated
// undirected graph in Graphviz DOT syntax ("graph G { ... }").
// Node indices are shifted to 1-based for the output. Each edge is
// cleared from both triangles of the matrix after printing so it is
// emitted only once.
func (graph Graph) UndirectedGraphAsDotLanguageString() (string, error) {
    stringBuffer := bytes.NewBufferString("graph G {\n")
    adjacencyMatrix, err := graph.UndirectedGraph()
    if err != nil {
        return "", err
    }
    for fromIndex := range adjacencyMatrix {
        for toIndex := range adjacencyMatrix[fromIndex] {
            if adjacencyMatrix[fromIndex][toIndex] == 1 {
                from := fromIndex + 1
                to := toIndex + 1
                stringBuffer.WriteString(fmt.Sprintf("\t%d -- %d;\n", from, to))
                //fmt.Printf("FROM: %d TO: %d\n", fromIndex, toIndex)
                // Zero both directions so the mirrored entry is not re-emitted.
                adjacencyMatrix[fromIndex][toIndex] = 0
                adjacencyMatrix[toIndex][fromIndex] = 0
            }
        }
    }
    stringBuffer.WriteString("}")
    return stringBuffer.String(), nil
}
// DirectedGraph generates a random directed graph with the configured
// node and edge counts, returned as an adjacency matrix
// (adjacencyMatrix[i][j] == 1 means an edge i -> j). An error is
// returned for zero or negative node/edge counts.
//
// NOTE(review): as in UndirectedGraph, an edge count above the maximum
// possible number of distinct edges makes the fill loop non-terminating.
func (graph Graph) DirectedGraph() (adjacencyMatrix [][]int, err error) {
    if graph.nodes == 0 || graph.edges == 0 {
        return nil, errors.New("Nodes and edges must not be 0.")
    }
    if graph.nodes < 0 {
        // Fixed error text: previously read "Node graph can be generated".
        return nil, errors.New("No graph can be generated with a negative amount of nodes.")
    }
    if graph.edges < 0 {
        return nil, errors.New("No graph can be generated with a negative amount of edges.")
    }
    actualNumberOfEdges := 0
    randomObject := rand.New(rand.NewSource(time.Now().UnixNano()))
    var destinationNode int
    adjacencyMatrix = make([][]int, graph.nodes)
    adjacencyMatrix = adjacencyMatrix[:graph.nodes]
    for index := range adjacencyMatrix {
        adjacencyMatrix[index] = make([]int, graph.nodes)
    }
    // First pass: give every node (except node 0) one outgoing edge to a
    // random earlier node.
    for index := range adjacencyMatrix {
        if index == 0 {
            continue
        }
        destinationNode = randomObject.Intn(index)
        if _, err := graph.isEdgePossible(index, destinationNode, adjacencyMatrix); err != nil {
            //log.Println(err)
            continue
        }
        if err := graph.addDirectedEdge(index, destinationNode, adjacencyMatrix); err != nil {
            log.Fatalln(err)
        }
        actualNumberOfEdges++
    }
    // Second pass: add random edges until the requested count is reached.
    // The original condition was `<=`, which overshot the requested edge
    // count by at least one; `<` matches UndirectedGraph.
    for actualNumberOfEdges < graph.edges {
        sourceNode := randomObject.Intn(graph.nodes)
        for destinationNode := range adjacencyMatrix[sourceNode] {
            if possible, _ := graph.isEdgePossible(sourceNode, destinationNode, adjacencyMatrix); possible {
                if err := graph.addDirectedEdge(sourceNode, destinationNode, adjacencyMatrix); err != nil {
                    log.Fatalln(err)
                }
                actualNumberOfEdges++
            }
        }
    }
    return
}
// DirectedGraphAsDotLanguageString renders a freshly generated directed
// graph in Graphviz DOT syntax ("digraph G { ... }"). Node indices are
// shifted to 1-based for the output; each printed edge is cleared from
// the matrix so it is emitted only once.
func (graph Graph) DirectedGraphAsDotLanguageString() (string, error) {
    adjacencyMatrix, err := graph.DirectedGraph()
    if err != nil {
        return "", err
    }
    stringBuffer := bytes.NewBufferString("digraph G {\n")
    for fromIndex := range adjacencyMatrix {
        for toIndex := range adjacencyMatrix[fromIndex] {
            if adjacencyMatrix[fromIndex][toIndex] == 1 {
                from := fromIndex + 1
                to := toIndex + 1
                stringBuffer.WriteString(fmt.Sprintf("\t%d -> %d;\n", from, to))
                //fmt.Printf("FROM: %d TO: %d\n", fromIndex, toIndex)
                adjacencyMatrix[fromIndex][toIndex] = 0
            }
        }
    }
    stringBuffer.WriteString("}")
    return stringBuffer.String(), nil
}
// addUndirectedEdge records the edge in both directions after
// re-validating it with isEdgePossible.
func (graph *Graph) addUndirectedEdge(source, destination int, adjacencyMatrix [][]int) error {
    if _, err := graph.isEdgePossible(source, destination, adjacencyMatrix); err != nil {
        return err
    }
    adjacencyMatrix[source][destination] = 1
    adjacencyMatrix[destination][source] = 1
    return nil
}

// addDirectedEdge records the edge source -> destination only, after
// re-validating it with isEdgePossible.
func (graph *Graph) addDirectedEdge(source, destination int, adjacencyMatrix [][]int) error {
    if _, err := graph.isEdgePossible(source, destination, adjacencyMatrix); err != nil {
        return err
    }
    adjacencyMatrix[source][destination] = 1
    return nil
}
// isEdgePossible reports whether an edge source -> destination can be
// added to adjacencyMatrix. It returns (false, error) for out-of-range
// indices, self-loops, and edges that already exist.
func (graph Graph) isEdgePossible(source, destination int, adjacencyMatrix [][]int) (bool, error) {
    if source < 0 {
        return false, errors.New("Source may not be negative.")
    }
    if destination < 0 {
        return false, errors.New("Destination may not be negative.")
    }
    // Use >= so that an index equal to len() is rejected too; the previous
    // `>` check let source == len(adjacencyMatrix) through, which would
    // panic on the matrix access below.
    if source >= len(adjacencyMatrix) {
        return false, errors.New("Source may not be larger than the array.")
    }
    if destination >= len(adjacencyMatrix) {
        return false, errors.New("Destination may not be larger than the array.")
    }
    if source == destination {
        return false, errors.New("Source and destination have to be different.")
    }
    // Same off-by-one fix for the inner dimension: a row of length n only
    // supports indices 0..n-1.
    for index := range adjacencyMatrix {
        if len(adjacencyMatrix[index]) <= source {
            return false, errors.New("Source may not be larger than the second dimension of the array.")
        }
        if len(adjacencyMatrix[index]) <= destination {
            return false, errors.New("Destination may not be larger than the second dimension of the array.")
        }
    }
    if adjacencyMatrix[source][destination] == 1 {
        // fmt.Errorf replaces errors.New(fmt.Sprintf(...)).
        return false, fmt.Errorf("A edge already exists for \"%d -- %d\"", source, destination)
    }
    return true, nil
} | graph/graph.go | 0.656768 | 0.521227 | graph.go | starcoder |
package raytrace
import (
"math"
"math/rand"
"github.com/markzuber/zgotrace/raytrace/vectorextensions"
"github.com/go-gl/mathgl/mgl64"
)
// Lattice tables shared by all noise functions; populated once in init.
var ranFloat [256]float64     // random scalars in [0, 1) for value noise
var ranVector [256]mgl64.Vec3 // random unit gradient vectors for gradient noise
var permX [256]int            // permutation tables used to hash lattice coordinates
var permY [256]int
var permZ [256]int

// perlinGenerate fills a table with 256 random floats in [0, 1).
func perlinGenerate() [256]float64 {
    var p [256]float64
    for i := 0; i < 256; i++ {
        p[i] = rand.Float64()
    }
    return p
}

// perlinVectorGenerate fills a table with 256 random unit vectors whose
// components are drawn uniformly from [-1, 1) before normalization.
func perlinVectorGenerate() [256]mgl64.Vec3 {
    var p [256]mgl64.Vec3
    for i := 0; i < 256; i++ {
        p[i] = vectorextensions.ToUnitVector(mgl64.Vec3{-1.0 + (2.0 * rand.Float64()), -1.0 + (2.0 * rand.Float64()), -1.0 + (2.0 * rand.Float64())})
    }
    return p
}
// perlinGeneratePerm returns a random permutation of the values 0..255.
//
// Bug fix: the previous implementation called permute(p), but Go arrays
// are passed by value, so permute shuffled a local copy and the table
// returned here was always the identity permutation. The Fisher–Yates
// shuffle is now performed in place.
func perlinGeneratePerm() [256]int {
    var p [256]int
    for i := 0; i < 256; i++ {
        p[i] = i
    }
    for i := len(p) - 1; i > 0; i-- {
        target := rand.Intn(i + 1)
        p[i], p[target] = p[target], p[i]
    }
    return p
}
// permute shuffles p with a Fisher–Yates pass.
//
// NOTE(review): Go arrays are value types, so the [256]int parameter is
// copied at the call site — this function shuffles the copy and the
// caller's array is left unchanged. Callers that need the shuffle to
// take effect must pass a pointer or a slice instead.
func permute(p [256]int) {
    for i := len(p) - 1; i > 0; i-- {
        target := int(rand.Float64() * float64(i+1))
        tmp := p[i]
        p[i] = p[target]
        p[target] = tmp
    }
}
// init populates the shared noise tables once at package load time.
func init() {
    ranFloat = perlinGenerate()
    ranVector = perlinVectorGenerate()
    permX = perlinGeneratePerm()
    permY = perlinGeneratePerm()
    permZ = perlinGeneratePerm()
}
// PerlinNoise returns value noise for point p using the ranFloat lattice
// table. With interpolate set, the eight surrounding lattice values are
// blended with smoothstep-weighted trilinear interpolation; otherwise
// the raw hashed value of a scaled lattice cell is returned.
func PerlinNoise(p mgl64.Vec3, interpolate bool) float64 {
    // Fractional position of p inside its containing lattice cell.
    u := p.X() - math.Floor(p.X())
    v := p.Y() - math.Floor(p.Y())
    w := p.Z() - math.Floor(p.Z())
    if interpolate {
        i := int(math.Floor(p.X()))
        j := int(math.Floor(p.Y()))
        k := int(math.Floor(p.Z()))
        // Smoothstep (3t^2 - 2t^3) remapping softens the grid artifacts a
        // plain linear blend would show.
        u = u * u * (3.0 - (2.0 * u))
        v = v * v * (3.0 - (2.0 * v))
        w = w * w * (3.0 - (2.0 * w))
        var o [2][2][2]float64
        // Gather the 8 corner values, hashing each lattice coordinate
        // through the permutation tables.
        for di := 0; di < 2; di++ {
            for dj := 0; dj < 2; dj++ {
                for dk := 0; dk < 2; dk++ {
                    o[di][dj][dk] = ranFloat[permX[(i+di)&255]^permY[(j+dj)&255]^permZ[(k+dk)&255]]
                }
            }
        }
        return trilinearInterpolate(o, u, v, w)
    } else {
        // Blocky variant: look up a single hashed value at 4x frequency.
        i := int(4.0*p.X()) & 255
        j := int(4.0*p.Y()) & 255
        k := int(4.0*p.Z()) & 255
        return ranFloat[permX[i]^permY[j]^permZ[k]]
    }
}
// PerlinVectorNoise returns gradient (classic Perlin) noise at p using
// the ranVector gradient table.
func PerlinVectorNoise(p mgl64.Vec3) float64 {
    // Fractional position of p inside its containing lattice cell.
    u := p.X() - math.Floor(p.X())
    v := p.Y() - math.Floor(p.Y())
    w := p.Z() - math.Floor(p.Z())
    i := int(math.Floor(p.X()))
    j := int(math.Floor(p.Y()))
    k := int(math.Floor(p.Z()))
    var c [2][2][2]mgl64.Vec3
    // Gather the 8 corner gradient vectors, hashing each lattice
    // coordinate through the permutation tables.
    for di := 0; di < 2; di++ {
        for dj := 0; dj < 2; dj++ {
            for dk := 0; dk < 2; dk++ {
                c[di][dj][dk] = ranVector[permX[(i+di)&255]^permY[(j+dj)&255]^permZ[(k+dk)&255]]
            }
        }
    }
    return perlinVectorInterpolate(c, u, v, w)
}

// PerlinVectorTurbulence sums octaves of gradient noise with the
// default depth of 7.
func PerlinVectorTurbulence(p mgl64.Vec3) float64 {
    return PerlinVectorTurbulenceCustomDepth(p, 7)
}

// PerlinVectorTurbulenceCustomDepth sums depth octaves of gradient
// noise, halving the weight and doubling the frequency per octave, and
// returns the absolute value of the sum.
func PerlinVectorTurbulenceCustomDepth(p mgl64.Vec3, depth int) float64 {
    accum := 0.0
    weight := 1.0
    for i := 0; i < depth; i++ {
        accum += weight * PerlinVectorNoise(p)
        weight *= 0.5
        p = vectorextensions.MulScalar(p, 2.0)
    }
    return math.Abs(accum)
}
// perlinVectorInterpolate blends the 8 corner gradients c with
// smoothstep-remapped weights: each corner contributes the dot product
// of its gradient with the offset vector from that corner to the sample
// point, weighted trilinearly.
func perlinVectorInterpolate(c [2][2][2]mgl64.Vec3, u float64, v float64, w float64) float64 {
    // Smoothstep (3t^2 - 2t^3) remapping of the interpolation weights.
    uu := u * u * (3.0 - (2.0 * u))
    vv := v * v * (3.0 - (2.0 * v))
    ww := w * w * (3.0 - (2.0 * w))
    accum := 0.0
    for i := 0; i < 2; i++ {
        dubi := float64(i)
        for j := 0; j < 2; j++ {
            dubj := float64(j)
            for k := 0; k < 2; k++ {
                dubk := float64(k)
                // Offset from this lattice corner to the sample point.
                weightVec := mgl64.Vec3{u - dubi, v - dubj, w - dubk}
                accum += ((dubi * uu) + ((1.0 - dubi) * (1.0 - uu))) *
                    ((dubj * vv) + ((1.0 - dubj) * (1.0 - vv))) *
                    ((dubk * ww) + ((1.0 - dubk) * (1.0 - ww))) *
                    c[i][j][k].Dot(weightVec)
            }
        }
    }
    return accum
}
// trilinearInterpolate blends the 8 corner values o with weights
// u, v, w in [0, 1] — the standard trilinear lerp written as a sum
// over corners.
func trilinearInterpolate(o [2][2][2]float64, u float64, v float64, w float64) float64 {
    accum := 0.0
    for i := 0; i < 2; i++ {
        dubi := float64(i)
        for j := 0; j < 2; j++ {
            dubj := float64(j)
            for k := 0; k < 2; k++ {
                dubk := float64(k)
                // Per-axis factor is t for the far corner, (1-t) for the near one.
                accum += ((dubi * u) + ((1.0 - dubi) * (1.0 - u))) *
                    ((dubj * v) + ((1.0 - dubj) * (1.0 - v))) *
                    ((dubk * w) + ((1.0 - dubk) * (1.0 - w))) * o[i][j][k]
            }
        }
    }
    return accum
} | raytrace/perlin.go | 0.569134 | 0.400837 | perlin.go | starcoder |
package simplify
// Matrix is a 4x4 float64 matrix used to carry the quadric error metric
// accumulated for a vertex; field xRC holds row R, column C.
type Matrix struct {
    x00, x01, x02, x03 float64
    x10, x11, x12, x13 float64
    x20, x21, x22, x23 float64
    x30, x31, x32, x33 float64
}

// QuadricError evaluates the quadric form v'*Q*v for the homogeneous
// point (v.X, v.Y, v.Z, 1), i.e. the error of placing a vertex at v.
func (a Matrix) QuadricError(v Vector) float64 {
    return (v.X*a.x00*v.X + v.Y*a.x10*v.X + v.Z*a.x20*v.X + a.x30*v.X +
        v.X*a.x01*v.Y + v.Y*a.x11*v.Y + v.Z*a.x21*v.Y + a.x31*v.Y +
        v.X*a.x02*v.Z + v.Y*a.x12*v.Z + v.Z*a.x22*v.Z + a.x32*v.Z +
        v.X*a.x03 + v.Y*a.x13 + v.Z*a.x23 + a.x33)
}

// QuadricVector returns the position minimizing this quadric error: the
// bottom row is replaced with (0, 0, 0, 1) and the resulting affine
// system is solved by inverting the matrix and transforming the origin.
func (a Matrix) QuadricVector() Vector {
    b := Matrix{
        a.x00, a.x01, a.x02, a.x03,
        a.x10, a.x11, a.x12, a.x13,
        a.x20, a.x21, a.x22, a.x23,
        0, 0, 0, 1,
    }
    return b.Inverse().MulPosition(Vector{})
}
// Add returns the element-wise sum of a and b.
//
// NOTE(review): the positional literal fills fields row by row
// (x00, x01, x02, x03, ...), while the summed terms here are ordered
// column-first (a.x00+b.x00, a.x10+b.x10, ...), so the result is the
// transpose of the sum. Quadric matrices are symmetric, which would make
// this harmless, but confirm before reusing for general matrices.
func (a Matrix) Add(b Matrix) Matrix {
    return Matrix{
        a.x00 + b.x00, a.x10 + b.x10, a.x20 + b.x20, a.x30 + b.x30,
        a.x01 + b.x01, a.x11 + b.x11, a.x21 + b.x21, a.x31 + b.x31,
        a.x02 + b.x02, a.x12 + b.x12, a.x22 + b.x22, a.x32 + b.x32,
        a.x03 + b.x03, a.x13 + b.x13, a.x23 + b.x23, a.x33 + b.x33,
    }
}

// MulPosition applies the affine transform a to the point b, treating
// the implicit homogeneous w component as 1.
func (a Matrix) MulPosition(b Vector) Vector {
    x := a.x00*b.X + a.x01*b.Y + a.x02*b.Z + a.x03
    y := a.x10*b.X + a.x11*b.Y + a.x12*b.Z + a.x13
    z := a.x20*b.X + a.x21*b.Y + a.x22*b.Z + a.x23
    return Vector{x, y, z}
}
// Determinant computes the 4x4 determinant via the fully expanded sum of
// 24 signed products (Leibniz formula).
func (a Matrix) Determinant() float64 {
    return (a.x00*a.x11*a.x22*a.x33 - a.x00*a.x11*a.x23*a.x32 +
        a.x00*a.x12*a.x23*a.x31 - a.x00*a.x12*a.x21*a.x33 +
        a.x00*a.x13*a.x21*a.x32 - a.x00*a.x13*a.x22*a.x31 -
        a.x01*a.x12*a.x23*a.x30 + a.x01*a.x12*a.x20*a.x33 -
        a.x01*a.x13*a.x20*a.x32 + a.x01*a.x13*a.x22*a.x30 -
        a.x01*a.x10*a.x22*a.x33 + a.x01*a.x10*a.x23*a.x32 +
        a.x02*a.x13*a.x20*a.x31 - a.x02*a.x13*a.x21*a.x30 +
        a.x02*a.x10*a.x21*a.x33 - a.x02*a.x10*a.x23*a.x31 +
        a.x02*a.x11*a.x23*a.x30 - a.x02*a.x11*a.x20*a.x33 -
        a.x03*a.x10*a.x21*a.x32 + a.x03*a.x10*a.x22*a.x31 -
        a.x03*a.x11*a.x22*a.x30 + a.x03*a.x11*a.x20*a.x32 -
        a.x03*a.x12*a.x20*a.x31 + a.x03*a.x12*a.x21*a.x30)
}
// Inverse returns the inverse of a, computed as the classical adjugate
// scaled by 1/Determinant. A singular matrix (zero determinant) yields
// Inf/NaN entries rather than an error.
func (a Matrix) Inverse() Matrix {
    m := Matrix{}
    r := 1 / a.Determinant()
    m.x00 = (a.x12*a.x23*a.x31 - a.x13*a.x22*a.x31 + a.x13*a.x21*a.x32 - a.x11*a.x23*a.x32 - a.x12*a.x21*a.x33 + a.x11*a.x22*a.x33) * r
    m.x01 = (a.x03*a.x22*a.x31 - a.x02*a.x23*a.x31 - a.x03*a.x21*a.x32 + a.x01*a.x23*a.x32 + a.x02*a.x21*a.x33 - a.x01*a.x22*a.x33) * r
    m.x02 = (a.x02*a.x13*a.x31 - a.x03*a.x12*a.x31 + a.x03*a.x11*a.x32 - a.x01*a.x13*a.x32 - a.x02*a.x11*a.x33 + a.x01*a.x12*a.x33) * r
    m.x03 = (a.x03*a.x12*a.x21 - a.x02*a.x13*a.x21 - a.x03*a.x11*a.x22 + a.x01*a.x13*a.x22 + a.x02*a.x11*a.x23 - a.x01*a.x12*a.x23) * r
    m.x10 = (a.x13*a.x22*a.x30 - a.x12*a.x23*a.x30 - a.x13*a.x20*a.x32 + a.x10*a.x23*a.x32 + a.x12*a.x20*a.x33 - a.x10*a.x22*a.x33) * r
    m.x11 = (a.x02*a.x23*a.x30 - a.x03*a.x22*a.x30 + a.x03*a.x20*a.x32 - a.x00*a.x23*a.x32 - a.x02*a.x20*a.x33 + a.x00*a.x22*a.x33) * r
    m.x12 = (a.x03*a.x12*a.x30 - a.x02*a.x13*a.x30 - a.x03*a.x10*a.x32 + a.x00*a.x13*a.x32 + a.x02*a.x10*a.x33 - a.x00*a.x12*a.x33) * r
    m.x13 = (a.x02*a.x13*a.x20 - a.x03*a.x12*a.x20 + a.x03*a.x10*a.x22 - a.x00*a.x13*a.x22 - a.x02*a.x10*a.x23 + a.x00*a.x12*a.x23) * r
    m.x20 = (a.x11*a.x23*a.x30 - a.x13*a.x21*a.x30 + a.x13*a.x20*a.x31 - a.x10*a.x23*a.x31 - a.x11*a.x20*a.x33 + a.x10*a.x21*a.x33) * r
    m.x21 = (a.x03*a.x21*a.x30 - a.x01*a.x23*a.x30 - a.x03*a.x20*a.x31 + a.x00*a.x23*a.x31 + a.x01*a.x20*a.x33 - a.x00*a.x21*a.x33) * r
    m.x22 = (a.x01*a.x13*a.x30 - a.x03*a.x11*a.x30 + a.x03*a.x10*a.x31 - a.x00*a.x13*a.x31 - a.x01*a.x10*a.x33 + a.x00*a.x11*a.x33) * r
    m.x23 = (a.x03*a.x11*a.x20 - a.x01*a.x13*a.x20 - a.x03*a.x10*a.x21 + a.x00*a.x13*a.x21 + a.x01*a.x10*a.x23 - a.x00*a.x11*a.x23) * r
    m.x30 = (a.x12*a.x21*a.x30 - a.x11*a.x22*a.x30 - a.x12*a.x20*a.x31 + a.x10*a.x22*a.x31 + a.x11*a.x20*a.x32 - a.x10*a.x21*a.x32) * r
    m.x31 = (a.x01*a.x22*a.x30 - a.x02*a.x21*a.x30 + a.x02*a.x20*a.x31 - a.x00*a.x22*a.x31 - a.x01*a.x20*a.x32 + a.x00*a.x21*a.x32) * r
    m.x32 = (a.x02*a.x11*a.x30 - a.x01*a.x12*a.x30 - a.x02*a.x10*a.x31 + a.x00*a.x12*a.x31 + a.x01*a.x10*a.x32 - a.x00*a.x11*a.x32) * r
    m.x33 = (a.x01*a.x12*a.x20 - a.x02*a.x11*a.x20 + a.x02*a.x10*a.x21 - a.x00*a.x12*a.x21 - a.x01*a.x10*a.x22 + a.x00*a.x11*a.x22) * r
    return m
} | matrix.go | 0.808974 | 0.784855 | matrix.go | starcoder |
package websocket
import (
"encoding/binary"
"math/bits"
)
// maskBytesGo XORs b in place with the 4-byte masking key key32 (applied
// little-endian and repeated across the buffer) and returns the key
// rotated to account for the bytes consumed, so masking can resume
// correctly on a subsequent chunk. For buffers of 16+ bytes the key is
// widened to 64 bits and XORed in progressively smaller unrolled
// word-sized chunks; every chunk size is a multiple of 4 bytes, so the
// key stays aligned and needs no rotation there. The final byte loop
// handles the tail, rotating the key right by 8 bits per byte.
func maskBytesGo(key32 uint32, b []byte) uint32 {
    if len(b) >= 16 {
        // Widen the 32-bit key so whole 64-bit words can be XORed at once.
        key64 := uint64(key32) | uint64(key32)<<32
        // At some point in the future we can clean these unrolled loops up.
        // See https://github.com/golang/go/issues/31586#issuecomment-487436401
        // Then we xor until b is less than 128 bytes.
        for len(b) >= 128 {
            v := binary.LittleEndian.Uint64(b)
            binary.LittleEndian.PutUint64(b, v^key64)
            v = binary.LittleEndian.Uint64(b[8:16])
            binary.LittleEndian.PutUint64(b[8:16], v^key64)
            v = binary.LittleEndian.Uint64(b[16:24])
            binary.LittleEndian.PutUint64(b[16:24], v^key64)
            v = binary.LittleEndian.Uint64(b[24:32])
            binary.LittleEndian.PutUint64(b[24:32], v^key64)
            v = binary.LittleEndian.Uint64(b[32:40])
            binary.LittleEndian.PutUint64(b[32:40], v^key64)
            v = binary.LittleEndian.Uint64(b[40:48])
            binary.LittleEndian.PutUint64(b[40:48], v^key64)
            v = binary.LittleEndian.Uint64(b[48:56])
            binary.LittleEndian.PutUint64(b[48:56], v^key64)
            v = binary.LittleEndian.Uint64(b[56:64])
            binary.LittleEndian.PutUint64(b[56:64], v^key64)
            v = binary.LittleEndian.Uint64(b[64:72])
            binary.LittleEndian.PutUint64(b[64:72], v^key64)
            v = binary.LittleEndian.Uint64(b[72:80])
            binary.LittleEndian.PutUint64(b[72:80], v^key64)
            v = binary.LittleEndian.Uint64(b[80:88])
            binary.LittleEndian.PutUint64(b[80:88], v^key64)
            v = binary.LittleEndian.Uint64(b[88:96])
            binary.LittleEndian.PutUint64(b[88:96], v^key64)
            v = binary.LittleEndian.Uint64(b[96:104])
            binary.LittleEndian.PutUint64(b[96:104], v^key64)
            v = binary.LittleEndian.Uint64(b[104:112])
            binary.LittleEndian.PutUint64(b[104:112], v^key64)
            v = binary.LittleEndian.Uint64(b[112:120])
            binary.LittleEndian.PutUint64(b[112:120], v^key64)
            v = binary.LittleEndian.Uint64(b[120:128])
            binary.LittleEndian.PutUint64(b[120:128], v^key64)
            b = b[128:]
        }
        // Then we xor until b is less than 64 bytes.
        for len(b) >= 64 {
            v := binary.LittleEndian.Uint64(b)
            binary.LittleEndian.PutUint64(b, v^key64)
            v = binary.LittleEndian.Uint64(b[8:16])
            binary.LittleEndian.PutUint64(b[8:16], v^key64)
            v = binary.LittleEndian.Uint64(b[16:24])
            binary.LittleEndian.PutUint64(b[16:24], v^key64)
            v = binary.LittleEndian.Uint64(b[24:32])
            binary.LittleEndian.PutUint64(b[24:32], v^key64)
            v = binary.LittleEndian.Uint64(b[32:40])
            binary.LittleEndian.PutUint64(b[32:40], v^key64)
            v = binary.LittleEndian.Uint64(b[40:48])
            binary.LittleEndian.PutUint64(b[40:48], v^key64)
            v = binary.LittleEndian.Uint64(b[48:56])
            binary.LittleEndian.PutUint64(b[48:56], v^key64)
            v = binary.LittleEndian.Uint64(b[56:64])
            binary.LittleEndian.PutUint64(b[56:64], v^key64)
            b = b[64:]
        }
        // Then we xor until b is less than 32 bytes.
        for len(b) >= 32 {
            v := binary.LittleEndian.Uint64(b)
            binary.LittleEndian.PutUint64(b, v^key64)
            v = binary.LittleEndian.Uint64(b[8:16])
            binary.LittleEndian.PutUint64(b[8:16], v^key64)
            v = binary.LittleEndian.Uint64(b[16:24])
            binary.LittleEndian.PutUint64(b[16:24], v^key64)
            v = binary.LittleEndian.Uint64(b[24:32])
            binary.LittleEndian.PutUint64(b[24:32], v^key64)
            b = b[32:]
        }
        // Then we xor until b is less than 16 bytes.
        for len(b) >= 16 {
            v := binary.LittleEndian.Uint64(b)
            binary.LittleEndian.PutUint64(b, v^key64)
            v = binary.LittleEndian.Uint64(b[8:16])
            binary.LittleEndian.PutUint64(b[8:16], v^key64)
            b = b[16:]
        }
        // Then we xor until b is less than 8 bytes.
        for len(b) >= 8 {
            v := binary.LittleEndian.Uint64(b)
            binary.LittleEndian.PutUint64(b, v^key64)
            b = b[8:]
        }
    }
    // Byte-at-a-time tail: XOR with the key's low byte, then rotate the
    // key right 8 bits so the next byte uses the next key octet.
    for i := range b {
        b[i] ^= byte(key32)
        key32 = bits.RotateLeft32(key32, -8)
    }
    return key32
} | mask.go | 0.571527 | 0.472744 | mask.go | starcoder |
package internal
import "github.com/mokiat/gomath/sprec"
// Initial capacities for a Shape's backing slices, so typical shapes can
// be built without early reallocations.
const (
	initialShapePointCount = 1024
	initialShapeSubShapeCount = 4
)
// newShape allocates an empty Shape with pre-sized point and sub-shape slices.
func newShape() *Shape {
	return &Shape{
		points: make([]ShapePoint, 0, initialShapePointCount),
		subShapes: make([]SubShape, 0, initialShapeSubShapeCount),
	}
}

// Shape accumulates the tessellated outline(s) of a filled vector shape.
// A shape may consist of several sub-shapes (contours); each MoveTo call
// begins a new one.
type Shape struct {
	fill Fill
	points []ShapePoint
	subShapes []SubShape
}

// Init resets the shape for reuse with the given fill. The slices are
// truncated to zero length but keep their backing arrays.
func (s *Shape) Init(fill Fill) {
	s.fill = fill
	s.points = s.points[:0]
	s.subShapes = s.subShapes[:0]
}

// MoveTo begins a new sub-shape (contour) starting at position.
func (s *Shape) MoveTo(position sprec.Vec2) {
	s.startSubShape()
	s.addPoint(ShapePoint{
		coords: position,
	})
}

// LineTo appends a straight segment from the current point to position.
func (s *Shape) LineTo(position sprec.Vec2) {
	s.addPoint(ShapePoint{
		coords: position,
	})
}
// QuadTo appends a quadratic Bézier segment from the current point,
// shaped by control, ending at position, approximated with a fixed
// number of line segments.
//
// The standard quadratic Bézier B(t) = (1-t)²·S + 2t(1-t)·C + t²·E is
// evaluated here relative to the control point:
//
//	B(t) = C + (1-t)²·(S-C) + t²·(E-C)
//
// which is algebraically identical because (1-t)² + 2t(1-t) + t² = 1
// (assuming sprec.Vec2Diff(a, b) computes a-b — TODO confirm).
func (s *Shape) QuadTo(control, position sprec.Vec2) {
	// TODO: Evaluate tessellation based on curvature and size
	const tessellation = 5
	lastPoint := s.lastPoint()
	vecCS := sprec.Vec2Diff(lastPoint.coords, control)
	vecCE := sprec.Vec2Diff(position, control)
	// start and end are excluded from this loop on purpose
	for i := 1; i < tessellation; i++ {
		t := float32(i) / float32(tessellation)
		alpha := (1 - t) * (1 - t)
		beta := t * t
		s.addPoint(ShapePoint{
			coords: sprec.Vec2Sum(
				control,
				sprec.Vec2Sum(
					sprec.Vec2Prod(vecCS, alpha),
					sprec.Vec2Prod(vecCE, beta),
				),
			),
		})
	}
	// Append the exact end point so the curve always terminates at position.
	s.addPoint(ShapePoint{
		coords: position,
	})
}
// CubeTo appends a cubic Bézier segment from the current point, shaped
// by control1 and control2, ending at position, approximated with a
// fixed number of line segments using the Bernstein basis:
//
//	B(t) = (1-t)³·S + 3(1-t)²t·C1 + 3(1-t)t²·C2 + t³·E
func (s *Shape) CubeTo(control1, control2, position sprec.Vec2) {
	// TODO: Evaluate tessellation based on curvature and size
	const tessellation = 5
	lastPoint := s.lastPoint()
	// start and end are excluded from this loop on purpose
	for i := 1; i < tessellation; i++ {
		t := float32(i) / float32(tessellation)
		alpha := (1 - t) * (1 - t) * (1 - t)
		beta := 3 * (1 - t) * (1 - t) * t
		gamma := 3 * (1 - t) * t * t
		delta := t * t * t
		s.addPoint(ShapePoint{
			coords: sprec.Vec2Sum(
				sprec.Vec2Sum(
					sprec.Vec2Prod(lastPoint.coords, alpha),
					sprec.Vec2Prod(control1, beta),
				),
				sprec.Vec2Sum(
					sprec.Vec2Prod(control2, gamma),
					sprec.Vec2Prod(position, delta),
				),
			),
		})
	}
	// Append the exact end point so the curve always terminates at position.
	s.addPoint(ShapePoint{
		coords: position,
	})
}
// startSubShape opens a fresh, empty contour positioned at the current
// end of the point list.
func (s *Shape) startSubShape() {
	sub := SubShape{pointOffset: len(s.points)}
	s.subShapes = append(s.subShapes, sub)
}

// addPoint appends point to the shape and credits it to the most
// recently started sub-shape.
func (s *Shape) addPoint(point ShapePoint) {
	s.points = append(s.points, point)
	last := len(s.subShapes) - 1
	s.subShapes[last].pointCount++
}

// lastPoint returns the most recently added point.
func (s *Shape) lastPoint() ShapePoint {
	n := len(s.points)
	return s.points[n-1]
}
// ShapePoint is a single tessellated outline vertex.
type ShapePoint struct {
	coords sprec.Vec2
}

// SubShape references a contiguous run of points inside Shape.points
// that forms one contour.
type SubShape struct {
	pointOffset int
	pointCount int
}

// Fill describes how a shape's interior is painted: stencil rule, flat
// color, and an optional image.
type Fill struct {
	mode StencilMode
	color sprec.Vec4
	image *Image
}

// StencilMode selects the fill rule applied when stenciling a shape.
type StencilMode int
const (
StencilModeNone StencilMode = iota
StencilModeNonZero
StencilModeOdd
) | framework/opengl/ui/internal/shape.go | 0.531209 | 0.581422 | shape.go | starcoder |
package transformer
import (
"gopkg.in/Clever/optimus.v3"
"gopkg.in/Clever/optimus.v3/transforms"
)
// A Transformer allows you to easily chain multiple transforms on a table.
type Transformer struct {
	table optimus.Table // current tail of the transform chain
}
// Table returns the terminating Table in a Transformer chain.
func (t Transformer) Table() optimus.Table {
	return t.table
}
// Apply applies a given TransformFunc to the Transformer and returns the
// receiver so calls can be chained. Note that the receiver is mutated.
func (t *Transformer) Apply(transform optimus.TransformFunc) *Transformer {
	// TODO: Should this return a new transformer instead of modifying the existing one?
	t.table = optimus.Transform(t.table, transform)
	return t
}
// Fieldmap applies a Fieldmap transform.
func (t *Transformer) Fieldmap(mappings map[string][]string) *Transformer {
	return t.Apply(transforms.Fieldmap(mappings))
}
// Map applies a Map transform.
func (t *Transformer) Map(transform func(optimus.Row) (optimus.Row, error)) *Transformer {
	return t.Apply(transforms.Map(transform))
}
// Each applies an Each transform.
func (t *Transformer) Each(transform func(optimus.Row) error) *Transformer {
	return t.Apply(transforms.Each(transform))
}
// TableTransform applies a TableTransform transform.
func (t *Transformer) TableTransform(transform func(optimus.Row, chan<- optimus.Row) error) *Transformer {
	return t.Apply(transforms.TableTransform(transform))
}
// Select applies a Select transform.
func (t *Transformer) Select(filter func(optimus.Row) (bool, error)) *Transformer {
	return t.Apply(transforms.Select(filter))
}
// Valuemap applies a Valuemap transform.
func (t *Transformer) Valuemap(mappings map[string]map[interface{}]interface{}) *Transformer {
	return t.Apply(transforms.Valuemap(mappings))
}
// Reduce applies a Reduce transform.
func (t *Transformer) Reduce(fn func(optimus.Row, optimus.Row) error) *Transformer {
	return t.Apply(transforms.Reduce(fn))
}
// Concurrently applies a Concurrent transform.
func (t *Transformer) Concurrently(fn optimus.TransformFunc, concurrency int) *Transformer {
	return t.Apply(transforms.Concurrently(fn, concurrency))
}
// Concat applies a Concat transform.
func (t *Transformer) Concat(tables ...optimus.Table) *Transformer {
	return t.Apply(transforms.Concat(tables...))
}
// Pair applies a Pair transform.
func (t *Transformer) Pair(rightTable optimus.Table, leftID, rightID transforms.RowIdentifier,
	filterFn func(optimus.Row) (bool, error)) *Transformer {
	return t.Apply(transforms.Pair(rightTable, leftID, rightID, filterFn))
}
// Sort applies a Sort transform.
func (t *Transformer) Sort(less func(i, j optimus.Row) (bool, error)) *Transformer {
	return t.Apply(transforms.Sort(less))
}
// StableSort applies a StableSort transform.
func (t *Transformer) StableSort(less func(i, j optimus.Row) (bool, error)) *Transformer {
	return t.Apply(transforms.StableSort(less))
}
// GroupBy applies a GroupBy transform.
func (t *Transformer) GroupBy(identifier transforms.RowIdentifier) *Transformer {
	return t.Apply(transforms.GroupBy(identifier))
}
// Sink terminates the chain by consuming all the Rows into sink.
func (t *Transformer) Sink(sink optimus.Sink) error {
	return sink(t.table)
}
// New returns a Transformer that allows you to chain transformations on a Table.
func New(table optimus.Table) *Transformer {
return &Transformer{table}
} | plugins/data/transform/optimus/transformer/transformer.go | 0.82925 | 0.504578 | transformer.go | starcoder |
package accumulator
import (
"bytes"
"fmt"
"sort"
"time"
"github.com/FactomProject/factomd/util/atomic"
"github.com/PaulSnow/ValidatorAccumulator/ValAcc/database"
"github.com/PaulSnow/ValidatorAccumulator/ValAcc/merkleDag"
"github.com/PaulSnow/ValidatorAccumulator/ValAcc/node"
"github.com/PaulSnow/ValidatorAccumulator/ValAcc/types"
)
// Accumulator
// The accumulator takes a feed of EntryHash objects to construct the cryptographic structure proving the order
// and content of the entries submitted to the Validators. Validators validate the data, and store the data into
// key/value stores, and send streams of hashes to the Accumulators. Validators are assumed to be knowledgeable
// of the actual use case of the system, and able to validate the data prior to submission to the accumulator.
// Of course, the Accumulator does secure and order the data, so it is reasonable that a validator may optimistically
// record entries that might be invalidated by applications after recording.
// Accumulator collects validated entry hashes into per-chain Merkle DAGs
// and periodically seals them into directory blocks. Fields are grouped
// into configuration (DB, chainID), per-block working state (height,
// chains, previous), the channels wired up by Init, and atomic counters
// exposed for monitoring.
type Accumulator struct {
	DB *database.DB // Database to hold and index the data collected by the Accumulator
	chainID *types.Hash // Digital ID of the Accumulator.
	height types.BlockHeight // Height of the current block
	chains map[types.Hash]*ChainAcc // Chains with new entries in this block
	entryFeed chan node.EntryHash // Stream of entries to be placed into chains
	control chan bool // We are sent a "true" when it is time to end the block
	mdFeed chan *types.Hash // Give back the MD Hashes as they are produced
	previous *node.Node // Previous Directory Block
	EntryCnt atomic.AtomicInt64 // Count of entries written
	ChainsInBlock atomic.AtomicInt64 // Count of chains written to
	ChainCnt atomic.AtomicInt64 // Count of all chains
}
// Allocate the HashMap and Channels for this accumulator
// The ChainID is the Digital Identity of the Accumulator. We will want to integrate
// useful digital IDs into the accumulator structure to ensure the integrity of the data
// collected.
// Init prepares the accumulator for Run: it records the database and the
// accumulator's chain ID, resumes from the persisted directory-block head
// (panicking if the head reference is dangling or unreadable), and
// allocates the working maps and channels.
//
// It returns the three channels callers use to drive the accumulator:
// EntryFeed for submitting entry hashes, control for signaling
// end-of-block, and mdFeed for receiving the resulting Merkle DAG roots.
func (a *Accumulator) Init(db *database.DB, chainID *types.Hash) (
	EntryFeed chan node.EntryHash, // Return the EntryFeed channel to send ANode Hashes to the accumulator
	control chan bool, // The control channel signals End of Block to the accumulator
	mdFeed chan *types.Hash) { // the Merkle DAG Feed (mdFeed) returns block merkle DAG roots
	a.DB = db
	a.chainID = chainID
	headHash := db.Get(types.NodeHead, chainID[:])
	if headHash != nil {
		head := db.Get(types.Node, headHash)
		if head == nil {
			panic("no head found for the directory blocks in the database")
		}
		var headNode node.Node
		_, err := headNode.Unmarshal(head)
		if err != nil {
			panic(fmt.Sprintf("error unmarshaling the head of the directory block.\n%v", err))
		}
		a.previous = &headNode
		a.height = headNode.BHeight + 1
	}
	a.chains = make(map[types.Hash]*ChainAcc, 1000)
	a.entryFeed = make(chan node.EntryHash, 10000)
	a.control = make(chan bool, 1)
	a.mdFeed = make(chan *types.Hash, 1)
	// BUG FIX: this was fmt.Sprintf, whose formatted string was silently
	// discarded; the startup message was never printed.
	fmt.Printf("Starting the Accumulator at height %d\n", a.height)
	return a.entryFeed, a.control, a.mdFeed
}
// GetEntryFeed returns the channel on which entry hashes are submitted
// to the accumulator (the same channel returned by Init).
func (a *Accumulator) GetEntryFeed() chan node.EntryHash {
	return a.entryFeed
}
// Run is the accumulator's main loop: it drains the entry feed into
// per-chain accumulators until an end-of-block signal arrives on the
// control channel, then persists the chains, builds the directory block
// for the finished height, and publishes its Merkle DAG root on mdFeed.
// Run never returns; start it as a goroutine after Init.
func (a *Accumulator) Run() {
	var totalEntries int64 // We count the entries and chains as we go, but update the atomic counts
	var ChainsInBlock int64 // at the end of each block
	var goWrites atomic.AtomicInt // number of in-flight background chain writes
	for {
		// While we are processing a block
	block:
		for {
			// Block processing involves pulling Entries out of the entryFeed and adding
			// it to the Merkle DAG (MD)
			select {
			case ctl := <-a.control: // Have we been asked to end the block?
				if ctl {
					println("Processing EOB ", a.height)
					a.height++
					break block // Break block processing
				}
			default:
				select {
				case entry := <-a.entryFeed: // Get the next ANode
					chain := a.chains[entry.ChainID] // See if we have a chain for it
					totalEntries++
					if chain == nil { // If we don't have a chain for it, then we add one to our tmp state
						ChainsInBlock++
						chain = NewChainAcc(*a.DB, entry, a.height) // Create our collector for this chain
						a.chains[entry.ChainID] = chain // Add it to our tmp state
						chain.MD.AddToChain(entry.EntryHash) // Add this entry to our chain state
					} else {
						// This is where we make sure every Entry added to a chain is a non-duplicate to all
						// entries. This assumes that the chains for an accumulator are unique to that accumulator,
						// which is true by design. So if the entry isn't in the chain right now, and not in the db,
						// then it is unique.
						if chain.entries[entry.EntryHash] == 0 { // Added this entry to this chain already?
							if a.DB.Get(types.EntryNode, entry.EntryHash.Bytes()) == nil { // Have the entry in the DB already?
								chain.entries[entry.EntryHash] = 1 // No? Then mark it in the chain
								chain.MD.AddToChain(entry.EntryHash) // Add it to the chain
							}
						}
					}
				default:
					time.Sleep(100 * time.Millisecond) // If there is nothing to do, pause a bit
				}
			}
		}
		// Wait for background writes started in the previous block to drain
		// before reusing the counter for this block's writes.
		if goWrites.Load() > 0 {
			fmt.Println("Waiting on", goWrites.Load(), "database updates.")
			for goWrites.Load() > 0 {
				time.Sleep(1 * time.Second)
			}
		}
		var chainEntries []node.NEList
		for _, v := range a.chains {
			v.Node.ListMDRoot = *v.MD.GetMDRoot()
			v.Node.EntryList = v.MD.HashList
			v.Node.IsNode = false
			tNode := v.Node
			// BUG FIX: register the write *before* launching the goroutine.
			// Previously Add(1) ran inside the goroutine, so the drain loop
			// above could observe zero in-flight writes before the goroutines
			// had a chance to record themselves.
			goWrites.Add(1)
			go func() {
				tNode.Put(a.DB)
				goWrites.Add(-1)
			}()
			ne := new(node.NEList)
			ne.ChainID = v.Node.ChainID
			ne.MDRoot = v.Node.ListMDRoot
			chainEntries = append(chainEntries, *ne)
		}
		sort.Slice(chainEntries, func(i, j int) bool {
			return bytes.Compare(chainEntries[i].ChainID[:], chainEntries[j].ChainID[:]) < 0
		})
		// Tally entries across all chains. NOTE(review): sum is computed but
		// never reported; presumably a statistics print was intended here.
		var sum int
		for _, v := range a.chains {
			sum += len(v.MD.HashList)
		}
		a.EntryCnt.Store(totalEntries)
		a.ChainsInBlock.Store(ChainsInBlock)
		a.ChainCnt.Add(ChainsInBlock)
		ChainsInBlock = 0
		// Calculate the ListMDRoot for all the accumulated MDRoots for all the chains
		MDAcc := new(merkleDag.MD)
		for _, v := range chainEntries {
			MDAcc.AddToChain(v.MDRoot)
		}
		// Populate the directory block with the data collected over the last block period.
		directoryBlock := new(node.Node)
		directoryBlock.Version = types.Version
		directoryBlock.ChainID = *a.chainID
		directoryBlock.BHeight = a.height
		directoryBlock.SequenceNum = types.Sequence(a.height)
		// BUG FIX: the original guarded this on directoryBlock.SequenceNum > 0,
		// but SequenceNum was only assigned *after* that check, so Previous was
		// never populated. Link back whenever a previous block exists.
		if a.previous != nil {
			directoryBlock.Previous = *a.previous.GetHash()
		}
		directoryBlock.TimeStamp = types.TimeStamp(time.Now().UnixNano())
		directoryBlock.IsNode = true
		lMDR := MDAcc.GetMDRoot()
		if lMDR != nil {
			directoryBlock.ListMDRoot = *lMDR
		}
		// Write the directory
		directoryBlock.Put(a.DB)
		a.mdFeed <- directoryBlock.GetMDRoot()
		// BUG FIX: remember this block so the next one links back to it rather
		// than forever pointing at the head loaded in Init.
		a.previous = directoryBlock
		// Clear out all the chain heads, to start another round of accumulation in the next block
		a.chains = make(map[types.Hash]*ChainAcc, 1000)
	}
} | ValAcc/accumulator/accumulator.go | 0.601945 | 0.546194 | accumulator.go | starcoder |
package main
import (
"fmt"
"strconv"
"strings"
)
/*
Problem:
Implement atoi which converts a string to an integer.
The function first discards as many whitespace characters as necessary until the first non-whitespace character is found. Then, starting from this character, takes an optional initial plus or minus sign followed by as many numerical digits as possible, and interprets them as a numerical value.
The string can contain additional characters after those that form the integral number, which are ignored and have no effect on the behavior of this function.
If the first sequence of non-whitespace characters in str is not a valid integral number, or if no such sequence exists because either str is empty or it contains only whitespace characters, no conversion is performed.
If no valid conversion could be performed, a zero value is returned.
Note:
Only the space character ' ' is considered as whitespace character.
Assume we are dealing with an environment which could only store integers within the 32-bit signed integer range: [−231, 231 − 1]. If the numerical value is out of the range of representable values, INT_MAX (231 − 1) or INT_MIN (−231) is returned.
Example 1:
Input: "42"
Output: 42
Example 2:
Input: " -42"
Output: -42
Explanation: The first non-whitespace character is '-', which is the minus sign.
Then take as many numerical digits as possible, which gets 42.
Example 3:
Input: "4193 with words"
Output: 4193
Explanation: Conversion stops at digit '3' as the next character is not a numerical digit.
Example 4:
Input: "words and 987"
Output: 0
Explanation: The first non-whitespace character is 'w', which is not a numerical
digit or a +/- sign. Therefore no valid conversion could be performed.
Example 5:
Input: "-91283472332"
Output: -2147483648
Explanation: The number "-91283472332" is out of the range of a 32-bit signed integer.
Thefore INT_MIN (−231) is returned.
翻译:
题目太长,直接搬leetcode中文站的翻译了
实现atoi方法,用于转化一个字符串为整数
首先,该函数会根据需要丢弃无用的开头空格字符,直到寻找到第一个非空格的字符为止。
当我们寻找到的第一个非空字符为正或者负号时,则将该符号与之后面尽可能多的连续数字组合起来,作为该整数的正负号;假如第一个非空字符是数字,则直接将其与之后连续的数字字符组合起来,形成整数。
该字符串除了有效的整数部分之后也可能会存在多余的字符,这些字符可以被忽略,它们对于函数不应该造成影响。
注意:假如该字符串中的第一个非空格字符不是一个有效整数字符、字符串为空或字符串仅包含空白字符时,则你的函数不需要进行转换。
在任何情况下,若函数不能进行有效的转换时,请返回 0。
说明:
假设我们的环境只能存储 32 位大小的有符号整数,那么其数值范围为 [−231, 231 − 1]。如果数值超过这个范围,qing返回 INT_MAX (231 − 1) 或 INT_MIN (−231) 。
*/
//This exercise is mostly about edge cases. Since int is 64-bit on this machine,
// hand-rolled overflow detection is unnecessary: we lean on the standard
// library's strconv.Atoi and clamp the result to the 32-bit range afterwards.
// myAtoi converts a string to a 32-bit signed integer following the
// classic atoi rules: skip leading spaces, accept one optional sign,
// consume as many digits as possible, ignore any trailing garbage, and
// clamp the result to [-2147483648, 2147483647]. Inputs with no usable
// digits yield 0.
func myAtoi(str string) int {
	s := strings.TrimLeft(str, " ")
	if len(s) == 0 {
		return 0
	}
	// Accept one optional leading sign, then extend over the digit run.
	end := 0
	if s[0] == '+' || s[0] == '-' {
		end = 1
	}
	for end < len(s) && s[end] >= '0' && s[end] <= '9' {
		end++
	}
	// A failed parse (e.g. "" or a bare "+") leaves n at zero, which is
	// exactly the required result for inputs without digits. On range
	// errors strconv saturates at the int64 bounds, so clamping below
	// still produces the correct 32-bit limit.
	n, _ := strconv.Atoi(s[:end])
	switch {
	case n < -2147483648:
		return -2147483648
	case n > 2147483647:
		return 2147483647
	default:
		return n
	}
}
// main exercises myAtoi against a few sample inputs and prints the results.
func main() {
	input := "   -42"
	input2 := "4193 with words"
	input3 := "+1"
	fmt.Println(myAtoi(input))
	fmt.Println(myAtoi(input2))
	fmt.Println(myAtoi(input3))
} | Programs/008String to Integer (atoi)/008String to Integer (atoi).go | 0.544317 | 0.697596 | 008String to Integer (atoi).go | starcoder |
package gql
import (
"github.com/graphql-go/graphql"
"time"
"math/rand"
)
// init seeds the global math/rand source once at package load.
// NOTE(review): nothing in this file visibly uses math/rand; confirm the
// seed is still needed (rand.Seed is deprecated as of Go 1.20).
func init() {
	rand.Seed(time.Now().UnixNano())
}
// ResultTypeConfig describes the GraphQL "Result" object: a generic
// result/message string pair.
func ResultTypeConfig() graphql.ObjectConfig {
	return graphql.ObjectConfig{
		Name: "Result",
		Fields: graphql.Fields{
			"result": &graphql.Field{
				Type: graphql.String,
			},
			"message": &graphql.Field{
				Type: graphql.String,
			},
		},
	}
}
// TenantTypeConfig describes the GraphQL "Tenant" object, including the
// nested invoice/payment lists and the outstanding/overdue balances.
func TenantTypeConfig() graphql.ObjectConfig {
	return graphql.ObjectConfig{
		Name: "Tenant",
		Fields: graphql.Fields{
			"id": &graphql.Field{
				Type: graphql.String,
			},
			"name": &graphql.Field{
				Type: graphql.String,
			},
			"zaid": &graphql.Field{
				Type: graphql.String,
			},
			"mobile": &graphql.Field{
				Type: graphql.String,
			},
			"telephone": &graphql.Field{
				Type: graphql.String,
			},
			"site": &graphql.Field{
				Type: graphql.String,
			},
			"room": &graphql.Field{
				Type: graphql.String,
			},
			"gender": &graphql.Field{
				Type: graphql.String,
			},
			"status": &graphql.Field{
				Type: graphql.String,
			},
			"invoices": &graphql.Field{
				Type: graphql.NewList(graphql.NewObject(InvoiceTypeConfig())),
			},
			"payments": &graphql.Field{
				Type: graphql.NewList(graphql.NewObject(PaymentTypeConfig())),
			},
			"outstanding": &graphql.Field{
				Type: graphql.Float,
			},
			"overdue": &graphql.Field{
				Type: graphql.Float,
			},
		},
	}
}
// InvoiceTypeConfig describes the GraphQL "Invoice" object.
// NOTE(review): each call builds a fresh ObjectConfig, and callers wrap
// it in graphql.NewObject, so several distinct object instances named
// "Invoice"/"Payment" can exist; verify the schema tolerates this.
func InvoiceTypeConfig() graphql.ObjectConfig {
	return graphql.ObjectConfig{
		Name: "Invoice",
		Fields: graphql.Fields{
			"id": &graphql.Field{
				Type: graphql.String,
			},
			"tenantid": &graphql.Field{
				Type: graphql.String,
			},
			"tenantname": &graphql.Field{
				Type: graphql.String,
			},
			"number": &graphql.Field{
				Type: graphql.String,
			},
			"reference": &graphql.Field{
				Type: graphql.String,
			},
			"total": &graphql.Field{
				Type: graphql.Float,
			},
			"balance": &graphql.Field{
				Type: graphql.Float,
			},
			"date": &graphql.Field{
				Type: graphql.String,
			},
			"duedate": &graphql.Field{
				Type: graphql.String,
			},
			"periodindex": &graphql.Field{
				Type: graphql.Int,
			},
			"periodname": &graphql.Field{
				Type: graphql.String,
			},
			"status": &graphql.Field{
				Type: graphql.String,
			},
			"lineitems": &graphql.Field{
				// NOTE(review): line items are typed as Payment objects here;
				// confirm this is intentional and not meant to be a LineItem type.
				Type: graphql.NewList(graphql.NewObject(PaymentTypeConfig())),
			},
		},
	}
}
// PaymentTypeConfig describes the GraphQL "Payment" object.
func PaymentTypeConfig() graphql.ObjectConfig {
	return graphql.ObjectConfig{
		Name: "Payment",
		Fields: graphql.Fields{
			"id": &graphql.Field{
				Type: graphql.String,
			},
			"tenantid": &graphql.Field{
				Type: graphql.String,
			},
			"invoiceid": &graphql.Field{
				Type: graphql.String,
			},
			"number": &graphql.Field{
				Type: graphql.String,
			},
			"description": &graphql.Field{
				Type: graphql.String,
			},
			"amount": &graphql.Field{
				Type: graphql.Float,
			},
			"date": &graphql.Field{
				Type: graphql.String,
			},
			"mode": &graphql.Field{
				Type: graphql.String,
			},
			"status": &graphql.Field{
				Type: graphql.String,
			},
		},
	}
}
// TenantFieldArguments lists the arguments accepted by tenant queries and
// mutations; name and zaid are required, everything else is optional.
func TenantFieldArguments() graphql.FieldConfigArgument {
	return graphql.FieldConfigArgument{
		"id": &graphql.ArgumentConfig{
			Type: graphql.String,
		},
		"name": &graphql.ArgumentConfig{
			Type: graphql.NewNonNull(graphql.String),
		},
		"zaid": &graphql.ArgumentConfig{
			Type: graphql.NewNonNull(graphql.String),
		},
		"moveindate": &graphql.ArgumentConfig{
			Type: graphql.String,
		},
		"telephone": &graphql.ArgumentConfig{
			Type: graphql.String,
		},
		"mobile": &graphql.ArgumentConfig{
			Type: graphql.String,
		},
		"site": &graphql.ArgumentConfig{
			Type: graphql.String,
		},
		"room": &graphql.ArgumentConfig{
			Type: graphql.String,
		},
		"gender": &graphql.ArgumentConfig{
			Type: graphql.String,
		},
		"status": &graphql.ArgumentConfig{
			Type: graphql.String,
		},
		"moveoutdate": &graphql.ArgumentConfig{
			Type: graphql.String,
		},
		"imageurl": &graphql.ArgumentConfig{
			Type: graphql.String,
		},
	}
}
// InvoiceFieldArguments lists the arguments accepted by invoice queries
// and mutations; all arguments are optional.
func InvoiceFieldArguments() graphql.FieldConfigArgument {
	return graphql.FieldConfigArgument{
		"id": &graphql.ArgumentConfig{
			Type: graphql.String,
		},
		"tenantid": &graphql.ArgumentConfig{
			Type: graphql.String,
		},
		"tenantname": &graphql.ArgumentConfig{
			Type: graphql.String,
		},
		"number": &graphql.ArgumentConfig{
			Type: graphql.String,
		},
		"reference": &graphql.ArgumentConfig{
			Type: graphql.String,
		},
		"total": &graphql.ArgumentConfig{
			Type: graphql.Float,
		},
		"balance": &graphql.ArgumentConfig{
			Type: graphql.Float,
		},
		"lineitems": &graphql.ArgumentConfig{
			Type: graphql.String,
		},
		"date": &graphql.ArgumentConfig{
			Type: graphql.String,
		},
		"duedate": &graphql.ArgumentConfig{
			Type: graphql.String,
		},
		"periodindex": &graphql.ArgumentConfig{
			Type: graphql.Int,
		},
		"periodname": &graphql.ArgumentConfig{
			Type: graphql.String,
		},
		"status": &graphql.ArgumentConfig{
			Type: graphql.String,
		},
	}
}
// PaymentFieldArguments lists the arguments accepted by payment queries
// and mutations; tenantid, invoiceid, amount, and date are required.
func PaymentFieldArguments() graphql.FieldConfigArgument {
	return graphql.FieldConfigArgument{
		"id": &graphql.ArgumentConfig{
			Type: graphql.String,
		},
		"tenantid": &graphql.ArgumentConfig{
			Type: graphql.NewNonNull(graphql.String),
		},
		"invoiceid": &graphql.ArgumentConfig{
			Type: graphql.NewNonNull(graphql.String),
		},
		"number": &graphql.ArgumentConfig{
			Type: graphql.String,
		},
		"description": &graphql.ArgumentConfig{
			Type: graphql.String,
		},
		"amount": &graphql.ArgumentConfig{
			Type: graphql.NewNonNull(graphql.Float),
		},
		"date": &graphql.ArgumentConfig{
			Type: graphql.NewNonNull(graphql.String),
		},
		"mode": &graphql.ArgumentConfig{
			Type: graphql.String,
		},
		"status": &graphql.ArgumentConfig{
			Type: graphql.String,
		},
	}
}
// ItemFieldArguments lists the arguments accepted by item queries and
// mutations; name and rate are required.
func ItemFieldArguments() graphql.FieldConfigArgument {
	return graphql.FieldConfigArgument{
		"id": &graphql.ArgumentConfig{
			Type: graphql.String,
		},
		"name": &graphql.ArgumentConfig{
			Type: graphql.NewNonNull(graphql.String),
		},
		"description": &graphql.ArgumentConfig{
			Type: graphql.String,
		},
		"rate": &graphql.ArgumentConfig{
			Type: graphql.NewNonNull(graphql.Float),
		},
		"status": &graphql.ArgumentConfig{
			Type: graphql.String,
		},
	}
}
// LineItemFieldArguments lists the arguments describing a single invoice
// line item; only name is required.
func LineItemFieldArguments() graphql.FieldConfigArgument {
	return graphql.FieldConfigArgument{
		"name": &graphql.ArgumentConfig{
			Type: graphql.NewNonNull(graphql.String),
		},
		"description": &graphql.ArgumentConfig{
			Type: graphql.String,
		},
		"rate": &graphql.ArgumentConfig{
			Type: graphql.Float,
		},
		"quantity": &graphql.ArgumentConfig{
			Type: graphql.Float,
		},
		"total": &graphql.ArgumentConfig{
			Type: graphql.Float,
		},
		"discount": &graphql.ArgumentConfig{
			Type: graphql.String,
		},
	}
}
// PaymentExtensionFieldArguments lists the arguments for requesting a
// payment extension on an invoice; all five arguments are required.
func PaymentExtensionFieldArguments() graphql.FieldConfigArgument {
	return graphql.FieldConfigArgument{
		"invoiceid": &graphql.ArgumentConfig{
			Type: graphql.NewNonNull(graphql.String),
		},
		"paybydate": &graphql.ArgumentConfig{
			Type: graphql.NewNonNull(graphql.String),
		},
		"requestdate": &graphql.ArgumentConfig{
			Type: graphql.NewNonNull(graphql.String),
		},
		"requestby": &graphql.ArgumentConfig{
			Type: graphql.NewNonNull(graphql.String),
		},
		"requestmode": &graphql.ArgumentConfig{
			Type: graphql.NewNonNull(graphql.String),
		},
	}
} | gql/gql.go | 0.536313 | 0.418637 | gql.go | starcoder |
package colorgrad
// Diverging
// BrBG returns an 11-stop diverging gradient (dark brown through
// near-white to dark blue-green).
func BrBG() Gradient {
	colors := []string{"#543005", "#8c510a", "#bf812d", "#dfc27d", "#f6e8c3", "#f5f5f5", "#c7eae5", "#80cdc1", "#35978f", "#01665e", "#003c30"}
	return presetSpline(colors)
}
// PRGn returns an 11-stop diverging gradient (purple to green).
func PRGn() Gradient {
	colors := []string{"#40004b", "#762a83", "#9970ab", "#c2a5cf", "#e7d4e8", "#f7f7f7", "#d9f0d3", "#a6dba0", "#5aae61", "#1b7837", "#00441b"}
	return presetSpline(colors)
}
// PiYG returns an 11-stop diverging gradient (pink to yellow-green).
func PiYG() Gradient {
	colors := []string{"#8e0152", "#c51b7d", "#de77ae", "#f1b6da", "#fde0ef", "#f7f7f7", "#e6f5d0", "#b8e186", "#7fbc41", "#4d9221", "#276419"}
	return presetSpline(colors)
}
// PuOr returns an 11-stop diverging gradient (purple to orange).
func PuOr() Gradient {
	colors := []string{"#2d004b", "#542788", "#8073ac", "#b2abd2", "#d8daeb", "#f7f7f7", "#fee0b6", "#fdb863", "#e08214", "#b35806", "#7f3b08"}
	return presetSpline(colors)
}
// RdBu returns an 11-stop diverging gradient (red to blue).
func RdBu() Gradient {
	colors := []string{"#67001f", "#b2182b", "#d6604d", "#f4a582", "#fddbc7", "#f7f7f7", "#d1e5f0", "#92c5de", "#4393c3", "#2166ac", "#053061"}
	return presetSpline(colors)
}
// RdGy returns an 11-stop diverging gradient (red to grey).
func RdGy() Gradient {
	colors := []string{"#67001f", "#b2182b", "#d6604d", "#f4a582", "#fddbc7", "#ffffff", "#e0e0e0", "#bababa", "#878787", "#4d4d4d", "#1a1a1a"}
	return presetSpline(colors)
}
// RdYlBu returns an 11-stop diverging gradient (red through yellow to blue).
func RdYlBu() Gradient {
	colors := []string{"#a50026", "#d73027", "#f46d43", "#fdae61", "#fee090", "#ffffbf", "#e0f3f8", "#abd9e9", "#74add1", "#4575b4", "#313695"}
	return presetSpline(colors)
}
// RdYlGn returns an 11-stop diverging gradient (red through yellow to green).
func RdYlGn() Gradient {
	colors := []string{"#a50026", "#d73027", "#f46d43", "#fdae61", "#fee08b", "#ffffbf", "#d9ef8b", "#a6d96a", "#66bd63", "#1a9850", "#006837"}
	return presetSpline(colors)
}
// Spectral returns an 11-stop diverging rainbow-like gradient.
func Spectral() Gradient {
	colors := []string{"#9e0142", "#d53e4f", "#f46d43", "#fdae61", "#fee08b", "#ffffbf", "#e6f598", "#abdda4", "#66c2a5", "#3288bd", "#5e4fa2"}
	return presetSpline(colors)
}
// Sequential (Single Hue)
// Blues returns a 9-stop sequential gradient from near-white to dark blue.
func Blues() Gradient {
	colors := []string{"#f7fbff", "#deebf7", "#c6dbef", "#9ecae1", "#6baed6", "#4292c6", "#2171b5", "#08519c", "#08306b"}
	return presetSpline(colors)
}
// Greens returns a 9-stop sequential gradient from near-white to dark green.
func Greens() Gradient {
	colors := []string{"#f7fcf5", "#e5f5e0", "#c7e9c0", "#a1d99b", "#74c476", "#41ab5d", "#238b45", "#006d2c", "#00441b"}
	return presetSpline(colors)
}
// Greys returns a 9-stop sequential gradient from white to black.
func Greys() Gradient {
	colors := []string{"#ffffff", "#f0f0f0", "#d9d9d9", "#bdbdbd", "#969696", "#737373", "#525252", "#252525", "#000000"}
	return presetSpline(colors)
}
// Oranges returns a 9-stop sequential gradient from near-white to dark orange.
func Oranges() Gradient {
	colors := []string{"#fff5eb", "#fee6ce", "#fdd0a2", "#fdae6b", "#fd8d3c", "#f16913", "#d94801", "#a63603", "#7f2704"}
	return presetSpline(colors)
}
// Purples returns a 9-stop sequential gradient from near-white to dark purple.
func Purples() Gradient {
	colors := []string{"#fcfbfd", "#efedf5", "#dadaeb", "#bcbddc", "#9e9ac8", "#807dba", "#6a51a3", "#54278f", "#3f007d"}
	return presetSpline(colors)
}
// Reds returns a 9-stop sequential gradient from near-white to dark red.
func Reds() Gradient {
	colors := []string{"#fff5f0", "#fee0d2", "#fcbba1", "#fc9272", "#fb6a4a", "#ef3b2c", "#cb181d", "#a50f15", "#67000d"}
	return presetSpline(colors)
}
// Sequential (Multi-Hue)
// Viridis returns a 9-stop sequential multi-hue gradient (dark purple to yellow).
func Viridis() Gradient {
	colors := []string{"#440154", "#482777", "#3f4a8a", "#31678e", "#26838f", "#1f9d8a", "#6cce5a", "#b6de2b", "#fee825"}
	return presetSpline(colors)
}
// Inferno returns an 11-stop sequential multi-hue gradient (black to pale yellow).
func Inferno() Gradient {
	colors := []string{"#000004", "#170b3a", "#420a68", "#6b176e", "#932667", "#bb3654", "#dd513a", "#f3771a", "#fca50a", "#f6d644", "#fcffa4"}
	return presetSpline(colors)
}
// Magma returns an 11-stop sequential multi-hue gradient (black to pale cream).
func Magma() Gradient {
	colors := []string{"#000004", "#140e37", "#3b0f70", "#641a80", "#8c2981", "#b63679", "#de4968", "#f66f5c", "#fe9f6d", "#fece91", "#fcfdbf"}
	return presetSpline(colors)
}
// Plasma returns an 11-stop sequential multi-hue gradient (dark blue to yellow).
func Plasma() Gradient {
	colors := []string{"#0d0887", "#42039d", "#6a00a8", "#900da3", "#b12a90", "#cb4678", "#e16462", "#f1834b", "#fca636", "#fccd25", "#f0f921"}
	return presetSpline(colors)
}
// BuGn returns a 9-stop sequential gradient (near-white to dark green).
func BuGn() Gradient {
	colors := []string{"#f7fcfd", "#e5f5f9", "#ccece6", "#99d8c9", "#66c2a4", "#41ae76", "#238b45", "#006d2c", "#00441b"}
	return presetSpline(colors)
}
// BuPu returns a 9-stop sequential gradient (near-white to dark purple).
func BuPu() Gradient {
	colors := []string{"#f7fcfd", "#e0ecf4", "#bfd3e6", "#9ebcda", "#8c96c6", "#8c6bb1", "#88419d", "#810f7c", "#4d004b"}
	return presetSpline(colors)
}
// GnBu returns a 9-stop sequential gradient (near-white to dark blue).
func GnBu() Gradient {
	colors := []string{"#f7fcf0", "#e0f3db", "#ccebc5", "#a8ddb5", "#7bccc4", "#4eb3d3", "#2b8cbe", "#0868ac", "#084081"}
	return presetSpline(colors)
}
// OrRd returns a 9-stop sequential gradient (near-white to dark red).
func OrRd() Gradient {
	colors := []string{"#fff7ec", "#fee8c8", "#fdd49e", "#fdbb84", "#fc8d59", "#ef6548", "#d7301f", "#b30000", "#7f0000"}
	return presetSpline(colors)
}
// PuBuGn returns a 9-stop sequential gradient (near-white to dark teal).
func PuBuGn() Gradient {
	colors := []string{"#fff7fb", "#ece2f0", "#d0d1e6", "#a6bddb", "#67a9cf", "#3690c0", "#02818a", "#016c59", "#014636"}
	return presetSpline(colors)
}
// PuBu returns a 9-stop sequential gradient (near-white to dark blue).
func PuBu() Gradient {
	colors := []string{"#fff7fb", "#ece7f2", "#d0d1e6", "#a6bddb", "#74a9cf", "#3690c0", "#0570b0", "#045a8d", "#023858"}
	return presetSpline(colors)
}
// PuRd returns a 9-stop sequential gradient (near-white to dark red-purple).
func PuRd() Gradient {
	colors := []string{"#f7f4f9", "#e7e1ef", "#d4b9da", "#c994c7", "#df65b0", "#e7298a", "#ce1256", "#980043", "#67001f"}
	return presetSpline(colors)
}
// RdPu returns a 9-stop sequential gradient (near-white to dark purple).
func RdPu() Gradient {
	colors := []string{"#fff7f3", "#fde0dd", "#fcc5c0", "#fa9fb5", "#f768a1", "#dd3497", "#ae017e", "#7a0177", "#49006a"}
	return presetSpline(colors)
}
// YlGnBu returns a 9-stop sequential gradient (pale yellow to dark blue).
func YlGnBu() Gradient {
	colors := []string{"#ffffd9", "#edf8b1", "#c7e9b4", "#7fcdbb", "#41b6c4", "#1d91c0", "#225ea8", "#253494", "#081d58"}
	return presetSpline(colors)
}
// YlGn returns a 9-stop sequential gradient (pale yellow to dark green).
func YlGn() Gradient {
	colors := []string{"#ffffe5", "#f7fcb9", "#d9f0a3", "#addd8e", "#78c679", "#41ab5d", "#238443", "#006837", "#004529"}
	return presetSpline(colors)
}
// YlOrBr returns a 9-stop sequential gradient (pale yellow to dark brown).
func YlOrBr() Gradient {
	colors := []string{"#ffffe5", "#fff7bc", "#fee391", "#fec44f", "#fe9929", "#ec7014", "#cc4c02", "#993404", "#662506"}
	return presetSpline(colors)
}
// YlOrRd returns a 9-stop sequential gradient (pale yellow to dark red).
func YlOrRd() Gradient {
	colors := []string{"#ffffcc", "#ffeda0", "#fed976", "#feb24c", "#fd8d3c", "#fc4e2a", "#e31a1c", "#bd0026", "#800026"}
	return presetSpline(colors)
} | vendor/github.com/mazznoer/colorgrad/preset.go | 0.61115 | 0.582075 | preset.go | starcoder |
package bunbig
import (
"database/sql"
"database/sql/driver"
"fmt"
"math/big"
"gopkg.in/yaml.v3"
)
// Int is a database- and YAML-friendly wrapper around math/big.Int.
type Int big.Int

// NewInt returns a zero-valued Int.
func NewInt() *Int {
	var i Int
	return &i
}

// newBigint wraps an existing *big.Int as an *Int without copying.
func newBigint(x *big.Int) *Int {
	return (*Int)(x)
}

// FromMathBig wraps x as an *Int; exported twin of newBigint.
func FromMathBig(x *big.Int) *Int {
	return (*Int)(x)
}

// FromInt64 builds an *Int holding x.
func FromInt64(x int64) *Int {
	return newBigint(big.NewInt(x))
}

// FromString parses a base-10 integer string; the empty string yields
// zero. The receiver is left untouched — a new value is returned.
func (i *Int) FromString(x string) (*Int, error) {
	if len(x) == 0 {
		return FromInt64(0), nil
	}
	parsed, ok := new(big.Int).SetString(x, 10)
	if !ok {
		return nil, fmt.Errorf("cannot create Int from string")
	}
	return newBigint(parsed), nil
}

// Value implements driver.Valuer by rendering the integer in base 10.
func (b *Int) Value() (driver.Value, error) {
	return b.ToMathBig().String(), nil
}

// Scan implements sql.Scanner, accepting any database value convertible
// to a string holding a base-10 integer.
func (b *Int) Scan(value interface{}) error {
	var ns sql.NullString
	if err := ns.Scan(value); err != nil {
		return err
	}
	if _, ok := (*big.Int)(b).SetString(ns.String, 10); !ok {
		return fmt.Errorf("Error converting type %T into Bigint", value)
	}
	return nil
}

// ToMathBig exposes the underlying *big.Int (no copy).
func (b *Int) ToMathBig() *big.Int {
	return (*big.Int)(b)
}

// Sub returns b - x as a new Int.
func (b *Int) Sub(x *Int) *Int {
	return newBigint(new(big.Int).Sub(b.ToMathBig(), x.ToMathBig()))
}

// Add returns b + x as a new Int.
func (b *Int) Add(x *Int) *Int {
	return newBigint(new(big.Int).Add(b.ToMathBig(), x.ToMathBig()))
}

// Mul returns b * x as a new Int.
func (b *Int) Mul(x *Int) *Int {
	return newBigint(new(big.Int).Mul(b.ToMathBig(), x.ToMathBig()))
}

// Div returns b / x (Euclidean division, per big.Int.Div) as a new Int.
func (b *Int) Div(x *Int) *Int {
	return newBigint(new(big.Int).Div(b.ToMathBig(), x.ToMathBig()))
}

// Neg returns -b as a new Int.
func (b *Int) Neg() *Int {
	return newBigint(new(big.Int).Neg(b.ToMathBig()))
}

// ToUInt64 returns the value as a uint64 (semantics of big.Int.Uint64).
func (b *Int) ToUInt64() uint64 {
	return b.ToMathBig().Uint64()
}

// ToInt64 returns the value as an int64 (semantics of big.Int.Int64).
func (b *Int) ToInt64() int64 {
	return b.ToMathBig().Int64()
}

// String renders the value in base 10.
func (b *Int) String() string {
	return b.ToMathBig().String()
}

// Abs returns |b| as a new Int.
func (b *Int) Abs() *Int {
	return newBigint(new(big.Int).Abs(b.ToMathBig()))
}
var _ yaml.Unmarshaler = (*Int)(nil)
// UnmarshalYAML implements yaml.Unmarshaler, decoding a base-10 integer
// scalar into b in place. An empty scalar decodes to zero, mirroring
// FromString. BUG FIX: the previous implementation decoded the string
// but never stored the parsed value, leaving b untouched (the old "@todo").
func (b *Int) UnmarshalYAML(value *yaml.Node) error {
	var str string
	if err := value.Decode(&str); err != nil {
		return err
	}
	if str == "" {
		(*big.Int)(b).SetInt64(0)
		return nil
	}
	if _, ok := (*big.Int)(b).SetString(str, 10); !ok {
		return fmt.Errorf("cannot unmarshal %q into bunbig.Int", str)
	}
	return nil
}
// Cmp compares b with target and returns a Cmp whose predicate methods
// (Eq/Lt/Gt/Leq/Geq) interpret the three-way big.Int comparison result.
func (b *Int) Cmp(target *Int) Cmp {
	return &cmpInt{r: b.ToMathBig().Cmp(target.ToMathBig())}
}
// Eq reports b == target (comparison result is zero).
func (c *cmpInt) Eq() bool {
	return c.r == 0
}
// Lt reports b < target (comparison result is negative).
func (c *cmpInt) Lt() bool {
	return c.r < 0
}
// Gt reports b > target (comparison result is positive).
func (c *cmpInt) Gt() bool {
	return c.r > 0
}
// Leq reports b <= target (comparison result is non-positive).
func (c *cmpInt) Leq() bool {
	// Simplified from `c.r == 0 || c.r < 0`, which is equivalent.
	return c.r <= 0
}
func (c *cmpInt) Geq() bool {
return c.r == 0 || c.r > 0
} | extra/bunbig/int.go | 0.609059 | 0.400017 | int.go | starcoder |
package main
/*
* Quadtree will give us a primitive for spatially indexing pointers of game objects; our primary use case for
* this data structure will be in the world map where we will lean on it for lookups instead of using the usual
* room character/object/entity lists for a given room.
*/
// QuadTree spatially indexes *Point values over an axis-aligned boundary.
// A node stores points directly until Capacity is exceeded, at which point
// it subdivides into four child quadrants (see Subdivide).
type QuadTree struct {
	Northwest *QuadTree   `json:"nw"`
	Northeast *QuadTree   `json:"ne"`
	Southwest *QuadTree   `json:"sw"`
	Southeast *QuadTree   `json:"se"`
	Boundary  *Rect       `json:"boundary"`
	Nodes     *LinkedList `json:"data"` // points held directly at this node
	Capacity  int         `json:"capacity"`
	Parent    *QuadTree   `json:"parent"` // nil for the root
}

// QuadTreeNodeMaxElements is the default number of points a node may hold
// before it subdivides.
const QuadTreeNodeMaxElements = 4

// Rect is an axis-aligned rectangle anchored at (X, Y) with size W x H.
type Rect struct {
	X float64 `json:"x"`
	Y float64 `json:"y"`
	W float64 `json:"w"`
	H float64 `json:"h"`
}

// Point is a located value stored in the quadtree.
type Point struct {
	X     float64     `json:"x"`
	Y     float64     `json:"y"`
	Value interface{} `json:"value"`
}

// NewPoint builds a Point carrying value at (x, y).
func NewPoint(x float64, y float64, value interface{}) *Point {
	return &Point{X: x, Y: y, Value: value}
}
// Subdivide splits this leaf into four child quadrants that tile its
// boundary, and redistributes the points stored at this level into them.
// Returns false when the node is already subdivided.
func (qt *QuadTree) Subdivide() bool {
	if qt.Northwest != nil {
		return false
	}
	halfW, halfH := qt.Boundary.W/2, qt.Boundary.H/2
	qt.Northwest = NewQuadTree(halfW, halfH)
	qt.Northeast = NewQuadTree(halfW, halfH)
	qt.Southwest = NewQuadTree(halfW, halfH)
	qt.Southeast = NewQuadTree(halfW, halfH)
	qt.Northwest.Boundary = NewRect(qt.Boundary.X, qt.Boundary.Y, halfW, halfH)
	qt.Northeast.Boundary = NewRect(qt.Boundary.X+halfW, qt.Boundary.Y, halfW, halfH)
	qt.Southwest.Boundary = NewRect(qt.Boundary.X, qt.Boundary.Y+halfH, halfW, halfH)
	qt.Southeast.Boundary = NewRect(qt.Boundary.X+halfW, qt.Boundary.Y+halfH, halfW, halfH)
	qt.Northwest.Parent = qt
	qt.Northeast.Parent = qt
	qt.Southwest.Parent = qt
	qt.Southeast.Parent = qt
	// Repartition the nodes at this level to the appropriate child quad.
	// Swap in a fresh (empty) list first so we never mutate the list we are
	// iterating: the previous version called qt.Nodes.Remove(point) inside
	// the loop, which is unsafe if LinkedList.Remove unlinks the node the
	// iterator currently holds. The four children tile the boundary, so
	// every stored point lands in exactly one of them.
	oldNodes := qt.Nodes
	qt.Nodes = NewLinkedList()
	for iter := oldNodes.Head; iter != nil; iter = iter.Next {
		point := iter.Value.(*Point)
		switch {
		case qt.Northwest.Boundary.ContainsPoint(point):
			qt.Northwest.Nodes.Insert(point)
		case qt.Northeast.Boundary.ContainsPoint(point):
			qt.Northeast.Nodes.Insert(point)
		case qt.Southwest.Boundary.ContainsPoint(point):
			qt.Southwest.Nodes.Insert(point)
		case qt.Southeast.Boundary.ContainsPoint(point):
			qt.Southeast.Nodes.Insert(point)
		}
	}
	return true
}
// NewRect builds a rectangle anchored at (x, y) with width w and height h.
func NewRect(x float64, y float64, w float64, h float64) *Rect {
	return &Rect{x, y, w, h}
}

// Contains reports whether (x, y) lies within the rectangle, edges included.
func (r *Rect) Contains(x float64, y float64) bool {
	if x < r.X || x > r.X+r.W {
		return false
	}
	return y >= r.Y && y <= r.Y+r.H
}

// CollidesRect reports whether the two rectangles overlap (touching edges count).
func (r *Rect) CollidesRect(other *Rect) bool {
	// Axis-aligned overlap test: the rectangles intersect exactly when
	// their projections overlap on both the x and the y axis.
	overlapX := r.X+r.W >= other.X && r.X <= other.X+other.W
	overlapY := r.Y+r.H >= other.Y && r.Y <= other.Y+other.H
	return overlapX && overlapY
}

// ContainsRect reports whether other fits strictly inside r (no shared edges).
func (r *Rect) ContainsRect(other *Rect) bool {
	left := other.X > r.X
	right := (other.X + other.W) < r.X+r.W
	top := other.Y > r.Y
	bottom := other.Y+other.H < r.Y+r.H
	return left && right && top && bottom
}

// ContainsPoint reports whether p lies within the rectangle, edges included.
func (r *Rect) ContainsPoint(p *Point) bool {
	return r.Contains(p.X, p.Y)
}
// Insert adds a new value to the quadtree at point p. It returns false
// when p falls outside this tree's boundary.
func (qt *QuadTree) Insert(p *Point) bool {
	if !qt.Boundary.ContainsPoint(p) {
		return false
	}
	if qt.Northwest == nil {
		// Leaf with spare room: store the point right here.
		if qt.Nodes.Count < qt.Capacity {
			qt.Nodes.Insert(p)
			return true
		}
		// Leaf at capacity: split before delegating to the children.
		qt.Subdivide()
	}
	// Delegate to the first quadrant that accepts the point.
	return qt.Northwest.Insert(p) ||
		qt.Northeast.Insert(p) ||
		qt.Southwest.Insert(p) ||
		qt.Southeast.Insert(p)
}
// Collapse recursively folds subdivided quads back together once they no
// longer hold enough points to justify the subdivision. It always
// returns true.
func (qt *QuadTree) Collapse() bool {
	// Retrieve all points within this quad (recurses into the children).
	results := qt.QueryRect(qt.Boundary)
	// If the boundary is empty, then collapse again at the parent level.
	if len(results) == 0 && qt.Parent != nil {
		return qt.Parent.Collapse()
	}
	// If there are fewer results than the capacity of a single quad, grab them and terminate here.
	// NOTE(review): a leaf can hold up to Capacity points, so `<=` would
	// also be collapsible — confirm whether the strict `<` is intentional.
	if len(results) < qt.Capacity {
		qt.Northwest = nil
		qt.Northeast = nil
		qt.Southwest = nil
		qt.Southeast = nil
		qt.Nodes = NewLinkedList()
		for _, p := range results {
			qt.Nodes.Insert(p)
		}
		return true
	}
	// No operation: still too many points to flatten into one node.
	return true
}
// Remove removes the value at point p from the quadtree, triggering
// Collapse when a leaf empties out so redundant subdivisions fold back
// into their parent. It returns false when p is outside this tree's
// boundary or was not stored in it.
func (qt *QuadTree) Remove(p *Point) bool {
	// If point not in our boundary, we can't remove it.
	if !qt.Boundary.ContainsPoint(p) {
		return false
	}
	// Leaf node: remove the value from the local list.
	if qt.Northwest == nil {
		if !qt.Nodes.Contains(p) {
			return false
		}
		qt.Nodes.Remove(p)
		if qt.Nodes.Count > 0 {
			return true
		}
		// The leaf is now empty; try to fold empty quads upward.
		return qt.Collapse()
	}
	// Interior node: delegate to whichever quadrant holds p.
	// (The previous version compared each result `== true`, which
	// staticcheck flags as S1002.)
	return qt.Northwest.Remove(p) ||
		qt.Northeast.Remove(p) ||
		qt.Southwest.Remove(p) ||
		qt.Southeast.Remove(p)
}
// QueryRect returns every point stored in the tree that falls inside r.
func (qt *QuadTree) QueryRect(r *Rect) []*Point {
	results := []*Point{}
	// Nothing to report when r does not even touch this quad.
	if !qt.Boundary.CollidesRect(r) {
		return results
	}
	// Points held directly at this level.
	for node := qt.Nodes.Head; node != nil; node = node.Next {
		if pt := node.Value.(*Point); r.ContainsPoint(pt) {
			results = append(results, pt)
		}
	}
	// Leaf: no children to descend into.
	if qt.Northwest == nil {
		return results
	}
	// Interior: merge in the matches from each quadrant.
	for _, child := range []*QuadTree{qt.Northwest, qt.Northeast, qt.Southwest, qt.Southeast} {
		results = append(results, child.QueryRect(r)...)
	}
	return results
}
// NewQuadTree creates a new quadtree instance
func NewQuadTree(width float64, height float64) *QuadTree {
qt := &QuadTree{
Capacity: QuadTreeNodeMaxElements,
Nodes: NewLinkedList(),
Boundary: &Rect{X: 0, Y: 0, W: width, H: height},
Parent: nil,
}
return qt
} | src/quad_tree.go | 0.846165 | 0.415551 | quad_tree.go | starcoder |
package ta
// MovingAverage is a Study that is specifically a moving average; the
// unexported ma() method restricts implementations to this package.
type MovingAverage interface {
	Study
	ma()
}

// implMA is embedded by the concrete moving averages to satisfy the
// MovingAverage marker interface.
type implMA struct {
	noMulti
}

func (implMA) ma() {}

// MovingAverageFunc defines a function that returns an updatable moving average for the given period
type MovingAverageFunc func(period int) MovingAverage

// MovingAverage applies the moving average returned by fn(period) to ta
// and returns both the resulting series and the updatable average itself.
func (ta *TA) MovingAverage(fn MovingAverageFunc, period int) (*TA, MovingAverage) {
	ma := fn(period)
	ta = ApplyStudy(ma, ta)
	return ta, ma
}
// SMA - Simple Moving Average
// Returns an updatable simple moving average over the last `period` values.
func SMA(period int) MovingAverage {
	checkPeriod(period, 2)
	return &sma{
		data:   NewCapped(period),
		period: period,
	}
}

var _ Study = (*sma)(nil)

// sma keeps a running sum over a capped buffer of the most recent
// `period` values.
type sma struct {
	implMA
	data   *TA // capped buffer holding the most recent `period` values
	idx    int
	sum    Decimal // running sum of the values currently in the window
	period int
	count  int // number of values seen so far, saturating at period
}

// Update pushes vs into the window and returns the current average.
func (l *sma) Update(vs ...Decimal) Decimal {
	for _, v := range vs {
		// prev is presumably the value displaced from the capped buffer
		// (zero while the window is still filling) — TODO confirm against
		// TA.Update's contract.
		prev := l.data.Update(v)
		if l.count < l.period {
			l.count++
		}
		l.sum = l.sum - prev + v
	}
	return l.sum / Decimal(l.count)
}

// Len returns the configured window length.
func (l *sma) Len() int { return l.period }
// EMA - Exponential Moving Average
// An alias for CustomEMA(period, 2 / (period+1))
func EMA(period int) MovingAverage {
	return CustomEMA(period, 0)
}

// CustomEMA - returns an updatable EMA with the given k.
// A k of 0 selects the standard smoothing factor 2 / (period+1).
func CustomEMA(period int, k Decimal) MovingAverage {
	checkPeriod(period, 2)
	if k == 0 {
		k = Decimal(2 / float64(period+1))
	}
	return &ema{
		k:      k,
		period: period,
	}
}

var _ Study = (*ema)(nil)

// ema holds the EMA recurrence state: once the first `period` samples have
// been seen, prevMA is the latest average; before that it is the running
// sum used to seed the average.
type ema struct {
	implMA
	k      Decimal
	prevMA Decimal
	period int
	idx    int
	set    bool
}

// Setup seeds the EMA with the simple average of the first `period` values
// of d, then folds the remainder of the series through Update.
func (l *ema) Setup(d *TA) *TA {
	l.set = true
	l.prevMA = d.Slice(0, l.period).Avg()
	d = d.Slice(l.period, 0).Map(func(d Decimal) Decimal { return l.Update(d) }, false)
	return d
}

// Update feeds every value of vs into the average and returns the latest
// EMA value. Fix: the previous version returned after the first value of a
// variadic call once warmed up, silently dropping the remaining values —
// inconsistent with sma.Update and wma.Update, which consume all of vs.
func (l *ema) Update(vs ...Decimal) Decimal {
	for _, v := range vs {
		if l.set {
			l.prevMA = v.Sub(l.prevMA).Mul(l.k).Add(l.prevMA)
			continue
		}
		// Warm-up: accumulate, then convert the sum into the seed average.
		l.prevMA += v
		if l.idx++; l.idx == l.period {
			l.set = true
			l.prevMA = l.prevMA / Decimal(l.period)
		}
	}
	return l.prevMA
}

// Len returns the configured period.
func (l *ema) Len() int { return l.period }

// copy returns a shallow copy of the EMA state.
func (l *ema) copy() ema {
	return *l
}
// WMA - Weighted Moving Average
// An alias for CustomWMA(period, (period * (period + 1)) >> 1)
func WMA(period int) MovingAverage {
	// Default weight is 1 + 2 + ... + period.
	w := Decimal((period * (period + 1)) >> 1)
	return CustomWMA(period, w)
}

// CustomWMA returns an updatable WMA with the given weight
func CustomWMA(period int, weight Decimal) MovingAverage {
	checkPeriod(period, 2)
	return &wma{
		data:   NewSize(period, false),
		weight: weight,
		period: period,
	}
}

var _ Study = (*wma)(nil)

// wma maintains a linearly-weighted moving average over a ring buffer of
// the last `period` values, using the O(1) identity: the next weighted sum
// equals the previous one plus period*v minus the plain window sum.
type wma struct {
	implMA
	data        *TA // ring buffer of the last `period` values
	weight      Decimal
	sum         Decimal // plain sum of the current window
	weightedSum Decimal // linearly weighted sum of the current window
	idx         int
	period      int
	set         bool
}

// Setup warms the WMA with the first period-1 values of d, then maps the
// rest of the series through Update.
func (l *wma) Setup(d *TA) *TA {
	l.set = true
	var sum, wsum Decimal
	for i := 0; i < l.period-1; i++ {
		v := d.Get(i)
		wsum += v * Decimal(i+1)
		sum += v
		l.data.Set(i, v)
	}
	l.sum, l.weightedSum = sum, wsum
	l.idx = l.period - 2
	d = d.Slice(l.period-1, 0).Map(func(d Decimal) Decimal { return l.Update(d) }, false)
	return d
}

// Update feeds vs into the window and returns the latest weighted average.
func (l *wma) Update(vs ...Decimal) (rv Decimal) {
	for _, v := range vs {
		if !l.set {
			// Warm-up: partial window, weighted by the filled fraction.
			l.data.Set(l.idx, v)
			if l.idx < l.period-1 {
				l.idx++
				l.weightedSum += v * Decimal(l.idx)
				l.sum += v
				return l.weightedSum / (l.weight * Decimal(l.idx))
			}
			l.idx = l.period - 2
			l.set = true
		}
		// Steady state: O(1) rolling update of both sums.
		l.idx = (l.idx + 1) % l.period
		l.data.Set(l.idx, v)
		l.weightedSum += v * Decimal(l.period)
		l.sum += v
		rv = l.weightedSum / l.weight
		l.weightedSum -= l.sum
		// Evict the oldest value from the plain sum for the next round.
		pidx := (l.idx + 1) % l.period
		l.sum -= l.data.Get(pidx)
	}
	return rv
}

// Len returns the configured period.
func (l *wma) Len() int { return l.period }
// DEMA - Double Exponential Moving Average
func DEMA(period int) MovingAverage {
	return DoubleMA(period, EMA)
}

// DoubleMA - Double Moving Average
// Computes 2*MA(v) - MA(MA(v)) to reduce the lag of a single MA.
func DoubleMA(period int, ma MovingAverageFunc) MovingAverage {
	checkPeriod(period, 2)
	return &dxma{
		e1: ma(period),
		e2: ma(period),
	}
}

var _ Study = (*dxma)(nil)

// dxma chains two moving averages; during the first `period` updates the
// second average is warmed with the raw values rather than e1's output.
type dxma struct {
	implMA
	e1, e2 Study
	idx    int
}

// Update feeds vs through both averages and returns 2*e1 - e2.
func (l *dxma) Update(vs ...Decimal) Decimal {
	period := l.Len()
	var e1, e2 Decimal
	for _, v := range vs {
		e1 = l.e1.Update(v)
		if l.idx < period {
			// Warm-up: seed the second stage with the raw values.
			e2 = l.e2.Update(v)
			l.idx++
			continue
		}
		e2 = l.e2.Update(e1)
	}
	return e1*2 - e2
}

// Len returns the configured period.
func (l *dxma) Len() int { return l.e1.Len() }
// TEMA - Triple Exponential Moving Average
func TEMA(period int) MovingAverage {
	return TripleMA(period, EMA)
}

// TripleMA - Triple Moving Average
// Computes 3*MA - 3*MA(MA) + MA(MA(MA)) to further reduce lag.
func TripleMA(period int, ma MovingAverageFunc) MovingAverage {
	checkPeriod(period, 2)
	return &txma{
		e1:     ma(period),
		e2:     ma(period),
		e3:     ma(period),
		period: period,
	}
}

var _ Study = (*txma)(nil)

// txma chains three moving averages; during the first `period` updates the
// later stages are warmed with the raw values rather than upstream output.
type txma struct {
	implMA
	e1, e2, e3 Study
	period     int
	idx        int
	max2       int // NOTE(review): appears unused in this file — confirm before removing
	max3       int // NOTE(review): appears unused in this file — confirm before removing
}

// Update feeds vs through the three averages and returns 3*e1 - 3*e2 + e3.
func (l *txma) Update(vs ...Decimal) Decimal {
	var e1, e2, e3 Decimal
	for _, v := range vs {
		e1 = l.e1.Update(v)
		if l.idx < l.period {
			// Warm-up: seed the later stages with the raw values.
			l.idx++
			e2 = l.e2.Update(v)
			e3 = l.e3.Update(v)
			continue
		}
		e2 = l.e2.Update(e1)
		e3 = l.e3.Update(e2)
	}
	return 3*e1 - 3*e2 + e3
}

// Len returns the configured period.
func (l *txma) Len() int { return l.period }
// TODO:
// - Trima
// - KAMA
// - MAMA/FAMA
// - T3 | studies_ma.go | 0.850903 | 0.457924 | studies_ma.go | starcoder |
package math
import (
"math"
"strconv"
"github.com/flowonyx/functional/option"
"golang.org/x/exp/constraints"
)
// numeric constrains to any built-in integer or floating-point type.
type numeric interface {
	constraints.Float | constraints.Integer
}

// Abs returns the absolute value of x.
// Caveat: for the minimum value of a signed integer type, -x overflows
// (two's complement) and the same negative value is returned.
func Abs[T constraints.Signed](x T) T {
	if x < 0 {
		return -x
	}
	return x
}
// RoundInt returns the nearest integer as an int, rounding half away from zero.
func RoundInt[T constraints.Float](x T) int {
	return int(math.Round(float64(x)))
}

// Round returns the nearest integer as the float type of x, rounding half away from zero.
func Round[T constraints.Float](x T) T {
	return T(math.Round(float64(x)))
}

// RoundToEven returns the nearest integer as the float type of x, rounding ties to even.
func RoundToEven[T constraints.Float](x T) T {
	return T(math.RoundToEven(float64(x)))
}

// RoundToEvenInt returns the nearest integer as an int, rounding ties to even.
func RoundToEvenInt[T constraints.Float](x T) int {
	return int(math.RoundToEven(float64(x)))
}
// Cbrt returns the cube root of x, computed in float64 precision and
// converted back to T (truncated for integer types).
func Cbrt[T numeric](x T) T {
	return T(math.Cbrt(float64(x)))
}

// CopySign returns a value with the magnitude of x and the sign of y.
// Caveat: the computation goes through float64, so integer magnitudes
// above 2^53 may lose precision.
func CopySign[T1, T2 constraints.Signed](x T1, y T2) T1 {
	return T1(math.Copysign(float64(x), float64(y)))
}

// Dim returns the maximum of x-y or 0.
func Dim[T numeric](x, y T) T {
	return T(math.Dim(float64(x), float64(y)))
}

// Pow returns x**y, the base-x exponential of y.
func Pow[T numeric](x, y T) T {
	return T(math.Pow(float64(x), float64(y)))
}

// Pow10 returns 10**n, the base-10 exponential of n.
// Caveat: T is an integer type, so any negative exponent truncates to 0,
// and exponents past the range of T overflow on conversion.
func Pow10[T constraints.Integer](x T) T {
	return T(math.Pow10(int(x)))
}

// Remainder returns the IEEE 754 floating-point remainder of x/y.
func Remainder[T numeric](x, y T) T {
	return T(math.Remainder(float64(x), float64(y)))
}
// Sqrt returns the square root of x.
func Sqrt[T constraints.Integer](x T) T {
return T(math.Sqrt(float64(x)))
}
// Max returns the maximum value of x or y.
// Works for any ordered type, including strings.
func Max[T constraints.Ordered](x, y T) T {
	if x > y {
		return x
	}
	return y
}

// Min returns the minimum value of x or y.
// Works for any ordered type, including strings.
func Min[T constraints.Ordered](x, y T) T {
	if x < y {
		return x
	}
	return y
}
// TryParseInt returns the integer parsed from s as an Option.
// If parsing fails, it returns None.
func TryParseInt[T constraints.Integer](s string) option.Option[T] {
r, err := strconv.Atoi(s)
if err != nil {
return option.None[T]()
}
return option.Some(T(r))
} | math/math.go | 0.900307 | 0.699588 | math.go | starcoder |
package jsonlogic
// customOperators holds custom operators registered via AddOperator,
// keyed by operator name.
// NOTE(review): the map is read by operation() without synchronization —
// register all operators before evaluating rules concurrently.
var customOperators = make(map[string]func(values, data interface{}) (result interface{}))

// AddOperator allows for custom operators to be used.
// A custom operator with the same name as a built-in one takes precedence.
func AddOperator(key string, cb func(values, data interface{}) (result interface{})) {
	customOperators[key] = cb
}
func operation(operator string, values, data interface{}) interface{} {
// Check against any custom operators
for index, customOperation := range customOperators {
if operator == index {
return customOperation(values, data)
}
}
if operator == "missing" {
return missing(values, data)
}
if operator == "missing_some" {
return missingSome(values, data)
}
if operator == "var" {
return getVar(values, data)
}
if operator == "set" {
return setProperty(values, data)
}
if operator == "cat" {
return concat(values)
}
if operator == "substr" {
return substr(values)
}
if operator == "merge" {
return merge(values, 0)
}
if operator == "if" {
return conditional(values, data)
}
if isPrimitive(values) {
return unary(operator, values)
}
if operator == "max" {
return max(values)
}
if operator == "min" {
return min(values)
}
if values == nil {
return nil
}
parsed := values.([]interface{})
if operator == "and" {
return _and(parsed)
}
if operator == "or" {
return _or(parsed)
}
if len(parsed) == 1 {
return unary(operator, parsed[0])
}
if operator == "?:" {
if parsed[0].(bool) {
return parsed[1]
}
return parsed[2]
}
if operator == "+" {
return sum(values)
}
if operator == "-" {
return minus(values)
}
if operator == "*" {
return mult(values)
}
if operator == "/" {
return div(values)
}
if operator == "in" {
return _in(parsed[0], parsed[1])
}
if operator == "in_sorted" {
return _inSorted(parsed[0], parsed[1])
}
if operator == "%" {
return mod(parsed[0], parsed[1])
}
if len(parsed) == 3 {
return between(operator, parsed, data)
}
if operator == "<" {
return less(parsed[0], parsed[1])
}
if operator == ">" {
return less(parsed[1], parsed[0])
}
if operator == "<=" {
return less(parsed[0], parsed[1]) || equals(parsed[0], parsed[1])
}
if operator == ">=" {
return less(parsed[1], parsed[0]) || equals(parsed[0], parsed[1])
}
if operator == "===" {
return hardEquals(parsed[0], parsed[1])
}
if operator == "!=" {
return !equals(parsed[0], parsed[1])
}
if operator == "!==" {
return !hardEquals(parsed[0], parsed[1])
}
if operator == "==" {
return equals(parsed[0], parsed[1])
}
panic(ErrInvalidOperator{
operator: operator,
})
} | operation.go | 0.517571 | 0.522385 | operation.go | starcoder |
package checks
// searchChecksYAML defines the bundled log/file-content checks: for each
// check, the file type to scan, the error (and optional cure) pattern, and
// the remediation text shown when the check fails.
// Fix: the two KB URLs in the kmem-errors cure were missing the ".com" in
// "support.d2iq.com" (compare every other d2iq KB link in this list).
const searchChecksYAML = `
- name: unmount-volume
  description: Checks if Mesos agents had problems unmounting local persistent volumes. MESOS-8830
  fileTypeName: mesos-agent-log
  errorPattern: 'Failed to remove rootfs mount point'
  cure: Please, refer to the KB article https://support.d2iq.com/s/article/DC-OS-Impacted-by-a-Mesos-Agent-Garbage-Collection-Issue and MESOS-8830
- name: exhibitor-disk-space
  description: Checks for disk space errors in Exhibitor logs
  fileTypeName: exhibitor-log
  errorPattern: 'No space left on device'
  cure: Please check that there is sufficient free space on the disk.
- name: migration-in-progress
  description: Detects marathon-upgrade-in-progress flag on failed cluster after upgrade
  fileTypeName: marathon
  errorPattern: 'Migration Failed: Migration is already in progress'
  cure: Please refer to the KB article https://support.d2iq.com/s/article/marathon-migration-in-progress-error
- name: networking-errors
  description: Identifies errors in dcos-net logs
  fileTypeName: net-log
  errorPattern: '\[(?P<Level>error|emergency|critical|alert)\]'
  isErrorPatternRegexp: true
  cure: 'Please, collect the crash dumps with "sudo tar -czvf 172.29.108.26_master_dcos_net.tgz -C /opt/mesosphere/active/dcos-net/ ." and contact the networking team.'
- name: zookeeper-fsync
  description: Detects ZooKeeper problems with the write-ahead log
  fileTypeName: exhibitor-log
  errorPattern: 'fsync-ing the write ahead log in'
  max: 1
  cure: 'Zookeeper fsync threshold exceeded events detected. Zookeeper is swapping or disk IO is saturated. See more here https://jira.mesosphere.com/browse/COPS-4403'
- name: cockroach-time-sync
  description: Detects CockroachDB time sync issues
  fileTypeName: cockroach-log
  errorPattern: 'fewer than half the known nodes are within the maximum offset'
  cure: CockroachDB logs indicate that there is or was an issue with time sync. Please ensure that time is in sync and CockroachDB is healthy on all Masters
- name: time-sync
  description: Checks if time is synchronised on the host machine.
  fileTypeName: net-log
  errorPattern: '(internal consistency is broken|Unable to determine clock sync|Time is not synchronized|Clock is less stable than allowed|Clock is out of sync)'
  isErrorPatternRegexp: true
  curePattern: 'Time is in sync'
  cure: Check NTP settings and NTP server availability.
- name: zookeeper-instances
  description: Checks if all ZooKeeper instances are up and running
  fileTypeName: net-log
  errorPattern: 'Exception: Expected.*servers'
  isErrorPatternRegexp: true
  curePattern: 'Zookeeper connection established, state: CONNECTED'
  cure: Make all ZooKeeper instances run and available for each other through the network.
- name: mesos-agent-invalid-cert
  description: Checks if there are errors for invalid certificate when fetching artifacts
  fileTypeName: mesos-agent-log
  errorPattern: 'Container.*Failed to perform ''curl''.*SSL certificate problem: self signed certificate'
  isErrorPatternRegexp: true
  cure: 'Mesos agent is using certificates which does not allow to fetch an artifact from some repository. Please see https://jira.mesosphere.com/browse/COPS-2315 and https://jira.mesosphere.com/browse/COPS-2106 for more information.'
- name: overlay-network-recovery
  description: Checks if the DC/OS overlay network master is in recovery state
  fileTypeName: mesos-master-log
  errorPattern: 'overlay-master in.*RECOVERING.*state'
  isErrorPatternRegexp: true
  curePattern: 'Moving overlay-master.* to .*RECOVERED.* state.'
  isCurePatternRegexp: true
  cure: 'Mesos master Overlay module cannot recover. Please see the KB articles https://support.d2iq.com/s/article/Known-Issue-Invalid-DNS-Resolvers-MSPH-2018-0012 and https://support.d2iq.com/s/article/Critical-Issue-with-Overlay-Networking for more information.'
- name: kmem-errors
  description: Detects kernel memory (kmem) errors in dmesg log
  fileTypeName: dmesg-log
  errorPattern: 'SLUB: Unable to allocate memory on node -1'
  cure: 'Please see KB articles https://support.d2iq.com/s/article/Critical-Issue-KMEM-MSPH-2018-0006 and https://support.d2iq.com/s/article/Known-Issue-KMEM-with-Kubernetes-MSPH-2019-0002'
- name: oom-kills
  description: Detects out of memory kills in dmesg log
  fileTypeName: dmesg-log
  errorPattern: 'invoked oom-killer'
  cure: 'The operating system is killing processes which exceed system or container memory limits. Please check which processes are getting killed. If it is a DC/OS container, increase its memory limit.'
- name: docker-not-running
  description: Checks if docker is running
  fileTypeName: ps
  errorPattern: 'dockerd'
  failIfNotFound: true
  cure: 'Docker daemon should be running on all DC/OS nodes.'
- name: nscd-running
  description: Detects if Name Service Cache Daemon (nscd) is running on a DC/OS node
  fileTypeName: ps
  errorPattern: 'nscd'
  cure: 'Please ensure that nscd is stopped and disabled.'
- name: firewalld-running
  description: Detects if firewalld is running on a DC/OS node
  fileTypeName: ps
  errorPattern: 'firewalld'
  cure: 'Please ensure that firewalld is stopped and disabled.'
- name: task-blocked
  description: Detects if tasks were blocked for a long time, which, most probably, a manifestation of slow I/O.
  fileTypeName: dmesg-log
  errorPattern: 'task .+ blocked for more than .+ seconds'
  isErrorPatternRegexp: true
  cure: 'Please see this article for more information: https://helpful.knobs-dials.com/index.php/INFO:_task_blocked_for_more_than_120_seconds'
- name: overlay-ip-pool
  description: Detects if Mesos Overlay module exhausted its IP address pool
  fileTypeName: mesos-master-log
  errorPattern: 'Unable to reserve VTEP'
  cure: 'Please see this article for more information: https://support.d2iq.com/s/article/What-to-do-if-the-DC-OS-Overlay-IP-pool-is-exhausted'
`
package smd
import (
"math"
"os"
"time"
"github.com/ChristopherRabotin/ode"
kitlog "github.com/go-kit/kit/log"
"github.com/gonum/matrix/mat64"
)
const (
	// transitionPhiOnly toggles whether SetState stores only the STM of the
	// latest transition (Φ(k,k-1)) instead of the cumulative Φ(k,0).
	transitionPhiOnly = false
)

// OrbitEstimate is an ode.Integrable which allows to propagate an orbit via its initial estimate.
type OrbitEstimate struct {
	Φ      *mat64.Dense  // STM (state transition matrix)
	Orbit  Orbit         // estimated orbit
	Perts  Perturbations // perturbations to account for
	StopDT time.Time     // end time of the integration
	dt     time.Time     // current time of the integration
	step   time.Duration // time step
	logger kitlog.Logger // logger
}
// GetState returns the integration state vector: position and velocity
// followed by the STM Φ flattened row-major.
func (e *OrbitEstimate) GetState() []float64 {
	rows, cols := e.Φ.Dims()
	R, V := e.Orbit.RV()
	s := make([]float64, 0, 6+rows*cols)
	s = append(s, R[0], R[1], R[2], V[0], V[1], V[2])
	// Append the components of Φ row by row.
	for i := 0; i < rows; i++ {
		for j := 0; j < cols; j++ {
			s = append(s, e.Φ.At(i, j))
		}
	}
	return s
}
// SetState sets the next state at time t.
// The state vector s is [Rx Ry Rz Vx Vy Vz Φ...] with Φ flattened
// row-major; the integration clock is advanced by one step.
func (e *OrbitEstimate) SetState(t float64, s []float64) {
	R := []float64{s[0], s[1], s[2]}
	V := []float64{s[3], s[4], s[5]}
	e.Orbit = *NewOrbitFromRV(R, V, e.Orbit.Origin)
	// Extract the components of Φ
	sIdx := 6
	rΦ, cΦ := e.Φ.Dims()
	Φk20 := mat64.NewDense(rΦ, cΦ, nil)
	for i := 0; i < rΦ; i++ {
		for j := 0; j < cΦ; j++ {
			Φk20.Set(i, j, s[sIdx])
			sIdx++
		}
	}
	if transitionPhiOnly {
		// Compute the Φ for this transition only: Φ(k,k-1) = Φ(k,0) · Φ(k-1,0)⁻¹.
		var Φinv mat64.Dense
		if err := Φinv.Inverse(e.Φ); err != nil {
			panic("could not invert e.Φ")
		}
		e.Φ.Mul(Φk20, &Φinv)
	} else {
		e.Φ = Φk20
	}
	// Increment the time.
	e.dt = e.dt.Add(e.step)
}
// Stop returns whether we should stop the integration, i.e. whether the
// internal clock has passed StopDT (a state exactly at StopDT still runs).
func (e *OrbitEstimate) Stop(t float64) bool {
	return e.dt.After(e.StopDT)
}

// State returns the latest estimated state (time and orbit; no spacecraft).
func (e *OrbitEstimate) State() State {
	return State{e.dt, Spacecraft{}, e.Orbit, nil, nil}
}
// Func does the math. Returns the state derivative for the integrator.
// The state f is [R V Φ] with Φ flattened row-major; the returned fDot is
// [V A Φ̇] where Φ̇ = A·Φ and A is the Jacobian of the dynamics.
func (e *OrbitEstimate) Func(t float64, f []float64) (fDot []float64) {
	// XXX: Note that this function is very similar to Mission.Func for a Cartesian propagation.
	// *BUT* we need to add in all the components of Φ, since they have to be integrated too.
	rΦ, cΦ := e.Φ.Dims()
	fDot = make([]float64, 6+rΦ*cΦ) // init return vector
	// Re-create the orbit from the state.
	R := []float64{f[0], f[1], f[2]}
	V := []float64{f[3], f[4], f[5]}
	orbit := NewOrbitFromRV(R, V, e.Orbit.Origin)
	// Two-body point-mass acceleration factor: -μ / r³.
	bodyAcc := -orbit.Origin.μ / math.Pow(orbit.RNorm(), 3)
	// d\vec{R}/dt
	fDot[0] = f[3]
	fDot[1] = f[4]
	fDot[2] = f[5]
	// d\vec{V}/dt
	fDot[3] = bodyAcc * f[0]
	fDot[4] = bodyAcc * f[1]
	fDot[5] = bodyAcc * f[2]
	// Add any configured perturbations to the position/velocity rates.
	pert := e.Perts.Perturb(*orbit, e.dt, Spacecraft{})
	for i := 0; i < 6; i++ {
		fDot[i] += pert[i]
	}
	// Extract the components of Φ
	fIdx := 6
	Φ := mat64.NewDense(rΦ, cΦ, nil)
	ΦDot := mat64.NewDense(rΦ, cΦ, nil)
	for i := 0; i < rΦ; i++ {
		for j := 0; j < cΦ; j++ {
			Φ.Set(i, j, f[fIdx])
			fIdx++
		}
	}
	// Compute the STM rate: build the Jacobian A, then Φ̇ = A·Φ.
	A := mat64.NewDense(6, 6, nil)
	// Top right is Identity 3x3 (∂Ṙ/∂V).
	A.Set(0, 3, 1)
	A.Set(1, 4, 1)
	A.Set(2, 5, 1)
	// Bottom left holds the gravity-gradient partials ∂a/∂R.
	x := R[0]
	y := R[1]
	z := R[2]
	x2 := math.Pow(R[0], 2)
	y2 := math.Pow(R[1], 2)
	z2 := math.Pow(R[2], 2)
	r2 := x2 + y2 + z2
	r232 := math.Pow(r2, 3/2.)
	r252 := math.Pow(r2, 5/2.)
	// Point-mass gravity partials.
	dAxDx := 3*e.Orbit.Origin.μ*x2/r252 - e.Orbit.Origin.μ/r232
	dAxDy := 3 * e.Orbit.Origin.μ * x * y / r252
	dAxDz := 3 * e.Orbit.Origin.μ * x * z / r252
	dAyDx := 3 * e.Orbit.Origin.μ * x * y / r252
	dAyDy := 3*e.Orbit.Origin.μ*y2/r252 - e.Orbit.Origin.μ/r232
	dAyDz := 3 * e.Orbit.Origin.μ * y * z / r252
	dAzDx := 3 * e.Orbit.Origin.μ * x * z / r252
	dAzDy := 3 * e.Orbit.Origin.μ * y * z / r252
	dAzDz := 3*e.Orbit.Origin.μ*z2/r252 - e.Orbit.Origin.μ/r232
	A.Set(3, 0, dAxDx)
	A.Set(4, 0, dAyDx)
	A.Set(5, 0, dAzDx)
	A.Set(3, 1, dAxDy)
	A.Set(4, 1, dAyDy)
	A.Set(5, 1, dAzDy)
	A.Set(3, 2, dAxDz)
	A.Set(4, 2, dAyDz)
	A.Set(5, 2, dAzDz)
	// Jn perturbations: augment the partials with zonal-harmonic terms.
	if e.Perts.Jn > 1 {
		// Ai0 = \frac{\partial a}{\partial x}
		// Ai1 = \frac{\partial a}{\partial y}
		// Ai2 = \frac{\partial a}{\partial z}
		A30 := A.At(3, 0)
		A40 := A.At(4, 0)
		A50 := A.At(5, 0)
		A31 := A.At(3, 1)
		A41 := A.At(4, 1)
		A51 := A.At(5, 1)
		A32 := A.At(3, 2)
		A42 := A.At(4, 2)
		A52 := A.At(5, 2)
		// Notation simplification
		z3 := math.Pow(R[2], 3)
		z4 := math.Pow(R[2], 4)
		// Adding those fractions to avoid forgetting the trailing period which makes them floats.
		f32 := 3 / 2.
		f152 := 15 / 2.
		r272 := math.Pow(r2, 7/2.)
		r292 := math.Pow(r2, 9/2.)
		// J2
		j2fact := e.Orbit.Origin.J(2) * math.Pow(e.Orbit.Origin.Radius, 2) * e.Orbit.Origin.μ
		A30 += -f32 * j2fact * (35*x2*z2/r292 - 5*x2/r272 - 5*z2/r272 + 1/r252) //dAxDx
		A40 += -f152 * j2fact * (7*x*y*z2/r292 - x*y/r272)                      //dAyDx
		A50 += -f152 * j2fact * (7*x*z3/r292 - 3*x*z/r272)                      //dAzDx
		A31 += -f152 * j2fact * (7*x*y*z2/r292 - x*y/r272)                      //dAxDy
		A41 += -f32 * j2fact * (35*y2*z2/r292 - 5*y2/r272 - 5*z2/r272 + 1/r252) // dAyDy
		A51 += -f152 * j2fact * (7*y*z3/r292 - 3*y*z/r272)                      // dAzDy
		A32 += -f152 * j2fact * (7*x*z3/r292 - 3*x*z/r272)                      //dAxDz
		A42 += -f152 * j2fact * (7*y*z3/r292 - 3*y*z/r272)                      //dAyDz
		A52 += -f32 * j2fact * (35*z4/r292 - 30*z2/r272 + 3/r252)               // dAzDz
		// J3
		if e.Perts.Jn > 2 {
			z5 := math.Pow(R[2], 5)
			r2112 := math.Pow(r2, 11/2.)
			f52 := 5 / 2.
			f1052 := 105 / 2.
			j3fact := e.Orbit.Origin.J(3) * math.Pow(e.Orbit.Origin.Radius, 3) * e.Orbit.Origin.μ
			A30 += -f52 * j3fact * (63*x2*z3/r2112 - 21*x2*z/r292 - 7*z3/r292 + 3*z/r272) //dAxDx
			A40 += -f1052 * j3fact * (3*x*y*z3/r2112 - x*y*z/r292)                        //dAyDx
			A50 += -f152 * j3fact * (21*x*z4/r2112 - 14*x*z2/r292 + x/r272)               //dAzDx
			A31 += -f1052 * j3fact * (3*x*y*z3/r2112 - x*y*z/r292)                        //dAxDy
			A41 += -f52 * j3fact * (63*y2*z3/r2112 - 21*y2*z/r292 - 7*z3/r292 + 3*z/r272) // dAyDy
			A51 += -f152 * j3fact * (21*y*z4/r2112 - 14*y*z2/r292 + y/r272)               // dAzDy
			A32 += -f152 * j3fact * (21*x*z4/r2112 - 14*x*z2/r292 + x/r272)               //dAxDz
			A42 += -f152 * j3fact * (21*y*z4/r2112 - 14*y*z2/r292 + y/r272)               //dAyDz
			A52 += -f52 * j3fact * (63*z5/r2112 - 70*z3/r292 + 15*z/r272)                 // dAzDz
		}
		// \frac{\partial a}{\partial x}
		A.Set(3, 0, A30)
		A.Set(4, 0, A40)
		A.Set(5, 0, A50)
		// \partial a/\partial y
		A.Set(3, 1, A31)
		A.Set(4, 1, A41)
		A.Set(5, 1, A51)
		// \partial a/\partial z
		A.Set(3, 2, A32)
		A.Set(4, 2, A42)
		A.Set(5, 2, A52)
	}
	ΦDot.Mul(A, Φ)
	// Store ΦDot in fDot
	fIdx = 6
	for i := 0; i < rΦ; i++ {
		for j := 0; j < cΦ; j++ {
			fDot[fIdx] = ΦDot.At(i, j)
			fIdx++
		}
	}
	return fDot
}
// PropagateUntil propagates until the given time is reached.
// This call blocks until the RK4 integration completes.
func (e *OrbitEstimate) PropagateUntil(dt time.Time) {
	e.StopDT = dt
	ode.NewRK4(0, e.step.Seconds(), e).Solve() // Blocking.
}
// NewOrbitEstimate returns a new Estimate of an orbit given the perturbations to be taken into account.
// The only supported state is [\vec{r} \vec{v}]T (for now at least).
// The STM is initialized to the 6x6 identity and the clock to epoch+step.
func NewOrbitEstimate(n string, o Orbit, p Perturbations, epoch time.Time, step time.Duration) *OrbitEstimate {
	// The initial previous STM is identity.
	klog := kitlog.NewLogfmtLogger(kitlog.NewSyncWriter(os.Stdout))
	klog = kitlog.With(klog, "estimate", n)
	stopDT := epoch
	// XXX: We add the step for consistency with Mission. Mission is broken: it skips the first step because the time addition
	// happens in the Stop function instead of the SetState function, the former being called at the start of the integration.
	return &OrbitEstimate{DenseIdentity(6), o, p, stopDT, epoch.Add(step), step, klog}
}
package qdb
/*
#include <qdb/ts.h>
*/
import "C"
import (
"math"
"time"
"unsafe"
)
// TsInt64Point : timestamped int64 data point
type TsInt64Point struct {
	timestamp time.Time // point timestamp
	content   int64     // point value
}

// Timestamp : return data point timestamp
func (t TsInt64Point) Timestamp() time.Time {
	return t.timestamp
}

// Content : return data point content
func (t TsInt64Point) Content() int64 {
	return t.content
}

// NewTsInt64Point : Create new timeseries int64 point
func NewTsInt64Point(timestamp time.Time, value int64) TsInt64Point {
	return TsInt64Point{timestamp, value}
}
// :: internals

// toStructC converts the point into its C API representation.
func (t TsInt64Point) toStructC() C.qdb_ts_int64_point {
	return C.qdb_ts_int64_point{toQdbTimespec(t.timestamp), C.qdb_int_t(t.content)}
}

// toStructG converts a C API point back into its Go representation.
func (t C.qdb_ts_int64_point) toStructG() TsInt64Point {
	return TsInt64Point{t.timestamp.toStructG(), int64(t.value)}
}
// int64PointArrayToC converts pts into a contiguous array of C points and
// returns a pointer to its first element (nil when pts is empty).
func int64PointArrayToC(pts ...TsInt64Point) *C.qdb_ts_int64_point {
	if len(pts) == 0 {
		return nil
	}
	points := make([]C.qdb_ts_int64_point, len(pts))
	for idx, pt := range pts {
		points[idx] = pt.toStructC()
	}
	return &points[0]
}

// int64PointArrayToSlice reinterprets a C array of `length` points as a Go
// slice without copying (capacity pinned to length via three-index slicing).
func int64PointArrayToSlice(points *C.qdb_ts_int64_point, length int) []C.qdb_ts_int64_point {
	// See https://github.com/mattn/go-sqlite3/issues/238 for details.
	return (*[(math.MaxInt32 - 1) / unsafe.Sizeof(C.qdb_ts_int64_point{})]C.qdb_ts_int64_point)(unsafe.Pointer(points))[:length:length]
}
// int64PointArrayToGo copies a C array of pointsCount points into a Go
// slice of TsInt64Point (safe to use after the C memory is released).
func int64PointArrayToGo(points *C.qdb_ts_int64_point, pointsCount C.qdb_size_t) []TsInt64Point {
	length := int(pointsCount)
	output := make([]TsInt64Point, length)
	if length > 0 {
		slice := int64PointArrayToSlice(points, length)
		for i, s := range slice {
			output[i] = s.toStructG()
		}
	}
	return output
}
// TsInt64Column : a time series int64 column
type TsInt64Column struct {
	tsColumn
}

// Int64Column : create a column object bound to this timeseries entry
func (entry TimeseriesEntry) Int64Column(columnName string) TsInt64Column {
	return TsInt64Column{tsColumn{NewTsColumnInfo(columnName, TsColumnInt64), entry}}
}
// Insert int64 points into a timeseries.
// The C strings allocated for the alias and column name are freed on return.
func (column TsInt64Column) Insert(points ...TsInt64Point) error {
	alias := convertToCharStar(column.parent.alias)
	defer releaseCharStar(alias)
	columnName := convertToCharStar(column.name)
	defer releaseCharStar(columnName)
	contentCount := C.qdb_size_t(len(points))
	content := int64PointArrayToC(points...)
	err := C.qdb_ts_int64_insert(column.parent.handle, alias, columnName, content, contentCount)
	return makeErrorOrNil(err)
}
// EraseRanges : erase all points in the specified ranges.
// Returns the number of points actually erased.
func (column TsInt64Column) EraseRanges(rgs ...TsRange) (uint64, error) {
	alias := convertToCharStar(column.parent.alias)
	defer releaseCharStar(alias)
	columnName := convertToCharStar(column.name)
	defer releaseCharStar(columnName)
	ranges := rangeArrayToC(rgs...)
	rangesCount := C.qdb_size_t(len(rgs))
	erasedCount := C.qdb_uint_t(0)
	err := C.qdb_ts_erase_ranges(column.parent.handle, alias, columnName, ranges, rangesCount, &erasedCount)
	return uint64(erasedCount), makeErrorOrNil(err)
}
// GetRanges : Retrieves int64s in the specified range of the time series column.
// It is an error to call this function on a non existing time-series.
func (column TsInt64Column) GetRanges(rgs ...TsRange) ([]TsInt64Point, error) {
	alias := convertToCharStar(column.parent.alias)
	defer releaseCharStar(alias)
	columnName := convertToCharStar(column.name)
	defer releaseCharStar(columnName)
	ranges := rangeArrayToC(rgs...)
	rangesCount := C.qdb_size_t(len(rgs))
	var points *C.qdb_ts_int64_point
	var pointsCount C.qdb_size_t
	err := C.qdb_ts_int64_get_ranges(column.parent.handle, alias, columnName, ranges, rangesCount, &points, &pointsCount)
	if err == 0 {
		// Copy the results into Go memory, then release the C allocation.
		defer column.parent.Release(unsafe.Pointer(points))
		return int64PointArrayToGo(points, pointsCount), nil
	}
	return nil, ErrorType(err)
}
// TsInt64Aggregation : Aggregation of int64 type
type TsInt64Aggregation struct {
kind TsAggregationType
rng TsRange
count int64
point TsInt64Point
}
// Type : returns the type of the aggregation
func (t TsInt64Aggregation) Type() TsAggregationType {
return t.kind
}
// Range : returns the range of the aggregation
func (t TsInt64Aggregation) Range() TsRange {
return t.rng
}
// Count : returns the number of points aggregated into the result
func (t TsInt64Aggregation) Count() int64 {
return t.count
}
// Result : result of the aggregation
func (t TsInt64Aggregation) Result() TsInt64Point {
return t.point
}
// NewInt64Aggregation : Create new timeseries int64 aggregation
func NewInt64Aggregation(kind TsAggregationType, rng TsRange) *TsInt64Aggregation {
return &TsInt64Aggregation{kind, rng, 0, TsInt64Point{}}
}
// :: internals
func (t TsInt64Aggregation) toStructC() C.qdb_ts_int64_aggregation_t {
var cAgg C.qdb_ts_int64_aggregation_t
cAgg._type = C.qdb_ts_aggregation_type_t(t.kind)
cAgg._range = t.rng.toStructC()
cAgg.count = C.qdb_size_t(t.count)
cAgg.result = t.point.toStructC()
return cAgg
}
func (t C.qdb_ts_int64_aggregation_t) toStructG() TsInt64Aggregation {
var gAgg TsInt64Aggregation
gAgg.kind = TsAggregationType(t._type)
gAgg.rng = t._range.toStructG()
gAgg.count = int64(t.count)
gAgg.point = t.result.toStructG()
return gAgg
}
func int64AggregationArrayToC(ags ...*TsInt64Aggregation) *C.qdb_ts_int64_aggregation_t {
if len(ags) == 0 {
return nil
}
var int64Aggregations []C.qdb_ts_int64_aggregation_t
for _, ag := range ags {
int64Aggregations = append(int64Aggregations, ag.toStructC())
}
return &int64Aggregations[0]
}
func int64AggregationArrayToSlice(aggregations *C.qdb_ts_int64_aggregation_t, length int) []C.qdb_ts_int64_aggregation_t {
// See https://github.com/mattn/go-sqlite3/issues/238 for details.
return (*[(math.MaxInt32 - 1) / unsafe.Sizeof(C.qdb_ts_int64_aggregation_t{})]C.qdb_ts_int64_aggregation_t)(unsafe.Pointer(aggregations))[:length:length]
}
func int64AggregationArrayToGo(aggregations *C.qdb_ts_int64_aggregation_t, aggregationsCount C.qdb_size_t, aggs []*TsInt64Aggregation) []TsInt64Aggregation {
length := int(aggregationsCount)
output := make([]TsInt64Aggregation, length)
if length > 0 {
slice := int64AggregationArrayToSlice(aggregations, length)
for i, s := range slice {
*aggs[i] = s.toStructG()
output[i] = s.toStructG()
}
}
return output
}
// TODO(Vianney): Implement aggregate
// Aggregate : Aggregate a sub-part of a timeseries from the specified aggregations.
// It is an error to call this function on a non existing time-series.
func (column TsInt64Column) Aggregate(aggs ...*TsInt64Aggregation) ([]TsInt64Aggregation, error) {
return nil, ErrNotImplemented
}
// Int64 : adds an int64 in row transaction
func (t *TsBulk) Int64(value int64) *TsBulk {
if t.err == nil {
t.err = makeErrorOrNil(C.qdb_ts_row_set_int64(t.table, C.qdb_size_t(t.index), C.qdb_int_t(value)))
}
t.index++
return t
}
// GetInt64 : gets an int64 in row
func (t *TsBulk) GetInt64() (int64, error) {
var content C.qdb_int_t
err := C.qdb_ts_row_get_int64(t.table, C.qdb_size_t(t.index), &content)
t.index++
return int64(content), makeErrorOrNil(err)
}
// RowSetInt64 : Set int64 at specified index in current row
func (t *TsBatch) RowSetInt64(index, value int64) error {
valueIndex := C.qdb_size_t(index)
return makeErrorOrNil(C.qdb_ts_batch_row_set_int64(t.table, valueIndex, C.qdb_int_t(value)))
} | entry_timeseries_int64.go | 0.725162 | 0.402744 | entry_timeseries_int64.go | starcoder |
package adt
import "github.com/solo-io/cue/cue/token"
// Op indicates the operation at the top of an expression tree of the
// expression used to evaluate a value.
type Op int

// String returns the CUE source text for the operation. NoOp (and any Op
// missing from opToString) yields the empty string.
func (o Op) String() string {
	return opToString[o]
}

// Values of Op.
const (
	NoOp Op = iota

	AndOp
	OrOp

	SelectorOp
	IndexOp
	SliceOp
	CallOp

	BoolAndOp
	BoolOrOp

	EqualOp
	NotOp
	NotEqualOp
	LessThanOp
	LessEqualOp
	GreaterThanOp
	GreaterEqualOp

	MatchOp
	NotMatchOp

	AddOp
	SubtractOp
	MultiplyOp
	FloatQuotientOp
	IntQuotientOp
	IntRemainderOp
	IntDivideOp
	IntModuloOp

	InterpolationOp
)

// opToString maps each Op to its CUE source representation.
// NoOp intentionally has no entry.
var opToString = map[Op]string{
	AndOp:           "&",
	OrOp:            "|",
	BoolAndOp:       "&&",
	BoolOrOp:        "||",
	EqualOp:         "==",
	NotOp:           "!",
	NotEqualOp:      "!=",
	LessThanOp:      "<",
	LessEqualOp:     "<=",
	GreaterThanOp:   ">",
	GreaterEqualOp:  ">=",
	MatchOp:         "=~",
	NotMatchOp:      "!~",
	AddOp:           "+",
	SubtractOp:      "-",
	MultiplyOp:      "*",
	FloatQuotientOp: "/",
	IntQuotientOp:   "quo",
	IntRemainderOp:  "rem",
	IntDivideOp:     "div",
	IntModuloOp:     "mod",

	SelectorOp:      ".",
	IndexOp:         "[]",
	SliceOp:         "[:]",
	CallOp:          "()",
	InterpolationOp: `\()`,
}

// OpFromToken converts a token.Token to an Op.
// Tokens with no mapping convert to NoOp (the zero value).
func OpFromToken(t token.Token) Op {
	return tokenMap[t]
}

// Token returns the token.Token corresponding to the Op.
func (op Op) Token() token.Token {
	return opMap[op]
}

// tokenMap maps parser tokens to their evaluator operations. Selector, index,
// slice, call and interpolation have no single-token form and are absent.
var tokenMap = map[token.Token]Op{
	token.OR:  OrOp,  // |
	token.AND: AndOp, // &

	token.ADD: AddOp,           // +
	token.SUB: SubtractOp,      // -
	token.MUL: MultiplyOp,      // *
	token.QUO: FloatQuotientOp, // /

	token.IDIV: IntDivideOp,    // div
	token.IMOD: IntModuloOp,    // mod
	token.IQUO: IntQuotientOp,  // quo
	token.IREM: IntRemainderOp, // rem

	token.LAND: BoolAndOp, // &&
	token.LOR:  BoolOrOp,  // ||

	token.EQL: EqualOp,       // ==
	token.LSS: LessThanOp,    // <
	token.GTR: GreaterThanOp, // >
	token.NOT: NotOp,         // !

	token.NEQ: NotEqualOp,     // !=
	token.LEQ: LessEqualOp,    // <=
	token.GEQ: GreaterEqualOp, // >=

	token.MAT:  MatchOp,    // =~
	token.NMAT: NotMatchOp, // !~
}

// opMap is the inverse of tokenMap, built once at package init.
var opMap = map[Op]token.Token{}

func init() {
	for t, o := range tokenMap {
		opMap[o] = t
	}
}
package sudokuconv
import (
"math"
"math/bits"
"sort"
"github.com/pkg/errors"
)
var (
	// symbolMasks extracts the 3 bits of one symbol, most significant bit
	// first (symbols are zero-based and < 8).
	symbolMasks = [3]uint8{4, 2, 1}
	// bitMasks extracts each bit of a byte/uint8, MSB first.
	bitMasks = [8]uint8{128, 64, 32, 16, 8, 4, 2, 1}
)
// ToBytes converts a 9x9 sudoku board into a compact bit representation.
// Size is 23 or 24 bytes depending on where the 9s are.
// The returned byte slice contains 4 bits for the row where the 9 is in the last column.
// Then follow 3 bits for each of the other eight columns containing 9s.
// Then the other symbols are converted and appended as 3 bits each.
// For this, 1-8 are converted to 0-7.
// The last row and column are left out since they can trivially be computed.
// An error is returned iff the provided board is not correctly solved.
func ToBytes(board [9][9]int) ([]byte, error) {
	if !validate(board) {
		return nil, errors.New("board not solved correctly")
	}
	im := toIntermediate(board)
	bytes := [24]byte{}
	// The first 4 bits hold the row index of the trailing 9, so the symbol
	// stream starts at bit index 4.
	bitIdx := uint(4)
	bytes[0] = im.RowWith9Last << bitIdx
	for _, v := range append(im.NineIndices, im.OtherSymbols...) {
		// Emit the symbol's 3 bits MSB-first into the running bit stream.
		for idxInSymbol, mask := range symbolMasks {
			idxInByte := 7 - bitIdx%8
			bytes[bitIdx/8] = bytes[bitIdx/8] + (v&mask)>>uint8(2-idxInSymbol)<<idxInByte
			bitIdx++
		}
	}
	// Trailing unused bits of the last byte stay zero.
	return bytes[:byteSize(bitIdx)], nil
}
// byteSize returns the number of bytes needed to store bitSize bits
// (i.e. bitSize divided by eight, rounded up).
func byteSize(bitSize uint) int {
	bits := float64(bitSize)
	return int(math.Ceil(bits / 8))
}
// FromBytes converts bytes (see ToBytes) back to board.
// An error is returned iff the provided bytes are malformed.
func FromBytes(bytes []byte) ([9][9]int, error) {
	if len(bytes) < 9 {
		return [9][9]int{}, errors.New("not enough bytes")
	}
	// Decode the 3-bit symbol stream; the first 8 symbols are the column
	// indices of the 9s, the rest are the remaining cell values (0-7).
	symbols := toSymbols(bytes)
	im := &intermediate{
		RowWith9Last: bytes[0] >> 4, // top 4 bits: row of the 9 in the last column
		NineIndices:  symbols[:8],
		OtherSymbols: symbols[8:]}
	board, err := im.toBoard()
	if err != nil {
		return [9][9]int{}, errors.Wrap(err, "incomplete bytes")
	}
	// Recompute the cells that were deliberately omitted from the encoding.
	board = solveNaively(board)
	if !validate(board) {
		return [9][9]int{}, errors.New("bytes lead to incorrect board")
	}
	return board, nil
}
// toSymbols decodes the packed bit stream back into 3-bit symbols.
// Decoding starts at bit 4 of the first byte (the first 4 bits hold the row
// index of the trailing 9); any incomplete trailing symbol is discarded.
func toSymbols(bytes []byte) []uint8 {
	symbols := []uint8{}
	var acc uint8
	var nBits uint
	start := uint(4)
	for _, b := range bytes {
		for pos := start; pos < 8; pos++ {
			bit := (b >> (7 - pos)) & 1
			acc = acc<<1 | bit
			nBits++
			if nBits == 3 {
				symbols = append(symbols, acc)
				acc = 0
				nBits = 0
			}
		}
		start = 0
	}
	return symbols
}
// intermediate is the decoded form of the compact representation:
// the positions of the nine 9s plus the remaining symbols in board order.
type intermediate struct {
	// RowWith9Last is the row index (0-8) whose 9 sits in the last column.
	RowWith9Last uint8
	// NineIndices holds, for the other eight rows (in order, skipping
	// RowWith9Last), the column index of that row's 9.
	NineIndices []uint8
	// OtherSymbols holds the remaining encoded cell values (0-7 for 1-8),
	// in row-major order, for cells selected by includeVal.
	OtherSymbols []uint8
}

// toBoard reconstructs a partially filled board: first the 9s, then the
// other encoded symbols. The omitted cells are left as 0 for solveNaively.
func (im *intermediate) toBoard() ([9][9]int, error) {
	board := [9][9]int{}
	board = im.fill9s(board)
	return im.fillOtherSymbols(board)
}

// fill9s places all nine 9s on the board. NineIndices skips RowWith9Last,
// hence the index shift for rows at or after it.
func (im *intermediate) fill9s(board [9][9]int) [9][9]int {
	board[im.RowWith9Last][8] = 9
	for rowIdx, colIdx := range im.NineIndices {
		if rowIdx >= int(im.RowWith9Last) {
			board[rowIdx+1][colIdx] = 9
		} else {
			board[rowIdx][colIdx] = 9
		}
	}
	return board
}

// Fill 9s first!
// fillOtherSymbols writes the encoded symbols (stored as 0-7) back as values
// 1-8 into every cell selected by includeVal, consuming OtherSymbols in
// row-major order. Errors if the symbol stream is too short.
func (im *intermediate) fillOtherSymbols(board [9][9]int) ([9][9]int, error) {
	valIdx := 0
	valLen := len(im.OtherSymbols)
	for rowIdx, row := range board {
		for colIdx, val := range row {
			if includeVal(rowIdx, colIdx, val) {
				if valIdx >= valLen {
					return [9][9]int{}, errors.New("not enough values")
				}
				board[rowIdx][colIdx] = int(im.OtherSymbols[valIdx]) + 1
				valIdx++
			}
		}
	}
	return board, nil
}
// toIntermediate extracts the compact representation from a solved board:
// the 9 positions and, for every cell selected by includeVal, the value
// shifted to the 0-7 range.
func toIntermediate(board [9][9]int) *intermediate {
	im := intermediate{}
	for rowIdx, row := range board {
		for colIdx, val := range row {
			if val == 9 && colIdx == 8 {
				im.RowWith9Last = uint8(rowIdx)
			} else if val == 9 {
				im.NineIndices = append(im.NineIndices, uint8(colIdx))
			} else if includeVal(rowIdx, colIdx, val) {
				// 1 is subtracted to have values from 0-7
				im.OtherSymbols = append(im.OtherSymbols, uint8(val-1))
			}
		}
	}
	return &im
}

// includeVal reports whether a cell's value must be encoded: everything
// except the last row, the last column, the 9s, and the four subgrid-corner
// cells that solveSubgrids can reconstruct.
func includeVal(rowIdx, colIdx, val int) bool {
	return !firstInBlock(rowIdx, colIdx) && rowIdx < 8 && colIdx < 8 && val != 9
}
// firstInBlock reports whether (rowIdx, colIdx) is the top-left cell of one of
// the four upper-left 3x3 subgrids, i.e. (0,0), (0,3), (3,0) or (3,3). These
// cells are omitted from the encoding because solveSubgrids reconstructs them
// from the rest of their subgrid.
//
// The previous implementation summed 1<<(8-rowIdx) and 1<<(8-colIdx) and
// compared against 512, 288 and 64, which encodes exactly the same four cells
// for valid board coordinates but depends on shift-overflow behavior for
// out-of-range inputs; the explicit comparison is clearer and total.
func firstInBlock(rowIdx, colIdx int) bool {
	return (rowIdx == 0 || rowIdx == 3) && (colIdx == 0 || colIdx == 3)
}
// validate reports whether board is a correctly solved sudoku: every row,
// every column and every 3x3 subgrid must be a permutation of 1-9.
func validate(board [9][9]int) bool {
	for _, row := range board {
		if !validateGroup(row) {
			return false
		}
	}
	for colIdx := 0; colIdx < 9; colIdx++ {
		if !validateGroup(extractCol(board, colIdx)) {
			return false
		}
	}
	// x, y index the 3x3 grid of subgrids.
	for x := 0; x < 3; x++ {
		for y := 0; y < 3; y++ {
			if !validateGroup(extractGrid(board, x, y)) {
				return false
			}
		}
	}
	return true
}
// validateGroup reports whether the nine values form a permutation of 1-9.
// The array parameter is a copy, so sorting it does not affect the caller.
func validateGroup(group [9]int) bool {
	perm := group[:]
	sort.Ints(perm)
	for want := 1; want <= 9; want++ {
		if perm[want-1] != want {
			return false
		}
	}
	return true
}
// solveNaively fills in the cells that the encoding deliberately omitted:
// first the four upper-left subgrid corners, then the last column of each
// row, then the entire last row. The order matters (see comments below).
func solveNaively(board [9][9]int) [9][9]int {
	solved := solveSubgrids(board)
	solved = solveRows(solved)
	return solveCols(solved)
}

// solveSubgrids reconstructs the top-left cell of each of the four upper-left
// 3x3 subgrids (rows/cols 0-5) as the single value missing from that subgrid.
func solveSubgrids(board [9][9]int) [9][9]int {
	// grids[r][c] collects the 9 values of the subgrid at (r, c).
	grids := [2][2][]int{}
	for rowIdx := 0; rowIdx < 6; rowIdx++ {
		for colIdx := 0; colIdx < 6; colIdx++ {
			grids[rowIdx/3][colIdx/3] = append(grids[rowIdx/3][colIdx/3], board[rowIdx][colIdx])
		}
	}
	for rowIdx, row := range grids {
		for colIdx, grid := range row {
			gridA := [9]int{}
			copy(gridA[:], grid)
			// The subgrid's top-left cell is its only empty one.
			board[rowIdx*3][colIdx*3] = lastMissing(gridA)
		}
	}
	return board
}

// Solve subgrids first!
// solveRows fills board[r][8] for rows 0-7 with the value missing from row r.
func solveRows(board [9][9]int) [9][9]int {
	for rowIdx, row := range board {
		if rowIdx < 8 {
			board[rowIdx][8] = lastMissing(row)
		}
	}
	return board
}

// Solve rows first!
// solveCols fills the entire last row, one missing value per column.
func solveCols(board [9][9]int) [9][9]int {
	for colIdx := 0; colIdx < 9; colIdx++ {
		board[8][colIdx] = lastMissing(extractCol(board, colIdx))
	}
	return board
}
// lastMissing returns the single value in 1-9 absent from group, which must
// contain eight distinct values of 1-9 plus one empty (0) cell.
//
// Values 1-8 are tracked as bits 0-7 of a uint8; if all eight are present the
// missing value is 9 (TrailingZeros8 of the zero complement is 8). Bits are
// now combined with OR instead of the previous addition, which could carry
// into the wrong bit position if a value ever appeared twice; 0 and 9 are
// skipped explicitly instead of relying on out-of-range shifts producing 0.
func lastMissing(group [9]int) int {
	var seen uint8
	for _, val := range group {
		if val >= 1 && val <= 8 {
			seen |= 1 << uint(val-1)
		}
	}
	return bits.TrailingZeros8(^seen) + 1
}
// extractCol returns column idx of the board as a fixed-size array.
func extractCol(board [9][9]int, idx int) [9]int {
	var col [9]int
	for row := range board {
		col[row] = board[row][idx]
	}
	return col
}
// extractGrid returns the 3x3 subgrid at subgrid coordinates (x, y) — i.e.
// board rows 3x..3x+2 and columns 3y..3y+2 — flattened in row-major order.
func extractGrid(board [9][9]int, x int, y int) [9]int {
	var grid [9]int
	i := 0
	for r := 3 * x; r < 3*x+3; r++ {
		for c := 3 * y; c < 3*y+3; c++ {
			grid[i] = board[r][c]
			i++
		}
	}
	return grid
}
package lsmt
import (
"fmt"
)
// TreeNode is a node of a binary search tree keyed by Elem.Key.
type TreeNode struct {
	Elem  Element
	Left  *TreeNode
	Right *TreeNode
	// Size is the number of elements in the subtree rooted at this node
	// (including the node itself).
	Size int
}
// NewTree accepts a sorted element slice and returns a binary tree representation.
// The midpoint becomes the root, so the resulting tree is balanced.
// Returns nil for an empty slice.
func NewTree(elems []Element) *TreeNode {
	size := len(elems)
	if size == 0 {
		return nil
	}
	root := &TreeNode{
		Elem: elems[size/2],
		Left: NewTree(elems[0 : size/2]),
		Size: size,
	}
	// The right half may be empty when size == 1.
	if rightIndex := size/2 + 1; rightIndex < size {
		root.Right = NewTree(elems[rightIndex:size])
	}
	return root
}
// Upsert inserts elem into the tree, or updates the value of an existing
// element with the same key. Subtree Size counters are only incremented when
// a new node is actually created.
func Upsert(tree **TreeNode, elem Element) {
	upsertNode(tree, elem)
}

// upsertNode performs the recursive insert/update and reports whether a new
// node was added, so ancestors can maintain their Size counters correctly.
// (The previous version incremented Size on every ancestor along the search
// path even when the key already existed and only its value was overwritten,
// permanently inflating subtree sizes.)
func upsertNode(tree **TreeNode, elem Element) bool {
	if *tree == nil {
		*tree = &TreeNode{Elem: elem, Size: 1}
		return true
	}
	node := *tree
	switch {
	case elem.Key < node.Elem.Key:
		if !upsertNode(&node.Left, elem) {
			return false
		}
	case elem.Key > node.Elem.Key:
		if !upsertNode(&node.Right, elem) {
			return false
		}
	default:
		node.Elem.Value = elem.Value
		return false
	}
	node.Size++
	return true
}
// Find returns the element stored under key, or an error if no element with
// that key exists in the tree.
func Find(tree *TreeNode, key string) (Element, error) {
	for node := tree; node != nil; {
		switch {
		case key < node.Elem.Key:
			node = node.Left
		case key > node.Elem.Key:
			node = node.Right
		default:
			return node.Elem, nil
		}
	}
	return Element{}, fmt.Errorf("key %s not found", key)
}
// Traverse returns all the elements in key order (in-order traversal).
// A nil tree yields a nil slice.
func Traverse(tree *TreeNode) []Element {
	var ordered []Element
	var walk func(*TreeNode)
	walk = func(n *TreeNode) {
		if n == nil {
			return
		}
		walk(n.Left)
		ordered = append(ordered, n.Elem)
		walk(n.Right)
	}
	walk(tree)
	return ordered
}
// JustSmallerOrEqual returns the element with the largest key that is <= key.
// Errors if every key in the tree is greater than key (or the tree is empty).
func JustSmallerOrEqual(tree *TreeNode, key string) (Element, error) {
	if tree == nil {
		return Element{}, fmt.Errorf("key %s is smaller than any key in the tree", key)
	}
	current := tree.Elem
	if current.Key <= key {
		// This node qualifies; a closer candidate can only be to the right.
		right, err := JustSmallerOrEqual(tree.Right, key)
		if err == nil && current.Key < right.Key {
			current = right
		}
	} else {
		// This node is too large; the answer, if any, is in the left subtree.
		left, err := JustSmallerOrEqual(tree.Left, key)
		if err != nil {
			return Element{}, err
		}
		current = left
	}
	return current, nil
}
// JustLarger returns the element with the smallest key that is strictly > key.
// Errors if every key in the tree is <= key (or the tree is empty).
func JustLarger(tree *TreeNode, key string) (Element, error) {
	if tree == nil {
		return Element{}, fmt.Errorf("key %s is larger than any key in the tree", key)
	}
	current := tree.Elem
	if current.Key > key {
		// This node qualifies; a closer candidate can only be to the left.
		left, err := JustLarger(tree.Left, key)
		if err == nil && current.Key > left.Key {
			current = left
		}
	} else {
		// This node is too small; the answer, if any, is in the right subtree.
		right, err := JustLarger(tree.Right, key)
		if err != nil {
			return Element{}, err
		}
		current = right
	}
	return current, nil
}
package balance
import (
"fmt"
"math/big"
"runtime/debug"
)
/*
	Coin starts here
*/

// Coin is the basic amount, specified in integers, at the smallest increment (i.e. a satoshi, not a bitcoin)
type Coin struct {
	Currency Currency `json:"currency"`
	Amount   *big.Int `json:"amount,string"`
}

// IsCurrency reports whether the coin's currency name is one of the given
// names. A nil Amount is treated as a fatal programming error.
func (coin Coin) IsCurrency(currencies ...string) bool {
	if coin.Amount == nil {
		debug.PrintStack()
		logger.Fatal("Invalid Coin", coin)
	}
	found := false
	for _, currency := range currencies {
		if coin.Currency.Name == currency {
			found = true
			break
		}
	}
	return found
}

// LessThanCoin reports whether coin < value. Both amounts must be non-nil and
// the coins must be on the same chain, otherwise the process is terminated.
// NOTE(review): comparisons check Currency.Chain while the arithmetic methods
// below check Currency.Name — confirm this asymmetry is intentional.
func (coin Coin) LessThanCoin(value Coin) bool {
	if coin.Amount == nil || value.Amount == nil {
		debug.PrintStack()
		logger.Fatal("Invalid Coin", coin)
	}
	if coin.Currency.Chain != value.Currency.Chain {
		logger.Fatal("Compare two different coin", coin, value)
	}
	//logger.Dump("LessThanCoin", value, coin)
	if coin.Amount.Cmp(value.Amount) < 0 {
		return true
	}
	return false
}

// LessThanEqualCoin reports whether coin <= value, with the same
// preconditions as LessThanCoin.
func (coin Coin) LessThanEqualCoin(value Coin) bool {
	if coin.Amount == nil || value.Amount == nil {
		debug.PrintStack()
		logger.Fatal("Invalid Coin", coin)
	}
	if coin.Currency.Chain != value.Currency.Chain {
		logger.Fatal("Compare two different coin", coin, value)
	}
	//logger.Dump("LessThanEqualCoin", value, coin)
	if coin.Amount.Cmp(value.Amount) <= 0 {
		return true
	}
	return false
}

// IsValid coin or is it broken: requires a non-nil, non-negative amount and
// a non-empty currency name.
func (coin Coin) IsValid() bool {
	switch {
	case coin.Amount == nil:
		return false
	case coin.Currency.Name == "":
		return false
	default:
		return coin.Amount.Cmp(big.NewInt(0)) >= 0
	}
}

// Equals another coin: amounts must match; a chain mismatch yields false
// (unlike the comparison methods above, which terminate the process).
func (coin Coin) Equals(value Coin) bool {
	if coin.Amount == nil {
		debug.PrintStack()
		logger.Fatal("Invalid Coin", coin)
	}
	if coin.Currency.Chain != value.Currency.Chain {
		return false
	}
	if coin.Amount.Cmp(value.Amount) == 0 {
		return true
	}
	return false
}

// Minus two coins. Currencies must match by name; the result may be negative.
func (coin Coin) Minus(value Coin) Coin {
	if coin.Amount == nil {
		debug.PrintStack()
		logger.Fatal("Invalid Coin", coin)
	}
	if coin.Currency.Name != value.Currency.Name {
		//logger.Error("Mismatching currencies", "coin", coin, "value", value)
		logger.Fatal("Mismatching currencies", coin, value)
		return coin
	}
	base := big.NewInt(0)
	result := Coin{
		Currency: coin.Currency,
		Amount:   base.Sub(coin.Amount, value.Amount),
	}
	return result
}

// Plus two coins. Currencies must match by name.
func (coin Coin) Plus(value Coin) Coin {
	if coin.Amount == nil {
		debug.PrintStack()
		logger.Fatal("Invalid Coin", "coin", coin)
	}
	if coin.Currency.Name != value.Currency.Name {
		//logger.Error("Mismatching currencies", "coin", coin, "value", value)
		logger.Fatal("Mismatching currencies", coin, value)
		return coin
	}
	base := big.NewInt(0)
	result := Coin{
		Currency: coin.Currency,
		Amount:   base.Add(coin.Amount, value.Amount),
	}
	return result
}

// Quotient of one coin by another (divide without remainder, modulus, etc).
// Uses truncated division (big.Int.Quo); compare Divide, which uses
// Euclidean division (big.Int.Div).
func (coin Coin) Quotient(value Coin) Coin {
	if coin.Amount == nil {
		debug.PrintStack()
		logger.Fatal("Invalid Coin", "coin", coin)
	}
	if coin.Currency.Name != value.Currency.Name {
		//logger.Error("Mismatching currencies", "coin", coin, "value", value)
		logger.Fatal("Mismatching currencies", coin, value)
		return coin
	}
	base := big.NewInt(0)
	result := Coin{
		Currency: coin.Currency,
		Amount:   base.Quo(coin.Amount, value.Amount),
	}
	return result
}

// Divide the coin's amount by an integer divisor (Euclidean division).
func (coin Coin) Divide(value int) Coin {
	if coin.Amount == nil {
		debug.PrintStack()
		logger.Fatal("Invalid Coin", coin)
	}
	base := big.NewInt(0)
	divisor := big.NewInt(int64(value))
	result := Coin{
		Currency: coin.Currency,
		Amount:   base.Div(coin.Amount, divisor),
	}
	return result
}

// Multiply one coin by another. Currencies must match by name.
func (coin Coin) Multiply(value Coin) Coin {
	if coin.Amount == nil {
		debug.PrintStack()
		logger.Fatal("Invalid Coin", coin)
	}
	if coin.Currency.Name != value.Currency.Name {
		//logger.Error("Mismatching currencies", "coin", coin, "value", value)
		logger.Fatal("Mismatching currencies", coin, value)
		return coin
	}
	base := big.NewInt(0)
	result := Coin{
		Currency: coin.Currency,
		Amount:   base.Mul(coin.Amount, value.Amount),
	}
	return result
}

// MultiplyInt multiplies the coin's amount by an integer factor.
func (coin Coin) MultiplyInt(value int) Coin {
	if coin.Amount == nil {
		debug.PrintStack()
		logger.Fatal("Invalid Coin", coin)
	}
	multiplier := big.NewInt(int64(value))
	base := big.NewInt(0)
	result := Coin{
		Currency: coin.Currency,
		Amount:   base.Mul(coin.Amount, multiplier),
	}
	return result
}

// Turn a coin into a readable, floating point string with the currency.
// The amount is scaled down by the currency's base unit.
// NOTE(review): unlike the other methods, this one does not guard against a
// nil Amount (the check is commented out) — big.Float.SetInt will panic.
func (coin Coin) String() string {
	/*
		if coin.Amount == nil {
			debug.PrintStack()
			logger.Fatal("Invalid Coin", "err", "Amount is nil")
		}
	*/
	float := new(big.Float).SetInt(coin.Amount)
	value := float.Quo(float, new(big.Float).SetInt(coin.Currency.Base()))
	return fmt.Sprintf("%s %s", value.String(), coin.Currency.Name)
}
package models
// IsType returns whether the field is a type.
func (f *Field) IsType() bool {
return f.Parent == nil
}
// basicMap contains a list of basic types.
var (
basicMap = map[string]bool{
"invalid": true,
"bool": true,
"int": true,
"int8": true,
"int16": true,
"int32": true,
"int64": true,
"uint": true,
"uint8": true,
"uint16": true,
"uint32": true,
"uint64": true,
"uintptr": true,
"float32": true,
"float64": true,
"complex64": true,
"complex128": true,
"string": true,
"byte": true,
"rune": true,
}
)
// IsBasic determines whether the field is a basic type.
func (f *Field) IsBasic() bool {
return basicMap[f.Definition]
}
// Pointer represents the char representation of a pointer.
const Pointer = '*'
// IsPointer returns whether the field is a pointer.
func (f *Field) IsPointer() bool {
return len(f.Definition) >= 1 && f.Definition[0] == Pointer
}
// Collection refers to a category of types which indicate that
// a field's definition collects multiple fields (i.e `map[string]bool`).
const (
CollectionPointer = "*"
CollectionSlice = "[]"
CollectionMap = "map"
CollectionChan = "chan"
CollectionFunc = "func"
CollectionInterface = "interface"
)
// IsArray returns whether the field is an array.
func (f *Field) IsArray() bool {
return len(f.Definition) >= 3 && f.Definition[0] == '[' && ('0' <= f.Definition[1] && f.Definition[1] <= '9')
}
// IsSlice returns whether the field is a slice.
func (f *Field) IsSlice() bool {
return len(f.Definition) >= 2 && f.Definition[:2] == CollectionSlice
}
// IsMap returns whether the field is a map.
func (f *Field) IsMap() bool {
return len(f.Definition) >= 3 && f.Definition[:3] == CollectionMap
}
// IsMap returns whether the field is a chan.
func (f *Field) IsChan() bool {
return len(f.Definition) >= 4 && f.Definition[:4] == CollectionChan
}
// IsComposite returns whether the field is a composite type: array, slice, map, chan.
func (f *Field) IsComposite() bool {
return f.IsArray() || f.IsSlice() || f.IsMap() || f.IsChan()
}
// IsFunc returns whether the field is a function.
func (f *Field) IsFunc() bool {
return len(f.Definition) >= 4 && f.Definition[:4] == CollectionFunc
}
// IsInterface returns whether the field is an interface.
func (f *Field) IsInterface() bool {
return len(f.Definition) >= 9 && f.Definition[:9] == CollectionInterface
}
// IsCollection returns whether the field is a collection.
func (f *Field) IsCollection() bool {
return f.IsPointer() || f.IsComposite() || f.IsFunc() || f.IsInterface()
}
// IsAlias determines whether the field is a type alias.
func (f *Field) IsAlias() bool {
return f.Definition != "" && !(f.IsBasic() || f.IsPointer() || f.IsComposite() || f.IsFunc())
} | cli/models/category.go | 0.878939 | 0.402744 | category.go | starcoder |
package values
import (
"fmt"
)
// Scope is a nested set of name-to-value bindings with a single return slot.
type Scope interface {
	// Lookup a name in the scope, searching enclosing scopes outward.
	Lookup(name string) (Value, bool)

	// LocalLookup a name in current scope only.
	LocalLookup(name string) (Value, bool)

	// Set binds a variable in the current scope.
	Set(name string, v Value)

	// Nest creates a new scope by nesting the current scope.
	// If the passed in object is not nil, its values will be added to the new nested scope.
	Nest(Object) Scope

	// Pop returns the parent of the current scope.
	Pop() Scope

	// Size is the number of visible names in scope.
	Size() int

	// Range iterates over all variable bindings in scope applying f.
	Range(f func(k string, v Value))

	// LocalRange iterates over all variable bindings only in the current scope.
	LocalRange(f func(k string, v Value))

	// SetReturn binds the return value of the scope.
	SetReturn(Value)

	// Return reports the bound return value of the scope.
	Return() Value

	// Copy creates a deep copy of the scope, values are not copied.
	// Copy preserves the nesting structure.
	Copy() Scope
}

// scope is the default Scope implementation: a map of local bindings plus a
// link to the parent scope. The zero value is a usable empty root scope.
type scope struct {
	parent      Scope
	values      map[string]Value
	returnValue Value
}
// NewScope creates a new empty scope with no parent.
func NewScope() Scope {
	return &scope{values: map[string]Value{}}
}
// NewNestedScope creates a new scope with bindings from obj and a parent.
// When obj is nil the binding map stays nil until the first Set.
func NewNestedScope(parent Scope, obj Object) Scope {
	s := &scope{parent: parent}
	if obj != nil {
		s.values = make(map[string]Value, obj.Len())
		obj.Range(func(name string, v Value) {
			s.values[name] = v
		})
	}
	return s
}
// Lookup searches for name in this scope, then in each enclosing scope.
func (s *scope) Lookup(name string) (Value, bool) {
	if v, ok := s.values[name]; ok {
		return v, true
	}
	if s.parent == nil {
		var zero Value
		return zero, false
	}
	return s.parent.Lookup(name)
}

// LocalLookup searches only this scope's own bindings.
func (s *scope) LocalLookup(name string) (Value, bool) {
	value, found := s.values[name]
	return value, found
}

// Set binds name to v in this scope, allocating the map lazily.
func (s *scope) Set(name string, v Value) {
	if s.values == nil {
		s.values = map[string]Value{}
	}
	s.values[name] = v
}

// Nest returns a child scope seeded with obj's bindings (if any).
func (s *scope) Nest(obj Object) Scope {
	return NewNestedScope(s, obj)
}

// Pop returns the parent scope (nil for the root).
func (s *scope) Pop() Scope {
	return s.parent
}

// Size returns the total number of bindings across this scope and all parents.
func (s *scope) Size() int {
	n := len(s.values)
	if s.parent != nil {
		n += s.parent.Size()
	}
	return n
}

// Range applies f to every binding in this scope and all enclosing scopes.
func (s *scope) Range(f func(k string, v Value)) {
	s.LocalRange(f)
	if p := s.parent; p != nil {
		p.Range(f)
	}
}

// LocalRange applies f to the bindings of this scope only.
func (s *scope) LocalRange(f func(k string, v Value)) {
	for name, value := range s.values {
		f(name, value)
	}
}

// SetReturn records the scope's return value.
func (s *scope) SetReturn(v Value) {
	s.returnValue = v
}

// Return reports the recorded return value.
func (s *scope) Return() Value {
	return s.returnValue
}

// Copy duplicates the whole scope chain; values themselves are shared.
// The return value is not copied.
func (s *scope) Copy() Scope {
	var parentCopy Scope
	if s.parent != nil {
		parentCopy = s.parent.Copy()
	}
	dup := NewNestedScope(parentCopy, nil)
	s.LocalRange(func(k string, v Value) {
		dup.Set(k, v)
	})
	return dup
}
// FormattedScope produces a fmt.Formatter for pretty printing a scope.
func FormattedScope(scope Scope) fmt.Formatter {
return scopeFormatter{scope}
}
type scopeFormatter struct {
scope Scope
}
func (s scopeFormatter) Format(state fmt.State, _ rune) {
state.Write([]byte("["))
for scope := s.scope; scope != nil; scope = scope.Pop() {
state.Write([]byte("{"))
j := 0
scope.LocalRange(func(k string, v Value) {
if j != 0 {
state.Write([]byte(", "))
}
fmt.Fprintf(state, "%s = %v", k, v)
j++
})
state.Write([]byte("} -> "))
}
state.Write([]byte("nil ]"))
} | values/scope.go | 0.629547 | 0.485417 | scope.go | starcoder |
package classifier
import (
"encoding/json"
"io"
deep "github.com/patrikeh/go-deep"
"github.com/patrikeh/go-deep/training"
"github.com/bububa/facenet/core"
)
// Neural represents neural classifier
type Neural struct {
	ml        *deep.Neural
	threshold float64
}

// Identity returns the classifier identity (NeuralClassifier).
func (n *Neural) Identity() ClassifierIdentity {
	return NeuralClassifier
}

// Write implement Classifier interface: serializes the network as JSON.
func (n *Neural) Write(w io.Writer) error {
	dump := n.ml.Dump()
	return json.NewEncoder(w).Encode(dump)
}

// Read implement Classifier interface: restores the network from JSON.
func (n *Neural) Read(r io.Reader) error {
	var dump deep.Dump
	if err := json.NewDecoder(r).Decode(&dump); err != nil {
		return err
	}
	n.ml = deep.FromDump(&dump)
	return nil
}

// SetThreadshold sets the minimum score for Match to accept a class.
func (n *Neural) SetThreadshold(threshold float64) {
	n.threshold = threshold
}

// peopleToExamples converts each person's embeddings into one-hot-labeled,
// standardized training examples. Each person's examples are shuffled and
// split individually (ratio `split`), so every class appears in both the
// training and held-out sets.
func (n *Neural) peopleToExamples(people *core.People, split float64) (training.Examples, training.Examples) {
	var data training.Examples
	var heldout training.Examples
	classes := len(people.GetList())
	for idx, person := range people.GetList() {
		var examples training.Examples
		embeddings := person.GetEmbeddings()
		for _, embedding := range embeddings {
			e := training.Example{
				Response: onehot(classes, idx),
				Input:    convInputs(embedding.GetValue()),
			}
			deep.Standardize(e.Input)
			examples = append(examples, e)
		}
		examples.Shuffle()
		t, h := examples.Split(split)
		data = append(data, t...)
		heldout = append(heldout, h...)
	}
	data.Shuffle()
	heldout.Shuffle()
	return data, heldout
}

// initDeep builds the underlying multi-class network with ReLU activations
// and normally distributed initial weights (given std and mean).
func (n *Neural) initDeep(inputs int, layout []int, std float64, mean float64) {
	n.ml = deep.NewNeural(&deep.Config{
		Inputs: inputs,
		Layout: layout,
		// Activation: deep.ActivationTanh,
		// Activation: deep.ActivationSigmoid,
		Activation: deep.ActivationReLU,
		//Activation: deep.ActivationSoftmax,
		Mode:   deep.ModeMultiClass,
		Weight: deep.NewNormal(std, mean),
		Bias:   true,
	})
}

// Train implement Classifier interface: single-threaded training with an
// Adam solver on a 512-input network sized to the number of people.
func (n *Neural) Train(people *core.People, split float64, iterations int, verbosity int) {
	n.initDeep(512, []int{64, 16, len(people.GetList())}, 0.5, 0)
	//trainer := training.NewTrainer(training.NewSGD(0.01, 0.5, 1e-6, true), 1)
	//trainer := training.NewTrainer(training.NewSGD(0.005, 0.5, 1e-6, true), 50)
	//trainer := training.NewBatchTrainer(training.NewSGD(0.005, 0.1, 0, true), 50, 300, 16)
	//trainer := training.NewTrainer(training.NewAdam(0.1, 0, 0, 0), 50)
	// solver := training.NewSGD(0.01, 0.5, 1e-6, true)
	solver := training.NewAdam(0.02, 0.9, 0.999, 1e-8)
	trainer := training.NewTrainer(solver, verbosity)
	data, heldout := n.peopleToExamples(people, split)
	trainer.Train(n.ml, data, heldout, iterations)
}

// BatchTrain implement Classifier interface: like Train but uses a batch
// trainer with the given batch size and 4 parallel workers.
func (n *Neural) BatchTrain(people *core.People, split float64, iterations int, verbosity int, batch int) {
	n.initDeep(512, []int{64, 16, len(people.GetList())}, 0.5, 0)
	//solver := training.NewSGD(0.01, 0.5, 1e-6, true)
	solver := training.NewAdam(0.02, 0.9, 0.999, 1e-8)
	trainer := training.NewBatchTrainer(solver, verbosity, batch, 4)
	data, heldout := n.peopleToExamples(people, split)
	trainer.Train(n.ml, data, heldout, iterations)
}

// Predict implement Classifier interface: returns per-class scores for the
// given embedding.
func (n *Neural) Predict(embedding []float32) []float64 {
	return n.ml.Predict(convInputs(embedding))
}

// Match returns the index and score of the best class whose score reaches
// the threshold (falling back to NeuralMatchThreshold when unset);
// index is -1 when no class qualifies.
func (n *Neural) Match(input []float32) (int, float64) {
	scores := n.Predict(input)
	var index = -1
	var maxScore float64
	threshold := n.threshold
	if threshold < 1e-15 {
		threshold = NeuralMatchThreshold
	}
	for idx, score := range scores {
		if score >= threshold && maxScore < score {
			maxScore = score
			index = idx
		}
	}
	return index, maxScore
}
// convInputs widens a float32 embedding to the float64 slice the network expects.
func convInputs(embedding []float32) []float64 {
	converted := make([]float64, len(embedding))
	for i := range embedding {
		converted[i] = float64(embedding[i])
	}
	return converted
}
// onehot builds a one-hot vector of the given length with a 1 at index val.
// Panics if val is out of range, matching slice indexing semantics.
func onehot(classes int, val int) []float64 {
	encoded := make([]float64, classes)
	encoded[val] = 1
	return encoded
}
package stacks_queues
/*
You are given two non-empty zero-indexed arrays A and B consisting of N integers.
Arrays A and B represent N voracious fish in a river, ordered downstream along the flow of the river.
The fish are numbered from 0 to N − 1. If P and Q are two fish and P < Q, then fish P is initially
upstream of fish Q. Initially, each fish has a unique position.
Fish number P is represented by A[P] and B[P]. Array A contains the sizes of the fish.
All its elements are unique. Array B contains the directions of the fish. It contains only 0s and/or 1s, where:
0 represents a fish flowing upstream,
1 represents a fish flowing downstream.
If two fish move in opposite directions and there are no other (living) fish between them,
they will eventually meet each other. Then only one fish can stay alive − the larger fish eats the smaller one.
More precisely, we say that two fish P and Q meet each other when P < Q, B[P] = 1 and B[Q] = 0,
and there are no living fish between them. After they meet:
If A[P] > A[Q] then P eats Q, and P will still be flowing downstream,
If A[Q] > A[P] then Q eats P, and Q will still be flowing upstream.
We assume that all the fish are flowing at the same speed. That is, fish moving in the same direction never meet.
The goal is to calculate the number of fish that will stay alive.
For example, consider arrays A and B such that:
A[0] = 4 B[0] = 0
A[1] = 3 B[1] = 1
A[2] = 2 B[2] = 0
A[3] = 1 B[3] = 0
A[4] = 5 B[4] = 0
Initially all the fish are alive and all except fish number 1 are moving upstream.
Fish number 1 meets fish number 2 and eats it, then it meets fish number 3 and eats it too.
Finally, it meets fish number 4 and is eaten by it. The remaining two fish, number 0 and 4, never meet and therefore stay alive.
Write a function:
func Solution(A []int, B []int) int
that, given two non-empty zero-indexed arrays A and B consisting of N integers, returns the number of fish that will stay alive.
For example, given the arrays shown above, the function should return 2, as explained above.
Assume that:
N is an integer within the range [1..100,000];
each element of array A is an integer within the range [0..1,000,000,000];
each element of array B is an integer that can have one of the following values: 0, 1;
the elements of A are all distinct.
Complexity:
expected worst-case time complexity is O(N);
expected worst-case space complexity is O(N), beyond input storage (not counting the storage required for input arguments).
*/
// Check if fish is downsteam and add to stack. If fish is upstream check how many it will eat from downstream
func Fish(A []int, B []int) int {
l := NewIntStack(len(A))
response := 0
for i, _ := range B {
if B[i] == 1 {
l.Push(A[i])
} else {
for l.size > 0 { // We have a matching pair
if l.Front() < A[i] {
l.Pop()
} else {
break
}
}
if l.size == 0 { // Escaping upstreams
response += 1
}
}
}
return response + l.size
} | stacks-queues/Fish.go | 0.851922 | 0.921957 | Fish.go | starcoder |
package core
import . "don/syntax"
// DType
// DTypes form a Boolean algebra
// Conceptually, a DType is a set of []string
type DType struct {
	NoUnit   bool             // when false, the unit (empty path) is a member of the set
	Positive bool             // when true, fields absent from Fields default to NullType; otherwise to UnknownType (see Get)
	Fields   map[string]DType // explicitly listed per-field subtypes; may be nil
}
// Get DType

// UnknownType is the zero value: the largest type (complement of NullType).
var UnknownType DType

// UnitType contains exactly the unit (empty path).
var UnitType = DType{Positive: true}

// FieldsType contains every path except the unit.
var FieldsType = DType{NoUnit: true}

// NullType is the empty set: the least element (LTE every other DType).
var NullType = DType{NoUnit: true, Positive: true}
// NullPtr returns a pointer to a fresh copy of NullType.
func NullPtr() *DType {
	null := NullType
	return &null
}
// MakeNFieldsType returns a positive, unit-free DType whose Fields map is
// pre-sized to hold nFields entries.
func MakeNFieldsType(nFields int) DType {
	fields := make(map[string]DType, nFields)
	return DType{
		NoUnit:   true,
		Positive: true,
		Fields:   fields,
	}
}
// Get returns the subtype stored under fieldName. When the field is not
// explicitly listed, positive types default to NullType and negative types
// to UnknownType.
func (t DType) Get(fieldName string) DType {
	if sub, ok := t.Fields[fieldName]; ok {
		return sub
	}
	if t.Positive {
		return NullType
	}
	return UnknownType
}
// againstPath distributes pathType into t: the result is a unit-free copy of
// t whose fields have been recursed, joined with pathType wherever t still
// contained the unit.
func (t DType) againstPath(pathType DType) DType {
	fields := make(map[string]DType, len(t.Fields))
	for fieldName, fieldType := range t.Fields {
		fields[fieldName] = fieldType.againstPath(pathType)
	}
	// Start from a unit-free copy of t with the recursed fields.
	u := DType{NoUnit: true, Positive: t.Positive, Fields: fields}
	if !t.NoUnit {
		// t contained the unit: substitute pathType for it.
		u.Joins(pathType)
	}
	return u
}
// If there are negative fields, returns upper bound

// AgainstPath builds the type of the given path (a chain of AtLow wrappers
// around UnitType) and distributes it through t via againstPath.
func (t DType) AgainstPath(path []string) DType {
	// Fold right-to-left so pathType nests path[0]/path[1]/.../unit.
	pathType := UnitType
	for i := len(path) - 1; i >= 0; i-- {
		pathType = pathType.AtLow(path[i])
	}
	return t.againstPath(pathType)
}
// Other
// RemakeFields replaces t.Fields with a shallow copy so that subsequent
// mutations do not alias the original map.
func (t *DType) RemakeFields() {
	copied := make(map[string]DType, len(t.Fields))
	for name, sub := range t.Fields {
		copied[name] = sub
	}
	t.Fields = copied
}
// Less Than or Equal

// LTE reports whether t0 is a subset of t1.
func (t0 DType) LTE(t1 DType) bool {
	// t0 may not contain the unit unless t1 does, and t0's default for
	// unlisted fields may not exceed t1's.
	if !t0.NoUnit && t1.NoUnit || !t0.Positive && t1.Positive {
		return false
	}
	// Every field t0 lists explicitly must fit inside t1's view of it.
	for fieldName, fieldType0 := range t0.Fields {
		if !fieldType0.LTE(t1.Get(fieldName)) {
			return false
		}
	}
	if !t0.Positive {
		// t0 defaults unlisted fields to UnknownType, so any field t1 lists
		// that t0 did not must itself admit UnknownType.
		for fieldName, fieldType1 := range t1.Fields {
			if _, ok := t0.Fields[fieldName]; !ok && !UnknownType.LTE(fieldType1) {
				return false
			}
		}
	}
	return true
}
// Equal reports whether t0 and t1 denote the same set (mutual containment).
func (t0 DType) Equal(t1 DType) bool {
	return t1.LTE(t0) && t0.LTE(t1)
}
// Complement returns the set-theoretic complement of t: both flags are
// inverted and every listed field is complemented recursively.
func (t DType) Complement() DType {
	fields := make(map[string]DType, len(t.Fields))
	for name, sub := range t.Fields {
		fields[name] = sub.Complement()
	}
	return DType{
		NoUnit:   !t.NoUnit,
		Positive: !t.Positive,
		Fields:   fields,
	}
}
// Meets intersects t0 with t1 in place (greatest lower bound).
func (t0 *DType) Meets(t1 DType) {
	// The intersection contains the unit only if both operands do.
	t0.NoUnit = t0.NoUnit || t1.NoUnit
	if t0.Positive {
		// Only t0's explicitly listed fields can survive: intersect each
		// with t1's view of it and drop those that become empty.
		t0.RemakeFields()
		for fieldName, fieldType0 := range t0.Fields {
			fieldType0.Meets(t1.Get(fieldName))
			if fieldType0.LTE(NullType) {
				delete(t0.Fields, fieldName)
			} else {
				t0.Fields[fieldName] = fieldType0
			}
		}
	} else if t1.Positive {
		// Symmetric case: the result is positive, carrying t1's fields
		// intersected against t0's view of them.
		fields := make(map[string]DType)
		for fieldName, fieldType1 := range t1.Fields {
			fieldType1.Meets(t0.Get(fieldName))
			if !fieldType1.LTE(NullType) {
				fields[fieldName] = fieldType1
			}
		}
		t0.Positive = true
		t0.Fields = fields
	} else {
		// Both negative: merge t1's listed fields into t0, intersecting
		// wherever both list the same field.
		t0.RemakeFields()
		for fieldName, fieldType1 := range t1.Fields {
			fieldType1.Meets(t0.Get(fieldName))
			t0.Fields[fieldName] = fieldType1
		}
	}
}
// AtHigh wraps t one level down under fieldName in a negative
// (non-Positive, unit-containing) type.
func (t DType) AtHigh(fieldName string) DType {
	return DType{Fields: map[string]DType{fieldName: t}}
}
// AtLow wraps t one level down under fieldName in a positive, unit-free type.
func (t DType) AtLow(fieldName string) DType {
	wrapped := MakeNFieldsType(1)
	wrapped.Fields[fieldName] = t
	return wrapped
}
// Joins computes the union (least upper bound) of t0 and t1 in place,
// via De Morgan: a ∨ b = ¬(¬a ∧ ¬b).
func (t0 *DType) Joins(t1 DType) {
	t0C := t0.Complement()
	t0C.Meets(t1.Complement())
	*t0 = t0C.Complement()
}
// Underdefined returns a non-nil Error if t has a negative (non-Positive)
// level anywhere, tagging the error with the field path it was found under
// (via InField). Returns nil when t is positive all the way down.
func (t DType) Underdefined() Error {
	if !t.Positive {
		return NewError("Negative fields")
	}
	for fieldName, fieldType := range t.Fields {
		if subUnderdefined := fieldType.Underdefined(); subUnderdefined != nil {
			return subUnderdefined.InField(fieldName)
		}
	}
	return nil
}
// Syntax renders t as a syntax tree: ISyntax for the zero (unknown) type,
// otherwise a conjunction of a "unit" factor, a "fields"/withoutField
// composition for negative types, and one bracketed factor per listed field.
func (t DType) Syntax() Syntax {
	if !t.NoUnit && !t.Positive && len(t.Fields) == 0 {
		return ISyntax{}
	}
	var lFactors []Syntax
	if !t.NoUnit {
		lFactors = append(lFactors, Named{Name: "unit"})
	}
	if !t.Positive {
		// NOTE(review): map iteration order makes the withoutField (and
		// per-field) factor order nondeterministic between runs.
		cFactors := []Syntax{Named{Name: "fields"}}
		for fieldName := range t.Fields {
			cFactors = append(cFactors, Application{
				Com: Named{Name: "withoutField"},
				Arg: Quote{Named{Name: fieldName}},
			})
		}
		if len(cFactors) > 1 {
			lFactors = append(lFactors, Composition{cFactors})
		} else {
			lFactors = append(lFactors, cFactors[0])
		}
	}
	for fieldName, fieldType := range t.Fields {
		lFactors = append(lFactors, Composition{[]Syntax{
			Named{RightMarker: true, Name: fieldName},
			fieldType.Syntax(),
			Named{LeftMarker: true, Name: fieldName},
		}})
	}
	if len(lFactors) == 1 {
		return lFactors[0]
	}
	return Conjunction{lFactors}
}
// String renders t through its Syntax representation.
func (t DType) String() string { return t.Syntax().String() }
package chaikin
import (
"github.com/MicahParks/go-ad"
"github.com/MicahParks/go-ma"
)
const (
	// ShortEMA is the number of periods in the short EMA of the Accumulation
	// Distribution Line results used by the Chaikin Oscillator.
	ShortEMA = 3

	// LongEMA is the number of periods in the long EMA of the Accumulation
	// Distribution Line results used by the Chaikin Oscillator.
	LongEMA = 10
)
// Chaikin represents the state of the Chaikin Oscillator.
type Chaikin struct {
	ad      *ad.AD  // running Accumulation Distribution Line state
	short   *ma.EMA // short-period EMA over the AD line
	long    *ma.EMA // long-period EMA over the AD line
	prevBuy bool    // whether the oscillator was above the AD line last period
}
// Result holds the results of a Chaikin calculation.
type Result struct {
	ADLine      float64 // current Accumulation Distribution Line value
	BuySignal   *bool   // non-nil only when the oscillator crossed the AD line this period
	ChaikinLine float64 // short EMA minus long EMA of the AD line
}
// New creates a new Chaikin Oscillator and returns its first point along with the corresponding Accumulation
// Distribution Line point. It uses the standard 3-period short and 10-period long EMAs with default smoothing.
func New(initial [LongEMA]ad.Input) (*Chaikin, Result) {
	return NewCustom(initial[:], ShortEMA, 0, 0)
}
// NewCustom creates a new Chaikin Oscillator and returns its first point along with the corresponding Accumulation
// Distribution Line point. Custom (non-Chaikin approved) inputs are allowed. The length of the initial input slice is
// the length of the long EMA period.
func NewCustom(initial []ad.Input, shortPeriod uint, shortSmoothing, longSmoothing float64) (*Chaikin, Result) {
	adLinePoints := make([]float64, len(initial))
	cha := &Chaikin{}
	// Seed the AD line with the first input, then accumulate the rest.
	var adLine float64
	cha.ad, adLine = ad.New(initial[0])
	adLinePoints[0] = adLine
	for i, input := range initial[1:] {
		adLinePoints[i+1] = cha.ad.Calculate(input)
	}
	// Seed the short EMA with an SMA of the first shortPeriod AD points.
	_, shortSMA := ma.NewSMA(adLinePoints[:shortPeriod])
	cha.short = ma.NewEMA(shortPeriod, shortSMA, shortSmoothing)
	// Catch up the short EMA to where the long EMA will be.
	// After this loop adLine holds the latest AD line value.
	// NOTE(review): if shortPeriod == len(initial) this loop never runs and
	// latestShortEMA stays 0 — confirm inputs are longer than the short period.
	var latestShortEMA float64
	for _, adLine = range adLinePoints[shortPeriod:] {
		latestShortEMA = cha.short.Calculate(adLine)
	}
	// Seed the long EMA with an SMA over the whole initial window.
	_, longSMA := ma.NewSMA(adLinePoints)
	cha.long = ma.NewEMA(uint(len(initial)), longSMA, longSmoothing)
	result := Result{
		ADLine: adLine,
		BuySignal: nil,
		ChaikinLine: latestShortEMA - longSMA,
	}
	// Record which side of the AD line the oscillator starts on.
	cha.prevBuy = result.ChaikinLine > adLine
	return cha, result
}
// Calculate produces the next point on the Chaikin Oscillator given the current period's information.
func (c *Chaikin) Calculate(next ad.Input) Result {
adLine := c.ad.Calculate(next)
result := c.short.Calculate(adLine) - c.long.Calculate(adLine)
var buySignal *bool
if result > adLine != c.prevBuy {
buy := !c.prevBuy
c.prevBuy = buy
buySignal = &buy
}
return Result{
ADLine: adLine,
BuySignal: buySignal,
ChaikinLine: result,
}
} | float64.go | 0.723016 | 0.545467 | float64.go | starcoder |
package kalman
import (
"math"
mat "github.com/mrfyo/matrix"
)
// CubatureKalmanFilter holds the state of a cubature Kalman filter (CKF)
// for nonlinear estimation, using 2n symmetric cubature points.
type CubatureKalmanFilter struct {
	DimX int // n: state dimension
	DimZ int // m: measurement dimension
	Dt float64 // collect duration (time step passed to Fx/Hx)
	Fx FilterFun // state-transition function, returns (n, 1)
	Hx FilterFun // measurement function, returns (m, 1)
	X Matrix // state estimate (n, 1)
	P Matrix // state covariance (n, n)
	R Matrix // measurement noise covariance (m, m)
	Q Matrix // process noise covariance (n, n)
	PriorX Matrix // predicted state, written by Predict (n, 1)
	PriorP Matrix // predicted covariance, written by Predict (n, n)
}
// Init seeds the filter with an initial state x, covariance P, process
// noise Q, and measurement noise R.
func (kf *CubatureKalmanFilter) Init(x, P, Q, R Matrix) {
	kf.X = x
	kf.P = P
	kf.Q = Q
	kf.R = R
}
// computeSigmaPoints computes the 2n cubature points x ± sqrt(n)·L_k, where
// L is the Cholesky factor of P and L_k its k-th column. The points are
// returned as an (n, 2n) matrix, one point per column.
func (kf *CubatureKalmanFilter) computeSigmaPoints(x Matrix, P Matrix) (sigmas Matrix) {
	n := kf.DimX
	sigmas = mat.Zeros(Shape{Row: n, Col: 2 * n})
	// NOTE(review): the Cholesky error is discarded; a non-SPD P fails silently.
	L, _ := mat.Cholesky(P)
	S := L.ScaleMul(math.Sqrt(float64(n)))
	for k := 0; k < n; k++ {
		sigmas.SetCol(k, x.Add(S.GetCol(k)))
		sigmas.SetCol(k+n, x.Sub(S.GetCol(k)))
	}
	return sigmas
}
// Predict propagates the current state through Fx over the cubature points
// and stores the predicted mean and covariance in PriorX/PriorP.
func (kf *CubatureKalmanFilter) Predict() {
	n := kf.DimX
	x := kf.X
	P := kf.P
	Q := kf.Q
	sigmas := kf.computeSigmaPoints(x, P)
	priorX := mat.Zeros(x.Shape)
	priorP := mat.Zeros(P.Shape)
	// Propagate each cubature point and accumulate first/second moments.
	for j := 0; j < sigmas.Col; j++ {
		xk := kf.Fx(kf.Dt, sigmas.GetCol(j))
		mat.MatrixAdd(priorX, xk)
		mat.MatrixAdd(priorP, xk.Dot(xk.T()))
	}
	// Equal cubature weights 1/(2n); covariance = E[xxᵀ] − x̄x̄ᵀ + Q.
	wn := 1.0 / float64(2*n)
	priorX = priorX.ScaleMul(wn)
	priorP = priorP.ScaleMul(wn).Sub(priorX.Dot(priorX.T())).Add(Q)
	kf.PriorX = priorX
	kf.PriorP = priorP
}
// Update folds measurement z into the predicted state (from Predict) and
// returns the corrected state estimate X.
func (kf *CubatureKalmanFilter) Update(z Matrix) Matrix {
	n := kf.DimX
	m := kf.DimZ
	R := kf.R
	priorX := kf.PriorX
	priorP := kf.PriorP
	// Re-sample cubature points around the predicted state.
	sigmas := kf.computeSigmaPoints(priorX, priorP) // (n, 2*n)
	Pzz := mat.Zeros(R.Shape) // innovation covariance accumulator (m, m)
	Pxz := mat.Zeros(Shape{Row: n, Col: m}) // cross covariance accumulator (n, m)
	priorZ := mat.Zeros(Shape{Row: m, Col: 1}) // predicted measurement mean (m, 1)
	for k := 0; k < sigmas.Col; k++ {
		xk := sigmas.GetCol(k) // (n, 1)
		zk := kf.Hx(kf.Dt, xk) // (m, 1)
		zkT := zk.T() // (1, m)
		mat.MatrixAdd(priorZ, zk)
		mat.MatrixAdd(Pzz, zk.Dot(zkT))
		mat.MatrixAdd(Pxz, xk.Dot(zkT))
	}
	// Equal cubature weights; center the moments and add measurement noise R.
	wn := 1.0 / float64(2*n)
	priorZ = priorZ.ScaleMul(wn)
	priorZT := priorZ.T()
	Pzz = Pzz.ScaleMul(wn).Sub(priorZ.Dot(priorZT)).Add(R)
	Pxz = Pxz.ScaleMul(wn).Sub(priorX.Dot(priorZT))
	// Kalman gain, state correction, and covariance update.
	K := Pxz.Dot(mat.Inv(Pzz)) // (n, m)
	X := priorX.Add(K.Dot(z.Sub(priorZ))) // (n, 1)
	P := priorP.Sub(K.Dot(Pzz).Dot(K.T())) // (n, n)
	kf.X = X
	kf.P = P
	return X
}
// NewCubatureKalmanFilter constructs a filter with identity covariances and
// zero state/noise matrices; call Init to override before filtering.
func NewCubatureKalmanFilter(dimX int, dimZ int, dt float64, Fx FilterFun, Hx FilterFun) *CubatureKalmanFilter {
	shapeX := Shape{Row: dimX, Col: dimX}
	shapeZ := Shape{Row: dimZ, Col: dimZ}
	return &CubatureKalmanFilter{
		DimX: dimX,
		DimZ: dimZ,
		Dt: dt,
		Fx: Fx,
		Hx: Hx,
		X: mat.Zeros(shapeX),
		P: mat.Eye(dimX),
		R: mat.Zeros(shapeZ),
		Q: mat.Zeros(shapeX),
		PriorX: mat.Zeros(shapeX),
		PriorP: mat.Eye(dimX),
	}
}
package ent
import (
"fmt"
"strings"
"time"
"entgo.io/ent/dialect/sql"
"github.com/hm-edu/domain-rest-interface/ent/delegation"
"github.com/hm-edu/domain-rest-interface/ent/domain"
)
// Delegation is the model entity for the Delegation schema.
type Delegation struct {
	config `json:"-"`
	// ID of the ent.
	ID int `json:"id,omitempty"`
	// CreateTime holds the value of the "create_time" field.
	CreateTime time.Time `json:"create_time,omitempty"`
	// UpdateTime holds the value of the "update_time" field.
	UpdateTime time.Time `json:"update_time,omitempty"`
	// User holds the value of the "user" field.
	User string `json:"user,omitempty"`
	// Edges holds the relations/edges for other nodes in the graph.
	// The values are being populated by the DelegationQuery when eager-loading is set.
	Edges DelegationEdges `json:"edges"`
	// domain_delegations holds the foreign-key value of the domain edge
	// (scanned from ForeignKeys[0]); nil when no domain is set.
	domain_delegations *int
}
// DelegationEdges holds the relations/edges for other nodes in the graph.
type DelegationEdges struct {
	// Domain holds the value of the domain edge.
	Domain *Domain `json:"domain,omitempty"`
	// loadedTypes holds the information for reporting if a
	// type was loaded (or requested) in eager-loading or not.
	// Index 0 corresponds to the domain edge (see DomainOrErr).
	loadedTypes [1]bool
}
// DomainOrErr returns the Domain value or an error if the edge
// was not loaded in eager-loading, or loaded but was not found.
func (e DelegationEdges) DomainOrErr() (*Domain, error) {
	if e.loadedTypes[0] {
		if e.Domain == nil {
			// The edge domain was loaded in eager-loading,
			// but was not found.
			return nil, &NotFoundError{label: domain.Label}
		}
		return e.Domain, nil
	}
	// Edge was never requested/loaded.
	return nil, &NotLoadedError{edge: "domain"}
}
// scanValues returns the types for scanning values from sql.Rows.
// Each column gets a nullable sql.Null* target matching its schema type.
func (*Delegation) scanValues(columns []string) ([]interface{}, error) {
	values := make([]interface{}, len(columns))
	for i := range columns {
		switch columns[i] {
		case delegation.FieldID:
			values[i] = new(sql.NullInt64)
		case delegation.FieldUser:
			values[i] = new(sql.NullString)
		case delegation.FieldCreateTime, delegation.FieldUpdateTime:
			values[i] = new(sql.NullTime)
		case delegation.ForeignKeys[0]: // domain_delegations
			values[i] = new(sql.NullInt64)
		default:
			return nil, fmt.Errorf("unexpected column %q for type Delegation", columns[i])
		}
	}
	return values, nil
}
// assignValues assigns the values that were returned from sql.Rows (after scanning)
// to the Delegation fields. The values slice must line up with the columns
// produced by scanValues.
func (d *Delegation) assignValues(columns []string, values []interface{}) error {
	if m, n := len(values), len(columns); m < n {
		return fmt.Errorf("mismatch number of scan values: %d != %d", m, n)
	}
	for i := range columns {
		switch columns[i] {
		case delegation.FieldID:
			value, ok := values[i].(*sql.NullInt64)
			if !ok {
				return fmt.Errorf("unexpected type %T for field id", value)
			}
			d.ID = int(value.Int64)
		case delegation.FieldCreateTime:
			if value, ok := values[i].(*sql.NullTime); !ok {
				return fmt.Errorf("unexpected type %T for field create_time", values[i])
			} else if value.Valid {
				d.CreateTime = value.Time
			}
		case delegation.FieldUpdateTime:
			if value, ok := values[i].(*sql.NullTime); !ok {
				return fmt.Errorf("unexpected type %T for field update_time", values[i])
			} else if value.Valid {
				d.UpdateTime = value.Time
			}
		case delegation.FieldUser:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field user", values[i])
			} else if value.Valid {
				d.User = value.String
			}
		case delegation.ForeignKeys[0]:
			// The FK is kept in an unexported pointer so nil can mean "unset".
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for edge-field domain_delegations", value)
			} else if value.Valid {
				d.domain_delegations = new(int)
				*d.domain_delegations = int(value.Int64)
			}
		}
	}
	return nil
}
// QueryDomain queries the "domain" edge of the Delegation entity.
// It builds the query through a client bound to this entity's config.
func (d *Delegation) QueryDomain() *DomainQuery {
	return (&DelegationClient{config: d.config}).QueryDomain(d)
}
// Update returns a builder for updating this Delegation.
// Note that you need to call Delegation.Unwrap() before calling this method if this Delegation
// was returned from a transaction, and the transaction was committed or rolled back.
func (d *Delegation) Update() *DelegationUpdateOne {
	return (&DelegationClient{config: d.config}).UpdateOne(d)
}
// Unwrap unwraps the Delegation entity that was returned from a transaction after it was closed,
// so that all future queries will be executed through the driver which created the transaction.
// It panics when the entity was not obtained from a transaction.
func (d *Delegation) Unwrap() *Delegation {
	tx, ok := d.config.driver.(*txDriver)
	if !ok {
		panic("ent: Delegation is not a transactional entity")
	}
	d.config.driver = tx.drv
	return d
}
// String implements the fmt.Stringer.
// Times are rendered with time.ANSIC for readability.
func (d *Delegation) String() string {
	var builder strings.Builder
	builder.WriteString("Delegation(")
	builder.WriteString(fmt.Sprintf("id=%v", d.ID))
	builder.WriteString(", create_time=")
	builder.WriteString(d.CreateTime.Format(time.ANSIC))
	builder.WriteString(", update_time=")
	builder.WriteString(d.UpdateTime.Format(time.ANSIC))
	builder.WriteString(", user=")
	builder.WriteString(d.User)
	builder.WriteByte(')')
	return builder.String()
}
// Delegations is a parsable slice of Delegation.
type Delegations []*Delegation

// config propagates the shared client config to every element of the slice.
func (d Delegations) config(cfg config) {
	for _i := range d {
		d[_i].config = cfg
	}
}
package nbc
import (
"sort"
)
// Classifier is a naive Bayes classifier over tokenized documents, with
// per-label weights that can be tuned via Validate.
type Classifier struct {
	trainingSet map[string]map[string]float64 // label -> word -> relative frequency (zeros clamped to minWeight)
	labelWeights map[string]float64 // label -> multiplicative weight applied in PredictAll
	minWeight float64 // floor for word frequencies, avoids multiplying by zero
	validationRate float64 // learning rate used when Validate applies accumulated error
}
// Train builds per-class word frequency tables from items and resets the
// label weights to 1. Zero-frequency words are clamped to minWeight so that
// prediction products never collapse to zero.
func (c *Classifier) Train(items []ClassItem) {
	c.trainingSet = make(map[string]map[string]float64)
	c.labelWeights = make(map[string]float64)
	c.minWeight = 0.05
	c.validationRate = 0.1
	// Register every class with a neutral weight and an empty table.
	for _, item := range items {
		if _, ok := c.trainingSet[item.Class]; !ok {
			c.trainingSet[item.Class] = make(map[string]float64)
			c.labelWeights[item.Class] = 1
		}
	}
	// Register every word under every class (count 0), and count occurrences
	// under the item's own class.
	for _, item := range items {
		for _, w := range item.Content {
			for label := range c.trainingSet {
				if _, ok := c.trainingSet[label][w]; !ok {
					c.trainingSet[label][w] = 0
				}
				if item.Class == label {
					c.trainingSet[label][w]++
				}
			}
		}
	}
	// Convert counts to relative frequencies, clamping zeros to minWeight
	// to avoid multiply-by-zero during prediction.
	for _, freq := range c.trainingSet {
		for w := range freq {
			if freq[w] == 0 {
				freq[w] = c.minWeight
			} else {
				freq[w] /= float64(len(items))
			}
		}
	}
}
// Classes returns the labels seen during training, in unspecified order.
// It returns an empty (non-nil) slice when the classifier is untrained.
func (c *Classifier) Classes() []string {
	if c.trainingSet == nil {
		return []string{}
	}
	labels := make([]string, 0, len(c.trainingSet))
	for label := range c.trainingSet {
		labels = append(labels, label)
	}
	return labels
}
// Validate adjusts the per-label weights using items as a validation set.
// It returns the accuracy measured before each adjustment and after the last
// one (len == iterations + 1).
// NOTE(review): the accumulated error is applied to the weights only once,
// after the loop, so every accuracy value before the final application is
// computed against unchanged weights — confirm whether per-iteration
// application was intended.
func (c *Classifier) Validate(items []ClassItem, iterations int) []float64 {
	acc := make([]float64, iterations+1)
	classErr := make(map[string]float64)
	for w, _ := range c.labelWeights {
		classErr[w] = 0
	}
	// find error by items
	for iter := 0; iter <= iterations; iter++ {
		for _, item := range items {
			matches := c.PredictAll(item.Content)
			for _, m := range matches {
				if m.Class == item.Class {
					// Error is the probability gap to the top-ranked class.
					classErr[item.Class] += matches[0].Probability - m.Probability
					break
				}
			}
		}
		acc[iter] = c.Accuracy(items)
	}
	//TODO don't just increase weights for false negative but also decrease false positives
	//apply error to weights
	for l, v := range classErr {
		c.labelWeights[l] += v * c.validationRate
	}
	return acc
}
// Accuracy returns the fraction of items whose best prediction matches
// their label. It panics when items is empty.
func (c *Classifier) Accuracy(items []ClassItem) float64 {
	if len(items) == 0 {
		panic("at least one item expected")
	}
	hits := 0
	for _, item := range items {
		if c.PredictBest(item.Content).Class == item.Class {
			hits++
		}
	}
	return float64(hits) / float64(len(items))
}
// PredictBest returns the highest-probability match for content.
func (c *Classifier) PredictBest(content []string) Match {
	ranked := c.PredictAll(content)
	return ranked[0]
}
// PredictAll scores content against every trained class and returns all
// matches sorted by descending probability, normalized to sum to 1.
func (c *Classifier) PredictAll(content []string) []Match {
	matches := make(Matches, 0, len(c.trainingSet))
	for class := range c.trainingSet {
		matches = append(matches, Match{Class: class, Probability: 1})
	}
	// Naive Bayes product of weighted word frequencies per class.
	sum := 0.0
	for i := range matches {
		for _, w := range content {
			matches[i].Probability *= c.trainingSet[matches[i].Class][w] * c.labelWeights[matches[i].Class]
		}
		sum += matches[i].Probability
	}
	// Normalize so probabilities sum to 1.
	// NOTE(review): if every product underflows to 0, this divides by zero
	// and yields NaN — consider log-probabilities for long documents.
	for i := range matches {
		matches[i].Probability /= sum
	}
	sort.Sort(matches)
	return matches
}
// Match pairs a class label with its normalized prediction probability.
type Match struct {
	Class string
	Probability float64
}
// ClassItem is a labeled sample: a class name and the document's tokens.
type ClassItem struct {
	Class string
	Content []string
}
// Matches implements sort.Interface, ordering by descending probability.
type Matches []Match

func (m Matches) Len() int {
	return len(m)
}

// Less puts higher probabilities first.
func (m Matches) Less(i, j int) bool {
	return m[i].Probability > m[j].Probability
}

func (m Matches) Swap(i, j int) {
	m[i], m[j] = m[j], m[i]
}
package main
const ddcContents string = `<ddc>
<class number="000" description="Computer science, information & general works">
<division number="000" description="Computer science, knowledge & systems">
<section number="000" description="Computer science, information & general works"></section>
<section number="001" description="Knowledge"></section>
<section number="002" description="The book"></section>
<section number="003" description="Systems"></section>
<section number="004" description="Data processing & computer science"></section>
<section number="005" description="Computer programming, programs & data"></section>
<section number="006" description="Special computer methods"></section>
<section number="007" description="Unassigned"></section>
<section number="008" description="Unassigned"></section>
<section number="009" description="Unassigned"></section>
</division>
<division number="010" description="Bibliographies">
<section number="010" description="Bibliography"></section>
<section number="011" description="Bibliographies"></section>
<section number="012" description="Bibliographies of individuals"></section>
<section number="013" description="Unassigned"></section>
<section number="014" description="Of anonymous & pseudonymous works"></section>
<section number="015" description="Bibliographies of works from specific places"></section>
<section number="016" description="Bibliographies of works on specific subjects"></section>
<section number="017" description="General subject catalogs"></section>
<section number="018" description="Catalogs arranged by author, date, etc."></section>
<section number="019" description="Dictionary catalogs"></section>
</division>
<division number="020" description="Library & information sciences">
<section number="020" description="Library & information sciences"></section>
<section number="021" description="Library relationships"></section>
<section number="022" description="Administration of physical plant"></section>
<section number="023" description="Personnel management"></section>
<section number="024" description="Unassigned"></section>
<section number="025" description="Library operations"></section>
<section number="026" description="Libraries for specific subjects"></section>
<section number="027" description="General libraries"></section>
<section number="028" description="Reading & use of other information media"></section>
<section number="029" description="Unassigned"></section>
</division>
<division number="030" description="Encyclopedias & books of facts">
<section number="030" description="General encyclopedic works"></section>
<section number="031" description="Encyclopedias in American English"></section>
<section number="032" description="Encyclopedias in English"></section>
<section number="033" description="In other Germanic languages"></section>
<section number="034" description="Encyclopedias in French, Occitan & Catalan"></section>
<section number="035" description="In Italian, Romanian & related languages"></section>
<section number="036" description="Encyclopedias in Spanish & Portuguese"></section>
<section number="037" description="Encyclopedias in Slavic languages"></section>
<section number="038" description="Encyclopedias in Scandinavian languages"></section>
<section number="039" description="Encyclopedias in other languages"></section>
</division>
<division number="040" description="Unassigned">
<section number="040" description="Unassigned"></section>
<section number="041" description="Unassigned"></section>
<section number="042" description="Unassigned"></section>
<section number="043" description="Unassigned"></section>
<section number="044" description="Unassigned"></section>
<section number="045" description="Unassigned"></section>
<section number="046" description="Unassigned"></section>
<section number="047" description="Unassigned"></section>
<section number="048" description="Unassigned"></section>
<section number="049" description="Unassigned"></section>
</division>
<division number="050" description="Magazines, journals & serials">
<section number="050" description="General serial publications"></section>
<section number="051" description="Serials in American English"></section>
<section number="052" description="Serials in English"></section>
<section number="053" description="Serials in other Germanic languages"></section>
<section number="054" description="Serials in French, Occitan & Catalan"></section>
<section number="055" description="In Italian, Romanian & related languages"></section>
<section number="056" description="Serials in Spanish & Portuguese"></section>
<section number="057" description="Serials in Slavic languages"></section>
<section number="058" description="Serials in Scandinavian languages"></section>
<section number="059" description="Serials in other languages"></section>
</division>
<division number="060" description="Associations, organizations & museums">
<section number="060" description="General organizations & museum science"></section>
<section number="061" description="Organizations in North America"></section>
<section number="062" description="Organizations in British Isles; in England"></section>
<section number="063" description="Organizations in central Europe; in Germany"></section>
<section number="064" description="Organizations in France & Monaco"></section>
<section number="065" description="Organizations in Italy & adjacent islands"></section>
<section number="066" description="In Iberian peninsula & adjacent islands"></section>
<section number="067" description="Organizations in eastern Europe; in Russia"></section>
<section number="068" description="Organizations in other geographic areas"></section>
<section number="069" description="Museum science"></section>
</division>
<division number="070" description="News media, journalism & publishing">
<section number="070" description="News media, journalism & publishing"></section>
<section number="071" description="Newspapers in North America"></section>
<section number="072" description="Newspapers in British Isles; in England"></section>
<section number="073" description="Newspapers in central Europe; in Germany"></section>
<section number="074" description="Newspapers in France & Monaco"></section>
<section number="075" description="Newspapers in Italy & adjacent islands"></section>
<section number="076" description="In Iberian peninsula & adjacent islands"></section>
<section number="077" description="Newspapers in eastern Europe; in Russia"></section>
<section number="078" description="Newspapers in Scandinavia"></section>
<section number="079" description="Newspapers in other geographic areas"></section>
</division>
<division number="080" description="Quotations">
<section number="080" description="General collections"></section>
<section number="081" description="Collections in American English"></section>
<section number="082" description="Collections in English"></section>
<section number="083" description="Collections in other Germanic languages"></section>
<section number="084" description="Collections in French, Occitan & Catalan"></section>
<section number="085" description="In Italian, Romanian & related languages"></section>
<section number="086" description="Collections in Spanish & Portuguese"></section>
<section number="087" description="Collections in Slavic languages"></section>
<section number="088" description="Collections in Scandinavian languages"></section>
<section number="089" description="Collections in other languages"></section>
</division>
<division number="090" description="Manuscripts & rare books">
<section number="090" description="Manuscripts & rare books"></section>
<section number="091" description="Manuscripts"></section>
<section number="092" description="Block books"></section>
<section number="093" description="Incunabula"></section>
<section number="094" description="Printed books"></section>
<section number="095" description="Books notable for bindings"></section>
<section number="096" description="Books notable for illustrations"></section>
<section number="097" description="Books notable for ownership or origin"></section>
<section number="098" description="Prohibited works, forgeries & hoaxes"></section>
<section number="099" description="Books notable for format"></section>
</division>
</class>
<class number="100" description="Philosophy & psychology">
<division number="100" description="Philosophy">
<section number="100" description="Philosophy & psychology"></section>
<section number="101" description="Theory of philosophy"></section>
<section number="102" description="Miscellany"></section>
<section number="103" description="Dictionaries & encyclopedias"></section>
<section number="104" description="Unassigned"></section>
<section number="105" description="Serial publications"></section>
<section number="106" description="Organizations & management"></section>
<section number="107" description="Education, research & related topics"></section>
<section number="108" description="Kinds of persons treatment"></section>
<section number="109" description="Historical & collected persons treatment"></section>
</division>
<division number="110" description="Metaphysics">
<section number="110" description="Metaphysics"></section>
<section number="111" description="Ontology"></section>
<section number="112" description="Unassigned"></section>
<section number="113" description="Cosmology"></section>
<section number="114" description="Space"></section>
<section number="115" description="Time"></section>
<section number="116" description="Change"></section>
<section number="117" description="Structure"></section>
<section number="118" description="Force & energy"></section>
<section number="119" description="Number & quantity"></section>
</division>
<division number="120" description="Epistemology">
<section number="120" description="Epistemology, causation & humankind"></section>
<section number="121" description="Epistemology"></section>
<section number="122" description="Causation"></section>
<section number="123" description="Determinism & indeterminism"></section>
<section number="124" description="Teleology"></section>
<section number="125" description="Unassigned"></section>
<section number="126" description="The self"></section>
<section number="127" description="The unconscious & the subconscious"></section>
<section number="128" description="Humankind"></section>
<section number="129" description="Origin & destiny of individual souls"></section>
</division>
<division number="130" description="Parapsychology & occultism">
<section number="130" description="Parapsychology & occultism"></section>
<section number="131" description="Parapsychological & occult methods"></section>
<section number="132" description="Unassigned"></section>
<section number="133" description="Specific topics in parapsychology & occultism"></section>
<section number="134" description="Unassigned"></section>
<section number="135" description="Dreams & mysteries"></section>
<section number="136" description="Unassigned"></section>
<section number="137" description="Divinatory graphology"></section>
<section number="138" description="Physiognomy"></section>
<section number="139" description="Phrenology"></section>
</division>
<division number="140" description="Philosophical schools of thought">
<section number="140" description="Specific philosophical schools"></section>
<section number="141" description="Idealism & related systems"></section>
<section number="142" description="Critical philosophy"></section>
<section number="143" description="Bergsonism & intuitionism"></section>
<section number="144" description="Humanism & related systems"></section>
<section number="145" description="Sensationalism"></section>
<section number="146" description="Naturalism & related systems"></section>
<section number="147" description="Pantheism & related systems"></section>
<section number="148" description="Eclecticism, liberalism & traditionalism"></section>
<section number="149" description="Other philosophical systems"></section>
</division>
<division number="150" description="Psychology">
<section number="150" description="Psychology"></section>
<section number="151" description="Unassigned"></section>
<section number="152" description="Perception, movement, emotions & drives"></section>
<section number="153" description="Mental processes & intelligence"></section>
<section number="154" description="Subconscious & altered states"></section>
<section number="155" description="Differential & developmental psychology"></section>
<section number="156" description="Comparative psychology"></section>
<section number="157" description="Unassigned"></section>
<section number="158" description="Applied psychology"></section>
<section number="159" description="Unassigned"></section>
</division>
<division number="160" description="Logic">
<section number="160" description="Logic"></section>
<section number="161" description="Induction"></section>
<section number="162" description="Deduction"></section>
<section number="163" description="Unassigned"></section>
<section number="164" description="Unassigned"></section>
<section number="165" description="Fallacies & sources of error"></section>
<section number="166" description="Syllogisms"></section>
<section number="167" description="Hypotheses"></section>
<section number="168" description="Argument & persuasion"></section>
<section number="169" description="Analogy"></section>
</division>
<division number="170" description="Ethics">
<section number="170" description="Ethics"></section>
<section number="171" description="Ethical systems"></section>
<section number="172" description="Political ethics"></section>
<section number="173" description="Ethics of family relationships"></section>
<section number="174" description="Occupational ethics"></section>
<section number="175" description="Ethics of recreation & leisure"></section>
<section number="176" description="Ethics of sex & reproduction"></section>
<section number="177" description="Ethics of social relations"></section>
<section number="178" description="Ethics of consumption"></section>
<section number="179" description="Other ethical norms"></section>
</division>
<division number="180" description="Ancient, medieval & eastern philosophy">
<section number="180" description="Ancient, medieval & eastern philosophy"></section>
<section number="181" description="Eastern philosophy"></section>
<section number="182" description="Pre-Socratic Greek philosophies"></section>
<section number="183" description="Socratic & related philosophies"></section>
<section number="184" description="Platonic philosophy"></section>
<section number="185" description="Aristotelian philosophy"></section>
<section number="186" description="Skeptic & Neoplatonic philosophies"></section>
<section number="187" description="Epicurean philosophy"></section>
<section number="188" description="Stoic philosophy"></section>
<section number="189" description="Medieval western philosophy"></section>
</division>
<division number="190" description="Modern western philosophy">
<section number="190" description="Modern western philosophy"></section>
<section number="191" description="Philosophy of United States & Canada"></section>
<section number="192" description="Philosophy of British Isles"></section>
<section number="193" description="Philosophy of Germany & Austria"></section>
<section number="194" description="Philosophy of France"></section>
<section number="195" description="Philosophy of Italy"></section>
<section number="196" description="Philosophy of Spain & Portugal"></section>
<section number="197" description="Philosophy of former Soviet Union"></section>
<section number="198" description="Philosophy of Scandinavia"></section>
<section number="199" description="Philosophy in other geographic areas"></section>
</division>
</class>
<class number="200" description="Religion">
<division number="200" description="Religion">
<section number="200" description="Religion"></section>
<section number="201" description="Religious mythology & social theology"></section>
<section number="202" description="Doctrines"></section>
<section number="203" description="Public worship & other practices"></section>
<section number="204" description="Religious experience, life & practice"></section>
<section number="205" description="Religious ethics"></section>
<section number="206" description="Leaders & organization"></section>
<section number="207" description="Missions & religious education"></section>
<section number="208" description="Sources"></section>
<section number="209" description="Sects & reform movements"></section>
</division>
<division number="210" description="Philosophy & theory of religion">
<section number="210" description="Philosophy & theory of religion"></section>
<section number="211" description="Concepts of God"></section>
<section number="212" description="Existence, knowability & attributes of God"></section>
<section number="213" description="Creation"></section>
<section number="214" description="Theodicy"></section>
<section number="215" description="Science & religion"></section>
<section number="216" description="Unassigned"></section>
<section number="217" description="Unassigned"></section>
<section number="218" description="Humankind"></section>
<section number="219" description="Unassigned"></section>
</division>
<division number="220" description="The Bible">
<section number="220" description="Bible"></section>
<section number="221" description="Old Testament (Tanakh)"></section>
<section number="222" description="Historical books of Old Testament"></section>
<section number="223" description="Poetic books of Old Testament"></section>
<section number="224" description="Prophetic books of Old Testament"></section>
<section number="225" description="New Testament"></section>
<section number="226" description="Gospels & Acts"></section>
<section number="227" description="Epistles"></section>
<section number="228" description="Revelation (Apocalypse)"></section>
<section number="229" description="Apocrypha & pseudepigrapha"></section>
</division>
<division number="230" description="Christianity & Christian theology">
<section number="230" description="Christianity & Christian theology"></section>
<section number="231" description="God"></section>
<section number="232" description="Jesus Christ & his family"></section>
<section number="233" description="Humankind"></section>
<section number="234" description="Salvation & grace"></section>
<section number="235" description="Spiritual beings"></section>
<section number="236" description="Eschatology"></section>
<section number="237" description="Unassigned"></section>
<section number="238" description="Creeds & catechisms"></section>
<section number="239" description="Apologetics & polemics"></section>
</division>
<division number="240" description="Christian practice & observance">
<section number="240" description="Christian moral & devotional theology"></section>
<section number="241" description="Christian ethics"></section>
<section number="242" description="Devotional literature"></section>
<section number="243" description="Evangelistic writings for individuals"></section>
<section number="244" description="Unassigned"></section>
<section number="245" description="Unassigned"></section>
<section number="246" description="Use of art in Christianity"></section>
<section number="247" description="Church furnishings & articles"></section>
<section number="248" description="Christian experience, practice & life"></section>
<section number="249" description="Christian observances in family life"></section>
</division>
<division number="250" description="Christian pastoral practice & religious orders">
<section number="250" description="Christian orders & local church"></section>
<section number="251" description="Preaching"></section>
<section number="252" description="Texts of sermons"></section>
<section number="253" description="Pastoral office & work"></section>
<section number="254" description="Parish administration"></section>
<section number="255" description="Religious congregations & orders"></section>
<section number="256" description="Unassigned"></section>
<section number="257" description="Unassigned"></section>
<section number="258" description="Unassigned"></section>
<section number="259" description="Pastoral care of families & kinds of persons"></section>
</division>
<division number="260" description="Christian organization, social work & worship">
<section number="260" description="Social & ecclesiastical theology"></section>
<section number="261" description="Social theology"></section>
<section number="262" description="Ecclesiology"></section>
<section number="263" description="Days, times & places of observance"></section>
<section number="264" description="Public worship"></section>
<section number="265" description="Sacraments, other rites & acts"></section>
<section number="266" description="Missions"></section>
<section number="267" description="Associations for religious work"></section>
<section number="268" description="Religious education"></section>
<section number="269" description="Spiritual renewal"></section>
</division>
<division number="270" description="History of Christianity">
<section number="270" description="History of Christianity & Christian church"></section>
<section number="271" description="Religious orders in church history"></section>
<section number="272" description="Persecutions in church history"></section>
<section number="273" description="Doctrinal controversies & heresies"></section>
<section number="274" description="History of Christianity in Europe"></section>
<section number="275" description="History of Christianity in Asia"></section>
<section number="276" description="History of Christianity in Africa"></section>
<section number="277" description="History of Christianity in North America"></section>
<section number="278" description="History of Christianity in South America"></section>
<section number="279" description="History of Christianity in other areas"></section>
</division>
<division number="280" description="Christian denominations">
<section number="280" description="Christian denominations & sects"></section>
<section number="281" description="Early church & Eastern churches"></section>
<section number="282" description="Roman Catholic Church"></section>
<section number="283" description="Anglican churches"></section>
<section number="284" description="Protestants of Continental origin"></section>
<section number="285" description="Presbyterian, Reformed & Congregational"></section>
<section number="286" description="Baptist, Disciples of Christ & Adventist"></section>
<section number="287" description="Methodist & related churches"></section>
<section number="288" description="Unassigned"></section>
<section number="289" description="Other denominations & sects"></section>
</division>
<division number="290" description="Other religions">
<section number="290" description="Other religions"></section>
<section number="291" description="Unassigned"></section>
<section number="292" description="Greek & Roman religion"></section>
<section number="293" description="Germanic religion"></section>
<section number="294" description="Religions of Indic origin"></section>
<section number="295" description="Zoroastrianism"></section>
<section number="296" description="Judaism"></section>
<section number="297" description="Islam, Bábism & Bahá'í Faith"></section>
<section number="298" description="Optional"></section>
<section number="299" description="Religions not provided for elsewhere"></section>
</division>
</class>
<class number="300" description="Social sciences">
<division number="300" description="Social sciences, sociology & anthropology">
<section number="300" description="Social sciences"></section>
<section number="301" description="Sociology & anthropology"></section>
<section number="302" description="Social interaction"></section>
<section number="303" description="Social processes"></section>
<section number="304" description="Factors affecting social behavior"></section>
<section number="305" description="Social groups"></section>
<section number="306" description="Culture & institutions"></section>
<section number="307" description="Communities"></section>
<section number="308" description="Unassigned"></section>
<section number="309" description="Unassigned"></section>
</division>
<division number="310" description="Statistics">
<section number="310" description="Collections of general statistics"></section>
<section number="311" description="Unassigned"></section>
<section number="312" description="Unassigned"></section>
<section number="313" description="Unassigned"></section>
<section number="314" description="General statistics of Europe"></section>
<section number="315" description="General statistics of Asia"></section>
<section number="316" description="General statistics of Africa"></section>
<section number="317" description="General statistics of North America"></section>
<section number="318" description="General statistics of South America"></section>
<section number="319" description="General statistics of other areas"></section>
</division>
<division number="320" description="Political science">
<section number="320" description="Political science"></section>
<section number="321" description="Systems of governments & states"></section>
<section number="322" description="Relation of state to organized groups"></section>
<section number="323" description="Civil & political rights"></section>
<section number="324" description="The political process"></section>
<section number="325" description="International migration & colonization"></section>
<section number="326" description="Slavery & emancipation"></section>
<section number="327" description="International relations"></section>
<section number="328" description="The legislative process"></section>
<section number="329" description="Unassigned"></section>
</division>
<division number="330" description="Economics">
<section number="330" description="Economics"></section>
<section number="331" description="Labor economics"></section>
<section number="332" description="Financial economics"></section>
<section number="333" description="Economics of land & energy"></section>
<section number="334" description="Cooperatives"></section>
<section number="335" description="Socialism & related systems"></section>
<section number="336" description="Public finance"></section>
<section number="337" description="International economics"></section>
<section number="338" description="Production"></section>
<section number="339" description="Macroeconomics & related topics"></section>
</division>
<division number="340" description="Law">
<section number="340" description="Law"></section>
<section number="341" description="Law of nations"></section>
<section number="342" description="Constitutional & administrative law"></section>
<section number="343" description="Military, tax, trade & industrial law"></section>
<section number="344" description="Labor, social, education & cultural law"></section>
<section number="345" description="Criminal law"></section>
<section number="346" description="Private law"></section>
<section number="347" description="Civil procedure & courts"></section>
<section number="348" description="Laws, regulations & cases"></section>
<section number="349" description="Law of specific jurisdictions & areas"></section>
</division>
<division number="350" description="Public administration & military science">
<section number="350" description="Public administration & military science"></section>
<section number="351" description="Public administration"></section>
<section number="352" description="General considerations of public administration"></section>
<section number="353" description="Specific fields of public administration"></section>
<section number="354" description="Administration of economy & environment"></section>
<section number="355" description="Military science"></section>
<section number="356" description="Infantry forces & warfare"></section>
<section number="357" description="Mounted forces & warfare"></section>
<section number="358" description="Air & other specialized forces"></section>
<section number="359" description="Sea forces & warfare"></section>
</division>
<division number="360" description="Social problems & social services">
<section number="360" description="Social problems & services; associations"></section>
<section number="361" description="Social problems & social welfare in general"></section>
<section number="362" description="Social welfare problems & services"></section>
<section number="363" description="Other social problems & services"></section>
<section number="364" description="Criminology"></section>
<section number="365" description="Penal & related institutions"></section>
<section number="366" description="Associations"></section>
<section number="367" description="General clubs"></section>
<section number="368" description="Insurance"></section>
<section number="369" description="Miscellaneous kinds of associations"></section>
</division>
<division number="370" description="Education">
<section number="370" description="Education"></section>
<section number="371" description="Schools & their activities; special education"></section>
<section number="372" description="Elementary education"></section>
<section number="373" description="Secondary education"></section>
<section number="374" description="Adult education"></section>
<section number="375" description="Curricula"></section>
<section number="376" description="Unassigned"></section>
<section number="377" description="Unassigned"></section>
<section number="378" description="Higher education"></section>
<section number="379" description="Public policy issues in education"></section>
</division>
<division number="380" description="Commerce, communications & transportation">
<section number="380" description="Commerce, communications & transportation"></section>
<section number="381" description="Commerce"></section>
<section number="382" description="International commerce"></section>
<section number="383" description="Postal communication"></section>
<section number="384" description="Communications; telecommunication"></section>
<section number="385" description="Railroad transportation"></section>
<section number="386" description="Inland waterway & ferry transportation"></section>
<section number="387" description="Water, air & space transportation"></section>
<section number="388" description="Transportation; ground transportation"></section>
<section number="389" description="Metrology & standardization"></section>
</division>
<division number="390" description="Customs, etiquette & folklore">
<section number="390" description="Customs, etiquette & folklore"></section>
<section number="391" description="Costume & personal appearance"></section>
<section number="392" description="Customs of life cycle & domestic life"></section>
<section number="393" description="Death customs"></section>
<section number="394" description="General customs"></section>
<section number="395" description="Etiquette (Manners)"></section>
<section number="396" description="Unassigned"></section>
<section number="397" description="Unassigned"></section>
<section number="398" description="Folklore"></section>
<section number="399" description="Customs of war & diplomacy"></section>
</division>
</class>
<class number="400" description="Language">
<division number="400" description="Language">
<section number="400" description="Language"></section>
<section number="401" description="Philosophy & theory"></section>
<section number="402" description="Miscellany"></section>
<section number="403" description="Dictionaries & encyclopedias"></section>
<section number="404" description="Special topics"></section>
<section number="405" description="Serial publications"></section>
<section number="406" description="Organizations & management"></section>
<section number="407" description="Education, research & related topics"></section>
<section number="408" description="Kinds of persons treatment"></section>
<section number="409" description="Geographic & persons treatment"></section>
</division>
<division number="410" description="Linguistics">
<section number="410" description="Linguistics"></section>
<section number="411" description="Writing systems"></section>
<section number="412" description="Etymology"></section>
<section number="413" description="Dictionaries"></section>
<section number="414" description="Phonology & phonetics"></section>
<section number="415" description="Grammar"></section>
<section number="416" description="Unassigned"></section>
<section number="417" description="Dialectology & historical linguistics"></section>
<section number="418" description="Standard usage & applied linguistics"></section>
<section number="419" description="Sign languages"></section>
</division>
<division number="420" description="English & Old English languages">
<section number="420" description="English & Old English"></section>
<section number="421" description="English writing system & phonology"></section>
<section number="422" description="English etymology"></section>
<section number="423" description="English dictionaries"></section>
<section number="424" description="Unassigned"></section>
<section number="425" description="English grammar"></section>
<section number="426" description="Unassigned"></section>
<section number="427" description="English language variations"></section>
<section number="428" description="Standard English usage"></section>
<section number="429" description="Old English (Anglo-Saxon)"></section>
</division>
<division number="430" description="German & related languages">
<section number="430" description="Germanic languages; German"></section>
<section number="431" description="German writing systems & phonology"></section>
<section number="432" description="German etymology"></section>
<section number="433" description="German dictionaries"></section>
<section number="434" description="Unassigned"></section>
<section number="435" description="German grammar"></section>
<section number="436" description="Unassigned"></section>
<section number="437" description="German language variations"></section>
<section number="438" description="Standard German usage"></section>
<section number="439" description="Other Germanic languages"></section>
</division>
<division number="440" description="French & related languages">
<section number="440" description="Romance languages; French"></section>
<section number="441" description="French writing systems & phonology"></section>
<section number="442" description="French etymology"></section>
<section number="443" description="French dictionaries"></section>
<section number="444" description="Unassigned"></section>
<section number="445" description="French grammar"></section>
<section number="446" description="Unassigned"></section>
<section number="447" description="French language variations"></section>
<section number="448" description="Standard French usage"></section>
<section number="449" description="Occitan & Catalan"></section>
</division>
<division number="450" description="Italian, Romanian & related languages">
<section number="450" description="Italian, Romanian & related languages"></section>
<section number="451" description="Italian writing systems & phonology"></section>
<section number="452" description="Italian etymology"></section>
<section number="453" description="Italian dictionaries"></section>
<section number="454" description="Unassigned"></section>
<section number="455" description="Italian grammar"></section>
<section number="456" description="Unassigned"></section>
<section number="457" description="Italian language variations"></section>
<section number="458" description="Standard Italian usage"></section>
<section number="459" description="Romanian & related languages"></section>
</division>
<division number="460" description="Spanish & Portuguese languages">
<section number="460" description="Spanish & Portuguese languages"></section>
<section number="461" description="Spanish writing systems & phonology"></section>
<section number="462" description="Spanish etymology"></section>
<section number="463" description="Spanish dictionaries"></section>
<section number="464" description="Unassigned"></section>
<section number="465" description="Spanish grammar"></section>
<section number="466" description="Unassigned"></section>
<section number="467" description="Spanish language variations"></section>
<section number="468" description="Standard Spanish usage"></section>
<section number="469" description="Portuguese"></section>
</division>
<division number="470" description="Latin & Italic languages">
<section number="470" description="Italic languages; Latin"></section>
<section number="471" description="Classical Latin writing & phonology"></section>
<section number="472" description="Classical Latin etymology"></section>
<section number="473" description="Classical Latin dictionaries"></section>
<section number="474" description="Unassigned"></section>
<section number="475" description="Classical Latin grammar"></section>
<section number="476" description="Unassigned"></section>
<section number="477" description="Old, postclassical & Vulgar Latin"></section>
<section number="478" description="Classical Latin usage"></section>
<section number="479" description="Other Italic languages"></section>
</division>
<division number="480" description="Classical & modern Greek languages">
<section number="480" description="Hellenic languages; classical Greek"></section>
<section number="481" description="Classical Greek writing & phonology"></section>
<section number="482" description="Classical Greek etymology"></section>
<section number="483" description="Classical Greek dictionaries"></section>
<section number="484" description="Unassigned"></section>
<section number="485" description="Classical Greek grammar"></section>
<section number="486" description="Unassigned"></section>
<section number="487" description="Preclassical & postclassical Greek"></section>
<section number="488" description="Classical Greek usage"></section>
<section number="489" description="Other Hellenic languages"></section>
</division>
<division number="490" description="Other languages">
<section number="490" description="Other languages"></section>
<section number="491" description="East Indo-European & Celtic languages"></section>
<section number="492" description="Afro-Asiatic languages; Semitic languages"></section>
<section number="493" description="Non-Semitic Afro-Asiatic languages"></section>
        <section number="494" description="Altaic, Uralic, Hyperborean & Dravidian"></section>
<section number="495" description="Languages of East & Southeast Asia"></section>
<section number="496" description="African languages"></section>
<section number="497" description="North American native languages"></section>
<section number="498" description="South American native languages"></section>
<section number="499" description="Austronesian & other languages"></section>
</division>
</class>
<class number="500" description="Science">
<division number="500" description="Science">
<section number="500" description="Natural sciences & mathematics"></section>
<section number="501" description="Philosophy & theory"></section>
<section number="502" description="Miscellany"></section>
<section number="503" description="Dictionaries & encyclopedias"></section>
<section number="504" description="Unassigned"></section>
<section number="505" description="Serial publications"></section>
<section number="506" description="Organizations & management"></section>
<section number="507" description="Education, research & related topics"></section>
<section number="508" description="Natural history"></section>
<section number="509" description="Historical, geographic & persons treatment"></section>
</division>
<division number="510" description="Mathematics">
<section number="510" description="Mathematics"></section>
<section number="511" description="General principles of mathematics"></section>
<section number="512" description="Algebra"></section>
<section number="513" description="Arithmetic"></section>
<section number="514" description="Topology"></section>
<section number="515" description="Analysis"></section>
<section number="516" description="Geometry"></section>
<section number="517" description="Unassigned"></section>
<section number="518" description="Numerical analysis"></section>
<section number="519" description="Probabilities & applied mathematics"></section>
</division>
<division number="520" description="Astronomy">
<section number="520" description="Astronomy & allied sciences"></section>
<section number="521" description="Celestial mechanics"></section>
<section number="522" description="Techniques, equipment & materials"></section>
<section number="523" description="Specific celestial bodies & phenomena"></section>
<section number="524" description="Unassigned"></section>
<section number="525" description="Earth (Astronomical geography)"></section>
<section number="526" description="Mathematical geography"></section>
<section number="527" description="Celestial navigation"></section>
<section number="528" description="Ephemerides"></section>
<section number="529" description="Chronology"></section>
</division>
<division number="530" description="Physics">
<section number="530" description="Physics"></section>
<section number="531" description="Classical mechanics; solid mechanics"></section>
<section number="532" description="Fluid mechanics; liquid mechanics"></section>
<section number="533" description="Gas mechanics"></section>
<section number="534" description="Sound & related vibrations"></section>
<section number="535" description="Light & infrared & ultraviolet phenomena"></section>
<section number="536" description="Heat"></section>
<section number="537" description="Electricity & electronics"></section>
<section number="538" description="Magnetism"></section>
<section number="539" description="Modern physics"></section>
</division>
<division number="540" description="Chemistry">
<section number="540" description="Chemistry & allied sciences"></section>
<section number="541" description="Physical chemistry"></section>
<section number="542" description="Techniques, equipment & materials"></section>
<section number="543" description="Analytical chemistry"></section>
<section number="544" description="Unassigned"></section>
<section number="545" description="Unassigned"></section>
<section number="546" description="Inorganic chemistry"></section>
<section number="547" description="Organic chemistry"></section>
<section number="548" description="Crystallography"></section>
<section number="549" description="Mineralogy"></section>
</division>
<division number="550" description="Earth sciences & geology">
<section number="550" description="Earth sciences"></section>
<section number="551" description="Geology, hydrology & meteorology"></section>
<section number="552" description="Petrology"></section>
<section number="553" description="Economic geology"></section>
<section number="554" description="Earth sciences of Europe"></section>
<section number="555" description="Earth sciences of Asia"></section>
<section number="556" description="Earth sciences of Africa"></section>
<section number="557" description="Earth sciences of North America"></section>
<section number="558" description="Earth sciences of South America"></section>
<section number="559" description="Earth sciences of other areas"></section>
</division>
<division number="560" description="Fossils & prehistoric life">
<section number="560" description="Paleontology; paleozoology"></section>
<section number="561" description="Paleobotany; fossil microorganisms"></section>
<section number="562" description="Fossil invertebrates"></section>
<section number="563" description="Fossil marine & seashore invertebrates"></section>
<section number="564" description="Fossil mollusks & molluscoids"></section>
<section number="565" description="Fossil arthropods"></section>
<section number="566" description="Fossil chordates"></section>
<section number="567" description="Fossil cold-blooded vertebrates; fossil fishes"></section>
<section number="568" description="Fossil birds"></section>
<section number="569" description="Fossil mammals"></section>
</division>
<division number="570" description="Life sciences; biology">
<section number="570" description="Life sciences; biology"></section>
<section number="571" description="Physiology & related subjects"></section>
<section number="572" description="Biochemistry"></section>
<section number="573" description="Specific physiological systems in animals"></section>
<section number="574" description="Unassigned"></section>
<section number="575" description="Specific parts of & systems in plants"></section>
<section number="576" description="Genetics and evolution"></section>
<section number="577" description="Ecology"></section>
<section number="578" description="Natural history of organisms"></section>
<section number="579" description="Microorganisms, fungi & algae"></section>
</division>
<division number="580" description="Plants (Botany)">
<section number="580" description="Plants (Botany)"></section>
<section number="581" description="Specific topics in natural history"></section>
<section number="582" description="Plants noted for characteristics & flowers"></section>
<section number="583" description="Dicotyledones"></section>
<section number="584" description="Monocotyledones"></section>
<section number="585" description="Gymnosperms; conifers"></section>
<section number="586" description="Seedless plants"></section>
<section number="587" description="Vascular seedless plants"></section>
<section number="588" description="Bryophyta"></section>
<section number="589" description="Unassigned"></section>
</division>
<division number="590" description="Animals (Zoology)">
<section number="590" description="Animals (Zoology)"></section>
<section number="591" description="Specific topics in natural history"></section>
<section number="592" description="Invertebrates"></section>
<section number="593" description="Marine & seashore invertebrates"></section>
        <section number="594" description="Mollusks & molluscoids"></section>
<section number="595" description="Arthropods"></section>
<section number="596" description="Chordates"></section>
<section number="597" description="Cold-blooded vertebrates; fishes"></section>
<section number="598" description="Birds"></section>
<section number="599" description="Mammals"></section>
</division>
</class>
<class number="600" description="Technology">
<division number="600" description="Technology">
<section number="600" description="Technology"></section>
<section number="601" description="Philosophy & theory"></section>
<section number="602" description="Miscellany"></section>
<section number="603" description="Dictionaries & encyclopedias"></section>
        <section number="604" description="Special topics"></section>
<section number="605" description="Serial publications"></section>
<section number="606" description="Organizations"></section>
<section number="607" description="Education, research & related topics"></section>
<section number="608" description="Inventions & patents"></section>
<section number="609" description="Historical, geographic & persons treatment"></section>
</division>
<division number="610" description="Medicine & health">
<section number="610" description="Medicine & health"></section>
<section number="611" description="Human anatomy, cytology & histology"></section>
<section number="612" description="Human physiology"></section>
<section number="613" description="Personal health & safety"></section>
<section number="614" description="Incidence & prevention of disease"></section>
<section number="615" description="Pharmacology & therapeutics"></section>
<section number="616" description="Diseases"></section>
<section number="617" description="Surgery & related medical specialties"></section>
<section number="618" description="Gynecology, obstetrics, pediatrics & geriatrics"></section>
<section number="619" description="Unassigned"></section>
</division>
<division number="620" description="Engineering">
<section number="620" description="Engineering & allied operations"></section>
<section number="621" description="Applied physics"></section>
<section number="622" description="Mining & related operations"></section>
<section number="623" description="Military & nautical engineering"></section>
<section number="624" description="Civil engineering"></section>
<section number="625" description="Engineering of railroads & roads"></section>
<section number="626" description="Unassigned"></section>
<section number="627" description="Hydraulic engineering"></section>
<section number="628" description="Sanitary & municipal engineering"></section>
<section number="629" description="Other branches of engineering"></section>
</division>
<division number="630" description="Agriculture">
<section number="630" description="Agriculture & related technologies"></section>
<section number="631" description="Techniques, equipment & materials"></section>
<section number="632" description="Plant injuries, diseases & pests"></section>
<section number="633" description="Field & plantation crops"></section>
<section number="634" description="Orchards, fruits, forestry"></section>
<section number="635" description="Garden crops (Horticulture)"></section>
<section number="636" description="Animal husbandry"></section>
<section number="637" description="Processing dairy & related products"></section>
<section number="638" description="Insect culture"></section>
<section number="639" description="Hunting, fishing & conservation"></section>
</division>
<division number="640" description="Home & family management">
<section number="640" description="Home & family management"></section>
<section number="641" description="Food & drink"></section>
<section number="642" description="Meals & table service"></section>
<section number="643" description="Housing & household equipment"></section>
<section number="644" description="Household utilities"></section>
<section number="645" description="Household furnishings"></section>
<section number="646" description="Sewing, clothing & personal living"></section>
<section number="647" description="Management of public households"></section>
<section number="648" description="Housekeeping"></section>
<section number="649" description="Child rearing & home care of persons"></section>
</division>
<division number="650" description="Management & public relations">
<section number="650" description="Management & auxiliary services"></section>
<section number="651" description="Office services"></section>
<section number="652" description="Processes of written communication"></section>
<section number="653" description="Shorthand"></section>
<section number="654" description="Unassigned"></section>
<section number="655" description="Unassigned"></section>
<section number="656" description="Unassigned"></section>
<section number="657" description="Accounting"></section>
<section number="658" description="General management"></section>
<section number="659" description="Advertising & public relations"></section>
</division>
<division number="660" description="Chemical engineering">
<section number="660" description="Chemical engineering"></section>
<section number="661" description="Industrial chemicals"></section>
<section number="662" description="Explosives, fuels & related products"></section>
<section number="663" description="Beverage technology"></section>
<section number="664" description="Food technology"></section>
<section number="665" description="Industrial oils, fats, waxes & gases"></section>
<section number="666" description="Ceramic & allied technologies"></section>
<section number="667" description="Cleaning, color & coating technologies"></section>
<section number="668" description="Technology of other organic products"></section>
<section number="669" description="Metallurgy"></section>
</division>
<division number="670" description="Manufacturing">
<section number="670" description="Manufacturing"></section>
<section number="671" description="Metalworking & primary metal products"></section>
<section number="672" description="Iron, steel & other iron alloys"></section>
<section number="673" description="Nonferrous metals"></section>
<section number="674" description="Lumber processing, wood products & cork"></section>
<section number="675" description="Leather & fur processing"></section>
<section number="676" description="Pulp & paper technology"></section>
<section number="677" description="Textiles"></section>
<section number="678" description="Elastomers & elastomer products"></section>
<section number="679" description="Other products of specific materials"></section>
</division>
<division number="680" description="Manufacture for specific uses">
<section number="680" description="Manufacture for specific uses"></section>
<section number="681" description="Precision instruments & other devices"></section>
<section number="682" description="Small forge work (Blacksmithing)"></section>
<section number="683" description="Hardware & household appliances"></section>
<section number="684" description="Furnishings & home workshops"></section>
<section number="685" description="Leather, fur goods & related products"></section>
<section number="686" description="Printing & related activities"></section>
<section number="687" description="Clothing & accessories"></section>
<section number="688" description="Other final products & packaging"></section>
<section number="689" description="Unassigned"></section>
</division>
<division number="690" description="Building & construction">
<section number="690" description="Buildings"></section>
<section number="691" description="Building materials"></section>
<section number="692" description="Auxiliary construction practices"></section>
<section number="693" description="Specific materials & purposes"></section>
<section number="694" description="Wood construction & carpentry"></section>
<section number="695" description="Roof covering"></section>
<section number="696" description="Utilities"></section>
<section number="697" description="Heating, ventilating & air-conditioning"></section>
<section number="698" description="Detail finishing"></section>
<section number="699" description="Unassigned"></section>
</division>
</class>
<class number="700" description="Arts & recreation">
<division number="700" description="Arts">
<section number="700" description="The Arts; fine & decorative arts"></section>
<section number="701" description="Philosophy of fine & decorative arts"></section>
<section number="702" description="Miscellany of fine & decorative arts"></section>
<section number="703" description="Dictionaries of fine & decorative arts"></section>
<section number="704" description="Special topics in fine & decorative arts"></section>
<section number="705" description="Serial publications of fine & decorative arts"></section>
<section number="706" description="Organizations & management"></section>
<section number="707" description="Education, research & related topics"></section>
<section number="708" description="Galleries, museums & private collections"></section>
<section number="709" description="Historical, geographic & persons treatment"></section>
</division>
<division number="710" description="Landscape & area planning">
<section number="710" description="Civic & landscape art"></section>
<section number="711" description="Area planning"></section>
<section number="712" description="Landscape architecture"></section>
<section number="713" description="Landscape architecture of trafficways"></section>
<section number="714" description="Water features"></section>
<section number="715" description="Woody plants"></section>
<section number="716" description="Herbaceous plants"></section>
<section number="717" description="Structures in landscape architecture"></section>
<section number="718" description="Landscape design of cemeteries"></section>
<section number="719" description="Natural landscapes"></section>
</division>
<division number="720" description="Architecture">
<section number="720" description="Architecture"></section>
<section number="721" description="Architectural structure"></section>
<section number="722" description="Architecture to ca. 300"></section>
<section number="723" description="Architecture from ca. 300 to 1399"></section>
<section number="724" description="Architecture from 1400"></section>
<section number="725" description="Public structures"></section>
<section number="726" description="Buildings for religious purposes"></section>
<section number="727" description="Buildings for education & research"></section>
<section number="728" description="Residential & related buildings"></section>
<section number="729" description="Design & decoration"></section>
</division>
<division number="730" description="Sculpture, ceramics & metalwork">
<section number="730" description="Plastic arts; sculpture"></section>
<section number="731" description="Processes, forms & subjects of sculpture"></section>
<section number="732" description="Sculpture to ca. 500"></section>
<section number="733" description="Greek, Etruscan & Roman sculpture"></section>
<section number="734" description="Sculpture from ca. 500 to 1399"></section>
<section number="735" description="Sculpture from 1400"></section>
<section number="736" description="Carving & carvings"></section>
<section number="737" description="Numismatics & sigillography"></section>
<section number="738" description="Ceramic arts"></section>
<section number="739" description="Art metalwork"></section>
</division>
<division number="740" description="Drawing & decorative arts">
<section number="740" description="Drawing & decorative arts"></section>
<section number="741" description="Drawing & drawings"></section>
<section number="742" description="Perspective"></section>
<section number="743" description="Drawing & drawings by subject"></section>
<section number="744" description="Unassigned"></section>
<section number="745" description="Decorative arts"></section>
<section number="746" description="Textile arts"></section>
<section number="747" description="Interior decoration"></section>
<section number="748" description="Glass"></section>
<section number="749" description="Furniture & accessories"></section>
</division>
<division number="750" description="Painting">
<section number="750" description="Painting & paintings"></section>
<section number="751" description="Techniques, equipment, materials & forms"></section>
<section number="752" description="Color"></section>
<section number="753" description="Symbolism, allegory, mythology & legend"></section>
<section number="754" description="Genre paintings"></section>
<section number="755" description="Religion"></section>
<section number="756" description="Unassigned"></section>
<section number="757" description="Human figures"></section>
<section number="758" description="Other subjects"></section>
<section number="759" description="Historical, geographic & persons treatment"></section>
</division>
<division number="760" description="Graphic arts">
<section number="760" description="Graphic arts; printmaking & prints"></section>
<section number="761" description="Relief processes (Block printing)"></section>
<section number="762" description="Unassigned"></section>
<section number="763" description="Lithographic processes"></section>
<section number="764" description="Chromolithography & serigraphy"></section>
<section number="765" description="Metal engraving"></section>
<section number="766" description="Mezzotinting, aquatinting & related processes"></section>
<section number="767" description="Etching & drypoint"></section>
<section number="768" description="Unassigned"></section>
<section number="769" description="Prints"></section>
</division>
<division number="770" description="Photography & computer art">
<section number="770" description="Photography, photographs & computer art"></section>
<section number="771" description="Techniques, equipment & materials"></section>
<section number="772" description="Metallic salt processes"></section>
<section number="773" description="Pigment processes of printing"></section>
<section number="774" description="Holography"></section>
<section number="775" description="Digital photography"></section>
<section number="776" description="Computer art (Digital art)"></section>
<section number="777" description="Unassigned"></section>
<section number="778" description="Fields & kinds of photography"></section>
<section number="779" description="Photographs"></section>
</division>
<division number="780" description="Music">
<section number="780" description="Music"></section>
<section number="781" description="General principles & musical forms"></section>
<section number="782" description="Vocal music"></section>
<section number="783" description="Music for single voices; the voice"></section>
<section number="784" description="Instruments & instrumental ensembles"></section>
<section number="785" description="Ensembles with one instrument per part"></section>
<section number="786" description="Keyboard & other instruments"></section>
<section number="787" description="Stringed instruments"></section>
<section number="788" description="Wind instruments"></section>
<section number="789" description="Optional"></section>
</division>
<division number="790" description="Sports, games & entertainment">
<section number="790" description="Recreational & performing arts"></section>
<section number="791" description="Public performances"></section>
<section number="792" description="Stage presentations"></section>
<section number="793" description="Indoor games & amusements"></section>
<section number="794" description="Indoor games of skill"></section>
<section number="795" description="Games of chance"></section>
<section number="796" description="Athletic & outdoor sports & games"></section>
<section number="797" description="Aquatic & air sports"></section>
<section number="798" description="Equestrian sports & animal racing"></section>
<section number="799" description="Fishing, hunting & shooting"></section>
</division>
</class>
<class number="800" description="Literature">
<division number="800" description="Literature, rhetoric & criticism">
<section number="800" description="Literature & rhetoric"></section>
<section number="801" description="Philosophy & theory"></section>
<section number="802" description="Miscellany"></section>
<section number="803" description="Dictionaries & encyclopedias"></section>
<section number="804" description="Unassigned"></section>
<section number="805" description="Serial publications"></section>
<section number="806" description="Organizations & management"></section>
<section number="807" description="Education, research & related topics"></section>
<section number="808" description="Rhetoric & collections of literature"></section>
<section number="809" description="History, description & criticism"></section>
</division>
<division number="810" description="American literature in English">
<section number="810" description="American literature in English"></section>
<section number="811" description="American poetry in English"></section>
<section number="812" description="American drama in English"></section>
<section number="813" description="American fiction in English"></section>
<section number="814" description="American essays in English"></section>
<section number="815" description="American speeches in English"></section>
<section number="816" description="American letters in English"></section>
<section number="817" description="American humor & satire in English"></section>
<section number="818" description="American miscellaneous writings"></section>
<section number="819" description="Optional"></section>
</division>
<division number="820" description="English & Old English literatures">
<section number="820" description="English & Old English literatures"></section>
<section number="821" description="English poetry"></section>
<section number="822" description="English drama"></section>
<section number="823" description="English fiction"></section>
<section number="824" description="English essays"></section>
<section number="825" description="English speeches"></section>
<section number="826" description="English letters"></section>
<section number="827" description="English humor & satire"></section>
<section number="828" description="English miscellaneous writings"></section>
<section number="829" description="Old English (Anglo-Saxon)"></section>
</division>
<division number="830" description="German & related literatures">
<section number="830" description="Literatures of Germanic languages"></section>
<section number="831" description="German poetry"></section>
<section number="832" description="German drama"></section>
<section number="833" description="German fiction"></section>
<section number="834" description="German essays"></section>
<section number="835" description="German speeches"></section>
<section number="836" description="German letters"></section>
<section number="837" description="German humor & satire"></section>
<section number="838" description="German miscellaneous writings"></section>
<section number="839" description="Other Germanic literatures"></section>
</division>
<division number="840" description="French & related literatures">
<section number="840" description="Literatures of Romance languages"></section>
<section number="841" description="French poetry"></section>
<section number="842" description="French drama"></section>
<section number="843" description="French fiction"></section>
<section number="844" description="French essays"></section>
<section number="845" description="French speeches"></section>
<section number="846" description="French letters"></section>
<section number="847" description="French humor & satire"></section>
<section number="848" description="French miscellaneous writings"></section>
<section number="849" description="Occitan & Catalan literatures"></section>
</division>
<division number="850" description="Italian, Romanian & related literatures">
<section number="850" description="Italian, Romanian & related literatures"></section>
<section number="851" description="Italian poetry"></section>
<section number="852" description="Italian drama"></section>
<section number="853" description="Italian fiction"></section>
<section number="854" description="Italian essays"></section>
<section number="855" description="Italian speeches"></section>
<section number="856" description="Italian letters"></section>
<section number="857" description="Italian humor & satire"></section>
<section number="858" description="Italian miscellaneous writings"></section>
<section number="859" description="Romanian & related literatures"></section>
</division>
<division number="860" description="Spanish & Portuguese literatures">
<section number="860" description="Spanish & Portuguese literatures"></section>
<section number="861" description="Spanish poetry"></section>
<section number="862" description="Spanish drama"></section>
<section number="863" description="Spanish fiction"></section>
<section number="864" description="Spanish essays"></section>
<section number="865" description="Spanish speeches"></section>
<section number="866" description="Spanish letters"></section>
<section number="867" description="Spanish humor & satire"></section>
<section number="868" description="Spanish miscellaneous writings"></section>
<section number="869" description="Portuguese literature"></section>
</division>
<division number="870" description="Latin & Italic literatures">
<section number="870" description="Italic literature; Latin literature"></section>
<section number="871" description="Latin poetry"></section>
<section number="872" description="Latin dramatic poetry & drama"></section>
<section number="873" description="Latin epic poetry & fiction"></section>
<section number="874" description="Latin lyric poetry"></section>
<section number="875" description="Latin speeches"></section>
<section number="876" description="Latin letters"></section>
<section number="877" description="Latin humor & satire"></section>
<section number="878" description="Latin miscellaneous writings"></section>
<section number="879" description="Literatures of other Italic languages"></section>
</division>
<division number="880" description="Classical & modern Greek literatures">
<section number="880" description="Hellenic literatures; classical Greek"></section>
<section number="881" description="Classical Greek poetry"></section>
<section number="882" description="Classical Greek dramatic poetry & drama"></section>
<section number="883" description="Classical Greek epic poetry & fiction"></section>
<section number="884" description="Classical Greek lyric poetry"></section>
<section number="885" description="Classical Greek speeches"></section>
<section number="886" description="Classical Greek letters"></section>
<section number="887" description="Classical Greek humor & satire"></section>
<section number="888" description="Classical Greek miscellaneous writings"></section>
<section number="889" description="Modern Greek literature"></section>
</division>
<division number="890" description="Other literatures">
<section number="890" description="Literatures of other languages"></section>
<section number="891" description="East Indo-European & Celtic literatures"></section>
<section number="892" description="Afro-Asiatic literatures; Semitic literatures"></section>
<section number="893" description="Non-Semitic Afro-Asiatic literatures"></section>
<section number="894" description="Altaic, Uralic, Hyperborean & Dravidian"></section>
<section number="895" description="Literatures of East & Southeast Asia"></section>
<section number="896" description="African literatures"></section>
<section number="897" description="North American native literatures"></section>
<section number="898" description="South American native literatures"></section>
<section number="899" description="Austronesian & other literatures"></section>
</division>
</class>
<class number="900" description="History & geography">
<division number="900" description="History">
<section number="900" description="History & geography"></section>
<section number="901" description="Philosophy & theory"></section>
<section number="902" description="Miscellany"></section>
<section number="903" description="Dictionaries & encyclopedias"></section>
<section number="904" description="Collected accounts of events"></section>
<section number="905" description="Serial publications"></section>
<section number="906" description="Organizations & management"></section>
<section number="907" description="Education, research & related topics"></section>
<section number="908" description="Kinds of persons treatment"></section>
<section number="909" description="World history"></section>
</division>
<division number="910" description="Geography & travel">
<section number="910" description="Geography & travel"></section>
<section number="911" description="Historical geography"></section>
<section number="912" description="Atlases, maps, charts & plans"></section>
<section number="913" description="Geography of & travel in ancient world"></section>
<section number="914" description="Geography of & travel in Europe"></section>
<section number="915" description="Geography of & travel in Asia"></section>
<section number="916" description="Geography of & travel in Africa"></section>
<section number="917" description="Geography of & travel in North America"></section>
<section number="918" description="Geography of & travel in South America"></section>
<section number="919" description="Geography of & travel in other areas"></section>
</division>
<division number="920" description="Biography & genealogy">
<section number="920" description="Biography, genealogy & insignia"></section>
<section number="921" description="Optional"></section>
<section number="922" description="Optional"></section>
<section number="923" description="Optional"></section>
<section number="924" description="Optional"></section>
<section number="925" description="Optional"></section>
<section number="926" description="Optional"></section>
<section number="927" description="Optional"></section>
<section number="928" description="Optional"></section>
<section number="929" description="Genealogy, names & insignia"></section>
</division>
<division number="930" description="History of ancient world (to ca. 499)">
<section number="930" description="History of ancient world to ca. 499"></section>
<section number="931" description="China to 420"></section>
<section number="932" description="Egypt to 640"></section>
<section number="933" description="Palestine to 70"></section>
<section number="934" description="South Asia to 647"></section>
<section number="935" description="Mesopotamia & Iranian Plateau to 637"></section>
<section number="936" description="Europe north & west of Italy to ca. 499"></section>
<section number="937" description="Italy & adjacent territories to 476"></section>
<section number="938" description="Greece to 323"></section>
<section number="939" description="Other parts of ancient world to ca. 640"></section>
</division>
<division number="940" description="History of Europe">
<section number="940" description="History of Europe"></section>
<section number="941" description="British Isles"></section>
<section number="942" description="England & Wales"></section>
<section number="943" description="Central Europe; Germany"></section>
<section number="944" description="France & Monaco"></section>
<section number="945" description="Italian Peninsula & adjacent islands"></section>
<section number="946" description="Iberian Peninsula & adjacent islands"></section>
<section number="947" description="Eastern Europe; Russia"></section>
<section number="948" description="Scandinavia"></section>
<section number="949" description="Other parts of Europe"></section>
</division>
<division number="950" description="History of Asia">
<section number="950" description="History of Asia; Far East"></section>
<section number="951" description="China & adjacent areas"></section>
<section number="952" description="Japan"></section>
<section number="953" description="Arabian Peninsula & adjacent areas"></section>
<section number="954" description="South Asia; India"></section>
<section number="955" description="Iran"></section>
<section number="956" description="Middle East (Near East)"></section>
<section number="957" description="Siberia (Asiatic Russia)"></section>
<section number="958" description="Central Asia"></section>
<section number="959" description="Southeast Asia"></section>
</division>
<division number="960" description="History of Africa">
<section number="960" description="History of Africa"></section>
<section number="961" description="Tunisia & Libya"></section>
<section number="962" description="Egypt & Sudan"></section>
<section number="963" description="Ethiopia & Eritrea"></section>
<section number="964" description="Northwest African coast & offshore islands"></section>
<section number="965" description="Algeria"></section>
<section number="966" description="West Africa & offshore islands"></section>
<section number="967" description="Central Africa & offshore islands"></section>
<section number="968" description="Southern Africa; Republic of South Africa"></section>
<section number="969" description="South Indian Ocean islands"></section>
</division>
<division number="970" description="History of North America">
<section number="970" description="History of North America"></section>
<section number="971" description="Canada"></section>
<section number="972" description="Middle America; Mexico"></section>
<section number="973" description="United States"></section>
<section number="974" description="Northeastern United States"></section>
<section number="975" description="Southeastern United States"></section>
<section number="976" description="South central United States"></section>
<section number="977" description="North central United States"></section>
<section number="978" description="Western United States"></section>
<section number="979" description="Great Basin & Pacific Slope region"></section>
</division>
<division number="980" description="History of South America">
<section number="980" description="History of South America"></section>
<section number="981" description="Brazil"></section>
<section number="982" description="Argentina"></section>
<section number="983" description="Chile"></section>
<section number="984" description="Bolivia"></section>
<section number="985" description="Peru"></section>
<section number="986" description="Colombia & Ecuador"></section>
<section number="987" description="Venezuela"></section>
<section number="988" description="Guiana"></section>
<section number="989" description="Paraguay & Uruguay"></section>
</division>
<division number="990" description="History of other areas">
<section number="990" description="History of other areas"></section>
<section number="991" description="Unassigned"></section>
<section number="992" description="Unassigned"></section>
<section number="993" description="New Zealand"></section>
<section number="994" description="Australia"></section>
<section number="995" description="Melanesia; New Guinea"></section>
<section number="996" description="Other parts of Pacific; Polynesia"></section>
<section number="997" description="Atlantic Ocean islands"></section>
<section number="998" description="Arctic islands & Antarctica"></section>
<section number="999" description="Extraterrestrial worlds"></section>
</division>
</class>
</ddc>` | ddc.go | 0.567337 | 0.459986 | ddc.go | starcoder |
package segment
import (
"encoding/binary"
"io"
)
// ReadWriterAt is the interface that groups the basic io.ReadAt and io.WriteAt
// methods, i.e. random-access reads and writes against a common backing store.
// An *os.File, for example, satisfies this interface.
type ReadWriterAt interface {
	io.ReaderAt
	io.WriterAt
}
// Segment is a data segment.
// Supported data types are uint8, uint16, uint32 and uint64.
// All numeric values in the buffer are encoded using big-endian byte order.
type Segment struct {
	buf ReadWriterAt // random-access backing store holding the encoded values
}
// New returns a new data segment backed by buf.
func New(buf ReadWriterAt) *Segment {
	return &Segment{buf: buf}
}
// read fills buf with len(buf) bytes from the backing store at offset.
// index is the position of the value being decoded and is reported in
// partial-read errors.
func (seg *Segment) read(buf []byte, offset int64, index int) error {
	n, err := seg.buf.ReadAt(buf, offset)
	if err != nil {
		return err
	}
	if n < len(buf) {
		return &ErrorPartialRead{Index: index, Offset: offset, NumBytes: n}
	}
	return nil
}
// write copies buf to the backing store at offset. index is the position of
// the value being encoded and is reported in partial-write errors.
//
// Bug fix: a partial write was previously reported only when n < 1, so a
// short write (e.g. 3 of 8 bytes) was silently treated as success. The write
// must now cover all of buf, mirroring the check in read.
func (seg *Segment) write(buf []byte, offset int64, index int) error {
	if n, err := seg.buf.WriteAt(buf, offset); err != nil {
		return err
	} else if n < len(buf) {
		return &ErrorPartialWrite{Index: index, Offset: offset, NumBytes: n}
	}
	return nil
}
// next advances *off past the bytes just processed in b.
func (seg *Segment) next(b []byte, off *int64) {
	*off += int64(len(b))
}
// Get sequentially reads data from the buffer starting at offset into the
// values pointed to by v. Supported targets are *uint8, *uint16, *uint32 and
// *uint64; any other type yields an ErrorUnsupportedType naming the offending
// argument index. Multi-byte values are decoded in big-endian byte order.
func (seg *Segment) Get(offset int64, v ...interface{}) error {
	for i, val := range v {
		// Bind the concrete pointer in the type switch once, instead of
		// re-asserting with *val.(*T) inside every case.
		switch dst := val.(type) {
		default:
			return &ErrorUnsupportedType{Index: i}
		case *uint8:
			buf := make([]byte, 1)
			if err := seg.read(buf, offset, i); err != nil {
				return err
			}
			*dst = buf[0]
			seg.next(buf, &offset)
		case *uint16:
			buf := make([]byte, 2)
			if err := seg.read(buf, offset, i); err != nil {
				return err
			}
			*dst = binary.BigEndian.Uint16(buf)
			seg.next(buf, &offset)
		case *uint32:
			buf := make([]byte, 4)
			if err := seg.read(buf, offset, i); err != nil {
				return err
			}
			*dst = binary.BigEndian.Uint32(buf)
			seg.next(buf, &offset)
		case *uint64:
			buf := make([]byte, 8)
			if err := seg.read(buf, offset, i); err != nil {
				return err
			}
			*dst = binary.BigEndian.Uint64(buf)
			seg.next(buf, &offset)
		}
	}
	return nil
}
// Set sequentially writes the values v to the buffer starting at offset.
// Supported value types are uint8, uint16, uint32 and uint64; any other type
// yields an ErrorUnsupportedType naming the offending argument index.
// Multi-byte values are encoded in big-endian byte order.
func (seg *Segment) Set(offset int64, v ...interface{}) error {
	for i, val := range v {
		// Bind the concrete value in the type switch once, instead of
		// re-asserting with val.(T) inside every case.
		switch src := val.(type) {
		default:
			return &ErrorUnsupportedType{Index: i}
		case uint8:
			buf := []byte{src}
			if err := seg.write(buf, offset, i); err != nil {
				return err
			}
			seg.next(buf, &offset)
		case uint16:
			buf := make([]byte, 2)
			binary.BigEndian.PutUint16(buf, src)
			if err := seg.write(buf, offset, i); err != nil {
				return err
			}
			seg.next(buf, &offset)
		case uint32:
			buf := make([]byte, 4)
			binary.BigEndian.PutUint32(buf, src)
			if err := seg.write(buf, offset, i); err != nil {
				return err
			}
			seg.next(buf, &offset)
		case uint64:
			buf := make([]byte, 8)
			binary.BigEndian.PutUint64(buf, src)
			if err := seg.write(buf, offset, i); err != nil {
				return err
			}
			seg.next(buf, &offset)
		}
	}
	return nil
}
// Inc sequentially increments values in the buffer starting at offset, using
// the deltas given in v. Supported delta types are uint8, uint16, uint32 and
// uint64; any other type yields an ErrorUnsupportedType naming the offending
// argument index. Each value is read, incremented (with the unsigned type's
// natural wrap-around) and written back in big-endian byte order.
func (seg *Segment) Inc(offset int64, v ...interface{}) error {
	for i, val := range v {
		// Bind the concrete delta in the type switch once, instead of
		// re-asserting with val.(T) inside every case.
		switch delta := val.(type) {
		default:
			return &ErrorUnsupportedType{Index: i}
		case uint8:
			buf := make([]byte, 1)
			if err := seg.read(buf, offset, i); err != nil {
				return err
			}
			buf[0] += delta
			if err := seg.write(buf, offset, i); err != nil {
				return err
			}
			seg.next(buf, &offset)
		case uint16:
			buf := make([]byte, 2)
			if err := seg.read(buf, offset, i); err != nil {
				return err
			}
			binary.BigEndian.PutUint16(buf, binary.BigEndian.Uint16(buf)+delta)
			if err := seg.write(buf, offset, i); err != nil {
				return err
			}
			seg.next(buf, &offset)
		case uint32:
			buf := make([]byte, 4)
			if err := seg.read(buf, offset, i); err != nil {
				return err
			}
			binary.BigEndian.PutUint32(buf, binary.BigEndian.Uint32(buf)+delta)
			if err := seg.write(buf, offset, i); err != nil {
				return err
			}
			seg.next(buf, &offset)
		case uint64:
			buf := make([]byte, 8)
			if err := seg.read(buf, offset, i); err != nil {
				return err
			}
			binary.BigEndian.PutUint64(buf, binary.BigEndian.Uint64(buf)+delta)
			if err := seg.write(buf, offset, i); err != nil {
				return err
			}
			seg.next(buf, &offset)
		}
	}
	return nil
}
// Dec sequentially decrements values in the buffer starting from given offset using deltas specified by v.
func (seg *Segment) Dec(offset int64, v ...interface{}) error {
for i, val := range v {
switch val.(type) {
default:
return &ErrorUnsupportedType{Index: i}
case uint8:
buf := make([]byte, 1)
if err := seg.read(buf, offset, i); err != nil {
return err
}
buf[0] -= val.(uint8)
if err := seg.write(buf, offset, i); err != nil {
return err
}
seg.next(buf, &offset)
case uint16:
buf := make([]byte, 2)
if err := seg.read(buf, offset, i); err != nil {
return err
}
binary.BigEndian.PutUint16(buf, binary.BigEndian.Uint16(buf)-val.(uint16))
if err := seg.write(buf, offset, i); err != nil {
return err
}
seg.next(buf, &offset)
case uint32:
buf := make([]byte, 4)
if err := seg.read(buf, offset, i); err != nil {
return err
}
binary.BigEndian.PutUint32(buf, binary.BigEndian.Uint32(buf)-val.(uint32))
if err := seg.write(buf, offset, i); err != nil {
return err
}
seg.next(buf, &offset)
case uint64:
buf := make([]byte, 8)
if err := seg.read(buf, offset, i); err != nil {
return err
}
binary.BigEndian.PutUint64(buf, binary.BigEndian.Uint64(buf)-val.(uint64))
if err := seg.write(buf, offset, i); err != nil {
return err
}
seg.next(buf, &offset)
}
}
return nil
} | segment/segment.go | 0.668772 | 0.45175 | segment.go | starcoder |
package tort
import (
"fmt"
"strconv"
"time"
)
// TimeAssertion accepts time.Time extensions: any type that can expose an
// underlying time.Time may participate in time assertions.
type TimeAssertion interface {
	// Assert returns the underlying time.Time value.
	Assert() time.Time
}
// TimeAssertions are tests around time values.
type TimeAssertions struct {
	Assertions
	name string // label used in failure messages, e.g. "MyStruct.CreatedAt"
	time time.Time // the value under test
}
// Time identifies a time.Time variable value and returns test functions for its values.
func (assert Assertions) Time(value time.Time) TimeAssertions {
	assert.t.Helper()
	ta := TimeAssertions{Assertions: assert, name: "time.Time", time: value}
	return ta
}
// Time identifies a time field on a struct. If the field isn't present, or is
// neither a time.Time nor a TimeAssertion implementation, generates an error.
func (assert StructAssertions) Time(field string) TimeAssertions {
	assert.t.Helper()
	name := fmt.Sprintf("%s.%s", assert.Type(), field)
	property := assert.Field(field)
	var val time.Time
	switch v := property.Interface().(type) {
	case time.Time:
		val = v
	case TimeAssertion:
		val = v.Assert()
	default:
		assert.Fatal(`%s is not time; it's "%#v"`, name, v)
	}
	return TimeAssertions{
		Assertions: assert.Assertions,
		name:       name,
		time:       val,
	}
}
// Time looks up an element in a slice expecting it to be a time.Time, or a
// value that fulfills TimeAssertion. Anything else generates a fatal error.
func (assert SliceAssertions) Time(idx int) TimeAssertions {
	assert.t.Helper()
	name := strconv.Itoa(idx)
	property := assert.Element(idx)
	val, ok := property.Interface().(time.Time)
	if !ok {
		// Fall back to the TimeAssertion extension point before failing.
		if i, ok := property.Interface().(TimeAssertion); ok {
			val = i.Assert()
		} else {
			assert.Fatal(`element %d is not time; it's "%#v"`, idx, property.Interface())
		}
	}
	return TimeAssertions{
		Assertions: assert.Assertions,
		name:       name,
		time:       val,
	}
}
// IsSet generates an error if the time is not set (is the zero time).
func (assert TimeAssertions) IsSet() {
	assert.t.Helper()
	if assert.time.IsZero() {
		assert.Failed(`%s is not set`, assert.name)
	}
}
// IsNotSet generates an error if the time is set (is not the zero time).
func (assert TimeAssertions) IsNotSet() {
	assert.t.Helper()
	if !assert.time.IsZero() {
		assert.Failed(`%s is set to %s`, assert.name, assert.time)
	}
}
// Within generates an error if the time is more than the given duration in
// the past; i.e. it asserts the time happened within dur of right now.
// NOTE(review): a time in the future yields a negative time.Since value and
// therefore always passes — confirm that is intended.
func (assert TimeAssertions) Within(dur time.Duration) {
	assert.t.Helper()
	since := time.Since(assert.time)
	if since > dur {
		assert.Failed(`%s happened %s ago, more than %s`, assert.name, since, dur)
	}
}
// Before generates an error if the time is after the other.
func (assert TimeAssertions) Before(other time.Time) {
	assert.t.Helper()
	if !assert.time.After(other) {
		return
	}
	assert.Failed(`%s at %s happened after %s`, assert.name, assert.time, other)
}
// After generates an error if the time is before the other.
func (assert TimeAssertions) After(other time.Time) {
	assert.t.Helper()
	if assert.time.Before(other) {
		assert.Failed(`%s at %s happened before %s`, assert.name, assert.time, other)
	}
}
// Equals generates an error if the two times do not represent the same instant.
func (assert TimeAssertions) Equals(other time.Time) {
	assert.t.Helper()
	if assert.time.Equal(other) {
		return
	}
	assert.Failed(`%s at %s does not equal %s`, assert.name, assert.time, other)
}
// NotEquals generates an error if the two times represent the same instant.
func (assert TimeAssertions) NotEquals(other time.Time) {
	assert.t.Helper()
	if assert.time.Equal(other) {
		assert.Failed(`%s at %s equals %s`, assert.name, assert.time, other)
	}
}
// DurationAssertions are tests around duration values.
type DurationAssertions struct {
	Assertions
	name string // label used in failure messages
	dur time.Duration // the value under test
}
// Duration identifies a time.Duration variable value and returns test functions for its values.
func (assert Assertions) Duration(value time.Duration) DurationAssertions {
	assert.t.Helper()
	da := DurationAssertions{Assertions: assert, name: "time.Duration", dur: value}
	return da
}
// Duration identifies a duration field on a struct. If the field isn't
// present, or isn't a time.Duration, generates an error.
func (assert StructAssertions) Duration(field string) DurationAssertions {
	assert.t.Helper()
	name := fmt.Sprintf("%s.%s", assert.Type(), field)
	property := assert.Field(field)
	switch v := property.Interface().(type) {
	case time.Duration:
		return DurationAssertions{Assertions: assert.Assertions, name: name, dur: v}
	default:
		assert.Fatal(`%s is not a duration; it's "%#v"`, name, v)
		return DurationAssertions{Assertions: assert.Assertions, name: name}
	}
}
// Duration identifies a duration element in the slice. If the element isn't
// present, or isn't a time.Duration, generates an error.
func (assert SliceAssertions) Duration(idx int) DurationAssertions {
	assert.t.Helper()
	name := strconv.Itoa(idx)
	property := assert.Element(idx)
	val, ok := property.Interface().(time.Duration)
	if !ok {
		assert.Fatal(`%s is not a duration; it's "%#v"`, name, property.Interface())
	}
	return DurationAssertions{
		Assertions: assert.Assertions,
		name:       name,
		dur:        val,
	}
}
// Equals generates an error if the duration does not equal the other.
func (assert DurationAssertions) Equals(other time.Duration) {
	assert.t.Helper()
	// Bug fix: the condition was inverted — it failed when the durations were
	// equal and silently passed when they differed.
	if assert.dur != other {
		assert.Failed(`%s with a duration of %s does not equal %s`, assert.name, assert.dur, other)
	}
}
// NotEquals generates an error if the duration equals the other.
func (assert DurationAssertions) NotEquals(other time.Duration) {
	assert.t.Helper()
	if assert.dur == other {
		assert.Failed(`%s equals duration %s`, assert.name, other)
	}
}
// GreaterThan generates an error if the duration is less than or equal to the other.
func (assert DurationAssertions) GreaterThan(other time.Duration) {
	assert.t.Helper()
	if assert.dur <= other {
		// Message fixed: the assertion also fires on equality, so the old
		// wording "is less than" misreported the equal case.
		assert.Failed(`%s duration of %s is not greater than %s`, assert.name, assert.dur, other)
	}
}
// LessThan generates an error if the duration is greater than or equal to the other..
func (assert DurationAssertions) LessThan(other time.Duration) {
assert.t.Helper()
if assert.dur >= other {
assert.Failed(`%s duration of %s is greater than %s`, assert.name, assert.dur, other)
}
} | times.go | 0.818628 | 0.761028 | times.go | starcoder |
package main
// sufIndexed pairs a file name with the precomputed start index of its
// version-relevant suffix (see suffixIndex). When the name begins with a dot,
// idx is an index into the name with that leading dot stripped — matching the
// stripping later performed by filevercmp.
type sufIndexed struct {
	str string // the original file name
	idx int // start of the matched suffix (index into the dot-stripped name)
}
// newSufIndexed builds a sufIndexed for s, skipping a single leading dot
// (hidden-file marker) before locating the suffix.
func newSufIndexed(s string) sufIndexed {
	trimmed := s
	if len(trimmed) > 0 && trimmed[0] == '.' {
		trimmed = trimmed[1:]
	}
	return sufIndexed{s, suffixIndex(trimmed)}
}
// isAlpha reports whether c is an ASCII letter (A-Z or a-z).
func isAlpha(c byte) bool {
	lower := c | 32 // fold upper case to lower
	return 'a' <= lower && lower <= 'z'
}
// isAlnum reports whether c is an ASCII letter or decimal digit.
func isAlnum(c byte) bool {
	if '0' <= c && c <= '9' {
		return true
	}
	lower := c | 32 // fold upper case to lower
	return 'a' <= lower && lower <= 'z'
}
// isDigit reports whether c is an ASCII decimal digit.
func isDigit(c byte) bool {
	return '0' <= c && c <= '9'
}
// suffixIndex matches a file suffix defined by this regular expression:
//   (\.[A-Za-z~][A-Za-z0-9~]*)*$
// It scans s backwards and returns the index at which the matching suffix
// starts, or len(s) when no suffix matches. (This is a Go port of a C routine
// that returned a pointer into the string; here an index is returned instead.)
func suffixIndex(s string) int {
	readAlphat := false // saw a letter/tilde since the last dot or digit
	matched := 0 // length, counted from the end, of the best match so far
	j := 0 // number of characters scanned so far (from the end)
	for i := len(s) - 1; i >= 0; i-- {
		c := s[i]
		// Manual inlining helps a lot here
		if (((c)|32)-'a') < 26 || c == '~' { // isAlpha
			readAlphat = true
		} else if readAlphat && c == '.' {
			// A dot directly preceding a letter/tilde run extends the suffix.
			matched = j + 1
		} else if c-'0' < 10 { // isDigit
			readAlphat = false
		} else {
			break
		}
		j++
	}
	return len(s) - matched
}
// sortOrder ranks a byte for verrevcmp: letters sort by their own value,
// digits collapse to 0, '~' sorts before everything else, and all remaining
// bytes are pushed past the letter range by a +256 offset.
func sortOrder(c byte) int {
	switch {
	case isAlpha(c):
		return int(c)
	case isDigit(c):
		return 0
	case c == '~':
		return -1
	default:
		return int(c) + 256
	}
}
// Slightly modified verrevcmp function from dpkg. It compares the version
// strings a and b and returns <0, 0 or >0. The strings are consumed as
// alternating non-digit and digit spans; non-digit spans compare bytewise via
// sortOrder, digit spans compare numerically (leading zeros skipped, longer
// run of digits wins, first differing digit breaks ties).
func verrevcmp(a, b string) int {
	ai, bi := 0, 0
	for ai < len(a) || bi < len(b) {
		firstDiff := 0
		// Compare the non-digit span byte by byte. A run-out string
		// contributes 0, ranking end-of-string together with digits.
		for (ai < len(a) && !isDigit(a[ai])) ||
			(bi < len(b) && !isDigit(b[bi])) {
			var ac, bc int
			if ai < len(a) {
				ac = sortOrder(a[ai])
			}
			if bi < len(b) {
				bc = sortOrder(b[bi])
			}
			if ac != bc {
				return ac - bc
			}
			ai++
			bi++
		}
		// Skip leading zeros so the digit spans compare numerically.
		for ai < len(a) && a[ai] == '0' {
			ai++
		}
		for bi < len(b) && b[bi] == '0' {
			bi++
		}
		// Walk both digit spans in lockstep, remembering the first difference.
		for ai < len(a) && isDigit(a[ai]) &&
			bi < len(b) && isDigit(b[bi]) {
			if firstDiff == 0 {
				firstDiff = int(a[ai]) - int(b[bi])
			}
			ai++
			bi++
		}
		// A longer digit span means a larger number.
		if ai < len(a) && isDigit(a[ai]) {
			return 1
		}
		if bi < len(b) && isDigit(b[bi]) {
			return -1
		}
		if firstDiff != 0 {
			return firstDiff
		}
	}
	return 0
}
// filevercmp compares the version ordering of the two file names held in sf1
// and sf2 and returns <0, 0 or >0. "" sorts first, then "." and "..", then
// hidden names (leading dot) before visible ones. When the names differ only
// in their suffixes (as located by suffixIndex), the suffixes are compared;
// otherwise the names with suffixes removed are compared via verrevcmp.
func filevercmp(sf1, sf2 *sufIndexed) int {
	s1, s2 := sf1.str, sf2.str
	// easy comparison to see if strings are identical
	if s1 == s2 {
		return 0
	}
	// special handle for "", "." and ".."
	switch {
	case s1 == "":
		return -1
	case s2 == "":
		return 1
	case s1 == ".":
		return -1
	case s2 == ".":
		return 1
	case s1 == "..":
		return -1
	case s2 == "..":
		return 1
	}
	// special handle for other hidden files: strip the leading dot from both,
	// or sort the hidden name first when only one of them is hidden
	if s1[0] == '.' {
		if s2[0] == '.' {
			s1 = s1[1:]
			s2 = s2[1:]
		} else {
			return -1
		}
	} else if s2[0] == '.' {
		return 1
	}
	// file suffixes; the indices were precomputed by newSufIndexed on the
	// dot-stripped names, matching the stripping done just above
	s1i := sf1.idx
	s2i := sf2.idx
	s1Cut, s1Suf := s1[:s1i], s1[s1i:]
	s2Cut, s2Suf := s2[:s2i], s2[s2i:]
	var result int
	// restore file suffixes if strings are identical after "cut"
	if s1Cut == s2Cut {
		result = verrevcmp(s1Suf, s2Suf)
	} else {
		result = verrevcmp(s1Cut, s2Cut)
	}
	if result == 0 {
		// NOTE(review): plain lexicographic tie-break — an addition over the
		// classic GNU/dpkg routine; confirm it is intended.
		if s1 < s2 {
			return -1
		} else if s1 > s2 {
			return 1
		}
	}
	return result
}
package decimal
// DecByteToString maps every byte value 0-255 to its zero-padded three-digit
// decimal representation ("000" ... "255"). The table is generated once at
// package initialisation instead of being spelled out as 256 literals.
var DecByteToString = func() (table [256]string) {
	for i := 0; i < 256; i++ {
		table[i] = string([]byte{
			'0' + byte(i/100),
			'0' + byte(i/10%10),
			'0' + byte(i%10),
		})
	}
	return table
}()
package ui
import (
"github.com/BurntSushi/xgb/shape"
"github.com/BurntSushi/xgb/xproto"
"github.com/BurntSushi/xgbutil"
"github.com/BurntSushi/xgbutil/xwindow"
"github.com/BurntSushi/xgbutil/xrect"
"log"
)
// coords unpacks rect into its top-left (min_x, min_y) and
// bottom-right (max_x, max_y) corner coordinates.
func coords(rect xrect.Rect) (min_x, min_y, max_x, max_y int) {
	x, y, w, h := rect.X(), rect.Y(), rect.Width(), rect.Height()
	return x, y, x + w, y + h
}
// min returns the smaller of a and b.
func min(a, b int) int {
	if b < a {
		return b
	}
	return a
}
// max returns the larger of a and b.
func max(a, b int) int {
	if b > a {
		return b
	}
	return a
}
// Bound returns the smallest rectangle that covers all of the given
// rectangles. It returns nil for an empty slice; the previous version
// indexed rects[0] unconditionally and panicked.
func Bound(rects []xrect.Rect) xrect.Rect {
	if len(rects) == 0 {
		return nil
	}
	min_x, min_y, max_x, max_y := coords(rects[0])
	for _, rect := range rects[1:] {
		x1, y1, x2, y2 := coords(rect)
		min_x = min(x1, min_x)
		min_y = min(y1, min_y)
		max_x = max(x2, max_x)
		max_y = max(y2, max_y)
	}
	return xrect.New(min_x, min_y, max_x-min_x, max_y-min_y)
}
// compose a number of rectabgles into a window shape
func ComposeShape(X *xgbutil.XUtil, dst xproto.Window, rects []xrect.Rect) (err error) {
combine_bounds := make([]shape.CombineCookie, len(rects))
combine_clip := make([]shape.CombineCookie, len(rects))
var operation shape.Op
for i, rect := range rects {
// make rectangular window of correct goemetry
win, err := xwindow.Generate(X)
if err != nil {
log.Fatalf("ComposeShape: Error creating rectange %v window.", rect)
return err
}
win.Create(X.RootWin(), rect.X(), rect.Y(), rect.Width(), rect.Height(), xproto.CwBackPixel, 0xffffff)
// choose operation. on the first one, we want to set the shape.
if i == 0 {
operation = shape.SoSet
} else {
operation = shape.SoUnion
}
// combine window request
x, y := int16(rect.X()), int16(rect.Y())
combine_kind := shape.Kind(shape.SkBounding)
combine_bounds[i] = shape.CombineChecked(X.Conn(), operation, combine_kind, combine_kind, dst, x, y, win.Id)
combine_kind = shape.Kind(shape.SkClip)
combine_clip[i] = shape.CombineChecked(X.Conn(), operation, combine_kind, combine_kind, dst, x, y, win.Id)
}
return nil
} | ui/shape.go | 0.759582 | 0.48749 | shape.go | starcoder |
package shape
import (
"fmt"
"io"
"math"
"github.com/gregoryv/draw/xy"
"github.com/gregoryv/nexus"
)
// NewArrow returns an arrow from (x1, y1) to (x2, y2), decorated with a
// triangular head positioned at the end point.
func NewArrow(x1, y1, x2, y2 int) *Arrow {
	a := &Arrow{
		Start: xy.Point{x1, y1},
		End:   xy.Point{x2, y2},
		class: "arrow",
	}
	h := NewTriangle()
	h.SetX(x2)
	h.SetY(y2)
	h.SetClass("arrow-head")
	a.Head = h
	return a
}
// Arrow is a straight line from Start to End, optionally decorated with a
// Tail and a Head shape; class is the base CSS class used when rendering.
type Arrow struct {
	Start xy.Point
	End   xy.Point
	Tail  Shape
	Head  Shape
	class string
}

// String returns a short human-readable description for debugging.
func (a *Arrow) String() string {
	return fmt.Sprintf("Arrow from %v to %v", a.Start, a.End)
}
// WriteSVG renders the arrow as an SVG path plus optional rotated tail
// and head groups; the rotation aligns the decorations with the arrow's
// angle.
func (a *Arrow) WriteSVG(out io.Writer) error {
	// the nexus printer records the first write error behind the returned
	// *error, so dereferencing it at the end yields nil on success
	// (NOTE(review): confirm against the nexus API)
	w, err := nexus.NewPrinter(out)
	x1, y1 := a.Start.XY()
	x2, y2 := a.End.XY()
	w.Printf(`<path class="%s" d="M%v,%v L%v,%v" />`, a.class, x1, y1, x2, y2)
	w.Print("\n")
	if a.Tail != nil {
		// rotate the tail group around the start point
		w.Printf(`<g transform="rotate(%v %v %v)">`, a.angle(), x1, y1)
		alignTail(a.Tail, x1, y1)
		a.Tail.SetClass(a.class + "-tail")
		a.Tail.WriteSVG(out)
		w.Print("</g>\n")
	}
	if a.Head != nil {
		// rotate the head group around the end point
		w.Printf(`<g transform="rotate(%v %v %v)">`, a.angle(), x2, y2)
		a.Head.SetX(a.End.X)
		a.Head.SetY(a.End.Y)
		a.Head.SetClass(a.class + "-head")
		a.Head.WriteSVG(out)
		w.Print("</g>\n")
	}
	return *err
}

// alignTail positions the tail shape s at (x, y); circles are lifted by
// their radius so they sit centered on the line's start point.
func alignTail(s Shape, x, y int) {
	switch s := s.(type) {
	case *Circle:
		s.SetX(x)
		s.SetY(y - s.Radius)
	default:
		s.SetX(x)
		s.SetY(y)
	}
}
// AbsAngle returns the absolute value of Angle, in degrees.
func (a *Arrow) AbsAngle() int { return int(a.absAngle()) }

func (a *Arrow) absAngle() float64 {
	return math.Abs(float64(a.angle()))
}

// Angle returns value in degrees. Right = 0, down = 90, left: 180, up = -90
func (a *Arrow) Angle() int { return a.angle() }

// angle returns degrees the head of an arrow should rotate depending
// on direction
func (a *Arrow) angle() int {
	var (
		start = a.Start
		end   = a.End
		// straight arrows
		right = start.LeftOf(end) && start.Y == end.Y
		left  = start.RightOf(end) && start.Y == end.Y
		down  = start.Above(end) && start.X == end.X
		up    = start.Below(end) && start.X == end.X
	)
	switch {
	case right: // most frequent arrow on top; 0 degrees, via the final return
	case left:
		return 180
	case down:
		return 90
	case up:
		return -90
	// diagonal arrows: derive the angle from the right triangle spanned
	// by the two endpoints, one quadrant at a time
	case a.DirQ1():
		a := float64(end.Y - start.Y)
		b := float64(end.X - start.X)
		A := math.Atan(a / b)
		return radians2degrees(A)
	case a.DirQ2():
		a := float64(end.Y - start.Y)
		b := float64(start.X - end.X)
		A := math.Atan(a / b)
		return 180 - radians2degrees(A)
	case a.DirQ3():
		a := float64(start.Y - end.Y)
		b := float64(start.X - end.X)
		A := math.Atan(a / b)
		return radians2degrees(A) + 180
	case a.DirQ4():
		a := float64(start.Y - end.Y)
		b := float64(end.X - start.X)
		A := math.Atan(a / b)
		return -radians2degrees(A)
	}
	return 0
}
// DirQ1 returns true if the arrow points to the bottom-right
// quadrant.
func (a *Arrow) DirQ1() bool {
	start, end := a.endpoints()
	return start.LeftOf(end) && end.Below(start)
}

// DirQ2 returns true if the arrow points to the bottom-left
// quadrant.
func (a *Arrow) DirQ2() bool {
	start, end := a.endpoints()
	return start.RightOf(end) && end.Below(start)
}

// DirQ3 returns true if the arrow points to the top-left
// quadrant.
func (a *Arrow) DirQ3() bool {
	start, end := a.endpoints()
	return start.RightOf(end) && end.Above(start)
}

// DirQ4 returns true if the arrow points to the top-right
// quadrant.
func (a *Arrow) DirQ4() bool {
	start, end := a.endpoints()
	return start.LeftOf(end) && end.Above(start)
}

// endpoints returns the start and end points of the arrow.
func (a *Arrow) endpoints() (xy.Point, xy.Point) {
	return a.Start, a.End
}

// radians2degrees converts an angle in radians to whole degrees.
func radians2degrees(A float64) int {
	return int(A * 180 / math.Pi)
}
// Height returns the vertical span of the arrow.
func (a *Arrow) Height() int {
	return intAbs(a.Start.Y - a.End.Y)
}

// Width returns the horizontal span of the arrow.
func (a *Arrow) Width() int {
	return intAbs(a.Start.X - a.End.X)
}

// Position returns the coordinates of the start point.
func (a *Arrow) Position() (int, int) {
	return a.Start.XY()
}

// CenterPosition returns the midpoint of the arrow, derived from the
// start point, the arrow's dimensions and its direction.
func (a *Arrow) CenterPosition() (x int, y int) {
	d := a.Direction()
	if d.Is(DirectionRight) {
		x = a.Start.X + a.Width()/2
	} else {
		x = a.Start.X - a.Width()/2
	}
	if d.Is(DirectionDown) {
		y = a.Start.Y + a.Height()/2
	} else {
		y = a.Start.Y - a.Height()/2
	}
	return
}

// SetX moves the start point to x and shifts the end point by the same
// delta, translating the whole arrow horizontally.
func (a *Arrow) SetX(x int) {
	diff := a.Start.X - x
	a.Start.X = x
	a.End.X = a.End.X - diff // Set X2 so the entire arrow moves
}

// SetY moves the start point to y and shifts the end point by the same
// delta, translating the whole arrow vertically.
func (a *Arrow) SetY(y int) {
	diff := a.Start.Y - y
	a.Start.Y = y
	a.End.Y = a.End.Y - diff // Set Y2 so the entire arrow moves
}

// Direction returns vertical or horizontal direction, Other if at an angle.
// If Other, use arrow.DirQn() methods to check to which quadrant.
func (a *Arrow) Direction() Direction {
	return NewDirection(a.Start, a.End)
}

// SetClass sets the base CSS class used when rendering the arrow.
func (a *Arrow) SetClass(c string) { a.class = c }
// NewArrowBetween returns an arrow connecting the centers of shapes a and
// b. When a shape implements Edge, the corresponding endpoint is pulled
// back onto the shape's boundary instead of its center.
func NewArrowBetween(a, b Shape) *Arrow {
	ax, ay := a.Position()
	bx, by := b.Position()
	// From center to center
	x1 := ax + a.Width()/2
	y1 := ay + a.Height()/2
	x2 := bx + b.Width()/2
	y2 := by + b.Height()/2
	arrow := NewArrow(x1, y1, x2, y2)
	bs, ok := b.(Edge)
	if ok {
		p := bs.Edge(arrow.Start)
		arrow.End.X = p.X
		arrow.End.Y = p.Y
	}
	as, ok := a.(Edge)
	if ok {
		p := as.Edge(arrow.End)
		arrow.Start.X = p.X
		arrow.Start.Y = p.Y
	}
	return arrow
} | shape/arrow.go | 0.776369 | 0.430207 | arrow.go | starcoder
package medtronic
import (
"fmt"
"time"
)
// CGMRecordType represents a CGM record type.
type CGMRecordType byte

//go:generate stringer -type CGMRecordType

// Events stored in the pump's CGM history pages.
// The values are the on-the-wire type bytes; anything at or above
// CGMGlucose is not a real type code but a raw glucose reading.
const (
	CGMDataEnd       CGMRecordType = 0x01
	CGMWeakSignal    CGMRecordType = 0x02
	CGMCal           CGMRecordType = 0x03
	CGMPacket        CGMRecordType = 0x04
	CGMError         CGMRecordType = 0x05
	CGMDataLow       CGMRecordType = 0x06
	CGMDataHigh      CGMRecordType = 0x07
	CGMTimestamp     CGMRecordType = 0x08
	CGMBatteryChange CGMRecordType = 0x0A
	CGMStatus        CGMRecordType = 0x0B
	CGMTimeChange    CGMRecordType = 0x0C
	CGMSync          CGMRecordType = 0x0D
	CGMCalBG         CGMRecordType = 0x0E
	CGMCalFactor     CGMRecordType = 0x0F
	CGMEvent10       CGMRecordType = 0x10
	CGMEvent13       CGMRecordType = 0x13

	// Synthetic record type.
	// Single bytes with this value or greater represent glucose readings.
	CGMGlucose CGMRecordType = 0x20
)
type (
	// CGMRecord represents a CGM record.
	CGMRecord struct {
		Type    CGMRecordType
		Data    []byte // the raw bytes the record was decoded from
		Time    time.Time
		Glucose int    `json:",omitempty"`
		Value   string `json:",omitempty"`
	}
	// CGMHistory represents a sequence of CGM records.
	CGMHistory []CGMRecord

	// cgmDecoder fills in the type-specific fields of a partially
	// decoded record.
	cgmDecoder func(*CGMRecord)

	// decodeInfo pairs a record type's total byte length with its
	// optional decoder.
	decodeInfo struct {
		length  int
		decoder cgmDecoder
	}
)

// 1-byte records containing just a type code need not be specified here
// unless additional decoding logic is required.
// Timestamp decoding is done whenever the record is at least 5 bytes long,
// so there is no need to include that in the decoder.
var cgmDecodeInfo = map[CGMRecordType]decodeInfo{
	CGMCal:           {2, decodeCGMCal},
	CGMPacket:        {2, nil},
	CGMError:         {2, nil},
	CGMDataLow:       {1, decodeCGMDataLow},
	CGMDataHigh:      {2, decodeCGMDataHigh},
	CGMTimestamp:     {5, decodeCGMTimestamp},
	CGMBatteryChange: {5, nil},
	CGMStatus:        {5, decodeCGMStatus},
	CGMTimeChange:    {5, nil},
	CGMSync:          {5, decodeCGMSync},
	CGMCalBG:         {6, decodeCGMCalBG},
	CGMCalFactor:     {7, nil},
	CGMEvent10:       {8, nil},
	CGMGlucose:       {1, decodeCGMGlucose},
}
// Decode a 4-byte timestamp from a glucose history record.
// Field layout, derived from the masks below:
//   data[0]: month high bits in 7-6, hour in bits 4-0
//   data[1]: month low bits in 7-6, minute in bits 5-0
//   data[2]: day in bits 4-0
//   data[3]: year minus 2000 in bits 6-0
func decodeCGMTime(data []byte) time.Time {
	sec := 0
	min := int(data[1] & 0x3F)
	hour := int(data[0] & 0x1F)
	day := int(data[2] & 0x1F)
	// The 4-bit month value is encoded in the high 2 bits of the first 2 bytes.
	month := time.Month(int(data[0]>>6)<<2 | int(data[1]>>6))
	year := 2000 + int(data[3]&0x7F)
	return time.Date(year, month, day, hour, min, sec, 0, time.Local)
}
// isRelative reports whether records of this type carry no timestamp of
// their own and are instead placed relative to the running clock.
func (t CGMRecordType) isRelative() bool {
	switch t {
	case CGMWeakSignal, CGMCal, CGMPacket, CGMError, CGMDataLow, CGMDataHigh, CGMGlucose:
		return true
	}
	return false
}
// decodeCGMCal interprets the calibration sub-status byte.
func decodeCGMCal(r *CGMRecord) {
	switch r.Data[1] {
	case 0:
		r.Value = "bgNow"
	case 1:
		r.Value = "waiting"
	case 2:
		r.Value = "error"
	default:
		r.Value = "unknown"
	}
}

// decodeCGMDataLow records the sensor's low clamp reading.
func decodeCGMDataLow(r *CGMRecord) {
	r.Glucose = 40
}

// decodeCGMDataHigh records the sensor's high clamp reading.
func decodeCGMDataHigh(r *CGMRecord) {
	r.Glucose = 400
}

// decodeCGMTimestamp classifies the timestamp record using bits 6-5 of
// the fourth data byte.
func decodeCGMTimestamp(r *CGMRecord) {
	switch (r.Data[3] >> 5) & 0x3 {
	case 0:
		r.Value = "lastRF"
	case 1:
		r.Value = "pageEnd"
	case 2:
		r.Value = "gap"
	default:
		r.Value = "unknown"
	}
}

// decodeCGMStatus classifies the sensor status using bits 6-5 of the
// fourth data byte.
func decodeCGMStatus(r *CGMRecord) {
	switch (r.Data[3] >> 5) & 0x3 {
	case 0:
		r.Value = "off"
	case 1:
		r.Value = "on"
	case 2:
		r.Value = "lost"
	default:
		r.Value = "unknown"
	}
}

// decodeCGMSync classifies the sync record using bits 6-5 of the fourth
// data byte.
func decodeCGMSync(r *CGMRecord) {
	switch (r.Data[3] >> 5) & 0x3 {
	case 1:
		r.Value = "new"
	case 2:
		r.Value = "old"
	default:
		r.Value = "find"
	}
}

// decodeCGMCalBG reads the calibration BG value from the sixth byte.
func decodeCGMCalBG(r *CGMRecord) {
	r.Glucose = int(r.Data[5])
}

// decodeCGMGlucose converts a raw reading byte: the stored value is half
// the glucose value (NOTE(review): units presumed mg/dL — confirm).
func decodeCGMGlucose(r *CGMRecord) {
	r.Glucose = 2 * int(r.Data[0])
}
// DecodeCGMRecord decodes one CGM history record from the front of data,
// based on its leading type byte.
func DecodeCGMRecord(data []byte) (CGMRecord, error) {
	if len(data) == 0 {
		return CGMRecord{}, fmt.Errorf("DecodeCGMRecord: len(data) == 0")
	}
	recType := CGMRecordType(data[0])
	if recType >= CGMGlucose {
		// single bytes at or above 0x20 are raw glucose readings
		recType = CGMGlucose
	}
	length := 1
	var decode cgmDecoder
	if info, ok := cgmDecodeInfo[recType]; ok {
		length = info.length
		decode = info.decoder
	}
	if length > len(data) {
		return CGMRecord{}, fmt.Errorf("DecodeCGMRecord: expected %d-byte record but len(data) = %d", length, len(data))
	}
	rec := CGMRecord{Type: recType, Data: data[:length]}
	if length >= 5 {
		rec.Time = decodeCGMTime(rec.Data[1:5])
	}
	if decode != nil {
		decode(&rec)
	}
	return rec, nil
}
// DecodeCGMHistory decodes the records in a page of CGM data and
// returns them in reverse chronological order (most recent first).
// If a non-zero time is given, it is used as the initial timestamp.
func DecodeCGMHistory(data []byte, t time.Time) (CGMHistory, time.Time, error) {
	// pages are stored oldest-first; flip so the newest record comes first
	reverseBytes(data)
	// skip any leading zero padding
	var i int
	var b byte
	for i, b = range data {
		if b != 0 {
			break
		}
	}
	data = data[i:]
	var results CGMHistory
	var err error
	if t.IsZero() {
		// no caller-supplied clock: scan ahead for a usable timestamp record
		t, results, err = initialTimestamp(data)
		if err != nil {
			return results, t, err
		}
	}
	results = nil
	for len(data) != 0 {
		var r CGMRecord
		r, err = DecodeCGMRecord(data)
		if err != nil {
			break
		}
		if r.Type == CGMTimestamp {
			// absolute timestamps reset the running clock
			t = r.Time
		} else if r.Type.isRelative() {
			// relative records are spaced 5 minutes apart, walking backwards
			r.Time = t
			t = t.Add(-5 * time.Minute)
		}
		results = append(results, r)
		data = data[len(r.Data):]
	}
	return results, t, err
}

// ErrorNeedsTimestamp indicates that no initial timestamp was found.
var ErrorNeedsTimestamp = fmt.Errorf("CGM history needs timestamp")

// initialTimestamp scans the page prefix for a timestamp record and
// extrapolates the time of the first record by counting the relative
// (5-minute) records seen before it.
func initialTimestamp(data []byte) (time.Time, CGMHistory, error) {
	var results CGMHistory
	numRelative := 0
	var err error
	for len(data) != 0 {
		var r CGMRecord
		r, err = DecodeCGMRecord(data)
		if err != nil {
			return time.Time{}, results, err
		}
		results = append(results, r)
		data = data[len(r.Data):]
		if r.Type.isRelative() {
			numRelative++
			continue
		}
		if r.Type == CGMTimestamp {
			if numRelative == 0 || r.hasOffsetTimestamp() {
				// each preceding relative record pushes the start 5 minutes later
				delta := time.Duration(numRelative) * 5 * time.Minute
				return r.Time.Add(delta), results, nil
			}
		}
		if r.Type != CGMDataEnd && r.Type != CGMEvent13 {
			break
		}
	}
	return time.Time{}, results, ErrorNeedsTimestamp
}
// hasOffsetTimestamp reports whether this timestamp record marks a point
// that relative records can be offset from.
func (r CGMRecord) hasOffsetTimestamp() bool {
	switch r.Value {
	case "lastRF", "pageEnd":
		return true
	}
	return false
}
// reverseBytes reverses a in place.
func reverseBytes(a []byte) {
	i, j := 0, len(a)-1
	for i < j {
		a[i], a[j] = a[j], a[i]
		i++
		j--
	}
}
// ReverseCGMHistory reverses a slice of CGM history records.
func ReverseCGMHistory(a CGMHistory) {
	// swap symmetric pairs in place
	for i, j := 0, len(a)-1; i < len(a)/2; i, j = i+1, j-1 {
		a[i], a[j] = a[j], a[i]
	}
} | cgmrecord.go | 0.563138 | 0.587233 | cgmrecord.go | starcoder
package yasup
import (
crypto "crypto/rand"
"math/big"
"math/rand"
)
var zeroValueComplex64 complex64
//Complex64Insert will append elem at the position i. Might return ErrIndexOutOfBounds.
func Complex64Insert(sl *[]complex64, elem complex64, i int) error {
	if i < 0 || i > len(*sl) {
		return ErrIndexOutOfBounds
	}
	s := append(*sl, 0) // grow by one placeholder slot
	copy(s[i+1:], s[i:])
	s[i] = elem
	*sl = s
	return nil
}

//Complex64Delete deletes the element at position i. Might return ErrIndexOutOfBounds.
func Complex64Delete(sl *[]complex64, i int) error {
	if i < 0 || i >= len(*sl) {
		return ErrIndexOutOfBounds
	}
	s := *sl
	*sl = append(s[:i], s[i+1:]...)
	return nil
}
//Complex64Contains will return true if elem is present in the slice and false otherwise.
func Complex64Contains(sl []complex64, elem complex64) bool {
	return Complex64Index(sl, elem) >= 0
}

//Complex64Index returns the index of the first instance of elem, or -1 if elem is not present.
func Complex64Index(sl []complex64, elem complex64) int {
	for i, v := range sl {
		if v == elem {
			return i
		}
	}
	return -1
}

//Complex64LastIndex returns the index of the last instance of elem in the slice, or -1 if elem is not present.
func Complex64LastIndex(sl []complex64, elem complex64) int {
	for i := len(sl) - 1; i >= 0; i-- {
		if sl[i] == elem {
			return i
		}
	}
	return -1
}

//Complex64Count will return an int representing the amount of times that elem is present in the slice.
func Complex64Count(sl []complex64, elem complex64) int {
	count := 0
	for _, v := range sl {
		if v == elem {
			count++
		}
	}
	return count
}
//Complex64Push is equivalent to Complex64Insert with index len(*sl).
//The returned error is ignored because an index of len(*sl) is always valid.
func Complex64Push(sl *[]complex64, elem complex64) {
	Complex64Insert(sl, elem, len(*sl))
}

//Complex64FrontPush is equivalent to Complex64Insert with index 0.
//The returned error is ignored because index 0 is always valid.
func Complex64FrontPush(sl *[]complex64, elem complex64) {
	Complex64Insert(sl, elem, 0)
}

//Complex64Pop is equivalent to getting and removing the last element of the slice. Might return ErrEmptySlice.
func Complex64Pop(sl *[]complex64) (complex64, error) {
	if len(*sl) == 0 {
		return zeroValueComplex64, ErrEmptySlice
	}
	last := len(*sl) - 1
	ret := (*sl)[last]
	Complex64Delete(sl, last)
	return ret, nil
}

//Complex64FrontPop is equivalent to getting and removing the first element of the slice. Might return ErrEmptySlice.
func Complex64FrontPop(sl *[]complex64) (complex64, error) {
	if len(*sl) == 0 {
		return zeroValueComplex64, ErrEmptySlice
	}
	ret := (*sl)[0]
	Complex64Delete(sl, 0)
	return ret, nil
}
//Complex64Replace modifies the slice with the first n non-overlapping instances of old replaced by new. If n equals -1, there is no limit on the number of replacements.
func Complex64Replace(sl []complex64, old, new complex64, n int) (replacements int) {
	remaining := n
	for i := range sl {
		if remaining == 0 {
			break // replacement budget exhausted (never hit when n == -1)
		}
		if sl[i] == old {
			sl[i] = new
			remaining--
		}
	}
	return n - remaining
}

//Complex64ReplaceAll is equivalent to Complex64Replace with n = -1.
func Complex64ReplaceAll(sl []complex64, old, new complex64) (replacements int) {
	return Complex64Replace(sl, old, new, -1)
}
//Complex64Equals compares two complex64 slices. Returns true if their elements are equal.
func Complex64Equals(a, b []complex64) bool {
	if len(a) != len(b) {
		return false
	}
	for i, v := range a {
		if v != b[i] {
			return false
		}
	}
	return true
}
//Complex64FastShuffle will randomly swap the complex64 elements of a slice using math/rand (fast but not cryptographically secure).
//The global math/rand source is used.
func Complex64FastShuffle(sp []complex64) {
	rand.Shuffle(len(sp), func(i, j int) {
		sp[i], sp[j] = sp[j], sp[i]
	})
}
//Complex64SecureShuffle will randomly swap the complex64 elements of a slice using crypto/rand (resource intensive but cryptographically secure).
//
// Standard Fisher-Yates: each position i (from the end) is swapped with a
// uniformly chosen index in [0, i]. The previous version passed max 0 to
// crypto.Int for a one-element slice, which panics, and never allowed an
// element to remain at the highest index, biasing the shuffle.
func Complex64SecureShuffle(sp []complex64) error {
	for i := len(sp) - 1; i > 0; i-- {
		bigJ, err := crypto.Int(crypto.Reader, big.NewInt(int64(i)+1))
		if err != nil {
			return err
		}
		j := bigJ.Int64()
		sp[i], sp[j] = sp[j], sp[i]
	}
	return nil
} // | complex64Slices.go | 0.711331 | 0.467696 | complex64Slices.go | starcoder
package main
import (
"fmt"
"image"
"image/color"
"image/png"
"math"
"os"
"strconv"
"time"
)
// WIDTH defines the width of the created image
const WIDTH = 640

// HEIGHT defines the height of the created image
const HEIGHT = 400

// euclideanDistance returns the straight-line distance between the two
// integer grid points (x1, y1) and (x2, y2).
func euclideanDistance(x1, y1, x2, y2 int) float64 {
	dx := float64(x2 - x1)
	dy := float64(y2 - y1)
	return math.Sqrt(math.Pow(dx, 2) + math.Pow(dy, 2))
}

// fivePoints deterministically derives five pseudo-random points inside
// the image from randNum, one per small prime divisor.
func fivePoints(randNum int64) [5][2]int {
	primes := [5]int64{7, 11, 13, 17, 19}
	var pts [5][2]int
	for i, d := range primes {
		pts[i][0] = int((randNum/d + randNum%(135*d)) % WIDTH)
		pts[i][1] = int((3*randNum/d + randNum%(287*d)) % HEIGHT)
	}
	return pts
}
// drawRandom fills img pixel by pixel. Each color channel is a
// distance-based pattern derived from five pseudo-random anchor points,
// remapped toward white so the image stays bright.
func drawRandom(img *image.RGBA, randNum int64) {
	anchors := fivePoints(randNum)
	// compress [0,255] into [36,255] for a brighter palette
	brighten := func(v int64) int64 {
		return (220*v)/256 + 36
	}
	for x := 0; x < WIDTH; x++ {
		for y := 0; y < HEIGHT; y++ {
			dA := euclideanDistance(x, y, anchors[0][0], anchors[0][1]) + 2*euclideanDistance(x, y, anchors[1][0], anchors[1][1])
			dB := euclideanDistance(x, y, anchors[2][0], anchors[2][1]) + dA - 5*euclideanDistance(x, y, anchors[3][0], anchors[3][1])
			dC := euclideanDistance(x, y, anchors[4][0], anchors[4][1])
			r := brighten(int64(dA) % 256)
			g := brighten(int64(dB) % 256)
			b := brighten(int64(dC) % 256)
			img.Set(x, y, color.RGBA{uint8(r), uint8(g), uint8(b), 255})
		}
	}
}
// Draw actually draws an image based on `randNum` and stores the result at `filepath`.
// The destination file is created (truncating any existing file) and the
// image is encoded as PNG.
func Draw(filepath string, randNum int64) error {
	img := image.NewRGBA(image.Rectangle{image.Point{0, 0}, image.Point{WIDTH, HEIGHT}})
	drawRandom(img, randNum)
	fd, err := os.Create(filepath)
	if err != nil {
		return err
	}
	defer fd.Close()
	return png.Encode(fd, img)
}
func main() {
var num int64
var filepath string
switch len(os.Args) {
case 1:
num = time.Now().Unix()
fmt.Printf("using current time as random seed: %d\n", num)
fallthrough
case 2:
filepath = "randimg.png"
case 3:
default:
panic(fmt.Errorf("usage: ./randimg <uint:seed> <str:output.png>"))
}
if len(os.Args) > 2 {
i, err := strconv.ParseInt(os.Args[1], 10, 64)
if err != nil {
panic(err)
}
num = i
}
if len(os.Args) > 3 {
filepath = os.Args[2]
}
if err := Draw(filepath, num); err != nil {
panic(err)
}
} | randimg/random_image.go | 0.670608 | 0.447702 | random_image.go | starcoder |
package ksmooth
import (
"errors"
"github.com/iamthebot/gostat/stat"
"math"
)
// NWDPSmoother is a Nadaraya-Watson kernel smoother over float64 samples.
type NWDPSmoother struct {
	Bandwidth float64
	// Radius is the number of neighbouring samples considered on the wide
	// side of the smoothing window.
	Radius int
	// Kernel maps (distance from center, bandwidth) to a weight.
	Kernel func(float64, float64) float64
}

//Allocates a new Nadraya-Watson kernel smoother using gaussian kernel
func NewNWDPGaussianSmoother(bandwidth float64, density float64) (*NWDPSmoother, error) {
	//Create NWSmoother
	s := NWDPSmoother{
		Bandwidth: bandwidth,
		Kernel:    KernelGaussian,
	}
	//Calculate Radius
	//we use density as 2-tailed probability in InverseNormalCDF
	// NOTE(review): NormalInv_CDF is presumably the inverse normal CDF
	// with std dev bandwidth/2 — confirm against the gostat API.
	tail := (1.0 - density) / 2.0
	s.Radius = int(math.Max(float64(stat.NormalInv_CDF(float64(1.0-tail), float64(bandwidth/2.0))), 1.0))
	return &s, nil
}

//Computes Nadraya-Watson regression at given point
//x is the x value which we want smoothed
//inputs is the slice of y values.
//the length we should assume for the input array
func (s NWDPSmoother) SmoothPoint(x int, inputs []float64, length int) (float64, error) {
	if x < 0 {
		return 0.0, errors.New("x is out of range")
	} else if length <= 0 {
		return 0.0, errors.New("length must be a positive integer")
	}
	// NOTE(review): the window is asymmetric — Radius samples to the left
	// but only Radius/2 to the right — confirm this is intentional.
	low := 0
	if x-s.Radius > low {
		low = x - s.Radius
	}
	high := length - 1
	if x+s.Radius/2 < high {
		high = x + s.Radius/2
	}
	// weighted average of the samples in [low, high]
	// NOTE(review): if x is far beyond length-1 the window is empty and
	// the result is 0/0 = NaN; x is only bounds-checked against 0 above.
	numSum := 0.0
	denSum := 0.0
	for i := low; i <= high; i++ {
		weight := s.Kernel(float64(i-x), s.Bandwidth/2.0)
		numSum += weight * inputs[i]
		denSum += weight
	}
	return numSum / denSum, nil
}
// NWSPSmoother is the single-precision (float32) variant of NWDPSmoother.
type NWSPSmoother struct {
	Bandwidth float32
	Radius    int
	Kernel    func(float32, float32) float32
}

//Allocates a new Nadraya-Watson kernel smoother using gaussian kernel
func NewNWSPGaussianSmoother(bandwidth float32, density float32) (*NWSPSmoother, error) {
	//Create NWSmoother
	s := NWSPSmoother{
		Bandwidth: bandwidth,
		Kernel:    KernelGaussianSP,
	}
	//Calculate Radius
	//we use density as 2-tailed probability in InverseNormalCDF
	tail := (1.0 - density) / 2.0
	s.Radius = int(math.Max(float64(stat.NormalInv_CDF(float64(1.0-tail), float64(bandwidth/2.0))), 1.0))
	return &s, nil
}

//Computes Nadraya-Watson regression at given point
//x is the x value which we want smoothed
//inputs is the slice of y values.
//the length we should assume for the input array
func (s NWSPSmoother) SmoothPoint(x int, inputs []float32, length int) (float32, error) {
	if x < 0 {
		return 0.0, errors.New("x is out of range")
	} else if length <= 0 {
		return 0.0, errors.New("length must be a positive integer")
	}
	// NOTE(review): same asymmetric window and potential empty-window NaN
	// as the float64 version — confirm intent.
	low := 0
	if x-s.Radius > low {
		low = x - s.Radius
	}
	high := length - 1
	if x+s.Radius/2 < high {
		high = x + s.Radius/2
	}
	numSum := float32(0.0)
	denSum := float32(0.0)
	for i := low; i <= high; i++ {
		weight := s.Kernel(float32(i-x), s.Bandwidth/2.0)
		numSum += weight * inputs[i]
		denSum += weight
	}
	return numSum / denSum, nil
} | smoother.go | 0.77586 | 0.448245 | smoother.go | starcoder
package main
import (
"fmt"
"math"
"strconv"
"strings"
)
// Direction is one of the four cardinal directions: DeltaX/DeltaY are the
// unit step taken when moving, TurnLeft/TurnRight yield the neighbouring
// direction (wired up in main).
type Direction struct {
	DeltaX    int
	DeltaY    int
	TurnLeft  func() *Direction
	TurnRight func() *Direction
}

// Command is one parsed instruction: turn, then walk MoveDistance blocks.
type Command struct {
	TurnDirection string
	MoveDistance  int
}

// Vector is a 2D grid position.
type Vector struct {
	X int
	Y int
}

// String renders the position as "x:y"; used as a map key for visited
// locations.
func (v Vector) String() string {
	return fmt.Sprintf("%d:%d", v.X, v.Y)
}

// ParseInput parses an instruction like "R2" or "L13" into a Command.
// The distance parse error is deliberately ignored: the puzzle input is
// trusted.
func ParseInput(input string) *Command {
	input = strings.TrimSpace(input)
	result := &Command{}
	if input[0] == 'L' {
		result.TurnDirection = "left"
	} else {
		result.TurnDirection = "right"
	}
	result.MoveDistance, _ = strconv.Atoi(string(input[1:]))
	return result
}

// GetManhattanDistance returns |dx| + |dy| between the two positions.
func GetManhattanDistance(vector1, vector2 Vector) int {
	return int(math.Abs(float64(vector1.X)-float64(vector2.X)) + math.Abs(float64(vector1.Y)-float64(vector2.Y)))
}

// Visited reports whether vector was already recorded in visitedLocations.
func Visited(visitedLocations map[string]Vector, vector Vector) bool {
	var ok bool
	_, ok = visitedLocations[vector.String()]
	return ok
}
func main() {
var puzzleInput string
east := &Direction{DeltaX: -1, DeltaY: 0}
north := &Direction{DeltaX: 0, DeltaY: 1}
south := &Direction{DeltaX: 0, DeltaY: -1}
west := &Direction{DeltaX: 1, DeltaY: 0}
east.TurnLeft = func() *Direction { return north }
east.TurnRight = func() *Direction { return south }
north.TurnLeft = func() *Direction { return west }
north.TurnRight = func() *Direction { return east }
south.TurnLeft = func() *Direction { return east }
south.TurnRight = func() *Direction { return west }
west.TurnLeft = func() *Direction { return south }
west.TurnRight = func() *Direction { return north }
//puzzleInput = "R8, R4, R4, R8"
//puzzleInput = "R2, L2, R2, R2, R2"
puzzleInput = "R5, L2, L1, R1, R3, R3, L3, R3, R4, L2, R4, L4, R4, R3, L2, L1, L1, R2, R4, R4, L4, R3, L2, R1, L4, R1, R3, L5, L4, L5, R3, L3, L1, L1, R4, R2, R2, L1, L4, R191, R5, L2, R46, R3, L1, R74, L2, R2, R187, R3, R4, R1, L4, L4, L2, R4, L5, R4, R3, L2, L1, R3, R3, R3, R1, R1, L4, R4, R1, R5, R2, R1, R3, L4, L2, L2, R1, L3, R1, R3, L5, L3, R5, R3, R4, L1, R3, R2, R1, R2, L4, L1, L1, R3, L3, R4, L2, L4, L5, L5, L4, R2, R5, L4, R4, L2, R3, L4, L3, L5, R5, L4, L2, R3, R5, R5, L1, L4, R3, L1, R2, L5, L1, R4, L1, R5, R1, L4, L4, L4, R4, R3, L5, R1, L3, R4, R3, L2, L1, R1, R2, R2, R2, L1, L1, L2, L5, L3, L1"
commands := strings.Split(puzzleInput, ", ")
parsedCommand := &Command{}
currentDirection := north
origin := Vector{X: 0, Y: 0}
currentPosition := Vector{X: 0, Y: 0}
visited := make(map[string]Vector)
nextCommand := ""
visited[origin.String()] = origin
for _, nextCommand = range commands {
parsedCommand = ParseInput(nextCommand)
if parsedCommand.TurnDirection == "left" {
currentDirection = currentDirection.TurnLeft()
} else {
currentDirection = currentDirection.TurnRight()
}
for dx := 0; dx < parsedCommand.MoveDistance; dx++ {
currentPosition.X += currentDirection.DeltaX
currentPosition.Y += currentDirection.DeltaY
if Visited(visited, currentPosition) {
break
}
visited[currentPosition.String()] = currentPosition
}
}
fmt.Printf("We are now %d blocks away from our starting point.\n", GetManhattanDistance(origin, currentPosition))
} | day1/part2/part2.go | 0.594787 | 0.439266 | part2.go | starcoder |
package deque
type (
	// Deque is a generic double-ended queue implemented as a doubly-linked
	// list. head and tail are nil exactly when the deque is empty.
	Deque[T any] struct {
		head   *node[T]
		tail   *node[T]
		length int
	}

	// node is a doubly-linked list node used by Deque.
	node[T any] struct {
		Value T
		Next  *node[T]
		Prev  *node[T]
	}
)

// NewDeque returns an empty, ready-to-use Deque.
//
// The previous version allocated two disconnected placeholder nodes here;
// popping a fresh deque then walked off a placeholder, driving length to
// -1 so Empty() never reported true again and a later push dereferenced a
// nil tail. An empty deque is now simply head == tail == nil.
func NewDeque[T any]() *Deque[T] {
	return &Deque[T]{}
}

// Empty checks if Deque has no element.
func (d *Deque[T]) Empty() bool {
	return d.length == 0
}

// Size returns the number of elements in Deque.
// Complexity - O(1).
func (d *Deque[T]) Size() int {
	return d.length
}

// PushBack adds element to the end of Deque.
// Complexity - O(1).
func (d *Deque[T]) PushBack(element T) {
	n := &node[T]{Value: element, Prev: d.tail}
	if d.tail == nil {
		d.head = n
	} else {
		d.tail.Next = n
	}
	d.tail = n
	d.length++
}

// PushFront adds element before the first element of Deque.
// Complexity - O(1).
func (d *Deque[T]) PushFront(element T) {
	n := &node[T]{Value: element, Next: d.head}
	if d.head == nil {
		d.tail = n
	} else {
		d.head.Prev = n
	}
	d.head = n
	d.length++
}

// PopBack returns and removes the last element from Deque.
// On an empty Deque it returns the zero value of T.
// Complexity - O(1).
func (d *Deque[T]) PopBack() T {
	if d.tail == nil {
		var zero T
		return zero
	}
	n := d.tail
	d.tail = n.Prev
	if d.tail == nil {
		d.head = nil
	} else {
		// drop the stale link so the popped node can be collected
		d.tail.Next = nil
	}
	n.Prev = nil
	d.length--
	return n.Value
}

// PopFront returns and removes the first element from Deque.
// On an empty Deque it returns the zero value of T.
// Complexity - O(1).
func (d *Deque[T]) PopFront() T {
	if d.head == nil {
		var zero T
		return zero
	}
	n := d.head
	d.head = n.Next
	if d.head == nil {
		d.tail = nil
	} else {
		// drop the stale link so the popped node can be collected
		d.head.Prev = nil
	}
	n.Next = nil
	d.length--
	return n.Value
}

// Front returns the value of the first element in Deque.
// On an empty Deque it returns the zero value of T.
// Complexity - O(1).
func (d *Deque[T]) Front() T {
	if d.head == nil {
		var zero T
		return zero
	}
	return d.head.Value
}

// Back returns the value of the last element in Deque.
// On an empty Deque it returns the zero value of T.
// Complexity - O(1).
func (d *Deque[T]) Back() T {
	if d.tail == nil {
		var zero T
		return zero
	}
	return d.tail.Value
}
// reset clears the deque back to its empty zero state.
func (d *Deque[T]) reset() {
	d.tail = nil
	d.head = nil
	d.length = 0
} | containers/deque/deque.go | 0.597256 | 0.525917 | deque.go | starcoder
package gates
import "math"
// Unit carries a scalar value through the circuit together with the
// gradient accumulated during the backward pass.
type Unit struct {
	Value    float64
	Gradient float64
}

// Gate is implemented by every circuit element: Forward computes the
// output unit from the inputs, Backward chains the output gradient back
// onto the inputs.
type Gate interface {
	Forward()
	Backward()
}

// MultiplyGate computes UOut = U0 * U1.
type MultiplyGate struct {
	U0   *Unit
	U1   *Unit
	UOut *Unit
}

// Forward computes the product and clears all gradients for a new pass.
func (mg MultiplyGate) Forward() {
	mg.U0.Gradient = 0.0
	mg.U1.Gradient = 0.0
	mg.UOut.Value = mg.U0.Value * mg.U1.Value
	mg.UOut.Gradient = 0.0
}

// Backward accumulates d(out)/d(input) * out.Gradient into each input.
func (mg MultiplyGate) Backward() {
	mg.U0.Gradient += mg.UOut.Gradient * mg.U1.Value
	mg.U1.Gradient += mg.UOut.Gradient * mg.U0.Value
}

// AddGate computes UOut = U0 + U1.
type AddGate struct {
	U0   *Unit
	U1   *Unit
	UOut *Unit
}

// Forward computes the sum and clears all gradients for a new pass.
func (ag AddGate) Forward() {
	ag.U0.Gradient = 0.0
	ag.U1.Gradient = 0.0
	ag.UOut.Value = ag.U0.Value + ag.U1.Value
	ag.UOut.Gradient = 0.0
}

// Backward passes the output gradient through unchanged (d/dx of x+y is 1).
func (ag AddGate) Backward() {
	ag.U0.Gradient += ag.UOut.Gradient
	ag.U1.Gradient += ag.UOut.Gradient
}

// SubGate computes UOut = U0 - U1.
type SubGate struct {
	U0   *Unit
	U1   *Unit
	UOut *Unit
}

// Forward computes the difference and clears all gradients for a new pass.
func (sg SubGate) Forward() {
	sg.U0.Gradient = 0.0
	sg.U1.Gradient = 0.0
	sg.UOut.Value = sg.U0.Value - sg.U1.Value
	sg.UOut.Gradient = 0.0
}

// Backward applies +1 to the minuend's gradient and -1 to the subtrahend's.
func (sg SubGate) Backward() {
	sg.U0.Gradient += sg.UOut.Gradient
	sg.U1.Gradient -= sg.UOut.Gradient
}

// PowerGate computes UOut = U0 ^ Power.
type PowerGate struct {
	U0    *Unit
	UOut  *Unit
	Power float64
}

// Forward computes the power and clears the gradients for a new pass.
func (pg PowerGate) Forward() {
	pg.U0.Gradient = 0.0
	pg.UOut.Value = math.Pow(pg.U0.Value, pg.Power)
	pg.UOut.Gradient = 0.0
}

// Backward applies the power rule d/dx x^p = p * x^(p-1).
func (pg PowerGate) Backward() {
	pg.U0.Gradient += pg.UOut.Gradient * pg.Power * math.Pow(pg.U0.Value, pg.Power-1.0)
}

// sigmoid is the logistic function 1 / (1 + e^-x).
func sigmoid(x float64) float64 {
	return 1 / (1 + math.Exp(-1.0*x))
}

// SigmoidGate applies the sigmoid nonlinearity: UOut = sigmoid(U0).
type SigmoidGate struct {
	U0   *Unit
	UOut *Unit
}

// Forward computes the sigmoid and clears the gradients for a new pass.
func (g SigmoidGate) Forward() {
	g.U0.Gradient = 0.0
	g.UOut.Value = sigmoid(g.U0.Value)
	g.UOut.Gradient = 0.0
}
// Backward chains the output gradient through the sigmoid derivative
// s(x) * (1 - s(x)); sigmoid(g.U0.Value) is recomputed twice here.
func (g SigmoidGate) Backward() {
	g.U0.Gradient = g.U0.Gradient + g.UOut.Gradient * sigmoid(g.U0.Value) * (1 - sigmoid(g.U0.Value))
} | src/go_NN/gates/gates.go | 0.851259 | 0.402451 | gates.go | starcoder
package datastructs
// BSTNode is one node of a binary search tree, holding links to both
// children and back to its parent.
type BSTNode struct {
	Value      int
	LeftChild  *BSTNode
	RightChild *BSTNode
	Parent     *BSTNode
}

// BSTree is a binary search tree of ints. RootNode is held by value, so
// the root is replaced in place when it is deleted; deleting the last
// remaining node is not supported.
type BSTree struct {
	RootNode BSTNode
}

// MakeBSTree returns a tree whose root holds rootVal.
func MakeBSTree(rootVal int) BSTree {
	return BSTree{RootNode: BSTNode{Value: rootVal, LeftChild: nil, RightChild: nil, Parent: nil}}
}

// Insert adds value to the tree; duplicates are ignored.
func (tree *BSTree) Insert(value int) *BSTree {
	insertNode(&(tree.RootNode), value)
	return tree
}

// insertNode walks down from currentNode and attaches a new leaf in BST
// order.
func insertNode(currentNode *BSTNode, value int) {
	switch {
	case value == currentNode.Value:
		// node already exists, do nothing
	case value < currentNode.Value:
		if currentNode.LeftChild != nil {
			insertNode(currentNode.LeftChild, value)
		} else {
			currentNode.LeftChild = &BSTNode{Value: value, Parent: currentNode}
		}
	default:
		if currentNode.RightChild != nil {
			insertNode(currentNode.RightChild, value)
		} else {
			currentNode.RightChild = &BSTNode{Value: value, Parent: currentNode}
		}
	}
}

// Delete removes value from the tree if present.
func (tree *BSTree) Delete(value int) *BSTree {
	deleteNode(&(tree.RootNode), value)
	return tree
}

// deleteNode finds value below currentNode and unlinks it, using the
// standard three BST cases: leaf, single child, two children.
func deleteNode(currentNode *BSTNode, value int) {
	if value == currentNode.Value {
		switch {
		case currentNode.LeftChild == nil && currentNode.RightChild == nil:
			replaceCurrent(currentNode, nil)
		case currentNode.RightChild == nil:
			replaceCurrent(currentNode, currentNode.LeftChild)
		case currentNode.LeftChild == nil:
			replaceCurrent(currentNode, currentNode.RightChild)
		default:
			// two children: copy the in-order successor's value here and
			// delete the successor from the right subtree instead
			inOrderSuccessor := leftLeaf(currentNode.RightChild)
			currentNode.Value = inOrderSuccessor.Value
			deleteNode(inOrderSuccessor, inOrderSuccessor.Value)
		}
	} else if value < currentNode.Value {
		if currentNode.LeftChild != nil {
			deleteNode(currentNode.LeftChild, value)
		}
		// else: value not in tree, nothing to do
	} else {
		if currentNode.RightChild != nil {
			deleteNode(currentNode.RightChild, value)
		}
		// else: value not in tree, nothing to do
	}
}

// replaceCurrent unlinks currentNode, putting newNode (possibly nil) in
// its place while keeping every Parent pointer consistent.
//
// The previous version never updated newNode.Parent, so a promoted node
// kept pointing at its removed parent; a later delete of that node then
// unlinked it from the detached parent instead of the live tree, leaving
// the value reachable. Root replacement had the mirror-image bug: the
// copied-in node's children still pointed at the old node.
func replaceCurrent(currentNode *BSTNode, newNode *BSTNode) {
	if currentNode.Parent != nil {
		if currentNode.Parent.LeftChild == currentNode {
			currentNode.Parent.LeftChild = newNode
		} else {
			currentNode.Parent.RightChild = newNode
		}
		if newNode != nil {
			newNode.Parent = currentNode.Parent
		}
		return
	}
	// deleting the root: copy the replacement into the root struct; a nil
	// replacement would mean deleting the only node, which is unsupported
	if newNode != nil {
		*currentNode = *newNode
		currentNode.Parent = nil
		// the copied node's children must point back at the root struct
		if currentNode.LeftChild != nil {
			currentNode.LeftChild.Parent = currentNode
		}
		if currentNode.RightChild != nil {
			currentNode.RightChild.Parent = currentNode
		}
	}
}

// leftLeaf returns the leftmost node of the subtree rooted at currentNode.
func leftLeaf(currentNode *BSTNode) *BSTNode {
	if currentNode.LeftChild != nil {
		return leftLeaf(currentNode.LeftChild)
	}
	return currentNode
}
// ReturnInOrder returns the tree's values in ascending order.
func (tree *BSTree) ReturnInOrder() []int {
	acc := make([]int, 0, 100)
	return returnInOrder(&(tree.RootNode), acc)
}
// returnInOrder appends the subtree's values to orderedValues via an
// in-order (left, node, right) traversal and returns the extended slice.
func returnInOrder(node *BSTNode, orderedValues []int) []int {
	if node != nil {
		orderedValues = returnInOrder(node.LeftChild, orderedValues)
		orderedValues = append(orderedValues, node.Value)
		orderedValues = returnInOrder(node.RightChild, orderedValues)
	}
	return orderedValues
} | datastructs/binary_tree.go | 0.553023 | 0.587263 | binary_tree.go | starcoder
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.