code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1 value |
|---|---|---|---|---|---|
package chart
import (
"fmt"
"math"
"strings"
"github.com/beevee/go-chart/util"
)
// TicksProvider is a type that provides ticks for an axis.
type TicksProvider interface {
	// GetTicks returns the ticks to render, using the renderer for text
	// measurement, a default style, and a value formatter for labels.
	GetTicks(r Renderer, defaults Style, vf ValueFormatter) []Tick
}

// Tick represents a label on an axis.
type Tick struct {
	Value float64 // position of the tick in data space
	Label string  // pre-formatted text drawn at that position
}

// Ticks is an array of ticks; it implements sort.Interface so a tick set
// can be ordered by value.
type Ticks []Tick
// Len returns the length of the ticks set (sort.Interface).
func (t Ticks) Len() int {
	return len(t)
}

// Swap swaps the ticks at indexes i and j (sort.Interface).
func (t Ticks) Swap(i, j int) {
	t[i], t[j] = t[j], t[i]
}

// Less reports whether the tick at i has a smaller value than the tick at j
// (sort.Interface).
func (t Ticks) Less(i, j int) bool {
	return t[i].Value < t[j].Value
}
// String returns a human-readable representation of the tick set, listing
// each tick's index and label, comma separated.
func (t Ticks) String() string {
	parts := make([]string, len(t))
	for index := range t {
		parts[index] = fmt.Sprintf("[%d: %s]", index, t[index].Label)
	}
	return strings.Join(parts, ", ")
}
// GenerateContinuousTicks generates a set of evenly spaced ticks across the
// range, always bounded on both ends by the range min and max. The number
// of intermediate ticks is derived from how many rendered labels (plus
// minimum spacing) fit in the range's pixel domain, clamped by
// DefaultTickCountSanityCheck.
func GenerateContinuousTicks(r Renderer, ra Range, isVertical bool, style Style, vf ValueFormatter) []Tick {
	if vf == nil {
		vf = FloatValueFormatter
	}

	var ticks []Tick
	min, max := ra.GetMin(), ra.GetMax()

	// First tick is whichever boundary the axis starts on.
	if ra.IsDescending() {
		ticks = append(ticks, Tick{Value: max, Label: vf(max)})
	} else {
		ticks = append(ticks, Tick{Value: min, Label: vf(min)})
	}

	// Measure a representative label to estimate the space one tick needs.
	minLabel := vf(min)
	style.GetTextOptions().WriteToRenderer(r)
	labelBox := r.MeasureText(minLabel)

	var tickSize float64
	if isVertical {
		tickSize = float64(labelBox.Height() + DefaultMinimumTickVerticalSpacing)
	} else {
		tickSize = float64(labelBox.Width() + DefaultMinimumTickHorizontalSpacing)
	}

	// Reserve room for the two boundary ticks, then fit intermediates into
	// what remains.
	domain := float64(ra.GetDomain())
	domainRemainder := domain - (tickSize * 2)
	intermediateTickCount := int(math.Floor(domainRemainder / tickSize))
	intermediateTickCount = util.Math.MinInt(intermediateTickCount, DefaultTickCountSanityCheck)

	rangeDelta := math.Abs(max - min)
	roundTo := util.Math.GetRoundToForDelta(rangeDelta) / 10

	// Guard against a zero/negative count, which would otherwise make the
	// step Inf or NaN; the step is computed from the clamped count so that
	// when the sanity check engages the ticks still span the whole range
	// instead of clustering near the origin.
	if intermediateTickCount > 0 {
		tickStep := rangeDelta / float64(intermediateTickCount)
		for x := 1; x < intermediateTickCount; x++ {
			var tickValue float64
			if ra.IsDescending() {
				tickValue = max - util.Math.RoundUp(tickStep*float64(x), roundTo)
			} else {
				tickValue = min + util.Math.RoundUp(tickStep*float64(x), roundTo)
			}
			ticks = append(ticks, Tick{Value: tickValue, Label: vf(tickValue)})
		}
	}

	// Last tick is the opposite boundary.
	if ra.IsDescending() {
		ticks = append(ticks, Tick{Value: min, Label: vf(min)})
	} else {
		ticks = append(ticks, Tick{Value: max, Label: vf(max)})
	}
	return ticks
}
// GeneratePrettyContinuousTicks generates a set of ticks at visually pleasing
// intervals. Based on the extended-Wilkinson labeling algorithm,
// http://vis.stanford.edu/files/2010-TickLabels-InfoVis.pdf (Talbot et al.).
//
// The algorithm searches over (pretty step, tick count, power-of-ten scale,
// grid offset) candidates, scoring each on simplicity, coverage, density and
// legibility, and keeps the best-scoring labeling. Upper-bound scores
// (simplicityMax, densityMax, coverageMax) prune branches that cannot beat
// the current best.
func GeneratePrettyContinuousTicks(r Renderer, ra Range, isVertical bool, style Style, vf ValueFormatter) []Tick {
	if vf == nil {
		vf = FloatValueFormatter
	}
	// Candidate step multipliers, ordered from most to least "pretty".
	prettyStepsPriorityList := []float64{1, 5, 2, 2.5, 4, 3}
	// Relative weights of the four scoring criteria from the paper.
	paramWeights := map[string]float64{
		"simplicity": 0.2,
		"coverage":   0.25,
		"density":    0.5,
		"legibility": 0.05,
	}
	rangeMin, rangeMax := ra.GetMin(), ra.GetMax()
	// Degenerate ranges (empty interval or zero-pixel domain) get no ticks.
	if rangeMin >= rangeMax || ra.GetDomain() == 0 {
		return []Tick{}
	}
	// Measure a representative label to estimate per-tick space requirements.
	renderedLabelExample := vf(rangeMin)
	style.GetTextOptions().WriteToRenderer(r)
	renderedLabelSizePx := r.MeasureText(renderedLabelExample)
	var actualLabelSizePx, desiredPaddedLabelSizePx float64
	if isVertical {
		actualLabelSizePx = math.Max(float64(renderedLabelSizePx.Height()), 1)
		desiredPaddedLabelSizePx = actualLabelSizePx + DefaultMinimumTickVerticalSpacing
	} else {
		actualLabelSizePx = math.Max(float64(renderedLabelSizePx.Width()), 1)
		desiredPaddedLabelSizePx = actualLabelSizePx + DefaultMinimumTickHorizontalSpacing
	}
	availableSpacePx := float64(ra.GetDomain())
	desiredTicksCount := math.Min(
		math.Max(math.Floor(availableSpacePx/desiredPaddedLabelSizePx), 2), // less than 2 leads to incorrect density calculation
		DefaultTickCountSanityCheck)
	prettyStepsCount := float64(len(prettyStepsPriorityList))
	var bestTickMin, bestTickMax, bestTickStep float64
	// Any real candidate scores above -2, so the first feasible one wins.
	bestScore := -2.0
	stepsToSkip := 1.0
	// The outer loop grows stepsToSkip without bound; simplicityMax shrinks
	// as it grows, so the prune below is guaranteed to eventually fire and
	// break OUTER.
OUTER:
	for {
		for prettyStepIndex, prettyStep := range prettyStepsPriorityList {
			simplicityMax := calculateSimplicityMax(float64(prettyStepIndex), prettyStepsCount, stepsToSkip)
			// Prune: even perfect coverage/density/legibility cannot beat the
			// current best with this simplicity bound.
			if paramWeights["simplicity"]*simplicityMax+
				paramWeights["coverage"]+
				paramWeights["density"]+
				paramWeights["legibility"] < bestScore {
				break OUTER
			}
			ticksCount := 2.0
			for {
				densityMax := calculateDensityMax(ticksCount, desiredTicksCount)
				// Prune on the density bound for this tick count.
				if paramWeights["simplicity"]*simplicityMax+
					paramWeights["coverage"]+
					paramWeights["density"]*densityMax+
					paramWeights["legibility"] < bestScore {
					break
				}
				// Smallest plausible raw step for this candidate; its ceiling
				// log10 seeds the power-of-ten scale search.
				delta := (rangeMax - rangeMin) / (ticksCount + 1) / stepsToSkip / prettyStep
				stepSizeMultiplierLog := math.Ceil(math.Log10(delta))
				for {
					tickStep := stepsToSkip * prettyStep * math.Pow(10, stepSizeMultiplierLog)
					coverageMax := calculateCoverageMax(rangeMin, rangeMax, tickStep*(ticksCount-1))
					// Prune on the coverage bound for this step size.
					if paramWeights["simplicity"]*simplicityMax+
						paramWeights["coverage"]*coverageMax+
						paramWeights["density"]*densityMax+
						paramWeights["legibility"] < bestScore {
						break
					}
					// Feasible window of grid offsets (in units of
					// tickStep/stepsToSkip) that keep the tick span near the
					// data range.
					minStart := math.Floor(rangeMax/tickStep)*stepsToSkip - (ticksCount-1)*stepsToSkip
					maxStart := math.Ceil(rangeMin/tickStep) * stepsToSkip
					if minStart > maxStart {
						stepSizeMultiplierLog += 1
						continue
					}
					for start := minStart; start <= maxStart; start++ {
						tickMin := start * (tickStep / stepsToSkip)
						tickMax := tickMin + tickStep*(ticksCount-1)
						coverage := calculateCoverage(rangeMin, rangeMax, tickMin, tickMax)
						simplicity := calculateSimplicity(prettyStepsCount, float64(prettyStepIndex), stepsToSkip, tickMin, tickMax, tickStep)
						density := calculateDensity(ticksCount, desiredTicksCount, rangeMin, rangeMax, tickMin, tickMax)
						legibility := 1.0 // format is out of our control (provided by ValueFormatter)
						// font size is out of our control (provided by Style)
						// orientation is out of our control
						if actualLabelSizePx*ticksCount > availableSpacePx {
							legibility = math.Inf(-1) // overlap is unacceptable
						}
						score := paramWeights["simplicity"]*simplicity +
							paramWeights["coverage"]*coverage +
							paramWeights["density"]*density +
							paramWeights["legibility"]*legibility
						// original algorithm allows ticks outside value range, but it breaks rendering in this library
						if score > bestScore && tickMin >= rangeMin && tickMax <= rangeMax {
							bestTickMin = tickMin
							bestTickMax = tickMax
							bestTickStep = tickStep
							bestScore = score
						}
					}
					stepSizeMultiplierLog++
				}
				ticksCount++
			}
		}
		stepsToSkip++
	}
	var ticks []Tick
	// No feasible labeling found (bestTickStep never assigned).
	if bestTickStep == 0 {
		return ticks
	}
	// Emit ticks from the winning grid; the half-step tolerance in the loop
	// bound absorbs floating-point drift in the accumulated tick value.
	if ra.IsDescending() {
		for tickValue := bestTickMax; tickValue > bestTickMin-bestTickStep/2; tickValue -= bestTickStep {
			ticks = append(ticks, Tick{
				Value: tickValue,
				Label: vf(tickValue),
			})
		}
	} else {
		for tickValue := bestTickMin; tickValue < bestTickMax+bestTickStep/2; tickValue += bestTickStep {
			ticks = append(ticks, Tick{
				Value: tickValue,
				Label: vf(tickValue),
			})
		}
	}
	return ticks
}
// calculateSimplicity scores how "simple" a tick sequence is, per the
// Talbot et al. labeling algorithm: earlier entries in the pretty-step
// priority list score higher, skipped steps are penalized, and a bonus is
// granted when zero falls exactly on a tick.
func calculateSimplicity(prettyStepsCount, prettyStepIndex, stepsToSkip, tickMin, tickMax, tickStep float64) float64 {
	var hasZeroTick float64
	// Zero lies on the tick grid when it is inside [tickMin, tickMax] and
	// tickMin is an exact multiple of tickStep. math.Mod keeps the sign of
	// its first operand, so the remainder must be compared by absolute
	// value; otherwise every negative tickMin would spuriously earn the
	// zero-tick bonus regardless of grid alignment.
	if tickMin <= 0 && tickMax >= 0 && math.Abs(math.Mod(tickMin, tickStep)) < 1e-9 {
		hasZeroTick = 1
	}
	return 1 - prettyStepIndex/(prettyStepsCount-1) - stepsToSkip + hasZeroTick
}
// calculateSimplicityMax returns the best simplicity score attainable for a
// given pretty-step index and skip count, i.e. assuming the zero-tick bonus
// is earned. Used as a pruning bound in the labeling search.
func calculateSimplicityMax(prettyStepIndex, prettyStepsCount, stepsToSkip float64) float64 {
	indexPenalty := prettyStepIndex / (prettyStepsCount - 1)
	return 2 - indexPenalty - stepsToSkip
}
// calculateCoverage scores how closely the tick extent [tickMin, tickMax]
// matches the data range [rangeMin, rangeMax]: 1 is a perfect match, and the
// score falls off quadratically (relative to 10% of the range width) as the
// endpoints drift apart.
func calculateCoverage(rangeMin, rangeMax, tickMin, tickMax float64) float64 {
	maxGap := math.Pow(rangeMax-tickMax, 2)
	minGap := math.Pow(rangeMin-tickMin, 2)
	scale := math.Pow(0.1*(rangeMax-rangeMin), 2)
	return 1 - 0.5*(maxGap+minGap)/scale
}
// calculateCoverageMax returns an upper bound on the coverage score for a
// tick span of the given width: a perfect 1 while the span still fits inside
// the data range, otherwise the best score achievable once the span is
// centered on the range. Used as a pruning bound in the labeling search.
func calculateCoverageMax(rangeMin, rangeMax, span float64) float64 {
	width := rangeMax - rangeMin
	if span <= width {
		return 1
	}
	half := width / 2
	return 1 - math.Pow(half, 2)/math.Pow(0.1*width, 2)
}
// calculateDensity scores how closely the candidate tick spacing matches the
// desired spacing derived from available pixels: 1 when the densities match
// exactly, decreasing symmetrically whether the candidate is too dense or
// too sparse.
func calculateDensity(ticksCount, desiredTicksCount, rangeMin, rangeMax, tickMin, tickMax float64) float64 {
	actual := (ticksCount - 1) / (tickMax - tickMin)
	span := math.Max(tickMax, rangeMax) - math.Min(tickMin, rangeMin)
	desired := (desiredTicksCount - 1) / span
	ratio := math.Max(actual/desired, desired/actual)
	return 2 - ratio
}
// calculateDensityMax returns an upper bound on the density score for a
// candidate tick count: 1 while fewer ticks than desired are used, and a
// linearly decreasing score once the count exceeds the desired count. Used
// as a pruning bound in the labeling search.
func calculateDensityMax(ticksCount, desiredTicksCount float64) float64 {
	if ticksCount < desiredTicksCount {
		return 1
	}
	return 2 - (ticksCount-1)/(desiredTicksCount-1)
}
package gorules
import "fmt"
// Expression refers to any type that can be evaluated to a boolean result.
type Expression interface {
	// Evaluate resolves the expression to true/false, returning an error
	// when it cannot be evaluated.
	Evaluate() (bool, error)
}

// RuleExpression stores the value and target to be operated on; it can act
// with different operators.
type RuleExpression struct {
	Operator Operator `json:"operator"` // comparison/test to apply
	Value    string   `json:"value"`    // first operand passed to the operator
	Target   string   `json:"target"`   // second operand passed to the operator
}
// Evaluate applies the expression's operator to its value and target and
// returns the boolean result. It returns an error if the operator has no
// registered implementation (previously this was a nil-function panic, and
// a leftover debug Println ran on every evaluation).
func (v RuleExpression) Evaluate() (bool, error) {
	operatorFunc, ok := operatorFuncList[v.Operator]
	if !ok {
		return false, fmt.Errorf("no operator function registered for operator %v", v.Operator)
	}
	return operatorFunc(v.Value, v.Target)
}
// createRuleExpression builds a RuleExpression (with no target) from the
// textual operator name and a value, panicking when the operator text does
// not map to a known Operator.
func createRuleExpression(operatorText string, value string) Expression {
	operator, err := toOperator(operatorText)
	if err != nil {
		panic(err)
	}
	return RuleExpression{Operator: operator, Value: value}
}
// createRuleExpressionWithTarget builds a RuleExpression comparing value
// against target using the named operator, panicking when the operator text
// does not map to a known Operator.
func createRuleExpressionWithTarget(operatorText string, value string, target string) Expression {
	operator, err := toOperator(operatorText)
	if err != nil {
		panic(err)
	}
	return RuleExpression{Operator: operator, Value: value, Target: target}
}
// createRuleExpressionFromRuleStmt resolves a rule statement's source and
// target against the supplied data and builds a rule expression from the
// space-decoded results.
//
// NOTE(review): the errors from both Evaluate calls are silently discarded,
// so a failed lookup feeds a zero-value operand into the expression —
// confirm this best-effort behavior is intended.
func createRuleExpressionFromRuleStmt(rule RuleStatement, data map[string]interface{}) Expression {
	// fmt.Println("target", rule.Source, rule.Target.Evaluate(data))
	source, _ := rule.Source.Evaluate(data)
	target, _ := rule.Target.Evaluate(data)
	return createRuleExpressionWithTarget(rule.Operator, decodeSpace(source), decodeSpace(target))
}
// ConjunctionExpression combines any number of Expressions under a single
// conjunction (e.g. AND / OR), itself implementing Expression.
type ConjunctionExpression struct {
	Conjunction Conjunction  `json:"conjunction"` // how child results are combined
	Expressions []Expression `json:"expressions"` // child expressions to fold
}
// Evaluate folds all child expressions into one boolean. The evaluator
// function and the seed accumulator both come from
// conjunctionExprProperties for this conjunction type (semantics defined
// elsewhere — presumably AND seeds true, OR seeds false; confirm there).
//
// NOTE(review): errors from each evaluator step are discarded; only the
// final accumulator's Evaluate error is surfaced.
func (c ConjunctionExpression) Evaluate() (bool, error) {
	evaluator, accumlator := conjunctionExprProperties(c.Conjunction)
	// fmt.Println(c.Expressions)
	for _, e := range c.Expressions {
		var resultBool, _ = evaluator(accumlator, (e))
		accumlator = createBooleanExpression(resultBool)
	}
	return accumlator.Evaluate()
}
// Add appends expr to the conjunction's expression list and returns the
// updated value. Because the receiver is a value (not a pointer), the
// original is untouched — callers must use the returned copy.
func (c ConjunctionExpression) Add(expr Expression) ConjunctionExpression {
	c.Expressions = append(c.Expressions, expr)
	return c
}
// createAndConjunctionExpression wraps an expression in a new AND conjunction.
var createAndConjunctionExpression = createConjunctionExpression(And)

// createOrConjunctionExpression wraps an expression in a new OR conjunction.
var createOrConjunctionExpression = createConjunctionExpression(Or)

// createConjunctionExpression returns a factory that builds a
// ConjunctionExpression of the given conjunction type seeded with a single
// initial expression.
func createConjunctionExpression(conjunction Conjunction) func(Expression) Expression {
	return func(expr Expression) Expression {
		conj := ConjunctionExpression{Conjunction: conjunction}
		conj = conj.Add(expr)
		return conj
	}
}
// createConjuntionExprFromCollectionStmt builds a conjunction expression by
// applying the rule's operator to every element of a collection selected
// from the data; the rule's selector picks which conjunction combines the
// per-element results.
//
// NOTE(review): the type assertions on the selected array value
// (.([]interface{})) and on each element's value (.(string)) are unchecked
// and will panic if the data does not have the expected shape. The error
// from the per-element Target.Evaluate is discarded (best effort).
func createConjuntionExprFromCollectionStmt(ruleStmt RuleStatement, data map[string]interface{}) Expression {
	selector, err := toSelector(ruleStmt.Selector)
	if err != nil {
		panic(err)
	}
	conjExpr := selectorConjExprMap(selector)
	// Split the source path into the collection path and the per-element key.
	arrayPath, key := getArrayPathAndKey(ruleStmt.Source.String())
	arrayValue := selectValue(data, arrayPath).([]interface{})
	for _, x := range arrayValue {
		valueToCompare := selectValue(x.(map[string]interface{}), key).(string)
		target, _ := ruleStmt.Target.Evaluate(x)
		valExp := createRuleExpressionWithTarget(ruleStmt.Operator, valueToCompare, target)
		conjExpr = conjExpr.Add(valExp)
	}
	return conjExpr
}
// isConjunctionExpression reports whether expr's concrete type is
// ConjunctionExpression.
func isConjunctionExpression(expr Expression) bool {
	switch expr.(type) {
	case ConjunctionExpression:
		return true
	default:
		return false
	}
}
// BooleanExpression stores a constant true or false value as an Expression.
type BooleanExpression bool

// Evaluate implements Expression by returning the stored boolean; it never
// returns an error. (The previous if/else over the receiver was redundant —
// the receiver already is the result.)
func (v BooleanExpression) Evaluate() (bool, error) {
	return bool(v), nil
}
// createBooleanExpression creates a BooleanExpression holding the given
// bool. (The previous branch returned identical constructions on both arms;
// a direct conversion is equivalent.)
func createBooleanExpression(boolType bool) Expression {
	return BooleanExpression(boolType)
}

// TrueExpression always evaluates to true.
var TrueExpression = createBooleanExpression(true)

// FalseExpression always evaluates to false.
var FalseExpression = createBooleanExpression(false)
//-------------------------------------
package themefiles
const DeckJs = `
/*!
Deck JS - deck.core
Copyright (c) 2011 <NAME>
Dual licensed under the MIT license and GPL license.
https://github.com/imakewebthings/deck.js/blob/master/MIT-license.txt
https://github.com/imakewebthings/deck.js/blob/master/GPL-license.txt
*/
/*
The deck.core module provides all the basic functionality for creating and
moving through a deck. It does so by applying classes to indicate the state of
the deck and its slides, allowing CSS to take care of the visual representation
of each state. It also provides methods for navigating the deck and inspecting
its state, as well as basic key bindings for going to the next and previous
slides. More functionality is provided by wholly separate extension modules
that use the API provided by core.
*/
(function($, deck, document, undefined) {
var slides, // Array of all the uh, slides...
current, // Array index of the current slide
$container, // Keeping this cached
events = {
/*
This event fires whenever the current slide changes, whether by way of
next, prev, or go. The callback function is passed two parameters, from
and to, equal to the indices of the old slide and the new slide
respectively. If preventDefault is called on the event within this handler
the slide change does not occur.
$(document).bind('deck.change', function(event, from, to) {
alert('Moving from slide ' + from + ' to ' + to);
});
*/
change: 'deck.change',
/*
This event fires at the beginning of deck initialization, after the options
are set but before the slides array is created. This event makes a good hook
for preprocessing extensions looking to modify the deck.
*/
beforeInitialize: 'deck.beforeInit',
/*
This event fires at the end of deck initialization. Extensions should
implement any code that relies on user extensible options (key bindings,
element selectors, classes) within a handler for this event. Native
events associated with Deck JS should be scoped under a .deck event
namespace, as with the example below:
var $d = $(document);
$.deck.defaults.keys.myExtensionKeycode = 70; // 'h'
$d.bind('deck.init', function() {
$d.bind('keydown.deck', function(event) {
if (event.which === $.deck.getOptions().keys.myExtensionKeycode) {
// Rock out
}
});
});
*/
initialize: 'deck.init'
},
options = {},
$d = $(document),
/*
Internal function. Updates slide and container classes based on which
slide is the current slide.
*/
updateStates = function() {
var oc = options.classes,
osc = options.selectors.container,
old = $container.data('onSlide'),
$all = $();
// Container state
$container.removeClass(oc.onPrefix + old)
.addClass(oc.onPrefix + current)
.data('onSlide', current);
// Remove and re-add child-current classes for nesting
$('.' + oc.current).parentsUntil(osc).removeClass(oc.childCurrent);
slides[current].parentsUntil(osc).addClass(oc.childCurrent);
// Remove previous states
$.each(slides, function(i, el) {
$all = $all.add(el);
});
$all.removeClass([
oc.before,
oc.previous,
oc.current,
oc.next,
oc.after
].join(" "));
// Add new states back in
slides[current].addClass(oc.current);
if (current > 0) {
slides[current-1].addClass(oc.previous);
}
if (current + 1 < slides.length) {
slides[current+1].addClass(oc.next);
}
if (current > 1) {
$.each(slides.slice(0, current - 1), function(i, el) {
el.addClass(oc.before);
});
}
if (current + 2 < slides.length) {
$.each(slides.slice(current+2), function(i, el) {
el.addClass(oc.after);
});
}
},
/* Methods exposed in the jQuery.deck namespace */
methods = {
/*
jQuery.deck(selector, options)
selector: string | jQuery | array
options: object, optional
Initializes the deck, using each element matched by selector as a slide.
May also be passed an array of string selectors or jQuery objects, in
which case each selector in the array is considered a slide. The second
parameter is an optional options object which will extend the default
values.
$.deck('.slide');
or
$.deck([
'#first-slide',
'#second-slide',
'#etc'
]);
*/
init: function(elements, opts) {
var startTouch,
tolerance,
esp = function(e) {
e.stopPropagation();
};
options = $.extend(true, {}, $[deck].defaults, opts);
slides = [];
current = 0;
$container = $(options.selectors.container);
tolerance = options.touch.swipeTolerance;
// Pre init event for preprocessing hooks
$d.trigger(events.beforeInitialize);
// Hide the deck while states are being applied to kill transitions
$container.addClass(options.classes.loading);
// Fill slides array depending on parameter type
if ($.isArray(elements)) {
$.each(elements, function(i, e) {
slides.push($(e));
});
}
else {
$(elements).each(function(i, e) {
slides.push($(e));
});
}
/* Remove any previous bindings, and rebind key events */
$d.unbind('keydown.deck').bind('keydown.deck', function(e) {
if (e.which === options.keys.next || $.inArray(e.which, options.keys.next) > -1) {
methods.next();
e.preventDefault();
}
else if (e.which === options.keys.previous || $.inArray(e.which, options.keys.previous) > -1) {
methods.prev();
e.preventDefault();
}
})
/* Stop propagation of key events within editable elements */
.undelegate('input, textarea, select, button, meter, progress, [contentEditable]', 'keydown', esp)
.delegate('input, textarea, select, button, meter, progress, [contentEditable]', 'keydown', esp);
/* Bind touch events for swiping between slides on touch devices */
$container.unbind('touchstart.deck').bind('touchstart.deck', function(e) {
if (!startTouch) {
startTouch = $.extend({}, e.originalEvent.targetTouches[0]);
}
})
.unbind('touchmove.deck').bind('touchmove.deck', function(e) {
$.each(e.originalEvent.changedTouches, function(i, t) {
if (startTouch && t.identifier === startTouch.identifier) {
if (t.screenX - startTouch.screenX > tolerance || t.screenY - startTouch.screenY > tolerance) {
$[deck]('prev');
startTouch = undefined;
}
else if (t.screenX - startTouch.screenX < -1 * tolerance || t.screenY - startTouch.screenY < -1 * tolerance) {
$[deck]('next');
startTouch = undefined;
}
return false;
}
});
e.preventDefault();
})
.unbind('touchend.deck').bind('touchend.deck', function(t) {
$.each(t.originalEvent.changedTouches, function(i, t) {
if (startTouch && t.identifier === startTouch.identifier) {
startTouch = undefined;
}
});
})
.scrollLeft(0).scrollTop(0);
/*
Kick iframe videos, which dont like to redraw w/ transforms.
Remove this if Webkit ever fixes it.
*/
$.each(slides, function(i, $el) {
$el.unbind('webkitTransitionEnd.deck').bind('webkitTransitionEnd.deck',
function(event) {
if ($el.hasClass($[deck]('getOptions').classes.current)) {
var embeds = $(this).find('iframe').css('opacity', 0);
window.setTimeout(function() {
embeds.css('opacity', 1);
}, 100);
}
});
});
if (slides.length) {
updateStates();
}
// Show deck again now that slides are in place
$container.removeClass(options.classes.loading);
$d.trigger(events.initialize);
},
/*
jQuery.deck('go', index)
index: integer | string
Moves to the slide at the specified index if index is a number. Index is
0-based, so $.deck('go', 0); will move to the first slide. If index is a
string this will move to the slide with the specified id. If index is out
of bounds or doesn't match a slide id the call is ignored.
*/
go: function(index) {
var e = $.Event(events.change),
ndx;
/* Number index, easy. */
if (typeof index === 'number' && index >= 0 && index < slides.length) {
ndx = index;
}
/* Id string index, search for it and set integer index */
else if (typeof index === 'string') {
$.each(slides, function(i, $slide) {
if ($slide.attr('id') === index) {
ndx = i;
return false;
}
});
};
/* Out of bounds, id doesn't exist, illegal input, eject */
if (typeof ndx === 'undefined') return;
$d.trigger(e, [current, ndx]);
if (e.isDefaultPrevented()) {
/* Trigger the event again and undo the damage done by extensions. */
$d.trigger(events.change, [ndx, current]);
}
else {
current = ndx;
updateStates();
}
},
/*
jQuery.deck('next')
Moves to the next slide. If the last slide is already active, the call
is ignored.
*/
next: function() {
methods.go(current+1);
},
/*
jQuery.deck('prev')
Moves to the previous slide. If the first slide is already active, the
call is ignored.
*/
prev: function() {
methods.go(current-1);
},
/*
jQuery.deck('getSlide', index)
index: integer, optional
Returns a jQuery object containing the slide at index. If index is not
specified, the current slide is returned.
*/
getSlide: function(index) {
var i = typeof index !== 'undefined' ? index : current;
if (typeof i != 'number' || i < 0 || i >= slides.length) return null;
return slides[i];
},
/*
jQuery.deck('getSlides')
Returns all slides as an array of jQuery objects.
*/
getSlides: function() {
return slides;
},
/*
jQuery.deck('getContainer')
Returns a jQuery object containing the deck container as defined by the
container option.
*/
getContainer: function() {
return $container;
},
/*
jQuery.deck('getOptions')
Returns the options object for the deck, including any overrides that
were defined at initialization.
*/
getOptions: function() {
return options;
},
/*
jQuery.deck('extend', name, method)
name: string
method: function
Adds method to the deck namespace with the key of name. This doesn’t
give access to any private member data — public methods must still be
used within method — but lets extension authors piggyback on the deck
namespace rather than pollute jQuery.
$.deck('extend', 'alert', function(msg) {
alert(msg);
});
// Alerts 'boom'
$.deck('alert', 'boom');
*/
extend: function(name, method) {
methods[name] = method;
}
};
/* jQuery extension */
$[deck] = function(method, arg) {
if (methods[method]) {
return methods[method].apply(this, Array.prototype.slice.call(arguments, 1));
}
else {
return methods.init(method, arg);
}
};
/*
The default settings object for a deck. All deck extensions should extend
this object to add defaults for any of their options.
options.classes.after
This class is added to all slides that appear after the 'next' slide.
options.classes.before
This class is added to all slides that appear before the 'previous'
slide.
options.classes.childCurrent
This class is added to all elements in the DOM tree between the
'current' slide and the deck container. For standard slides, this is
mostly seen and used for nested slides.
options.classes.current
This class is added to the current slide.
options.classes.loading
This class is applied to the deck container during loading phases and is
primarily used as a way to short circuit transitions between states
where such transitions are distracting or unwanted. For example, this
class is applied during deck initialization and then removed to prevent
all the slides from appearing stacked and transitioning into place
on load.
options.classes.next
This class is added to the slide immediately following the 'current'
slide.
options.classes.onPrefix
This prefix, concatenated with the current slide index, is added to the
deck container as you change slides.
options.classes.previous
This class is added to the slide immediately preceding the 'current'
slide.
options.selectors.container
Elements matched by this CSS selector will be considered the deck
container. The deck container is used to scope certain states of the
deck, as with the onPrefix option, or with extensions such as deck.goto
and deck.menu.
options.keys.next
The numeric keycode used to go to the next slide.
options.keys.previous
The numeric keycode used to go to the previous slide.
options.touch.swipeTolerance
The number of pixels the users finger must travel to produce a swipe
gesture.
*/
$[deck].defaults = {
classes: {
after: 'deck-after',
before: 'deck-before',
childCurrent: 'deck-child-current',
current: 'deck-current',
loading: 'deck-loading',
next: 'deck-next',
onPrefix: 'on-slide-',
previous: 'deck-previous'
},
selectors: {
container: '.deck-container'
},
keys: {
// enter, space, page down, right arrow, down arrow,
next: [13, 32, 34, 39, 40],
// backspace, page up, left arrow, up arrow
previous: [8, 33, 37, 38]
},
touch: {
swipeTolerance: 60
}
};
$d.ready(function() {
$('html').addClass('ready');
});
/*
FF + Transforms + Flash video don't get along...
Firefox will reload and start playing certain videos after a
transform. Blanking the src when a previously shown slide goes out
of view prevents this.
*/
$d.bind('deck.change', function(e, from, to) {
var oldFrames = $[deck]('getSlide', from).find('iframe'),
newFrames = $[deck]('getSlide', to).find('iframe');
oldFrames.each(function() {
var $this = $(this),
curSrc = $this.attr('src');
if(curSrc) {
$this.data('deck-src', curSrc).attr('src', '');
}
});
newFrames.each(function() {
var $this = $(this),
originalSrc = $this.data('deck-src');
if (originalSrc) {
$this.attr('src', originalSrc);
}
});
});
})(jQuery, 'deck', document);
/*!
Deck JS - deck.goto
Copyright (c) 2011 <NAME>
Dual licensed under the MIT license and GPL license.
https://github.com/imakewebthings/deck.js/blob/master/MIT-license.txt
https://github.com/imakewebthings/deck.js/blob/master/GPL-license.txt
*/
/*
This module adds the necessary methods and key bindings to show and hide a form
for jumping to any slide number/id in the deck (and processes that form
accordingly). The form-showing state is indicated by the presence of a class on
the deck container.
*/
(function($, deck, undefined) {
var $d = $(document);
/*
Extends defaults/options.
options.classes.goto
This class is added to the deck container when showing the Go To Slide
form.
options.selectors.gotoDatalist
The element that matches this selector is the datalist element that will
be populated with options for each of the slide ids. In browsers that
support the datalist element, this provides a drop list of slide ids to
aid the user in selecting a slide.
options.selectors.gotoForm
The element that matches this selector is the form that is submitted
when a user hits enter after typing a slide number/id in the gotoInput
element.
options.selectors.gotoInput
The element that matches this selector is the text input field for
entering a slide number/id in the Go To Slide form.
options.keys.goto
The numeric keycode used to show the Go To Slide form.
options.countNested
If false, only top level slides will be counted when entering a
slide number.
*/
$.extend(true, $[deck].defaults, {
classes: {
goto: 'deck-goto'
},
selectors: {
gotoDatalist: '#goto-datalist',
gotoForm: '.goto-form',
gotoInput: '#goto-slide'
},
keys: {
goto: 71 // g
},
countNested: true
});
/*
jQuery.deck('showGoTo')
Shows the Go To Slide form by adding the class specified by the goto class
option to the deck container.
*/
$[deck]('extend', 'showGoTo', function() {
$[deck]('getContainer').addClass($[deck]('getOptions').classes.goto);
$($[deck]('getOptions').selectors.gotoInput).focus();
});
/*
jQuery.deck('hideGoTo')
Hides the Go To Slide form by removing the class specified by the goto class
option from the deck container.
*/
$[deck]('extend', 'hideGoTo', function() {
$($[deck]('getOptions').selectors.gotoInput).blur();
$[deck]('getContainer').removeClass($[deck]('getOptions').classes.goto);
});
/*
jQuery.deck('toggleGoTo')
Toggles between showing and hiding the Go To Slide form.
*/
$[deck]('extend', 'toggleGoTo', function() {
$[deck]($[deck]('getContainer').hasClass($[deck]('getOptions').classes.goto) ? 'hideGoTo' : 'showGoTo');
});
$d.bind('deck.init', function() {
var opts = $[deck]('getOptions'),
$datalist = $(opts.selectors.gotoDatalist),
slideTest = $.map([
opts.classes.before,
opts.classes.previous,
opts.classes.current,
opts.classes.next,
opts.classes.after
], function(el, i) {
return '.' + el;
}).join(', '),
rootCounter = 1;
// Bind key events
$d.unbind('keydown.deckgoto').bind('keydown.deckgoto', function(e) {
var key = $[deck]('getOptions').keys.goto;
if (e.which === key || $.inArray(e.which, key) > -1) {
e.preventDefault();
$[deck]('toggleGoTo');
}
});
/* Populate datalist and work out countNested*/
$.each($[deck]('getSlides'), function(i, $slide) {
var id = $slide.attr('id'),
$parentSlides = $slide.parentsUntil(opts.selectors.container, slideTest);
if (id) {
$datalist.append('<option value="' + id + '">');
}
if ($parentSlides.length) {
$slide.removeData('rootIndex');
}
else if (!opts.countNested) {
$slide.data('rootIndex', rootCounter);
++rootCounter;
}
});
// Process form submittal, go to the slide entered
$(opts.selectors.gotoForm)
.unbind('submit.deckgoto')
.bind('submit.deckgoto', function(e) {
var $field = $($[deck]('getOptions').selectors.gotoInput),
ndx = parseInt($field.val(), 10);
if (!$[deck]('getOptions').countNested) {
if (ndx >= rootCounter) return false;
$.each($[deck]('getSlides'), function(i, $slide) {
if ($slide.data('rootIndex') === ndx) {
ndx = i + 1;
return false;
}
});
}
$[deck]('go', isNaN(ndx) ? $field.val() : ndx - 1);
$[deck]('hideGoTo');
$field.val('');
e.preventDefault();
});
// Dont let keys in the input trigger deck actions
$(opts.selectors.gotoInput)
.unbind('keydown.deckgoto')
.bind('keydown.deckgoto', function(e) {
e.stopPropagation();
});
});
})(jQuery, 'deck');
/*!
Deck JS - deck.hash
Copyright (c) 2011 <NAME>
Dual licensed under the MIT license and GPL license.
https://github.com/imakewebthings/deck.js/blob/master/MIT-license.txt
https://github.com/imakewebthings/deck.js/blob/master/GPL-license.txt
*/
/*
This module adds deep linking to individual slides, enables internal links
to slides within decks, and updates the address bar with the hash as the user
moves through the deck. A permalink anchor is also updated. Standard themes
hide this link in browsers that support the History API, and show it for
those that do not. Slides that do not have an id are assigned one according to
the hashPrefix option. In addition to the on-slide container state class
kept by core, this module adds an on-slide state class that uses the id of each
slide.
*/
(function ($, deck, window, undefined) {
var $d = $(document),
$window = $(window),
/* Collection of internal fragment links in the deck */
$internals,
/*
Internal only function. Given a string, extracts the id from the hash,
matches it to the appropriate slide, and navigates there.
*/
goByHash = function(str) {
var id = str.substr(str.indexOf("#") + 1),
slides = $[deck]('getSlides');
$.each(slides, function(i, $el) {
if ($el.attr('id') === id) {
$[deck]('go', i);
return false;
}
});
// If we don't set these to 0 the container scrolls due to hashchange
$[deck]('getContainer').scrollLeft(0).scrollTop(0);
};
/*
Extends defaults/options.
options.selectors.hashLink
The element matching this selector has its href attribute updated to
the hash of the current slide as the user navigates through the deck.
options.hashPrefix
Every slide that does not have an id is assigned one at initialization.
Assigned ids take the form of hashPrefix + slideIndex, e.g., slide-0,
slide-12, etc.
options.preventFragmentScroll
When deep linking to a hash of a nested slide, this scrolls the deck
container to the top, undoing the natural browser behavior of scrolling
to the document fragment on load.
*/
$.extend(true, $[deck].defaults, {
selectors: {
hashLink: '.deck-permalink'
},
hashPrefix: 'slide-',
preventFragmentScroll: true
});
$d.bind('deck.init', function() {
var opts = $[deck]('getOptions');
$internals = $(),
slides = $[deck]('getSlides');
$.each(slides, function(i, $el) {
var hash;
/* Hand out ids to the unfortunate slides born without them */
if (!$el.attr('id') || $el.data('deckAssignedId') === $el.attr('id')) {
$el.attr('id', opts.hashPrefix + i);
$el.data('deckAssignedId', opts.hashPrefix + i);
}
hash ='#' + $el.attr('id');
/* Deep link to slides on init */
if (hash === window.location.hash) {
$[deck]('go', i);
}
/* Add internal links to this slide */
$internals = $internals.add('a[href="' + hash + '"]');
});
if (!Modernizr.hashchange) {
/* Set up internal links using click for the poor browsers
without a hashchange event. */
$internals.unbind('click.deckhash').bind('click.deckhash', function(e) {
goByHash($(this).attr('href'));
});
}
/* Set up first id container state class */
if (slides.length) {
$[deck]('getContainer').addClass(opts.classes.onPrefix + $[deck]('getSlide').attr('id'));
};
})
/* Update permalink, address bar, and state class on a slide change */
.bind('deck.change', function(e, from, to) {
var hash = '#' + $[deck]('getSlide', to).attr('id'),
hashPath = window.location.href.replace(/#.*/, '') + hash,
opts = $[deck]('getOptions'),
osp = opts.classes.onPrefix,
$c = $[deck]('getContainer');
$c.removeClass(osp + $[deck]('getSlide', from).attr('id'));
$c.addClass(osp + $[deck]('getSlide', to).attr('id'));
$(opts.selectors.hashLink).attr('href', hashPath);
if (Modernizr.history) {
window.history.replaceState({}, "", hashPath);
}
});
/* Deals with internal links in modern browsers */
$window.bind('hashchange.deckhash', function(e) {
if (e.originalEvent && e.originalEvent.newURL) {
goByHash(e.originalEvent.newURL);
}
else {
goByHash(window.location.hash);
}
})
/* Prevent scrolling on deep links */
.bind('load', function() {
if ($[deck]('getOptions').preventFragmentScroll) {
$[deck]('getContainer').scrollLeft(0).scrollTop(0);
}
});
})(jQuery, 'deck', this);
/*!
Deck JS - deck.menu
Copyright (c) 2011 <NAME>
Dual licensed under the MIT license and GPL license.
https://github.com/imakewebthings/deck.js/blob/master/MIT-license.txt
https://github.com/imakewebthings/deck.js/blob/master/GPL-license.txt
*/
/*
This module adds the methods and key binding to show and hide a menu of all
slides in the deck. The deck menu state is indicated by the presence of a class
on the deck container.
*/
(function($, deck, undefined) {
var $d = $(document),
rootSlides; // Array of top level slides
/*
Extends defaults/options.
options.classes.menu
This class is added to the deck container when showing the slide menu.
options.keys.menu
The numeric keycode used to toggle between showing and hiding the slide
menu.
options.touch.doubletapWindow
Two consecutive touch events within this number of milliseconds will
be considered a double tap, and will toggle the menu on touch devices.
*/
$.extend(true, $[deck].defaults, {
classes: {
menu: 'deck-menu'
},
keys: {
menu: 77 // m
},
touch: {
doubletapWindow: 400
}
});
/*
jQuery.deck('showMenu')
Shows the slide menu by adding the class specified by the menu class option
to the deck container.
*/
$[deck]('extend', 'showMenu', function() {
var $c = $[deck]('getContainer'),
opts = $[deck]('getOptions');
if ($c.hasClass(opts.classes.menu)) return;
// Hide through loading class to short-circuit transitions (perf)
$c.addClass([opts.classes.loading, opts.classes.menu].join(' '));
/* Forced to do this in JS until CSS learns second-grade math. Save old
style value for restoration when menu is hidden. */
if (Modernizr.csstransforms) {
$.each(rootSlides, function(i, $slide) {
$slide.data('oldStyle', $slide.attr('style'));
$slide.css({
'position': 'absolute',
'left': ((i % 4) * 25) + '%',
'top': (Math.floor(i / 4) * 25) + '%'
});
});
}
// Need to ensure the loading class renders first, then remove
window.setTimeout(function() {
$c.removeClass(opts.classes.loading)
.scrollTop($[deck]('getSlide').offset().top);
}, 0);
});
/*
jQuery.deck('hideMenu')
Hides the slide menu by removing the class specified by the menu class
option from the deck container.
*/
$[deck]('extend', 'hideMenu', function() {
var $c = $[deck]('getContainer'),
opts = $[deck]('getOptions');
if (!$c.hasClass(opts.classes.menu)) return;
$c.removeClass(opts.classes.menu);
$c.addClass(opts.classes.loading);
/* Restore old style value */
if (Modernizr.csstransforms) {
$.each(rootSlides, function(i, $slide) {
var oldStyle = $slide.data('oldStyle');
$slide.attr('style', oldStyle ? oldStyle : '');
});
}
window.setTimeout(function() {
$c.removeClass(opts.classes.loading).scrollTop(0);
}, 0);
});
/*
jQuery.deck('toggleMenu')
Toggles between showing and hiding the slide menu.
*/
$[deck]('extend', 'toggleMenu', function() {
$[deck]('getContainer').hasClass($[deck]('getOptions').classes.menu) ?
$[deck]('hideMenu') : $[deck]('showMenu');
});
$d.bind('deck.init', function() {
var opts = $[deck]('getOptions'),
touchEndTime = 0,
currentSlide,
slideTest = $.map([
opts.classes.before,
opts.classes.previous,
opts.classes.current,
opts.classes.next,
opts.classes.after
], function(el, i) {
return '.' + el;
}).join(', ');
// Build top level slides array
rootSlides = [];
$.each($[deck]('getSlides'), function(i, $el) {
if (!$el.parentsUntil(opts.selectors.container, slideTest).length) {
rootSlides.push($el);
}
});
// Bind key events
$d.unbind('keydown.deckmenu').bind('keydown.deckmenu', function(e) {
if (e.which === opts.keys.menu || $.inArray(e.which, opts.keys.menu) > -1) {
$[deck]('toggleMenu');
e.preventDefault();
}
});
// Double tap to toggle slide menu for touch devices
$[deck]('getContainer').unbind('touchstart.deckmenu').bind('touchstart.deckmenu', function(e) {
currentSlide = $[deck]('getSlide');
})
.unbind('touchend.deckmenu').bind('touchend.deckmenu', function(e) {
var now = Date.now();
// Ignore this touch event if it caused a nav change (swipe)
if (currentSlide !== $[deck]('getSlide')) return;
if (now - touchEndTime < opts.touch.doubletapWindow) {
$[deck]('toggleMenu');
e.preventDefault();
}
touchEndTime = now;
});
// Selecting slides from the menu
$.each($[deck]('getSlides'), function(i, $s) {
$s.unbind('click.deckmenu').bind('click.deckmenu', function(e) {
if (!$[deck]('getContainer').hasClass(opts.classes.menu)) return;
$[deck]('go', i);
$[deck]('hideMenu');
e.stopPropagation();
e.preventDefault();
});
});
})
.bind('deck.change', function(e, from, to) {
var container = $[deck]('getContainer');
if (container.hasClass($[deck]('getOptions').classes.menu)) {
container.scrollTop($[deck]('getSlide', to).offset().top);
}
});
})(jQuery, 'deck');
/*!
Deck JS - deck.navigation
Copyright (c) 2011 <NAME>
Dual licensed under the MIT license and GPL license.
https://github.com/imakewebthings/deck.js/blob/master/MIT-license.txt
https://github.com/imakewebthings/deck.js/blob/master/GPL-license.txt
*/
/*
This module adds clickable previous and next links to the deck.
*/
(function($, deck, undefined) {
var $d = $(document),
/* Updates link hrefs, and disabled states if last/first slide */
updateButtons = function(e, from, to) {
var opts = $[deck]('getOptions'),
last = $[deck]('getSlides').length - 1,
prevSlide = $[deck]('getSlide', to - 1),
nextSlide = $[deck]('getSlide', to + 1),
hrefBase = window.location.href.replace(/#.*/, ''),
prevId = prevSlide ? prevSlide.attr('id') : undefined,
nextId = nextSlide ? nextSlide.attr('id') : undefined;
$(opts.selectors.previousLink)
.toggleClass(opts.classes.navDisabled, !to)
.attr('href', hrefBase + '#' + (prevId ? prevId : ''));
$(opts.selectors.nextLink)
.toggleClass(opts.classes.navDisabled, to === last)
.attr('href', hrefBase + '#' + (nextId ? nextId : ''));
};
/*
Extends defaults/options.
options.classes.navDisabled
This class is added to a navigation link when that action is disabled.
It is added to the previous link when on the first slide, and to the
next link when on the last slide.
options.selectors.nextLink
The elements that match this selector will move the deck to the next
slide when clicked.
options.selectors.previousLink
The elements that match this selector will move to deck to the previous
slide when clicked.
*/
$.extend(true, $[deck].defaults, {
classes: {
navDisabled: 'deck-nav-disabled'
},
selectors: {
nextLink: '.deck-next-link',
previousLink: '.deck-prev-link'
}
});
$d.bind('deck.init', function() {
var opts = $[deck]('getOptions'),
slides = $[deck]('getSlides'),
$current = $[deck]('getSlide'),
ndx;
// Setup prev/next link events
$(opts.selectors.previousLink)
.unbind('click.decknavigation')
.bind('click.decknavigation', function(e) {
$[deck]('prev');
e.preventDefault();
});
$(opts.selectors.nextLink)
.unbind('click.decknavigation')
.bind('click.decknavigation', function(e) {
$[deck]('next');
e.preventDefault();
});
// Find where we started in the deck and set initial states
$.each(slides, function(i, $slide) {
if ($slide === $current) {
ndx = i;
return false;
}
});
updateButtons(null, ndx, ndx);
})
.bind('deck.change', updateButtons);
})(jQuery, 'deck');
/*!
Deck JS - deck.status
Copyright (c) 2011 <NAME>
Dual licensed under the MIT license and GPL license.
https://github.com/imakewebthings/deck.js/blob/master/MIT-license.txt
https://github.com/imakewebthings/deck.js/blob/master/GPL-license.txt
*/
/*
This module adds a (current)/(total) style status indicator to the deck.
*/
(function($, deck, undefined) {
var $d = $(document),
updateCurrent = function(e, from, to) {
var opts = $[deck]('getOptions');
$(opts.selectors.statusCurrent).text(opts.countNested ?
to + 1 :
$[deck]('getSlide', to).data('rootSlide')
);
};
/*
Extends defaults/options.
options.selectors.statusCurrent
The element matching this selector displays the current slide number.
options.selectors.statusTotal
The element matching this selector displays the total number of slides.
options.countNested
If false, only top level slides will be counted in the current and
total numbers.
*/
$.extend(true, $[deck].defaults, {
selectors: {
statusCurrent: '.deck-status-current',
statusTotal: '.deck-status-total'
},
countNested: true
});
$d.bind('deck.init', function() {
var opts = $[deck]('getOptions'),
slides = $[deck]('getSlides'),
$current = $[deck]('getSlide'),
ndx;
// Set total slides once
if (opts.countNested) {
$(opts.selectors.statusTotal).text(slides.length);
}
else {
/* Determine root slides by checking each slide's ancestor tree for
any of the slide classes. */
var rootIndex = 1,
slideTest = $.map([
opts.classes.before,
opts.classes.previous,
opts.classes.current,
opts.classes.next,
opts.classes.after
], function(el, i) {
return '.' + el;
}).join(', ');
/* Store the 'real' root slide number for use during slide changes. */
$.each(slides, function(i, $el) {
var $parentSlides = $el.parentsUntil(opts.selectors.container, slideTest);
$el.data('rootSlide', $parentSlides.length ?
$parentSlides.last().data('rootSlide') :
rootIndex++
);
});
$(opts.selectors.statusTotal).text(rootIndex - 1);
}
// Find where we started in the deck and set initial state
$.each(slides, function(i, $el) {
if ($el === $current) {
ndx = i;
return false;
}
});
updateCurrent(null, ndx, ndx);
})
/* Update current slide number with each change event */
.bind('deck.change', updateCurrent);
})(jQuery, 'deck');
`
const DeckCss = `
body.deck-container {
overflow-y: auto;
position: static;
}
.deck-container {
position: relative;
min-height: 100%;
margin: 0 auto;
padding: 0 48px;
font-size: 16px;
line-height: 1.25;
overflow: hidden;
/* Resets and base styles from HTML5 Boilerplate */
/* End HTML5 Boilerplate adaptations */
}
.js .deck-container {
visibility: hidden;
}
.ready .deck-container {
visibility: visible;
}
.touch .deck-container {
-webkit-text-size-adjust: none;
-moz-text-size-adjust: none;
}
.deck-container div, .deck-container span, .deck-container object, .deck-container iframe,
.deck-container h1, .deck-container h2, .deck-container h3, .deck-container h4, .deck-container h5, .deck-container h6, .deck-container p, .deck-container blockquote, .deck-container pre,
.deck-container abbr, .deck-container address, .deck-container cite, .deck-container code, .deck-container del, .deck-container dfn, .deck-container em, .deck-container img, .deck-container ins, .deck-container kbd, .deck-container q, .deck-container samp,
.deck-container small, .deck-container strong, .deck-container sub, .deck-container sup, .deck-container var, .deck-container b, .deck-container i, .deck-container dl, .deck-container dt, .deck-container dd, .deck-container ol, .deck-container ul, .deck-container li,
.deck-container fieldset, .deck-container form, .deck-container label, .deck-container legend,
.deck-container table, .deck-container caption, .deck-container tbody, .deck-container tfoot, .deck-container thead, .deck-container tr, .deck-container th, .deck-container td,
.deck-container article, .deck-container aside, .deck-container canvas, .deck-container details, .deck-container figcaption, .deck-container figure,
.deck-container footer, .deck-container header, .deck-container hgroup, .deck-container menu, .deck-container nav, .deck-container section, .deck-container summary,
.deck-container time, .deck-container mark, .deck-container audio, .deck-container video {
margin: 0;
padding: 0;
border: 0;
font-size: 100%;
font: inherit;
vertical-align: baseline;
}
.deck-container article, .deck-container aside, .deck-container details, .deck-container figcaption, .deck-container figure,
.deck-container footer, .deck-container header, .deck-container hgroup, .deck-container menu, .deck-container nav, .deck-container section {
display: block;
}
.deck-container blockquote, .deck-container q {
quotes: none;
}
.deck-container blockquote:before, .deck-container blockquote:after, .deck-container q:before, .deck-container q:after {
content: "";
content: none;
}
.deck-container ins {
background-color: #ff9;
color: #000;
text-decoration: none;
}
.deck-container mark {
background-color: #ff9;
color: #000;
font-style: italic;
font-weight: bold;
}
.deck-container del {
text-decoration: line-through;
}
.deck-container abbr[title], .deck-container dfn[title] {
border-bottom: 1px dotted;
cursor: help;
}
.deck-container table {
border-collapse: collapse;
border-spacing: 0;
}
.deck-container hr {
display: block;
height: 1px;
border: 0;
border-top: 1px solid #ccc;
margin: 1em 0;
padding: 0;
}
.deck-container input, .deck-container select {
vertical-align: middle;
}
.deck-container select, .deck-container input, .deck-container textarea, .deck-container button {
font: 99% sans-serif;
}
.deck-container pre, .deck-container code, .deck-container kbd, .deck-container samp {
font-family: monospace, sans-serif;
}
.deck-container a {
-webkit-tap-highlight-color: rgba(0, 0, 0, 0);
}
.deck-container a:hover, .deck-container a:active {
outline: none;
}
.deck-container ul, .deck-container ol {
margin-left: 2em;
vertical-align: top;
}
.deck-container ol {
list-style-type: decimal;
}
.deck-container nav ul, .deck-container nav li {
margin: 0;
list-style: none;
list-style-image: none;
}
.deck-container small {
font-size: 85%;
}
.deck-container strong, .deck-container th {
font-weight: bold;
}
.deck-container td {
vertical-align: top;
}
.deck-container sub, .deck-container sup {
font-size: 75%;
line-height: 0;
position: relative;
}
.deck-container sup {
top: -0.5em;
}
.deck-container sub {
bottom: -0.25em;
}
.deck-container textarea {
overflow: auto;
}
.ie6 .deck-container legend, .ie7 .deck-container legend {
margin-left: -7px;
}
.deck-container input[type="radio"] {
vertical-align: text-bottom;
}
.deck-container input[type="checkbox"] {
vertical-align: bottom;
}
.ie7 .deck-container input[type="checkbox"] {
vertical-align: baseline;
}
.ie6 .deck-container input {
vertical-align: text-bottom;
}
.deck-container label, .deck-container input[type="button"], .deck-container input[type="submit"], .deck-container input[type="image"], .deck-container button {
cursor: pointer;
}
.deck-container button, .deck-container input, .deck-container select, .deck-container textarea {
margin: 0;
}
.deck-container input:invalid, .deck-container textarea:invalid {
border-radius: 1px;
-moz-box-shadow: 0px 0px 5px red;
-webkit-box-shadow: 0px 0px 5px red;
box-shadow: 0px 0px 5px red;
}
.deck-container input:invalid .no-boxshadow, .deck-container textarea:invalid .no-boxshadow {
background-color: #f0dddd;
}
.deck-container button {
width: auto;
overflow: visible;
}
.ie7 .deck-container img {
-ms-interpolation-mode: bicubic;
}
.deck-container, .deck-container select, .deck-container input, .deck-container textarea {
color: #444;
}
.deck-container a {
color: #607890;
}
.deck-container a:hover, .deck-container a:focus {
color: #036;
}
.deck-container a:link {
-webkit-tap-highlight-color: #fff;
}
.deck-container.deck-loading {
display: none;
}
.slide {
width: auto;
min-height: 100%;
position: relative;
}
.slide h1 {
font-size: 4.5em;
}
.slide h1, .slide .vcenter {
font-weight: bold;
text-align: center;
padding-top: 1em;
max-height: 100%;
}
.csstransforms .slide h1, .csstransforms .slide .vcenter {
padding: 0 48px;
position: absolute;
left: 0;
right: 0;
top: 50%;
-webkit-transform: translate(0, -50%);
-moz-transform: translate(0, -50%);
-ms-transform: translate(0, -50%);
-o-transform: translate(0, -50%);
transform: translate(0, -50%);
}
.slide .vcenter h1 {
position: relative;
top: auto;
padding: 0;
-webkit-transform: none;
-moz-transform: none;
-ms-transform: none;
-o-transform: none;
transform: none;
}
.slide h2 {
font-size: 2.25em;
font-weight: bold;
padding-top: .5em;
margin: 0 0 .66666em 0;
border-bottom: 3px solid #888;
}
.slide h3 {
font-size: 1.4375em;
font-weight: bold;
margin-bottom: .30435em;
}
.slide h4 {
font-size: 1.25em;
font-weight: bold;
margin-bottom: .25em;
}
.slide h5 {
font-size: 1.125em;
font-weight: bold;
margin-bottom: .2222em;
}
.slide h6 {
font-size: 1em;
font-weight: bold;
}
.slide img, .slide iframe, .slide video {
display: block;
max-width: 100%;
}
.slide video, .slide iframe, .slide img {
display: block;
margin: 0 auto;
}
.slide p, .slide blockquote, .slide iframe, .slide img, .slide ul, .slide ol, .slide pre, .slide video {
margin-bottom: 1em;
}
.slide pre {
white-space: pre;
white-space: pre-wrap;
word-wrap: break-word;
padding: 1em;
border: 1px solid #888;
}
.slide em {
font-style: italic;
}
.slide li {
padding: .25em 0;
vertical-align: middle;
}
.deck-before, .deck-previous, .deck-next, .deck-after {
position: absolute;
left: -999em;
top: -999em;
}
.deck-current {
z-index: 2;
}
.slide .slide {
visibility: hidden;
position: static;
min-height: 0;
}
.deck-child-current {
position: static;
z-index: 2;
}
.deck-child-current .slide {
visibility: hidden;
}
.deck-child-current .deck-previous, .deck-child-current .deck-before, .deck-child-current .deck-current {
visibility: visible;
}
.deck-container .goto-form {
position: absolute;
z-index: 3;
bottom: 10px;
left: 50%;
height: 1.75em;
margin: 0 0 0 -9.125em;
line-height: 1.75em;
padding: 0.625em;
display: none;
background: #ccc;
overflow: hidden;
}
.borderradius .deck-container .goto-form {
-webkit-border-radius: 10px;
-moz-border-radius: 10px;
border-radius: 10px;
}
.deck-container .goto-form label {
font-weight: bold;
}
.deck-container .goto-form label, .deck-container .goto-form input {
display: inline-block;
font-family: inherit;
}
.deck-goto .goto-form {
display: block;
}
#goto-slide {
width: 8.375em;
margin: 0 0.625em;
height: 1.4375em;
}
.deck-container .deck-permalink {
display: none;
position: absolute;
z-index: 4;
bottom: 30px;
right: 0;
width: 48px;
text-align: center;
}
.no-history .deck-container:hover .deck-permalink {
display: block;
}
.deck-menu .slide {
background: #eee;
position: relative;
left: 0;
top: 0;
visibility: visible;
cursor: pointer;
}
.no-csstransforms .deck-menu > .slide {
float: left;
width: 22%;
height: 22%;
min-height: 0;
margin: 1%;
font-size: 0.22em;
overflow: hidden;
padding: 0 0.5%;
}
.deck-menu iframe, .deck-menu img, .deck-menu video {
max-width: 100%;
}
.deck-menu .deck-current, .no-touch .deck-menu .slide:hover {
background: #ddf;
}
.deck-menu.deck-container:hover .deck-prev-link, .deck-menu.deck-container:hover .deck-next-link {
display: none;
}
.deck-container .deck-prev-link, .deck-container .deck-next-link {
display: none;
position: absolute;
z-index: 3;
top: 50%;
width: 32px;
height: 32px;
margin-top: -16px;
font-size: 20px;
font-weight: bold;
line-height: 32px;
vertical-align: middle;
text-align: center;
text-decoration: none;
color: #fff;
background: #888;
}
.borderradius .deck-container .deck-prev-link, .borderradius .deck-container .deck-next-link {
-webkit-border-radius: 16px;
-moz-border-radius: 16px;
border-radius: 16px;
}
.deck-container .deck-prev-link:hover, .deck-container .deck-prev-link:focus, .deck-container .deck-prev-link:active, .deck-container .deck-prev-link:visited, .deck-container .deck-next-link:hover, .deck-container .deck-next-link:focus, .deck-container .deck-next-link:active, .deck-container .deck-next-link:visited {
color: #fff;
}
.deck-container .deck-prev-link {
left: 8px;
}
.deck-container .deck-next-link {
right: 8px;
}
.deck-container:hover .deck-prev-link, .deck-container:hover .deck-next-link {
display: block;
}
.deck-container:hover .deck-prev-link.deck-nav-disabled, .touch .deck-container:hover .deck-prev-link, .deck-container:hover .deck-next-link.deck-nav-disabled, .touch .deck-container:hover .deck-next-link {
display: none;
}
.deck-container .deck-status {
position: absolute;
bottom: 10px;
right: 5px;
color: #888;
z-index: 3;
margin: 0;
}
body.deck-container .deck-status {
position: fixed;
}
` | web/view/themes/themefiles/deckjs.go | 0.598077 | 0.40987 | deckjs.go | starcoder |
package scene
import (
"github.com/mikee385/GolangRayTracer/color"
"github.com/mikee385/GolangRayTracer/geometry"
"math"
)
// Bias is the small offset applied along a secondary ray's direction
// (shadow, reflection, refraction rays) so the new ray does not immediately
// re-intersect the surface it was spawned from ("shadow acne").
const Bias = 1.0E-4

// Scene holds every renderable item and light source together with the
// global settings used while tracing rays.
type Scene struct {
	backgroundColor color.ColorRGB // returned when a ray hits nothing
	refractiveIndex float32        // refractive index of the surrounding medium (e.g. air)
	maxRayDepth     uint           // recursion limit for reflection/refraction rays
	items           []internalObject
	lights          []internalLight
}
// NewScene builds an empty scene with the given background color, ambient
// refractive index, and maximum ray-recursion depth. Item and light slices
// are pre-sized for a handful of entries.
func NewScene(backgroundColor color.ColorRGB, refractiveIndex float32, maxRayDepth uint) Scene {
	var s Scene
	s.backgroundColor = backgroundColor
	s.refractiveIndex = refractiveIndex
	s.maxRayDepth = maxRayDepth
	s.items = make([]internalObject, 0, 4)
	s.lights = make([]internalLight, 0, 4)
	return s
}
// AddLightSource registers light twice: once in items (so primary rays can
// hit it directly) and once in lights (so shading can iterate emitters).
// Both entries share the same index, letting shadow tests skip the light itself.
func (scene *Scene) AddLightSource(light *SceneLight) {
	idx := len(scene.items)
	item := internalObject{index: idx, object: light, isLight: true}
	scene.items = append(scene.items, item)
	scene.lights = append(scene.lights, internalLight{index: idx, light: light})
}
// AddObject registers a non-emitting object with the scene.
func (scene *Scene) AddObject(object SceneObject) {
	scene.items = append(scene.items, internalObject{
		index:   len(scene.items),
		object:  object,
		isLight: false,
	})
}
// Trace follows ray through the scene and returns the shaded color at the
// nearest hit plus the distance to that hit. depth counts how many times the
// ray has already been reflected/refracted; recursion stops at maxRayDepth.
// Misses return the background color with Distance 0.
func (scene *Scene) Trace(ray geometry.Ray3D, depth uint) TraceResult {
	var nearestDistance float32 = 0.0
	var nearestItem internalObject
	var nearestIntersection = false

	// Find the nearest object that the ray intersects (linear scan; there is
	// no acceleration structure).
	for _, item := range scene.items {
		var currentDistance, hasIntersection = item.object.Intersect(ray)
		if hasIntersection {
			if !nearestIntersection || currentDistance < nearestDistance {
				nearestDistance = currentDistance
				nearestItem = item
				nearestIntersection = true
			}
		}
	}

	// If the ray doesn't hit any objects, return the background color.
	if !nearestIntersection {
		return TraceResult{
			Color:    scene.backgroundColor,
			Distance: 0.0,
		}
	}

	// Get the point where the ray intersects the object.
	var point = ray.Point(nearestDistance)

	// If the ray intersects a light source, simply return the color of the
	// light — emitters are not shaded.
	if nearestItem.isLight {
		return TraceResult{
			Color:    nearestItem.object.Material(point).Color,
			Distance: nearestDistance,
		}
	}

	// Get the surface normal and color at the intersection point.
	var normal = nearestItem.object.Normal(point)
	var surfaceMaterial = nearestItem.object.Material(point)
	var rayVector = ray.Direction.ToVector()
	var normalVector = normal.ToVector()

	// Calculate the color at the intersection point, accumulating reflection,
	// refraction, and per-light diffuse/specular contributions.
	var totalRayColor = color.Black()
	if depth < scene.maxRayDepth {
		// TODO: Add Fresnel effects (?)

		// Calculate the color from the reflected ray: mirror the incoming
		// direction about the normal and recurse from a Bias-offset origin.
		var reflection = surfaceMaterial.Reflection
		if reflection > 0.0 {
			var reflectedDirection = rayVector.Sub(normalVector.Scale(2.0 * geometry.Dot(rayVector, normalVector))).ToUnit()
			var nearbyPoint = point.Translate_Dist(reflectedDirection, Bias)
			var reflectedResult = scene.Trace(geometry.NewRay(nearbyPoint, reflectedDirection), depth+1)
			totalRayColor = totalRayColor.Add(reflectedResult.Color.Scale(reflection).Mul(surfaceMaterial.Color))
		}

		// Calculate the color from the refracted ray (Snell's law); n is the
		// ratio of refractive indices across the interface.
		var refraction = surfaceMaterial.Refraction
		if refraction > 0.0 {
			var n, cosI float32
			if geometry.Dot(rayVector, normalVector) > 0.0 {
				// Internal refraction (ray leaving the object).
				// NOTE(review): the refracted direction below uses the
				// un-flipped normal in this case — verify this matches the
				// intended Snell's-law convention.
				n = surfaceMaterial.RefractiveIndex / scene.refractiveIndex
				cosI = -geometry.Dot(rayVector, normalVector.Neg())
			} else {
				// External refraction (ray entering the object).
				n = scene.refractiveIndex / surfaceMaterial.RefractiveIndex
				cosI = -geometry.Dot(rayVector, normalVector)
			}
			// cos2T <= 0 means total internal reflection: no refracted ray.
			var cos2T = 1 - n*n*(1-cosI*cosI)
			if cos2T > 0.0 {
				var refractedDirection = rayVector.Scale(n).Add(normalVector.Scale(n*cosI - float32(math.Sqrt(float64(cos2T))))).ToUnit()
				var nearbyPoint = point.Translate_Dist(refractedDirection, Bias)
				var refractedResult = scene.Trace(geometry.NewRay(nearbyPoint, refractedDirection), depth+1)

				// Beer's Law: attenuate by distance traveled inside the
				// material (0.15 is a fixed absorbance coefficient).
				var absorbance = surfaceMaterial.Color.Scale(0.15 * -refractedResult.Distance)
				var transparency = color.New(
					float32(math.Exp(float64(absorbance.Red))),
					float32(math.Exp(float64(absorbance.Green))),
					float32(math.Exp(float64(absorbance.Blue))))
				totalRayColor = totalRayColor.Add(refractedResult.Color.Mul(transparency))
			}
		}
	}

	// Calculate the color from each light in the scene (Phong-style shading).
	for _, lightItem := range scene.lights {
		var light = lightItem.light
		var lightColor = light.Material(point).Color
		var vectorToLight = geometry.NewVector_BetweenPoints(point, light.Center())
		var distanceToLight = vectorToLight.Magnitude()
		var directionToLight = vectorToLight.ToUnit()
		var directionToLightVector = directionToLight.ToVector()

		// Calculate the shading from the light: cast a shadow ray toward the
		// light and check for any occluder closer than the light itself.
		// Shadows are hard (shade is all-or-nothing).
		var shade float32 = 1.0
		var nearbyPoint = point.Translate_Dist(directionToLight, Bias)
		var shadowRay = geometry.NewRay(nearbyPoint, directionToLight)
		for _, shadowItem := range scene.items {
			if shadowItem.index != lightItem.index {
				var shadowDistance, hasIntersection = shadowItem.object.Intersect(shadowRay)
				if hasIntersection && shadowDistance < distanceToLight {
					shade = 0.0
					break
				}
			}
		}
		if shade != 0.0 {
			// Calculate the diffusive (Lambertian) lighting from the light:
			// proportional to the cosine between normal and light direction.
			var diffuse = surfaceMaterial.Diffuse
			if diffuse > 0.0 {
				var percentageOfLight = geometry.Dot(normalVector, directionToLightVector)
				if percentageOfLight > 0.0 {
					totalRayColor = totalRayColor.Add(lightColor.Scale(shade * diffuse * percentageOfLight).Mul(surfaceMaterial.Color))
				}
			}

			// Calculate the specular highlight: reflect the light direction
			// about the normal and raise the view-alignment to shininess.
			var specular = surfaceMaterial.Specular
			var shininess = surfaceMaterial.Shininess
			if specular > 0.0 && shininess > 0 {
				var reflectedDirection = directionToLightVector.Sub(normalVector.Scale(2.0 * geometry.Dot(directionToLightVector, normalVector))).ToUnit()
				var percentageOfLight = geometry.Dot(rayVector, reflectedDirection.ToVector())
				if percentageOfLight > 0.0 {
					totalRayColor = totalRayColor.Add(lightColor.Scale(shade * specular * float32(math.Pow(float64(percentageOfLight), float64(shininess)))))
				}
			}
		}
	}

	return TraceResult{
		Color:    totalRayColor,
		Distance: nearestDistance,
	}
}
// internalObject pairs a scene object with its position in Scene.items and
// a flag saying whether it is a light source (lights are returned directly
// when hit instead of being shaded).
type internalObject struct {
	index   int
	object  SceneObject
	isLight bool
}

// internalLight pairs a light with its position in Scene.items so shadow
// rays can skip the light itself during occlusion tests.
type internalLight struct {
	index int
	light *SceneLight
}
type TraceResult struct {
Color color.ColorRGB
Distance float32
} | scene/Scene.go | 0.722527 | 0.617541 | Scene.go | starcoder |
package arraymap
import (
"encoding/json"
"sync"
)
// ArrayMap is an insertion-ordered map: parallel key/value slices provide
// positional access while an index map gives O(1) lookup. Removal uses a
// swap-with-last trick, so it is O(1) but does not preserve order.
// The embedded RWMutex is NOT used by the methods themselves; callers must
// call Lock/RLock explicitly when sharing an ArrayMap across goroutines.
type ArrayMap struct {
	positions map[interface{}]int // key -> index into keys/values
	keys      []interface{}
	values    []interface{}
	rwl       sync.RWMutex
}

// NewArrayMap returns an empty, ready-to-use ArrayMap.
func NewArrayMap() *ArrayMap {
	return &ArrayMap{
		positions: make(map[interface{}]int),
		keys:      make([]interface{}, 0),
		values:    make([]interface{}, 0),
	}
}

// Len reports the number of stored key/value pairs.
func (a *ArrayMap) Len() int {
	return len(a.keys)
}

// Add inserts value under key. If the key already exists, its value is
// replaced and the previous value is returned; otherwise nil is returned.
func (a *ArrayMap) Add(key, value interface{}) (oldValue interface{}) {
	if p, existed := a.positions[key]; existed {
		oldValue = a.values[p]
		a.values[p] = value
		return
	}
	a.keys = append(a.keys, key)
	a.values = append(a.values, value)
	a.positions[key] = len(a.keys) - 1
	return
}

// GetKeyAt returns the key at position i; panics if i is out of range.
func (a *ArrayMap) GetKeyAt(i int) interface{} {
	return a.keys[i]
}

// GetValueAt returns the value at position i; panics if i is out of range.
func (a *ArrayMap) GetValueAt(i int) interface{} {
	return a.values[i]
}

// GetValueOf returns the value stored under key.
// NOTE(review): a missing key makes the position lookup yield 0, so this
// silently returns the value at index 0 (or panics when the map is empty).
// Call Has first when the key may be absent.
func (a *ArrayMap) GetValueOf(key interface{}) interface{} {
	return a.values[a.positions[key]]
}

// Has reports whether key is present.
func (a *ArrayMap) Has(key interface{}) bool {
	_, existed := a.positions[key]
	return existed
}

// removeAt deletes the element at position i by swapping it with the last
// element and shrinking the slices, keeping removal O(1).
func (a *ArrayMap) removeAt(i int) {
	last := len(a.keys) - 1
	removingKey, lastKey := a.keys[i], a.keys[last]
	// Move the last element into the vacated slot.
	a.keys[i], a.keys[last] = a.keys[last], a.keys[i]
	a.values[i], a.values[last] = a.values[last], a.values[i]
	// The moved element now lives at position i.
	a.positions[lastKey] = i
	// Drop the removed element.
	a.keys = a.keys[:last]
	a.values = a.values[:last]
	delete(a.positions, removingKey)
}

// Remove deletes key and reports whether it was present.
func (a *ArrayMap) Remove(key interface{}) bool {
	if p, existed := a.positions[key]; existed {
		a.removeAt(p)
		return true
	}
	return false
}

// RemoveAll clears the map while retaining the allocated capacity of the
// underlying slices and map buckets for reuse.
func (a *ArrayMap) RemoveAll() {
	for k := range a.positions {
		delete(a.positions, k)
	}
	a.keys = a.keys[:0]
	a.values = a.values[:0]
}

// Lock acquires the write lock; pair with Unlock.
func (a *ArrayMap) Lock() {
	a.rwl.Lock()
}

// Unlock releases the write lock.
func (a *ArrayMap) Unlock() {
	a.rwl.Unlock()
}

// RLock acquires the read lock; pair with RUnlock.
func (a *ArrayMap) RLock() {
	a.rwl.RLock()
}

// RUnlock releases the read lock.
func (a *ArrayMap) RUnlock() {
	a.rwl.RUnlock()
}

// Values exposes the internal value slice in insertion (post-removal) order.
// Mutating the returned slice mutates the map's storage.
func (a *ArrayMap) Values() []interface{} {
	return a.values
}

// MarshalJSON encodes only the values, as a JSON array.
func (a *ArrayMap) MarshalJSON() ([]byte, error) {
	return json.Marshal(a.values)
}
package webgraph
import "errors"
// Key: used for conversion between key and index into an array
// Incoming: key of the source node where the arrow comes from and it's weight
type Vertex struct {
Key string
Incoming map[string]float64
}
// To make a graph first we gradually fill Keys by using AddNode. Keys hashes
// strings and gives us integer indexes. Then when we have all of them Fixate
// allocates Nodes. Each node is a Vertex which knows its original Key and all
// its neighbourly Incoming nodes.
// After Fixate we use AddArrow to add directed connections. The Incoming part
// of each node is like an adjency list. Elements of OutgoingCount will be
// incremented when an outgoing arrow is added by AddArrow.
// CalculateDefaultWeights will then fixate the values in OutgoingCount.
type Graph struct {
Keys map[string]int
Nodes []Vertex
OutgoingCount []int
}
// New returns a webgraph. Use expectedNodeCount to hint at the total no. of
// nodes it will hold.
func New(expectedNodeCount int) (g *Graph) {
g = new(Graph)
g.Keys = make(map[string]int, expectedNodeCount)
return
}
// AddNode can only be used before Fixate. It checks if it already has a
// node with key if so it returns an error, if not it adds it.
func (g *Graph) AddNode(key string) error {
if _, present := g.Keys[key]; present {
return errors.New("AddNode: Node already present")
}
g.Keys[key] = len(g.Keys)
return nil
}
// Fixate uses the Keys map to learn the length necessary for Nodes and
// fills in the Key part of each Vertex in the Nodes slice
func (g *Graph) Fixate() {
l := len(g.Keys)
g.OutgoingCount = make([]int, l)
g.Nodes = make([]Vertex, l)
for key, index := range g.Keys {
g.Nodes[index].Key = key
g.Nodes[index].Incoming = make(map[string]float64)
}
}
// Returns the number of nodes added before the call to Fixate
func (g *Graph) FixedLength() int {
return len(g.Nodes)
}
// After Fixate one can use this function to translate string keys to indices
func (g *Graph) Key2idx(key string) (idx int) {
idx = g.Keys[key]
return
}
// After Fixate one can use this function to translate indices to string keys
func (g *Graph) Idx2key(idx int) (key string) {
key = g.Nodes[idx].Key
return
}
// AddArrow adds an arrow from 'k2' to 'k1' if they are both present nodes
// and if the arrow doesn't already exist. The arrow's weight is left at 0
// until CalculateDefaultWeights fills it in.
func (g *Graph) AddArrow(k2, k1 string) error {
	k1Idx, ok := g.Keys[k1]
	if !ok {
		return errors.New("AddArrow: Could not find 'k1' node")
	}
	k2Idx, ok := g.Keys[k2]
	if !ok {
		return errors.New("AddArrow: Could not find 'k2' node")
	}
	incoming := g.Nodes[k1Idx].Incoming
	if _, exists := incoming[k2]; exists {
		return errors.New("AddArrow: Arrow already in place")
	}
	incoming[k2] = 0.0
	g.OutgoingCount[k2Idx]++
	return nil
}
// In a default webgraph the PageRank is equally divided over the outgoing
// links. So one of n outgoing arrows has weight 1/n. Dangling nodes (no
// outgoing arrows) first receive arrows to every node so their rank is
// spread over the whole graph.
func (g *Graph) CalculateDefaultWeights() error {
	tmp := make([]int, len(g.OutgoingCount)) // we need a copy of OutgoingCount
	if len(g.OutgoingCount) != copy(tmp, g.OutgoingCount) {
		return errors.New("CalculateDefaultWeights: Could not copy all of OutgoingCount")
	}
	// distribute PageRank to all nodes if there are no outgoing arrows
	for j := range tmp {
		if tmp[j] == 0 {
			// and add arrows to all nodes from g.Idx2key(j)
			from := g.Idx2key(j)
			for to := range g.Keys {
				// Both endpoints exist and node j had no outgoing arrows, so
				// AddArrow should never fail here — but propagate instead of
				// silently discarding the error, as the original did.
				if err := g.AddArrow(from, to); err != nil {
					return err
				}
			}
		}
	}
	// set the weight: each incoming arrow from k weighs 1/outdegree(k)
	for i := range g.Nodes {
		for k := range g.Nodes[i].Incoming {
			g.Nodes[i].Incoming[k] = 1.0 / (float64(g.OutgoingCount[g.Key2idx(k)]))
		}
	}
	return nil
}
// Dampen applies the standard PageRank damping (factor 0.85) to weight w,
// adding the teleportation term (1-d)/N.
func (g *Graph) Dampen(w float64) float64 {
	const dampingFactor = 0.85
	teleport := (1.0 - dampingFactor) / float64(g.FixedLength())
	return dampingFactor*w + teleport
}
// IncomingWeightsVector gives a row of the Google matrix.
// Takes a nodekey 'key' and finds the corresponding incoming node weights,
// which it puts in the correct positions of an otherwise empty vector; every
// entry is then dampened. Gives an error (and a nil vector) if 'key' has no
// node associated with it.
func (g *Graph) IncomingWeightsVector(key string) ([]float64, error) {
	idx, present := g.Keys[key]
	if !present {
		return nil, errors.New("IncomingWeightsVector: Could not find node with given key")
	}
	wvec := make([]float64, g.FixedLength())
	for k, w := range g.Nodes[idx].Incoming {
		wvec[g.Keys[k]] = w
	}
	for i := range wvec {
		wvec[i] = g.Dampen(wvec[i])
	}
	return wvec, nil
}
// Multiply treats the webgraph as a square matrix which will be right multiplied
// by a vector 'invec'. It returns an error when invec doesn't correspond to
// the size of the square matrix (g.FixedLength()).
func (g *Graph) Multiply(invec []float64) (outvec []float64, err error) {
if len(invec) != g.FixedLength() {
err = errors.New("Multiply: Input vector not the right size")
return
}
outvec = make([]float64, g.FixedLength())
for i := 0; i < len(outvec); i++ {
weights, werr := g.IncomingWeightsVector(g.Idx2key(i)) // row of the matrix
if werr != nil {
outvec = nil
err = werr
return
}
outvec[i] = 0.0
for j := 0; j < len(invec); j++ {
outvec[i] += invec[j] * weights[j]
}
}
return
} | pkg/internal/webrank/webgraph.go | 0.691393 | 0.566678 | webgraph.go | starcoder |
package solver2
import (
"errors"
"github.com/Spi1y/tsp-solver/solver2/iterator"
"github.com/Spi1y/tsp-solver/solver2/tasks"
"github.com/Spi1y/tsp-solver/solver2/types"
)
// Solver is a TSP solver object. It is used to set a distance matrix and start
// calculations
type Solver struct {
	// RecursiveThreshold: when this many or fewer nodes remain unvisited,
	// solveTask switches to exhaustive recursion instead of queueing tasks.
	RecursiveThreshold types.Index
	// Distance matrix
	matrix [][]types.Distance
	// Iterator (see package docs)
	iterator *iterator.Iterator
	// Tasks queue
	taskQueue *tasks.Queue
	// Temporary buffer to optimize normalization (column minima scratch)
	buffer []types.Distance
	// Current best solution; bestSolutionDistance == 0 means "none found yet"
	bestSolution         []types.Index
	bestSolutionDistance types.Distance
}
// Solve solves the TSP problem with a given distance matrix.
// Solve solves the TSP problem with a given distance matrix. The matrix must
// be non-empty and square; node 0 is the tour's start and end point. It
// returns the best tour found and its total distance.
func (s *Solver) Solve(m [][]types.Distance) ([]types.Index, types.Distance, error) {
	size := len(m)
	if size == 0 {
		return nil, 0, errors.New("Distance matrix is empty")
	}
	for i := range m {
		if len(m[i]) != size {
			return nil, 0, errors.New("Distance matrix is not square")
		}
	}

	// Reset all per-run state.
	s.matrix = m
	s.bestSolution = []types.Index{}
	// 0 is the "no solution yet" sentinel relied on by newSolutionFound.
	s.bestSolutionDistance = 0
	s.buffer = make([]types.Distance, size)
	s.taskQueue = tasks.NewHeapQueue()
	s.iterator = &iterator.Iterator{}
	s.iterator.Init(types.Index(size))

	// Scratch slice reused by every solveTask call; a task can spawn at most
	// `size` children.
	newTasks := make([]tasks.Task, size)

	// Seed the branch-and-bound search with the path containing only node 0.
	rootTask := tasks.Task{
		Path:     []types.Index{0},
		Distance: 0,
		Estimate: 0,
	}
	s.taskQueue.Insert([]tasks.Task{rootTask})

	// Pop the most promising task, expand it, queue its children. The loop
	// ends when PopFirst errors — presumably signalling an empty queue;
	// TODO confirm other error kinds are not silently swallowed here.
	for task, err := s.taskQueue.PopFirst(); err == nil; task, err = s.taskQueue.PopFirst() {
		count, err := s.solveTask(task, newTasks)
		if err != nil {
			return nil, 0, err
		}

		s.taskQueue.Insert(newTasks[:count])
	}

	return s.bestSolution, s.bestSolutionDistance, nil
}
// solveTask expands one branch-and-bound task: it either completes the tour
// (few enough nodes left, or a single final node) and reports the solution,
// or writes one child task per possible next node into the caller-provided
// newTasks scratch slice. It returns how many child tasks were written.
func (s *Solver) solveTask(t tasks.Task, newTasks []tasks.Task) (int, error) {
	// TODO - try aggressive approach with full path first
	err := s.iterator.SetPath(t.Path)
	if err != nil {
		return 0, err
	}

	nextNodes := s.iterator.NodesToVisit()
	rows := s.iterator.RowsToIterate()
	currNode := t.Path[len(t.Path)-1]
	nodesLeft := len(nextNodes)

	if nodesLeft <= int(s.RecursiveThreshold) {
		// Few nodes remain: finish this branch exhaustively.
		tailpath, taildistance := s.solveRecursively(currNode, nextNodes)
		path := make([]types.Index, len(t.Path), len(t.Path)+len(tailpath))
		copy(path, t.Path)
		path = append(path, tailpath...)
		distance := t.Distance + taildistance
		s.newSolutionFound(path, distance)
		// NOTE(review): reassigning the newTasks parameter is a no-op for the
		// caller — the returned count of 0 is what signals "no children".
		newTasks = newTasks[:0]
		return 0, nil
	}

	if nodesLeft == 1 {
		// Final node, calculating return distance to root node
		// and notifying solver about found solution
		finalNode := nextNodes[0]
		path := make([]types.Index, len(t.Path), len(t.Path)+2)
		copy(path, t.Path)
		path = append(path, finalNode, 0)
		distance := t.Distance + s.matrix[currNode][finalNode] + s.matrix[finalNode][0]
		s.newSolutionFound(path, distance)
		newTasks = newTasks[:0]
		return 0, nil
	}

	// One backing array holds every child path to avoid per-child allocations.
	newPathLen := len(t.Path) + 1
	pathsSlice := make([]types.Index, nodesLeft*newPathLen)

	for i, nextNode := range nextNodes {
		// Lower-bound estimate via matrix reduction: sum of row minima plus
		// the column minima remaining after row reduction (kept in s.buffer).
		var estimate types.Distance
		cols, err := s.iterator.ColsToIterate(nextNode)
		if err != nil {
			return 0, err
		}

		for rowIndex, row := range rows {
			rowSlice := s.matrix[row]
			// NOTE(review): min starts from column 0 of this row, which may
			// not be among `cols`; presumably intentional because every tour
			// must return to node 0 — confirm against the iterator contract.
			min := rowSlice[0]
			var val types.Distance
			// First pass to calculate row minimum
			for _, col := range cols {
				if row == col {
					continue
				}

				// NOTE(review): this := shadows the outer `val` declared
				// above; harmless but confusing.
				val := rowSlice[col]
				if min > val {
					min = val
				}
			}
			estimate += min

			// Second pass to update column minimums in the buffer
			if rowIndex == 0 {
				// Fast path for a first row
				for colIndex, col := range cols {
					if row == col {
						continue
					}

					// First row, minimum values are set without comparison
					s.buffer[colIndex] = rowSlice[col] - min
				}
				continue
			}

			// Normal path for other rows
			for colIndex, col := range cols {
				if row == col {
					continue
				}

				val = rowSlice[col] - min
				// Values are updated as needed
				if s.buffer[colIndex] > val {
					s.buffer[colIndex] = val
				}
			}
		}

		// Final pass on buffer to sum column minimums
		for colIndex := range cols {
			estimate += s.buffer[colIndex]
		}

		// Child path = parent path + nextNode, carved out of pathsSlice.
		path := pathsSlice[i*newPathLen : (i+1)*newPathLen]
		copy(path, t.Path)
		path[newPathLen-1] = nextNode
		distance := t.Distance + s.matrix[currNode][nextNode]

		newTasks[i].Path = path
		newTasks[i].Distance = distance
		newTasks[i].Estimate = distance + estimate
	}

	return nodesLeft, nil
}
func (s *Solver) newSolutionFound(path []types.Index, distance types.Distance) {
if (s.bestSolutionDistance != 0) && (distance >= s.bestSolutionDistance) {
return
}
s.bestSolution = path
s.bestSolutionDistance = distance
s.taskQueue.TrimTail(distance)
} | solver2/solver.go | 0.64646 | 0.490175 | solver.go | starcoder |
package gl
import (
"fmt"
"image"
"image/draw"
"strings"
gl "github.com/go-gl/gl/v3.1/gles2"
fyne "github.com/wrzfeijianshen/fyne2"
"github.com/wrzfeijianshen/fyne2/canvas"
"github.com/wrzfeijianshen/fyne2/theme"
)
// Buffer represents a GL buffer
type Buffer uint32

// Program represents a compiled GL program
type Program uint32

// Texture represents an uploaded GL texture
type Texture uint32

// NoTexture is the zero value for a Texture
var NoTexture = Texture(0)

// textureFilterToGL maps canvas.ImageScale values (used as indices) to the
// matching GL filter constants.
var textureFilterToGL = []int32{gl.LINEAR, gl.NEAREST}
// newTexture creates and binds a new 2D texture configured with the given
// scaling filter (falling back to smooth filtering for out-of-range values)
// and clamp-to-edge wrapping. The caller uploads pixel data afterwards.
func newTexture(textureFilter canvas.ImageScale) Texture {
	var texture uint32

	if int(textureFilter) >= len(textureFilterToGL) {
		fyne.LogError(fmt.Sprintf("Invalid canvas.ImageScale value (%d), using canvas.ImageScaleSmooth as default value", textureFilter), nil)
		textureFilter = canvas.ImageScaleSmooth
	}

	gl.GenTextures(1, &texture)
	logError()
	gl.ActiveTexture(gl.TEXTURE0)
	gl.BindTexture(gl.TEXTURE_2D, texture)
	logError()
	gl.TexParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, textureFilterToGL[textureFilter])
	gl.TexParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, textureFilterToGL[textureFilter])
	gl.TexParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE)
	gl.TexParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE)
	logError()

	return Texture(texture)
}
// imgToTexture uploads img as a new GL texture using textureFilter for the
// min/mag filtering mode. Uniform images become a 1x1 texture; RGBA images
// are uploaded directly (an empty pixel buffer yields texture 0); any other
// image type is first converted to RGBA and re-dispatched.
func (p *glPainter) imgToTexture(img image.Image, textureFilter canvas.ImageScale) Texture {
	switch i := img.(type) {
	case *image.Uniform:
		texture := newTexture(textureFilter)
		r, g, b, a := i.RGBA()
		// RGBA() yields 16-bit channels; keep only the high byte.
		r8, g8, b8, a8 := uint8(r>>8), uint8(g>>8), uint8(b>>8), uint8(a>>8)
		data := []uint8{r8, g8, b8, a8}
		gl.TexImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA,
			gl.UNSIGNED_BYTE, gl.Ptr(data))
		logError()
		return texture
	case *image.RGBA:
		if len(i.Pix) == 0 { // image is empty
			return 0
		}

		texture := newTexture(textureFilter)
		gl.TexImage2D(gl.TEXTURE_2D, 0, gl.RGBA, int32(i.Rect.Size().X), int32(i.Rect.Size().Y),
			0, gl.RGBA, gl.UNSIGNED_BYTE, gl.Ptr(i.Pix))
		logError()
		return texture
	default:
		// Fallback: render into a fresh RGBA image, then recurse.
		rgba := image.NewRGBA(image.Rect(0, 0, img.Bounds().Dx(), img.Bounds().Dy()))
		draw.Draw(rgba, rgba.Rect, img, image.ZP, draw.Over)
		return p.imgToTexture(rgba, textureFilter)
	}
}
// SetOutputSize resizes the GL viewport to the given pixel dimensions.
func (p *glPainter) SetOutputSize(width, height int) {
	gl.Viewport(0, 0, int32(width), int32(height))
	logError()
}
// freeTexture deletes the GL texture cached for obj in the package-level
// `textures` map (declared elsewhere in this package) and removes the cache
// entry. It is a no-op when no texture is registered for obj.
func (p *glPainter) freeTexture(obj fyne.CanvasObject) {
	texture := textures[obj]
	if texture != 0 {
		tex := uint32(texture)
		gl.DeleteTextures(1, &tex)
		logError()
		delete(textures, obj)
	}
}
// glInit initialises the OpenGL ES bindings and sets the global state this
// painter relies on: depth testing off, blending on. An initialisation
// failure is only logged, leaving the painter unusable.
func glInit() {
	err := gl.Init()
	if err != nil {
		fyne.LogError("failed to initialise OpenGL", err)
		return
	}

	gl.Disable(gl.DEPTH_TEST)
	gl.Enable(gl.BLEND)
	logError()
}
// compileShader compiles GLSL source of the given shader type and returns
// the shader handle, or an error carrying the driver's info log when
// compilation fails. The source must be NUL-terminated (required by gl.Strs).
func compileShader(source string, shaderType uint32) (uint32, error) {
	shader := gl.CreateShader(shaderType)

	csources, free := gl.Strs(source)
	gl.ShaderSource(shader, 1, csources, nil)
	logError()
	free()
	gl.CompileShader(shader)
	logError()

	var status int32
	gl.GetShaderiv(shader, gl.COMPILE_STATUS, &status)
	if status == gl.FALSE {
		var logLength int32
		gl.GetShaderiv(shader, gl.INFO_LOG_LENGTH, &logLength)

		// Fetch the info log into a NUL-filled buffer of the reported size.
		info := strings.Repeat("\x00", int(logLength+1))
		gl.GetShaderInfoLog(shader, logLength, nil, gl.Str(info))

		return 0, fmt.Errorf("failed to compile %v: %v", source, info)
	}

	return shader, nil
}
const (
	// vertexShaderSource is a minimal GLSL ES 1.00 pass-through vertex
	// shader; the trailing NUL byte is required by gl.Strs.
	vertexShaderSource = `
#version 100
attribute vec3 vert;
attribute vec2 vertTexCoord;
varying highp vec2 fragTexCoord;
void main() {
fragTexCoord = vertTexCoord;
gl_Position = vec4(vert, 1);
}
` + "\x00"

	// fragmentShaderSource samples the bound texture at the interpolated
	// coordinate.
	fragmentShaderSource = `
#version 100
uniform sampler2D tex;
varying highp vec2 fragTexCoord;
void main() {
gl_FragColor = texture2D(tex, fragTexCoord);
}
` + "\x00"
)
// Init compiles the painter's vertex and fragment shaders and links them
// into the single GL program used for all drawing. It panics when either
// shader fails to compile.
// NOTE(review): the link status is never checked and the shader objects are
// never deleted after linking — confirm whether gl.GetProgramiv(LINK_STATUS)
// and gl.DeleteShader should be added here.
func (p *glPainter) Init() {
	vertexShader, err := compileShader(vertexShaderSource, gl.VERTEX_SHADER)
	if err != nil {
		panic(err)
	}
	fragmentShader, err := compileShader(fragmentShaderSource, gl.FRAGMENT_SHADER)
	if err != nil {
		panic(err)
	}

	prog := gl.CreateProgram()
	gl.AttachShader(prog, vertexShader)
	gl.AttachShader(prog, fragmentShader)
	gl.LinkProgram(prog)
	logError()

	p.program = Program(prog)
}
// glClearBuffer clears the colour and depth buffers to the current theme
// background colour, using the painter's shader program.
func (p *glPainter) glClearBuffer() {
	gl.UseProgram(uint32(p.program))
	logError()

	r, g, b, a := theme.BackgroundColor().RGBA()
	// color.Color.RGBA() returns 16-bit components in [0, 0xffff]; the
	// previous divisor 255*255 (= 65025) made fully saturated channels
	// overshoot 1.0 slightly.
	max16bit := float32(0xffff)
	gl.ClearColor(float32(r)/max16bit, float32(g)/max16bit, float32(b)/max16bit, float32(a)/max16bit)
	gl.Clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT)
	logError()
}
// glScissorOpen enables scissor testing restricted to the given rectangle
// (GL window coordinates).
func (p *glPainter) glScissorOpen(x, y, w, h int32) {
	gl.Scissor(x, y, w, h)
	gl.Enable(gl.SCISSOR_TEST)
	logError()
}
// glScissorClose disables scissor testing again.
func (p *glPainter) glScissorClose() {
	gl.Disable(gl.SCISSOR_TEST)
	logError()
}
// glCreateBuffer uploads interleaved vertex data (x, y, z, u, v per vertex —
// a 5-float stride) into a new VBO and wires up the shader's vert and
// vertTexCoord attributes.
func (p *glPainter) glCreateBuffer(points []float32) Buffer {
	var vbo uint32
	gl.GenBuffers(1, &vbo)
	logError()
	gl.BindBuffer(gl.ARRAY_BUFFER, vbo)
	logError()
	gl.BufferData(gl.ARRAY_BUFFER, 4*len(points), gl.Ptr(points), gl.STATIC_DRAW)
	logError()

	vertAttrib := uint32(gl.GetAttribLocation(uint32(p.program), gl.Str("vert\x00")))
	gl.EnableVertexAttribArray(vertAttrib)
	// 3 position floats at offset 0 of each 20-byte (5*4) stride.
	gl.VertexAttribPointer(vertAttrib, 3, gl.FLOAT, false, 5*4, gl.PtrOffset(0))
	logError()

	texCoordAttrib := uint32(gl.GetAttribLocation(uint32(p.program), gl.Str("vertTexCoord\x00")))
	gl.EnableVertexAttribArray(texCoordAttrib)
	// 2 texture-coordinate floats following the position.
	gl.VertexAttribPointer(texCoordAttrib, 2, gl.FLOAT, false, 5*4, gl.PtrOffset(3*4))
	logError()

	return Buffer(vbo)
}
// glFreeBuffer unbinds and deletes the given vertex buffer object.
func (p *glPainter) glFreeBuffer(vbo Buffer) {
	gl.BindBuffer(gl.ARRAY_BUFFER, 0)
	logError()
	buf := uint32(vbo)
	gl.DeleteBuffers(1, &buf)
	logError()
}
// glDrawTexture draws the currently-bound quad (4-vertex triangle strip)
// with the given texture. A non-1 alpha switches the blend function to a
// constant-alpha fade of the whole texture.
func (p *glPainter) glDrawTexture(texture Texture, alpha float32) {
	// here we have to choose between blending the image alpha or fading it...
	// TODO find a way to support both
	if alpha != 1.0 {
		gl.BlendColor(0, 0, 0, alpha)
		gl.BlendFunc(gl.CONSTANT_ALPHA, gl.ONE_MINUS_CONSTANT_ALPHA)
	} else {
		gl.BlendFunc(gl.ONE, gl.ONE_MINUS_SRC_ALPHA)
	}
	logError()

	gl.ActiveTexture(gl.TEXTURE0)
	gl.BindTexture(gl.TEXTURE_2D, uint32(texture))
	logError()

	gl.DrawArrays(gl.TRIANGLE_STRIP, 0, 4)
	logError()
}
// glCapture reads width x height RGBA pixels back from the front buffer into
// the caller-provided slice, which must hold at least width*height*4 bytes.
func (p *glPainter) glCapture(width, height int32, pixels *[]uint8) {
	gl.ReadBuffer(gl.FRONT)
	logError()
	gl.ReadPixels(0, 0, int32(width), int32(height), gl.RGBA, gl.UNSIGNED_BYTE, gl.Ptr(*pixels))
	logError()
}
func logError() {
logGLError(gl.GetError())
} | sdk/painter/gl/gl_es.go | 0.695028 | 0.406509 | gl_es.go | starcoder |
package phass
import "fmt"
/**
 * Equation
 */

// Equation represents the configuration for an equation used in a given
// assessment. This is represented by input parameters used in equation and a
// configuration.
type Equation struct {
	// in holds the named input parameters (e.g. "age", "gender").
	in map[string]float64
	// conf supplies the name, validators and calculator for this equation.
	conf *EquationConf
}
// NewEquation returns an Equationer built from the given input parameters
// and equation configuration.
func NewEquation(in InParams, conf *EquationConf) Equationer {
	e := &Equation{
		in:   in,
		conf: conf,
	}
	return e
}
// String returns the configured equation name.
func (e *Equation) String() string {
	return e.conf.Name
}
// In verifies if a given input parameter was provided and returns its value
// together with a presence flag.
func (e *Equation) In(k string) (float64, bool) {
	v, ok := e.in[k]
	return v, ok
}
// Validate runs every configured validator in order, stopping at the first
// one that reports an error and returning that validator's result. With no
// failing validator it reports (true, nil).
func (e *Equation) Validate() (bool, error) {
	for _, validate := range e.conf.Validators {
		ok, err := validate(e)
		if err != nil {
			return ok, err
		}
	}
	return true, nil
}
// Calc validates the input parameters and, when valid, evaluates the
// configured equation; otherwise it returns 0 and the validation error.
func (e *Equation) Calc() (float64, error) {
	// The original shadowed the receiver `e` with the error value inside the
	// if-statement (`if r, e := e.Validate()`); use distinct names so the
	// receiver stays visible throughout.
	if ok, err := e.Validate(); !ok {
		return 0.0, err
	}
	return e.conf.Calc(e), nil
}
// EquationConf represents a configuration for a given equation. It's
// represented by a name, a function that extract input parameters, a slice of
// validators methods, and a calculatator function that represents an equation.
type EquationConf struct {
	// Name identifies the equation (also used by Equation.String).
	Name string
	// Extract converts an arbitrary assessment object into InParams.
	Extract Extractor
	// Validators are run in order by Equation.Validate.
	Validators []Validator
	// Calc computes the equation's value from validated parameters.
	Calc Calculator
}
// NewEquationConf returns an EquationConf pointer assembled from a name, an
// extractor function, a slice of validators, and a calculator function.
func NewEquationConf(name string, e Extractor, v []Validator, eq Calculator) *EquationConf {
	conf := new(EquationConf)
	conf.Name = name
	conf.Extract = e
	conf.Validators = v
	conf.Calc = eq
	return conf
}
type (
	// InParams is a map of string to float values, represent input params for
	// an equation.
	InParams map[string]float64
	// Extractor is a function to extract input parameters, receives an
	// interface and returns input params for an equation.
	Extractor func(interface{}) InParams
	// Validator is a function to validate input parameters provided in an
	// equation, receives an equation pointer and returns a boolean and error.
	Validator func(*Equation) (bool, error)
	// Calculator is a function calculate the value based in a equation.
	Calculator func(*Equation) float64
)
// Equationer is an interface that wraps an equation.
// In function is used to verify a given input parameter.
// Validate function is used to ensure input parameters are valid.
// Calc function is used to return this equation value.
type Equationer interface {
	In(string) (float64, bool)
	Validate() (bool, error)
	Calc() (float64, error)
}
/**
 * Validation
 */

// ValidateGender returns a Validator that checks the equation's "gender"
// parameter against the expected value.
func ValidateGender(expect int) Validator {
	return func(e *Equation) (bool, error) { return validateGender(expect, e) }
}
// validateGender ensures that gender is set and matches the expected value.
func validateGender(expect int, e *Equation) (bool, error) {
	g, ok := e.In("gender")
	if !ok {
		return false, fmt.Errorf("Missing gender")
	}
	if int(g) != expect {
		return false, fmt.Errorf("Valid for gender %d", expect)
	}
	return true, nil
}
// ValidateAge returns a Validator that ensures the equation's "age"
// parameter lies between the lower and upper limits (inclusive).
func ValidateAge(lower, upper float64) Validator {
	return func(e *Equation) (bool, error) { return validateAge(lower, upper, e) }
}
// validateAge ensures that age is set and lies between the limits.
func validateAge(lower, upper float64, e *Equation) (bool, error) {
	age, ok := e.In("age")
	if !ok {
		return false, fmt.Errorf("Missing age measure")
	}
	if age < lower || age > upper {
		return false, fmt.Errorf("Valid for ages between %.0f and %.0f", lower, upper)
	}
	return true, nil
}
// ValidateMeasures returns a Validator that ensures every listed measure is
// present among the equation's inputs.
func ValidateMeasures(expect []string) Validator {
	return func(e *Equation) (bool, error) { return validateMeasures(expect, e) }
}
// validateMeasures ensures that every measure in expect is available,
// failing on the first missing one.
func validateMeasures(expect []string, e *Equation) (bool, error) {
	for _, name := range expect {
		if _, found := e.In(name); !found {
			return false, fmt.Errorf("Missing %s measure", name)
		}
	}
	return true, nil
}
/**
 * Classification
 */

// Classifier classifies value by locating the classification bin containing
// it (via classifierIndex) and mapping the bin index to a label through
// mapper. When no bin matches, or the bin has no label, it returns
// "No classification.".
func Classifier(value float64, classes map[int][2]float64, mapper map[int]string) string {
	label, found := mapper[classifierIndex(value, classes)]
	if !found {
		return "No classification."
	}
	return label
}
// classifierIndex returns the index of the classification bin containing
// value, where a bin [lo, hi) matches when lo <= value < hi, or -1 when no
// bin matches. All bins are scanned (no early exit), matching the original:
// with overlapping bins the winner depends on map iteration order.
func classifierIndex(value float64, classes map[int][2]float64) int {
	result := -1
	for index, bounds := range classes {
		if value >= bounds[0] && value < bounds[1] {
			result = index
		}
	}
	return result
}
package block
// emptyBlock is a Block implementation with no data; only its metadata is
// meaningful.
type emptyBlock struct {
	meta Metadata
}

// NewEmptyBlock creates an empty block with the given metadata.
func NewEmptyBlock(meta Metadata) Block {
	return &emptyBlock{meta: meta}
}

// Close is a no-op; an empty block holds no resources.
func (b *emptyBlock) Close() error { return nil }

// Info reports this block's type as BlockEmpty.
func (b *emptyBlock) Info() BlockInfo {
	return NewBlockInfo(BlockEmpty)
}

// WithMetadata returns a new empty block carrying the given metadata; the
// series metadata argument is ignored.
func (b *emptyBlock) WithMetadata(meta Metadata, _ []SeriesMeta) (Block, error) {
	return NewEmptyBlock(meta), nil
}

// Meta returns the block's metadata.
func (b *emptyBlock) Meta() Metadata {
	return b.meta
}

// StepIter returns an iterator that yields no steps but still reports the
// step count implied by the block's time bounds.
func (b *emptyBlock) StepIter() (StepIter, error) {
	return &emptyStepIter{steps: b.meta.Bounds.Steps()}, nil
}
// emptyStepIter is a StepIter over no data; Next is immediately false.
type emptyStepIter struct {
	steps int
}

func (it *emptyStepIter) Close()                   {}
func (it *emptyStepIter) Err() error               { return nil }
func (it *emptyStepIter) StepCount() int           { return it.steps }
func (it *emptyStepIter) SeriesMeta() []SeriesMeta { return []SeriesMeta{} }
func (it *emptyStepIter) Next() bool               { return false }
func (it *emptyStepIter) Current() Step            { return nil }
// SeriesIter returns an iterator that yields no series.
func (b *emptyBlock) SeriesIter() (SeriesIter, error) {
	return &emptySeriesIter{}, nil
}

// emptySeriesIter is a SeriesIter over no data; Next is immediately false.
type emptySeriesIter struct{}

func (it *emptySeriesIter) Close()                   {}
func (it *emptySeriesIter) Err() error               { return nil }
func (it *emptySeriesIter) SeriesCount() int         { return 0 }
func (it *emptySeriesIter) SeriesMeta() []SeriesMeta { return []SeriesMeta{} }
func (it *emptySeriesIter) Next() bool               { return false }
func (it *emptySeriesIter) Current() Series          { return Series{} }
// Unconsolidated returns the unconsolidated version for the block
func (b *emptyBlock) Unconsolidated() (UnconsolidatedBlock, error) {
	return &ucEmptyBlock{
		meta: b.meta,
	}, nil
}

// ucEmptyBlock is the unconsolidated counterpart of emptyBlock; it likewise
// carries only metadata.
type ucEmptyBlock struct {
	meta Metadata
}
// Close is a no-op; an empty unconsolidated block holds no resources.
func (b *ucEmptyBlock) Close() error { return nil }

// WithMetadata returns a new empty unconsolidated block carrying the given
// metadata; the series metadata argument is ignored.
func (b *ucEmptyBlock) WithMetadata(
	meta Metadata, _ []SeriesMeta) (UnconsolidatedBlock, error) {
	return &ucEmptyBlock{
		meta: meta,
	}, nil
}

// Meta returns the block's metadata.
func (b *ucEmptyBlock) Meta() Metadata {
	return b.meta
}

// Consolidate converts back to a (still empty) consolidated block.
func (b *ucEmptyBlock) Consolidate() (Block, error) {
	return NewEmptyBlock(b.meta), nil
}

// StepIter returns an iterator that yields no steps but still reports the
// step count implied by the block's time bounds.
func (b *ucEmptyBlock) StepIter() (UnconsolidatedStepIter, error) {
	return &ucEmptyStepIter{steps: b.meta.Bounds.Steps()}, nil
}
// ucEmptyStepIter is an UnconsolidatedStepIter over no data.
type ucEmptyStepIter struct{ steps int }

func (it *ucEmptyStepIter) Close()                       {}
func (it *ucEmptyStepIter) Err() error                   { return nil }
func (it *ucEmptyStepIter) StepCount() int               { return it.steps }
func (it *ucEmptyStepIter) SeriesMeta() []SeriesMeta     { return []SeriesMeta{} }
func (it *ucEmptyStepIter) Next() bool                   { return false }
func (it *ucEmptyStepIter) Current() UnconsolidatedStep  { return nil }
// SeriesIter returns an iterator that yields no series.
func (b *ucEmptyBlock) SeriesIter() (UnconsolidatedSeriesIter, error) {
	return &ucEmptySeriesIter{}, nil
}

// ucEmptySeriesIter is an UnconsolidatedSeriesIter over no data.
type ucEmptySeriesIter struct{}

func (it *ucEmptySeriesIter) Close()                   {}
func (it *ucEmptySeriesIter) Err() error               { return nil }
func (it *ucEmptySeriesIter) SeriesCount() int         { return 0 }
func (it *ucEmptySeriesIter) SeriesMeta() []SeriesMeta { return []SeriesMeta{} }
func (it *ucEmptySeriesIter) Next() bool               { return false }
func (it *ucEmptySeriesIter) Current() UnconsolidatedSeries { return UnconsolidatedSeries{} } | src/query/block/empty.go | 0.818628 | 0.62019 | empty.go | starcoder |
package day17
// CubeMap stores the points of all active cubes; a key's presence (with
// value true) marks that cube as active.
type CubeMap map[Point]bool

// Point is a cube coordinate able to enumerate its neighbours; implemented
// by Point3D and Point4D.
type Point interface {
	getAdjacent() []Point
}

// Point3D is an (x, y, z) coordinate for the 3D simulation.
type Point3D struct {
	x, y, z int
}

// Point4D is an (x, y, z, w) coordinate for the 4D simulation.
type Point4D struct {
	x, y, z, w int
}
// Runs the boot process for the input in 3D and returns the number of active cubes after boot is complete.
func RunBootProcess3D(input []string) int {
cubeMap := IntialiseCubeMap3D(input)
return runBootProcess(cubeMap)
}
// Runs the boot process for the input in 4D and returns the number of active cubes after boot is complete.
func RunBootProcess4D(input []string) int {
cubeMap := IntialiseCubeMap4D(input)
return runBootProcess(cubeMap)
}
// runBootProcess advances the given state through six cycles and returns the
// final number of active cubes.
func runBootProcess(cubeMap CubeMap) int {
	for cycle := 0; cycle < 6; cycle++ {
		cubeMap = runCycle(cubeMap)
	}
	return len(cubeMap)
}
// runCycle applies one simulation step: count active neighbours of every
// candidate point (only neighbours of currently-active cubes can change),
// then keep active cubes with 2-3 active neighbours and activate inactive
// ones with exactly 3.
func runCycle(cubeMap CubeMap) CubeMap {
	neighbourCounts := make(map[Point]int)
	for active := range cubeMap {
		for _, neighbour := range active.getAdjacent() {
			neighbourCounts[neighbour]++
		}
	}

	next := make(CubeMap)
	for p, count := range neighbourCounts {
		wasActive := cubeMap[p]
		switch {
		case wasActive && (count == 2 || count == 3):
			next[p] = true
		case !wasActive && count == 3:
			next[p] = true
		}
	}
	return next
}
// getAdjacent returns the 26 points neighbouring p in 3D: every offset in
// {-1,0,1}^3 except the zero offset.
func (p Point3D) getAdjacent() []Point {
	neighbours := make([]Point, 0, 26)
	for dx := -1; dx <= 1; dx++ {
		for dy := -1; dy <= 1; dy++ {
			for dz := -1; dz <= 1; dz++ {
				if dx == 0 && dy == 0 && dz == 0 {
					continue
				}
				neighbours = append(neighbours, Point3D{p.x + dx, p.y + dy, p.z + dz})
			}
		}
	}
	return neighbours
}
// getAdjacent returns the 80 points neighbouring p in 4D: every offset in
// {-1,0,1}^4 except the zero offset.
func (p Point4D) getAdjacent() []Point {
	neighbours := make([]Point, 0, 80)
	for dx := -1; dx <= 1; dx++ {
		for dy := -1; dy <= 1; dy++ {
			for dz := -1; dz <= 1; dz++ {
				for dw := -1; dw <= 1; dw++ {
					if dx == 0 && dy == 0 && dz == 0 && dw == 0 {
						continue
					}
					neighbours = append(neighbours, Point4D{p.x + dx, p.y + dy, p.z + dz, p.w + dw})
				}
			}
		}
	}
	return neighbours
}
// IntialiseCubeMap3D parses the puzzle input into a CubeMap: a '#' at row x,
// column y becomes an active cube at (x, y, 0).
func IntialiseCubeMap3D(input []string) CubeMap {
	active := make(CubeMap)
	for x, row := range input {
		for y, c := range row {
			if c != '#' {
				continue
			}
			active[Point3D{x, y, 0}] = true
		}
	}
	return active
}
func IntialiseCubeMap4D(input []string) CubeMap {
cubeMap := make(CubeMap)
for x, rowString := range input {
for y, value := range rowString {
if value == '#' {
cubeMap[Point4D{x, y, 0, 0}] = true
}
}
}
return cubeMap
} | day17/day17.go | 0.798737 | 0.468304 | day17.go | starcoder |
package heatMap
import (
"fmt"
"sort"
"github.com/go-graphite/carbonapi/expr/helper"
"github.com/go-graphite/carbonapi/expr/interfaces"
"github.com/go-graphite/carbonapi/expr/types"
"github.com/go-graphite/carbonapi/pkg/parser"
)
// heatMap implements the "heatMap" render function.
type heatMap struct {
	interfaces.FunctionBase
}

// GetOrder reports that this function imposes no ordering constraint.
func GetOrder() interfaces.Order {
	return interfaces.Any
}

// New registers the heatMap function under its name; the configuration
// string argument is unused.
func New(_ string) []interfaces.FunctionMetadata {
	return []interfaces.FunctionMetadata{{
		F:    &heatMap{},
		Name: "heatMap",
	}}
}
// Description returns the user-facing metadata for the heatMap function
// (signature, docs, and its single required seriesList parameter).
func (f *heatMap) Description() map[string]types.FunctionDescription {
	return map[string]types.FunctionDescription{
		"heatMap": {
			Description: "Assume seriesList has values N values in total: (a[1], a[2], ..., a[N]). Then heatMap(seriesList) has N-1 values in total: (a[2] - a[1], a[3] - a[2], ..., a[N] - a[N-1]).",
			Function:    "heatMap(seriesList)",
			Group:       "Transform",
			Module:      "graphite.render.functions",
			Name:        "heatMap",
			Params: []types.FunctionParam{
				{
					Name:     "seriesList",
					Required: true,
					Type:     types.SeriesList,
				},
			},
			Proxied: true,
		},
	}
}
// Do implements the heatMap render function: after sorting the series by
// magnitude, it emits one series per adjacent pair holding the pointwise
// difference curr - prev. An output point is absent when either input point
// is absent. With fewer than two input series there is nothing to
// difference, so an empty result is returned.
func (f *heatMap) Do(e parser.Expr, from, until int32, values map[parser.MetricRequest][]*types.MetricData) ([]*types.MetricData, error) {
	series, err := helper.GetSeriesArg(e.Args()[0], from, until, values)
	if err != nil {
		return nil, err
	}

	series = f.sortMetricData(series)
	seriesQty := len(series)
	// Guard: the original make(..., 0, seriesQty-1) panicked with a negative
	// capacity whenever GetSeriesArg returned zero series.
	if seriesQty < 2 {
		return []*types.MetricData{}, nil
	}
	result := make([]*types.MetricData, 0, seriesQty-1)

	for i := 1; i < seriesQty; i++ {
		curr, prev := series[i], series[i-1]
		if err := f.validateNeighbourSeries(curr, prev); err != nil {
			return nil, err
		}

		pointsQty := len(curr.Values)
		r := &types.MetricData{FetchResponse: types.FetchResponse{
			Name:      fmt.Sprintf("heatMap(%s,%s)", curr.Name, prev.Name),
			IsAbsent:  make([]bool, pointsQty),
			Values:    make([]float64, pointsQty),
			StartTime: curr.StartTime,
			StopTime:  curr.StopTime,
			StepTime:  curr.StepTime,
		}}

		for j := 0; j < pointsQty; j++ {
			r.IsAbsent[j] = curr.IsAbsent[j] || prev.IsAbsent[j]
			if !r.IsAbsent[j] {
				r.Values[j] = curr.Values[j] - prev.Values[j]
			}
		}

		result = append(result, r)
	}

	return result, nil
}
// sortMetricData stably sorts the series list in place by the sum of each
// series' first few commonly non-null values, returning the same slice. The
// ordering puts neighbouring series in magnitude order before pairwise
// differencing.
func (f *heatMap) sortMetricData(list []*types.MetricData) []*types.MetricData {
	// take 5 first not null values
	const points = 5
	// mate series with its weight (sum of first values)
	type metricDataWeighted struct {
		data   *types.MetricData
		weight float64
	}

	seriesQty := len(list)
	if seriesQty < 2 {
		return list
	}

	listWeighted := make([]metricDataWeighted, seriesQty)
	for j := 0; j < seriesQty; j++ {
		listWeighted[j].data = list[j]
	}

	pointsFound := 0
	valuesQty := len(list[0].Values)
	// Walk timestamps left to right; only positions where every series has a
	// non-null point contribute, stopping once `points` positions were used.
	for i := 0; i < valuesQty && pointsFound < points; i++ {
		// make sure that each series has current point not null
		absent := false
		for j := 0; j < seriesQty && !absent; j++ {
			absent = list[j].IsAbsent[i]
		}
		if absent {
			continue
		}

		// accumulate sum of first not-null values
		for j := 0; j < seriesQty; j++ {
			listWeighted[j].weight += list[j].Values[i]
		}
		pointsFound++
	}

	// sort series by its weight; with no usable points the input order is kept
	if pointsFound > 0 {
		sort.SliceStable(listWeighted, func(i, j int) bool {
			return listWeighted[i].weight < listWeighted[j].weight
		})
		for j := 0; j < seriesQty; j++ {
			list[j] = listWeighted[j].data
		}
	}

	return list
}
func (f *heatMap) validateNeighbourSeries(s1, s2 *types.MetricData) error {
if s1.StartTime != s2.StartTime {
return fmt.Errorf("StartTime differs: %d!=%d", s1.StartTime, s2.StartTime)
}
if s1.StopTime != s2.StopTime {
return fmt.Errorf("StartTime differs: %d!=%d", s1.StopTime, s2.StopTime)
}
if s1.StepTime != s2.StepTime {
return fmt.Errorf("StartTime differs: %d!=%d", s1.StepTime, s2.StepTime)
}
if len(s1.Values) != len(s2.Values) {
return fmt.Errorf("values quantity differs: %d!=%d", len(s1.Values), len(s2.Values))
}
for _, s := range []*types.MetricData{s1, s2} {
if len(s.IsAbsent) != len(s.Values) {
return fmt.Errorf("values and isAbsent quantities differ for %s: %d!=%d", s.Name, len(s.Values), len(s.IsAbsent))
}
}
return nil
} | expr/functions/heatMap/function.go | 0.675336 | 0.42185 | function.go | starcoder |
package knowledge
import (
"fmt"
"github.com/clems4ever/go-graphkb/internal/query"
"github.com/clems4ever/go-graphkb/internal/utils"
)
// RelationDirection the direction of a relation
type RelationDirection int

const (
	// Left relation (pattern used a left arrow)
	Left RelationDirection = iota
	// Right relation (pattern used a right arrow)
	Right RelationDirection = iota
	// Either there is a relation but we don't know in which direction
	Either RelationDirection = iota
	// Both there is a relation in both directions
	Both RelationDirection = iota
)
// QueryNode represent a node and its constraints
type QueryNode struct {
	// Labels declared on the node pattern
	Labels []string
	// Constraint expressions
	Constraints AndOrExpression
	// The scopes this node belongs to (MATCH or WHERE)
	Scopes map[Scope]struct{}
	// id is this node's index within QueryGraph.Nodes (set by PushNode).
	id int
}
// QueryRelation represent a relation and its constraints
type QueryRelation struct {
	// Labels declared on the relation pattern
	Labels []string
	// Constraint expressions
	Constraints AndOrExpression
	// LeftIdx and RightIdx are indices of the endpoint nodes in
	// QueryGraph.Nodes.
	LeftIdx  int
	RightIdx int
	// Direction of the arrow between the two endpoints
	Direction RelationDirection
	// The scopes this relations belongs to (MATCH or WHERE)
	Scopes map[Scope]struct{}
	// id is this relation's index within QueryGraph.Relations (set by
	// PushRelation).
	id int
}
// VariableType represent the type of a variable in the cypher query.
type VariableType int

const (
	// NodeType variable of type node
	NodeType VariableType = iota
	// RelationType variable of type relation
	RelationType VariableType = iota
)
// TypeAndIndex type and index of a variable from the cypher query; Index
// points into Nodes or Relations depending on Type.
type TypeAndIndex struct {
	Type  VariableType
	Index int
}
// PatternContext the context of the pattern pushed
type PatternContext int

const (
	// MatchContext the node or relation is coming from a MATCH clause
	MatchContext PatternContext = iota
	// WhereContext the node or relation is coming from a WHERE clause
	WhereContext PatternContext = iota
)
// Scope represent the context of the pattern and the ID. This is useful to
// know whether the pattern comes from the MATCH clause or a WHERE clause.
type Scope struct {
	Context PatternContext
	ID      int
}
// QueryGraph the representation of a query graph. This structure helps
// create the relations between nodes to facilitate SQL translation and
// projections.
type QueryGraph struct {
	Nodes     []QueryNode
	Relations []QueryRelation
	// VariablesIndex maps a cypher variable name to the node or relation it
	// is bound to.
	VariablesIndex map[string]TypeAndIndex
}
// NewQueryGraph creates an empty query graph ready to receive nodes and
// relations.
func NewQueryGraph() QueryGraph {
	var qg QueryGraph
	qg.Nodes = []QueryNode{}
	qg.Relations = []QueryRelation{}
	qg.VariablesIndex = map[string]TypeAndIndex{}
	return qg
}
// Clone returns a copy of the query graph with fresh Nodes and Relations
// slices and a fresh VariablesIndex map.
// NOTE(review): the copy is shallow one level down — the Scopes maps and
// Constraints inside each node/relation are still shared with the original;
// confirm callers never mutate those through a clone.
func (qg *QueryGraph) Clone() *QueryGraph {
	relationsCopy := make([]QueryRelation, len(qg.Relations))
	nodesCopy := make([]QueryNode, len(qg.Nodes))
	variableIndexCopy := make(map[string]TypeAndIndex)
	for k, v := range qg.VariablesIndex {
		variableIndexCopy[k] = v
	}
	copy(relationsCopy, qg.Relations)
	copy(nodesCopy, qg.Nodes)

	queryGraphClone := QueryGraph{
		Nodes:          nodesCopy,
		Relations:      relationsCopy,
		VariablesIndex: variableIndexCopy,
	}
	return &queryGraphClone
}
// PushNode push a node into the registry. When the pattern is bound to an
// already-registered variable, the existing node is reused (its labels must
// agree) and the new scope is added to it; otherwise a new node is appended
// and, when named, indexed under its variable. It returns the node, its
// index, and an error on type or label conflicts.
func (qg *QueryGraph) PushNode(q query.QueryNodePattern, scope Scope) (*QueryNode, int, error) {
	// If pattern comes with a variable name, search in the index if it does not already exist
	if q.Variable != "" {
		typeAndIndex, ok := qg.VariablesIndex[q.Variable]
		// If found, add the scope and return the node
		if ok {
			if typeAndIndex.Type != NodeType {
				return nil, -1, fmt.Errorf("Variable '%s' is assigned to a different type", q.Variable)
			}
			n := qg.Nodes[typeAndIndex.Index]
			// A nil label list means "no labels specified" and is always
			// compatible with the existing node.
			if !utils.AreStringSliceElementsEqual(n.Labels, q.Labels) && q.Labels != nil {
				return nil, -1, fmt.Errorf("Variable '%s' already defined with a different type", q.Variable)
			}
			// NOTE(review): n is a copy of the slice element; writing into
			// its Scopes map still updates the stored node (shared map), but
			// the returned *QueryNode points at the local copy, not at the
			// slice element — confirm callers don't rely on identity.
			n.Scopes[scope] = struct{}{}
			return &n, typeAndIndex.Index, nil
		}
	}

	newIdx := len(qg.Nodes)
	qn := QueryNode{Labels: q.Labels, Scopes: make(map[Scope]struct{}), id: newIdx}
	qn.Scopes[scope] = struct{}{}
	qg.Nodes = append(qg.Nodes, qn)

	if q.Variable != "" {
		qg.VariablesIndex[q.Variable] = TypeAndIndex{
			Type:  NodeType,
			Index: newIdx,
		}
	}
	return &qn, newIdx, nil
}
// PushRelation push a relation into the registry. If the relationship detail
// carries a variable already bound to a relation, that relation is reused
// (labels must match) and the scope is added to it; otherwise a new relation
// between the nodes at leftIdx and rightIdx is appended.
// Returns a pointer to the stored relation and its index in qg.Relations.
func (qg *QueryGraph) PushRelation(q query.QueryRelationshipPattern, leftIdx, rightIdx int, scope Scope) (*QueryRelation, int, error) {
	var varName string
	var labels []string
	if q.RelationshipDetail != nil {
		varName = q.RelationshipDetail.Variable
		labels = q.RelationshipDetail.Labels
	}
	// If pattern comes with a variable name, search in the index if it does not already exist
	if varName != "" {
		typeAndIndex, ok := qg.VariablesIndex[varName]
		// If found, returns the relation
		if ok {
			if typeAndIndex.Type != RelationType {
				return nil, -1, fmt.Errorf("Variable '%s' is assigned to a different type", varName)
			}
			// Pointer into the slice rather than a copy, so that scope (and any
			// caller mutation through the returned pointer) reaches the graph.
			r := &qg.Relations[typeAndIndex.Index]
			if !utils.AreStringSliceElementsEqual(r.Labels, labels) {
				return nil, -1, fmt.Errorf("Variable '%s' already defined with a different type", varName)
			}
			r.Scopes[scope] = struct{}{}
			return r, typeAndIndex.Index, nil
		}
	}
	// Validate both endpoints; reject negative indices too, which would
	// otherwise panic when the relation's nodes are later dereferenced.
	if leftIdx < 0 || leftIdx >= len(qg.Nodes) {
		return nil, -1, fmt.Errorf("Cannot push relation bound to an unexisting node")
	}
	if rightIdx < 0 || rightIdx >= len(qg.Nodes) {
		return nil, -1, fmt.Errorf("Cannot push relation bound to an unexisting node")
	}
	// The four arrow combinations map exhaustively onto the four directions,
	// so no error branch is needed here (the previous final 'else' was
	// unreachable).
	var direction RelationDirection
	switch {
	case q.LeftArrow && q.RightArrow:
		direction = Both
	case q.LeftArrow:
		direction = Left
	case q.RightArrow:
		direction = Right
	default:
		direction = Either
	}
	newIdx := len(qg.Relations)
	qr := QueryRelation{
		Labels:    labels,
		LeftIdx:   leftIdx,
		RightIdx:  rightIdx,
		Direction: direction,
		Scopes:    make(map[Scope]struct{}),
		id:        newIdx,
	}
	qr.Scopes[scope] = struct{}{}
	qg.Relations = append(qg.Relations, qr)
	if varName != "" {
		qg.VariablesIndex[varName] = TypeAndIndex{
			Type:  RelationType,
			Index: newIdx,
		}
	}
	// Return the address of the element stored in the slice, not of the
	// local value that was appended.
	return &qg.Relations[newIdx], newIdx, nil
}
// GetRelationsByNodeId get a node's relations: every relation whose left or
// right endpoint is the given node index. Pointers refer into qg.Relations.
func (qg *QueryGraph) GetRelationsByNodeId(nodeId int) []*QueryRelation {
	var matches []*QueryRelation
	for i := range qg.Relations {
		rel := &qg.Relations[i]
		if rel.LeftIdx == nodeId || rel.RightIdx == nodeId {
			matches = append(matches, rel)
		}
	}
	return matches
}
// GetNodesByRelation get nodes attached to a relation, returned in
// (left, right) order. Fails if either endpoint index is invalid.
func (qg *QueryGraph) GetNodesByRelation(relation *QueryRelation) (*QueryNode, *QueryNode, error) {
	left, err := qg.GetNodeByID(relation.LeftIdx)
	if err != nil {
		return nil, nil, err
	}
	right, err := qg.GetNodeByID(relation.RightIdx)
	if err != nil {
		return nil, nil, err
	}
	return left, right, nil
}
// FindVariable find a variable by its name, returning its type and the index
// of the node or relation it is bound to.
func (qg *QueryGraph) FindVariable(name string) (TypeAndIndex, error) {
	if v, ok := qg.VariablesIndex[name]; ok {
		return v, nil
	}
	return TypeAndIndex{}, fmt.Errorf("Unable to find variable: %s", name)
}
// GetNodeByID get a node by its id
func (qg *QueryGraph) GetNodeByID(idx int) (*QueryNode, error) {
if idx >= len(qg.Nodes) {
return nil, fmt.Errorf("Index provided to find node is invalid")
}
return &qg.Nodes[idx], nil
} | internal/knowledge/query_graph.go | 0.693473 | 0.402979 | query_graph.go | starcoder |
package types
import (
"crypto/md5"
"fmt"
"strings"
"time"
sdk "github.com/cosmos/cosmos-sdk/types"
)
const (
	// timeFrame is the length of one record-grouping window in seconds
	// (one day); dividing a unix timestamp by it yields days since epoch.
	timeFrame = 24 * 3600
)
// DataRecordHash is the hash key of the records time frame,
// an MD5 digest produced by GetDataRecordHash.
type DataRecordHash [16]byte
// NodeChannel holds information about the data channel of the DataNode
type NodeChannel struct {
	ID       string `json:"id,omitempty"` // id of the channel
	Variable string `json:"variable"`     // variable of the channel (ex. temperature, humidity)
}
// DataNode holds the configuration and the owner of the DataNode Device
type DataNode struct {
	ID       sdk.AccAddress   `json:"id,omitempty"` // id of the datanode
	Owner    sdk.AccAddress   `json:"owner"`        // account address that owns the DataNode
	Name     string           `json:"name"`         // name of the datanode (defaults to the ID's string form, see NewDataNode)
	Channels []NodeChannel    `json:"channels"`     // channel definition
	Records  []DataRecordHash `json:"records"`      // datarecords associated to this DataNode
}
// Record holds a single record from the DataNode device
type Record struct {
	TimeStamp uint32 `json:"t"` // timestamp in seconds since epoch
	Value     uint32 `json:"v"` // numeric value of the record
	Misc      string `json:"m"` // miscellaneous data for other non numeric records
}

// String implements fmt.Stringer for a single Record.
func (r Record) String() string {
	// Value is a uint32, so it must be formatted with %d; the previous %f
	// verb printed "%!f(uint32=...)" instead of the number (go vet flags this).
	return strings.TrimSpace(fmt.Sprintf(`
TimeStamp: %d, Value: %d, Misc: %s
`, r.TimeStamp, r.Value, r.Misc))
}
// DataRecord is a time frame package of records
type DataRecord struct {
	DataNode    sdk.AccAddress `json:"datanode"`  // datanode which push the records
	NodeChannel NodeChannel    `json:"channel"`   // channel within the datanode
	TimeFrame   int64          `json:"timeframe"` // timeframe of the datarecord (days since epoch, see NewDataRecord)
	Records     []Record       `json:"records"`   // records of the timerange
}
// NewDataNode returns a new DataNode with the ID, owned by the given account
// and named after the address's string representation.
func NewDataNode(address sdk.AccAddress, owner sdk.AccAddress) DataNode {
	node := DataNode{}
	node.ID = address
	node.Owner = owner
	node.Name = address.String()
	return node
}
// String implements fmt.Stringer for a DataNode (one field per line,
// surrounding whitespace trimmed).
func (d DataNode) String() string {
	return strings.TrimSpace(fmt.Sprintf(`
ID: %s
Owner: %s
Name: %s
`, d.ID, d.Owner, d.Name))
}
// NewDataRecord returns a new DataRecord with the DataNode and the NodeChannel and empty records set
func NewDataRecord(dataNode sdk.AccAddress, channel *NodeChannel, date int64) DataRecord {
	return DataRecord{
		DataNode:    dataNode,
		NodeChannel: *channel,
		TimeFrame:   date / timeFrame, // days since epoch
		Records:     []Record{},       // non-nil empty slice (JSON-encodes as [] rather than null)
	}
}
// GetActualDataRecordHash returns the hash key to be used for KVStore at actual time
func GetActualDataRecordHash(dataNode sdk.AccAddress, channel *NodeChannel) DataRecordHash {
	return GetDataRecordHash(dataNode, channel, time.Now().Unix())
}
// GetDataRecordHash returns the hash key to be used for KVStore.
// date may be given either in seconds since epoch or already in days.
func GetDataRecordHash(dataNode sdk.AccAddress, channel *NodeChannel, date int64) DataRecordHash {
	// Heuristic: anything above 1500000000 is assumed to be a unix-second
	// timestamp rather than a day count, and is converted to days first.
	if date > 1500000000 {
		date /= timeFrame
	}
	// Key concatenates address, channel id, variable and the daily time
	// frame, so all of a channel's records for one day share one hash.
	key := fmt.Sprintf("%s%s%s%d", dataNode.String(), channel.ID, channel.Variable, date)
	return md5.Sum([]byte(key))
}
// implement fmt.Stringer
func (r DataRecord) String() string {
return strings.TrimSpace(fmt.Sprintf(`
DataNode: %s
Channel: %s:%s
TimeFrame: %d
Records: %d
From: %d
To: %d
`, string(r.DataNode), r.NodeChannel.ID, r.NodeChannel.Variable, r.TimeFrame, len(r.Records), r.Records[0].TimeStamp, r.Records[len(r.Records)-1].TimeStamp))
} | x/datanode/types/types.go | 0.720958 | 0.452294 | types.go | starcoder |
package v1alpha2
import (
v1alpha2 "github.com/openfaas-incubator/ingress-operator/pkg/apis/openfaas/v1alpha2"
"k8s.io/apimachinery/pkg/api/errors"
"k8s.io/apimachinery/pkg/labels"
"k8s.io/client-go/tools/cache"
)
// FunctionIngressLister helps list FunctionIngresses.
type FunctionIngressLister interface {
	// List lists all FunctionIngresses in the indexer matching the selector.
	List(selector labels.Selector) (ret []*v1alpha2.FunctionIngress, err error)
	// FunctionIngresses returns an object that can list and get
	// FunctionIngresses scoped to the given namespace.
	FunctionIngresses(namespace string) FunctionIngressNamespaceLister
	FunctionIngressListerExpansion
}
// functionIngressLister implements the FunctionIngressLister interface
// on top of a cache.Indexer.
type functionIngressLister struct {
	indexer cache.Indexer // shared informer cache the lister reads from
}
// NewFunctionIngressLister returns a new FunctionIngressLister backed by
// the given indexer.
func NewFunctionIngressLister(indexer cache.Indexer) FunctionIngressLister {
	lister := functionIngressLister{indexer: indexer}
	return &lister
}
// List lists all FunctionIngresses in the indexer.
func (s *functionIngressLister) List(selector labels.Selector) (ret []*v1alpha2.FunctionIngress, err error) {
	collect := func(obj interface{}) {
		ret = append(ret, obj.(*v1alpha2.FunctionIngress))
	}
	err = cache.ListAll(s.indexer, selector, collect)
	return ret, err
}
// FunctionIngresses returns an object that can list and get FunctionIngresses.
func (s *functionIngressLister) FunctionIngresses(namespace string) FunctionIngressNamespaceLister {
	return functionIngressNamespaceLister{
		indexer:   s.indexer,
		namespace: namespace,
	}
}
// FunctionIngressNamespaceLister helps list and get FunctionIngresses
// within a single namespace.
type FunctionIngressNamespaceLister interface {
	// List lists all FunctionIngresses in the indexer for a given namespace.
	List(selector labels.Selector) (ret []*v1alpha2.FunctionIngress, err error)
	// Get retrieves the FunctionIngress from the indexer for a given namespace and name.
	Get(name string) (*v1alpha2.FunctionIngress, error)
	FunctionIngressNamespaceListerExpansion
}
// functionIngressNamespaceLister implements the FunctionIngressNamespaceLister
// interface.
type functionIngressNamespaceLister struct {
	indexer   cache.Indexer // shared informer cache the lister reads from
	namespace string        // namespace every List/Get call is restricted to
}
// List lists all FunctionIngresses in the indexer for a given namespace.
func (s functionIngressNamespaceLister) List(selector labels.Selector) (ret []*v1alpha2.FunctionIngress, err error) {
	collect := func(obj interface{}) {
		ret = append(ret, obj.(*v1alpha2.FunctionIngress))
	}
	err = cache.ListAllByNamespace(s.indexer, s.namespace, selector, collect)
	return ret, err
}
// Get retrieves the FunctionIngress from the indexer for a given namespace and name.
func (s functionIngressNamespaceLister) Get(name string) (*v1alpha2.FunctionIngress, error) {
obj, exists, err := s.indexer.GetByKey(s.namespace + "/" + name)
if err != nil {
return nil, err
}
if !exists {
return nil, errors.NewNotFound(v1alpha2.Resource("functioningress"), name)
}
return obj.(*v1alpha2.FunctionIngress), nil
} | pkg/client/listers/openfaas/v1alpha2/functioningress.go | 0.610686 | 0.456955 | functioningress.go | starcoder |
package types
// cypher-data\Numenera.txt
var NumeneraCyphers []Cypher = []Cypher {
Cypher{
Name: "<NAME>",
Level: "1d6",
Type: []string{
"Wearable: Gloves",
"Usable: Handles with powerful suction cups",
},
Effect: `Allows for automatic climbing of any
surface, even horizontal ones. Lasts for ten
minutes per cypher level.`,
},
Cypher{
Name: "Antivenom",
Level: "1d6 + 2",
Type: []string{
"Internal: Pill, ingestible liquid",
"Usable: Injector",
},
Effect: `Renders user immune to poisons of the
same level or lower for one hour per cypher
level and ends any such ongoing effects, if
any, already in the user’s system.`,
},
Cypher{
Name: "Attractor",
Level: "1d6 + 4",
Type: []string{
"Wearable: Glove of synth",
"Usable: Small handheld device",
},
Effect: `One unanchored item your size or smaller
within long range (very long range if the cypher
is level 8 or higher) is drawn immediately to
the device. This takes one round. The item has
no momentum when it arrives.`,
},
Cypher{
Name: "<NAME>",
Level: "1d6",
Type: []string{
"Usable: Crystal nodule affixed to a melee",
},
Effect: `For the next 28 hours, each time the
weapon the nodule is attached to strikes a
solid creature or object, it generates a burst
of energy that teleports the creature or object
struck an immediate distance in a random
direction (not up or down). The teleported
creature’s actions (including defense) are
hindered on its next turn (hindered by two
steps if the cypher is level 5 or higher).`,
},
Cypher{
Name: "<NAME>",
Level: "1d6",
Type: []string{
"Usable: Crystal nodule affixed to armor",
},
Effect: `For the next 28 hours, each time (but
not more than once per round) the wearer
of the armor the nodule is attached to is
struck hard enough to inflict damage, they
teleport an immediate distance in a random
direction (not up or down). Since the wearer
is prepared for this effect and their foe is
not, the wearer’s defenses are eased for
one round after they teleport (eased by two
steps if the cypher is level 5 or higher).`,
},
Cypher{
Name: "Catholicon",
Level: "1d6 + 2",
Type: []string{
"Internal: Pill, ingestible liquid",
"Usable: Injector",
},
Effect: `Cures any disease of the cypher level
or lower.`,
},
Cypher{
Name: "Catseye",
Level: "1d6",
Type: []string{
"Internal: Pill, ingestible liquid",
"Usable: Injector",
},
Effect: `Grants the ability to see in the dark for
five hours per cypher level.`,
},
Cypher{
Name: "<NAME>",
Level: "1d6",
Type: []string{
"Internal: Pill, ingestible liquid",
"Usable: Injector",
},
Effect: `After one hour, the sweat of the user
produces 1d6 doses of a valuable liquid (these
doses are not considered cyphers). They must
be used within one week. Effects vary:
01–04 Euphoric for 1d6 hours
05–08 Hallucinogenic for 1d6 hours
09–12 Stimulant for 1d6 hours
13–16 Depressant for 1d6 hours
17–20 Nutrient supplement
21–25 Antivenom
26–30 Cures disease
31–35 See in the dark for one hour
36–45 Restores a number of Might Pool
points equal to cypher level
46–55 Restores a number of Speed Pool
points equal to cypher level
56–65 Restores a number of Intellect Pool
points equal to cypher level
66–75 Increases Might Edge by 1 for one
hour
76–85 Increases Speed Edge by 1 for one
hour
86–95 Increases Intellect Edge by 1 for one
hour
96–00 Restores all Pools to full`,
},
Cypher{
Name: "Comprehension Graft",
Level: "1d6 + 1",
Type: []string{
"Usable: Small metallic disk",
},
Effect: `When applied to a creature’s head, the
disk immediately unleashes microfilaments
that enter the brain. Within five minutes,
the creature can understand the words of
a specific language keyed to the graft (two
languages if the cypher is level 5 or higher).
This is true even of creatures that do not
normally have a language. If the creature
could already understand the language,
the cypher has no effect. Once the graft
attaches, the effect is permanent, and
this device no longer counts against the
number of cyphers that a PC can bear.`,
},
Cypher{
Name: "Controlled Blinking Nodule",
Level: "1d6 + 2",
Type: []string{
"Usable: Crystal nodule affixed to armor",
},
Effect: `For the next 28 hours, each time the
wearer of the armor the nodule is attached to
is struck hard enough to inflict damage (but
no more than once per round), they teleport
to a spot they desire within immediate
range. Since the wearer is prepared for
this effect and their foe is not, the wearer’s
defenses are eased for one round after they
teleport (eased by two steps if the cypher is
level 6 or higher).`,
},
Cypher{
Name: "<NAME>",
Level: "1d6",
Type: []string{
"Internal: Pill, ingestible liquid",
"Wearable: Temporary tattoo, amulet,",
"Usable: Small handheld device, crystal",
},
Effect: `Tapping into the datasphere’s
knowledge, the user can learn the answer
to one question (two questions if the
cypher is level 4 or higher, three questions
if the cypher is level 6 or higher).`,
},
Cypher{
Name: "<NAME>",
Level: "1d6",
Type: []string{
"Usable: Crystal nodule affixed to a melee",
},
Effect: `For the next 28 hours, each time the
weapon the nodule is attached to strikes
a solid creature or object, the weapon
suddenly increases dramatically in weight,
causing the blow to inflict an additional 2
points of damage (3 points if the cypher is
level 4 or higher).`,
},
Cypher{
Name: "Detonation",
Level: "1d6 + 2",
Type: []string{
"Wearable: Wristband projector (long range)",
"Usable: Explosive device (thrown, short range)",
},
Effect: `Explodes in an immediate radius,
inflicting damage equal to the cypher level.
Roll for the type of damage:
01–10 Cell-disrupting (harms only flesh)
11–30 Corrosive
31–40 Electrical discharge
41–50 Heat drain (cold)
51–75 Fire
76–00 Shrapnel`,
},
Cypher{
Name: "Detonation (desiccating)",
Level: "1d6 + 2",
Type: []string{
"Wearable: Wristband projector (long range)",
"Usable: Explosive device or ceramic sphere",
},
Effect: `Bursts in an immediate radius, draining
moisture from everything within it. Living
creatures take damage equal to the cypher
level. Water in the area is vaporized.`,
},
Cypher{
Name: "Detonation (flash)",
Level: "1d6 + 2",
Type: []string{
"Wearable: Wristband projector (long range)",
"Usable: Explosive device or ceramic sphere",
},
Effect: `Bursts in an immediate radius, blinding
all within it for one minute (ten minutes if
the cypher is level 4 or higher).`,
},
Cypher{
Name: "Detonation (gravity)",
Level: "1d6 + 2",
Type: []string{
"Wearable: Wristband projector (long range)",
"Usable: Explosive device or ceramic sphere",
},
Effect: `Bursts in an immediate radius, inflicting
damage equal to the cypher level by
increasing gravity tremendously for one
second. All in the area are crushed to the
ground for one round and cannot take
physical actions.`,
},
Cypher{
Name: "Detonation (massive)",
Level: "1d6 + 2",
Type: []string{
"Wearable: Wristband projector (very long range)",
"Usable: Handheld projector (very long range)",
},
Effect: `Explodes in a short-range radius,
inflicting damage equal to the cypher level.
Roll for the type of damage:
01–10 Cell-disrupting (harms only flesh)
11–30 Corrosive
31–40 Electrical discharge
41–50 Heat drain (cold)
51–75 Fire
76–00 Shrapnel`,
},
Cypher{
Name: "Detonation (matter Disruption)",
Level: "1d6 + 4",
Type: []string{
"Wearable: Wristband projector (long range)",
"Usable: Explosive device or ceramic sphere",
},
Effect: `Explodes in an immediate radius,
releasing nanites that rearrange matter in
random ways. Inflicts damage equal to the
cypher level.`,
},
Cypher{
Name: "Detonation (pressure)",
Level: "1d6 + 2",
Type: []string{
"Wearable: Wristband projector (long range)",
"Usable: Explosive device (thrown, short range)",
},
Effect: `Explodes in an immediate radius,
inflicting impact damage equal to the
cypher level. Also moves unattended
objects out of the area if they weigh less
than 20 pounds (9 kg) per cypher level.`,
},
Cypher{
Name: "Detonation (singularity)",
Level: "10",
Type: []string{
"Usable: Explosive device or ceramic sphere",
},
Effect: `Explodes and creates a momentary
singularity that tears at the fabric of the
universe. Inflicts 20 points of damage to all
within short range, drawing them (or their
remains) together to immediate range (if
possible). Player characters in the radius
move one step down the damage track if
they fail a Might defense roll.`,
},
Cypher{
Name: "Detonation (sonic)",
Level: "1d6 + 2",
Type: []string{
"Wearable: Wristband projector (long range)",
"Usable: Explosive device or ceramic sphere",
},
Effect: `Explodes with terrifying sound,
deafening all in an immediate radius for
ten minutes per cypher level.`,
},
Cypher{
Name: "Detonation (spawn)",
Level: "1d6 + 2",
Type: []string{
"Wearable: Wristband projector (long range)",
"Usable: Explosive device or ceramic sphere",
},
Effect: `Bursts in an immediate radius, blinding
all within it for one minute and inflicting
damage equal to the cypher level. The
burst spawns 1d6 additional detonations;
on the next round, each additional
detonation flies to a random spot within
short range and explodes in an immediate
radius. Roll for the type of damage dealt by
all detonations:
01–10 Cell-disrupting (harms only flesh)
11–30 Corrosive
31–40 Electrical discharge
41–50 Heat drain (cold)
51–75 Fire
76–00 Shrapnel`,
},
Cypher{
Name: "Detonation (web)",
Level: "1d6 + 2",
Type: []string{
"Wearable: Wristband projector (long range)",
"Usable: Explosive device (thrown, short range)",
},
Effect: `Explodes in an immediate radius and
creates sticky strands of goo that last
1 hour. PCs caught in the area must use
a Might-based action to get out, with the
difficulty determined by the cypher level.
NPCs break free if their level is higher than
the cypher level.`,
},
Cypher{
Name: "<NAME>",
Level: "1d6",
Type: []string{
"Usable: Crystal nodule affixed to a melee",
},
Effect: `For the next 28 hours, each time the
weapon the nodule is attached to strikes
a solid creature or object, it generates a
burst of nanites that directly attack organic
cells. The affected target takes 1 additional
point of damage (2 points if the cypher is
level 4 or higher, 3 points if the cypher is
level 6 or higher) and loses its next action.`,
},
Cypher{
Name: "Eagleseye",
Level: "1d6",
Type: []string{
"Internal: Pill, ingestible liquid",
"Usable: Injector",
},
Effect: `Grants the ability to see ten times as far
as normal for one hour per cypher level.`,
},
Cypher{
Name: "<NAME>",
Level: "1d6 + 4",
Type: []string{
"Usable: Spray canister",
},
Effect: `An object sprayed by this cypher has
Armor against fire damage equal to the
cypher’s level for 28 hours.`,
},
Cypher{
Name: "<NAME>",
Level: "1d6",
Type: []string{
"Wearable: Belt, ring, bracelet",
"Usable: Handheld device",
},
Effect: `Creates an immobile plane of permeable
energy up to 20 feet by 20 feet (6 m by 6 m)
for one hour per cypher level. The plane
conforms to the space available. Flames
passing through the plane are extinguished.`,
},
Cypher{
Name: "<NAME>",
Level: "1d6 + 3",
Type: []string{
"Wearable: Belt, ring, bracelet",
"Usable: Handheld device",
},
Effect: `Creates an immobile cube composed of
six planes of solid force, each 30 feet (9 m)
to a side, for one hour. The planes conform
to the space available.`,
},
Cypher{
Name: "<NAME>",
Level: "1d6",
Type: []string{
"Usable: Crystal nodule affixed to armor",
},
Effect: `For the next 28 hours, the armor the nodule
is attached to is bolstered by a powerful force
field, adding 2 to the Armor it provides (adding
3 to the Armor if the cypher is level 5 or higher).`,
},
Cypher{
Name: "Force Screen Projector",
Level: "1d6 + 3",
Type: []string{
"Wearable: Belt, ring, bracelet",
"Usable: Handheld device",
},
Effect: `Creates an immobile plane of solid
force up to 20 feet by 20 feet (6 m by 6 m)
for one hour per cypher level. The plane
conforms to the space available.`,
},
Cypher{
Name: "Force Shield Projector",
Level: "1d6 + 3",
Type: []string{
"Internal: Subdermal injection",
"Wearable: Belt, ring, bracelet",
"Usable: Handheld device",
},
Effect: `Creates a shimmering energy shield
around the user for one hour, during which
time they gain +3 Armor (+4 Armor if the
cypher is level 5 or higher).`,
},
Cypher{
Name: "Friction-Reducing Gel",
Level: "1d6",
Type: []string{
"Usable: Spray canister",
},
Effect: `Sprayed across an area up to 10 feet (3
m) square, this gel makes things extremely
slippery. For one hour per cypher level,
movement tasks in the area are hindered
by three steps.`,
},
Cypher{
Name: "Frigid Wall Projector",
Level: "1d6 + 2",
Type: []string{
"Usable: Complex device",
},
Effect: `Creates a wall of supercooled air up
to 30 feet by 30 feet by 1 foot (9 m by 9 m
by 30 cm) that inflicts damage equal to
the cypher level on anything that passes
through it. The wall conforms to the space
available. It lasts for ten minutes.`,
},
Cypher{
Name: "Gas Bomb",
Level: "1d6 + 2",
Type: []string{
"Wearable: Wristband projector (long range)",
"Usable: Explosive device or ceramic sphere",
},
Effect: `Bursts in a poisonous cloud within an
immediate distance. The cloud lingers
for 1d6 rounds unless conditions dictate
otherwise. Effects vary:
01–10 Thick smoke: occludes sight while
the cloud lasts.
11–20 Choking gas: living creatures that
breathe lose their actions to choking
and coughing for a number of
rounds equal to the cypher level.
21–50 Poison gas: living creatures that
breathe suffer damage equal to the
cypher level.
51–60 Corrosive gas: everything suffers
damage equal to the cypher level.
61–65 Hallucinogenic gas: living creatures
that breathe lose their actions to
hallucinations and visions for a number
of rounds equal to the cypher level.
66–70 Nerve gas: living creatures that
breathe suffer Speed damage equal
to the cypher level.
71–80 Mind-numbing gas: living creatures
that breathe suffer Intellect damage
equal to the cypher level.
81–83 Fear gas: living creatures that
breathe and think flee in a random
direction in fear (or are paralyzed
with fear) for a number of rounds
equal to the cypher level.
84–86 Amnesia gas: living creatures that
breathe and think permanently lose
all memory of the last minute.
87–96 Sleep gas: living creatures that
breathe fall asleep for a number of
rounds equal to the cypher level or
until awoken by a violent action or
an extremely loud noise.
97–00 Rage gas: living creatures that
breathe and think make a melee
attack on the nearest creature and
continue to do so for a number of
rounds equal to the cypher level.`,
},
Cypher{
Name: "<NAME>",
Level: "1d6 + 3",
Type: []string{
"Internal: Subdermal injection",
"Wearable: Belt, boots, ring, bracelet",
"Usable: Small platform on which the user",
},
Effect: `For one hour, the user can float into the
air, moving vertically (but not horizontally
without some other action, such as pushing
along the ceiling) up to a short distance per
round. The user must weigh less than 50
pounds (22 kg) per level of the cypher.`,
},
Cypher{
Name: "Gravity-Nullifying Spray",
Level: "1d6 + 2",
Type: []string{
"Usable: Spray canister",
},
Effect: `A nonliving object up to the size of a human
(two humans if the cypher is level 6 or higher)
sprayed by this cypher floats 1d20 feet in the
air permanently and no longer has weight if
carried, though it needs to be strapped down.`,
},
Cypher{
Name: "<NAME>",
Level: "1d6",
Type: []string{
"Usable: Crystal nodule affixed to a melee weapon",
},
Effect: `For the next 28 hours, each time the weapon
the nodule is attached to strikes a solid
creature or object, it generates a burst of heat,
inflicting an additional 2 points of damage
(3 points if the cypher is level 4 or higher, 4
points if the cypher is level 6 or higher).`,
},
Cypher{
Name: "Hunter/seeker",
Level: "1d6",
Type: []string{
"Wearable: Arm- or shoulder-mounted launcher",
"Usable: Complex device, handheld device",
},
Effect: `With long-range movement, this intelligent
missile tracks and attacks a specified target
(target must be within sight when selected). If
it misses, it continues to attack one additional
time per cypher level until it hits. For example,
a level 4 hunter/seeker will attack a maximum
of five times. Different hunter/seekers have
different effects:
01–50 Inflicts 8 points of damage.
51–80 Bears a poisoned needle that inflicts
3 points of damage plus poison.
81–90 Explodes, inflicting 6 points of
damage to all within immediate range.
91–95 Shocks for 4 points of electricity
damage, and stuns for one round
per cypher level.
96–00 Covers target in sticky goo that
immediately hardens, holding them
fast until they break out with a
Might action (difficulty equal to the
cypher level + 2).`,
},
Cypher{
Name: "Image Projector",
Level: "1d6",
Type: []string{
"Wearable: Headband with device on forehead",
"Usable: Handheld device with glass panel",
},
Effect: `Projects one of the following immobile
images in the area described for one hour.
The image appears up to a close distance
away (long distance if the cypher level
is 4 or higher, very long distance if the
cypher level is 6 or higher). Scenes include
movement, sound, and smell.
01–20 Terrifying creature of an unknown
species, perhaps no longer alive in
the world (10-foot [3 m] cube)
21–40 Huge machine that obscures sight
(30-foot [9 m] cube)
41–50 Beautiful pastoral scene (50-foot
[15 m] cube)
51–60 Food that looks delicious but may
not be familiar (10-foot [3 m] cube)
61–80 Solid color that obscures sight
(50-foot [15 m] cube)
81–00 Incomprehensible scene that is
disorienting and strange (20-foot
[6 m] cube)`,
},
Cypher{
Name: "Inferno Wall Projector",
Level: "1d6 + 2",
Type: []string{
"Usable: Complex device",
},
Effect: `Creates a wall of extreme heat up to
30 feet by 30 feet by 1 foot (9 m by 9 m
by 30 cm) that inflicts damage equal to
the cypher level on anything that passes
through it. The wall conforms to the space
available. It lasts for ten minutes.`,
},
Cypher{
Name: "Infiltrator",
Level: "1d6",
Type: []string{
"Internal: Phases into eye, phases out when",
"Wearable: Adheres to temple and launches",
"Usable: Handheld device that launches",
},
Effect: `Tiny capsule launches and moves at
great speed, mapping and scanning an
unknown area. It moves 500 feet (150 m)
per level, scanning an area up to 50 feet
(15 m) per level away from it. It identifies
basic layout, creatures, and major energy
sources. Its movement is blocked by any
physical or energy barrier.`,
},
Cypher{
Name: "<NAME>",
Level: "1d6",
Type: []string{
"Usable: Handheld device",
},
Effect: `Small device expands into a humanoid
automaton that is roughly 2 feet (60 cm)
tall. Its level is equal to the cypher level and
it can understand the verbal commands
of the character who activates it. Once
the servant is activated, commanding it
is not an action. It can make attacks or
perform actions as ordered to the best of
its abilities, but it cannot speak.
The automaton has short-range movement
but never goes farther than long range
away from the character who activated it.
At the GM’s discretion, the servant might
have specialized knowledge, such as how
to operate a particular device. Otherwise,
it has no special knowledge. In any case,
the servant is not artificially intelligent or
capable of initiating action. It does only as
commanded.
The servant operates for one hour per cypher
level.`,
},
Cypher{
Name: "<NAME>",
Level: "1d6 + 3",
Type: []string{
"Usable: Handheld device",
},
Effect: `With the addition of water and air,
the small device expands into a simple
one-room structure with a door and a
transparent window (two rooms with
an internal door if the cypher is level 7
or higher). The structure is 10 feet by 10
feet by 20 feet (3 m by 3 m by 6 m). It is
made from a form of shapestone and is
permanent and immobile once created.`,
},
Cypher{
Name: "Intellect Enhancement",
Level: "1d6 + 2",
Type: []string{
"Internal: Pill, ingestible liquid",
"Wearable: Adhesive patch that activates when",
"Usable: Injector",
},
Effect: `Substance adds 1 to Intellect Edge for
one hour (or adds 2 if the cypher is level 5
or higher).`,
},
Cypher{
Name: "Invisibility Nodule",
Level: "1d6",
Type: []string{
"Usable: Crystal nodule affixed to armor",
},
Effect: `For the next ten hours per cypher level,
the armor the nodule is attached to is
invisible, making the wearer appear to be
unarmored.`,
},
Cypher{
Name: "Knowledge Enhancement",
Level: "1d6",
Type: []string{
"Internal: Pill, ingestible liquid",
"Wearable: Adhesive patch that activates when",
"Usable: Injector",
},
Effect: `For the next 28 hours, the character has
training in a predetermined skill (or two skills
if the cypher is level 5 or higher). Although the
skill could be anything (including something
specific to the operation of one device or
something similar), common skills include:
01–10 Melee attacks
11–20 Ranged attacks
21–40 Understanding numenera
(sometimes specific to one device)
41–50 Repairing (sometimes specific to
one device)
51–60 Crafting (usually specific to one
thing)
61–70 Persuasion
71–75 Healing
76–80 Speed defense
81–85 Intellect defense
86–90 Swimming
91–95 Riding
96–00 Sneaking`,
},
Cypher{
Name: "<NAME>",
Level: "1d6 + 2",
Type: []string{
"Usable: Complex device",
},
Effect: `Creates a wall of electric bolts up to
30 feet by 30 feet by 1 foot (9 m by 9 m
by 30 cm) that inflicts damage equal to
the cypher level on anything that passes
through it. The wall conforms to the space
available. It lasts for ten minutes.`,
},
Cypher{
Name: "<NAME>",
Level: "1d10",
Type: []string{
"Usable: Canister containing slime",
},
Effect: `Once released, this organic slime
dissolves 1 cubic foot of material each
round. After one round per cypher level,
the slime dies and becomes inert.`,
},
Cypher{
Name: "<NAME>",
Level: "1d6 + 2",
Type: []string{
"Internal: Pill",
"Wearable: Disk that adheres to forehead,",
"Usable: Injector",
},
Effect: `When activated, the cypher splits into
two pieces. One is affixed to a numenera
device and the other to a character. The
character can then use their mind to
control the device at long range, bidding it
to do anything it could do normally. Thus,
a device could be activated or deactivated,
and a vehicle could be piloted. The control
lasts for ten minutes per cypher level, and
once the device is chosen, it cannot be
changed.`,
},
Cypher{
Name: "<NAME>",
Level: "1d6 + 2",
Type: []string{
"Usable: Small sphere with a thick screw",
},
Effect: `The user throws this cypher at a target
within short range, and it drills into the
target for one round, inflicting damage
equal to the cypher level. If the target is
made of metal or wearing metal (such as
armor), the attack is eased.`,
},
Cypher{
Name: "<NAME>",
Level: "1d6 + 2",
Type: []string{
"Wearable: Gloves with metal plates",
"Usable: Small pyramid-shaped metallic device",
},
Effect: `Establishes a connection with one
metal object within short range that a
human could hold in one hand. After this
connection is established, the user can
move or manipulate the object anywhere
within short range (each movement or
manipulation is an action). For example,
the user could wield a weapon or drag a
helm affixed to a foe’s head to and fro. The
connection lasts for ten rounds per cypher
level.`,
},
Cypher{
Name: "<NAME>",
Level: "1d6 + 2",
Type: []string{
"Wearable: Gloves with metal plates",
"Usable: Small pyramid-shaped metallic device",
},
Effect: `For ten minutes per cypher level, metal
objects cannot come within immediate
range of the activated device. Metal items
already in the area when the device is
activated are slowly pushed out.`,
},
Cypher{
Name: "Memory Lenses",
Level: "1d6",
Type: []string{
"Wearable: Contact lenses, eyeglasses, or",
},
Effect: `Allows the wearer to mentally record
everything they see for thirty seconds
per cypher level and store the recording
permanently in their long-term memory.
This cypher is useful for watching someone
pick a specific lock, enter a complex code,
or do something else that happens quickly.`,
},
Cypher{
Name: "<NAME>",
Level: "1d6 + 2",
Type: []string{
"Usable: Complex metal and glass device",
},
Effect: `Two rounds after being activated, the
device creates an invisible field that fills an
area within short range and lasts for one
minute. The field scrambles the mental
processes of all thinking creatures. The
effect lasts as long as they remain in the
field and for 1d6 rounds after, although an
Intellect defense roll is allowed each round
to act normally (both in the field and after
leaving it). Each mental scrambler is keyed
to a specific effect. Roll for effect:
01–30 Victims cannot act.
31–40 Victims cannot speak.
41–50 Victims move slowly (immediate
range) and clumsily.
51–60 Victims cannot see or hear.
61–70 Victims lose all sense of direction,
depth, and proportion.
71–80 Victims do not recognize anyone
they know.
81–88 Victims suffer partial amnesia.
89–94 Victims suffer total amnesia.
95–98 Victims lose all inhibitions,
revealing secrets and performing
surprising actions.
99–00 Victims’ ethics are inverted.`,
},
Cypher{
Name: "<NAME>",
Level: "1d6 + 2",
Type: []string{
"Wearable: Wrist-mounted sprayer",
"Usable: Canister with hose",
},
Effect: `Produces a stream of foam that covers
an area about 3 feet by 3 feet (1 m by 1 m),
transforming any metal that it touches into
a substance as brittle as thin glass. The
foam affects metal to a depth of about 6
inches (15 cm).`,
},
Cypher{
Name: "Monoblade",
Level: "1d6 + 2",
Type: []string{
"Internal: Injection into fingertip",
"Wearable: Glove",
"Usable: Device similar to hilt",
},
Effect: `Produces a 6-inch (15 cm) blade that’s
the same level as the cypher. The blade cuts
through any material of a level lower than its
own. If used as a weapon, it is a light weapon
that ignores Armor of a level lower than its
own. The blade lasts for ten minutes.`,
},
Cypher{
Name: "Motion Sensor",
Level: "1d6 + 2",
Type: []string{
"Internal: Injection into spine",
"Wearable: Amulet",
"Usable: Disk that can be affixed to the floor or",
},
Effect: `Indicates when any movement occurs
within short range, or when large creatures
or objects move within long range (the
cypher distinguishes between the two).
It also indicates the number and size of
the creatures or objects in motion. Once
activated, it operates for one hour per
cypher level.`,
},
Cypher{
Name: "Personal Environment Field",
Level: "1d6 + 2",
Type: []string{
"Wearable: Belt, medallion, ring",
"Usable: Handheld device",
},
Effect: `Creates an aura of temperature and
atmosphere that will sustain a human
safely for 28 hours. The aura extends to 1
foot (30 cm) around the user (double that
radius if the cypher is level 7 or higher). It
does not protect against sudden flashes of
temperature change (such as from a heat
ray). A small number of these cyphers (1%)
accommodate the preferred environment
of a nonhuman, nonterrestrial creature.`,
},
Cypher{
Name: "Phase Changer",
Level: "1d6 + 1",
Type: []string{
"Wearable: Belt, medallion, ring",
"Usable: Handheld device",
},
Effect: `Puts the user out of phase for one
minute (two minutes if the cypher is level
6 or higher). During this time, the user can
pass through solid objects as though they
were entirely insubstantial, like a ghost.
They cannot make physical attacks or be
physically attacked.`,
},
Cypher{
Name: "Phase Disruptor",
Level: "1d6 + 2",
Type: []string{
"Usable: Complex device, plate that affixes to",
},
Effect: `Puts a portion of a physical structure (like
a wall or floor) out of phase for one hour. It
affects an area equal to one 5-foot (1.5 m)
cube per cypher level. While the area is out of
phase, creatures and objects can pass freely
through it as if it were not there, although
one cannot see through it, and it blocks light.`,
},
Cypher{
Name: "Poison (emotion)",
Level: "1d6 + 2",
Type: []string{
"Internal: Pill, ingestible or injectable liquid",
"Wearable: Lipstick, false fingertip, ring with needle",
"Usable: Injector",
},
Effect: `The victim feels a specific emotion for
one hour.
01–20 Anger. Likely to attack anyone who
disagrees with them. Very hard to
interact with; all interaction tasks
are hindered by two steps.
21–40 Fear. Flees in terror for one minute
when threatened.
41–60 Lust. Cannot focus on any
nonsexual activity.
61–75 Sadness. All tasks are hindered.
76–85 Complacency. Has no motivation.
All tasks are hindered by two steps.
86–95 Joy. Easy to interact with in a
pleasant manner; all pleasant
interaction tasks are eased.
96–00 Love. Much easier to interact
with; all interaction tasks are
eased by two steps, but temporary
attachment is likely.`,
},
Cypher{
Name: "Poison (explosive)",
Level: "1d6 + 1",
Type: []string{
"Internal: Pill, ingestible or injectable liquid",
"Wearable: Lipstick, false fingertip, ring with",
"Usable: Injector",
},
Effect: `Once this substance enters the
bloodstream, it travels to the brain
and reorganizes into an explosive that
detonates when activated, inflicting 10
points of damage (ignoring Armor). Roll to
determine the means of detonation:
01–25 The detonator is activated (must be
within long range).
26–40 A specified amount of time passes.
41–50 The victim takes a specific action.
51–55 A specific note is sung or played on
an instrument within short range.
56–60 The victim smells a specific scent
within immediate range.
61–80 The victim comes within long range
of the detonator.
81–00 The victim is no longer within long
range of the detonator.`,
},
// Poison that compels a triggered action.
// NOTE(review): name normalized to lowercase parenthetical to match sibling
// entries ("Poison (emotion)", "Poison (explosive)"); fixed the broken word
// "selfpreservation" (a line-break extraction artifact) in the effect text.
Cypher{
Name: "Poison (mind-controlling)",
Level: "1d6 + 2",
Type: []string{
"Internal: Pill, ingestible or injectable liquid",
"Wearable: Lipstick, false fingertip, ring with",
"Usable: Injector",
},
Effect: `The victim must carry out a specific
action in response to a specific trigger.
01–20 Lies down for one minute with eyes
closed when told to do so.
21–40 Flees in terror for one minute when
threatened.
41–60 Answers questions truthfully for
one minute.
61–75 Attacks close friend for one round
when within immediate range.
76–85 Obeys next verbal command given
(if it is understood).
86–95 For 28 hours, becomes sexually
attracted to the next creature of its
own species that it sees.
96–00 For one minute, moves toward the
next red object seen in lieu of all
other actions, even ignoring self-preservation.`,
},
// Poison that deals Intellect damage and stuns.
// NOTE(review): name normalized to lowercase parenthetical to match sibling
// entries ("Poison (emotion)", "Poison (explosive)").
Cypher{
Name: "Poison (mind-disrupting)",
Level: "1d6 + 2",
Type: []string{
"Internal: Pill, ingestible or injectable liquid",
"Wearable: Lipstick, false fingertip, ring with",
"Usable: Injector",
},
Effect: `The victim suffers Intellect damage
equal to the cypher’s level and cannot take
actions for a number of rounds equal to
the cypher’s level.`,
},
Cypher{
Name: "<NAME>",
Level: "1d6 + 2",
Type: []string{
"Internal: Pill",
"Wearable: Device that adheres to temple",
"Usable: Metallic disk",
},
Effect: `Allows the user to project a one-time,
one-way telepathic message of up to ten
words per cypher level, with an unlimited
range, to anyone they know.`,
},
Cypher{
Name: "<NAME>",
Level: "1d6 + 2",
Type: []string{
"Internal: Subdermal implant",
"Wearable: Contact lens, glove, ring, wristband,",
"Usable: Handheld device",
},
Effect: `Allows the user to project a ray of
destructive energy up to very long range
that inflicts damage equal to the cypher’s
level.
01–50 Heat/concentrated light
51–60 Cell-disrupting radiation
61–80 Force
81–87 Magnetic wave
88–93 Molecular bond disruption
94–00 Concentrated cold`,
},
Cypher{
Name: "<NAME> (numbing)",
Level: "1d6 + 2",
Type: []string{
"Internal: Subdermal implant",
"Wearable: Contact lens, glove, ring, wristband,",
"Usable: Handheld device",
},
Effect: `Allows the user to project a ray of
energy up to long range (very long range if
the cypher is level 6 or higher) that numbs
one limb of the target, making it useless
for one minute. A small number of these
devices (5%) induce numbing that lasts for
one hour.`,
},
Cypher{
Name: "<NAME> (paralysis)",
Level: "1d6 + 2",
Type: []string{
"Internal: Subdermal implant",
"Wearable: Contact lens, glove, ring,",
"Usable: Handheld device",
},
Effect: `Allows the user to project a ray of
energy up to very long range that paralyzes
the target for one minute. A small number
of these devices (5%) induce paralysis that
lasts for one hour.`,
},
Cypher{
Name: "<NAME>",
Level: "1d6 + 4",
Type: []string{
"Usable: Metallic spike",
},
Effect: `Once activated, the spike does not
move—ever—even if activated in midair.
A Might action will dislodge the spike, but
then it is ruined.`,
},
Cypher{
Name: "Rejuvenator",
Weight: 2,
Level: "1d6 + 2",
Type: []string{
"Internal: Pill, ingestible liquid",
"Wearable: Adhesive patch that activates when",
"Usable: Injector",
},
Effect: `Substance restores a number of points
equal to the cypher’s level to one random
Pool. Roll 1d100:
01–50 Might Pool
51–75 Speed Pool
76–00 Intellect Pool`,
},
Cypher{
Name: "Remote Viewer",
Level: "1d6",
Type: []string{
"Usable: Device that splits into two parts when",
},
Effect: `For one hour per cypher level, the glass
screen on one part shows everything going
on in the vicinity of the other part, regardless
of the distance between the two parts.`,
},
Cypher{
Name: "Repair Unit",
Level: "1d10",
Type: []string{
"Wearable: Shoulder- or arm-mounted launcher,",
"Usable: Handheld device",
},
Effect: `Device becomes a multiarmed sphere that
floats. It repairs one designated numenera
device (of a level equal to or less than its own)
that has been damaged but not destroyed. The
repair unit can even create spare parts, unless
the GM rules that the parts are too specialized
or rare (in which case, the unit repairs the
device entirely except for the specialized part).
Repair time is 1d100 + 20 minutes.`,
},
Cypher{
Name: "Retaliation Nodule",
Level: "1d6",
Type: []string{
"Usable: Crystal nodule affixed to armor",
},
Effect: `For the next 28 hours, anyone striking
the armor the nodule is attached to
triggers a small burst of electricity that
inflicts 1 point of damage (2 points if the
cypher is level 4 or higher, 3 points if the
cypher is level 6 or higher). No action or
roll is required by the armor’s wearer.`,
},
Cypher{
Name: "Sheen",
Level: "1d6",
Type: []string{
"Internal: Pill, ingestible liquid",
"Usable: Injector",
},
Effect: `For one week, the user’s cells are coated with
a protective veneer that resists damage (+1 to
Armor, or +2 to Armor if the cypher is level 5 or
higher) and eases Might defense rolls by two
steps. However, healing is more difficult during
this time; all recovery rolls suffer a –1 penalty.`,
},
Cypher{
Name: "<NAME>",
Level: "1d6",
Type: []string{
"Usable: Crystal nodule affixed to a melee weapon",
},
Effect: `For the next 28 hours, each time the weapon
the nodule is attached to strikes a solid creature
or object, it generates a burst of electricity,
inflicting 1 additional point of damage (2 points
if the cypher is level 4 or higher, 3 points if
the cypher is level 6 or higher).`,
},
Cypher{
Name: "Shocker",
Level: "1d6 + 4",
Type: []string{
"Internal: Subdermal implant",
"Wearable: Ring, palm disk",
"Usable: Short rod",
},
Effect: `Delivers a powerful burst of electricity
that shocks any creature touched, inflicting
damage equal to the cypher’s level.`,
},
Cypher{
Name: "Skill Boost",
Level: "1d6",
Type: []string{
"Internal: Pill, ingestible liquid",
"Usable: Injector",
},
Effect: `Dramatically but temporarily alters the
user’s mind and body so that one specific
physical action they can perform is eased by
three steps. Once activated, this boost can be
used a number of times equal to the cypher’s
level, but only within a 28-hour period. The
boost takes effect each time the action is
performed, so a level 3 cypher boosts the
first three times the action is attempted. The
action can be one of a number of possibilities:
01–15 Melee attack
16–30 Ranged attack
31–40 Speed defense
41–50 Might defense
51–60 Intellect defense
61–68 Jumping
69–76 Climbing
77–84 Running
85–92 Swimming
93–94 Sneaking
95–96 Balancing
97–98 Perceiving
99 Carrying
00 Escaping`,
},
Cypher{
Name: "<NAME>",
Level: "1d6",
Type: []string{
"Internal: Pill, ingestible liquid",
"Wearable: Fingertip cusp, ring, glove",
"Usable: Injector, gas sprayer",
},
Effect: `Touch or ingestion puts the victim to
sleep for ten minutes or until awoken by a
violent action or an extremely loud noise.`,
},
Cypher{
Name: "<NAME>",
Level: "1d6 + 2",
Type: []string{
"Wearable: Wristband, ring, belt-mounted device",
"Usable: Small handheld device",
},
Effect: `Draws all sound within long range into the
device for one round per cypher level. Within
the affected area, no sound can be heard.`,
},
Cypher{
Name: "<NAME>",
Level: "1d6 + 2",
Type: []string{
"Internal: Implant",
"Wearable: Wristband, ring, belt-mounted",
"Usable: Small handheld device",
},
Effect: `Dampens all sound within immediate
range for one minute per cypher level,
providing an asset for all creatures in the
area to attempt stealthy actions.`,
},
Cypher{
Name: "<NAME>",
Level: "5",
Type: []string{
"Usable: Small metal ring",
},
Effect: `When affixed to another numenera
device that affects a single target at range,
that range is increased to 1 mile (1.5 km)
with no penalties. Space is temporarily
warped in terms of seeing and reaching the
target. If direct line of sight is important to
the device’s effect, it remains important.
Creating the spatial warp functions as one
use of the device.`,
},
Cypher{
Name: "Speed Boost",
Level: "1d6 + 2",
Type: []string{
"Internal: Pill, ingestible liquid",
"Wearable: Adhesive patch that activates when",
"Usable: Injector",
},
Effect: `Substance adds 1 to Speed Edge for
one hour (or adds 2 if the cypher is level
5 or higher).`,
},
Cypher{
Name: "Stim",
Weight: 2,
Level: "6",
Type: []string{
"Internal: Pill, ingestible liquid",
"Usable: Injector",
},
Effect: `Eases the next action taken by three steps.`,
},
Cypher{
Name: "Strength Boost",
Level: "1d6 + 2",
Type: []string{
"Internal: Pill, ingestible liquid",
"Wearable: Adhesive patch that activates when",
"Usable: Injector",
},
Effect: `Substance adds 1 to Might Edge for one
hour (or adds 2 if the cypher is level 5 or
higher).`,
},
Cypher{
Name: "Subdual Field",
Level: "1d6 + 3",
Type: []string{
"Usable: Complex device",
},
Effect: `Two rounds after being activated, the
device creates an invisible field that fills a
specified area (such as a cube of a certain
size) within long range of the device.
The field lasts for one minute. It affects
the minds of thinking beings within the
field, preventing them from taking hostile
actions. The effect lasts as long as they
remain in the field and for 1d6 rounds after,
although an Intellect defense roll is allowed
each round to act normally (both in the
field and after leaving it).`,
},
Cypher{
Name: "<NAME>",
Level: "1d6 + 2",
Type: []string{
"Internal: Pill",
"Wearable: Disk that adheres to forehead,",
"Usable: Injector",
},
Effect: `The user activates the device and
targets one creature within close range.
For one hour per cypher level, the device
enables two-way long-range mental
communication between the user and
the target. This lasts for one hour per
cypher level. Sometimes multiple cyphers
of this type are found together and allow
communication between all of them.`,
},
Cypher{
Name: "Teleporter (bounder)",
Level: "1d6 + 2",
Type: []string{
"Wearable: Belt, wristband, ring, full bodysuit",
"Usable: Complex device, handheld device",
},
Effect: `User teleports up to 100 × the cypher
level in feet (30 × cypher level in m) to a
location they can see. They arrive safely
with their possessions but cannot take
anything else with them.`,
},
// Long-distance teleporter to a previously visited location.
// NOTE(review): replaced ASCII "x" with the multiplication sign "×" in
// "160 × the cypher level" for consistency with "100 ×" in the same effect
// text and with every other conversion in this data set.
Cypher{
Name: "Teleporter (traveler)",
Level: "1d6 + 4",
Type: []string{
"Wearable: Belt, wristband, ring, full bodysuit",
"Usable: Complex device, handheld device",
},
Effect: `User teleports up to 100 × the cypher
level in miles (160 × the cypher level in km)
to a location they have previously visited.
They arrive safely with their possessions
but cannot take anything else with them.`,
},
Cypher{
Name: "Temporal Viewer",
Level: "1d6 + 4",
Type: []string{
"Wearable: Wristband",
"Usable: Complex device, handheld device",
},
Effect: `Displays moving images and sound, up
to ten minutes per cypher level in length,
depicting events that occurred at the
current location up to one year prior. The
user specifies the time period shown by the
viewer.`,
},
Cypher{
Name: "Time Dilation Nodule (defensive)",
Level: "1d6",
Type: []string{
"Usable: Crystal nodule affixed to armor",
},
Effect: `For the next 28 hours, the wearer of the
armor moves in seemingly random, rapid
jumps, a few inches to one side or the
other, when attacked. This is an asset that
eases attacks by two steps (three steps if
the cypher is level 6 or higher).`,
},
Cypher{
Name: "Time Dilation Nodule (offensive)",
Level: "1d6",
Type: []string{
"Usable: Crystal nodule affixed to a melee",
},
Effect: `For the next 28 hours, the attacker
moves at almost instantaneous speeds
when they swing the weapon, easing their
attacks by two steps (three steps if the
cypher is level 6 or higher).`,
},
Cypher{
Name: "Tracer",
Level: "1d6",
Type: []string{
"Wearable: Wristband",
"Usable: Handheld device",
},
Effect: `Fires a microscopic tracer that clings to
any surface within short range (long range
if the cypher is level 4 or higher, very long
range if the cypher is level 6 or higher). For
the next 28 hours, the launcher shows the
distance and direction to the tracer, as long
as it is in the same dimension.`,
},
// Disguise cypher that alters a creature's appearance.
// NOTE(review): fixed the broken word "humansized" (a line-break extraction
// artifact) to "human-sized" in the effect text.
Cypher{
Name: "<NAME>",
Level: "1d6",
Type: []string{
"Internal: Pill or injection that produces",
"Usable: Tube of moldable paste",
},
Effect: `Changes the appearance of one human-sized
creature, providing an asset to disguise
tasks (easing them by two steps if the cypher
is level 5 or higher). The change takes ten
minutes to apply and lasts for 28 hours.`,
},
Cypher{
Name: "Visual Displacement Device",
Level: "1d6",
Type: []string{
"Wearable: Belt or bracelet",
"Usable: Handheld device",
},
Effect: `Projects holographic images of the
wearer to confuse attackers. The images
appear around the wearer. This gives the
wearer an asset to Speed defense actions
for ten minutes per cypher level.`,
},
Cypher{
Name: "<NAME>",
Level: "1d6",
Type: []string{
"Internal: Pill, ingestible liquid",
"Wearable: Headband",
"Usable: Disk that must be held to forehead",
},
Effect: `Translates everything said by the
user into a language that anyone can
understand for 28 hours per cypher level.`,
},
Cypher{
Name: "<NAME>",
Level: "1d6",
Type: []string{
"Internal: Subdermal implant",
"Wearable: Bodysuit, belt",
"Usable: Injector",
},
Effect: `Keeps the user warm and comfortable
in the harshest cold temperatures for
28 hours. During this time, the user
has Armor equal to the cypher level that
protects against cold damage.`,
},
Cypher{
Name: "<NAME>",
Level: "1d6",
Type: []string{
"Internal: Pill, ingestible liquid",
"Usable: Injector",
},
Effect: `Allows an air breather to extract oxygen
from water for five hours per cypher level
so they can breathe underwater.`,
},
Cypher{
Name: "<NAME>",
Level: "1d6 + 4",
Type: []string{
"Usable: Glass panel",
},
Effect: `When held up against a solid surface,
this panel allows the user to see through
up to 2 feet (60 cm) of material. The panel
works only if the cypher’s level is higher
than the material’s level. The effect lasts for
one minute per cypher level.`,
},
Cypher{
Name: "<NAME>",
Level: "1d6 + 3",
Type: []string{
"Usable: Bulky handheld device attached to",
},
Effect: `When activated, creatures within short
range are coated with a fine mist of metallic
nano particles that coats their clothing (but
not flesh), hardening non-flexing portions
for about an hour and granting +1 Armor. If
the use is coordinated with attackers who
make up a community’s or horde’s combat
force, the force is treated as a defensive
horde during that community action.`,
},
Cypher{
Name: "<NAME>",
Level: "1d6 + 2",
Type: []string{
"Usable: Bulky handheld device",
},
Effect: `When activated, any exposed metallic
weapons within short range bead with
acid for about an hour, granting affected
weapons 1 additional point of damage
when used in combat. If the use is
coordinated with attackers who make up a
community’s or horde’s combat force, the
community or horde inflicts +1 damage
during the next community-scale combat
interaction.`,
},
Cypher{
Name: "<NAME>",
Level: "1d6 + 2",
Type: []string{
"Usable: Handheld device",
},
Effect: `When activated, the cypher splits
into two pieces. One piece is affixed to a
numenera structure, and the other is kept
by a character. The character can then use
the connection between the two pieces to
trigger a spatial warp any time within the
next 28 hours. The spatial warp returns
the user and all targets within immediate
range to a location next to the numenera
structure, no matter how far apart the
character and the structure were.`,
},
Cypher{
Name: "Crafter’s Eyes",
Level: "1d6 + 2",
Type: []string{
"Wearable: Thick lenses on frames worn over",
},
Effect: `Informative images formed on the inner
lenses allow the user to automatically
succeed on any one crafting subtask whose
level is equal to or less than the cypher’s
level.`,
},
Cypher{
Name: "Deception Filter",
Level: "1d6",
Type: []string{
"Internal: Pill, ingestible liquid",
"Wearable: Badge",
},
Effect: `User cannot knowingly tell a lie for one
hour.`,
},
Cypher{
Name: "Destiny Nodule",
Level: "1d6",
Type: []string{
"Usable: Crystal nodule affixed to tool",
},
Effect: `For the next 28 hours, each time the
tool the nodule is attached to is used
to repair, craft, or modify an object or
structure, the tool provides the user an
additional asset to the task.`,
},
// Detonation that weakens a horde's attacks with neurotoxic mist.
// NOTE(review): name normalized to lowercase parenthetical to match sibling
// entries ("Teleporter (bounder)", "Ray Emitter (numbing)").
Cypher{
Name: "Detonation (horde suppressor)",
Level: "1d6 + 1",
Type: []string{
"Wearable: Wristband projector (long range)",
"Usable: Handheld projector (long range)",
},
Effect: `Explodes to release a burst of neurotoxic
mist affecting all creatures within long
range, hindering all attacks for about an
hour. If effectively targeted during a conflict
against a ranked horde or community,
the affected horde or community inflicts 1
point of damage less than normal during
that community action.`,
},
Cypher{
Name: "<NAME>",
Level: "1d6 + 2",
Type: []string{
"Internal: Pill, ingestible liquid",
"Wearable: Adhesive patch that activates when",
"Usable: Injector",
},
Effect: `For the next 28 hours, the user can
access a thin connection to the datasphere
to gain a very specific effect. When applying
Effort to a task related to a predetermined
skill, the user can apply one free level of
Effort. The skills encoded are generally
restricted to the following.
01–10 Melee attacks
11–20 Ranged attacks
21–40 Understanding numenera
41–50 Salvaging numenera
51–60 Crafting numenera
61–70 Persuasion
71–75 Healing
76–80 Speed defense
81–85 Intellect defense
86–90 Swimming
91–95 Riding
96–00 Sneaking`,
},
Cypher{
Name: "<NAME>",
Level: "1d6",
Type: []string{
"Internal: Pill, ingestible liquid",
"Wearable: Adhesive patch that activates when",
"Usable: Injector",
},
Effect: `For the next hour, the character loses
access to the part of their mind that
registers and produces emotion. This
allows the user to experience situations
they might not otherwise be able to handle,
to fairly weigh options without emotional
shortcuts, or to tell completely convincing
lies because they are no longer generating
micro-expressions or other telling responses
due to emotional inconsistency.`,
},
Cypher{
Name: "Farspeaker",
Level: "1d6",
Type: []string{
"Usable: Handheld device",
},
Effect: `For the next ten minutes, the user can be
heard at any distant location that they can
see, as long as nothing physically blocks
the intervening space. They can also hear
sounds made in return. The area in which
the user can speak and hear is up to a
short distance across in the target location.`,
},
// Plates that redirect gravity in a fixed area.
// NOTE(review): corrected "Useable:" to "Usable:", the spelling used by
// every other Type entry in this data set.
Cypher{
Name: "<NAME>",
Level: "1d6 + 2",
Type: []string{
"Usable: Metal plates",
},
Effect: `Once placed, the plates change the
direction of gravity within an area up to
a short distance in diameter. The gravitic
warp could be continuous across the
affected area, or it could bend and curve,
changing directions within the area up to
a number of times equal to the level of the
cypher. Once gravity is warped, it cannot
be changed and persists for 28 hours.`,
},
// Crystal orb that eases positive interaction and boosts recovery.
// NOTE(review): corrected "Useable:" to "Usable:", the spelling used by
// every other Type entry in this data set.
Cypher{
Name: "<NAME>",
Level: "1d6",
Type: []string{
"Usable: Crystal orb, nodule, disc",
},
Effect: `For an hour after activation, the cypher
emits a combination of colored light, sound,
odor, and healing nanobots that permeate an
area up to a short distance across. All positive
interaction tasks made in the area gain an asset.
Creatures who’ve made up their minds about a
topic may be open to revisiting the issue. PCs
add +1 to recovery rolls made in the area.`,
},
Cypher{
Name: "<NAME>",
Level: "1d6 + 2",
Type: []string{
"Usable: Bulky handheld device",
},
Effect: `When activated, creatures within short
range each gain 2 points of health (or 2
points to Might for PCs) for about an hour.
If the use is coordinated with attackers who
make up a community’s or horde’s combat
force, the force has +2 health during that
community action.`,
},
Cypher{
Name: "Hiding Alarm Nodule",
Level: "7",
Type: []string{
"Usable: Crystal nodule affixed to installation",
},
Effect: `For the next 28 hours, each time the
installation to which the nodule is attached
is struck hard enough to inflict damage (but
not more than once per round), the structure
or installation goes out of phase for one hour.
The installation must fit in a cube up to a short
distance wide, tall, and deep. All contents of
the out-of-phase structure also go out of phase,
unless they somehow leave the structure.`,
},
Cypher{
Name: "Immobilizer",
Level: "1d6 + 2",
Type: []string{
"Usable: Handheld device",
},
Effect: `The user directs one end of the cypher
at a target within short range, which is then
subject to an attack by a projected mass of
rapidly expanding and hardening foam. The
target is held immobile in the hardened
mass of foam for one hour, after which the
mass turns to powder.`,
},
Cypher{
Name: "Infrastructure Drill",
Level: "1d6 + 1",
Type: []string{
"Usable: Large sphere with a thick screw protrusion",
},
Effect: `The user attaches this cypher to a wall
or other structure. On the following round,
it drills into the object and keeps going
beneath the surface for the next hour,
creating a series of tunnels that weaken the
integrity of the structure and connecting
structures. This decreases the level of all
commonplace structures connected to
the initial structure within long range and
inflicts 3 points of damage to a ranked
community’s infrastructure.`,
},
Cypher{
Name: "Installation Enhancer",
Level: "1d6 + 3",
Type: []string{
"Wearable: Bulky device and several metal plates",
},
Effect: `When securely attached to an
installation, it increases the level of the
entire installation up to the level of the
cypher for about one hour (no effect on
installations of a level equal to or higher
than the cypher).`,
},
Cypher{
Name: "Installation Mover",
Level: "1d6 + 2",
Type: []string{
"Wearable: Bulky device and several metal plates",
},
Effect: `When securely attached to an installation,
it allows the entire installation to be moved
to another location within very long range
(which is normally a difficult task). The
transfer requires that the user be able to push
the installation to the new location, but for
the duration of about an hour, the installation
seems almost weightless. During the transfer,
the installation does not function.`,
},
Cypher{
Name: "Instant Item",
Level: "1d6",
Type: []string{
"Wearable: Bulky amulet",
},
Effect: `This cypher has a two-stage activation.
The first stage occurs when the wearer keys
an object that they can hold in one hand to
the cypher. The object falls into a subspace
pocket and remains indefinitely or until a
user activates the cypher a second (and final)
time, immediately retrieving the stored item.`,
},
Cypher{
Name: "Instant Wall",
Level: "1d6 + 1",
Type: []string{
"Usable: Handheld device",
},
Effect: `With the addition of water and air, the
small device expands into an immobile
plane up to 10 feet (3 m) tall, 30 feet (9
m) long, and 2 feet (60 cm) thick. It is
made from a form of shapestone and is
permanent and immobile once created.`,
},
Cypher{
Name: "Instant Workshop",
Level: "1d6 + 2",
Type: []string{
"Usable: Handheld device",
},
Effect: `This small device expands into a
workbench that fixes itself in place. The
workbench provides an adjustable-height
work surface with light and a variety
of attached tools suitable for working
with commonplace objects, numenera
objects, and structural components.
The workshop enables crafting tasks
by providing the appropriate tools and
workspace, though it does not provide
shelter from the elements. The workbench
and all its components are made from a
form of shapemetal, and the workbench is
permanent and immobile once created.`,
},
Cypher{
Name: "Iotum Stabilizer",
Level: "1d6 + 1",
Type: []string{
"Usable: Bulky handheld device",
},
Effect: `When used as part of a salvaging task,
the field emitted by the cypher stabilizes
all iotum within short range so that an
additional 1d6 units of iotum are recovered.
The additional iotum must be of the same
kind that has already been discovered, and
the iotum must be of a level equal to or less
than the level of this cypher.`,
},
Cypher{
Name: "Iotum Upgrader",
Level: "1d6 + 2",
Type: []string{
"Usable: Bulky handheld device",
},
Effect: `When attached to 1 unit of iotum, that
iotum can then be used as if it were a unit
of iotum 1 level higher in a crafting task.
For example, an upgraded unit of io (level 1)
could be used as if it were a unit of
responsive synth (level 2) for the purpose
of one crafting task, or a unit of quantium
(level 5) could be used as if it were a unit of
protomatter (level 6).`,
},
Cypher{
Name: "Iron Wind Resistance",
Level: "7",
Type: []string{
"Internal: Pill, ingestible liquid",
"Usable: Injector",
},
Effect: `Grants the user two assets to defense
tasks made to resist the effects of an
instance of the iron wind (and attempts
to resist any ongoing effects instigated by
previous iron wind contact, if any) for the
next 28 hours.`,
},
Cypher{
Name: "Mechanical Wright",
Level: "1d6 + 3",
Type: []string{
"Usable: Complex device",
},
Effect: `Crafts one numenera object or
structure whose level can be no higher
than the mechanical wright’s level
minus 3 (minimum level 1), as long as a
numenera plan is provided to work from.
Crafting occurs over the course of the
next 28 hours, regardless of the target
device’s level. Components, including
parts and iotum, are contributed from
the mechanical wright, which becomes
completely integrated into the new
numenera device over the course of the
crafting process.`,
},
Cypher{
Name: "Object Replicator",
Level: "1d6 + 1",
Type: []string{
"Usable: Crystal nodule affixed to object that",
},
Effect: `Replicates three additional copies of
most objects whose level is equal to or less
than this cypher’s level. A unit of iotum can
be replicated, but not functioning cyphers,
artifacts, or similar devices.`,
},
Cypher{
Name: "Organ Factory",
Level: "1d6",
Type: []string{
"Internal: Pill, ingestible liquid",
"Usable: Injector",
},
Effect: `Over the course of 28 hours, the
user grows an external, fully viable living
organ or limb. This is a somewhat painful
process that dazes the user for the last
half of the process. The completely grown
organ or limb can be transferred to another
creature simply by placing it on their body
and waiting a few rounds. If the recipient
creature has sustained damage to the
same kind of organ or limb, the newly
grown one replaces it over the course of
about a minute (during which time the
recipient creature is stunned). Otherwise,
the duplicate organ or limb withers and
dies.
Duplicate organs must be used within one
week. All force-grown organs and limbs are
sealed within a partially living, protective,
translucent caul. The cypher can produce
one of the following, chosen by the user at
the time of the cypher’s use:
01–04 Heart
05–08 Lung
09–12 Brain
13–16 Blood
17–20 Ribs
21–25 Eye
26–30 Ear
31–35 Hand
36–45 Foot
46–55 Full arm
56–65 Full leg
66–75 Nose
76–85 Mouth
86–95 Stomach
96–00 Intestines`,
},
Cypher{
Name: "<NAME>",
Level: "1d6 + 1",
Type: []string{
"Wearable: Gloves",
},
Effect: `These gloves can selectively phase
through solid objects whose level is no
higher than the cypher level in a fashion
that allows the wearer to see what they are
touching, providing an asset for anyone
attempting to repair a disabled object or
structure. Someone wearing the gloves
could also search behind walls and under
floors using the gloves. The gloves retain
their phasing ability for up to one minute per
cypher level. If used as part of a salvaging
task, the gloves grant two assets.`,
},
Cypher{
Name: "Psychic Defense Nodule",
Level: "1d6 + 2",
Type: []string{
"Usable: Crystal nodule affixed to side of head",
},
Effect: `For the next 28 hours, each time the
wearer of the nodule is affected by an
attack that attempts to sway their actions
or beliefs or that inflicts Intellect damage,
they instead go into stasis for one round
and remain unaffected. While in stasis,
they also lose their next turn. Attacks
against a wearer in stasis are made as if
the wearer were protected by a force field
whose level is equal to the cypher’s level.`,
},
Cypher{
Name: "<NAME>",
Level: "1d6 + 2",
Type: []string{
"Usable: Bulky handheld device attached to",
},
Effect: `When activated, creatures within
short range are coated with a fine mist of
insinuating nano particles that enter their
blood and stimulate their behavior for
about an hour, making them more violent
but less able to feel pain. This grants them
+1 Armor but an inability to use Effort
from their Intellect Pool. If the use is
coordinated with attackers who make up
a community’s or horde’s combat force,
the force is treated as a marauding horde
during that community action.`,
},
Cypher{
Name: "Retriever",
Level: "1d6 + 3",
Type: []string{
"Wearable: Glove of synth and small nodule",
"Usable: Small handheld device and small",
},
Effect: `These cyphers always come in at
least two parts: a nodule and some kind
of retrieving device. The nodule can be
attached to an item your size or smaller.
When the cypher is activated, that item is
teleported to the device.`,
},
Cypher{
Name: "<NAME>",
Level: "1d6 + 1",
Type: []string{
"Usable: Small metallic disc",
},
Effect: `When applied to a creature’s head, the
disk immediately unleashes microfilaments
that enter the brain. Within five minutes,
the creature is trained in tasks related to
salvaging numenera. If the creature is
already trained or specialized in salvaging
numenera, this graft has no effect. Once
the graft attaches, the effect is permanent,
and this device no longer counts against
the number of cyphers that a PC can bear,
but the disc remains.`,
},
Cypher{
Name: "Shapemetal",
Level: "6",
Type: []string{
"Usable: Canister containing a silvery,",
},
Effect: `This metallic clay can be shaped and
greatly expanded to create individual tools
or objects such as a hammer, a ladder, or
a basic structure such as a wall, bench,
floor, staircase, and so on, as long as the
total volume created could fit in a 10-foot
(3 m) cube. Once formed, the structure or
objects are permanent.`,
},
Cypher{
Name: "Slash-Retardant Spray",
Level: "1d6 + 2",
Type: []string{
"Usable: Bulky handheld device attached to",
},
Effect: `For the next 28 hours, walls and
structures sprayed with these repairing
nanites are treated as if 1 level higher. If
an hour is spent spraying down the outer
walls and defense structures of a ranked
community, that community gains +1
Armor during any conflict that happens
over the next 28 hours.`,
},
Cypher{
Name: "Stealth Thrower",
Level: "1d6 + 2",
Type: []string{
"Usable: Bulky handheld device attached to",
},
Effect: `When activated, creatures within
short range are coated with a fine mist
of dull nano particles that render them
more difficult to pick out from their
surroundings for about an hour, granting
them an asset to stealth tasks. If the use is
coordinated with attackers who make up a
community’s or horde’s combat force, the
force is treated as a stealthy horde during
that community action.`,
},
Cypher{
Name: "Summoning Alarm Nodule",
Level: "6",
Type: []string{
"Usable: Crystal nodule affixed to installation",
},
Effect: `For the next 28 hours, each time the
structure or installation to which the nodule
is attached is struck hard enough to inflict
damage (but not more than once per round),
whoever attached the nodule is teleported
from any location within very long range to a
location standing next to the installation.`,
},
Cypher{
Name: "<NAME>",
Level: "1d6",
Type: []string{
"Internal: Pill, ingestible liquid",
"Usable: Injector",
},
Effect: `For the next number of days equal to
the cypher level, the wearer feels no ill
effects from not eating or from overeating.
They also gain an asset to any defense
tasks to withstand poison.`,
},
Cypher{
Name: "<NAME>",
Level: "1d6",
Type: []string{
"Wearable: Single boot with attached device",
},
Effect: `For the next 28 hours, each time the boot
touches down on solid ground, it deposits
a patch of nanites that mark the area with
a symbol. At any time before the duration
expires, the wearer (and up to one other
person that can be carried along) can use
an action to instantly teleport back to any
of these symbols. The teleportation effect
brings the cypher’s duration to an end.`,
},
Cypher{
Name: "<NAME>",
Level: "1d6",
Type: []string{
"Internal: Pill, ingestible liquid",
"Usable: Injector",
},
Effect: `The user can see out-of-phase, invisible,
and transdimensional creatures, objects, and
sources of energy within long range for 28
hours. If ingested by a creature who attempts
to salvage a particular kind of iotum during
a salvage task, the effects of this cypher
grant the user one free level of Effort if they
first use a level of Effort on that task.`,
},
Cypher{
Name: "<NAME>",
Level: "1d6 + 4",
Type: []string{
"Useable: Handheld device",
},
Effect: `The user and any additional targets that
can fit into a space an immediate distance
in diameter are encapsulated in a spherical
force field for up to ten hours or until the
user collapses it. The sphere hovers at all
times and moves as the user wishes vertically
or horizontally up to a short distance each
round. If the sphere is destroyed or collapsed,
whatever it contains falls to the ground.`,
},
} | types/NumeneraCyphers.go | 0.5083 | 0.543045 | NumeneraCyphers.go | starcoder |
// Package types contains most of the data structures available to/from Noms.
package types
import (
"github.com/attic-labs/noms/go/d"
"github.com/attic-labs/noms/go/hash"
)
// Type defines and describes Noms types, both built-in and user-defined.
// Desc provides the composition of the type. It may contain only a types.NomsKind, in the case of
// primitives, or it may contain additional information -- e.g. element Types for compound type
// specializations, field descriptions for structs, etc. Either way, checking Kind() allows code
// to understand how to interpret the rest of the data.
// If Kind() refers to a primitive, then Desc has no more info.
// If Kind() refers to List, Map, Ref, Set, or Union, then Desc is a list of Types describing the element type(s).
// If Kind() refers to Struct, then Desc contains a []field.
type Type struct {
	// Desc describes the composition of the type (see the doc comment above).
	Desc TypeDesc
	// h caches the value hash; it stays empty until Hash() is first called.
	h *hash.Hash
	// oid is a second cached hash slot; its exact role is not evident from
	// this file chunk — TODO confirm (ordinal/object id?).
	oid *hash.Hash
	// id is the numeric identifier assigned when the type is constructed.
	id uint32
	// serialization caches the binary encoding of the type; it is nil while
	// the type still has unresolved cycles (see newType).
	serialization []byte
}
// initialTypeBufferSize is the starting capacity for the buffer used when
// serializing a type.
const initialTypeBufferSize = 128

// newType constructs a *Type for desc, eagerly serializing it unless the
// description still contains unresolved cycles (in which case serialization
// is deferred).
func newType(desc TypeDesc, id uint32) *Type {
	t := &Type{desc, &hash.Hash{}, &hash.Hash{}, id, nil}
	if !t.HasUnresolvedCycle() {
		serializeType(t)
	}
	return t
}

// serializeType encodes t into its binary form and caches the bytes on
// t.serialization.
func serializeType(t *Type) {
	w := &binaryNomsWriter{make([]byte, initialTypeBufferSize), 0}
	enc := newValueEncoder(w, nil)
	enc.writeType(t, nil)
	t.serialization = w.data()
}

// Describe generates text that should parse into the struct being described.
func (t *Type) Describe() (out string) {
	return EncodedValue(t)
}

// Kind returns the NomsKind of the underlying type description.
func (t *Type) Kind() NomsKind {
	return t.Desc.Kind()
}

// hasUnresolvedCycle walks the type, treating any type already in visited as
// resolved to avoid infinite recursion on cyclic definitions.
func (t *Type) hasUnresolvedCycle(visited []*Type) bool {
	_, found := indexOfType(t, visited)
	if found {
		return false
	}
	return t.Desc.HasUnresolvedCycle(append(visited, t))
}

// HasUnresolvedCycle reports whether the type contains a cycle that has not
// yet been resolved.
func (t *Type) HasUnresolvedCycle() bool {
	return t.hasUnresolvedCycle(nil)
}

// Value interface

// Equals reports type equality: identical pointers short-circuit, otherwise
// the hashes are compared.
func (t *Type) Equals(other Value) (res bool) {
	return t == other || t.Hash() == other.Hash()
}

// Less orders this type relative to another Value via the shared valueLess
// helper.
func (t *Type) Less(other Value) (res bool) {
	return valueLess(t, other)
}

// Hash returns the type's hash, computing and caching it on first use.
func (t *Type) Hash() hash.Hash {
	if t.h.IsEmpty() {
		*t.h = getHash(t)
	}
	return *t.h
}
// WalkValues implements the Value interface, invoking cb for each child Type
// referenced by this type's description. Primitive types have no children.
func (t *Type) WalkValues(cb ValueCallback) {
	switch desc := t.Desc.(type) {
	case CompoundDesc:
		for _, t := range desc.ElemTypes {
			cb(t)
		}
	case StructDesc:
		desc.IterFields(func(name string, t *Type) {
			cb(t)
		})
	case PrimitiveDesc:
		// Nothing, these have no child values.
	default:
		d.Chk.Fail("Unexpected type desc implementation: %#v", t)
	}
	// Redundant trailing `return` removed (function has no results).
}

// WalkRefs implements the Value interface. A Type carries no Refs, so this is
// a no-op.
func (t *Type) WalkRefs(cb RefCallback) {
}

// Type implements the Value interface; the type of any Type value is TypeType.
func (t *Type) Type() *Type {
	return TypeType
}
func MakePrimitiveType(k NomsKind) *Type {
switch k {
case BoolKind:
return BoolType
case NumberKind:
return NumberType
case StringKind:
return StringType
case BlobKind:
return BlobType
case ValueKind:
return ValueType
case TypeKind:
return TypeType
}
d.Chk.Fail("invalid NomsKind: %d", k)
return nil
}
func MakePrimitiveTypeByString(p string) *Type {
switch p {
case "Bool":
return BoolType
case "Number":
return NumberType
case "String":
return StringType
case "Blob":
return BlobType
case "Value":
return ValueType
case "Type":
return TypeType
}
d.Chk.Fail("invalid type string: %s", p)
return nil
} | go/types/type.go | 0.724091 | 0.591399 | type.go | starcoder |
package dimlayer
import (
"errors"
"fmt"
)
// DimLayer holds the dimensions of one convolution layer: the filter width
// (w) plus the padding (p), dilation (d), and stride (s) settings.
type DimLayer struct {
	w int32
	p, d, s int32
}

// CreateDimLayer builds a DimLayer from a filter width and padding, dilation,
// and stride values.
func CreateDimLayer(w, p, d, s int32) DimLayer {
	return DimLayer{
		w: w,
		p: p,
		d: d,
		s: s,
	}
}
// CopyDimsLayer returns a copy of src; each element is rebuilt from its
// (w, p, d, s) values via CreateDimLayer.
func CopyDimsLayer(src []DimLayer) []DimLayer {
	// The result was previously named `copy`, shadowing the builtin copy
	// function; renamed to dst.
	dst := make([]DimLayer, len(src))
	for i := range src {
		dst[i] = CreateDimLayer(src[i].Get())
	}
	return dst
}
// Set sets the convolution values (padding, dilation, stride).
func (l *DimLayer) Set(p, d, s int32) {
	l.p, l.d, l.s = p, d, s
}

// Get returns the previously set values.
func (l *DimLayer) Get() (w, p, d, s int32) {
	w, p, d, s = l.w, l.p, l.d, l.s
	return w, p, d, s
}

// Out returns the output dimension for input x using the previously set
// values. It returns -1 when the padded input is smaller than the dilated
// filter (see findoutputdim).
func (l *DimLayer) Out(x int32) (y int32) {
	return findoutputdim(x, l.w, l.s, l.p, l.d)
}

// ReverseOut finds the output for the reverse convolution.
func (l *DimLayer) ReverseOut(y int32) (x int32) {
	return findreverseoutputdim(y, l.w, l.s, l.p, l.d)
}

// findoutputdim computes the standard convolution output size
// 1 + ceil((x + 2p - ((w-1)*d + 1)) / s), returning -1 as a sentinel when the
// padded input cannot fit the dilated filter.
func findoutputdim(x, w, s, p, d int32) int32 {
	y := x + (2 * p) - (((w - 1) * d) + 1)
	if y < 0 {
		return -1
	}
	return divideup(y, s) + 1
}

// MaxOutput will change the values of layers to achieve the max output for all
// the layers (each layer gets padding w-1, dilation 1, stride 1).
// It will also return the value of the final layer.
func MaxOutput(input int32, layers []DimLayer) (output int32) {
	output = input
	for i := range layers {
		p := layers[i].w - 1
		layers[i].Set(p, 1, 1)
		output = layers[i].Out(output)
	}
	return output
}
/*
func withinboundsy(index, y, startx, endy int32) bool {
if y == (startx-endy-1)/(index+1) {
return true
}
return false
}
*/
// withinboundsx reports whether candidate input size x falls inside the
// expected band for layer `index` out of `numoflayers`, given the global
// start size startx. (endy is currently unused in the computation.)
func withinboundsx(numoflayers, index, x, startx, endy int32) bool {
	bound := (startx / numoflayers) * (index + 1)
	if x < bound {
		return false
	}
	if index == 0 {
		return bound <= startx
	}
	return x <= bound+2
}
// Backwardspdv2 walks the layers from last to first, expanding the set of
// candidate input sizes that could produce ygoal. It returns xgoal if that
// size appears among the candidates reaching the first layer, else an error.
// NOTE(review): the fmt.Println calls look like leftover debug output.
func Backwardspdv2(xgoal, ygoal int32, dlayer []*Ofhlpr) (int32, error) {
	output := []int32{ygoal}
	var err error
	for i := len(dlayer) - 1; i >= 0; i-- {
		output, err = dlayer[i].backwardmultinputs(output)
		for i := range output {
			fmt.Println(output[i])
		}
		if err != nil {
			return -1, err
		}
	}
	for i := range output {
		fmt.Println(output[i])
		if output[i] == xgoal {
			return xgoal, nil
		}
	}
	return -1, errors.New("backwardspdv2 Didn't work")
}

// Makeoutputfinderhelper builds an Ofhlpr for one layer, seeding the backward
// cursor (bwd) at the minimum (s, d, p) values and the forward cursor (fwd)
// at the maxima.
func Makeoutputfinderhelper(index, totaldimlayers, globalxgoal, globalygoal, mins, mind, minp, maxs, maxd, maxp int32, layer *DimLayer) *Ofhlpr {
	fwd := indexes{s: maxs, d: maxd, p: maxp}
	bwd := indexes{s: mins, d: mind, p: minp}
	min := indexes{s: mins, d: mind, p: minp}
	max := indexes{s: maxs, d: maxd, p: maxp}
	return &Ofhlpr{
		index: index,
		totaldimlayers: totaldimlayers,
		gbyg: globalygoal,
		gbxg: globalxgoal,
		max: max,
		min: min,
		layer: layer,
		fwd: fwd,
		bwd: bwd,
	}
}

// backwardmultinputs runs the backward (s, d, p) sweep for each candidate
// output size and returns the union of recorded input candidates.
// NOTE(review): the error returned by backwardspd is silently discarded here;
// only the final accumulated set is checked — confirm this is intentional.
func (h *Ofhlpr) backwardmultinputs(yinputs []int32) (outputs []int32, err error) {
	for i := range yinputs {
		h.backwardspd(yinputs[i])
	}
	outputarray := h.getreverseoutputsvals()
	if len(outputarray) < 1 || outputarray == nil {
		return outputarray, fmt.Errorf("nothing found")
	}
	return outputarray, nil
}
// backwardspd sweeps every (stride, dilation, padding) combination between
// h.bwd and h.max, computes the reverse-convolution input size that would
// yield yinput, and records candidates that fall within this layer's bounds.
// It returns all candidate input sizes accumulated so far, or an error if
// none were found.
//
// NOTE(review): the h.bwd indices are never reset, so the loops resume where
// the previous call left off (and the inner indices stay at max after the
// first outer pass) — confirm this is intentional.
func (h *Ofhlpr) backwardspd(yinput int32) (output []int32, err error) {
	for ; h.bwd.s <= h.max.s; h.bwd.s++ {
		for ; h.bwd.d <= h.max.d; h.bwd.d++ {
			for ; h.bwd.p <= h.max.p; h.bwd.p++ {
				output := findreverseoutputdim(yinput, h.layer.w, h.bwd.s, h.bwd.p, h.bwd.d)
				if withinboundsx(h.totaldimlayers, h.index, output, h.gbxg, h.gbyg) {
					fmt.Println(output) // leftover debug output, kept as-is
					// Bug fix: the final (padding) argument was previously
					// passed as h.bwd.d, so every candidate was recorded with
					// the dilation value in place of its padding.
					h.append(yinput, output, h.bwd.s, h.bwd.d, h.bwd.p)
				}
			}
		}
	}
	outputarray := h.getreverseoutputsvals()
	if len(outputarray) < 1 || outputarray == nil {
		return outputarray, fmt.Errorf("nothing found")
	}
	return outputarray, nil
}
// getreverseoutputsvals collects just the candidate output sizes from the
// recorded set, in insertion order.
func (h *Ofhlpr) getreverseoutputsvals() []int32 {
	outputs := make([]int32, len(h.outputs))
	for i := range h.outputs {
		outputs[i] = h.outputs[i].output
	}
	return outputs
}

// Ofhlpr ("output finder helper") carries the per-layer search state used
// when walking backwards from a goal output size.
type Ofhlpr struct {
	// gbxg/gbyg appear to be the global x (input) and y (output) goals;
	// index is this layer's position out of totaldimlayers — TODO confirm.
	gbxg, gbyg, index, totaldimlayers int32
	// max/min bound the (s, d, p) search space; bwd/fwd are the sweep cursors.
	max indexes
	min indexes
	bwd indexes
	fwd indexes
	// layer holds this layer's filter/convolution dimensions.
	layer *DimLayer
	// outputs accumulates candidate results, deduplicated by output size.
	outputs []routputs
}
// compairpdswithindex reports whether a holds exactly the given stride,
// dilation, and padding values.
func compairpdswithindex(a indexes, s, d, p int32) bool {
	// Simplified: return the boolean expression directly (staticcheck S1008).
	return a.s == s && a.p == p && a.d == d
}

// compairindexes reports whether two indexes hold identical values.
func compairindexes(a, b indexes) bool {
	return a.s == b.s && a.p == b.p && a.d == b.d
}
// makeroutput builds a routputs entry seeded with a single input and its
// (s, d, p) combination.
func makeroutput(output, input, s, d, p int32) routputs {
	inputs := make([]rinput, 1)
	inputs[0] = makeinput(input, s, d, p)
	return routputs{
		output: output,
		inputs: inputs,
	}
}

// makeinput builds an rinput seeded with a single (s, d, p) combination.
func makeinput(input, s, d, p int32) rinput {
	combo := make([]indexes, 1)
	combo[0] = indexes{s: s, d: d, p: p}
	return rinput{
		input: input,
		combos: combo,
	}
}

// rinput records one candidate input size together with every (s, d, p)
// combination that produced it.
type rinput struct {
	input int32
	combos []indexes
}
// append records that input (with combination s, d, p) produced this output,
// deduplicating by input value.
func (r *routputs) append(input, s, d, p int32) {
	// Simplified: early return replaces the hasinput flag, and the explicit
	// make of an empty slice is unnecessary — the builtin append handles nil.
	for i := range r.inputs {
		if r.inputs[i].input == input {
			r.inputs[i].append(s, d, p)
			return
		}
	}
	r.inputs = append(r.inputs, makeinput(input, s, d, p))
}

// append records an (s, d, p) combination for this input, skipping exact
// duplicates.
func (r *rinput) append(s, d, p int32) {
	for i := range r.combos {
		if r.combos[i].s == s && r.combos[i].d == d && r.combos[i].p == p {
			return
		}
	}
	r.combos = append(r.combos, indexes{s: s, d: d, p: p})
}
// append records a candidate result (output produced from input with the
// combination s, d, p), deduplicating by output value.
func (h *Ofhlpr) append(input, output, s, d, p int32) {
	// Simplified: early return replaces the hasoutput flag, and the builtin
	// append handles a nil slice, so no explicit make is needed.
	for i := range h.outputs {
		if h.outputs[i].output == output {
			h.outputs[i].append(input, s, d, p)
			return
		}
	}
	h.outputs = append(h.outputs, makeroutput(output, input, s, d, p))
}
// routputs groups every candidate input that maps to a single reverse output
// size.
type routputs struct {
	output int32
	inputs []rinput
}

// indexes is one (stride, dilation, padding) combination.
type indexes struct {
	s, d, p int32
}
// divideup returns num/den rounded up for positive operands: the truncated
// quotient, plus one whenever there is a nonzero remainder.
func divideup(num, den int32) int32 {
	q := num / den
	if num%den == 0 {
		return q
	}
	return q + 1
}
// findreverseoutputdim inverts the convolution output formula
//
//	y = 1 + (x + 2p - ((w-1)*d + 1)) / s
//
// solving for the input size x that yields output y:
//
//	x = s*(y-1) - 2p + ((w-1)*d + 1)
func findreverseoutputdim(y, w, s, p, d int32) int32 {
	dilatedFilter := ((w - 1) * d) + 1
	return (s * (y - 1)) - (2 * p) + dilatedFilter
}
package mathx
// dotProduct calculates the algebraic dot product of two slices: the sum of
// the products of corresponding elements. Used when multiplying matrices.
func dotProduct(a, b []float64) float64 {
	var total float64
	for i, av := range a {
		total += av * b[i]
	}
	return total
}
// Matrix is a 2-D matrix backed by a DataArray (one long row-major slice).
type Matrix struct {
	DataArray
}

// NewMatrix creates an r x c sized matrix that is filled with the provided
// data. The matrix data is represented as one long slice.
func NewMatrix(r, c int, data []float64) *Matrix {
	d := Zeros(r, c)
	d.Init(r, c, data)
	return &Matrix{*d}
}

// Column returns a slice that represents a column from the matrix.
// This works by examining each row, and adding the nth element of
// each to the column slice. Note: n is 1-indexed, as are Row/Multiply.
func (p *Matrix) Column(n int) []float64 {
	col := make([]float64, p.rows)
	for i := 1; i <= p.rows; i++ {
		col[i-1] = p.Row(i)[n-1]
	}
	return col
}

// Row returns a slice that represents a row from the matrix (n is 1-indexed);
// the slice aliases the matrix's backing storage.
func (p *Matrix) Row(n int) []float64 {
	return p.data[p.findIndex(n, 1):p.findIndex(n, p.columns+1)]
}
// Multiply returns the matrix product of p and B by taking the dot product of
// each row of p with each column of B. Indices are 1-based, matching Row and
// Column.
func (p *Matrix) Multiply(B *Matrix) *Matrix {
	C := Zeros(p.rows, B.columns)
	for r := 1; r <= C.rows; r++ {
		// Go naming convention: no underscores (were A_row / B_col).
		aRow := p.Row(r)
		for c := 1; c <= C.columns; c++ {
			bCol := B.Column(c)
			C.Set2(r, c, dotProduct(aRow, bCol))
		}
	}
	return &Matrix{*C}
}
// Add adds two matrices together and returns the resulting matrix. To do
// this, we just add together the corresponding elements from each matrix.
func (p *Matrix) Add(B *Matrix) *Matrix {
C := Zeros(p.rows, p.columns)
for r := 1; r <= p.rows; r++ {
for c := 1; c <= p.columns; c++ {
C.Set2(r, c, p.Get2(r, c)+B.Get2(r, c))
}
}
return &Matrix{*C}
}
func (p *Matrix) Times(k float64) *Matrix {
C := Zeros(p.rows, p.columns)
for r := 1; r <= p.rows; r++ {
for c := 1; c <= p.columns; c++ {
C.Set2(r, c, p.Get2(r, c)*k)
}
}
return &Matrix{*C}
} | mathx/matrix.go | 0.810366 | 0.579757 | matrix.go | starcoder |
package abstract
import (
"vitess.io/vitess/go/vt/sqlparser"
"vitess.io/vitess/go/vt/vtgate/semantics"
)
type (
	/*QueryGraph represents the FROM and WHERE parts of a query.
	It is an intermediate representation of the query that makes it easier for the planner
	to find all possible join combinations. Instead of storing the query information in a form that is close
	to the syntax (AST), we extract the interesting parts into a graph form with the nodes being tables in the FROM
	clause and the edges between them being predicates. We keep predicates in a hash map keyed by the dependencies of
	the predicate. This makes it very fast to look up connections between tables in the query.
	*/
	QueryGraph struct {
		// the Tables, including predicates that only depend on this particular table
		Tables []*QueryTable

		// innerJoins contains the predicates that need multiple Tables
		innerJoins []*innerJoin

		// NoDeps contains the predicates that can be evaluated anywhere.
		NoDeps sqlparser.Expr
	}

	// innerJoin groups the join predicates that share one exact set of table
	// dependencies (deps).
	innerJoin struct {
		deps semantics.TableSet
		exprs []sqlparser.Expr
	}

	// QueryTable is a single FROM table, including all predicates particular to this table
	QueryTable struct {
		TableID semantics.TableSet
		Alias *sqlparser.AliasedTableExpr
		Table sqlparser.TableName
		Predicates []sqlparser.Expr
		IsInfSchema bool
	}
)
var _ Operator = (*QueryGraph)(nil)
// PushPredicate implements the Operator interface. The expression is split on
// AND and each conjunct is filed into the appropriate bucket of the graph.
func (qg *QueryGraph) PushPredicate(expr sqlparser.Expr, semTable *semantics.SemTable) error {
	for _, e := range sqlparser.SplitAndExpression(nil, expr) {
		err := qg.collectPredicate(e, semTable)
		if err != nil {
			return err
		}
	}
	return nil
}

// TableID implements the Operator interface, returning the union of the table
// identifiers of every table in the graph.
func (qg *QueryGraph) TableID() semantics.TableSet {
	var ts semantics.TableSet
	for _, table := range qg.Tables {
		ts = ts.Merge(table.TableID)
	}
	return ts
}

// GetPredicates returns the predicates that are applicable for the two given
// TableSets: those fully solved by lhs+rhs that touch both sides.
func (qg *QueryGraph) GetPredicates(lhs, rhs semantics.TableSet) []sqlparser.Expr {
	var allExprs []sqlparser.Expr
	for _, join := range qg.innerJoins {
		tableSet, exprs := join.deps, join.exprs
		if tableSet.IsSolvedBy(lhs.Merge(rhs)) &&
			tableSet.IsOverlapping(rhs) &&
			tableSet.IsOverlapping(lhs) {
			allExprs = append(allExprs, exprs...)
		}
	}
	return allExprs
}

// newQueryGraph returns an empty QueryGraph.
func newQueryGraph() *QueryGraph {
	return &QueryGraph{}
}
// collectPredicates splits the WHERE clause of sel on AND and files each
// conjunct into the graph.
func (qg *QueryGraph) collectPredicates(sel *sqlparser.Select, semTable *semantics.SemTable) error {
	predicates := sqlparser.SplitAndExpression(nil, sel.Where.Expr)
	for _, predicate := range predicates {
		err := qg.collectPredicate(predicate, semTable)
		if err != nil {
			return err
		}
	}
	return nil
}

// getPredicateByDeps returns the join predicates stored for exactly the given
// dependency set, and whether such an entry exists.
func (qg *QueryGraph) getPredicateByDeps(ts semantics.TableSet) ([]sqlparser.Expr, bool) {
	for _, join := range qg.innerJoins {
		if join.deps == ts {
			return join.exprs, true
		}
	}
	return nil, false
}

// addJoinPredicates appends expr to the innerJoin entry keyed by ts, creating
// the entry if it does not exist yet.
func (qg *QueryGraph) addJoinPredicates(ts semantics.TableSet, expr sqlparser.Expr) {
	for _, join := range qg.innerJoins {
		if join.deps == ts {
			join.exprs = append(join.exprs, expr)
			return
		}
	}
	qg.innerJoins = append(qg.innerJoins, &innerJoin{
		deps: ts,
		exprs: []sqlparser.Expr{expr},
	})
}
// collectPredicate files a single predicate according to how many tables it
// depends on: none (evaluate-anywhere bucket), one (per-table predicate), or
// many (join predicate). It currently never returns a non-nil error.
func (qg *QueryGraph) collectPredicate(predicate sqlparser.Expr, semTable *semantics.SemTable) error {
	deps := semTable.RecursiveDeps(predicate)
	switch deps.NumberOfTables() {
	case 0:
		qg.addNoDepsPredicate(predicate)
	case 1:
		found := qg.addToSingleTable(deps, predicate)
		if !found {
			// this could be a predicate that only has dependencies from outside this QG
			qg.addJoinPredicates(deps, predicate)
		}
	default:
		qg.addJoinPredicates(deps, predicate)
	}
	return nil
}
// addToSingleTable attaches predicate to the query table whose TableID equals
// table, reporting whether such a table exists in the graph.
func (qg *QueryGraph) addToSingleTable(table semantics.TableSet, predicate sqlparser.Expr) bool {
	for _, queryTable := range qg.Tables {
		if queryTable.TableID != table {
			continue
		}
		queryTable.Predicates = append(queryTable.Predicates, predicate)
		return true
	}
	return false
}

// addNoDepsPredicate records a predicate with no table dependencies, ANDing it
// onto any predicate already stored there.
func (qg *QueryGraph) addNoDepsPredicate(predicate sqlparser.Expr) {
	if qg.NoDeps != nil {
		predicate = &sqlparser.AndExpr{
			Left:  qg.NoDeps,
			Right: predicate,
		}
	}
	qg.NoDeps = predicate
}
// UnsolvedPredicates implements the Operator interface, returning the join
// predicates whose dependencies are not fully covered by this graph's tables.
func (qg *QueryGraph) UnsolvedPredicates(_ *semantics.SemTable) []sqlparser.Expr {
	var result []sqlparser.Expr
	for _, join := range qg.innerJoins {
		set, exprs := join.deps, join.exprs
		if !set.IsSolvedBy(qg.TableID()) {
			result = append(result, exprs...)
		}
	}
	return result
}

// CheckValid implements the Operator interface; a query graph is always valid.
func (qg *QueryGraph) CheckValid() error {
	return nil
}

// Compact implements the Operator interface; a query graph is already compact,
// so it returns itself unchanged.
func (qg *QueryGraph) Compact(*semantics.SemTable) (Operator, error) {
	return qg, nil
}
package main
import (
"context"
"fmt"
"github.com/jamespfennell/hoard"
"github.com/jamespfennell/hoard/config"
"github.com/jamespfennell/hoard/internal/util"
"github.com/urfave/cli/v2"
"os"
"time"
)
// Names of the CLI flags shared by the commands defined in main.
const configFile = "config-file"
const endHour = "end-hour"
const enforceCompression = "enforce-compression"
const feed = "feed"
const flattenFeeds = "flatten-feeds"
const flattenHours = "flatten-hours"
const fix = "fix"
const keepPacked = "keep-packed"
const logLevel = "log-level"
const sync = "sync"
const port = "port"
const removeWorkspace = "remove-workspace"
const startHour = "start-hour"
const descriptionMain = `
Hoard is an application for collecting data feeds over time.
The central component of Hoard is the collector process, which is run using the
collector command. This process collects data from the configured feeds and stores
the results in remote object storage. The data can then be retrieved on any
computer using the retrieve command.
Hoard runs with a configuration file that specifies the feeds to collect, the object
storage locations in which to store data, and some other settings. Use the config
command to see an example config file.
Website: https://github.com/jamespfennell/hoard
`
const descriptionCollector = `
The Hoard collector is a process that generally runs all the time, collecting data
from the configured feeds and periodically uploading data to the configured remote
object storage locations.
The collector can (and generally should) be run simultaneously on multiple machines.
This will enable the collection process to continue even if one machine becomes
unavailable (for example, if the machine is being rebooted to apply OS updates).
The collector process launches an HTTP server that exports Prometheus metrics.
`
const descriptionPack = `
The pack action takes all downloaded files and bundles them into compressed archive
files.
`
const descriptionMerge = `
The merge action finds compressed archive files for the same hour, and merges them
into a single new archive file.
`
const descriptionUpload = `
The upload action finds compressed archive files in the local workspace and transfers
them to remote object storage. The local files will be deleted afterwards. This action
automatically merges multiple archives for the same hour if such archives exist in
remote object storage.
`
const descriptionVacate = `
Vacate is mainly used when a machine running Hoard is being decommissioned. It
transfers all local data (downloaded files and archive files) to remote object storage.
This action is equivalent to running pack, merge, and upload.
`
const descriptionAudit = `
Auditing looks for problems in the data in remote object storage and optionally fixes
them. Currently, an audit looks for the following problems:
* Hours that have multiple archive files for the same feed. This problem generally results
in unnecessary duplicate data being store remotely. Fixing this involves merging the
archives together.
* Archive files that are present in one object storage but not in another. Fixing this
will transfer files between remote storage.
* (Optional) Archive files that have the wrong compression settings. This problem is
ignored by default because fixing it involves recompressing the archive files which
can be extremely memory and CPU expensive. Use the flag --enforce-compression to
check for this problem.
`
func main() {
app := &cli.App{
Name: "Hoard",
Usage: "distributed data feed collection",
Description: descriptionMain,
Flags: []cli.Flag{
&cli.StringFlag{
Name: configFile,
Usage: "path to the Hoard config file",
Value: "hoard.yml",
DefaultText: "hoard.yml",
},
&cli.StringFlag{
Name: logLevel,
Usage: "the level to log at (debug, info or error)",
DefaultText: "info",
},
&cli.BoolFlag{
Name: sync,
Usage: "don't run feed operations concurrently",
DefaultText: "false",
},
&cli.StringSliceFlag{
Name: feed,
Aliases: nil,
Usage: "if set, work will only be done for feeds with the specified IDs",
},
},
Commands: []*cli.Command{
{
Name: "config",
Usage: "print an example Hoard configuration file",
Action: func(*cli.Context) error {
fmt.Print(config.SampleConfig)
return nil
},
},
{
Name: "verify",
Usage: "verify the provided Hoard config is valid",
Action: newAction(func(c *config.Config) error {
fmt.Println("Provided config is valid!")
return nil
}),
},
{
Name: "collector",
Usage: "run the Hoard collector",
Description: descriptionCollector,
Action: newAction(func(c *config.Config) error {
return hoard.RunCollector(util.WithSystemInterrupt(context.Background()), c)
}),
Flags: []cli.Flag{
&cli.IntFlag{
Name: port,
Usage: "port the collection HTTP server will listen on",
DefaultText: "read from config file",
},
},
},
{
Name: "retrieve",
Usage: "retrieve data from remote object storage",
ArgsUsage: "path",
Action: func(c *cli.Context) error {
cfg, err := configFromCliContext(c)
if err != nil {
fmt.Println(err)
return err
}
if c.Args().Len() != 1 {
return fmt.Errorf("expected exactly 1 argument (the path to retrieve to); recieved %d", c.Args().Len())
}
return hoard.Retrieve(cfg, hoard.RetrieveOptions{
Path: c.Args().First(),
KeepPacked: c.Bool(keepPacked),
FlattenTimeDirs: c.Bool(flattenHours),
FlattenFeedDirs: c.Bool(flattenFeeds),
Start: *c.Timestamp(startHour),
End: *c.Timestamp(endHour),
})
},
Description: "",
Flags: []cli.Flag{
&cli.BoolFlag{
Name: keepPacked,
Usage: "don't unpack archives after retrieving",
Value: false,
},
&cli.BoolFlag{
Name: flattenFeeds,
Usage: "place files from different feeds in the same directories",
Value: false,
},
&cli.BoolFlag{
Name: flattenHours,
Usage: "place files from different hours in the same directories",
Value: false,
},
&cli.TimestampFlag{
Name: startHour,
Usage: "the first hour to retrieve in the form YYYY-MM-DD-HH",
DefaultText: "24 hours ago",
Value: cli.NewTimestamp(time.Now().UTC().Add(-24 * time.Hour)),
Layout: "2006-01-02-15",
},
&cli.TimestampFlag{
Name: endHour,
Usage: "the last hour to retrieve in the form YYYY-MM-DD-HH",
Value: cli.NewTimestamp(time.Now().UTC()),
DefaultText: "current time",
Layout: "2006-01-02-15",
},
},
},
{
Name: "download",
Usage: "run one download cycle for each feed",
Action: newAction(hoard.Download),
},
{
Name: "pack",
Usage: "run one pack cycle for each feed",
Description: descriptionPack,
Action: newAction(hoard.Pack),
},
{
Name: "merge",
Usage: "run one merge cycle for each feed",
Description: descriptionMerge,
Action: newAction(hoard.Merge),
},
{
Name: "upload",
Usage: "run one upload cycle for each feed",
Description: descriptionUpload,
Action: newAction(hoard.Upload),
},
{
Name: "vacate",
Usage: "move all local files from disk to remote object storage",
Description: descriptionVacate,
Action: func(c *cli.Context) error {
cfg, err := configFromCliContext(c)
if err != nil {
fmt.Println(err)
return err
}
return hoard.Vacate(cfg, c.Bool(removeWorkspace))
},
Flags: []cli.Flag{
&cli.BoolFlag{
Name: removeWorkspace,
Usage: "remove workspace after vacating files",
Value: false,
DefaultText: "false",
},
},
},
{
Name: "audit",
Usage: "perform an audit of the data stored remotely",
Description: descriptionAudit,
Action: func(c *cli.Context) error {
cfg, err := configFromCliContext(c)
if err != nil {
fmt.Println(err)
return err
}
_ = cfg
return hoard.Audit(
cfg, c.Timestamp(startHour), *c.Timestamp(endHour), c.Bool(enforceCompression), c.Bool(fix))
},
Flags: []cli.Flag{
&cli.BoolFlag{
Name: enforceCompression,
Usage: "fix remote archives that have the wrong compression format",
Value: false,
DefaultText: "false",
},
&cli.BoolFlag{
Name: fix,
Usage: "fix problems found in the audit",
Value: false,
DefaultText: "false",
},
&cli.TimestampFlag{
Name: startHour,
Usage: "the first hour in the audit",
DefaultText: "no lower bound on the hours audited",
Layout: "2006-01-02-15",
},
&cli.TimestampFlag{
Name: endHour,
Usage: "the last hour in the audit",
Value: cli.NewTimestamp(time.Now().UTC()),
DefaultText: "current time",
Layout: "2006-01-02-15",
},
},
},
},
}
if err := app.Run(os.Args); err != nil {
fmt.Println("Error:", err)
os.Exit(1)
}
}
// configFromCliContext reads and parses the Hoard config file named by the
// --config-file flag, then applies CLI-level overrides (port, log level, sync
// mode, and feed filtering) on top of the parsed config.
func configFromCliContext(c *cli.Context) (*config.Config, error) {
	b, err := os.ReadFile(c.String(configFile))
	if err != nil {
		return nil, fmt.Errorf("failed to read the Hoard config file: %w", err)
	}
	cfg, err := config.NewConfig(b)
	if err != nil {
		return nil, err
	}
	if c.IsSet(port) {
		cfg.Port = c.Int(port)
	}
	if c.IsSet(logLevel) {
		cfg.LogLevel = c.String(logLevel)
	}
	if c.IsSet(sync) {
		cfg.Sync = c.Bool(sync)
	}
	if c.IsSet(feed) {
		// Keep only the configured feeds whose IDs were requested, preserving
		// the order the IDs were given on the command line.
		feedIDs := c.StringSlice(feed)
		var feedsToKeep []config.Feed
		for _, feedID := range feedIDs {
			// The inner loop variable was previously named "feed", shadowing
			// the package-level flag-name constant of the same name.
			for _, f := range cfg.Feeds {
				if f.ID == feedID {
					feedsToKeep = append(feedsToKeep, f)
				}
			}
		}
		cfg.Feeds = feedsToKeep
	}
	return cfg, nil
}
// newAction adapts a function that consumes a parsed *config.Config into a
// cli.ActionFunc: the config (with CLI overrides applied) is loaded first,
// and any load error aborts the command.
func newAction(f func(*config.Config) error) cli.ActionFunc {
	return func(c *cli.Context) error {
		cfg, err := configFromCliContext(c)
		if err != nil {
			return err
		}
		return f(cfg)
	}
}
package rr
/*OW-SPEC
Simhyd:
inputs:
rainfall: mm
pet: mm
states:
SoilMoistureStore:
Groundwater:
TotalStore:
parameters:
baseflowCoefficient: ''
imperviousThreshold: ''
infiltrationCoefficient: ''
infiltrationShape: ''
interflowCoefficient: ''
perviousFraction: ''
rainfallInterceptionStoreCapacity: ''
rechargeCoefficient: ''
soilMoistureStoreCapacity: ''
outputs:
runoff: mm
quickflow: mm
baseflow: mm
store: mm
implementation:
function: simhyd
type: scalar
lang: go
outputs: params
init:
zero: true
lang: go
tags:
rainfall runoff
*/
import (
"math"
"github.com/flowmatters/openwater-core/data"
)
// SOIL_ET_CONST scales soil evapotranspiration by the current soil moisture
// fraction. NOTE(review): name is not Go-style MixedCaps; kept as-is.
const SOIL_ET_CONST = 10.0

// simhyd runs the SIMHYD conceptual rainfall-runoff model over daily rainfall
// and PET series, writing per-day runoff, quickflow, baseflow, and soil-store
// traces into the supplied output arrays, and returning the final soil
// moisture store, groundwater store, and total (pervious-scaled) store.
// Parameter meanings follow the OW-SPEC header at the top of this file
// (e.g. risc = rainfall interception store capacity, smsc = soil moisture
// store capacity). All depths are in mm.
func simhyd(rainfall data.ND1Float64, pet data.ND1Float64,
	initialStore float64, initialGW float64, initialTotalStore float64,
	baseflowCoefficient float64, imperviousThreshold float64, infiltrationCoefficient float64,
	infiltrationShape float64, interflowCoefficient float64, perviousFraction float64,
	risc float64, rechargeCoefficient float64, smsc float64,
	runoff, quickflow, baseflow, store data.ND1Float64) (
	float64, // final store
	float64, // final GW
	float64) { // final total store
	nDays := rainfall.Len1()
	soilMoistureStore := initialStore
	gw := initialGW
	totalStore := initialTotalStore
	idx := []int{0}
	for i := 0; i < nDays; i++ {
		idx[0] = i
		rainToday := rainfall.Get(idx)
		petToday := pet.Get(idx)
		perviousIncident := rainToday
		imperviousIncident := rainToday
		// Impervious area: ET up to the threshold, the remainder runs off.
		imperviousEt := math.Min(imperviousThreshold, imperviousIncident)
		imperviousRunoff := imperviousIncident - imperviousEt
		// Pervious area: interception first, then throughfall reaches soil.
		interceptionEt := math.Min(perviousIncident, math.Min(petToday, risc))
		throughfall := perviousIncident - interceptionEt
		soilMoistureFraction := soilMoistureStore / smsc
		// Infiltration capacity decays exponentially as the soil fills.
		infiltrationCapacity := infiltrationCoefficient * math.Exp(-infiltrationShape*soilMoistureFraction)
		infiltration := math.Min(throughfall, infiltrationCapacity)
		infiltrationXsRunoff := throughfall - infiltration
		interflowRunoff := interflowCoefficient * soilMoistureFraction * infiltration
		infiltrationAfterInterflow := infiltration - interflowRunoff
		recharge := rechargeCoefficient * soilMoistureFraction * infiltrationAfterInterflow
		soilInput := infiltrationAfterInterflow - recharge
		soilMoistureStore += soilInput
		soilMoistureFraction = soilMoistureStore / smsc
		gw += recharge
		// Overflow above capacity spills to groundwater.
		if soilMoistureFraction > 1 {
			gw += soilMoistureStore - smsc
			soilMoistureStore = smsc
			soilMoistureFraction = 1
		}
		baseflowRunoff := baseflowCoefficient * gw
		gw -= baseflowRunoff
		soilEt := math.Min(soilMoistureStore, math.Min(petToday-interceptionEt, soilMoistureFraction*SOIL_ET_CONST))
		soilMoistureStore -= soilEt
		totalStore = (soilMoistureStore + gw) * perviousFraction
		//totalEt := (1 - perviousFraction) * imperviousEt + perviousFraction * (interceptionEt + soilEt);
		eventRunoff := (1-perviousFraction)*imperviousRunoff +
			perviousFraction*(infiltrationXsRunoff+interflowRunoff)
		totalRunoff := eventRunoff + perviousFraction*baseflowRunoff
		//effectiveRainfall := rainToday - totalEt;
		store.Set(idx, soilMoistureStore)
		baseflow.Set(idx, baseflowRunoff*perviousFraction)
		runoff.Set(idx, totalRunoff)
		quickflow.Set(idx, eventRunoff)
	}
	return soilMoistureStore, gw, totalStore
}
package nav
import (
"fmt"
"math"
astar "github.com/beefsack/go-astar"
)
// KindType identifies what occupies a grid tile.
type KindType int32
// Kind* constants refer to tile kinds for input and output.
const (
	KindTypePlain KindType = iota // walkable ground
	KindTypeBlocker // static obstacle
	KindTypeMoveItem // attackable object
)
var (
	// kindTypeMap supplies the human-readable (Chinese) display name per
	// kind, used by KindType.String.
	kindTypeMap = map[KindType]string{
		KindTypePlain: "地面",
		KindTypeBlocker: "阻挡物",
		KindTypeMoveItem: "攻击物",
	}
)
// String returns the display name of the kind; unknown kinds yield "".
func (kt KindType) String() string {
	return kindTypeMap[kt]
}
// KindCosts map tile kinds to movement costs.
var kindCosts = map[KindType]float64{
	KindTypePlain: 1.0,
	KindTypeBlocker: 2.0,
	KindTypeMoveItem: 2.0,
}
// Cost returns the movement cost for the kind; unknown kinds cost 0
// (the map's zero value).
func (kt KindType) Cost() float64 {
	return kindCosts[kt]
}
// A Tile is a tile in a grid which implements Pather.
type Tile struct {
	// Kind is the kind of tile, potentially affecting movement.
	kindType KindType
	// X and Z are the grid coordinates of the tile (the plane is XOZ).
	x, z int
	// W is a reference to the World that the tile is a part of.
	w *World
}
// IsMask reports whether the tile is plain, walkable ground.
func (t *Tile) IsMask() bool {
	return t.kindType == KindTypePlain
}
// String returns a human-readable description of the tile for debugging,
// e.g. "tile:type:地面,x:1,z:2".
func (t *Tile) String() string {
	// Fixed the missing ':' after "z" so both coordinate labels are
	// formatted consistently ("x:%d,z:%d").
	return fmt.Sprintf("tile:type:%s,x:%d,z:%d", t.kindType.String(), t.x, t.z)
}
// GetXZ reports the tile's grid coordinates.
func (t *Tile) GetXZ() (x int, z int) {
	x, z = t.x, t.z
	return
}
// NewTile constructs a tile of the given kind at grid position (x, z),
// bound to world w.
func NewTile(w *World, kt KindType, x int, z int) *Tile {
	return &Tile{
		w:        w,
		kindType: kt,
		x:        x,
		z:        z,
	}
}
// func (t *Tile) Stand() {
// if t.kindType == KindTypePlain {
// t.kindType = KindTypeMoveItem
// }
// }
// func (t *Tile) Move() {
// if t.kindType == KindTypeMoveItem {
// t.kindType = KindTypePlain
// }
// }
// PathNeighbors returns the walkable neighbors of the tile: the eight
// surrounding tiles (orthogonal and diagonal) that exist on the board and
// are plain ground. Blockers, move-items and tiles off the edge of the
// board are excluded.
func (t *Tile) PathNeighbors() []astar.Pather {
	// Offsets are listed in the same order the previous hand-unrolled
	// checks ran: left, left-up, left-down, right, right-up, right-down,
	// up, down. Each entry is {dz, dx}.
	offsets := [8][2]int{
		{0, -1}, {1, -1}, {-1, -1},
		{0, 1}, {1, 1}, {-1, 1},
		{1, 0}, {-1, 0},
	}
	neighbors := make([]astar.Pather, 0, len(offsets))
	for _, o := range offsets {
		if n := t.w.Tile(t.z+o[0], t.x+o[1]); n != nil && n.kindType == KindTypePlain {
			neighbors = append(neighbors, n)
		}
	}
	return neighbors
}
// PathNeighborCost returns the movement cost of stepping onto the directly
// neighboring tile: the destination kind's cost scaled by the Euclidean
// distance (1 for orthogonal moves, sqrt(2) for diagonal moves).
func (t *Tile) PathNeighborCost(to astar.Pather) float64 {
	dest := to.(*Tile)
	dx := float64(dest.x - t.x)
	dz := float64(dest.z - t.z)
	return dest.kindType.Cost() * math.Sqrt(dx*dx+dz*dz)
}
// PathEstimatedCost uses Manhattan distance (|dx| + |dz|) as the A*
// heuristic between non-adjacent nodes.
// NOTE(review): with diagonal moves allowed (see PathNeighbors) the
// Manhattan heuristic can overestimate, which may make A* paths
// non-optimal — confirm this is acceptable.
func (t *Tile) PathEstimatedCost(to astar.Pather) float64 {
	dest := to.(*Tile)
	dx, dz := dest.x-t.x, dest.z-t.z
	if dx < 0 {
		dx = -dx
	}
	if dz < 0 {
		dz = -dz
	}
	return float64(dx + dz)
}
// World is a two dimensional map of Tiles.
type World struct {
	// startX/startZ are the world-space coordinates of grid cell (0, 0).
	startX float64
	startZ float64
	// accuracy is the size of one grid cell in world units.
	accuracy int
	// tileOfTiles indexes tiles by row (z) then column (x).
	tileOfTiles map[int]map[int]*Tile
}
// Tile gets the tile at the given grid coordinates in the world, or nil
// when either the row or the cell does not exist.
func (w *World) Tile(z, x int) *Tile {
	// Indexing a missing key yields the zero value, and indexing a nil
	// inner map is legal in Go, so both lookups collapse into one
	// expression: a missing row and a missing cell both return nil.
	return w.tileOfTiles[z][x]
}
// GetPositionForTile converts a tile's grid indices back to world-space
// coordinates: X grows with the column index, Z shrinks with the row index.
func (w *World) GetPositionForTile(tile *Tile) (x, z float64) {
	xIdx, zIdx := tile.GetXZ()
	x = w.startX + float64(xIdx*w.accuracy)
	z = w.startZ - float64(zIdx*w.accuracy)
	return x, z
}
// TileForPosition returns the tile whose cell contains the world-space
// position (x, z), or nil when the position is outside the mapped grid.
func (w *World) TileForPosition(x float64, z float64) *Tile {
	cell := float64(w.accuracy)
	xIdx := int(math.Floor((x - w.startX) / cell))
	zIdx := int(math.Floor((w.startZ - z) / cell))
	return w.Tile(zIdx, xIdx)
}
// NewWorld builds a World from a walkability mask. maskMap is indexed
// [z][x]; a zero entry becomes a blocker tile, any other value becomes
// plain (walkable) ground. startX/startZ anchor cell (0, 0) in world
// space and accuracy is the cell size.
func NewWorld(startX int, startZ int, accuracy int, maskMap [][]int) *World {
	w := &World{
		startX:      float64(startX),
		startZ:      float64(startZ),
		accuracy:    accuracy,
		tileOfTiles: make(map[int]map[int]*Tile, len(maskMap)),
	}
	for z, row := range maskMap {
		rowTiles := make(map[int]*Tile, len(row))
		for x, mask := range row {
			kind := KindTypeBlocker
			if mask != 0 {
				kind = KindTypePlain
			}
			rowTiles[x] = NewTile(w, kind, x, z)
		}
		w.tileOfTiles[z] = rowTiles
	}
	return w
}
// TileFromWorld returns the tile containing the world-space position
// (x, z), or nil when the position is outside the mapped grid.
func TileFromWorld(w *World, x float64, z float64) *Tile {
	// Thin wrapper: World.TileForPosition already converts world
	// coordinates to grid indices using the world origin and cell size.
	return w.TileForPosition(x, z)
} | core/nav/tile.go | 0.670932 | 0.513973 | tile.go | starcoder |
package ode
// #cgo pkg-config: ode
// #include <ode/ode.h>
import "C"
import (
"unsafe"
)
// World constants
const (
	// WorldStepThreadCountUnlimited requests no cap on stepping threads.
	WorldStepThreadCountUnlimited = C.dWORLDSTEP_THREADCOUNT_UNLIMITED
)
var (
	// worldData holds the user data attached to each World via SetData.
	// It lives on the Go side because arbitrary Go values cannot be
	// stored in the C object. NOTE(review): access is not synchronized —
	// concurrent SetData/Data/Destroy calls would be a data race; confirm
	// single-goroutine use.
	worldData = map[World]interface{}{}
)
// World represents a simulation world.
// It wraps a C dWorldID handle stored as an opaque pointer-sized value.
type World uintptr
// NewWorld returns a new World instance.
func NewWorld() World {
	return cToWorld(C.dWorldCreate())
}
// cToWorld converts a C world handle into the Go wrapper type.
func cToWorld(c C.dWorldID) World {
	return World(unsafe.Pointer(c))
}
// c converts the Go wrapper back into the C world handle.
func (w World) c() C.dWorldID {
	return C.dWorldID(unsafe.Pointer(w))
}
// Destroy destroys the world.
// It also drops any user data associated via SetData.
func (w World) Destroy() {
	delete(worldData, w)
	C.dWorldDestroy(w.c())
}
// SetData associates user-specified data with the world.
func (w World) SetData(data interface{}) {
	worldData[w] = data
}
// Data returns the user-specified data associated with the world,
// or nil if none has been set.
func (w World) Data() interface{} {
	return worldData[w]
}
// NewBody returns a new Body instance created in this world.
func (w World) NewBody() Body {
	return cToBody(C.dBodyCreate(w.c()))
}
// SetGravity sets the gravity vector.
func (w World) SetGravity(grav Vector3) {
	C.dWorldSetGravity(w.c(), C.dReal(grav[0]), C.dReal(grav[1]), C.dReal(grav[2]))
}
// Gravity returns the gravity vector.
func (w World) Gravity() Vector3 {
	grav := NewVector3()
	C.dWorldGetGravity(w.c(), (*C.dReal)(&grav[0]))
	return grav
}
// SetERP sets the error reduction parameter.
func (w World) SetERP(erp float64) {
	C.dWorldSetERP(w.c(), C.dReal(erp))
}
// ERP returns the error reduction parameter.
func (w World) ERP() float64 {
	return float64(C.dWorldGetERP(w.c()))
}
// SetCFM sets the constraint force mixing value.
func (w World) SetCFM(cfm float64) {
	C.dWorldSetCFM(w.c(), C.dReal(cfm))
}
// CFM returns the constraint force mixing value.
func (w World) CFM() float64 {
	return float64(C.dWorldGetCFM(w.c()))
}
// SetStepIslandsProcessingMaxThreadCount sets the maximum number of threads to
// use for island stepping (WorldStepThreadCountUnlimited removes the cap).
func (w World) SetStepIslandsProcessingMaxThreadCount(count int) {
	C.dWorldSetStepIslandsProcessingMaxThreadCount(w.c(), C.unsigned(count))
}
// StepIslandsProcessingMaxThreadCount returns the maximum number of threads to
// use for island stepping.
func (w World) StepIslandsProcessingMaxThreadCount() int {
	return int(C.dWorldGetStepIslandsProcessingMaxThreadCount(w.c()))
}
// UseSharedWorkingMemory enables sharing working memory with another world,
// and returns whether the operation succeeded.
func (w World) UseSharedWorkingMemory(from World) bool {
	return C.dWorldUseSharedWorkingMemory(w.c(), from.c()) != 0
}
// CleanupWorkingMemory cleans up the world's working memory.
func (w World) CleanupWorkingMemory() {
	C.dWorldCleanupWorkingMemory(w.c())
}
// Step executes a simulation step, and returns whether the operation
// succeeded. stepSize is the integration step (presumably seconds — see
// the ODE manual).
func (w World) Step(stepSize float64) bool {
	return C.dWorldStep(w.c(), C.dReal(stepSize)) != 0
}
// QuickStep executes a simulation quick step, and returns whether the
// operation succeeded.
func (w World) QuickStep(stepSize float64) bool {
	return C.dWorldQuickStep(w.c(), C.dReal(stepSize)) != 0
}
// ImpulseToForce converts an impulse to a force over a step duration:
// the returned force, applied for stepSize, produces the given impulse.
func (w World) ImpulseToForce(stepSize float64, impulse Vector3) Vector3 {
	force := NewVector3()
	C.dWorldImpulseToForce(w.c(), C.dReal(stepSize),
		C.dReal(impulse[0]), C.dReal(impulse[1]), C.dReal(impulse[2]),
		(*C.dReal)(&force[0]))
	return force
}
// SetQuickStepNumIterations sets the number of iterations to execute during a
// quick step.
func (w World) SetQuickStepNumIterations(num int) {
	C.dWorldSetQuickStepNumIterations(w.c(), C.int(num))
}
// QuickStepNumIterations returns the number of iterations to execute during a
// quick step.
func (w World) QuickStepNumIterations() int {
	return int(C.dWorldGetQuickStepNumIterations(w.c()))
}
// SetQuickStepW sets the over-relaxation parameter.
func (w World) SetQuickStepW(overRelaxation float64) {
	C.dWorldSetQuickStepW(w.c(), C.dReal(overRelaxation))
}
// QuickStepW returns the over-relaxation parameter.
func (w World) QuickStepW() float64 {
	return float64(C.dWorldGetQuickStepW(w.c()))
}
// SetContactMaxCorrectingVelocity sets the maximum correcting velocity that
// contacts are allowed to generate.
// NOTE(review): the parameter name "overRelaxation" looks copy-pasted from
// SetQuickStepW — the value is actually a velocity limit.
func (w World) SetContactMaxCorrectingVelocity(overRelaxation float64) {
	C.dWorldSetContactMaxCorrectingVel(w.c(), C.dReal(overRelaxation))
}
// ContactMaxCorrectingVelocity returns the maximum correcting velocity that
// contacts are allowed to generate.
func (w World) ContactMaxCorrectingVelocity() float64 {
	return float64(C.dWorldGetContactMaxCorrectingVel(w.c()))
}
// SetContactSurfaceLayer sets the depth of the surface layer around all
// geometry objects.
func (w World) SetContactSurfaceLayer(depth float64) {
	C.dWorldSetContactSurfaceLayer(w.c(), C.dReal(depth))
}
// ContactSurfaceLayer returns the depth of the surface layer around all
// geometry objects.
func (w World) ContactSurfaceLayer() float64 {
	return float64(C.dWorldGetContactSurfaceLayer(w.c()))
}
// SetAutoDisableLinearThreshold sets the auto disable linear average threshold.
func (w World) SetAutoDisableLinearThreshold(linearThreshold float64) {
	C.dWorldSetAutoDisableLinearThreshold(w.c(), C.dReal(linearThreshold))
}
// AutoDisableLinearThreshold returns the auto disable linear average threshold.
func (w World) AutoDisableLinearThreshold() float64 {
	return float64(C.dWorldGetAutoDisableLinearThreshold(w.c()))
}
// SetAutoDisableAngularThreshold sets the auto disable angular average threshold.
func (w World) SetAutoDisableAngularThreshold(angularThreshold float64) {
	C.dWorldSetAutoDisableAngularThreshold(w.c(), C.dReal(angularThreshold))
}
// AutoDisableAngularThreshold returns the auto disable angular average threshold.
func (w World) AutoDisableAngularThreshold() float64 {
	return float64(C.dWorldGetAutoDisableAngularThreshold(w.c()))
}
// SetAutoAutoDisableAverageSamplesCount sets auto disable average sample count.
// NOTE(review): the name has a doubled "Auto", and the underlying C call
// takes an unsigned sample count while this wrapper accepts a bool — so only
// 0 or 1 can ever be passed. Both look like defects, but fixing either would
// break the exported interface; confirm before changing.
func (w World) SetAutoAutoDisableAverageSamplesCount(averageSamplesCount bool) {
	C.dWorldSetAutoDisableAverageSamplesCount(w.c(), C.unsigned(btoi(averageSamplesCount)))
}
// AutoDisableAverageSamplesCount returns the auto disable sample count,
// collapsed to a bool (true when the count is non-zero).
func (w World) AutoDisableAverageSamplesCount() bool {
	return C.dWorldGetAutoDisableAverageSamplesCount(w.c()) != 0
}
// SetAutoDisableSteps sets the number of auto disable steps.
func (w World) SetAutoDisableSteps(steps int) {
	C.dWorldSetAutoDisableSteps(w.c(), C.int(steps))
}
// AutoDisableSteps returns the number of auto disable steps.
func (w World) AutoDisableSteps() int {
	return int(C.dWorldGetAutoDisableSteps(w.c()))
}
// SetAutoDisableTime sets the auto disable time.
func (w World) SetAutoDisableTime(time float64) {
	C.dWorldSetAutoDisableTime(w.c(), C.dReal(time))
}
// AutoDisableTime returns the auto disable time.
func (w World) AutoDisableTime() float64 {
	return float64(C.dWorldGetAutoDisableTime(w.c()))
}
// SetAutoDisable sets whether the body will be auto disabled.
func (w World) SetAutoDisable(doAutoDisable bool) {
	C.dWorldSetAutoDisableFlag(w.c(), C.int(btoi(doAutoDisable)))
}
// AutoDisable returns whether the body will be auto disabled.
func (w World) AutoDisable() bool {
	return C.dWorldGetAutoDisableFlag(w.c()) != 0
}
// SetLinearDamping sets the linear damping scale.
func (w World) SetLinearDamping(scale float64) {
	C.dWorldSetLinearDamping(w.c(), C.dReal(scale))
}
// LinearDamping returns the linear damping scale.
func (w World) LinearDamping() float64 {
	return float64(C.dWorldGetLinearDamping(w.c()))
}
// SetAngularDamping sets the angular damping scale.
func (w World) SetAngularDamping(scale float64) {
	C.dWorldSetAngularDamping(w.c(), C.dReal(scale))
}
// AngularDamping returns the angular damping scale.
func (w World) AngularDamping() float64 {
	return float64(C.dWorldGetAngularDamping(w.c()))
}
// SetLinearDampingThreshold sets the linear damping threshold.
func (w World) SetLinearDampingThreshold(threshold float64) {
	C.dWorldSetLinearDampingThreshold(w.c(), C.dReal(threshold))
}
// LinearDampingThreshold returns the linear damping threshold.
func (w World) LinearDampingThreshold() float64 {
	return float64(C.dWorldGetLinearDampingThreshold(w.c()))
}
// SetAngularDampingThreshold sets the angular damping threshold.
func (w World) SetAngularDampingThreshold(threshold float64) {
	C.dWorldSetAngularDampingThreshold(w.c(), C.dReal(threshold))
}
// AngularDampingThreshold returns the angular damping threshold.
func (w World) AngularDampingThreshold() float64 {
	return float64(C.dWorldGetAngularDampingThreshold(w.c()))
}
// SetMaxAngularSpeed sets the maximum angular speed.
func (w World) SetMaxAngularSpeed(maxSpeed float64) {
	C.dWorldSetMaxAngularSpeed(w.c(), C.dReal(maxSpeed))
}
// MaxAngularSpeed returns the maximum angular speed.
func (w World) MaxAngularSpeed() float64 {
	return float64(C.dWorldGetMaxAngularSpeed(w.c()))
}
// NewBallJoint returns a new BallJoint instance created in this world and
// owned by the given joint group.
// All the New*Joint constructors below follow the same pattern: create the
// C joint, wrap it via cToJoint, and assert the expected concrete wrapper
// type (the assertion panics if cToJoint yields an unexpected type).
func (w World) NewBallJoint(group JointGroup) BallJoint {
	return cToJoint(C.dJointCreateBall(w.c(), group.c())).(BallJoint)
}
// NewHingeJoint returns a new HingeJoint instance
func (w World) NewHingeJoint(group JointGroup) HingeJoint {
	return cToJoint(C.dJointCreateHinge(w.c(), group.c())).(HingeJoint)
}
// NewSliderJoint returns a new SliderJoint instance
func (w World) NewSliderJoint(group JointGroup) SliderJoint {
	return cToJoint(C.dJointCreateSlider(w.c(), group.c())).(SliderJoint)
}
// NewContactJoint returns a new ContactJoint instance initialised from the
// given contact parameters (copied into a temporary C struct).
func (w World) NewContactJoint(group JointGroup, contact *Contact) ContactJoint {
	c := &C.dContact{}
	contact.toC(c)
	return cToJoint(C.dJointCreateContact(w.c(), group.c(), c)).(ContactJoint)
}
// NewUniversalJoint returns a new UniversalJoint instance
func (w World) NewUniversalJoint(group JointGroup) UniversalJoint {
	return cToJoint(C.dJointCreateUniversal(w.c(), group.c())).(UniversalJoint)
}
// NewHinge2Joint returns a new Hinge2Joint instance
func (w World) NewHinge2Joint(group JointGroup) Hinge2Joint {
	return cToJoint(C.dJointCreateHinge2(w.c(), group.c())).(Hinge2Joint)
}
// NewFixedJoint returns a new FixedJoint instance
func (w World) NewFixedJoint(group JointGroup) FixedJoint {
	return cToJoint(C.dJointCreateFixed(w.c(), group.c())).(FixedJoint)
}
// NewNullJoint returns a new NullJoint instance
func (w World) NewNullJoint(group JointGroup) NullJoint {
	return cToJoint(C.dJointCreateNull(w.c(), group.c())).(NullJoint)
}
// NewAMotorJoint returns a new AMotorJoint instance
func (w World) NewAMotorJoint(group JointGroup) AMotorJoint {
	return cToJoint(C.dJointCreateAMotor(w.c(), group.c())).(AMotorJoint)
}
// NewLMotorJoint returns a new LMotorJoint instance
func (w World) NewLMotorJoint(group JointGroup) LMotorJoint {
	return cToJoint(C.dJointCreateLMotor(w.c(), group.c())).(LMotorJoint)
}
// NewPlane2DJoint returns a new Plane2DJoint instance
func (w World) NewPlane2DJoint(group JointGroup) Plane2DJoint {
	return cToJoint(C.dJointCreatePlane2D(w.c(), group.c())).(Plane2DJoint)
}
// NewPRJoint returns a new PRJoint instance
func (w World) NewPRJoint(group JointGroup) PRJoint {
	return cToJoint(C.dJointCreatePR(w.c(), group.c())).(PRJoint)
}
// NewPUJoint returns a new PUJoint instance
func (w World) NewPUJoint(group JointGroup) PUJoint {
	return cToJoint(C.dJointCreatePU(w.c(), group.c())).(PUJoint)
}
// NewPistonJoint returns a new PistonJoint instance
func (w World) NewPistonJoint(group JointGroup) PistonJoint {
	return cToJoint(C.dJointCreatePiston(w.c(), group.c())).(PistonJoint)
}
// NewDBallJoint returns a new DBallJoint instance
func (w World) NewDBallJoint(group JointGroup) DBallJoint {
	return cToJoint(C.dJointCreateDBall(w.c(), group.c())).(DBallJoint)
}
// NewDHingeJoint returns a new DHingeJoint instance
func (w World) NewDHingeJoint(group JointGroup) DHingeJoint {
	return cToJoint(C.dJointCreateDHinge(w.c(), group.c())).(DHingeJoint)
}
// NewTransmissionJoint returns a new TransmissionJoint instance
func (w World) NewTransmissionJoint(group JointGroup) TransmissionJoint {
return cToJoint(C.dJointCreateTransmission(w.c(), group.c())).(TransmissionJoint)
} | world.go | 0.768733 | 0.620392 | world.go | starcoder |
package omnijson
/*
Result
{
"chain": "xxxx", (string) current network name as defined in BIP70 (main, test, regtest)
"blocks": xxxxxx, (numeric) the current number of blocks processed in the server
"headers": xxxxxx, (numeric) the current number of headers we have validated
"bestblockhash": "...", (string) the hash of the currently best block
"difficulty": xxxxxx, (numeric) the current difficulty
"mediantime": xxxxxx, (numeric) median time for the current best block
"verificationprogress": xxxx, (numeric) estimate of verification progress [0..1]
"chainwork": "xxxx" (string) total amount of work in active chain, in hexadecimal
"pruned": xx, (boolean) if the blocks are subject to pruning
"pruneheight": xxxxxx, (numeric) lowest-height complete block stored
"softforks": [ (array) status of softforks in progress
{
"id": "xxxx", (string) name of softfork
"version": xx, (numeric) block version
"enforce": { (object) progress toward enforcing the softfork rules for new-version blocks
"status": xx, (boolean) true if threshold reached
"found": xx, (numeric) number of blocks with the new version found
"required": xx, (numeric) number of blocks required to trigger
"window": xx, (numeric) maximum size of examined window of recent blocks
},
"reject": { ... } (object) progress toward rejecting pre-softfork blocks (same fields as "enforce")
}, ...
],
"bip9_softforks": { (object) status of BIP9 softforks in progress
"xxxx" : { (string) name of the softfork
"status": "xxxx", (string) one of "defined", "started", "locked_in", "active", "failed"
"bit": xx, (numeric) the bit (0-28) in the block version field used to signal this softfork (only for "started" status)
"startTime": xx, (numeric) the minimum median time past of a block at which the bit gains its meaning
"timeout": xx (numeric) the median time past of a block at which the deployment is considered failed if not yet locked in
}
}
}
*/
// GetBlockChainInfoResult holds the subset of the getblockchaininfo RPC
// response that this package decodes: the current block height and the hash
// of the best (tip) block. All other response fields (see the comment block
// above) are ignored during JSON unmarshalling.
type GetBlockChainInfoResult struct {
	Blocks int64 `json:"blocks"`
	BestBlockHash string `json:"bestblockhash"`
}
// GetBlockChainInfoCommand represents the "getblockchaininfo" JSON-RPC
// request. The empty struct carries no state; the methods below supply the
// method name and the fixed request identifier.
type GetBlockChainInfoCommand struct{}

// Method returns the JSON-RPC method name for this command.
func (GetBlockChainInfoCommand) Method() string { return "getblockchaininfo" }

// ID returns the fixed JSON-RPC request identifier used for this command.
func (GetBlockChainInfoCommand) ID() string { return "1" }
func (GetBlockChainInfoCommand) Params() []interface{} {
return nil
} | omnijson/getblockchaininfo.go | 0.61878 | 0.468912 | getblockchaininfo.go | starcoder |
package table
import (
"sort"
"strings"
"github.com/giantswarm/columnize"
"github.com/giantswarm/microerror"
"github.com/giantswarm/gsctl/pkg/sortable"
)
// Table represents a data structure that can hold and display the contents of a table.
// Rendering is delegated to the columnize package; see String.
type Table struct {
	// columns holds the column definitions, in display order.
	columns []Column
	// rows holds the cell values; each row is a slice of cell strings
	// parallel to columns.
	rows [][]string
	// columnizeConfig represents the configuration of the table formatter.
	columnizeConfig *columnize.Config
}
// New creates a new, empty Table with the default formatter configuration
// (columns glued together by a single space).
func New() Table {
	cfg := columnize.DefaultConfig()
	cfg.Glue = " "
	return Table{columnizeConfig: cfg}
}
// SetColumns sets the table's columns, storing a copy of the given slice so
// that later mutations by the caller do not affect the table.
func (t *Table) SetColumns(c []Column) {
	// The previous `c[:]` only re-sliced (sharing the caller's backing
	// array); appending to a nil slice makes an actual copy, which matches
	// the apparent intent.
	t.columns = append([]Column(nil), c...)
}
// SetRows sets the table's rows, storing a shallow copy of the outer slice
// so appends/reorders by the caller do not affect the table. The inner row
// slices are still shared with the caller.
func (t *Table) SetRows(r [][]string) {
	// The previous `r[:][:]` only re-sliced (no copy at either level);
	// appending to a nil slice copies the outer slice.
	t.rows = append([][]string(nil), r...)
}
// SortByColumnName sorts the table's rows in place by a column name, in the
// given direction. n must match a column name exactly (see GetColumnByName);
// direction is expected to be sortable.ASC or sortable.DESC.
func (t *Table) SortByColumnName(n string, direction string) error {
	// Skip if there is nothing to sort, or if there's no column name provided.
	if len(t.rows) < 2 || n == "" {
		return nil
	}
	colIndex, column, err := t.GetColumnByName(n)
	if err != nil {
		return microerror.Mask(err)
	}
	// Default to Ascending direction sorting.
	sortDir := direction
	if sortDir != sortable.ASC && sortDir != sortable.DESC {
		sortDir = sortable.ASC
	}
	// NOTE(review): sortDir is computed but never used — the comparator
	// below receives the raw `direction`, so the ASC fallback has no
	// effect. Confirm whether sortable's compare functions tolerate
	// arbitrary direction strings or whether `sortDir` was intended here.
	// Get the comparison algorithm for the current sorting type.
	compareFunc := sortable.GetCompareFunc(column.SortType)
	sort.Slice(t.rows, func(i, j int) bool {
		// Rows shorter than the sort column compare as the literal "n/a";
		// ANSI color codes are stripped so styling does not affect order.
		var iVal string
		{
			if colIndex >= len(t.rows[i]) {
				iVal = "n/a"
			} else {
				iVal = RemoveColors(t.rows[i][colIndex])
			}
		}
		var jVal string
		{
			if colIndex >= len(t.rows[j]) {
				jVal = "n/a"
			} else {
				jVal = RemoveColors(t.rows[j][colIndex])
			}
		}
		return compareFunc(iVal, jVal, direction)
	})
	return nil
}
// GetColumnByName fetches the index and definition of the column whose Name
// equals n. An empty n, or a name that matches no column, yields
// fieldNotFoundError (matching the original behavior of never returning a
// column with an empty name).
func (t *Table) GetColumnByName(n string) (int, Column, error) {
	if n != "" {
		for i, col := range t.columns {
			if col.Name == n {
				return i, col, nil
			}
		}
	}
	return 0, Column{}, microerror.Mask(fieldNotFoundError)
}
// GetColumnNameFromInitials matches a given input with the name of an
// existing column, case-insensitively, accepting any unambiguous prefix of
// the column name (e.g. "org" for "Organization").
//
// An exact (case-insensitive) match always wins immediately, even when other
// columns share the prefix. Errors: fieldNotFoundError when nothing matches
// (the message lists the available names), multipleFieldsMatchingError when
// the prefix is ambiguous.
func (t *Table) GetColumnNameFromInitials(i string) (string, error) {
	i = strings.ToLower(i)
	var (
		columnNames   = make([]string, 0, len(t.columns))
		matchingNames []string
	)
	for _, col := range t.columns {
		if col.Name == "" {
			continue
		}
		columnNames = append(columnNames, col.Name)
		nameLowerCased := strings.ToLower(col.Name)
		if !strings.HasPrefix(nameLowerCased, i) {
			continue
		}
		matchingNames = append(matchingNames, col.Name)
		if nameLowerCased == i {
			// Bug fix: return the exactly-matched column name.
			// Previously this returned matchingNames[0], which could be
			// a different, earlier column that only matched as a prefix
			// (e.g. columns "ab" and "a" with input "a" returned "ab").
			return col.Name, nil
		}
	}
	if len(matchingNames) == 0 {
		return "", microerror.Maskf(fieldNotFoundError, "available fields for sorting: %v", strings.Join(columnNames, ", "))
	} else if len(matchingNames) > 1 {
		return "", microerror.Maskf(multipleFieldsMatchingError, "%v", strings.Join(matchingNames, ", "))
	}
	return matchingNames[0], nil
}
// String makes the Table data structure implement the Stringer interface,
// so we can easily pretty-print it.
func (t *Table) String() string {
	// One line per row plus one leading header line; cells are joined with
	// "|", the delimiter columnize splits on.
	rows := make([]string, 0, len(t.rows)+1)
	{
		// Header line: hidden columns are skipped.
		columns := make([]string, 0, len(t.columns))
		for _, col := range t.columns {
			if !col.Hidden {
				columns = append(columns, col.GetHeader())
			}
		}
		rows = append(rows, strings.Join(columns, "|"))
	}
	{
		// Data lines. NOTE(review): rows are emitted with all cells even
		// when a column is hidden — confirm callers prune hidden cells so
		// columns stay aligned with the header.
		for _, row := range t.rows {
			rows = append(rows, strings.Join(row, "|"))
		}
	}
	formattedTable := columnize.Format(rows, t.columnizeConfig)
	return formattedTable
} | pkg/table/table.go | 0.700383 | 0.449151 | table.go | starcoder |
package util
import "sync/atomic"
// AtomicBool is a boolean flag that is safe for concurrent use.
// The zero value is false and ready to use.
type AtomicBool struct {
	boolean int32
}

// Get reports the current value of the flag.
func (b *AtomicBool) Get() bool {
	return atomic.LoadInt32(&b.boolean) != 0
}

// Set stores true.
func (b *AtomicBool) Set() {
	atomic.StoreInt32(&b.boolean, 1)
}

// Swap stores value and returns the previous value in one atomic step.
func (b *AtomicBool) Swap(value bool) bool {
	var next int32
	if value {
		next = 1
	}
	return atomic.SwapInt32(&b.boolean, next) != 0
}

// Clear resets the flag to false.
func (b *AtomicBool) Clear() {
	atomic.StoreInt32(&b.boolean, 0)
}
// AtomicInt32 is an int32 counter that is safe for concurrent use.
// The zero value is 0 and ready to use.
type AtomicInt32 struct {
	value int32
}

// Get returns the current value.
func (i *AtomicInt32) Get() int32 {
	return atomic.LoadInt32(&i.value)
}

// Set stores value.
func (i *AtomicInt32) Set(value int32) {
	atomic.StoreInt32(&i.value, value)
}

// Incr atomically adds one and returns the new value.
func (i *AtomicInt32) Incr() int32 {
	return atomic.AddInt32(&i.value, 1)
}

// Decr atomically subtracts one and returns the new value.
func (i *AtomicInt32) Decr() int32 {
	return atomic.AddInt32(&i.value, -1)
}

// Swap stores value and returns the previous value in one atomic step.
func (i *AtomicInt32) Swap(value int32) int32 {
	return atomic.SwapInt32(&i.value, value)
}

// Clear resets the value to 0.
func (i *AtomicInt32) Clear() {
	i.Set(0)
}
// AtomicInt is an int counter that is safe for concurrent use, stored
// internally as an int64. The zero value is 0 and ready to use.
type AtomicInt struct {
	value int64
}

// Get returns the current value.
func (i *AtomicInt) Get() int {
	return int(atomic.LoadInt64(&i.value))
}

// Set stores value.
func (i *AtomicInt) Set(value int) {
	atomic.StoreInt64(&i.value, int64(value))
}

// Incr atomically adds one and returns the new value.
func (i *AtomicInt) Incr() int {
	return int(atomic.AddInt64(&i.value, 1))
}

// Decr atomically subtracts one and returns the new value.
func (i *AtomicInt) Decr() int {
	return int(atomic.AddInt64(&i.value, -1))
}

// Swap stores value and returns the previous value in one atomic step.
func (i *AtomicInt) Swap(value int) int {
	return int(atomic.SwapInt64(&i.value, int64(value)))
}
// Clear - resets the underlying primative to its unitialized default value
func (i *AtomicInt) Clear() {
i.Set(0)
} | util/atomics.go | 0.853699 | 0.426919 | atomics.go | starcoder |
package section
import (
"fmt"
"math"
)
// Coord - coordinate of point in plane XOZ used for triangle points
type Coord struct {
	X, Z float64 // coordinates // meter
}

// Triangle - elementary triangle element for design section and have 3 coordinate of points.
type Triangle struct {
	P [3]Coord // 3 coordinate of points
}

// area returns the area of the triangle: half the absolute value of the
// cross product of two edge vectors.
func (t Triangle) area() float64 {
	return 0.5 * math.Abs((t.P[0].X-t.P[2].X)*(t.P[1].Z-t.P[2].Z)-(t.P[1].X-t.P[2].X)*(t.P[0].Z-t.P[2].Z))
}

// check validates that the three points form a proper (non-degenerate)
// triangle. It returns a non-nil error when all points lie on a line
// parallel to the Z axis, on a line parallel to the X axis, or when the
// points are collinear in general (which includes coincident points).
func (t Triangle) check() error {
	if t.P[0].X == t.P[1].X && t.P[1].X == t.P[2].X {
		return fmt.Errorf("Tree points on axe X")
	}
	if t.P[0].Z == t.P[1].Z && t.P[1].Z == t.P[2].Z {
		return fmt.Errorf("Tree points on axe Z")
	}
	// Collinearity via the cross product of edges (P1-P0) and (P2-P0).
	// Unlike the previous slope comparison this cannot divide by zero
	// (which produced Inf/NaN) and also rejects triangles with coincident
	// points (zero-length edges), which the slope test silently accepted.
	// Error strings are kept unchanged for backward compatibility.
	if (t.P[1].X-t.P[0].X)*(t.P[2].Z-t.P[0].Z) == (t.P[2].X-t.P[0].X)*(t.P[1].Z-t.P[0].Z) {
		return fmt.Errorf("Points are colleniar")
	}
	return nil
}
// momentInertiaX returns the second moment of area of the triangle about
// the global X axis (the line Z == 0).
//
// Base case: one edge horizontal (the first two vertices share a Z). Then
// J = b*h^3/36 (centroidal moment of a triangle) plus the parallel-axis
// term A*d^2, where d is the centroid's Z coordinate. All other cases
// either reorder the vertices into that canonical arrangement, or split
// the triangle horizontally through its middle-height vertex into two
// triangles that each have a horizontal edge, and recurse.
func (t Triangle) momentInertiaX() (j float64) {
	a := t.P[0]
	b := t.P[1]
	c := t.P[2]
	switch {
	case a.Z == b.Z:
		// Edge a-b is horizontal: apply the closed-form formula.
		var (
			arm float64
			height float64
			width float64
		)
		width = math.Abs(a.X - b.X)
		height = math.Abs(c.Z - a.Z)
		arm = t.centerMassZ()
		j = math.Abs(width*math.Pow(height, 3.0))/36.0 + t.area()*(arm*arm)
	case c.Z == b.Z:
		// Rotate vertices so the horizontal edge comes first.
		j = Triangle{[3]Coord{c, b, a}}.momentInertiaX()
	case a.Z == c.Z:
		j = Triangle{[3]Coord{a, c, b}}.momentInertiaX()
	case c.Z > a.Z && a.Z > b.Z:
		// point a - middle
		// Horizontal cut through a; midPoint lies on edge b-c at height a.Z.
		midPoint := Coord{
			X: b.X + (a.Z-b.Z)/(c.Z-b.Z)*(c.X-b.X),
			Z: a.Z,
		}
		tr1 := Triangle{[3]Coord{a, midPoint, b}}
		tr2 := Triangle{[3]Coord{a, midPoint, c}}
		j = tr1.momentInertiaX() + tr2.momentInertiaX()
	case b.Z > a.Z && a.Z > c.Z:
		// The remaining cases reorder vertices so the middle-height vertex
		// comes first, then recurse into the split case above.
		j = Triangle{[3]Coord{a, c, b}}.momentInertiaX()
	case c.Z > b.Z && b.Z > a.Z:
		j = Triangle{[3]Coord{b, a, c}}.momentInertiaX()
	case a.Z > b.Z && b.Z > c.Z:
		j = Triangle{[3]Coord{b, c, a}}.momentInertiaX()
	case b.Z > c.Z && c.Z > a.Z:
		j = Triangle{[3]Coord{c, a, b}}.momentInertiaX()
	case a.Z > c.Z && c.Z > b.Z:
		j = Triangle{[3]Coord{c, b, a}}.momentInertiaX()
	}
	return
}
// momentInertiaZ returns the second moment of area about the Z axis by
// mirroring the triangle across the line X == Z (swapping each point's
// coordinates) and reusing momentInertiaX.
func (t Triangle) momentInertiaZ() float64 {
	mirrored := Triangle{P: [3]Coord{
		{X: t.P[0].Z, Z: t.P[0].X},
		{X: t.P[1].Z, Z: t.P[1].X},
		{X: t.P[2].Z, Z: t.P[2].X},
	}}
	return mirrored.momentInertiaX()
}
// centerMassZ returns the Z coordinate of the triangle's centroid.
//
// Base case: edge a-b horizontal — the centroid sits one third of the
// height from that edge towards the apex c. Other cases reorder the
// vertices into that arrangement, or split the triangle horizontally
// through its middle-height vertex and take the area-weighted average of
// the two sub-centroids.
func (t Triangle) centerMassZ() (cm float64) {
	a := t.P[0]
	b := t.P[1]
	c := t.P[2]
	switch {
	case a.Z == b.Z:
		height := math.Abs(c.Z - a.Z)
		// Move one third of the height from the base towards the apex.
		if c.Z > a.Z {
			cm = a.Z + height/3.
		} else {
			cm = a.Z - height/3.
		}
	case c.Z == b.Z:
		// Rotate vertices so the horizontal edge comes first.
		cm = Triangle{[3]Coord{c, b, a}}.centerMassZ()
	case a.Z == c.Z:
		cm = Triangle{[3]Coord{a, c, b}}.centerMassZ()
	case c.Z > a.Z && a.Z > b.Z:
		// point a - middle
		// Horizontal cut through a; midPoint lies on edge b-c at height a.Z.
		midPoint := Coord{
			X: b.X + (a.Z-b.Z)/(c.Z-b.Z)*(c.X-b.X),
			Z: a.Z,
		}
		tr1 := Triangle{[3]Coord{a, midPoint, b}}
		cm1 := tr1.centerMassZ()
		ar1 := tr1.area()
		tr2 := Triangle{[3]Coord{a, midPoint, c}}
		cm2 := tr2.centerMassZ()
		ar2 := tr2.area()
		// Area-weighted average of the two sub-triangle centroids.
		cm = (ar1*cm1 + ar2*cm2) / (ar1 + ar2)
	case b.Z > a.Z && a.Z > c.Z:
		// The remaining cases reorder vertices so the middle-height vertex
		// comes first, then recurse into the split case above.
		cm = Triangle{[3]Coord{a, c, b}}.centerMassZ()
	case c.Z > b.Z && b.Z > a.Z:
		cm = Triangle{[3]Coord{b, a, c}}.centerMassZ()
	case a.Z > b.Z && b.Z > c.Z:
		cm = Triangle{[3]Coord{b, c, a}}.centerMassZ()
	case b.Z > c.Z && c.Z > a.Z:
		cm = Triangle{[3]Coord{c, a, b}}.centerMassZ()
	case a.Z > c.Z && c.Z > b.Z:
		cm = Triangle{[3]Coord{c, b, a}}.centerMassZ()
	}
	return
}
// centerMassX returns the X coordinate of the triangle's centroid by
// mirroring the triangle across the line X == Z (swapping each point's
// coordinates) and reusing centerMassZ.
func (t Triangle) centerMassX() float64 {
	return Triangle{[3]Coord{
		Coord{X: t.P[0].Z, Z: t.P[0].X},
		Coord{X: t.P[1].Z, Z: t.P[1].X},
		Coord{X: t.P[2].Z, Z: t.P[2].X},
	},
	}.centerMassZ()
} | section/triangle.go | 0.717507 | 0.650856 | triangle.go | starcoder |
package ode
// #cgo pkg-config: ode
// #include <ode/ode.h>
import "C"
import (
"unsafe"
)
// HeightfieldData represents heightfield data.
type HeightfieldData uintptr
func cToHeightfieldData(c C.dHeightfieldDataID) HeightfieldData {
return HeightfieldData(unsafe.Pointer(c))
}
func (h HeightfieldData) c() C.dHeightfieldDataID {
return C.dHeightfieldDataID(unsafe.Pointer(h))
}
// NewHeightfieldData returns a new HeightfieldData instance.
func NewHeightfieldData() HeightfieldData {
return cToHeightfieldData(C.dGeomHeightfieldDataCreate())
}
// Destroy destroys the heightfield data.
func (h *HeightfieldData) Destroy() {
C.dGeomHeightfieldDataDestroy(h.c())
}
// Heightfield is a geometry representing a heightfield.
type Heightfield struct {
GeomBase
}
// Build builds a heightfield data set.
func (h Heightfield) Build(data HeightfieldData, heightSamples Matrix,
width, depth, scale, offset, thickness float64, doWrap bool) {
numWidthSamp, numDepthSamp := len(heightSamples), 0
var heightSamplesPtr *C.double
if numDepthSamp > 0 {
numWidthSamp = len(heightSamples[0])
if numWidthSamp > 0 {
heightSamplesPtr = (*C.double)(&heightSamples[0][0])
}
}
C.dGeomHeightfieldDataBuildDouble(data.c(), heightSamplesPtr, 1,
C.dReal(width), C.dReal(depth), C.int(numWidthSamp), C.int(numDepthSamp),
C.dReal(scale), C.dReal(offset), C.dReal(thickness), C.int(btoi(doWrap)))
}
// SetBounds sets the minimum and maximum height.
func (h Heightfield) SetBounds(data HeightfieldData, minHeight, maxHeight float64) {
C.dGeomHeightfieldDataSetBounds(data.c(), C.dReal(minHeight), C.dReal(maxHeight))
}
// SetHeightfieldData associates a data set to the heightfield.
func (h Heightfield) SetHeightfieldData(data HeightfieldData) {
C.dGeomHeightfieldSetHeightfieldData(h.c(), data.c())
}
// HeightfieldData returns the data set associated with the heightfield.
func (h Heightfield) HeightfieldData() HeightfieldData {
return cToHeightfieldData(C.dGeomHeightfieldGetHeightfieldData(h.c()))
} | heightfield.go | 0.736306 | 0.417093 | heightfield.go | starcoder |
package formatters
// Organise the possible columns to return in tables and CSVs.
// The machine keys can be used as selectors.
import (
"fmt"
"sort"
"strings"
"github.com/mroach/rom64/rom"
)
type columnValue func(rom.RomFile) string
type Column struct {
Header string
Description string
Generator columnValue
}
// Columns maps machine-readable column ids (usable as CLI selectors) to
// their Column definitions. Keys are stable identifiers; iteration order is
// random, so callers that need deterministic output must sort.
var Columns = map[string]Column{
	"file_name": {
		"File Name",
		"File name on disk",
		func(r rom.RomFile) string { return r.File.Name },
	},
	"file_format": {
		"File Format",
		"File format code. One of: z64, v64, n64",
		func(r rom.RomFile) string { return r.File.Format.Code },
	},
	"file_format_desc": {
		"File Format",
		"File format description. example: Big-endian",
		func(r rom.RomFile) string { return r.File.Format.Description },
	},
	"file_size_mbytes": {
		"Size (MB)",
		"File size in megabytes. Always a whole number. example: 32",
		func(r rom.RomFile) string { return fmt.Sprintf("%d", r.File.Size) },
	},
	"file_size_mbits": {
		"Size (Mb)",
		"File size in megabits. Always a whole number. example: 256",
		// File.Size is evidently stored in megabytes; megabits = MB * 8.
		func(r rom.RomFile) string { return fmt.Sprintf("%d", r.File.Size*8) },
	},
	"file_md5": {
		"MD5",
		"MD5 hash/checksum of the file on disk. Lower-case hexadecimal.",
		func(r rom.RomFile) string { return r.File.MD5 },
	},
	"file_sha1": {
		"SHA1",
		"SHA-1 hash/checksum of the file on disk. Lower-case hexadecimal.",
		func(r rom.RomFile) string { return r.File.SHA1 },
	},
	"file_crc1": {
		"Calculated CRC-1",
		"CRC 1 (CRC HI) calculated from the ROM file.",
		func(r rom.RomFile) string { return r.File.CRC1 },
	},
	"file_crc2": {
		"Calculated CRC-2",
		"CRC 2 (CRC LO) calculated from the ROM file.",
		func(r rom.RomFile) string { return r.File.CRC2 },
	},
	"image_name": {
		"Image Name",
		"Image name / game title embedded in the ROM.",
		func(r rom.RomFile) string { return r.ImageName },
	},
	"version": {
		"Version",
		"Version of the ROM. One of: 1.0, 1.1, 1.2, or 1.3.",
		func(r rom.RomFile) string { return fmt.Sprintf("1.%d", r.Version) },
	},
	"region": {
		"Region",
		"Region description of the ROM derived from the ROM ID.",
		func(r rom.RomFile) string { return r.Region.Description },
	},
	"region_short": {
		"Region",
		"Region short code",
		func(r rom.RomFile) string { return r.Region.Short },
	},
	"video_system": {
		"Video",
		"Video system derived from the ROM region. NTSC or PAL.",
		func(r rom.RomFile) string { return r.Region.VideoSystem },
	},
	"cic": {
		"CIC",
		"CIC chip type. example: 6102",
		func(r rom.RomFile) string { return r.CIC },
	},
	"crc1": {
		"CRC-1",
		"CRC1 checksum of ROM internals. Also known as 'CRC HI'",
		func(r rom.RomFile) string { return r.CRC1 },
	},
	"crc2": {
		"CRC-2",
		"CRC2 checksum of ROM internals. Also known as 'CRC LO'",
		func(r rom.RomFile) string { return r.CRC2 },
	},
	"rom_id": {
		"Rom ID",
		"ROM ID / serial. example: NSME for Super Mario 64 (USA)",
		func(r rom.RomFile) string { return r.Serial() },
	},
}
// ValidateColumnIds partitions the given column ids into those present in
// Columns and those that are unknown. Input order is preserved within each
// returned slice; both slices are always non-nil.
func ValidateColumnIds(columnIds []string) (valid []string, invalid []string) {
	valid = make([]string, 0, len(columnIds))
	invalid = make([]string, 0)
	for _, id := range columnIds {
		if _, ok := Columns[id]; ok {
			valid = append(valid, id)
		} else {
			invalid = append(invalid, id)
		}
	}
	return valid, invalid
}
// ColumnHelp renders one line per available column ("  id  description"),
// sorted alphabetically by column id, for use in CLI help text.
func ColumnHelp() string {
	lines := make([]string, 0, len(Columns))
	for colid, col := range Columns {
		lines = append(lines, fmt.Sprintf(" %-20s %s", colid, col.Description))
	}
	// Map iteration order is random; sort for deterministic help output.
	sort.Strings(lines)
	return strings.Join(lines, "\n")
}
// ColumnHeaders returns the display headers for the given column ids, in
// order. Unknown ids yield an empty header (zero-value Column).
func ColumnHeaders(columnIds []string) []string {
	headers := make([]string, 0, len(columnIds))
	for _, id := range columnIds {
		headers = append(headers, Columns[id].Header)
	}
	return headers
}
// RomsToRecords converts each ROM file into a row of column values in the
// order given by columnIds — one record per ROM, suitable for CSV/table output.
func RomsToRecords(romfiles []rom.RomFile, columnIds []string) [][]string {
	records := make([][]string, 0, len(romfiles))
	for _, romfile := range romfiles {
		records = append(records, PluckRomValues(romfile, columnIds))
	}
	return records
}
func PluckRomValues(romfile rom.RomFile, column_ids []string) []string {
record := make([]string, 0)
for _, column_id := range column_ids {
column := Columns[column_id]
record = append(record, column.Generator(romfile))
}
return record
} | formatters/columns.go | 0.556882 | 0.493409 | columns.go | starcoder |
package main
import (
"math/rand"
"math"
"time"
"fmt"
"strconv"
)
// random_sequence returns the integers in [smallest, maximum) in random
// order, seeding the global PRNG from the wall clock.
func random_sequence(smallest, maximum int) []int {
	nums := make([]int, 0, maximum-smallest)
	for n := smallest; n < maximum; n++ {
		nums = append(nums, n)
	}
	rand.Seed(time.Now().UnixNano())
	rand.Shuffle(len(nums), func(a, b int) {
		nums[a], nums[b] = nums[b], nums[a]
	})
	return nums
}
// max returns the largest value in array. Panics on an empty slice.
func max(array []int) int {
	best := array[0]
	for _, v := range array[1:] {
		if v > best {
			best = v
		}
	}
	return best
}
// min returns the smallest value in array. Panics on an empty slice.
func min(array []int) int {
	best := array[0]
	for _, v := range array[1:] {
		if v < best {
			best = v
		}
	}
	return best
}
func counting_sort(array []int, exp int) []int {
count := make([]int, max(array)+1)
for i := 0; i < len(array); i++ { //iterate through given array, and add 1 to the index which is the value of array[i]
count[digit(array[i], exp)] ++
}
for i := 1; i < len(count); i++ { //go through array and add previous index's value to the current index
count[i] += count[i-1]
}
output := make([]int, len(array)) //create output array
for i := len(array)-1; i > -1; i-- { //iterate through array and plug in sorted values
output[count[digit(array[i], exp)]-1] = array[i]
count[digit(array[i], exp)] --
}
return output
}
// digit returns the decimal digit of number at position n
// (n = 0 is the least-significant digit).
func digit(number, n int) int {
	divisor := int(math.Pow10(n))
	return (number / divisor) % 10
}
// list converts a string to its slice of runes.
// Fixed: the manual range-and-append loop is exactly the built-in
// []rune(string) conversion; use it directly.
func list(str string) []rune {
	return []rune(str)
}
func seperate(array []int, digit int) [][]int {
digit = int(math.Pow10(digit))
output := make([][]int, 0)
counter := 0
smallest := list(string(min(array)))
for i := 0; i < len(smallest[1:]); i++ {
smallest[i+1] = '0'
}
minimum, _ := strconv.Atoi(string(smallest))
var beg int
for i := minimum+digit; i < max(array)+digit+1; i += digit {
beg = counter
for counter <= len(array) {
if counter == len(array) || array[counter] >= i {
if counter-beg > 0 {
output = append(output, array[beg:counter])
}
break
}
counter++
}
}
return output
}
func radix_sort(array []int, digit int, mode string) []int {
if mode == "lsd" {
for i := 0; i < len(string(max(array)))+1; i++ {
array = counting_sort(array, i)
}
return array
} else if mode == "msd" {
if digit == -2 {
digit = len(string(max(array)))
}
output := make([]int, 0)
if digit >= 0 {
array = counting_sort(array, digit)
array := seperate(array, digit)
for _, i := range array {
output = append(output, radix_sort(i, digit-1, "msd")...)
}
} else {
output = array
}
return output
} else {
return radix_sort(array, 0, "lsd")
}
}
func main() {
shuffled_array := random_sequence(0, 1000)
fmt.Println("RADIX SORT")
fmt.Println(shuffled_array)
fmt.Println()
fmt.Println("LSD")
fmt.Println(radix_sort(shuffled_array, 0, "lsd"))
fmt.Println()
fmt.Println("MSD")
fmt.Println(radix_sort(shuffled_array, len(string(max(shuffled_array))), "msd"))
} | radix-sort/radix-sort.go | 0.572245 | 0.532243 | radix-sort.go | starcoder |
package vector
import "gonet/base/containers"
// assertIteratorImplementation statically verifies at compile time that
// *Iterator satisfies containers.ReverseIteratorWithIndex.
func assertIteratorImplementation() {
	var _ containers.ReverseIteratorWithIndex = (*Iterator)(nil)
}
// Iterator holding the iterator's state: the vector being traversed and the
// current position (-1 means one-before-first, elementCount one-past-last).
type Iterator struct {
	vec *Vector
	index int
}
// Iterator returns a stateful iterator whose values can be fetched by an index.
// The iterator starts one-before-first; call Next to reach the first element.
func (v *Vector) Iterator() Iterator {
	return Iterator{vec: v, index: -1}
}
// Next moves the iterator to the next element and returns true if there was a next element in the container.
// If Next() returns true, then next element's index and value can be retrieved by Index() and Value().
// If Next() was called for the first time, then it will point the iterator to the first element if it exists.
// Modifies the state of the iterator.
func (v *Iterator) Next() bool {
	// index saturates at elementCount (one-past-last), so repeated calls at
	// the end keep returning false without over-advancing.
	if v.index < v.vec.elementCount {
		v.index++
	}
	return v.vec.withinRange(v.index)
}
// Prev moves the iterator to the previous element and returns true if there was a previous element in the container.
// If Prev() returns true, then previous element's index and value can be retrieved by Index() and Value().
// Modifies the state of the iterator.
func (v *Iterator) Prev() bool {
	// index saturates at -1 (one-before-first).
	if v.index >= 0 {
		v.index--
	}
	return v.vec.withinRange(v.index)
}
// Value returns the current element's value.
// Does not modify the state of the iterator.
// NOTE(review): no bounds check here — calling Value on an unpositioned
// iterator delegates an out-of-range index to Vector.Get; confirm Get's
// behaviour in that case.
func (v *Iterator) Value() interface{} {
	return v.vec.Get(v.index)
}
// Index returns the current element's index.
// Does not modify the state of the iterator.
func (v *Iterator) Index() int {
	return v.index
}
// Begin resets the iterator to its initial state (one-before-first)
// Call Next() to fetch the first element if any.
func (v *Iterator) Begin() {
	v.index = -1
}
// End moves the iterator past the last element (one-past-the-end).
// Call Prev() to fetch the last element if any.
func (v *Iterator) End() {
	v.index = v.vec.elementCount
}
// First moves the iterator to the first element and returns true if there was a first element in the container.
// If First() returns true, then first element's index and value can be retrieved by Index() and Value().
// Modifies the state of the iterator.
func (v *Iterator) First() bool {
	v.Begin()
	return v.Next()
}
// Last moves the iterator to the last element and returns true if there was a last element in the container.
// If Last() returns true, then last element's index and value can be retrieved by Index() and Value().
// Modifies the state of the iterator.
func (v *Iterator) Last() bool {
	v.End()
	return v.Prev()
}
package internal
import (
"reflect"
)
func IsScalarType(t reflect.Type) bool {
switch t.Kind() {
case
reflect.Bool,
reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64,
reflect.Uintptr,
reflect.Float32, reflect.Float64,
reflect.Complex64, reflect.Complex128,
reflect.String:
return true
default:
return false
}
}
// TypesIdenticalOrScalarAliases reports whether a and b are the same type,
// or are both non-nil scalar types of the same kind (i.e. aliases of the
// same underlying scalar). Note reflect.TypeOf(interface{}(nil)) == nil.
func TypesIdenticalOrScalarAliases(a, b reflect.Type) bool {
	switch {
	case a == b:
		return true
	case a == nil || b == nil:
		return false
	default:
		return a.Kind() == b.Kind() && IsScalarType(a)
	}
}
// interfaceTyp is the reflect.Type of interface{}
var interfaceTyp reflect.Type
func init() {
	// reflect.TypeOf on a plain interface{} would return the dynamic type
	// (or nil); taking a pointer and calling Elem yields the interface type
	// itself.
	var x interface{}
	interfaceTyp = reflect.TypeOf(&x).Elem()
}
// MakeHashable converts a []interface{} slice into an equivalent fixed-length array
// [...]interface{} for use as a comparable map key
func MakeHashable(s []interface{}) interface{} {
d := make([]interface{}, len(s))
// Convert byte slices into strings as they are otherwise not comparable/hashable.
for i, elem := range s {
if b, ok := elem.([]byte); ok {
d[i] = string(b)
} else {
d[i] = elem
}
}
// Return arrays as they are comparable/hashable.
switch len(d) {
// fast code paths for short arrays:
case 0:
return [...]interface{}{}
case 1:
return [...]interface{}{d[0]}
case 2:
return [...]interface{}{d[0], d[1]}
case 3:
return [...]interface{}{d[0], d[1], d[2]}
case 4:
return [...]interface{}{d[0], d[1], d[2], d[3]}
case 5:
return [...]interface{}{d[0], d[1], d[2], d[3], d[4]}
case 6:
return [...]interface{}{d[0], d[1], d[2], d[3], d[4], d[5]}
case 7:
return [...]interface{}{d[0], d[1], d[2], d[3], d[4], d[5], d[6]}
case 8:
return [...]interface{}{d[0], d[1], d[2], d[3], d[4], d[5], d[6], d[7]}
default:
// slow catch-all:
array := reflect.New(reflect.ArrayOf(len(d), interfaceTyp)).Elem()
for i, elem := range d {
array.Index(i).Set(reflect.ValueOf(elem))
}
return array.Interface()
}
} | internal/reflect.go | 0.553505 | 0.463566 | reflect.go | starcoder |
package model
const (
	// PostgreSQL-style positional placeholders ($1, $2, ...) are used in all
	// queries below. $1 is the owning user's id.
	getAllModelsQuery = `SELECT id, name, expression, lex_expression, tag FROM models WHERE user_id=$1;`
	// $1..$5: name, expression, lex_expression, tag, user_id; returns the
	// inserted row (minus user_id).
	insertNewModelQuery = `INSERT INTO models (id, name, expression, lex_expression, tag, user_id) VALUES (DEFAULT, $1, $2, $3, $4, $5) RETURNING id, name, expression, lex_expression, tag;`
	// $1: model id.
	deleteModelByIdQuery = `DELETE FROM models WHERE id=$1;`
	// $1: new tag, $2: model id.
	updateTag = `UPDATE models SET tag=$1 WHERE id=$2;`
)
// model is a built-in fitting model: a display name, the plain-text formula,
// its LaTeX rendering, and a category tag used for grouping in the UI.
type model struct {
	name string
	expression string
	lexExpression string
	tag string
}
// models lists the built-in fitting models seeded for users, grouped by tag
// (Linear, Polynomial, Dose-Response, Sigmoidal, ...).
// NOTE(review): the last three names carry a trailing colon ("wavy:",
// "richards:", "logistic:") unlike every other entry — confirm whether that
// punctuation is intentional.
var models = []model{
	model{name: "linear_fit", expression: "a*x+b", lexExpression: "a \\cdot x+b", tag: "Linear"},
	model{name: "polynomial n2", expression: "a*x**2+b*x+c", lexExpression: "a \\cdot x^{2}+b \\cdot x+c", tag: "Polynomial"},
	model{name: "polynomial n3", expression: "a*x**3+b*x**2+c*x+d", lexExpression: "a \\cdot x^{3}+b \\cdot x^{2}+c \\cdot x+d", tag: "Polynomial"},
	model{name: "polynomial n4", expression: "a*x**4+b*x**3+c*x**2+d*x+e", lexExpression: "a \\cdot x^{4}+b \\cdot x^{3}+c \\cdot x^{2}+d \\cdot x+e", tag: "Polynomial"},
	model{name: "polynomial n5", expression: "a*x**5+b*x**4+c*x**3+d*x**2+e*x+f", lexExpression: "a \\cdot x^{5}+b \\cdot x^{4}+c \\cdot x^{3}+d \\cdot x^{2}+e \\cdot x+f", tag: "Polynomial"},
	model{name: "polynomial n6", expression: "a*x**6+b*x**5+c*x**4+d*x**3+e*x**2+f*x+g", lexExpression: "a \\cdot x^{6}+b \\cdot x^{5}+c \\cdot x^{4}+d \\cdot x^{3}+e \\cdot x^{2}+f \\cdot x+g", tag: "Polynomial"},
	model{name: "polynomial n7", expression: "a*x**7+b*x**6+c*x**5+d*x**4+e*x**3+f*x**2+g*x+h", lexExpression: "a \\cdot x^{7}+b \\cdot x^{6}+c \\cdot x^{5}+d \\cdot x^{4}+e \\cdot x^{3}+f \\cdot x^{2}+g \\cdot x+h", tag: "Polynomial"},
	model{name: "polynomial n8", expression: "a*x**8+b*x**7+c*x**6+d*x**5+e*x**4+f*x**3+g*x**2+h*x+i", lexExpression: "a \\cdot x^{8}+b \\cdot x^{7}+c \\cdot x^{6}+d \\cdot x^{5}+e \\cdot x^{4}+f \\cdot x^{3}+g \\cdot x^{2}+h \\cdot x+i", tag: "Polynomial"},
	model{name: "DR-LogLogitic", expression: "a+(1-a)/(1+exp(-(b+c*ln(x))))", lexExpression: "\\frac{-a+1}{e^{-c \\cdot \\ln \\cdot x-b}+1}+a", tag: "Dose-Response"},
	model{name: "Rational Model", expression: "(a+b*x)/(1+c*x+d*x**2)", lexExpression: "\\frac{b \\cdot x+a}{c \\cdot x+d \\cdot x^{2}+1}", tag: "Miscellaneous"},
	model{name: "DR-Multistage-3", expression: "a+(1-a)*(1-exp(-b*x-c*x**2-d*x**3))", lexExpression: "\\left(-a+1\\right) \\cdot \\left(-e^{-b \\cdot x-c \\cdot x^{2}-d \\cdot x^{3}}+1\\right)+a", tag: "Dose-Response"},
	model{name: "truncated fourier", expression: "a*cos(x+d)+b*cos(2*x+d)+c*cos(3*x+d)", lexExpression: "\\mathrm{cos}\\left(d+x\\right) \\cdot a+\\mathrm{cos}\\left(2 \\cdot x+d\\right) \\cdot b+\\mathrm{cos}\\left(3 \\cdot x+d\\right) \\cdot c", tag: "Miscellaneous"},
	model{name: "reciprocal quadratic", expression: "1/(a+b*x+c*x**2)", lexExpression: "\\frac{1}{\\left(b \\cdot x+c \\cdot x^{2}+a\\right)}", tag: "Yield-Spacing"},
	model{name: "wavy:", expression: "a*cos(2*x)+b*sin(x)", lexExpression: "\\mathrm{cos}\\left(2 \\cdot x\\right) \\cdot a+\\mathrm{sin}\\left(x\\right) \\cdot b", tag: "Unassigned"},
	model{name: "richards:", expression: "a/(1+exp(b-c*x))**(1/d)", lexExpression: "\\frac{a}{\\left(e^{-c \\cdot x+b}+1\\right)^{\\frac{1}{d}}}", tag: "Sigmoidal"},
	model{name: "logistic:", expression: "a/(1+b*e**(-c*x))", lexExpression: "\\frac{a}{\\frac{b}{e^{c \\cdot x}}+1}", tag: "Sigmoidal"},
}
package gocqlx
import (
"errors"
"fmt"
"reflect"
"github.com/gocql/gocql"
"github.com/scylladb/go-reflectx"
)
// structOnlyError returns an error appropriate for type when a non-scannable
// struct is expected but something else is given: not a struct at all, a
// struct that implements gocql.Unmarshaler, or a struct without exported
// fields.
func structOnlyError(t reflect.Type) error {
	if t.Kind() != reflect.Struct {
		return fmt.Errorf("expected %s but got %s", reflect.Struct, t.Kind())
	}
	if reflect.PtrTo(t).Implements(_unmarshallerInterface) {
		return fmt.Errorf("structscan expects a struct dest but the provided struct type %s implements unmarshaler", t.Name())
	}
	return fmt.Errorf("expected a struct, but struct %s has no exported fields", t.Name())
}
// reflect helpers
// _unmarshallerInterface is the reflect.Type of gocql.Unmarshaler, used to
// detect types that do their own CQL unmarshalling.
var _unmarshallerInterface = reflect.TypeOf((*gocql.Unmarshaler)(nil)).Elem()
// baseType dereferences t (through any pointer levels) and verifies the
// resulting kind matches expected, returning an error otherwise.
func baseType(t reflect.Type, expected reflect.Kind) (reflect.Type, error) {
	base := reflectx.Deref(t)
	if k := base.Kind(); k != expected {
		return nil, fmt.Errorf("expected %s but got %s", expected, k)
	}
	return base, nil
}
// isScannable takes the reflect.Type and the actual dest value and returns
// whether or not it's Scannable. Something is scannable if:
//   - it implements gocql.Unmarshaler, or
//   - it is not a struct, or
//   - it is a struct with no exported fields.
func isScannable(t reflect.Type) bool {
	switch {
	case reflect.PtrTo(t).Implements(_unmarshallerInterface):
		return true
	case t.Kind() != reflect.Struct:
		return true
	default:
		// It's not important that we use the right mapper for this object;
		// we only care how many exported fields the struct has.
		return len(DefaultMapper.TypeMap(t).Index) == 0
	}
}
// fieldsByTraversal fills values with fields of v selected by the index
// traversals. With ptrs set, field addresses are stored instead of values.
// Empty traversals are skipped (their slot in values is left untouched).
// Hand-written instead of reflectx.FieldsByName to save allocations and map
// lookups when iterating many rows.
func fieldsByTraversal(v reflect.Value, traversals [][]int, values []interface{}, ptrs bool) error {
	v = reflect.Indirect(v)
	if v.Kind() != reflect.Struct {
		return errors.New("argument not a struct")
	}
	for i, traversal := range traversals {
		if len(traversal) == 0 {
			continue
		}
		target := reflectx.FieldByIndexes(v, traversal)
		if !ptrs {
			values[i] = target.Interface()
			continue
		}
		values[i] = target.Addr().Interface()
	}
	return nil
}
// missingFields returns the index of the first empty traversal (a field that
// could not be resolved) together with an error; (0, nil) if none is empty.
func missingFields(transversals [][]int) (field int, err error) {
	for i := range transversals {
		if len(transversals[i]) == 0 {
			return i, errors.New("missing field")
		}
	}
	return 0, nil
}
package fixtures
import (
"github.com/gopinath-langote/1build/testing/def"
"github.com/gopinath-langote/1build/testing/utils"
"github.com/stretchr/testify/assert"
"io/ioutil"
"testing"
)
// featureSetTestsData returns the end-to-end test cases for the "set"
// command, which adds or updates an entry in the 1build configuration file.
func featureSetTestsData() []Test {
	feature := "set"
	return []Test{
		shouldSetNewCommand(feature),
		shouldUpdateExistingCommand(feature),
		shouldFailWhenConfigurationFileIsNotFound(feature),
		shouldFailWhenConfigurationFileIsInInvalidFormat(feature),
		shouldSetBeforeCommand(feature),
		shouldSetAfterCommand(feature),
	}
}
// shouldSetNewCommand: setting a command that is not yet in the config file
// appends it to the commands list.
func shouldSetNewCommand(feature string) Test {
	defaultFileContent := `
project: Sample Project
commands:
  - build: go build
`
	expectedOutput := `project: Sample Project
commands:
  - build: go build
  - Test: go Test
`
	return Test{
		Feature: feature,
		Name: "shouldSetNewCommand",
		CmdArgs: []string{"set", "Test", "go Test"},
		Setup: func(dir string) error {
			return utils.CreateConfigFile(dir, defaultFileContent)
		},
		Assertion: func(dir string, actualOutput string, t *testing.T) bool {
			// Asserts on the rewritten config file rather than stdout.
			filePath := dir + "/" + def.ConfigFileName
			assert.FileExists(t, dir+"/"+def.ConfigFileName)
			content, _ := ioutil.ReadFile(filePath)
			return assert.Contains(t, string(content), expectedOutput)
		},
	}
}
// shouldUpdateExistingCommand: setting a command that already exists replaces
// its value in place.
func shouldUpdateExistingCommand(feature string) Test {
	defaultFileContent := `
project: Sample Project
commands:
  - build: go build
`
	expectedOutput := `project: Sample Project
commands:
  - build: go build -o
`
	return Test{
		Feature: feature,
		Name: "shouldUpdateExistingCommand",
		CmdArgs: []string{"set", "build", "go build -o"},
		Setup: func(dir string) error {
			return utils.CreateConfigFile(dir, defaultFileContent)
		},
		Assertion: func(dir string, actualOutput string, t *testing.T) bool {
			filePath := dir + "/" + def.ConfigFileName
			assert.FileExists(t, dir+"/"+def.ConfigFileName)
			content, _ := ioutil.ReadFile(filePath)
			return assert.Contains(t, string(content), expectedOutput)
		},
	}
}
// shouldFailWhenConfigurationFileIsNotFound: no Setup step, so no config file
// exists and the command must report that it is missing.
func shouldFailWhenConfigurationFileIsNotFound(feature string) Test {
	return Test{
		Feature: feature,
		Name: "shouldFailWhenConfigurationFileIsNotFound",
		CmdArgs: []string{"set", "build", "go build -o"},
		Assertion: func(dir string, actualOutput string, t *testing.T) bool {
			return assert.Contains(t, actualOutput, "no '"+def.ConfigFileName+"' file found in current directory")
		},
	}
}
// shouldFailWhenConfigurationFileIsInInvalidFormat: a config file with
// unparsable content must produce a parse error message.
func shouldFailWhenConfigurationFileIsInInvalidFormat(feature string) Test {
	return Test{
		Feature: feature,
		Name: "shouldFailWhenConfigurationFileIsInInvalidFormat",
		CmdArgs: []string{"set", "build", "go build"},
		Setup: func(dir string) error {
			return utils.CreateConfigFile(dir, "invalid config content")
		},
		Assertion: func(dir string, actualOutput string, t *testing.T) bool {
			return assert.Contains(t, actualOutput, "Unable to parse '"+def.ConfigFileName+"' config file. Make sure file is in correct format.")
		},
	}
}
// shouldSetBeforeCommand verifies that `1build set before <cmd>` stores the
// "before" hook in the configuration file.
// Fixed: this fixture previously set the "after" hook, making it a duplicate
// of shouldSetAfterCommand and leaving the "before" path untested.
// NOTE(review): the expected serialization places the hook between the
// project line and the commands list, mirroring the "after" fixture —
// confirm against the actual 1build config writer.
func shouldSetBeforeCommand(feature string) Test {
	defaultFileContent := `
project: Sample Project
commands:
  - build: go build
`
	expectedOutput := `project: Sample Project
before: yo
commands:
  - build: go build
`
	return Test{
		Feature: feature,
		Name: "shouldSetBeforeCommand",
		CmdArgs: []string{"set", "before", "yo"},
		Setup: func(dir string) error {
			return utils.CreateConfigFile(dir, defaultFileContent)
		},
		Assertion: func(dir string, actualOutput string, t *testing.T) bool {
			filePath := dir + "/" + def.ConfigFileName
			assert.FileExists(t, dir+"/"+def.ConfigFileName)
			content, _ := ioutil.ReadFile(filePath)
			return assert.Contains(t, string(content), expectedOutput)
		},
	}
}
func shouldSetAfterCommand(feature string) Test {
defaultFileContent := `
project: Sample Project
commands:
- build: go build
`
expectedOutput := `project: Sample Project
after: yo
commands:
- build: go build
`
return Test{
Feature: feature,
Name: "shouldSetBeforeCommand",
CmdArgs: []string{"set", "after", "yo"},
Setup: func(dir string) error {
return utils.CreateConfigFile(dir, defaultFileContent)
},
Assertion: func(dir string, actualOutput string, t *testing.T) bool {
filePath := dir + "/" + def.ConfigFileName
assert.FileExists(t, dir+"/"+def.ConfigFileName)
content, _ := ioutil.ReadFile(filePath)
return assert.Contains(t, string(content), expectedOutput)
},
}
} | testing/fixtures/command_set_fixtures.go | 0.504883 | 0.428293 | command_set_fixtures.go | starcoder |
package jgl
import (
// "fmt"
// "math"
"github.com/Stymphalian/go.math/lmath"
"github.com/go-gl/gl/v3.3-compatibility/gl"
)
// TriMesh is an indexed triangle mesh: a vertex list plus triangles stored
// as triples of indices into verts.
type TriMesh struct {
	// LightFlag bool
	Mat Material
	verts []lmath.Vec3
	tris [][3]int
}
// NewTriMesh returns an empty mesh with capacity pre-sized for a cube
// (8 vertices, 12 triangles).
func NewTriMesh() *TriMesh {
	return &TriMesh{
		verts: make([]lmath.Vec3, 0, 8),
		tris:  make([][3]int, 0, 12),
	}
}
// Load1 fills the mesh with an axis-aligned cube of half-extent 0.3 centred
// at the origin: 8 vertices and 12 triangles (2 per face).
// NOTE(review): the triangle winding is mixed relative to normal(); confirm
// which faces are meant to be outward-facing before relying on the normals.
func (this* TriMesh) Load1(){
	scale := 0.3
	// Vertices 0-3: front face (z = +scale); 4-7: back face (z = -scale).
	this.verts = append(this.verts, lmath.Vec3{-scale, -scale, scale})
	this.verts = append(this.verts, lmath.Vec3{scale, -scale, scale})
	this.verts = append(this.verts, lmath.Vec3{scale, scale, scale})
	this.verts = append(this.verts, lmath.Vec3{-scale, scale, scale})
	this.verts = append(this.verts, lmath.Vec3{-scale, -scale, -scale})
	this.verts = append(this.verts, lmath.Vec3{scale, -scale, -scale})
	this.verts = append(this.verts, lmath.Vec3{scale, scale, -scale})
	this.verts = append(this.verts, lmath.Vec3{-scale, scale, -scale})
	// Two triangles per cube face.
	this.tris = append(this.tris, [3]int{0, 1, 2})
	this.tris = append(this.tris, [3]int{0, 2, 3})
	this.tris = append(this.tris, [3]int{1, 5, 6})
	this.tris = append(this.tris, [3]int{1, 6, 2})
	this.tris = append(this.tris, [3]int{4, 6, 5})
	this.tris = append(this.tris, [3]int{4, 7, 6})
	this.tris = append(this.tris, [3]int{0, 4, 7})
	this.tris = append(this.tris, [3]int{0, 7, 3})
	this.tris = append(this.tris, [3]int{3, 2, 6})
	this.tris = append(this.tris, [3]int{3, 6, 7})
	this.tris = append(this.tris, [3]int{0, 1, 5})
	this.tris = append(this.tris, [3]int{0, 5, 4})
}
// Draw renders the mesh with the fixed-function OpenGL pipeline, applying
// transform to every vertex on the CPU before submission.
// NOTE(review): the colour is hard-coded to yellow (1, 1, 0) and this.Mat is
// ignored — confirm whether the material is meant to drive the colour.
func (this *TriMesh) Draw(transform lmath.Mat4) {
	gl.Color3f(1.0, 1.0, 0)
	gl.Begin(gl.TRIANGLES)
	for i := 0; i < len(this.tris); i++ {
		// Dumpf32 presumably expands the vector into the three float32
		// components gl.Vertex3f expects — verify against lmath's docs.
		v := transform.MultVec3(this.verts[this.tris[i][0]])
		gl.Vertex3f(v.Dumpf32())
		v = transform.MultVec3(this.verts[this.tris[i][1]])
		gl.Vertex3f(v.Dumpf32())
		v = transform.MultVec3(this.verts[this.tris[i][2]])
		gl.Vertex3f(v.Dumpf32())
	}
	gl.End()
}
// intersects tests triangle `index` (transformed by `transform`) against the
// ray using Cramer's rule on the barycentric system (Shirley's formulation).
// On a hit with t in [hit.MinDist, hit.MaxDist] it records the distance and
// triangle index and tightens hit.MaxDist so later triangles must be closer
// to register.
//
// Fixed: the c, f and l coefficients used the .Y component where the
// formulation requires .Z (copy/paste bug), breaking every intersection test
// for non-planar input.
func (this *TriMesh) intersects(ray Ray, hit *HitRecord, transform *lmath.Mat4, index int) bool {
	vecA := transform.MultVec3(this.verts[this.tris[index][0]])
	vecB := transform.MultVec3(this.verts[this.tris[index][1]])
	vecC := transform.MultVec3(this.verts[this.tris[index][2]])
	// Columns of the 3x3 system [A-B | A-C | dir] * [beta, gamma, t]^T = A - origin.
	a := vecA.X - vecB.X
	b := vecA.Y - vecB.Y
	c := vecA.Z - vecB.Z // was vecA.Y - vecB.Y
	d := vecA.X - vecC.X
	e := vecA.Y - vecC.Y
	f := vecA.Z - vecC.Z // was vecA.Y - vecC.Y
	j := vecA.X - ray.Origin.X
	k := vecA.Y - ray.Origin.Y
	l := vecA.Z - ray.Origin.Z // was vecA.Y - ray.Origin.Y
	// Reusable 2x2 sub-determinants.
	ei_hf := (e * ray.Dir.Z) - (f * ray.Dir.Y)
	gf_di := (f * ray.Dir.X) - (d * ray.Dir.Z)
	dh_eg := (d * ray.Dir.Y) - (e * ray.Dir.X)
	M := (a * ei_hf) + (b * gf_di) + (c * dh_eg)
	ak_jb := (a * k) - (j * b)
	jc_al := (j * c) - (a * l)
	bl_kc := (b * l) - (k * c)
	t := -((f * ak_jb) + (e * jc_al) + (d * bl_kc)) / M
	if t < hit.MinDist || t > hit.MaxDist {
		return false
	}
	gamma := ((ray.Dir.Z * ak_jb) + (ray.Dir.Y * jc_al) + (ray.Dir.X * bl_kc)) / M
	if gamma < 0 || gamma > 1 {
		return false
	}
	beta := ((j * ei_hf) + (k * gf_di) + (l * dh_eg)) / M
	if beta < 0 || beta > (1-gamma) {
		return false
	}
	hit.Hit = true
	hit.Dist = t
	hit.MaxDist = t // shrink the search window for subsequent triangles
	hit.HitIndex = index
	return true
}
// Intersects tests the ray against every triangle of the mesh (transformed
// by `transform`) and returns the hit record updated with the closest hit,
// starting from the supplied record's search window.
func (this *TriMesh) Intersects(ray Ray, hit HitRecord, transform lmath.Mat4) (h HitRecord) {
	h = hit
	for i := range this.tris {
		this.intersects(ray, &h, &transform, i)
	}
	return
}
// VecsFromIndex returns the three (untransformed) corner vertices of
// triangle `index`.
func (this *TriMesh) VecsFromIndex(index int)(lmath.Vec3,lmath.Vec3, lmath.Vec3){
	return this.verts[this.tris[index][0]],
	this.verts[this.tris[index][1]],
	this.verts[this.tris[index][2]]
}
// normal returns the unit normal of triangle `index` after applying
// transform, computed as normalize((c-a) x (b-a)).
// NOTE(review): the direction depends on the triangle winding; confirm the
// sign convention against how Load1 winds its faces.
func (this *TriMesh) normal(index int,transform lmath.Mat4) lmath.Vec3{
	a,b,c := this.VecsFromIndex(index)
	a = transform.MultVec3(a)
	b = transform.MultVec3(b)
	c = transform.MultVec3(c)
	return c.Sub(a).Cross(b.Sub(a)).Normalize()
}
// Normal returns the surface normal for the triangle recorded in hit,
// ignoring hitPoint (flat shading: one normal per triangle).
func (this *TriMesh) Normal(hitPoint lmath.Vec3, hit HitRecord) lmath.Vec3 {
	return this.normal(hit.HitIndex,hit.Transform)
}
// Material returns the material assigned to this mesh.
func (this TriMesh) Material() Material {
	return this.Mat
}
package idemix
import (
"github.com/hyperledger/udo-amcl/amcl"
"github.com/hyperledger/udo-amcl/amcl/FP256BN"
"github.com/pkg/errors"
)
// credRequestLabel is the label used in zero-knowledge proof (ZKP) to identify that this ZKP is a credential request.
// It is hashed into the Fiat-Shamir challenge to domain-separate this proof
// from other proofs in the scheme, so it must match in prover and verifier.
const credRequestLabel = "credRequest"
// Credential issuance is an interactive protocol between a user and an issuer
// The issuer takes its secret and public keys and user attribute values as input
// The user takes the issuer public key and user secret as input
// The issuance protocol consists of the following steps:
// 1) The issuer sends a random nonce to the user
// 2) The user creates a Credential Request using the public key of the issuer, user secret, and the nonce as input
// The request consists of a commitment to the user secret (can be seen as a public key) and a zero-knowledge proof
// of knowledge of the user secret key
// The user sends the credential request to the issuer
// 3) The issuer verifies the credential request by verifying the zero-knowledge proof
// If the request is valid, the issuer issues a credential to the user by signing the commitment to the secret key
// together with the attribute values and sends the credential back to the user
// 4) The user verifies the issuer's signature and stores the credential that consists of
// the signature value, a randomness used to create the signature, the user secret, and the attribute values
// NewCredRequest creates a new Credential Request, the first message of the interactive credential issuance protocol
// (from user to issuer)
func NewCredRequest(sk *FP256BN.BIG, IssuerNonce []byte, ipk *IssuerPublicKey, rng *amcl.RAND) *CredRequest {
// Set Nym as h_{sk}^{sk}
HSk := EcpFromProto(ipk.HSk)
Nym := HSk.Mul(sk)
// generate a zero-knowledge proof of knowledge (ZK PoK) of the secret key
// Sample the randomness needed for the proof
rSk := RandModOrder(rng)
// Step 1: First message (t-values)
t := HSk.Mul(rSk) // t = h_{sk}^{r_{sk}}, cover Nym
// Step 2: Compute the Fiat-Shamir hash, forming the challenge of the ZKP.
// proofData is the data being hashed, it consists of:
// the credential request label
// 3 elements of G1 each taking 2*FieldBytes+1 bytes
// hash of the issuer public key of length FieldBytes
// issuer nonce of length FieldBytes
proofData := make([]byte, len([]byte(credRequestLabel))+3*(2*FieldBytes+1)+2*FieldBytes)
index := 0
index = appendBytesString(proofData, index, credRequestLabel)
index = appendBytesG1(proofData, index, t)
index = appendBytesG1(proofData, index, HSk)
index = appendBytesG1(proofData, index, Nym)
index = appendBytes(proofData, index, IssuerNonce)
copy(proofData[index:], ipk.Hash)
proofC := HashModOrder(proofData)
// Step 3: reply to the challenge message (s-values)
proofS := Modadd(FP256BN.Modmul(proofC, sk, GroupOrder), rSk, GroupOrder) // s = r_{sk} + C \cdot sk
// Done
return &CredRequest{
Nym: EcpToProto(Nym),
IssuerNonce: IssuerNonce,
ProofC: BigToBytes(proofC),
ProofS: BigToBytes(proofS)}
}
// Check cryptographically verifies the credential request: it recomputes the
// Schnorr-style t-value from the s-value (t = HSk^s / Nym^C), rebuilds the
// same Fiat-Shamir hash as NewCredRequest, and accepts only if it matches
// the transmitted challenge ProofC.
func (m *CredRequest) Check(ipk *IssuerPublicKey) error {
	Nym := EcpFromProto(m.GetNym())
	IssuerNonce := m.GetIssuerNonce()
	ProofC := FP256BN.FromBytes(m.GetProofC())
	ProofS := FP256BN.FromBytes(m.GetProofS())
	HSk := EcpFromProto(ipk.HSk)
	if Nym == nil || IssuerNonce == nil || ProofC == nil || ProofS == nil {
		return errors.Errorf("one of the proof values is undefined")
	}
	// Verify Proof
	// Recompute t-values using s-values
	t := HSk.Mul(ProofS)
	t.Sub(Nym.Mul(ProofC)) // t = h_{sk}^s / Nym^C
	// Recompute challenge; layout must match NewCredRequest exactly.
	proofData := make([]byte, len([]byte(credRequestLabel))+3*(2*FieldBytes+1)+2*FieldBytes)
	index := 0
	index = appendBytesString(proofData, index, credRequestLabel)
	index = appendBytesG1(proofData, index, t)
	index = appendBytesG1(proofData, index, HSk)
	index = appendBytesG1(proofData, index, Nym)
	index = appendBytes(proofData, index, IssuerNonce)
	copy(proofData[index:], ipk.Hash)
	// NOTE(review): comparing *ProofC != *HashModOrder(...) relies on
	// FP256BN.BIG having a canonical limb representation for == — verify.
	if *ProofC != *HashModOrder(proofData) {
		return errors.Errorf("zero knowledge proof is invalid")
	}
	return nil
}
package iso20022
// Currency conversion accepted by the customer, either to convert the amount to dispense in the base currency of the ATM, or to convert the total requested amount in the currency of the customer (so called dynamic currency conversion).
// All fields are pointers: per ISO 20022 message style, nil means the
// element is absent from the XML (optional fields carry omitempty tags).
type CurrencyConversion9 struct {
	// Identification of the currency conversion operation.
	CurrencyConversionIdentification *Max35Text `xml:"CcyConvsId,omitempty"`
	// Currency into which the amount is converted (ISO 4217, 3 alphanumeric characters).
	TargetCurrency *CurrencyDetails2 `xml:"TrgtCcy"`
	// Amount converted in the target currency, including commission and mark-up.
	ResultingAmount *ImpliedCurrencyAndAmount `xml:"RsltgAmt"`
	// Exchange rate, expressed as a percentage, applied to convert the original amount into the resulting amount.
	ExchangeRate *PercentageRate `xml:"XchgRate"`
	// Exchange rate, expressed as a percentage, applied to convert the resulting amount into the original amount.
	InvertedExchangeRate *PercentageRate `xml:"NvrtdXchgRate,omitempty"`
	// Date and time at which the exchange rate has been quoted.
	QuotationDate *ISODateTime `xml:"QtnDt,omitempty"`
	// Validity limit of the exchange rate.
	ValidUntil *ISODateTime `xml:"VldUntil,omitempty"`
	// Currency from which the amount is converted (ISO 4217, 3 alphanumeric characters).
	SourceCurrency *CurrencyDetails2 `xml:"SrcCcy"`
	// Original amount in the source currency.
	OriginalAmount *ImpliedCurrencyAndAmount `xml:"OrgnlAmt"`
	// Commission or additional charges made as part of a currency conversion.
	CommissionDetails []*Commission19 `xml:"ComssnDtls,omitempty"`
	// Mark-up made as part of a currency conversion.
	MarkUpDetails []*Commission18 `xml:"MrkUpDtls,omitempty"`
	// Card scheme declaration (disclaimer) to present to the cardholder.
	DeclarationDetails *ActionMessage5 `xml:"DclrtnDtls,omitempty"`
}
// Generated-style accessors: Set* methods wrap plain values into the ISO
// 20022 pointer types; Add* methods allocate a child element (appending for
// repeatable elements) and return it for further population.
// SetCurrencyConversionIdentification sets the conversion operation id.
func (c *CurrencyConversion9) SetCurrencyConversionIdentification(value string) {
	c.CurrencyConversionIdentification = (*Max35Text)(&value)
}
// AddTargetCurrency allocates and returns the target currency element.
func (c *CurrencyConversion9) AddTargetCurrency() *CurrencyDetails2 {
	c.TargetCurrency = new(CurrencyDetails2)
	return c.TargetCurrency
}
// SetResultingAmount sets the converted amount and its currency.
func (c *CurrencyConversion9) SetResultingAmount(value, currency string) {
	c.ResultingAmount = NewImpliedCurrencyAndAmount(value, currency)
}
// SetExchangeRate sets the original-to-resulting exchange rate.
func (c *CurrencyConversion9) SetExchangeRate(value string) {
	c.ExchangeRate = (*PercentageRate)(&value)
}
// SetInvertedExchangeRate sets the resulting-to-original exchange rate.
func (c *CurrencyConversion9) SetInvertedExchangeRate(value string) {
	c.InvertedExchangeRate = (*PercentageRate)(&value)
}
// SetQuotationDate sets when the exchange rate was quoted.
func (c *CurrencyConversion9) SetQuotationDate(value string) {
	c.QuotationDate = (*ISODateTime)(&value)
}
// SetValidUntil sets the validity limit of the exchange rate.
func (c *CurrencyConversion9) SetValidUntil(value string) {
	c.ValidUntil = (*ISODateTime)(&value)
}
// AddSourceCurrency allocates and returns the source currency element.
func (c *CurrencyConversion9) AddSourceCurrency() *CurrencyDetails2 {
	c.SourceCurrency = new(CurrencyDetails2)
	return c.SourceCurrency
}
// SetOriginalAmount sets the original amount and its currency.
func (c *CurrencyConversion9) SetOriginalAmount(value, currency string) {
	c.OriginalAmount = NewImpliedCurrencyAndAmount(value, currency)
}
// AddCommissionDetails appends and returns a new commission element.
func (c *CurrencyConversion9) AddCommissionDetails() *Commission19 {
	newValue := new(Commission19)
	c.CommissionDetails = append(c.CommissionDetails, newValue)
	return newValue
}
// AddMarkUpDetails appends and returns a new mark-up element.
func (c *CurrencyConversion9) AddMarkUpDetails() *Commission18 {
	newValue := new(Commission18)
	c.MarkUpDetails = append(c.MarkUpDetails, newValue)
	return newValue
}
// AddDeclarationDetails allocates and returns the declaration element.
func (c *CurrencyConversion9) AddDeclarationDetails() *ActionMessage5 {
	c.DeclarationDetails = new(ActionMessage5)
	return c.DeclarationDetails
}
package animate
import (
"math"
"golang.org/x/mobile/exp/sprite/clock"
"gomatcha.io/matcha/comm"
)
// FloatInterpolater represents an object that interpolates between floats
// given a float64 between 0-1.
type FloatInterpolater interface {
	Interpolate(float64) float64
}
// FloatInterpolate wraps w and returns a notifier whose values are passed
// through the given interpolater.
func FloatInterpolate(w comm.Float64Notifier, l FloatInterpolater) comm.Float64Notifier {
	fi := &floatInterpolater{watcher: w, interpolater: l}
	return fi
}
// floatInterpolater is a Float64Notifier that lazily applies an interpolater
// to the values of a wrapped notifier.
type floatInterpolater struct {
	watcher      comm.Float64Notifier
	interpolater FloatInterpolater
}

// Notify forwards the subscription to the wrapped notifier.
func (w *floatInterpolater) Notify(f func()) comm.Id {
	return w.watcher.Notify(f)
}

// Unnotify cancels a subscription made through Notify.
func (w *floatInterpolater) Unnotify(id comm.Id) {
	w.watcher.Unnotify(id)
}

// Value returns the wrapped notifier's current value passed through the
// interpolater.
func (w *floatInterpolater) Value() float64 {
	return w.interpolater.Interpolate(w.watcher.Value())
}
// Default easing curves. The control points match the standard CSS
// transition-timing-function keywords ("ease", "ease-in", "ease-out",
// "ease-in-out").
var (
	DefaultEase      FloatInterpolater = CubicBezierEase{0.25, 0.1, 0.25, 1}
	DefaultInEase    FloatInterpolater = CubicBezierEase{0.42, 0, 1, 1}
	DefaultOutEase   FloatInterpolater = CubicBezierEase{0, 0, 0.58, 1}
	DefaultInOutEase FloatInterpolater = CubicBezierEase{0.42, 0, 0.58, 1}
)
// CubicBezierEase interpolates between 1-0 using a Cubic Bézier curve. The parameters are cubic control parameters. The curve starts at (0,0) going toward (x0,y0), and arrives at (1,1) coming from (x1,y1).
type CubicBezierEase struct {
	X0, Y0, X1, Y1 float64
}

// Interpolate implements the Interpolater interface.
// The curve is sampled through the sprite clock helper on a fixed 0..100000
// tick scale, so the result is a float32-precision approximation.
func (e CubicBezierEase) Interpolate(a float64) float64 {
	f := clock.CubicBezier(float32(e.X0), float32(e.Y0), float32(e.X1), float32(e.Y1))
	t := f(0, 100000, clock.Time(a*100000))
	return float64(t)
}

// Notifier is a convenience method around animate.FloatInterpolate(a, e).
func (e CubicBezierEase) Notifier(a comm.Float64Notifier) comm.Float64Notifier {
	return FloatInterpolate(a, e)
}
// LinearEase is the identity easing: the output equals the 0-1 input.
type LinearEase struct{}

// Interpolate implements the Interpolater interface.
func (l LinearEase) Interpolate(a float64) float64 {
	return a
}

// Notifier is a convenience method around animate.FloatInterpolate(a, l).
func (l LinearEase) Notifier(a comm.Float64Notifier) comm.Float64Notifier {
	return FloatInterpolate(a, l)
}
// PolyInEase eases in by raising the 0-1 input to the power Exp. There are no
// Start/End fields; the curve maps 0-1 onto 0-1.
type PolyInEase struct {
	Exp float64
}

// Interpolate implements the Interpolater interface.
func (e PolyInEase) Interpolate(a float64) float64 {
	return math.Pow(a, e.Exp)
}

// Notifier is a convenience method around animate.FloatInterpolate(n, e)
func (e PolyInEase) Notifier(a comm.Float64Notifier) comm.Float64Notifier {
	return FloatInterpolate(a, e)
}
// PolyOutEase eases out using the reversed polynomial 1-(1-a)^Exp. There are
// no Start/End fields; the curve maps 0-1 onto 0-1.
type PolyOutEase struct {
	Exp float64
}

// Interpolate implements the Interpolater interface.
func (e PolyOutEase) Interpolate(a float64) float64 {
	return 1 - math.Pow(1-a, e.Exp)
}

// Notifier is a convenience method around animate.FloatInterpolate(n, e)
func (e PolyOutEase) Notifier(a comm.Float64Notifier) comm.Float64Notifier {
	return FloatInterpolate(a, e)
}
// PolyInOutEase eases in polynomially over the first half of the 0-1 input
// range and eases out polynomially over the second half. Both halves are
// rescaled so the curve always passes through (0,0), (0.5,0.5), and (1,1).
type PolyInOutEase struct {
	ExpIn  float64 // exponent applied to the ease-in half (a < 0.5)
	ExpOut float64 // exponent applied to the ease-out half (a >= 0.5)
}

// Interpolate implements the Interpolater interface.
//
// The previous implementation returned Pow(a, ExpIn) for a < 0.5 and
// 1-Pow(1-a, ExpOut) otherwise, which jumps at a = 0.5 for exponents other
// than 1 (for exponent 2 the two sides give 0.25 and 0.75). The halves are
// now scaled by 2 on input and 1/2 on output so they meet at (0.5, 0.5).
func (e PolyInOutEase) Interpolate(a float64) float64 {
	if a < 0.5 {
		return math.Pow(2*a, e.ExpIn) / 2
	}
	return 1 - math.Pow(2*(1-a), e.ExpOut)/2
}
// Notifier is a convenience method around animate.FloatInterpolate(a, e).
func (e PolyInOutEase) Notifier(a comm.Float64Notifier) comm.Float64Notifier {
	return FloatInterpolate(a, e)
}
// FloatLerp maps the 0-1 input range linearly onto [Start, End].
type FloatLerp struct {
	Start, End float64
}

// Interpolate implements the Interpolater interface.
func (l FloatLerp) Interpolate(a float64) float64 {
	delta := l.End - l.Start
	return l.Start + delta*a
}
// Notifier is a convenience method around animate.FloatInterpolate(n, e)
func (e FloatLerp) Notifier(a comm.Float64Notifier) comm.Float64Notifier {
return FloatInterpolate(a, e)
} | animate/float.go | 0.907772 | 0.45647 | float.go | starcoder |
package engine
// Simulator represents a system that should be called as part of the main loop.
type Simulator interface {
	Simulate(dt float64)
}

// PreSimulator allows systems to be called before the main Simulator systems.
type PreSimulator interface {
	PreSimulate(dt float64)
}

// PostSimulator allows systems to be called after the main Simulator systems.
type PostSimulator interface {
	PostSimulate(dt float64)
}

// Renderer represents a system that has rendering functionality. In the default
// SimulationStepper this will be triggered separately to the different Simulators.
type Renderer interface {
	Render(ipl float64)
}

// PreRenderer allows render systems to be called before the main Renderer systems.
type PreRenderer interface {
	PreRender(ipl float64)
}

// PostRenderer allows render systems to be called after the main Renderer systems.
type PostRenderer interface {
	PostRender(ipl float64)
}
// World contains all of the different systems and is responsible for
// orchestrating the execution of the encapsulated systems.
type World struct {
	preSimulators  []PreSimulator
	postSimulators []PostSimulator
	simulators     []Simulator
	preRenderers   []PreRenderer
	postRenderers  []PostRenderer
	renderers      []Renderer
}

// NewWorld returns a new, empty World instance.
func NewWorld() *World {
	return &World{}
}

// AddPreSimulator adds the given PreSimulator to the World.
func (w *World) AddPreSimulator(s PreSimulator) {
	w.preSimulators = append(w.preSimulators, s)
}

// AddPostSimulator adds the given PostSimulator to the World.
func (w *World) AddPostSimulator(s PostSimulator) {
	w.postSimulators = append(w.postSimulators, s)
}

// AddSimulator adds the given Simulator to the World.
func (w *World) AddSimulator(s Simulator) {
	w.simulators = append(w.simulators, s)
}

// AddRenderer adds the given Renderer to the World.
func (w *World) AddRenderer(r Renderer) {
	w.renderers = append(w.renderers, r)
}

// AddPreRenderer adds the given PreRenderer to the World.
func (w *World) AddPreRenderer(r PreRenderer) {
	w.preRenderers = append(w.preRenderers, r)
}

// AddPostRenderer adds the given PostRenderer to the World.
func (w *World) AddPostRenderer(r PostRenderer) {
	w.postRenderers = append(w.postRenderers, r)
}
// Simulate will usually be called by the main game loop. It will loop through PreSimulators then
// the Simulators and finally the PostSimulators in a single call. All Simulators will be called in the
// order that they were added.
func (w *World) Simulate(dt float64) {
	for _, s := range w.preSimulators {
		s.PreSimulate(dt)
	}
	for _, s := range w.simulators {
		s.Simulate(dt)
	}
	for _, s := range w.postSimulators {
		s.PostSimulate(dt)
	}
}
// Render calls all of the Renderers in the order that they were added.
func (w *World) Render(ipl float64) {
for _, r := range w.preRenderers {
r.PreRender(ipl)
}
for _, r := range w.renderers {
r.Render(ipl)
}
for _, r := range w.postRenderers {
r.PostRender(ipl)
}
} | pkg/engine/world.go | 0.81615 | 0.773901 | world.go | starcoder |
package transform
import (
"RenG/config"
"RenG/core"
"RenG/lang/ast"
"RenG/lang/evaluator"
"RenG/lang/object"
"RenG/lang/token"
"fmt"
"strconv"
)
// TransformEval evaluates a transform-block AST node against the given
// texture. Position/size expressions (xpos, ypos, xsize, ysize) mutate the
// texture directly and yield nil; everything else evaluates like an ordinary
// expression and returns its resulting object (or an error object).
func TransformEval(node ast.Node, texture *core.SDL_Texture, env *object.Environment) object.Object {
	switch node := node.(type) {
	case *ast.BlockStatement:
		return evalBlockStatements(node, texture, env)
	case *ast.ExpressionStatement:
		return TransformEval(node.Expression, texture, env)
	case *ast.PrefixExpression:
		// A prefix operator applied directly to an identifier assigns back
		// into the environment; any other operand is evaluated first.
		if rightValue, ok := node.Right.(*ast.Identifier); ok {
			return evalAssignPrefixExpression(node.Operator, rightValue, env)
		} else {
			right := TransformEval(node.Right, texture, env)
			if isError(right) {
				return right
			}
			return evalPrefixExpression(node.Operator, right)
		}
	case *ast.InfixExpression:
		// Assignment operators with an identifier on the left store into the
		// environment; all other infix operators evaluate both operands.
		if leftValue, ok := node.Left.(*ast.Identifier); ok && isAssign(node.Operator) {
			right := TransformEval(node.Right, texture, env)
			if isError(right) {
				return right
			}
			return evalAssignInfixExpression(node.Operator, leftValue, right, env)
		} else {
			left := TransformEval(node.Left, texture, env)
			if isError(left) {
				return left
			}
			right := TransformEval(node.Right, texture, env)
			if isError(right) {
				return right
			}
			return evalInfixExpression(node.Operator, left, right)
		}
	case *ast.IfExpression:
		return evalIfExpression(node, texture, env)
	case *ast.ForExpression:
		return evalForExpression(node, texture, env)
	case *ast.WhileExpression:
		return evalWhileExpression(node, texture, env)
	case *ast.CallFunctionExpression:
		function := TransformEval(node.Function, texture, env)
		if isError(function) {
			return function
		}
		args := evalExpressions(node.Arguments, texture, env)
		if len(args) == 1 && isError(args[0]) {
			return args[0]
		}
		return applyFunction(function, texture, args)
	case *ast.IndexExpression:
		left := TransformEval(node.Left, texture, env)
		if isError(left) {
			return left
		}
		index := TransformEval(node.Index, texture, env)
		if isError(index) {
			return index
		}
		return evalIndexExpression(left, index)
	case *ast.Identifier:
		return evalIdentifier(node, env)
	case *ast.Boolean:
		return &object.Boolean{Value: node.Value}
	case *ast.IntegerLiteral:
		return &object.Integer{Value: node.Value}
	case *ast.FloatLiteral:
		return &object.Float{Value: node.Value}
	case *ast.StringLiteral:
		return evalStringLiteral(node, texture, env)
	case *ast.ArrayLiteral:
		elements := evalExpressions(node.Elements, texture, env)
		if len(elements) == 1 && isError(elements[0]) {
			return elements[0]
		}
		return &object.Array{Elements: elements}
	case *ast.TransformExpression:
		return evalTransformExpression(node, texture, env)
	case *ast.XPosExpression:
		// NOTE(review): the type assertions in the four cases below panic if
		// the expression does not evaluate to an *object.Integer — TODO
		// confirm the parser/evaluator guarantees an integer here.
		result := TransformEval(node.Value, texture, env)
		xpos := result.(*object.Integer).Value
		texture.Xpos = int(xpos)
	case *ast.YPosExpression:
		result := TransformEval(node.Value, texture, env)
		ypos := result.(*object.Integer).Value
		texture.Ypos = int(ypos)
	case *ast.XSizeExpression:
		result := TransformEval(node.Value, texture, env)
		xsize := result.(*object.Integer).Value
		texture.Width = int(xsize)
	case *ast.YSizeExpression:
		result := TransformEval(node.Value, texture, env)
		ysize := result.(*object.Integer).Value
		texture.Height = int(ysize)
	}
	return nil
}
// evalBlockStatements evaluates the statements of a block in order, returning
// early as soon as an error object is produced. Otherwise the result of the
// last statement is returned.
func evalBlockStatements(block *ast.BlockStatement, texture *core.SDL_Texture, env *object.Environment) object.Object {
	var result object.Object
	for _, statement := range block.Statements {
		result = TransformEval(statement, texture, env)
		if result != nil {
			rt := result.Type()
			if rt == object.ERROR_OBJ {
				return result
			}
		}
	}
	return result
}
// centerValue builds the AST expression ((total - size) / 2), used to center
// a texture along one axis. Extracting this removes two near-identical ~30
// line AST literals from evalTransformExpression.
func centerValue(total, size int) *ast.InfixExpression {
	return &ast.InfixExpression{
		Token: token.Token{Type: token.SLASH, Literal: "/"},
		Left: &ast.InfixExpression{
			Token: token.Token{Type: token.MINUS, Literal: "-"},
			Left: &ast.IntegerLiteral{
				Token: token.Token{Type: token.INT, Literal: strconv.Itoa(total)},
				Value: int64(total),
			},
			Operator: "-",
			Right: &ast.IntegerLiteral{
				Token: token.Token{Type: token.INT, Literal: strconv.Itoa(size)},
				Value: int64(size),
			},
		},
		Operator: "/",
		Right: &ast.IntegerLiteral{
			Token: token.Token{Type: token.INT, Literal: "2"},
			Value: 2,
		},
	}
}

// evalTransformExpression evaluates a named transform block. The built-in
// "default" transform appends xpos/ypos statements that center the texture
// on the configured screen before the body is evaluated.
func evalTransformExpression(transform *ast.TransformExpression, texture *core.SDL_Texture, env *object.Environment) object.Object {
	switch transform.Name.Value {
	case "default":
		// xpos = (config.Width - texture.Width) / 2
		transform.Body.Statements = append(transform.Body.Statements, &ast.ExpressionStatement{
			Token: token.Token{Type: token.XPOS, Literal: "xpos"},
			Expression: &ast.XPosExpression{
				Token: token.Token{Type: token.XPOS, Literal: "xpos"},
				Value: centerValue(config.Width, texture.Width),
			},
		})
		// ypos = (config.Height - texture.Height) / 2
		transform.Body.Statements = append(transform.Body.Statements, &ast.ExpressionStatement{
			Token: token.Token{Type: token.YPOS, Literal: "ypos"},
			Expression: &ast.YPosExpression{
				Token: token.Token{Type: token.YPOS, Literal: "ypos"},
				Value: centerValue(config.Height, texture.Height),
			},
		})
	}
	TransformEval(transform.Body, texture, env)
	return nil
}
// evalExpressions evaluates each expression in order. On the first error the
// error object is returned as a single-element slice.
func evalExpressions(exps []ast.Expression, texture *core.SDL_Texture, env *object.Environment) []object.Object {
	var evaluated []object.Object
	for _, exp := range exps {
		obj := TransformEval(exp, texture, env)
		if isError(obj) {
			return []object.Object{obj}
		}
		evaluated = append(evaluated, obj)
	}
	return evaluated
}
// evalStringLiteral renders a string literal, splicing the evaluated values of
// its embedded expressions between the literal text segments.
func evalStringLiteral(str *ast.StringLiteral, texture *core.SDL_Texture, env *object.Environment) *object.String {
	result := &object.String{Value: str.Value}
	// TODO: optimize.
	// Fixed for now, but this likely still needs several optimizations.
	var (
		index    = 0 // position across the interleaved text/expression segments
		expIndex = 0 // next embedded expression to evaluate
	)
	for stringIndex := 0; stringIndex < len(str.Values); stringIndex++ {
		// Emit every expression that occurs before the next text segment.
		for isCurrentExp(index, str) {
			val := TransformEval(str.Exp[expIndex].Exp, texture, env)
			switch value := val.(type) {
			case *object.Integer:
				result.Value += strconv.Itoa(int(value.Value))
			case *object.Float:
				result.Value += fmt.Sprintf("%f", value.Value)
			case *object.Boolean:
				result.Value += strconv.FormatBool(value.Value)
			case *object.String:
				result.Value += value.Value
			default:
				// Unsupported object types replace the whole result.
				result.Value = "ErrorType"
			}
			expIndex++
			index++
		}
		result.Value += str.Values[stringIndex].Str
		index++
	}
	return result
}
// evalIndexExpression dispatches the index operator; only arrays indexed by
// integers are supported.
func evalIndexExpression(left, index object.Object) object.Object {
	switch {
	case left.Type() == object.ARRAY_OBJ && index.Type() == object.INTEGER_OBJ:
		return evalArrayIndexExpression(left, index)
	default:
		return newError("index operator not supported : %s", left.Type())
	}
}

// evalArrayIndexExpression returns the element at the given index, or NULL
// when the index is out of bounds.
func evalArrayIndexExpression(array, index object.Object) object.Object {
	arrayObject := array.(*object.Array)
	idx := index.(*object.Integer).Value
	max := int64(len(arrayObject.Elements) - 1)
	if idx < 0 || idx > max {
		return NULL
	}
	return arrayObject.Elements[idx]
}

// evalIfExpression evaluates the first branch (if, then each elif in order)
// whose condition is truthy, falling back to the else branch or NULL.
func evalIfExpression(ie *ast.IfExpression, texture *core.SDL_Texture, env *object.Environment) object.Object {
	condition := TransformEval(ie.Condition, texture, env)
	if isError(condition) {
		return condition
	}
	if isTruthy(condition) {
		return TransformEval(ie.Consequence, texture, env)
	}
	for _, ee := range ie.Elif {
		if ee != nil {
			elifCondition := TransformEval(ee.Condition, texture, env)
			if isError(elifCondition) {
				return elifCondition
			}
			if isTruthy(elifCondition) {
				return TransformEval(ee.Consequence, texture, env)
			}
		}
	}
	if ie.Alternative != nil {
		return TransformEval(ie.Alternative, texture, env)
	} else {
		return NULL
	}
}

// evalForExpression runs a C-style for loop: the Define clause once, then
// Body followed by the Run clause while Condition stays truthy.
func evalForExpression(node *ast.ForExpression, texture *core.SDL_Texture, env *object.Environment) object.Object {
	var define, condition, result, run object.Object
	define = TransformEval(node.Define, texture, env)
	if isError(define) {
		return define
	}
	condition = TransformEval(node.Condition, texture, env)
	if isError(condition) {
		return condition
	}
	for isTruthy(condition) {
		result = TransformEval(node.Body, texture, env)
		if isError(result) {
			return result
		}
		run = TransformEval(node.Run, texture, env)
		if isError(run) {
			return run
		}
		condition = TransformEval(node.Condition, texture, env)
		if isError(condition) {
			return condition
		}
	}
	return nil
}

// evalWhileExpression repeats the body while the condition stays truthy,
// re-evaluating the condition after every iteration.
func evalWhileExpression(node *ast.WhileExpression, texture *core.SDL_Texture, env *object.Environment) object.Object {
	condition := TransformEval(node.Condition, texture, env)
	if isError(condition) {
		return condition
	}
	for isTruthy(condition) {
		result := TransformEval(node.Body, texture, env)
		if isError(result) {
			return result
		}
		condition = TransformEval(node.Condition, texture, env)
		if isError(condition) {
			return condition
		}
	}
	return nil
}
func evalIdentifier(node *ast.Identifier, env *object.Environment) object.Object {
if val, ok := env.Get(node.Value); ok {
return val
}
if builtin, ok := evaluator.FunctionBuiltins[node.Value]; ok {
return builtin
}
return newError("identifier not found: " + node.Value)
} | reng-core/reng/transform/eval.go | 0.594198 | 0.496521 | eval.go | starcoder |
package store
import "math"
// CollapsingHighestDenseStore is a dense store with a bounded number of bins;
// once the bound is reached, indices above the tracked range are collapsed
// into the highest bin.
type CollapsingHighestDenseStore struct {
	DenseStore
	maxNumBins  int  // upper bound on the length of bins
	isCollapsed bool // set once high indices have been folded into the last bin
}

// NewCollapsingHighestDenseStore returns an empty store bounded to maxNumBins
// bins. minIndex/maxIndex start at extreme sentinel values.
func NewCollapsingHighestDenseStore(maxNumBins int) *CollapsingHighestDenseStore {
	return &CollapsingHighestDenseStore{
		DenseStore:  DenseStore{minIndex: math.MaxInt32, maxIndex: math.MinInt32},
		maxNumBins:  maxNumBins,
		isCollapsed: false,
	}
}
// Add increments the counter at the given index by one.
func (s *CollapsingHighestDenseStore) Add(index int) {
	s.AddWithCount(index, 1)
}

// AddBin adds the bin's count at the bin's index; empty bins are ignored.
func (s *CollapsingHighestDenseStore) AddBin(bin Bin) {
	if count := bin.Count(); count != 0 {
		s.AddWithCount(bin.Index(), count)
	}
}
// AddWithCount adds the given count at the given index, collapsing into the
// highest bucket when the index falls above the collapsed range. Zero counts
// are ignored.
func (s *CollapsingHighestDenseStore) AddWithCount(index int, count float64) {
	if count == 0 {
		return
	}
	arrayIndex := s.normalize(index)
	s.bins[arrayIndex] += count
	s.count += count
}
// normalize grows or shifts the store, if necessary, so that the counter of
// the specified index can be updated, and returns the array position to
// update. Indices above a collapsed range map to the last bin.
func (s *CollapsingHighestDenseStore) normalize(index int) int {
	switch {
	case index > s.maxIndex:
		if s.isCollapsed {
			return len(s.bins) - 1
		}
		s.extendRange(index, index)
		// Extending may itself have triggered a collapse.
		if s.isCollapsed {
			return len(s.bins) - 1
		}
	case index < s.minIndex:
		s.extendRange(index, index)
	}
	return index - s.offset
}
// getNewLength returns the embedded store's suggested length for the given
// range, capped at maxNumBins.
func (s *CollapsingHighestDenseStore) getNewLength(newMinIndex, newMaxIndex int) int {
	return min(s.DenseStore.getNewLength(newMinIndex, newMaxIndex), s.maxNumBins)
}
// extendRange widens the tracked index range to include the given bounds,
// allocating, growing, or re-centering the bins slice as needed.
func (s *CollapsingHighestDenseStore) extendRange(newMinIndex, newMaxIndex int) {
	newMinIndex = min(newMinIndex, s.minIndex)
	newMaxIndex = max(newMaxIndex, s.maxIndex)
	if s.IsEmpty() {
		initialLength := s.getNewLength(newMinIndex, newMaxIndex)
		s.bins = make([]float64, initialLength)
		s.offset = newMinIndex
		s.minIndex = newMinIndex
		s.maxIndex = newMaxIndex
		s.adjust(newMinIndex, newMaxIndex)
	} else if newMinIndex >= s.offset && newMaxIndex < s.offset+len(s.bins) {
		// The new range already fits inside the allocated slice.
		s.minIndex = newMinIndex
		s.maxIndex = newMaxIndex
	} else {
		// To avoid shifting too often when nearing the capacity of the array,
		// we may grow it before we actually reach the capacity.
		newLength := s.getNewLength(newMinIndex, newMaxIndex)
		if newLength > len(s.bins) {
			tmpBins := make([]float64, newLength)
			copy(tmpBins, s.bins)
			s.bins = tmpBins
		}
		s.adjust(newMinIndex, newMaxIndex)
	}
}

// Adjust bins, offset, minIndex and maxIndex, without resizing the bins slice in order to make it fit the
// specified range.
func (s *CollapsingHighestDenseStore) adjust(newMinIndex, newMaxIndex int) {
	if newMaxIndex-newMinIndex+1 > len(s.bins) {
		// The range of indices is too wide, buckets of the highest indices
		// need to be collapsed (this is the "highest" variant of the store).
		newMaxIndex = newMinIndex + len(s.bins) - 1
		if newMaxIndex <= s.minIndex {
			// There will be only one non-empty bucket.
			s.bins = make([]float64, len(s.bins))
			s.offset = newMinIndex
			s.maxIndex = newMaxIndex
			s.bins[len(s.bins)-1] = s.count
		} else {
			shift := s.offset - newMinIndex
			if shift > 0 {
				// Collapse the buckets above newMaxIndex into it.
				n := float64(0)
				for i := newMaxIndex + 1; i <= s.maxIndex; i++ {
					n += s.bins[i-s.offset]
				}
				s.resetBins(newMaxIndex+1, s.maxIndex)
				s.bins[newMaxIndex-s.offset] += n
				s.maxIndex = newMaxIndex
				// Shift the buckets to make room for newMinIndex.
				s.shiftCounts(shift)
			} else {
				// Shift the buckets to make room for newMaxIndex.
				s.shiftCounts(shift)
				s.maxIndex = newMaxIndex
			}
		}
		s.minIndex = newMinIndex
		s.isCollapsed = true
	} else {
		s.centerCounts(newMinIndex, newMaxIndex)
	}
}

// MergeWith adds the counts of the other store into this one; indices above
// this store's (possibly collapsed) range land in the top bucket.
func (s *CollapsingHighestDenseStore) MergeWith(other Store) {
	if other.IsEmpty() {
		return
	}
	o, ok := other.(*CollapsingHighestDenseStore)
	if !ok {
		// Different store implementation: fall back to bin-by-bin merging.
		for bin := range other.Bins() {
			s.AddBin(bin)
		}
		return
	}
	if o.minIndex < s.minIndex || o.maxIndex > s.maxIndex {
		s.extendRange(o.minIndex, o.maxIndex)
	}
	idx := o.maxIndex
	// Whatever still exceeds s.maxIndex after extending goes into the last bin.
	for ; idx > s.maxIndex && idx >= o.minIndex; idx-- {
		s.bins[len(s.bins)-1] += o.bins[idx-o.offset]
	}
	for ; idx > o.minIndex; idx-- {
		s.bins[idx-s.offset] += o.bins[idx-o.offset]
	}
	// This is a separate test so that the comparison in the previous loop is strict (>) and handles
	// o.minIndex = Integer.MIN_VALUE.
	if idx == o.minIndex {
		s.bins[idx-s.offset] += o.bins[idx-o.offset]
	}
	s.count += o.count
}
func (s *CollapsingHighestDenseStore) Copy() Store {
bins := make([]float64, len(s.bins))
copy(bins, s.bins)
return &CollapsingHighestDenseStore{
DenseStore: DenseStore{
bins: bins,
count: s.count,
offset: s.offset,
minIndex: s.minIndex,
maxIndex: s.maxIndex,
},
maxNumBins: s.maxNumBins,
isCollapsed: s.isCollapsed,
}
} | vendor/github.com/DataDog/sketches-go/ddsketch/store/collapsing_highest_dense_store.go | 0.614741 | 0.486514 | collapsing_highest_dense_store.go | starcoder |
package lpoint
import (
"bytes"
"encoding/csv"
"encoding/json"
"errors"
"fmt"
"os"
"path/filepath"
"sort"
"strconv"
"strings"
"github.com/nathangreene3/kmeans"
)
// LPoint labels a point.
type LPoint struct {
	ID    int          `json:"id"`    // identifier of the point
	Label string       `json:"label"` // classification label
	Point kmeans.Point `json:"point"` // coordinates of the point
}
// New returns a labeled point built from the given id, label, and coordinate
// values.
func New(id int, label string, values ...float64) LPoint {
	p := make(kmeans.Point, 0, len(values))
	p = append(p, values...)
	return LPoint{ID: id, Label: label, Point: p}
}

// Copy returns a deep copy of the labeled point.
func (lp LPoint) Copy() LPoint {
	return LPoint{
		ID:    lp.ID,
		Label: lp.Label,
		Point: lp.Point.Copy(),
	}
}
// Dims returns the number of dimensions shared by the given points. Zero is
// returned when the list is empty or the dimensions are inconsistent.
//
// The empty-input guard is required: Validate accepts an empty list, so the
// previous implementation panicked on lps[0] when called with no points.
func Dims(lps ...LPoint) int {
	if len(lps) == 0 {
		return 0
	}
	if err := Validate(lps...); err != nil {
		return 0
	}
	return len(lps[0].Point)
}
// Equals determines if two labeled points are equal: same ID, same Label, and
// point-wise equal coordinates.
func (lp LPoint) Equals(lq LPoint) bool {
	return lp.ID == lq.ID && lp.Label == lq.Label && lp.Point.Equals(lq.Point)
}

// JSON returns the points marshalled into a json-encoded string.
// NOTE(review): json.Encoder.Encode appends a trailing newline, so the result
// differs from json.Marshal output by one byte.
func JSON(lps ...LPoint) (string, error) {
	var sb strings.Builder
	if err := json.NewEncoder(&sb).Encode(lps); err != nil {
		return "", err
	}
	return sb.String(), nil
}
// Labels returns the sorted list of distinct labels present in the given
// labeled points.
func Labels(lps ...LPoint) []string {
	freq := LabelFreq(lps...)
	labels := make([]string, 0, len(freq))
	for label := range freq {
		labels = append(labels, label)
	}
	sort.Strings(labels)
	return labels
}
// Parse ...
// NOTE(review): this is an unfinished stub — it only checks the minimum
// length and never actually populates the returned LPoint from s. TODO
// confirm the intended serialized format (String produces "{%d %s %v}") and
// complete the implementation.
func Parse(s string) (LPoint, error) {
	var lp LPoint
	if len(s) < 2 {
		// Minimum labeled point: {0 {}}
		return LPoint{}, errors.New("invalid format")
	}
	return lp, nil
}
// ParseJSON parses labeled points from a json-encoded string.
// Decoding stops after the first JSON value; trailing data is not rejected.
func ParseJSON(s string) ([]LPoint, error) {
	var lps []LPoint
	if err := json.NewDecoder(strings.NewReader(s)).Decode(&lps); err != nil {
		return nil, err
	}
	return lps, nil
}
// Points extracts a copy of each labeled point's coordinates, dropping the
// ids and labels.
func Points(lps ...LPoint) []kmeans.Point {
	ps := make([]kmeans.Point, 0, len(lps))
	for i := range lps {
		p := make(kmeans.Point, len(lps[i].Point))
		copy(p, lps[i].Point)
		ps = append(ps, p)
	}
	return ps
}
// ReadCSVFile reads labeled points from a csv file whose rows are laid out
// as: id, label, v0, v1, ... The .csv extension is appended when missing and
// the first row is skipped when header is true. Labels are lowercased.
// NOTE(review): rows with fewer than two columns cause an index panic — TODO
// confirm inputs are always well-formed or add a length check.
func ReadCSVFile(file string, header bool) ([]LPoint, error) {
	file = filepath.Clean(file)
	if !strings.EqualFold(filepath.Ext(file), ".csv") {
		file += ".csv"
	}
	b, err := os.ReadFile(file)
	if err != nil {
		return nil, err
	}
	records, err := csv.NewReader(bytes.NewReader(b)).ReadAll()
	if err != nil {
		return nil, err
	}
	var (
		i int // first data row (1 when a header is present)
		n = len(records)
	)
	if header {
		i++
		n--
	}
	ps := make([]LPoint, 0, n)
	for ; i < len(records); i++ {
		// Columns 2..end hold the coordinate values.
		p := make(kmeans.Point, 0, len(records[i])-2)
		for j := 2; j < len(records[i]); j++ {
			pj, err := strconv.ParseFloat(records[i][j], 64)
			if err != nil {
				return nil, err
			}
			p = append(p, pj)
		}
		id, err := strconv.Atoi(records[i][0])
		if err != nil {
			return nil, err
		}
		lp := LPoint{
			ID:    id,
			Label: strings.ToLower(records[i][1]),
			Point: p,
		}
		ps = append(ps, lp)
	}
	return ps, nil
}

// ReadJSONFile returns labeled points parsed from a json file. If the file
// extension .json is not provided in the file name, it will be appended
// before reading the file.
func ReadJSONFile(file string) ([]LPoint, error) {
	file = filepath.Clean(file)
	if !strings.EqualFold(filepath.Ext(file), ".json") {
		file += ".json"
	}
	b, err := os.ReadFile(file)
	if err != nil {
		return nil, err
	}
	return ParseJSON(string(b))
}
// LabelFreq counts how many times each label occurs in the given labeled
// points.
func LabelFreq(lps ...LPoint) map[string]int {
	freq := make(map[string]int)
	for i := range lps {
		freq[lps[i].Label]++
	}
	return freq
}
// String returns a representation of a labeled point in the form
// "{ID Label Point}".
func (lp LPoint) String() string {
	return fmt.Sprintf("{%d %s %v}", lp.ID, lp.Label, lp.Point)
}
// Validate returns an error when the labeled points do not all share the same
// number of dimensions. An empty list is considered valid.
func Validate(lps ...LPoint) error {
	if len(lps) == 0 {
		return nil
	}
	dims := len(lps[0].Point)
	for i := 1; i < len(lps); i++ {
		if len(lps[i].Point) != dims {
			return errors.New("dimension mismatch")
		}
	}
	return nil
}
// WriteCSVFile writes a list of points to a csv file with rows laid out as
// id, label, v0, v1, ... The header row is written only when dimNames is
// non-empty, in which case its length must match the points' dimension.
// NOTE(review): the file is written with os.ModePerm (0777) — TODO confirm
// whether a tighter mode such as 0644 is intended.
func WriteCSVFile(file string, dimNames []string, lps ...LPoint) error {
	file = filepath.Clean(file)
	if !strings.EqualFold(filepath.Ext(file), ".csv") {
		file += ".csv"
	}
	var (
		buf = bytes.NewBuffer(make([]byte, 0))
		w   = csv.NewWriter(buf)
	)
	dims := Dims(lps...)
	if len(dimNames) != 0 {
		if len(dimNames) != dims {
			return errors.New("dimension mismatch")
		}
		// Header: id, label, then the lowercased dimension names.
		header := append(
			make([]string, 0, len(dimNames)+2),
			"id",
			"label",
		)
		for j := 0; j < len(dimNames); j++ {
			header = append(header, strings.ToLower(dimNames[j]))
		}
		if err := w.Write(header); err != nil {
			return err
		}
	}
	for i := 0; i < len(lps); i++ {
		record := append(
			make([]string, 0, len(lps[i].Point)+2),
			strconv.Itoa(lps[i].ID),
			lps[i].Label,
		)
		for j := 0; j < len(lps[i].Point); j++ {
			record = append(record, strconv.FormatFloat(lps[i].Point[j], 'f', -1, 64))
		}
		if err := w.Write(record); err != nil {
			return err
		}
	}
	w.Flush()
	if err := w.Error(); err != nil {
		return err
	}
	return os.WriteFile(file, buf.Bytes(), os.ModePerm)
}
// WriteJSONFile writes labeled points to a json file. If the file
// extension .json is not provided in the file name, it will be
// appended before writing the file.
func WriteJSONFile(file string, lps ...LPoint) error {
s, err := JSON(lps...)
if err != nil {
return err
}
file = filepath.Clean(file)
if !strings.EqualFold(filepath.Ext(file), ".json") {
file += ".json"
}
return os.WriteFile(file, []byte(s), os.ModePerm)
} | lpoint/lpoint.go | 0.718298 | 0.459258 | lpoint.go | starcoder |
package test
import (
"fmt"
"os"
"strconv"
"strings"
"testing"
toolchainv1alpha1 "github.com/codeready-toolchain/api/api/v1alpha1"
"github.com/codeready-toolchain/host-operator/pkg/counter"
"github.com/codeready-toolchain/host-operator/pkg/metrics"
commontest "github.com/codeready-toolchain/toolchain-common/pkg/test"
"github.com/codeready-toolchain/toolchain-common/pkg/test/masteruserrecord"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"k8s.io/apimachinery/pkg/runtime"
)
// CounterAssertion wraps a snapshot of the counter cache for fluent
// test assertions.
type CounterAssertion struct {
	t      *testing.T
	counts counter.Counts
}

// AssertThatCountersAndMetrics snapshots the initialized counter cache and
// returns an assertion helper over it.
func AssertThatCountersAndMetrics(t *testing.T) *CounterAssertion {
	counts, err := counter.GetCounts()
	require.NoError(t, err)
	return &CounterAssertion{
		t:      t,
		counts: counts,
	}
}

// AssertThatUninitializedCounters verifies the counter cache has not been
// initialized yet and returns an assertion helper over the (empty) counts.
func AssertThatUninitializedCounters(t *testing.T) *CounterAssertion {
	counts, err := counter.GetCounts()
	require.EqualErrorf(t, err, "counter is not initialized", "should be error because counter hasn't been initialized yet")
	return &CounterAssertion{
		t:      t,
		counts: counts,
	}
}

// HaveUserAccountsForCluster asserts both the cached count and the exposed
// gauge metric for the given member cluster.
func (a *CounterAssertion) HaveUserAccountsForCluster(clusterName string, number int) *CounterAssertion {
	assert.Equal(a.t, number, a.counts.UserAccountsPerClusterCounts[clusterName])
	AssertMetricsGaugeEquals(a.t, number, metrics.UserAccountGaugeVec.WithLabelValues(clusterName))
	return a
}

// HaveUsersPerActivationsAndDomain asserts the cached counts and gauge values;
// each metric key is a comma-separated label tuple.
func (a *CounterAssertion) HaveUsersPerActivationsAndDomain(expected toolchainv1alpha1.Metric) *CounterAssertion {
	actual := a.counts.UserSignupsPerActivationAndDomainCounts
	assert.Equal(a.t, map[string]int(expected), actual)
	for key, count := range expected {
		AssertMetricsGaugeEquals(a.t, count, metrics.UserSignupsPerActivationAndDomainGaugeVec.WithLabelValues(strings.Split(key, ",")...))
	}
	return a
}

// HaveMasterUserRecordsPerDomain asserts the cached per-domain MUR counts and
// the corresponding gauge values.
func (a *CounterAssertion) HaveMasterUserRecordsPerDomain(expected toolchainv1alpha1.Metric) *CounterAssertion {
	actual := a.counts.MasterUserRecordPerDomainCounts
	assert.Equal(a.t, map[string]int(expected), actual, "invalid counter values")
	for domain, count := range expected {
		AssertMetricsGaugeEquals(a.t, count, metrics.MasterUserRecordGaugeVec.WithLabelValues(domain), "invalid gauge value for domain '%v'", domain)
	}
	return a
}
// CreateMultipleMurs returns `number` MasterUserRecords named
// prefix0..prefix(number-1), all targeting the given cluster.
func CreateMultipleMurs(t *testing.T, prefix string, number int, targetCluster string) []runtime.Object {
	murs := make([]runtime.Object, number)
	for index := range murs {
		murs[index] = masteruserrecord.NewMasterUserRecord(t, fmt.Sprintf("%s%d", prefix, index), masteruserrecord.TargetCluster(targetCluster))
	}
	return murs
}

// CreateMultipleUserSignups returns `number` UserSignups named
// prefix0..prefix(number-1) with activation-counter annotations 1..number.
func CreateMultipleUserSignups(prefix string, number int) []runtime.Object {
	usersignups := make([]runtime.Object, number)
	for index := range usersignups {
		usersignups[index] = NewUserSignup(
			WithName(fmt.Sprintf("%s%d", prefix, index)),
			WithAnnotation(toolchainv1alpha1.UserSignupActivationCounterAnnotationKey, strconv.Itoa(index+1)),
		)
	}
	return usersignups
}
// InitializeCounters resets the counter cache and synchronizes it from the
// given ToolchainStatus and initial objects. The reset is also registered as
// a test cleanup so state does not leak between tests.
func InitializeCounters(t *testing.T, toolchainStatus *toolchainv1alpha1.ToolchainStatus, initObjs ...runtime.Object) {
	// NOTE(review): os.Setenv error is ignored; t.Setenv would fail fast and
	// restore the variable automatically — TODO consider switching.
	os.Setenv("WATCH_NAMESPACE", commontest.HostOperatorNs)
	counter.Reset()
	t.Cleanup(counter.Reset)
	initializeCounters(t, commontest.NewFakeClient(t, initObjs...), toolchainStatus)
}

// InitializeCountersWithoutReset synchronizes the counter cache without
// resetting it first; a reset is still registered for test cleanup.
func InitializeCountersWithoutReset(t *testing.T, toolchainStatus *toolchainv1alpha1.ToolchainStatus) {
	os.Setenv("WATCH_NAMESPACE", commontest.HostOperatorNs)
	t.Cleanup(counter.Reset)
	initializeCounters(t, commontest.NewFakeClient(t), toolchainStatus)
}
func initializeCounters(t *testing.T, cl *commontest.FakeClient, toolchainStatus *toolchainv1alpha1.ToolchainStatus) {
t.Logf("toolchainStatus members: %v", toolchainStatus.Status.Members)
err := counter.Synchronize(cl, toolchainStatus)
require.NoError(t, err)
} | test/counter.go | 0.578448 | 0.404655 | counter.go | starcoder |
package osm
import (
"fmt"
"sort"
"strconv"
"strings"
)
// Type is the type of different osm objects.
// ie. node, way, relation, changeset, note, user.
type Type string

// Constants for the different object types. The string values are the
// lowercase names used throughout this package (e.g. in FeatureID.String).
const (
	TypeNode      Type = "node"
	TypeWay       Type = "way"
	TypeRelation  Type = "relation"
	TypeChangeset Type = "changeset"
	TypeNote      Type = "note"
	TypeUser      Type = "user"
	TypeBounds    Type = "bounds"
)
// objectID returns an object id from the given type, encoding ref and, for
// the versioned element types (node/way/relation), the version v. For the
// unversioned types (changeset/note/user/bounds), v is ignored.
func (t Type) objectID(ref int64, v int) (ObjectID, error) {
	switch t {
	case TypeNode:
		return NodeID(ref).ObjectID(v), nil
	case TypeWay:
		return WayID(ref).ObjectID(v), nil
	case TypeRelation:
		return RelationID(ref).ObjectID(v), nil
	case TypeChangeset:
		return ChangesetID(ref).ObjectID(), nil
	case TypeNote:
		return NoteID(ref).ObjectID(), nil
	case TypeUser:
		return UserID(ref).ObjectID(), nil
	case TypeBounds:
		// NOTE(review): ObjectID is invoked on a nil *Bounds here (ref is
		// ignored for bounds) — confirm the method tolerates a nil receiver.
		var b *Bounds
		return b.ObjectID(), nil
	}

	return 0, fmt.Errorf("unknown type: %v", t)
}
// FeatureID returns a feature id for the given reference from this type.
// Only node, way and relation are feature types; any other type is an error.
func (t Type) FeatureID(ref int64) (FeatureID, error) {
	var id FeatureID
	switch t {
	case TypeNode:
		id = NodeID(ref).FeatureID()
	case TypeWay:
		id = WayID(ref).FeatureID()
	case TypeRelation:
		id = RelationID(ref).FeatureID()
	default:
		return 0, fmt.Errorf("unknown type: %v", t)
	}
	return id, nil
}
// Bit layout of the packed 64-bit ids:
//
//	bits  0-15  version (only used by element ids)
//	bits 16-55  the reference (ref)
//	bits 56-62  the object type
const (
	versionBits = 16
	versionMask = 0x000000000000FFFF
	refMask     = 0x00FFFFFFFFFF0000
	featureMask = 0x7FFFFFFFFFFF0000 // type + ref, i.e. an id with the version bits cleared
	typeMask    = 0x7F00000000000000

	// Values for the type field (bits 56-62).
	boundsMask    = 0x0800000000000000
	nodeMask      = 0x1000000000000000
	wayMask       = 0x2000000000000000
	relationMask  = 0x3000000000000000
	changesetMask = 0x4000000000000000
	noteMask      = 0x5000000000000000
	userMask      = 0x6000000000000000
)
// A FeatureID is an identifier for a feature in OSM.
// It is meant to represent all the versions of a given element.
type FeatureID int64

// Type returns the Type of the feature.
// Returns empty string for invalid type.
func (id FeatureID) Type() Type {
	// Compare the full 7-bit type field against the known feature types.
	switch id & typeMask {
	case nodeMask:
		return TypeNode
	case wayMask:
		return TypeWay
	case relationMask:
		return TypeRelation
	}

	return ""
}

// Ref return the ID reference for the feature. Not unique without the type.
func (id FeatureID) Ref() int64 {
	// Mask off the type bits and shift away the version bits.
	return int64((id & refMask) >> versionBits)
}
// ObjectID is a helper to convert the id to an object id.
func (id FeatureID) ObjectID(v int) ObjectID {
	return ObjectID(id.ElementID(v))
}

// ElementID is a helper to convert the id to an element id, storing the
// version v in the low 16 bits.
func (id FeatureID) ElementID(v int) ElementID {
	// NOTE(review): v is truncated to 16 bits by versionMask; versions above
	// 65535 would silently wrap — confirm that is out of range in practice.
	return ElementID(id | (versionMask & FeatureID(v)))
}
// NodeID returns the id of this feature as a node id.
// The function will panic if this feature is not of TypeNode.
func (id FeatureID) NodeID() NodeID {
	// Compare the full type field. The previous check `id&nodeMask != nodeMask`
	// accepted any type whose bit pattern is a superset of the node bits,
	// e.g. relations (0x3 in the type field contains the node bit 0x1), so a
	// relation id silently produced a bogus NodeID instead of panicking.
	if id&typeMask != nodeMask {
		panic(fmt.Sprintf("not a node: %v", id))
	}

	return NodeID(id.Ref())
}
// WayID returns the id of this feature as a way id.
// The function will panic if this feature is not of TypeWay.
func (id FeatureID) WayID() WayID {
	// Compare the full type field. The previous check `id&wayMask != wayMask`
	// accepted any type whose bit pattern is a superset of the way bits,
	// e.g. relations (0x3 contains 0x2) and users (0x6 contains 0x2), so
	// those ids silently produced a bogus WayID instead of panicking.
	if id&typeMask != wayMask {
		panic(fmt.Sprintf("not a way: %v", id))
	}

	return WayID(id.Ref())
}
// RelationID returns the id of this feature as a relation id.
// The function will panic if this feature is not of TypeRelation.
func (id FeatureID) RelationID() RelationID {
	// Compare the full type field rather than `id&relationMask != relationMask`:
	// the masked form accepts any type value whose bits are a superset of
	// 0x3, and this keeps the check consistent with NodeID/WayID.
	if id&typeMask != relationMask {
		panic(fmt.Sprintf("not a relation: %v", id))
	}

	return RelationID(id.Ref())
}
// String returns "type/ref" for the feature, using "unknown" when the type
// bits do not name a feature type.
func (id FeatureID) String() string {
	// Type() returns the empty string for anything that is not a
	// node/way/relation, which this method renders as "unknown".
	t := id.Type()
	if t == "" {
		t = "unknown"
	}
	return fmt.Sprintf("%s/%d", t, id.Ref())
}
// ParseFeatureID takes a string and tries to determine the feature id from it.
// The string must be formatted at "type/id", the same as the result of the String method.
func ParseFeatureID(s string) (FeatureID, error) {
	parts := strings.Split(s, "/")
	if len(parts) != 2 {
		return 0, fmt.Errorf("invalid feature id: %v", s)
	}

	// The reference must be a base-10, signed 64-bit integer.
	n, err := strconv.ParseInt(parts[1], 10, 64)
	if err != nil {
		return 0, fmt.Errorf("invalid feature id: %v: %v", s, err)
	}

	// Type.FeatureID rejects anything but node, way and relation.
	id, err := Type(parts[0]).FeatureID(n)
	if err != nil {
		return 0, fmt.Errorf("invalid feature id: %s: %v", s, err)
	}

	return id, nil
}
// FeatureIDs is a slice of FeatureIDs with some helpers on top.
type FeatureIDs []FeatureID

// Counts returns the number of each type of feature in the set of ids.
// Ids of unknown type are not counted.
func (ids FeatureIDs) Counts() (nodes, ways, relations int) {
	for i := range ids {
		switch ids[i].Type() {
		case TypeNode:
			nodes++
		case TypeWay:
			ways++
		case TypeRelation:
			relations++
		}
	}
	return nodes, ways, relations
}
type featureIDsSort FeatureIDs
// Sort will order the ids by type, node, way, relation, changeset,
// and then id.
func (ids FeatureIDs) Sort() {
sort.Sort(featureIDsSort(ids))
}
func (ids featureIDsSort) Len() int { return len(ids) }
func (ids featureIDsSort) Swap(i, j int) { ids[i], ids[j] = ids[j], ids[i] }
func (ids featureIDsSort) Less(i, j int) bool {
return ids[i] < ids[j]
} | feature.go | 0.701917 | 0.511961 | feature.go | starcoder |
package uiprogress
import (
"bytes"
"errors"
"fmt"
"sync"
"time"
"github.com/neflyte/uiprogress/util/strutil"
)
const (
	oneHundredPercent = 100.00 // oneHundredPercent is the float value of 100%.
)

var (
	// Fill is the default character representing completed progress
	Fill byte = '='
	// Head is the default character that moves when progress is updated
	Head byte = '>'
	// Empty is the default character that represents the empty progress
	Empty byte = '-'
	// LeftEnd is the default character in the left most part of the progress indicator
	LeftEnd byte = '['
	// RightEnd is the default character in the right most part of the progress indicator
	RightEnd byte = ']'
	// Width is the default width of the progress bar
	Width = 70
	// ErrMaxCurrentReached is the error returned when trying to set a current value that exceeds the total value
	ErrMaxCurrentReached = errors.New("errors: current value is greater total value")
)
// Bar represents a progress bar
type Bar struct {
	TimeStarted time.Time // TimeStarted is time progress began

	appendFuncs  []DecoratorFunc // decorators rendered to the right of the bar
	prependFuncs []DecoratorFunc // decorators rendered to the left of the bar

	Total int // Total of the total for the progress bar
	Width int // Width is the width of the progress bar

	timeElapsed time.Duration // timeElapsed is the time elapsed for the progress
	current     int           // current progress count
	mtx         *sync.RWMutex // guards current, TimeStarted, timeElapsed and the decorator slices

	LeftEnd  byte // LeftEnd is character in the left most part of the progress indicator. Defaults to '['
	RightEnd byte // RightEnd is character in the right most part of the progress indicator. Defaults to ']'
	Fill     byte // Fill is the character representing completed progress. Defaults to '='
	Head     byte // Head is the character that moves when progress is updated. Defaults to '>'
	Empty    byte // Empty is the character that represents the empty progress. Default is '-'

	HideProgressBar bool // HideProgressBar is a flag that indicates the progress bar is not to be rendered
}

// DecoratorFunc is a function that can be prepended and appended to the progress bar
type DecoratorFunc func(b *Bar) string
// NewBar returns a new progress bar for the given total, initialized with
// the package-level default characters and width.
func NewBar(total int) *Bar {
	b := &Bar{
		Total: total,
		Width: Width,
		mtx:   &sync.RWMutex{},
	}
	b.LeftEnd = LeftEnd
	b.RightEnd = RightEnd
	b.Head = Head
	b.Fill = Fill
	b.Empty = Empty
	b.HideProgressBar = false
	return b
}
// Set the current count of the bar. It returns ErrMaxCurrentReached when n
// exceeds the total value. This is an atomic operation and concurrency safe.
func (b *Bar) Set(n int) error {
	b.mtx.Lock()
	defer b.mtx.Unlock()

	if n > b.Total {
		return ErrMaxCurrentReached
	}
	b.current = n
	return nil
}

// Incr increments the current value by 1 and updates the elapsed time,
// returning true. It returns false if the cursor has reached or exceeds the
// total value. The start time is recorded on the first increment.
func (b *Bar) Incr() bool {
	b.mtx.Lock()
	defer b.mtx.Unlock()

	n := b.current + 1
	if n > b.Total {
		return false
	}
	// Record the start of progress on the first increment. IsZero is the
	// idiomatic form of the previous comparison against a zero time.Time.
	if b.TimeStarted.IsZero() {
		b.TimeStarted = time.Now()
	}
	b.timeElapsed = time.Since(b.TimeStarted)
	b.current = n
	return true
}

// Current returns the current progress of the bar.
func (b *Bar) Current() int {
	b.mtx.RLock()
	defer b.mtx.RUnlock()
	return b.current
}
// AppendFunc runs the decorator function and renders the output on the right of the progress bar.
func (b *Bar) AppendFunc(f DecoratorFunc) *Bar {
	b.mtx.Lock()
	defer b.mtx.Unlock()
	b.appendFuncs = append(b.appendFuncs, f)
	return b
}

// AppendCompleted appends the completion percent to the progress bar.
func (b *Bar) AppendCompleted() *Bar {
	// The method expression has exactly the DecoratorFunc signature.
	return b.AppendFunc((*Bar).CompletedPercentString)
}

// AppendElapsed appends the time elapsed to the progress bar.
func (b *Bar) AppendElapsed() *Bar {
	return b.AppendFunc(func(bar *Bar) string {
		return strutil.PadLeft(bar.TimeElapsedString(), 5, ' ')
	})
}

// PrependFunc runs the decorator function and renders the output on the left of the progress bar.
func (b *Bar) PrependFunc(f DecoratorFunc) *Bar {
	b.mtx.Lock()
	defer b.mtx.Unlock()
	b.prependFuncs = append(b.prependFuncs, f)
	return b
}

// PrependCompleted prepends the percent completed to the progress bar.
func (b *Bar) PrependCompleted() *Bar {
	return b.PrependFunc((*Bar).CompletedPercentString)
}

// PrependElapsed prepends the time elapsed to the beginning of the bar.
func (b *Bar) PrependElapsed() *Bar {
	return b.PrependFunc(func(bar *Bar) string {
		return strutil.PadLeft(bar.TimeElapsedString(), 5, ' ')
	})
}
// Bytes returns the byte presentation of the progress bar: the bar itself
// (unless hidden), with the appended decorators rendered to its right and
// the prepended decorators to its left.
func (b *Bar) Bytes() []byte {
	pb := make([]byte, 0)
	if !b.HideProgressBar {
		completedWidth := int(float64(b.Width) * (b.CompletedPercent() / oneHundredPercent))

		// add fill and empty bits
		var buf bytes.Buffer
		for i := 0; i < completedWidth; i++ {
			buf.WriteByte(b.Fill)
		}
		for i := 0; i < b.Width-completedWidth; i++ {
			buf.WriteByte(b.Empty)
		}

		// set head bit
		pb = buf.Bytes()
		if completedWidth > 0 && completedWidth < b.Width {
			pb[completedWidth-1] = b.Head
		}

		// set left and right end bits; guard against a zero or negative
		// Width, which previously caused an index-out-of-range panic here.
		if len(pb) > 0 {
			pb[0], pb[len(pb)-1] = b.LeftEnd, b.RightEnd
		}
	}

	// render append functions to the right of the bar
	for _, f := range b.appendFuncs {
		pb = append(pb, ' ')
		pb = append(pb, []byte(f(b))...)
	}

	// render prepend functions to the left of the bar
	for _, f := range b.prependFuncs {
		args := []byte(f(b))
		args = append(args, ' ')
		pb = append(args, pb...)
	}
	return pb
}
// String returns the string representation of the bar
func (b *Bar) String() string {
return string(b.Bytes())
}
// CompletedPercent return the percent completed
func (b *Bar) CompletedPercent() float64 {
return (float64(b.Current()) / float64(b.Total)) * oneHundredPercent
}
// CompletedPercentString returns the formatted string representation of the completed percent
func (b *Bar) CompletedPercentString() string {
return fmt.Sprintf("%3.f%%", b.CompletedPercent())
}
// TimeElapsed returns the time elapsed
func (b *Bar) TimeElapsed() time.Duration {
b.mtx.RLock()
defer b.mtx.RUnlock()
return b.timeElapsed
}
// TimeElapsedString returns the formatted string representation of the time elapsed
func (b *Bar) TimeElapsedString() string {
return strutil.PrettyTime(b.TimeElapsed())
}
// NoProgressBar sets the HideProgressBar flag to true
func (b *Bar) NoProgressBar() *Bar {
b.HideProgressBar = true
return b
} | bar.go | 0.688259 | 0.437763 | bar.go | starcoder |
package mission
// GeodesicCoordinates is a latitude/longitude pair expressed in degrees.
type GeodesicCoordinates struct {
	latitudeDegree  float64
	longitudeDegree float64
}

// NewGeodesicCoordinatesFromDegree builds coordinates from a latitude and a
// longitude given in degrees.
func NewGeodesicCoordinatesFromDegree(
	latitudeDegree, longitudeDegree float64,
) GeodesicCoordinates {
	var c GeodesicCoordinates
	c.latitudeDegree = latitudeDegree
	c.longitudeDegree = longitudeDegree
	return c
}
// Height is a vertical distance in meters.
type Height struct {
	heightM float64
}

// NewHeightFromM wraps a height given in meters.
func NewHeightFromM(heightM float64) Height {
	return Height{
		heightM: heightM,
	}
}

// Speed is a scalar speed in meters per second.
type Speed struct {
	speedMS float64
}

// NewSpeedFromMS wraps a speed given in meters per second.
func NewSpeedFromMS(speedMS float64) Speed {
	return Speed{
		speedMS: speedMS,
	}
}
// Waypoint is one ordered point of a navigation plan: a position, a height
// relative to the takeoff point and a target speed.
type Waypoint struct {
	pointOrder     int
	coordinates    GeodesicCoordinates
	relativeHeight Height
	speed          Speed
}

// NewWaypoint builds a waypoint from raw degree/meter/speed values.
func NewWaypoint(
	pointOrder int,
	latitudeDegree, longitudeDegree, relativeHeightM, speedMS float64,
) Waypoint {
	return Waypoint{
		pointOrder:     pointOrder,
		coordinates:    NewGeodesicCoordinatesFromDegree(latitudeDegree, longitudeDegree),
		relativeHeight: NewHeightFromM(relativeHeightM),
		speed:          NewSpeedFromMS(speedMS),
	}
}
// UploadID is a string identifier associated with the navigation.
type UploadID string

// Navigation is a mutable navigation plan: an ordered list of waypoints
// plus the ground height of the takeoff point.
type Navigation struct {
	currentOrder                            int // order assigned to the most recently pushed waypoint
	takeoffPointGroundHeightWGS84EllipsoidM Height
	waypoints                               []Waypoint
	uploadID                                UploadID
}
// NewNavigation returns an empty navigation whose takeoff point ground
// height (WGS84 ellipsoid, meters) is the given value.
func NewNavigation(takeoffPointGroundHeightWGS84EllipsoidM float64) *Navigation {
	n := &Navigation{}
	n.currentOrder = 0
	n.takeoffPointGroundHeightWGS84EllipsoidM = NewHeightFromM(takeoffPointGroundHeightWGS84EllipsoidM)
	n.waypoints = []Waypoint{}
	return n
}

// GetTakeoffPointGroundHeightWGS84EllipsoidM returns the takeoff point
// ground height in meters.
func (n *Navigation) GetTakeoffPointGroundHeightWGS84EllipsoidM() float64 {
	return n.takeoffPointGroundHeightWGS84EllipsoidM.heightM
}

// PushNextWaypoint appends a waypoint, assigning it the next order number
// (the first pushed waypoint gets order 1).
func (n *Navigation) PushNextWaypoint(
	latitudeDegree, longitudeDegree, relativeHeightM, speedMS float64,
) {
	n.currentOrder++
	w := NewWaypoint(
		n.currentOrder,
		latitudeDegree,
		longitudeDegree,
		relativeHeightM,
		speedMS,
	)
	n.waypoints = append(n.waypoints, w)
}

// GetUploadID returns the upload id of the navigation.
func (n *Navigation) GetUploadID() UploadID {
	return n.uploadID
}
// ProvideWaypointsInterest .
func (n *Navigation) ProvideWaypointsInterest(
waypoint func(pointOrder int, latitudeDegree, longitudeDegree, relativeHeightM, speedMS float64),
) {
for _, w := range n.waypoints {
waypoint(
w.pointOrder,
w.coordinates.latitudeDegree,
w.coordinates.longitudeDegree,
w.relativeHeight.heightM,
w.speed.speedMS,
)
}
} | fleet-formation/pkg/mission/domain/mission/navigation.go | 0.824321 | 0.660624 | navigation.go | starcoder |
package tiny
import (
"bytes"
"encoding/binary"
"errors"
"os"
"github.com/boltdb/bolt"
)
// DB is a small database abstraction built on top of Bolt DB modelled after Google's Big Table.
type DB interface {
	// ListTables lists all the table names in the database.
	ListTables() [][]byte

	// GetOrCreateTable returns a table, creating it if it doesn't exist.
	GetOrCreateTable(name []byte) (Table, error)

	// DropTable drops a table from the database.
	DropTable(name []byte) error

	// Close closes the database for future reads and writes.
	Close() error
}

// Table represents a table in the database.
type Table interface {
	// GetName returns the name of the table.
	GetName() []byte

	// WriteRows writes a list of rows to the table overwriting any existing values that overlap.
	WriteRows(r ...Row) error

	// WriteColumns writes a column to a row and column family returning an error if there is one. The row and column family
	// will be created if they do not exist.
	WriteColumns(r []byte, cf []byte, c ...Column) error

	// IncrementColumn increments the value of a column by v. If the row or column family does not exist, they will be
	// created, and an error returned if that fails. If the column does not exist, a value will be created to store an
	// unsigned 64-bit integer. If the existing value is not 8 bytes, an error will be returned. Otherwise the value will be
	// incremented.
	IncrementColumn(r, cf, c []byte, v int) error

	// ReadRow reads a single row from the table. Returns an error if the row does not exist.
	ReadRow(key []byte) (Row, error)

	// ScanRows scans a table for all rows with the given prefix. Note: All rows should be read from the channel to avoid memory leak.
	ScanRows(prefix []byte) chan Row

	// ScanColumns scans a row and column family for all columns with the given prefix. Note: All columns should be read from the channel to avoid memory leaks.
	ScanColumns(r, cf []byte, prefix []byte) chan Column
}
// Row represents a single record in a Table.
type Row struct {
	// RowKey is the unique record ID.
	RowKey []byte
	// ColumnFamilies is a list of column families for a single row.
	ColumnFamilies []ColumnFamily
}

// GetColumnFamily returns a column family by name if it exists for this row.
// Otherwise an error is returned.
func (r Row) GetColumnFamily(name []byte) (ColumnFamily, error) {
	for _, cf := range r.ColumnFamilies {
		if bytes.Equal(cf.Name, name) {
			return cf, nil
		}
	}
	return ColumnFamily{}, errors.New("Column Family does not exist")
}

// ColumnFamily is a group of columns for a single record.
type ColumnFamily struct {
	// Name is the name of the column family.
	Name []byte
	// Columns is a list of columns in the column family.
	Columns []Column
}

// GetColumn returns a column by key if it exists for this row. Otherwise, an error is returned.
func (cf ColumnFamily) GetColumn(key []byte) (Column, error) {
	for _, c := range cf.Columns {
		if bytes.Equal(c.Key, key) {
			return c, nil
		}
	}
	return Column{}, errors.New("Column does not exist")
}

// Column represents a single column for a record.
type Column struct {
	Key   []byte
	Value []byte
}

// Uint64 reads a little-endian uint64 from the value. If the length of the
// value is not exactly 8 bytes, 0 is returned.
func (c Column) Uint64() uint64 {
	// The length check was previously inverted (`!= 8`): it decoded only
	// when the value was NOT 8 bytes — panicking inside encoding/binary for
	// short values — and returned 0 for every valid 8-byte value.
	if len(c.Value) == 8 {
		return binary.LittleEndian.Uint64(c.Value)
	}
	return 0
}

// Int64 reads an int64 from the value. If the length of the value is not
// exactly 8 bytes, 0 is returned.
func (c Column) Int64() int64 {
	return int64(c.Uint64())
}
// Open creates a new database if it doesn't exist or opens an existing database if it does.
// The underlying bolt database is opened with default options.
func Open(filename string, mode os.FileMode) (DB, error) {
	b, err := bolt.Open(filename, mode, nil)
	if err != nil {
		return nil, err
	}
	return db{b}, nil
}

// db implements the DB interface on top of a bolt database handle.
type db struct {
	internal *bolt.DB
}
// ListTables lists all the table names in the database. Top-level bolt
// buckets are tables; plain top-level key/value pairs (v != nil) are skipped.
func (db db) ListTables() [][]byte {
	var tables [][]byte
	// NOTE(review): the View error is discarded because the interface has no
	// error return; on failure an empty list is produced.
	db.internal.View(func(tx *bolt.Tx) error {
		c := tx.Cursor()
		for k, v := c.First(); k != nil; k, v = c.Next() {
			if v == nil {
				// Keys returned by bolt are only valid for the life of the
				// transaction, so copy before retaining them.
				tables = append(tables, append([]byte(nil), k...))
			}
		}
		return nil
	})
	return tables
}

// GetOrCreateTable returns a table, creating it if it doesn't exist.
func (db db) GetOrCreateTable(name []byte) (Table, error) {
	err := db.internal.Update(func(tx *bolt.Tx) error {
		_, e := tx.CreateBucketIfNotExists(name)
		return e
	})
	if err != nil {
		// Don't hand back a usable-looking table handle when creation failed.
		return nil, err
	}
	return table{db.internal, name}, nil
}

// DropTable drops a table from the database.
func (db db) DropTable(name []byte) error {
	return db.internal.Update(func(tx *bolt.Tx) error {
		return tx.DeleteBucket(name)
	})
}

// Close closes the database for future reads and writes.
func (db db) Close() error {
	return db.internal.Close()
}
// table implements the Table interface on top of a named top-level bolt bucket.
type table struct {
	internal *bolt.DB
	Name     []byte // name of the backing bolt bucket
}

// GetName returns the name of the table.
func (t table) GetName() []byte {
	return t.Name
}
// WriteRows writes a list of rows to the table overwriting any existing values that overlap.
// Each row is stored as a sub-bucket of the table keyed by its RowKey, and
// each column family as a nested sub-bucket of the row.
func (t table) WriteRows(rows ...Row) error {
	return t.internal.Update(func(tx *bolt.Tx) error {
		tbl := tx.Bucket(t.Name)
		if tbl == nil {
			return errors.New("Table does not exist")
		}
		for _, r := range rows {
			row, err := tbl.CreateBucketIfNotExists(r.RowKey)
			if err != nil {
				return err
			}
			for i := range r.ColumnFamilies {
				cf, err := row.CreateBucketIfNotExists(r.ColumnFamilies[i].Name)
				if err != nil {
					return err
				}
				for _, c := range r.ColumnFamilies[i].Columns {
					if err := cf.Put(c.Key, c.Value); err != nil {
						return err
					}
				}
			}
		}
		return nil
	})
}
// WriteColumns writes columns to a row and column family, creating the row
// and the column family if they do not exist.
func (t table) WriteColumns(r []byte, cf []byte, cols ...Column) error {
	return t.internal.Update(func(tx *bolt.Tx) error {
		tbl := tx.Bucket(t.Name)
		if tbl == nil {
			return errors.New("Table does not exist")
		}

		// Distinct names for the buckets avoid shadowing the cf parameter.
		rowBucket, err := tbl.CreateBucketIfNotExists(r)
		if err != nil {
			return err
		}
		cfBucket, err := rowBucket.CreateBucketIfNotExists(cf)
		if err != nil {
			return err
		}

		for _, c := range cols {
			if err := cfBucket.Put(c.Key, c.Value); err != nil {
				return err
			}
		}
		return nil
	})
}
// IncrementColumn increments the value of a column by v, creating the row and
// column family if they do not exist (returning an error if that fails). A
// missing column starts from zero. An existing value must be exactly 8 bytes
// (a little-endian uint64) or an error is returned.
func (t table) IncrementColumn(r, cf, c []byte, v int) error {
	return t.internal.Update(func(tx *bolt.Tx) error {
		tbl := tx.Bucket(t.Name)
		if tbl == nil {
			return errors.New("Table does not exist")
		}
		row, err := tbl.CreateBucketIfNotExists(r)
		if err != nil {
			return err
		}
		cfBucket, err := row.CreateBucketIfNotExists(cf)
		if err != nil {
			return err
		}

		var current uint64
		val := cfBucket.Get(c)
		switch {
		case val == nil:
			current = 0
		case len(val) != 8:
			return errors.New("Invalid value length")
		default:
			current = binary.LittleEndian.Uint64(val)
		}

		// Write into a fresh buffer: the slice returned by Get points into
		// bolt's read-only memory map and must never be mutated in place
		// (the previous code wrote the incremented value back into it).
		buf := make([]byte, 8)
		binary.LittleEndian.PutUint64(buf, current+uint64(v))
		return cfBucket.Put(c, buf)
	})
}
// ReadRow reads a single row from the table. Returns an error if the table
// or the row does not exist.
func (t table) ReadRow(key []byte) (Row, error) {
	var r Row
	r.RowKey = key
	err := t.internal.View(func(tx *bolt.Tx) error {
		tbl := tx.Bucket(t.Name)
		if tbl == nil {
			return errors.New("Table does not exist")
		}
		row := tbl.Bucket(key)
		if row == nil {
			return errors.New("Row does not exist")
		}
		row.ForEach(func(cfname, val []byte) error {
			// Sub-buckets (val == nil) are column families; plain key/value
			// pairs are skipped.
			if val == nil {
				cf := row.Bucket(cfname)
				var cols []Column
				cf.ForEach(func(ck, cv []byte) error {
					// Copy keys and values: bolt byte slices are only valid
					// for the life of the transaction, but this Row is
					// returned to the caller after the transaction closes.
					cols = append(cols, Column{
						Key:   append([]byte(nil), ck...),
						Value: append([]byte(nil), cv...),
					})
					return nil
				})
				r.ColumnFamilies = append(r.ColumnFamilies, ColumnFamily{
					Name:    append([]byte(nil), cfname...),
					Columns: cols,
				})
			}
			return nil
		})
		return nil
	})
	return r, err
}
// ScanRows scans the table for all rows whose key has the given prefix.
// The channel is closed when the scan completes; all rows should be read
// from the channel to avoid leaking the goroutine.
func (t table) ScanRows(prefix []byte) chan Row {
	out := make(chan Row, 1)
	go func() {
		defer close(out)
		t.internal.View(func(tx *bolt.Tx) error {
			tbl := tx.Bucket(t.Name)
			if tbl == nil {
				return errors.New("Table does not exist")
			}
			c := tbl.Cursor()
			// The explicit k != nil check terminates the scan when the
			// cursor is exhausted; without it an empty prefix matched the
			// nil key forever and the loop never ended.
			for k, v := c.Seek(prefix); k != nil && bytes.HasPrefix(k, prefix); k, v = c.Next() {
				// Sub-buckets (v == nil) are rows; plain values are skipped.
				if v != nil {
					continue
				}
				row := tbl.Bucket(k)
				if row == nil {
					continue
				}
				var r Row
				// Copy all keys and values out of the transaction: bolt byte
				// slices are only valid while the transaction is open, and
				// the receiver may read from the channel after it closes.
				r.RowKey = append([]byte(nil), k...)
				row.ForEach(func(cfname, val []byte) error {
					if val == nil {
						cf := row.Bucket(cfname)
						var cols []Column
						cf.ForEach(func(ck, cv []byte) error {
							cols = append(cols, Column{
								Key:   append([]byte(nil), ck...),
								Value: append([]byte(nil), cv...),
							})
							return nil
						})
						r.ColumnFamilies = append(r.ColumnFamilies, ColumnFamily{
							Name:    append([]byte(nil), cfname...),
							Columns: cols,
						})
					}
					return nil
				})
				out <- r
			}
			return nil
		})
	}()
	return out
}
func (t table) ScanColumns(r, cfname []byte, prefix []byte) chan Column {
out := make(chan Column, 1)
go func() {
t.internal.View(func(tx *bolt.Tx) error {
tbl := tx.Bucket(t.Name)
if tbl == nil {
return errors.New("Table does not exist")
}
row := tbl.Bucket(r)
if row == nil {
return errors.New("Row does not exist")
}
cf := row.Bucket(cfname)
if cf == nil {
return errors.New("Column Family does not exist")
}
c := cf.Cursor()
for k, v := c.Seek(prefix); bytes.HasPrefix(k, prefix); k, v = c.Next() {
out <- Column{k, v}
}
return nil
})
close(out)
}()
return out
} | tinytable.go | 0.714728 | 0.416114 | tinytable.go | starcoder |
package scoreboard
import (
"fmt"
"strings"
)
// Scoreboard represents a scoreboard that may be sent to a player. The scoreboard is shown on the right side
// of the player's screen.
type Scoreboard struct {
	name  string   // display name shown at the top of the scoreboard
	lines []string // display lines in insertion order, already padded by pad()
}
// New returns a new scoreboard with the display name passed. Once returned, lines may be added to the
// scoreboard to add text to it. The name is formatted according to the rules of fmt.Sprintln.
// Changing the scoreboard after sending it to a player will not update the scoreboard of the player
// automatically: Player.SendScoreboard() must be called again to update it.
func New(name ...interface{}) *Scoreboard {
	board := Scoreboard{name: format(name)}
	return &board
}

// Name returns the display name of the scoreboard, as passed during the construction of the scoreboard.
func (board *Scoreboard) Name() string {
	return board.name
}
// Add adds a new line to the scoreboard using the content passed. The values passed are formatted according
// to the rules of fmt.Sprintln.
func (board *Scoreboard) Add(a ...interface{}) *Scoreboard {
	line := board.pad(format(a))
	board.lines = append(board.lines, line)
	return board
}

// Addf adds a new line to the scoreboard using a custom format. The formatting specifiers are the same as
// those of fmt.Sprintf.
func (board *Scoreboard) Addf(format string, a ...interface{}) *Scoreboard {
	line := board.pad(fmt.Sprintf(format, a...))
	board.lines = append(board.lines, line)
	return board
}
// Set sets a line on the scoreboard to a new value passed, formatting the values according to the rules of
// fmt.Sprintln. Set panics if the index passed is out of range: New lines must be added using Scoreboard.Add.
func (board *Scoreboard) Set(index int, a ...interface{}) *Scoreboard {
	board.mustContainIndex(index)
	board.lines[index] = board.pad(format(a))
	return board
}

// Setf sets a line on the scoreboard to a new value passed, formatting the values according to the rules of
// fmt.Sprintf with a custom format. Setf panics if the index passed is out of range: New lines must be added
// using Scoreboard.Addf.
func (board *Scoreboard) Setf(index int, format string, a ...interface{}) *Scoreboard {
	board.mustContainIndex(index)
	board.lines[index] = board.pad(fmt.Sprintf(format, a...))
	return board
}

// mustContainIndex panics with the canonical out-of-range message unless index refers to an existing line.
// It centralises the bounds check previously duplicated in Set and Setf.
func (board *Scoreboard) mustContainIndex(index int) {
	if index >= len(board.lines) || index < 0 {
		panic(fmt.Sprintf("scoreboard: index out of range: index %v is not valid for scoreboard of size %v", index, len(board.lines)))
	}
}
// Remove removes the line with the index passed and shifts down all lines after it. Remove panics if the
// index passed is out of range.
func (board *Scoreboard) Remove(index int) *Scoreboard {
	if index < 0 || index >= len(board.lines) {
		panic(fmt.Sprintf("scoreboard: index out of range: index %v is not valid for scoreboard of size %v", index, len(board.lines)))
	}
	board.lines = append(board.lines[:index], board.lines[index+1:]...)
	return board
}

// RemoveLast removes the last line of the scoreboard. Nothing happens if the scoreboard is empty.
func (board *Scoreboard) RemoveLast() *Scoreboard {
	if n := len(board.lines); n > 0 {
		board.lines = board.lines[:n-1]
	}
	return board
}

// Clear clears all lines from the scoreboard and resets it to its state directly after initialising the
// scoreboard.
func (board *Scoreboard) Clear() *Scoreboard {
	board.lines = nil
	return board
}
// Lines returns a list of all lines of the scoreboard. The order is the order in which they were added using
// Scoreboard.Add().
func (board *Scoreboard) Lines() []string {
	// NOTE(review): the internal slice is returned without copying, so the
	// caller can mutate the scoreboard's lines through it — confirm intended.
	return board.lines
}

// pad pads the string passed for as much as needed to achieve the same length as the name of the scoreboard.
// If the string passed is already of the same length as the name of the scoreboard or longer, the string will
// receive one space of padding.
func (board *Scoreboard) pad(s string) string {
	// NOTE(review): lengths here are measured in bytes, not runes, so
	// multi-byte characters make the computed padding longer than the
	// visible text — confirm whether that matters for display.
	if len(board.name)-len(s)-2 <= 0 {
		return " " + s + " "
	}
	return " " + s + strings.Repeat(" ", len(board.name)-len(s)-2)
}
// format is a utility function to format a list of values to have spaces between them, but no newline at the
// end, which is typically used for sending messages, popups and tips.
func format(a []interface{}) string {
	// Sprintln joins the values with single spaces and appends exactly one
	// newline, which is stripped again here.
	joined := fmt.Sprintln(a...)
	return strings.TrimSuffix(joined, "\n")
}
package main
// We want to identify in this test the performance differences by accessing a struct by array value vs initializing it at
// the end and inserting it at that time. We use the orderbook.Delta structure to benchmark these both methods.
import (
"fmt"
"time"
"github.com/gsalaz98/roadkill/orderbook"
)
// average returns the arithmetic mean of looptime as a float64.
// An empty (or nil) slice yields 0 rather than NaN from a 0/0 division.
func average(looptime []uint64) float64 {
	if len(looptime) == 0 {
		return 0
	}
	var total uint64
	for _, v := range looptime {
		total += v
	}
	return float64(total) / float64(len(looptime))
}
// minMax returns the smallest and largest values in array.
// An empty (or nil) slice yields (0, 0) instead of panicking on array[0].
func minMax(array []uint64) (uint64, uint64) {
	if len(array) == 0 {
		return 0, 0
	}
	min, max := array[0], array[0]
	for _, value := range array[1:] {
		if value < min {
			min = value
		}
		if value > max {
			max = value
		}
	}
	return min, max
}
// main times two ways of populating a slice of orderbook.Delta structs 100
// times each and prints per-iteration and summary statistics:
//  1. initialize each element with a literal, then assign fields through
//     the slice ("array access"),
//  2. build locals first, then construct the struct in one literal
//     ("struct jit init").
func main() {
	// Per-iteration timings in nanoseconds for each strategy.
	loop1 := make([]uint64, 100)
	loop2 := make([]uint64, 100)
	for x := 0; x < 100; x++ {
		// Strategy 1: placeholder literal, then per-field assignment.
		start := time.Now()
		for i := 0; i < 1; i++ {
			deltas := make([]orderbook.Delta, 10)
			for j := 0; j < 10; j++ {
				deltas[j] = orderbook.Delta{
					TimeDelta: uint64(time.Now().UnixNano()),
					Seq:       0,
					Event:     0,
					Price:     0,
					Size:      0,
				}
				deltas[j].Seq = 100
				deltas[j].Event = 100
				deltas[j].Price = 50.43
				deltas[j].Size = 0.04
			}
		}
		end := time.Now().Sub(start)
		loop1[x] = uint64(end / time.Nanosecond)
		fmt.Println("array access: ", end)

		// Strategy 2: locals first, single struct literal at the end.
		start = time.Now()
		for i := 0; i < 1; i++ {
			deltas := make([]orderbook.Delta, 10)
			for j := 0; j < 10; j++ {
				// NOTE(review): this local shadows the `time` package for
				// the rest of the loop body.
				time := uint64(time.Now().UnixNano())
				var seq uint64 = 100
				var event uint8 = 100
				price := 50.43
				size := 0.04
				deltas[j] = orderbook.Delta{
					TimeDelta: time,
					Seq:       seq,
					Event:     event,
					Price:     price,
					Size:      size,
				}
			}
		}
		end = time.Now().Sub(start)
		loop2[x] = uint64(end / time.Nanosecond)
		fmt.Println("struct jit init: ", end)
		fmt.Println("==================")
	}

	// Summarize both timing series.
	loop1Min, loop1Max := minMax(loop1)
	fmt.Printf("Loop1 Average: %f, min: %d, max: %d, std: None\n", average(loop1), loop1Min, loop1Max)
	loop2Min, loop2Max := minMax(loop2)
	fmt.Printf("Loop2 Average: %f, min: %d, max: %d, std: None\n", average(loop2), loop2Min, loop2Max)
}
// go compiler has no difference between these two methods. Perhaps optimizing for gccgo might be a mistake? | playground/efficiency/struct_gc/struct_access.go | 0.570212 | 0.455199 | struct_access.go | starcoder |
package gkgen
import (
"errors"
"fmt"
"reflect"
)
// NotEqualValidator generates code that will verify a field does not equal a set value.
// The validator will look at the field or the dereferenced value of the field.
// nil values for a field are not considered invalid.
type NotEqualValidator struct {
	name string // validation tag name this generator handles
}

// NewNotEqualValidator returns a NotEqualValidator registered under the tag name "NotEqual".
func NewNotEqualValidator() *NotEqualValidator {
	v := NotEqualValidator{}
	v.name = "NotEqual"
	return &v
}
// Generate generates validation code enforcing the NotEqual rule for the
// given struct field.
//
// params must hold exactly one element: the literal the field must not equal.
// The emitted snippet appends to the errors<Field> slice when the field — or,
// for pointer fields, the non-nil dereferenced value — equals that literal;
// nil pointers generate no error. Numeric kinds are compared against the bare
// literal, strings against the quoted literal; any other kind is an error.
func (s *NotEqualValidator) Generate(sType reflect.Type, fieldStruct reflect.StructField, params []string) (string, error) {
	if len(params) != 1 {
		return "", errors.New("NotEqual validation requires exactly 1 parameter")
	}
	restrictedValue := params[0]
	field := fieldStruct.Type
	switch field.Kind() {
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32,
		reflect.Uint64, reflect.Uintptr, reflect.Float32, reflect.Float64, reflect.Complex64, reflect.Complex128:
		return fmt.Sprintf(`
	if s.%[1]s == %[2]s {
		errors%[1]s = append(errors%[1]s, errors.New("%[1]s cannot equal '%[2]s'"))
	}`, fieldStruct.Name, restrictedValue), nil
	case reflect.String:
		return fmt.Sprintf(`
	if s.%[1]s == "%[2]s" {
		errors%[1]s = append(errors%[1]s, errors.New("%[1]s cannot equal '%[2]s'"))
	}`, fieldStruct.Name, restrictedValue), nil
	case reflect.Ptr:
		// Dereference one level and validate the pointed-to kind; the
		// generated code checks for nil before dereferencing.
		field = field.Elem()
		switch field.Kind() {
		case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32,
			reflect.Uint64, reflect.Uintptr, reflect.Float32, reflect.Float64, reflect.Complex64, reflect.Complex128:
			return fmt.Sprintf(`
	if s.%[1]s != nil && *s.%[1]s == %[2]s {
		errors%[1]s = append(errors%[1]s, errors.New("%[1]s cannot equal '%[2]s'"))
	}`, fieldStruct.Name, restrictedValue), nil
		case reflect.String:
			return fmt.Sprintf(`
	if s.%[1]s != nil && *s.%[1]s == "%[2]s" {
		errors%[1]s = append(errors%[1]s, errors.New("%[1]s cannot equal '%[2]s'"))
	}`, fieldStruct.Name, restrictedValue), nil
		default:
			return "", fmt.Errorf("NotEqual does not work on type '%s'", field.Kind())
		}
	default:
		return "", fmt.Errorf("NotEqual does not work on type '%s'", field.Kind())
	}
}
// Name provides access to the name field
func (s *NotEqualValidator) Name() string {
return s.name
} | gkgen/not_equal.go | 0.680985 | 0.479686 | not_equal.go | starcoder |
package grid
import (
"image"
"image/draw"
)
// Grid List of ImageTile laid out on a rectangular canvas.
// NOTE(review): Merge lays out RowNb tiles per row (horizontal count) and
// ColumnNb rows (vertical count); the field names read as if inverted —
// confirm the intended meaning with the callers.
type Grid struct {
	Tiles    []*ImageTile
	RowNb    int
	ColumnNb int
}
// New creates a Grid of row x column tiles backed by the given image files.
// Tiles start in the non-flipped state.
func New(imageFilePaths []string, row int, column int) *Grid {
	tiles := make([]*ImageTile, 0, len(imageFilePaths))
	for _, path := range imageFilePaths {
		tiles = append(tiles, &ImageTile{
			ImageFilePath: path,
			Flipped:       false,
		})
	}
	return &Grid{
		Tiles:    tiles,
		RowNb:    row,
		ColumnNb: column,
	}
}
// Merge draws all tiles onto a single canvas, RowNb tiles per row and
// ColumnNb rows, in slice order (left to right, top to bottom), and returns
// the composed image. All tiles are assumed to share the bounds of the
// first tile.
func (g *Grid) Merge() (*image.NRGBA, error) {
	imageBoundX := g.Tiles[0].Image.Bounds().Dx()
	imageBoundY := g.Tiles[0].Image.Bounds().Dy()

	canvasBoundX := g.RowNb * imageBoundX
	canvasBoundY := g.ColumnNb * imageBoundY
	canvas := image.NewNRGBA(image.Rectangle{image.Point{0, 0}, image.Point{canvasBoundX, canvasBoundY}})

	// draw tiles one by one
	for i, tile := range g.Tiles {
		x := i % g.RowNb
		// The row index must advance once every RowNb tiles. Dividing by
		// ColumnNb (as before) is only correct for square grids and made
		// tiles overlap or fall outside the canvas otherwise.
		y := i / g.RowNb
		minPoint := image.Point{x * imageBoundX, y * imageBoundY}
		maxPoint := minPoint.Add(image.Point{imageBoundX, imageBoundY})
		draw.Draw(canvas, image.Rectangle{minPoint, maxPoint}, tile.Image, image.Point{}, draw.Src)
	}
	return canvas, nil
}
// ExecOnTilePermutation Execute f function on each permutation of tiles.
// The Tiles slice is permuted in place while enumerating; swaps are undone
// as the recursion unwinds, so the original order is restored on return.
func (g *Grid) ExecOnTilePermutation(f func(*Grid)) {
	perm(g, f, 0)
}
// Permute the values at index i to len(a)-1, invoking f once per ordering.
// The base case is i > len(grid.Tiles): when i == len, the loop body below
// does not execute and a single extra recursion reaches the call to f.
// Each swap is reverted after the recursive call, leaving the slice order
// unchanged once the top-level call returns.
func perm(grid *Grid, f func(*Grid), i int) {
	if i > len(grid.Tiles) {
		f(grid)
	} else {
		perm(grid, f, i+1)
		for j := i + 1; j < len(grid.Tiles); j++ {
			grid.Tiles[i], grid.Tiles[j] = grid.Tiles[j], grid.Tiles[i]
			perm(grid, f, i+1)
			grid.Tiles[i], grid.Tiles[j] = grid.Tiles[j], grid.Tiles[i]
		}
	}
}
// flipAccordingToMask synchronises each tile's flipped state with the
// corresponding bit of mask (bit i controls Tiles[i]), calling Upturn only
// when the tile's current state differs from the requested one.
func (g *Grid) flipAccordingToMask(mask int) {
	for i := 0; i < len(g.Tiles); i++ {
		currentTile := g.Tiles[i]
		flipped := mask&(1<<i) != 0
		if flipped != currentTile.Flipped {
			currentTile.Upturn()
		}
	}
}
// ExecOnTileFlipCombination Execute f function on each flip combination of tiles.
func (g *Grid) ExecOnTileFlipCombination(f func(*Grid)) {
	length := len(g.Tiles)
	// Enumerate the 2^length flip masks exactly once. The original bound of
	// 2<<length equals 2^(length+1): flipAccordingToMask never inspects bit
	// `length`, so every combination was visited (and f invoked) twice.
	for mask := 0; mask < 1<<length; mask++ {
		g.flipAccordingToMask(mask)
		f(g)
	}
} | grid/grid.go | 0.698432 | 0.474327 | grid.go | starcoder
package data
import (
"math"
"reflect"
"sort"
"strconv"
"strings"
)
// Value represents a Soy data value, which may be one of the enumerated types.
// It is implemented by Undefined, Null, Bool, Int, Float, String, List and
// Map, declared below.
type Value interface {
	// Truthy returns true according to the Soy definition of truthy and falsy values.
	Truthy() bool
	// String formats this value for display in a template.
	String() string
	// Equals returns true if the two values are equal. Specifically, if:
	// - They are comparable: they have the same Type, or they are Int and Float
	// - (Primitives) They have the same value
	// - (Lists, Maps) They are the same instance
	// Uncomparable types and unequal values return false.
	Equals(other Value) bool
}
type (
	// Undefined represents a missing value (e.g. an absent map key or an
	// out-of-bounds list index).
	Undefined struct{}
	// Null is the explicit null value.
	Null struct{}
	// Bool is a boolean value.
	Bool bool
	// Int is a 64-bit integer value.
	Int int64
	// Float is a 64-bit floating-point value.
	Float float64
	// String is a string value.
	String string
	// List is an ordered sequence of values.
	List []Value
	// Map associates string keys with values.
	Map map[string]Value
)
// Index retrieves the value at position i, or Undefined if i is out of
// bounds (negative or >= len).
func (v List) Index(i int) Value {
	if i < 0 || i >= len(v) {
		return Undefined{}
	}
	return v[i]
}
// Key retrieves the value stored under the named key, or Undefined if the
// key is absent.
func (v Map) Key(k string) Value {
	if result, ok := v[k]; ok {
		return result
	}
	return Undefined{}
}
// Truthy ----------

func (v Undefined) Truthy() bool { return false }
func (v Null) Truthy() bool      { return false }
func (v Bool) Truthy() bool      { return bool(v) }
func (v Int) Truthy() bool       { return v != 0 }

// Truthy reports whether the float is truthy: zero and NaN are falsy.
// The original compared with `float64(v) != math.NaN()`, which is always
// true because NaN is unordered (x != NaN for every x, including NaN), so
// NaN values were incorrectly treated as truthy.
func (v Float) Truthy() bool { return v != 0.0 && !math.IsNaN(float64(v)) }

func (v String) Truthy() bool { return v != "" }
func (v List) Truthy() bool   { return true }
func (v Map) Truthy() bool    { return true }
// String ----------
// Note: Undefined deliberately panics — coercing an undefined value into a
// string is treated as a programming error, unlike Null which renders as
// the literal "null".

func (v Undefined) String() string { panic("Attempted to coerce undefined value into a string.") }
func (v Null) String() string { return "null" }
func (v Bool) String() string { return strconv.FormatBool(bool(v)) }
func (v Int) String() string { return strconv.FormatInt(int64(v), 10) }
// 'g' with precision -1 yields the shortest representation that round-trips.
func (v Float) String() string { return strconv.FormatFloat(float64(v), 'g', -1, 64) }
func (v String) String() string { return string(v) }
// String renders the list as "[a, b, c]", formatting each element with its
// own String method.
func (v List) String() string {
	var b strings.Builder
	b.WriteByte('[')
	for i, item := range v {
		if i > 0 {
			b.WriteString(", ")
		}
		b.WriteString(item.String())
	}
	b.WriteByte(']')
	return b.String()
}
// String renders the map as "{k1: v1, k2: v2}" with entries sorted by their
// rendered "key: value" text so the output is deterministic despite Go's
// random map iteration order. Undefined values are rendered as the literal
// "undefined" rather than panicking (Undefined.String would panic).
func (v Map) String() string {
	var items = make([]string, len(v))
	var i = 0
	for k, v := range v {
		var vstr string
		if _, ok := v.(Undefined); ok {
			vstr = "undefined" // have mercy
		} else {
			vstr = v.String()
		}
		items[i] = k + ": " + vstr
		i++
	}
	sort.Strings(items)
	return "{" + strings.Join(items, ", ") + "}"
}
// Equals ----------
// Primitives compare by value; List and Map compare by identity (same
// underlying instance, via pointer comparison through reflect); Int and
// Float are mutually comparable as float64.

func (v Undefined) Equals(other Value) bool {
	_, ok := other.(Undefined)
	return ok
}
func (v Null) Equals(other Value) bool {
	_, ok := other.(Null)
	return ok
}
func (v Bool) Equals(other Value) bool {
	if o, ok := other.(Bool); ok {
		return bool(v) == bool(o)
	}
	return false
}
func (v String) Equals(other Value) bool {
	if o, ok := other.(String); ok {
		return string(v) == string(o)
	}
	return false
}
func (v List) Equals(other Value) bool {
	// Identity comparison: true only when both share the same backing array
	// start, per the interface contract ("same instance").
	if o, ok := other.(List); ok {
		return reflect.ValueOf(v).Pointer() == reflect.ValueOf(o).Pointer()
	}
	return false
}
func (v Map) Equals(other Value) bool {
	// Identity comparison, as for List.
	if o, ok := other.(Map); ok {
		return reflect.ValueOf(v).Pointer() == reflect.ValueOf(o).Pointer()
	}
	return false
}
func (v Int) Equals(other Value) bool {
	switch o := other.(type) {
	case Int:
		return v == o
	case Float:
		return float64(v) == float64(o)
	}
	return false
}
func (v Float) Equals(other Value) bool {
	switch o := other.(type) {
	case Int:
		return float64(v) == float64(o)
	case Float:
		return v == o
	}
	return false
} | data/value.go | 0.754915 | 0.449332 | value.go | starcoder
package parse
import (
"fmt"
"reflect"
"strconv"
)
// Pre-computed reflect.Types for the supported slice element kinds.
// NOTE(review): none of these are referenced in this file's visible code —
// presumably they are used elsewhere in the package; verify before removing.
var (
	stringSliceType = reflect.TypeOf([]string{})
	boolSliceType = reflect.TypeOf([]bool{})
	intSliceType = reflect.TypeOf([]int{})
	int8SliceType = reflect.TypeOf([]int8{})
	int16SliceType = reflect.TypeOf([]int16{})
	int32SliceType = reflect.TypeOf([]int32{})
	int64SliceType = reflect.TypeOf([]int64{})
	uintSliceType = reflect.TypeOf([]uint{})
	uint8SliceType = reflect.TypeOf([]uint8{})
	uint16SliceType = reflect.TypeOf([]uint16{})
	uint32SliceType = reflect.TypeOf([]uint32{})
	float32SliceType = reflect.TypeOf([]float32{})
	float64SliceType = reflect.TypeOf([]float64{})
	complex64SliceType = reflect.TypeOf([]complex64{})
	complex128SliceType = reflect.TypeOf([]complex128{})
)
// String casts the provided string into the provided type, returning the
// result in a reflect.Value.
//
// Note: the scalar branches (string, bool, numbers via parseNumber) return a
// pointer to the converted value (reflect.ValueOf(&x)), whereas the slice
// and map branches return the value itself — callers must account for this
// asymmetry. NOTE(review): confirm this is intentional rather than an
// oversight; it is preserved here as existing callers may rely on it.
func String(str string, t reflect.Type) (reflect.Value, error) {
	switch t.Kind() {
	case reflect.String:
		return reflect.ValueOf(&str), nil
	case reflect.Bool:
		converted, err := strconv.ParseBool(str)
		if err != nil {
			return reflect.Value{}, err
		}
		return reflect.ValueOf(&converted), nil
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32,
		reflect.Int64, reflect.Uint, reflect.Uint8, reflect.Uint16,
		reflect.Uint32, reflect.Uint64, reflect.Float32, reflect.Float64,
		reflect.Complex64, reflect.Complex128:
		// All numeric kinds are delegated to a shared helper.
		return parseNumber(str, t)
	case reflect.Slice:
		// Split the input into string elements first, then (if needed)
		// recursively cast each element to the slice's element type.
		converted, err := StringSlice(str)
		if err != nil {
			return reflect.Value{}, err
		}
		convertedVal := reflect.ValueOf(converted)
		if convertedVal.Type() == t {
			return convertedVal, nil
		}
		castSlice := reflect.MakeSlice(t, 0, len(converted))
		for idx, strVal := range converted {
			castVal, parseErr := String(strVal, t.Elem())
			if parseErr != nil {
				return reflect.Value{}, fmt.Errorf("parse error of item %d %q: %s", idx, strVal, parseErr)
			}
			castSlice = reflect.Append(castSlice, castVal.Elem())
		}
		return castSlice, nil
	case reflect.Map:
		switch t {
		// Two map shapes get dedicated parsers; anything else goes through
		// the generic Map helper after a key/value kind check.
		case reflect.TypeOf(map[string][]string{}):
			converted, err := StringStringSliceMap(str)
			if err != nil {
				return reflect.Value{}, err
			}
			return reflect.ValueOf(converted), nil
		case reflect.TypeOf(map[string]struct{}{}):
			converted, err := StringSet(str)
			if err != nil {
				return reflect.Value{}, err
			}
			return reflect.ValueOf(converted), nil
		default:
			keyKind := t.Key().Kind()
			valKind := t.Elem().Kind()
			err := checkKindsSupported(keyKind, valKind)
			if err != nil {
				return reflect.Value{}, fmt.Errorf("Unsupported map type: %v", t)
			}
			converted, err := Map(str, t)
			if err != nil {
				return reflect.Value{}, err
			}
			return converted, nil
		}
	default:
		// If the type of the original StructField is unsupported, return an error.
		return reflect.Value{}, fmt.Errorf("Value %q cannot be translated to kind %q", str, t.Kind())
	}
}
// checkKindsSupported reports whether every supplied reflect.Kind is one of
// the scalar kinds this package can parse, returning an error naming the
// first unsupported kind encountered (or nil when all are supported).
func checkKindsSupported(kinds ...reflect.Kind) error {
	for _, k := range kinds {
		switch k {
		case reflect.String, reflect.Bool, reflect.Int, reflect.Float64,
			reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
			reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32,
			reflect.Uint64, reflect.Float32, reflect.Complex64,
			reflect.Complex128:
			// no-op
		default:
			return fmt.Errorf("Kind %v not supported", k)
		}
	}
	return nil
} | parse/parse_string.go | 0.59843 | 0.435301 | parse_string.go | starcoder
package gonet
import (
"fmt"
"log"
"math"
"sync"
"time"
)
// NeuralNet struct is used to represent a simple feed-forward neural network.
// Index 0 of every per-layer slice corresponds to the input layer: zs[0] and
// alphas[0] hold the raw inputs (see Update), and weights/biases/activations
// are only populated for layers 1..numLayers-1 (see Init).
type NeuralNet struct {
	numNodes []int // Number of input, hidden and output nodes
	zs []Vector // weight times input
	alphas []Vector // activation(z)
	biases []Vector // bias values
	activations []*ActivationFunction // the activation function at each layer
	weights []Matrix // Weights for each layer
	changes []Matrix // Last change in weights for momentum
	numLayers int // helpful value to replace len(nn.numNodes)
}
/*
Init initializes the neural network and returns it for chaining.

nodesPerLayer is an array of how many nodes should go into each layer.
activations is the activation function for each layer - do not specify an
activation for the input layer (so it must hold len(nodesPerLayer)-1
entries). Weights and biases for layers 1..n-1 are randomly initialized.
*/
func (nn *NeuralNet) Init(nodesPerLayer []int, activations []*ActivationFunction) *NeuralNet {
	layers := len(nodesPerLayer)
	nn.numLayers = layers
	nn.numNodes = make([]int, layers)
	nn.zs = make([]Vector, layers)
	nn.alphas = make([]Vector, layers)
	nn.biases = make([]Vector, layers)
	nn.activations = make([]*ActivationFunction, layers)
	nn.weights = make([]Matrix, layers)
	nn.changes = make([]Matrix, layers)
	for i := 0; i < layers; i++ {
		nn.numNodes[i] = nodesPerLayer[i]
		nn.zs[i] = new(Vector).Init(nodesPerLayer[i], 0.0)
		nn.alphas[i] = new(Vector).Init(nodesPerLayer[i], 0.0)
		// Layer 0 is the input layer: it has no incoming weights, bias or
		// activation function, so those slots stay at their zero values.
		if i > 0 {
			nn.activations[i] = activations[i-1]
			nn.biases[i] = new(Vector).Init(nodesPerLayer[i], 0.0).RandomFill()
			nn.weights[i] = new(Matrix).Init(nodesPerLayer[i], nodesPerLayer[i-1]).RandomFill()
			nn.changes[i] = new(Matrix).Init(nodesPerLayer[i], nodesPerLayer[i-1])
		}
	}
	return nn
}
/*
Update is used to activate the Neural Network (forward pass).
Given an array of inputs, it returns an array, of length equivalent of number of outputs, with values
ranging from the min to the max of the activation function at the output layer.

Note: if len(inputs) does not match the input-layer size, the whole process
is terminated via log.Fatal.
*/
func (nn *NeuralNet) Update(inputs Vector) Vector {
	if len(inputs) != nn.numNodes[0] {
		log.Fatal("Error: wrong number of inputs")
	}
	// copy inputs into layer 0 (both z and activation are the raw input)
	for i := 0; i < len(inputs); i++ {
		nn.zs[0][i] = inputs[i]
		nn.alphas[0][i] = inputs[i]
	}
	// feedforward through layers: z_n = W_n * a_{n-1} + b_n, a_n = f_n(z_n)
	for n := 1; n < nn.numLayers; n++ {
		nn.zs[n] = nn.weights[n].Apply(nn.alphas[n - 1]).Add(nn.biases[n])
		nn.alphas[n] = nn.activations[n].F(nn.zs[n])
	}
	return nn.alphas[nn.numLayers - 1]
}
/*
The BackPropagate method is used, when training the Neural Network,
to back propagate the errors from network activation.

labels must have one entry per output node (otherwise the process terminates
via log.Fatal). eta is the learning rate and mFactor the momentum factor.
It returns the summed squared error 0.5*sum((label-output)^2) for this sample.
*/
func (nn *NeuralNet) BackPropagate(labels Vector, eta, mFactor float64) float64 {
	outLayer := nn.numLayers - 1
	if len(labels) != nn.numNodes[outLayer] {
		log.Fatal("Error: wrong number of target values")
	}
	// compute deltas, output layer first
	deltas := make([]Vector, nn.numLayers)
	deltas[outLayer] = nn.activations[outLayer].Df(nn.zs[outLayer]).Mult(nn.alphas[outLayer].Sub(labels))
	// NOTE(review): this applies layer n's activation derivative to layer
	// n-1's pre-activations (zs[n-1]); textbook backprop uses
	// activations[n-1].Df here. The two coincide only when all layers share
	// one activation function — confirm intent.
	for n := outLayer; n - 1 > 0; n-- {
		epsilons := nn.weights[n].ReverseApply(deltas[n])
		deltas[n - 1] = nn.activations[n].Df(nn.zs[n - 1]).Mult(epsilons)
	}
	// adjust weights and biases across each layer in parallel
	var wg sync.WaitGroup
	wg.Add(nn.numLayers - 1)
	for i := outLayer; i > 0; i-- {
		n := i // per-goroutine copy of the loop variable
		go func() {
			defer wg.Done()
			momentum := nn.changes[n].Scale(mFactor)
			nn.changes[n] = deltas[n].Cross(nn.alphas[n - 1])
			nn.weights[n] = nn.weights[n].Sub(nn.changes[n].Scale(eta)).Sub(momentum)
			nn.biases[n] = nn.biases[n].Sub(deltas[n].Scale(eta))
		}()
	}
	// Error accumulation only reads alphas, which the goroutines above do
	// not modify, so it can safely overlap with the weight updates.
	var e float64
	for i := 0; i < len(labels); i++ {
		e += 0.5 * math.Pow(labels[i] - nn.alphas[outLayer][i], 2)
	}
	wg.Wait()
	return e
}
/*
Train runs the training operation for 'iterations' epochs over the supplied
inputs/labels and returns the accumulated error recorded at each iteration.
When debug is true, per-iteration progress, error and wall time are printed.
*/
func (nn *NeuralNet) Train(inputs, labels []Vector, iterations int, eta, mFactor float64, debug bool) []float64 {
	errors := make([]float64, iterations)
	for i := 0; i < iterations; i++ {
		start := time.Now()
		var e float64
		// Use a distinct index for the sample loop; the original shadowed
		// the iteration counter i, which is error-prone.
		for j := 0; j < len(inputs); j++ {
			nn.Update(inputs[j])
			e += nn.BackPropagate(labels[j], eta, mFactor)
		}
		errors[i] = e
		elapsed := time.Since(start).Seconds()
		if debug {
			// Scale to 0-100: the message says "percent" but the original
			// printed the raw 0-1 fraction.
			fmt.Printf("%f percent complete - %f MSE - %f time for iteration\n", float64(i)/float64(iterations)*100, e, elapsed)
		}
	}
	return errors
}
// Test prints, for each sample, the input, the network's current output and
// the expected label — a quick visual check of prediction quality.
func (nn *NeuralNet) Test(inputs, labels []Vector) {
	for i := 0; i < len(inputs); i++ {
		fmt.Println(inputs[i], "->", nn.Update(inputs[i]), " : ", labels[i])
	}
} | neuralnet.go | 0.644337 | 0.570092 | neuralnet.go | starcoder
package bigquery
import (
"fmt"
"strings"
SDK "google.golang.org/api/bigquery/v2"
)
// see API documents: https://cloud.google.com/bigquery/docs/reference/rest/v2
// ==========
// Datasets
// ==========
// Note: each wrapper below returns a non-nil *Dataset wrapper even when
// err != nil (the SDK value is wrapped unconditionally), so callers must
// check err before using the result.

// CreateDataset performs Datasets.Insert operation.
// Creates a new empty dataset.
func (b *BigQuery) CreateDataset(dataset *SDK.Dataset) (*Dataset, error) {
	ds, err := b.service.Datasets.Insert(b.projectID, dataset).Do()
	b.logAPIError("Datasets.Insert", err, logArgs("datasetID", dataset.Id))
	return &Dataset{ds}, err
}

// PatchDataset performs Datasets.Patch operation.
// Updates information in an existing dataset. The update method replaces the entire dataset resource, whereas the patch method only replaces fields that are provided in the submitted dataset resource. This method supports patch semantics.
func (b *BigQuery) PatchDataset(datasetID string, dataset *SDK.Dataset) (*Dataset, error) {
	ds, err := b.service.Datasets.Patch(b.projectID, datasetID, dataset).Do()
	b.logAPIError("Datasets.Patch", err, logArgs("datasetID", datasetID))
	return &Dataset{ds}, err
}

// UpdateDataset performs Datasets.Update operation.
// Updates information in an existing dataset. The update method replaces the entire dataset resource, whereas the patch method only replaces fields that are provided in the submitted dataset resource.
func (b *BigQuery) UpdateDataset(datasetID string, dataset *SDK.Dataset) (*Dataset, error) {
	ds, err := b.service.Datasets.Update(b.projectID, datasetID, dataset).Do()
	b.logAPIError("Datasets.Update", err, logArgs("datasetID", datasetID))
	return &Dataset{ds}, err
}

// DeleteDataset performs Datasets.Delete operation.
// Deletes the dataset specified by the datasetId value. Before you can delete a dataset, you must delete all its tables, either manually or by specifying deleteContents. Immediately after deletion, you can create another dataset with the same name.
func (b *BigQuery) DeleteDataset(datasetID string) error {
	err := b.service.Datasets.Delete(b.projectID, datasetID).Do()
	b.logAPIError("Datasets.Delete", err, logArgs("datasetID", datasetID))
	return err
}

// GetDataset performs Datasets.Get operation.
// Returns the dataset specified by datasetID.
func (b *BigQuery) GetDataset(datasetID string) (*Dataset, error) {
	ds, err := b.service.Datasets.Get(b.projectID, datasetID).Do()
	b.logAPIError("Datasets.Get", err, logArgs("datasetID", datasetID))
	return &Dataset{ds}, err
}

// ListDatasets performs Datasets.List operation.
// Lists all datasets in the specified project to which you have been granted the READER dataset role.
func (b *BigQuery) ListDatasets() (*SDK.DatasetList, error) {
	list, err := b.service.Datasets.List(b.projectID).Do()
	b.logAPIError("Datasets.List", err)
	return list, err
}
// ==========
// Jobs
// ==========

// RunJob performs Jobs.Insert operation.
// Starts a new asynchronous job. Requires the Can View project role.
func (b *BigQuery) RunJob(job *SDK.Job) (*SDK.Job, error) {
	j, err := b.service.Jobs.Insert(b.projectID, job).Do()
	b.logAPIError("Jobs.Insert", err)
	return j, err
}

// RunQuery performs Jobs.Query operation.
// Runs a BigQuery SQL query and returns results if the query completes within a specified timeout.
func (b *BigQuery) RunQuery(query *SDK.QueryRequest) (*SDK.QueryResponse, error) {
	resp, err := b.service.Jobs.Query(b.projectID, query).Do()
	b.logAPIError("Jobs.Query", err)
	return resp, err
}

// CancelJob performs Jobs.Cancel operation.
// Requests that a job be cancelled. This call will return immediately, and the client will need to poll for the job status to see if the cancel completed successfully. Cancelled jobs may still incur costs. For more information, see pricing.
func (b *BigQuery) CancelJob(jobID string) (*SDK.JobCancelResponse, error) {
	resp, err := b.service.Jobs.Cancel(b.projectID, jobID).Do()
	b.logAPIError("Jobs.Cancel", err, logArgs("jobID", jobID))
	return resp, err
}

// GetJob performs Jobs.Get operation.
// Returns information about a specific job. Job information is available for a six month period after creation. Requires that you're the person who ran the job, or have the Is Owner project role.
func (b *BigQuery) GetJob(jobID string) (*SDK.Job, error) {
	j, err := b.service.Jobs.Get(b.projectID, jobID).Do()
	b.logAPIError("Jobs.Get", err, logArgs("jobID", jobID))
	return j, err
}

// ListJobs performs Jobs.List operation.
// Lists all jobs that you started in the specified project. Job information is available for a six month period after creation. The job list is sorted in reverse chronological order, by job creation time. Requires the Can View project role, or the Is Owner project role if you set the allUsers property.
func (b *BigQuery) ListJobs() (*SDK.JobList, error) {
	list, err := b.service.Jobs.List(b.projectID).Do()
	b.logAPIError("Jobs.List", err)
	return list, err
}

// GetQueryResults performs Jobs.GetQueryResults operation.
// Retrieves the results of a query job.
func (b *BigQuery) GetQueryResults(jobID string) (*SDK.GetQueryResultsResponse, error) {
	resp, err := b.service.Jobs.GetQueryResults(b.projectID, jobID).Do()
	b.logAPIError("Jobs.GetQueryResults", err, logArgs("jobID", jobID))
	return resp, err
}
// ==========
// Tabledata
// ==========

// InsertAll performs Tabledata.InsertAll operation.
// Streams data into BigQuery one record at a time without needing to run a load job. For more information, see streaming data into BigQuery.
func (b *BigQuery) InsertAll(datasetID string, tableID string, rows *SDK.TableDataInsertAllRequest) (*SDK.TableDataInsertAllResponse, error) {
	resp, err := b.service.Tabledata.InsertAll(b.projectID, datasetID, tableID, rows).Do()
	b.logAPIError("Tabledata.InsertAll", err, logArgs("datasetID", datasetID), logArgs("tableID", tableID))
	return resp, err
}

// GetTableData performs Tabledata.List operation.
// Retrieves table data from a specified set of rows. Requires the READER dataset role.
func (b *BigQuery) GetTableData(datasetID string, tableID string) (*SDK.TableDataList, error) {
	list, err := b.service.Tabledata.List(b.projectID, datasetID, tableID).Do()
	b.logAPIError("Tabledata.List", err, logArgs("datasetID", datasetID), logArgs("tableID", tableID))
	return list, err
}
// ==========
// Table
// ==========

// CreateTable performs Table.Insert operation.
// Creates a new, empty table in the dataset.
func (b *BigQuery) CreateTable(datasetID string, tbl *SDK.Table) (*Table, error) {
	t, err := b.service.Tables.Insert(b.projectID, datasetID, tbl).Do()
	b.logAPIError("Table.Insert", err, logArgs("datasetID", datasetID))
	return &Table{t}, err
}

// PatchTable performs Tables.Patch operation.
// Updates information in an existing table. The update method replaces the entire table resource, whereas the patch method only replaces fields that are provided in the submitted table resource. This method supports patch semantics.
func (b *BigQuery) PatchTable(datasetID string, tableID string, tbl *SDK.Table) (*Table, error) {
	t, err := b.service.Tables.Patch(b.projectID, datasetID, tableID, tbl).Do()
	b.logAPIError("Table.Patch", err, logArgs("datasetID", datasetID), logArgs("tableID", tableID))
	return &Table{t}, err
}

// UpdateTable performs Tables.Update operation.
// Updates information in an existing table. The update method replaces the entire table resource, whereas the patch method only replaces fields that are provided in the submitted table resource.
func (b *BigQuery) UpdateTable(datasetID string, tableID string, tbl *SDK.Table) (*Table, error) {
	t, err := b.service.Tables.Update(b.projectID, datasetID, tableID, tbl).Do()
	b.logAPIError("Table.Update", err, logArgs("datasetID", datasetID), logArgs("tableID", tableID))
	return &Table{t}, err
}

// DropTable performs Tables.Delete operation.
// Deletes the table specified by tableId from the dataset. If the table contains data, all the data will be deleted.
func (b *BigQuery) DropTable(datasetID string, tableID string) error {
	err := b.service.Tables.Delete(b.projectID, datasetID, tableID).Do()
	b.logAPIError("Table.Delete", err, logArgs("datasetID", datasetID), logArgs("tableID", tableID))
	return err
}

// GetTable performs Tables.Get operation.
// Gets the specified table resource by table ID. This method does not return the data in the table, it only returns the table resource, which describes the structure of this table.
func (b *BigQuery) GetTable(datasetID string, tableID string) (*Table, error) {
	t, err := b.service.Tables.Get(b.projectID, datasetID, tableID).Do()
	b.logAPIError("Table.Get", err, logArgs("datasetID", datasetID), logArgs("tableID", tableID))
	return &Table{t}, err
}

// ListTables performs Tables.List operation.
// Lists all tables in the specified dataset. Requires the READER dataset role.
// NOTE(review): the tableID parameter is never passed to the API call — it
// only appears in the error log. Confirm whether it should be removed from
// the signature (callers permitting) or actually used.
func (b *BigQuery) ListTables(datasetID string, tableID string) (*SDK.TableList, error) {
	list, err := b.service.Tables.List(b.projectID, datasetID).Do()
	b.logAPIError("Table.List", err, logArgs("datasetID", datasetID), logArgs("tableID", tableID))
	return list, err
}
// logAPIError logs the failed operation name together with the error, the
// project ID and any extra "key=[value]" pairs (built via logArgs).
// It is a no-op when err is nil.
func (b *BigQuery) logAPIError(apiName string, err error, opts ...string) {
	if err == nil {
		return
	}
	msg := fmt.Sprintf("error on `%s` operation; error=[%s] projectID=[%s],", apiName, err.Error(), b.projectID)
	if len(opts) != 0 {
		msg = fmt.Sprintf("%s %s", msg, strings.Join(opts, " "))
	}
	b.Errorf(msg)
}
// logArgs formats a key/value pair as "key=[value]" for logAPIError's opts.
func logArgs(key, value string) string {
	return fmt.Sprintf("%s=[%s]", key, value)
} | bigquery/bigquery_api.go | 0.736969 | 0.578091 | bigquery_api.go | starcoder
// Package derefer contains helper routines for simplifying the getting of
// optional fields of basic type. This allows you to get the value from the
// pointer even if it is nil, because in this case the zero value of the
// specified type will be received.
package derefer
import (
"reflect"
)
// Any dereferences a pointer of any type: given a non-nil pointer it returns
// the pointed-to value, given a nil pointer it returns the zero value of the
// pointee type, and non-pointer values (or a nil interface) pass through
// unchanged.
func Any(v interface{}) interface{} {
	if v == nil {
		return nil
	}
	rv := reflect.ValueOf(v)
	if rv.Kind() != reflect.Ptr {
		return rv.Interface()
	}
	if rv.IsNil() {
		return reflect.Zero(rv.Type().Elem()).Interface()
	}
	return rv.Elem().Interface()
}
// Bool returns the bool pointed to by v, or false when v is nil.
func Bool(v *bool) bool {
	if v != nil {
		return *v
	}
	return false
}

// Byte returns the byte pointed to by v, or 0 when v is nil.
func Byte(v *byte) byte {
	if v != nil {
		return *v
	}
	return 0
}

// Complex64 returns the complex64 pointed to by v, or 0 when v is nil.
func Complex64(v *complex64) complex64 {
	if v != nil {
		return *v
	}
	return 0
}

// Complex128 returns the complex128 pointed to by v, or 0 when v is nil.
func Complex128(v *complex128) complex128 {
	if v != nil {
		return *v
	}
	return 0
}

// Float32 returns the float32 pointed to by v, or 0 when v is nil.
func Float32(v *float32) float32 {
	if v != nil {
		return *v
	}
	return 0
}

// Float64 returns the float64 pointed to by v, or 0 when v is nil.
func Float64(v *float64) float64 {
	if v != nil {
		return *v
	}
	return 0
}

// Int returns the int pointed to by v, or 0 when v is nil.
func Int(v *int) int {
	if v != nil {
		return *v
	}
	return 0
}

// Int8 returns the int8 pointed to by v, or 0 when v is nil.
func Int8(v *int8) int8 {
	if v != nil {
		return *v
	}
	return 0
}

// Int16 returns the int16 pointed to by v, or 0 when v is nil.
func Int16(v *int16) int16 {
	if v != nil {
		return *v
	}
	return 0
}

// Int32 returns the int32 pointed to by v, or 0 when v is nil.
func Int32(v *int32) int32 {
	if v != nil {
		return *v
	}
	return 0
}

// Int64 returns the int64 pointed to by v, or 0 when v is nil.
func Int64(v *int64) int64 {
	if v != nil {
		return *v
	}
	return 0
}

// Rune returns the rune pointed to by v, or 0 when v is nil.
func Rune(v *rune) rune {
	if v != nil {
		return *v
	}
	return 0
}

// String returns the string pointed to by v, or "" when v is nil.
func String(v *string) string {
	if v != nil {
		return *v
	}
	return ""
}

// Uint returns the uint pointed to by v, or 0 when v is nil.
func Uint(v *uint) uint {
	if v != nil {
		return *v
	}
	return 0
}

// Uint8 returns the uint8 pointed to by v, or 0 when v is nil.
func Uint8(v *uint8) uint8 {
	if v != nil {
		return *v
	}
	return 0
}

// Uint16 returns the uint16 pointed to by v, or 0 when v is nil.
func Uint16(v *uint16) uint16 {
	if v != nil {
		return *v
	}
	return 0
}

// Uint32 returns the uint32 pointed to by v, or 0 when v is nil.
func Uint32(v *uint32) uint32 {
	if v != nil {
		return *v
	}
	return 0
}

// Uint64 returns the uint64 pointed to by v, or 0 when v is nil.
func Uint64(v *uint64) uint64 {
	if v != nil {
		return *v
	}
	return 0
}
// Uintptr dereference a pointer uintptr from the structure of a literal or
// variable, and if the pointer is nil it returns a zero value of this type.
func Uintptr(v *uintptr) uintptr {
	if v == nil {
		return 0
	}
	return *v
} | derefer.go | 0.747432 | 0.618233 | derefer.go | starcoder
package key_stat
import (
"fmt"
"math"
)
// KeyStatsObject - the struct that holds the 'settings' and current values.
type KeyStatsObject struct {
	values []int8 // rolling window of recorded 0/1 outcomes, oldest first
	minWindow int8 // smallest sample count a window must have to be considered
	maxWindow int8 // cap on how many outcomes are retained (oldest dropped)
	cutoff float32 // success ratio a window must reach to be reported relevant
	ignoreNanValues bool // when true, Add drops NaN inputs entirely
	ignoreInfValues bool // when true, Add drops +/-Inf inputs entirely
}
// SetIgnoreNanValues - controls if we want to ignore NaN values when producing
// the outputs of any calculations. (The comment previously mis-named this
// method as SetIgnoreInfValues.)
func (kso *KeyStatsObject) SetIgnoreNanValues(ignoreNanValues bool) {
	kso.ignoreNanValues = ignoreNanValues
}
// SetIgnoreInfValues - controls if we want to ignore infinities (both positive
// and negative values) when producing the outputs of any calculations.
func (kso *KeyStatsObject) SetIgnoreInfValues(ignoreInfValues bool) {
	kso.ignoreInfValues = ignoreInfValues
}
// Add - if given value meets the given conditions, append to the values used
// in the calculation, trimming the oldest entry so the window never exceeds
// maxWindow.
//
// Mapping of inputs (when not ignored by the flags): 1.0 and +Inf record a
// success (1); 0.0, -Inf and NaN record a failure (0). Any other value
// panics, since only these sentinel inputs are meaningful here.
func (kso *KeyStatsObject) Add(value float64) {
	// Optionally drop NaN / infinite inputs before they are recorded.
	if kso.ignoreNanValues && math.IsNaN(value) {
		return
	}
	if kso.ignoreInfValues && (math.IsInf(value, 1) || math.IsInf(value, -1)) {
		return
	}
	// Evict the oldest recorded outcome once the window is full.
	if len(kso.values) >= int(kso.maxWindow) {
		kso.values = kso.values[1:len(kso.values)]
	}
	if (value == 1.0) || (math.IsInf(value, 1)) {
		kso.values = append(kso.values, 1)
	} else if (value == 0.0) || (math.IsInf(value, -1)) || math.IsNaN(value) {
		kso.values = append(kso.values, 0)
	} else {
		panic("Supplied `value` argument is not valid - must be -inf, 0, 1, inf or nan, received value: " + fmt.Sprintf("%f", value))
	}
}
// KeyStat - return current key stat values and if they are relevant given the cutoff.
//
// It scans windows of the recorded outcomes from longest to shortest
// (dropping the oldest entry each pass) and returns, for the first window
// whose success ratio reaches the cutoff: (true, successes, window size).
// It returns (false, 0, 0) when fewer than minWindow samples exist or no
// window qualifies.
func (kso *KeyStatsObject) KeyStat() (bool, int, int) {
	if len(kso.values) < int(kso.minWindow) {
		return false, 0, 0
	}
	// Work on a copy so the stored window is left untouched.
	values := make([]int8, len(kso.values))
	copy(values, kso.values)
	r := len(values)
	for i := 0; i < r; i++ {
		num := 0
		for _, j := range values {
			num += int(j)
		}
		denom := len(values)
		if float64(num)/float64(denom) >= float64(kso.cutoff) {
			return true, num, denom
		}
		// Shrink the window from the oldest end and retry.
		values = values[1:]
		if len(values) < int(kso.minWindow) {
			return false, 0, 0
		}
	}
	// NOTE(review): unreachable when minWindow >= 1 (the loop always returns
	// before exhausting the window); with minWindow <= 0 the slicing above
	// panics first. Confirm minWindow is always validated to be >= 1.
	panic("Error calculating current KeyStat values")
}
// NewKeyStatObject - set up a new key stat object with the supplied windows,
// cutoff and the default ignore settings (ignoreNanValuesDefault /
// ignoreInfValuesDefault are package-level constants defined elsewhere).
// Panics when minWindow exceeds maxWindow.
func NewKeyStatObject(minWindow int8, maxWindow int8, cutoff float32) *KeyStatsObject {
	if minWindow > maxWindow {
		panic("`minWindow` argument must be less than `maxWindow`")
	}
	return &KeyStatsObject{
		minWindow: minWindow,
		maxWindow: maxWindow,
		cutoff: cutoff,
		ignoreNanValues: ignoreNanValuesDefault,
		ignoreInfValues: ignoreInfValuesDefault,
	}
} | key_stat.go | 0.706798 | 0.461927 | key_stat.go | starcoder
// Package solr provides a solr client that enables the user to easily connect to
// one or more solr servers with support for the the basic CRUDL functionality
package solr
import (
"context"
"fmt"
"net/http"
)
// Client is the interface encompassing all the solr service methods
type Client interface {
	// SetBasicAuth sets the authentication credentials if needed.
	SetBasicAuth(username, password string)
	// Ping checks the connectivity of the solr server. It usually just returns with
	// Status = OK and a default response header, therefore this function just
	// returns an error in case there is no response, or an unexpected one.
	Ping(ctx context.Context) error
	// Search performs a query to the solr server by using the `/select` endpoint, with the provided query
	// parameters. The query input can be easily created utilizing the provided helpers (check examples).
	// Currently only simple searches are supported.
	// For more info:
	// https://lucene.apache.org/solr/guide/8_5/overview-of-searching-in-solr.html
	Search(ctx context.Context, q *Query) (*Response, error)
	// Get performs a realtime get call to the solr server that returns the latest version of the document specified
	// by its id (uniqueKey field) without the associated cost of reopening a searcher. This is primarily useful
	// when using Solr as a NoSQL data store and not just a search index. For more info:
	// https://lucene.apache.org/solr/guide/8_5/realtime-get.html
	Get(ctx context.Context, id string) (*Response, error)
	// BatchGet performs a realtime get call to the solr server that returns the latest version of multiple documents
	// specified by their id (uniqueKey field) and filtered by the provided filter. The provided filter should
	// follow the format of the `fq` parameter but be concatenated in one string. For more info:
	// https://lucene.apache.org/solr/guide/8_5/realtime-get.html
	BatchGet(ctx context.Context, ids []string, filter string) (*Response, error)
	// Create adds a single document via JSON to the solr service. It calls the `/update/json/docs` endpoint.
	// Therefore the provided interface (item) must be a valid JSON object. This method accepts extra
	// options that are passed to the service as part of the request query. For more info:
	// https://lucene.apache.org/solr/guide/8_5/uploading-data-with-index-handlers.html#adding-a-single-json-document
	Create(ctx context.Context, item interface{}, opts *WriteOptions) (*Response, error)
	// BatchCreate adds multiple documents at once via JSON to the solr service. It calls the `/update` endpoint.
	// Therefore the provided interface (items) must be a valid array of JSON objects. This method accepts
	// extra options that are passed to the service as part of the request query. For more info:
	// https://lucene.apache.org/solr/guide/8_5/uploading-data-with-index-handlers.html#adding-multiple-json-documents
	BatchCreate(ctx context.Context, items interface{}, opts *WriteOptions) (*Response, error)
	// Update allows for partial updates of documents utilizing the "atomic" and the "in-place" updates approach.
	// The expected Fields input can be easily created using the provided helpers (check examples). This method
	// accepts extra options that are passed to the service as part of the request query. For more info:
	// https://lucene.apache.org/solr/guide/8_5/updating-parts-of-documents.html#atomic-updates
	Update(ctx context.Context, item *UpdatedFields, opts *WriteOptions) (*Response, error)
	// DeleteByID sends a JSON update command that deletes the document specified by its id (uniqueKey field).
	// It calls the `/update` endpoint and sends Solr JSON. This method accepts extra options that are
	// passed to the service as part of the request query. For more info:
	// https://lucene.apache.org/solr/guide/8_5/uploading-data-with-index-handlers.html#sending-json-update-commands
	DeleteByID(ctx context.Context, id string, opts *WriteOptions) (*Response, error)
	// DeleteByQuery sends a JSON update command that deletes the documents matching the given query. The query format
	// should follow the syntax of the Q parameter for the Search endpoint. It calls the `/update` endpoint and
	// sends Solr JSON. This method accepts extra options that are passed to the service as part of the
	// request query. For more info:
	// https://lucene.apache.org/solr/guide/8_5/uploading-data-with-index-handlers.html#sending-json-update-commands
	DeleteByQuery(ctx context.Context, query string, opts *WriteOptions) (*Response, error)
	// Clear is a helper method that removes all documents from the solr server. Use with caution.
	// It sends a DeleteByQuery request where the query is `*:*` and commit=true.
	Clear(ctx context.Context) (*Response, error)
	// Commit sends a JSON update command that commits all uncommitted changes. Unless specified from one of the
	// options all write methods of this library will not commit their changes, therefore this method should
	// be called at the end of the transaction to ensure that the indexes are properly updated.
	// For more info:
	// https://lucene.apache.org/solr/guide/8_5/uploading-data-with-index-handlers.html#sending-json-update-commands
	Commit(ctx context.Context, opts *CommitOptions) (*Response, error)
	// Rollback sends a JSON update command that rolls back all uncommitted changes. Unless specified from one of the
	// options all write methods of this library will not commit their changes, therefore this method should
	// be called if some action of the transaction returns an error and data cleaning is necessary.
	// For more info:
	// https://lucene.apache.org/solr/guide/8_5/uploading-data-with-index-handlers.html#sending-json-update-commands
	Rollback(ctx context.Context) (*Response, error)
	// Optimize sends a JSON update command that requests Solr to merge internal data structures. For a large index,
	// optimization will take some time to complete, but by merging many small segment files into larger segments,
	// search performance may improve. More info:
	// https://lucene.apache.org/solr/guide/8_5/uploading-data-with-index-handlers.html#commit-and-optimize-during-updates
	Optimize(ctx context.Context, opts *OptimizeOptions) (*Response, error)
	// CustomUpdate allows the creation of a request to the `/update` endpoint that can include more than one update
	// command or for those that want a more fine-grained request.
	CustomUpdate(ctx context.Context, item *UpdateBuilder, opts *WriteOptions) (*Response, error)
}
func read(ctx context.Context, conn connection, url string) (*Response, error) {
return conn.request(ctx, http.MethodGet, url, nil)
}
func create(ctx context.Context, conn connection, url string, item interface{}) (*Response, error) {
bodyBytes, err := interfaceToBytes(item)
if err != nil {
return nil, err
}
err = isJSON(bodyBytes)
if err != nil {
return nil, fmt.Errorf("Invalid JSON provided: %s", err)
}
return conn.request(ctx, http.MethodPost, url, bodyBytes)
}
func batchCreate(ctx context.Context, conn connection, url string, items interface{}) (*Response, error) {
bodyBytes, err := interfaceToBytes(items)
if err != nil {
return nil, err
}
err = isArrayOfJSON(bodyBytes)
if err != nil {
return nil, fmt.Errorf("Invalid Array of JSON provided: %s", err)
}
return conn.request(ctx, http.MethodPost, url, bodyBytes)
}
func update(ctx context.Context, conn connection, url string, item *UpdatedFields) (*Response, error) {
ub := NewUpdateBuilder()
ub.add(item.fields)
bodyBytes, err := interfaceToBytes(ub.commands)
if err != nil {
return nil, err
}
return conn.request(ctx, http.MethodPost, url, bodyBytes)
}
func delete(ctx context.Context, conn connection, url string, doc Doc) (*Response, error) {
ub := NewUpdateBuilder()
ub.delete(doc)
bodyBytes, err := interfaceToBytes(ub.commands)
if err != nil {
return nil, err
}
return conn.request(ctx, http.MethodPost, url, bodyBytes)
}
func commit(ctx context.Context, conn connection, url string, opts *CommitOptions) (*Response, error) {
ub := NewUpdateBuilder()
ub.commit(opts)
bodyBytes, err := interfaceToBytes(ub.commands)
if err != nil {
return nil, err
}
return conn.request(ctx, http.MethodPost, url, bodyBytes)
}
func optimize(ctx context.Context, conn connection, url string, opts *OptimizeOptions) (*Response, error) {
ub := NewUpdateBuilder()
ub.optimize(opts)
bodyBytes, err := interfaceToBytes(ub.commands)
if err != nil {
return nil, err
}
return conn.request(ctx, http.MethodPost, url, bodyBytes)
}
func rollback(ctx context.Context, conn connection, url string) (*Response, error) {
ub := NewUpdateBuilder()
ub.rollback()
bodyBytes, err := interfaceToBytes(ub.commands)
if err != nil {
return nil, err
}
return conn.request(ctx, http.MethodPost, url, bodyBytes)
}
func customUpdate(ctx context.Context, conn connection, url string, item *UpdateBuilder) (*Response, error) {
item.prepare()
bodyBytes, err := interfaceToBytes(item.commands)
if err != nil {
return nil, err
}
return conn.request(ctx, http.MethodPost, url, bodyBytes)
} | solr.go | 0.832917 | 0.418935 | solr.go | starcoder |
package sweetiebot
import (
"fmt"
"strings"
"github.com/bwmarrin/discordgo"
)
type AddGroupCommand struct {
}

func (c *AddGroupCommand) Name() string {
	return "AddGroup"
}

// Process creates a new group named after the first argument, with the
// message author as its only initial member.
func (c *AddGroupCommand) Process(args []string, msg *discordgo.Message, info *GuildInfo) (string, bool) {
	if len(args) < 1 {
		return "```You have to name the group!```", false
	}
	name := strings.TrimSpace(strings.ToLower(args[0]))
	if _, exists := info.config.Groups[name]; exists {
		return "```That group already exists!```", false
	}
	if len(info.config.Groups) <= 0 {
		info.config.Groups = make(map[string]map[string]bool)
	}
	info.config.Groups[name] = map[string]bool{msg.Author.ID: true}
	info.SaveConfig()
	return "```Successfully created the " + name + " group! Join it using !joingroup " + name + " and ping it using !ping " + name + "```", false
}

// Usage returns the formatted long help text for this command.
func (c *AddGroupCommand) Usage(info *GuildInfo) string {
	return info.FormatUsage(c, "[name]", "Creates a new group and automatically adds you to it. Groups are automatically destroyed when everyone in the group leaves.")
}

// UsageShort returns the one-line help text for this command.
func (c *AddGroupCommand) UsageShort() string { return "Creates a new group." }
type JoinGroupCommand struct {
}

func (c *JoinGroupCommand) Name() string {
	return "JoinGroup"
}

// Process adds the message author to the named group, if it exists.
func (c *JoinGroupCommand) Process(args []string, msg *discordgo.Message, info *GuildInfo) (string, bool) {
	if len(args) < 1 {
		return "```You have to provide a group name!```", false
	}
	name := strings.TrimSpace(strings.ToLower(args[0]))
	if _, exists := info.config.Groups[name]; !exists {
		return "```That group doesn't exist! Use !listgroup to list existing groups.```", false
	}
	info.config.Groups[name][msg.Author.ID] = true
	info.SaveConfig()
	return "```Successfully joined the " + name + " group! Ping it using !ping " + name + " or leave it using !leavegroup " + name + "```", false
}

// Usage returns the formatted long help text for this command.
func (c *JoinGroupCommand) Usage(info *GuildInfo) string {
	return info.FormatUsage(c, "[group]", "Joins an existing group.")
}

// UsageShort returns the one-line help text for this command.
func (c *JoinGroupCommand) UsageShort() string { return "Joins an existing group." }
type ListGroupCommand struct {
}

func (c *ListGroupCommand) Name() string {
	return "ListGroup"
}

// Process lists all groups when called without arguments, or the usernames of
// the members of the named group when one is given.
func (c *ListGroupCommand) Process(args []string, msg *discordgo.Message, info *GuildInfo) (string, bool) {
	if len(args) < 1 {
		if len(info.config.Groups) <= 0 {
			return "```No groups to list!```", false
		}
		keys := make([]string, 0, len(info.config.Groups))
		for k := range info.config.Groups {
			keys = append(keys, k)
		}
		return "```" + strings.Join(keys, ", ") + "```", false
	}
	arg := strings.TrimSpace(strings.ToLower(args[0]))
	if _, ok := info.config.Groups[arg]; !ok {
		return "```That group doesn't exist! Use !listgroup with no arguments to list existing groups.```", false
	}
	// Skip members that can no longer be resolved to a user instead of
	// emitting empty names (the original left "" entries in the joined list).
	pings := make([]string, 0, len(info.config.Groups[arg]))
	for k := range info.config.Groups[arg] {
		if m, _, _, _ := sb.db.GetUser(SBatoi(k)); m != nil {
			pings = append(pings, m.Username)
		}
	}
	return "```" + strings.Join(pings, ", ") + "```", false
}

// Usage returns the formatted long help text for this command.
func (c *ListGroupCommand) Usage(info *GuildInfo) string {
	return info.FormatUsage(c, "[group]", "If no argument is given, lists all the current groups. If a group name is given, lists all the members of that group.")
}

// UsageShort returns the one-line help text for this command.
func (c *ListGroupCommand) UsageShort() string { return "Lists all groups." }
type LeaveGroupCommand struct {
}

func (c *LeaveGroupCommand) Name() string {
	return "LeaveGroup"
}

// Process removes the author from the named group, deleting the group
// entirely once its last member leaves.
func (c *LeaveGroupCommand) Process(args []string, msg *discordgo.Message, info *GuildInfo) (string, bool) {
	if len(args) < 1 {
		return "```You have to provide a group name!```", false
	}
	name := strings.TrimSpace(strings.ToLower(args[0]))
	members, exists := info.config.Groups[name]
	if !exists {
		return "```That group doesn't exist! Use !listgroup to list existing groups.```", false
	}
	if _, member := members[msg.Author.ID]; !member {
		return "```You aren't in that group!```", false
	}
	delete(members, msg.Author.ID)
	if len(members) <= 0 {
		delete(info.config.Groups, name)
	}
	info.SaveConfig()
	return "```You have been removed from " + name + "```", false
}

// Usage returns the formatted long help text for this command.
func (c *LeaveGroupCommand) Usage(info *GuildInfo) string {
	return info.FormatUsage(c, "[group]", "Removes you from the given group, if you are a member of it.")
}

// UsageShort returns the one-line help text for this command.
func (c *LeaveGroupCommand) UsageShort() string { return "Removes you from a group." }
// getGroupPings builds a single mention string ("<@id> <@id> ...") for the
// union of all members of the given groups. Returns "" when there is nobody
// to ping.
func getGroupPings(groups []string, info *GuildInfo) string {
	if len(groups) == 0 {
		return ""
	}
	union := make(map[string]bool)
	for _, group := range groups {
		for k, v := range info.config.Groups[group] {
			union[k] = v
		}
	}
	// Guard against an empty union (e.g. every listed group is empty or
	// missing), which previously produced the bogus mention "<@>".
	if len(union) == 0 {
		return ""
	}
	pings := make([]string, 0, len(union))
	for k := range union {
		// We convert to integers and then back to strings to prevent bloons from fucking with the bot
		pings = append(pings, SBitoa(SBatoi(k)))
	}
	return "<@" + strings.Join(pings, "> <@") + ">"
}
type PingCommand struct {
}

func (c *PingCommand) Name() string {
	return "Ping"
}

// Process pings every member of one group (or of several groups joined with
// '+', e.g. "art+music"), relaying the rest of the message. The caller must
// be a member of every group they ping.
func (c *PingCommand) Process(args []string, msg *discordgo.Message, info *GuildInfo) (string, bool) {
	if len(args) < 1 {
		return "```You have to provide a group name!```", false
	}
	// The group name may be separated from the message body by a newline
	// rather than a space, so split the first argument on the first newline
	// and splice the remainder back in front of the other arguments.
	nargs := strings.SplitN(args[0], "\n", 2)
	args = append(nargs, args[1:]...)
	arg := strings.TrimSpace(strings.ToLower(args[0]))
	_, ok := info.config.Groups[arg]
	if !ok {
		// Not the name of a single existing group: treat the argument as a
		// '+'-separated union and validate existence and membership for each.
		groups := strings.Split(arg, "+")
		for _, v := range groups {
			_, ok = info.config.Groups[v]
			if !ok {
				return fmt.Sprintf("```The %s group doesn't exist! Use !listgroup to list existing groups.```", v), false
			}
			_, ok = info.config.Groups[v][msg.Author.ID]
			if !ok {
				return fmt.Sprintf("```You aren't a member of %s. You can only ping groups you are a member of.```", v), false
			}
		}
		sb.dg.ChannelMessageSend(msg.ChannelID, arg+": "+getGroupPings(groups, info)+" "+info.SanitizeOutput(strings.Join(args[1:], " ")))
	} else {
		// Single group: only the membership check is needed.
		_, ok = info.config.Groups[arg][msg.Author.ID]
		if !ok {
			return "```You can only ping groups you are a member of.```", false
		}
		sb.dg.ChannelMessageSend(msg.ChannelID, arg+": "+getGroupPings([]string{arg}, info)+" "+info.SanitizeOutput(strings.Join(args[1:], " ")))
	}
	// The reply is sent directly to the channel above, so nothing is returned.
	return "", false
}

// Usage returns the formatted long help text for this command.
func (c *PingCommand) Usage(info *GuildInfo) string {
	return info.FormatUsage(c, "[group] [arbitrary string]", "Pings everyone in a group with the given message, but only if you are a member of the group.")
}

// UsageShort returns the one-line help text for this command.
func (c *PingCommand) UsageShort() string { return "Pings a group." }
type PurgeGroupCommand struct {
}
func (c *PurgeGroupCommand) Name() string {
return "PurgeGroup"
}
func (c *PurgeGroupCommand) Process(args []string, msg *discordgo.Message, info *GuildInfo) (string, bool) {
if len(args) < 1 {
return "```You have to provide a group name!```", false
}
arg := strings.TrimSpace(strings.ToLower(args[0]))
_, ok := info.config.Groups[arg]
if !ok {
return "```That group doesn't exist! Use !listgroup to list existing groups.```", false
}
delete(info.config.Groups, arg)
info.SaveConfig()
return "```Deleted " + arg + "```", false
}
func (c *PurgeGroupCommand) Usage(info *GuildInfo) string {
return info.FormatUsage(c, "[group]", "Deletes the group, if it exists.")
}
func (c *PurgeGroupCommand) UsageShort() string { return "Deletes a group." } | sweetiebot/groups_command.go | 0.588534 | 0.477067 | groups_command.go | starcoder |
package chrono
// Extent represents a period of time measured in nanoseconds.
// The represented value is exactly equivalent to the standard library's time.Duration.
type Extent int64

// Common time-based durations relative to 1 nanosecond.
const (
	Nanosecond  Extent = 1
	Microsecond        = 1000 * Nanosecond
	Millisecond        = 1000 * Microsecond
	Second             = 1000 * Millisecond
	Minute             = 60 * Second
	Hour               = 60 * Minute
)

// Nanoseconds returns the extent as an integer nanosecond count.
func (e Extent) Nanoseconds() int64 {
	return int64(e)
}

// Microseconds returns the duration as a floating point number of microseconds.
func (e Extent) Microseconds() float64 {
	// The remainder must be taken against the unit, not against the quotient:
	// the original `e % micros` returned wrong fractions and panicked with a
	// divide-by-zero whenever e < Microsecond. This now mirrors
	// time.Duration.Seconds from the standard library.
	micros := e / Microsecond
	nsec := e % Microsecond
	return float64(micros) + float64(nsec)/1e3
}

// Milliseconds returns the duration as a floating point number of milliseconds.
func (e Extent) Milliseconds() float64 {
	millis := e / Millisecond
	nsec := e % Millisecond // remainder against the unit (was `e % millis`)
	return float64(millis) + float64(nsec)/1e6
}

// Seconds returns the duration as a floating point number of seconds.
func (e Extent) Seconds() float64 {
	secs := e / Second
	nsec := e % Second // remainder against the unit (was `e % secs`)
	return float64(secs) + float64(nsec)/1e9
}

// Minutes returns the duration as a floating point number of minutes.
func (e Extent) Minutes() float64 {
	mins := e / Minute
	nsec := e % Minute // remainder against the unit (was `e % mins`)
	return float64(mins) + float64(nsec)/(60*1e9)
}

// Hours returns the duration as a floating point number of hours.
func (e Extent) Hours() float64 {
	hours := e / Hour
	nsec := e % Hour // remainder against the unit (was `e % hours`)
	return float64(hours) + float64(nsec)/(60*60*1e9)
}

// Units returns the whole numbers of hours, minutes, seconds, and nanosecond offset represented by e.
func (e Extent) Units() (hours, mins, secs, nsec int) {
	hours = int(e / Hour)
	mins = int(e/Minute) % 60
	secs = int(e/Second) % 60
	nsec = int(e % Second)
	return
}

// Truncate returns the result of rounding e toward zero to a multiple of m.
// A non-positive m leaves e unchanged, matching time.Duration.Truncate.
func (e Extent) Truncate(m Extent) Extent {
	if m <= 0 {
		return e
	}
	return e - e%m
}
package livedocs
// LiveShort is the short CLI help text for the `kpt live` command group.
var LiveShort = `Deploy local packages to a cluster.`

// LiveLong is the long CLI help text for the `kpt live` command group.
var LiveLong = `
The ` + "`" + `live` + "`" + ` command group contains subcommands for deploying local
` + "`" + `kpt` + "`" + ` packages to a cluster.
`

// ApplyShort is the short CLI help text for `kpt live apply`.
var ApplyShort = `Apply a package to the cluster (create, update, prune).`

// ApplyLong is the long CLI help text for `kpt live apply`.
var ApplyLong = `
kpt live apply [PKG_PATH | -] [flags]
Args:
PKG_PATH | -:
Path to the local package which should be applied to the cluster. It must
contain a Kptfile with inventory information. Defaults to the current working
directory.
Using '-' as the package path will cause kpt to read resources from stdin.
Flags:
--dry-run:
It true, kpt will validate the resources in the package and print which
resources will be applied and which resources will be pruned, but no resources
will be changed.
If the --server-side flag is true, kpt will do a server-side dry-run, otherwise
it will be a client-side dry-run. Note that the output will differ somewhat
between the two alternatives.
--field-manager:
Identifier for the **owner** of the fields being applied. Only usable
when --server-side flag is specified. Default value is kubectl.
--force-conflicts:
Force overwrite of field conflicts during apply due to different field
managers. Only usable when --server-side flag is specified.
Default value is false (error and failure when field managers conflict).
--install-resource-group:
Install the ResourceGroup CRD into the cluster if it isn't already
available. Default is false.
--inventory-policy:
Determines how to handle overlaps between the package being currently applied
and existing resources in the cluster. The available options are:
* strict: If any of the resources already exist in the cluster, but doesn't
belong to the current package, it is considered an error.
* adopt: If a resource already exist in the cluster, but belongs to a
different package, it is considered an error. Resources that doesn't belong
to other packages are adopted into the current package.
The default value is ` + "`" + `strict` + "`" + `.
--output:
Determines the output format for the status information. Must be one of the following:
* events: The output will be a list of the status events as they become available.
* json: The output will be a list of the status events as they become available,
each formatted as a json object.
* table: The output will be presented as a table that will be updated inline
as the status of resources become available.
The default value is ‘events’.
--poll-period:
The frequency with which the cluster will be polled to determine
the status of the applied resources. The default value is 2 seconds.
--prune-propagation-policy:
The propagation policy that should be used when pruning resources. The
default value here is 'Background'. The other options are 'Foreground' and 'Orphan'.
--prune-timeout:
The threshold for how long to wait for all pruned resources to be
deleted before giving up. If this flag is not set, kpt live apply will not
wait. In most cases, it would also make sense to set the
--prune-propagation-policy to Foreground when this flag is set.
--reconcile-timeout:
The threshold for how long to wait for all resources to reconcile before
giving up. If this flag is not set, kpt live apply will not wait for
resources to reconcile.
--server-side:
Perform the apply operation server-side rather than client-side.
Default value is false (client-side).
`

// ApplyExamples contains usage examples for `kpt live apply`.
var ApplyExamples = `
# apply resources in the current directory
$ kpt live apply
# apply resources in the my-dir directory and wait for all the resources to be
# reconciled before pruning
$ kpt live apply --reconcile-timeout=15m my-dir
# apply resources and specify how often to poll the cluster for resource status
$ kpt live apply --reconcile-timeout=15m --poll-period=5s my-dir
`

// DestroyShort is the short CLI help text for `kpt live destroy`.
var DestroyShort = `Remove all previously applied resources in a package from the cluster`

// DestroyLong is the long CLI help text for `kpt live destroy`.
var DestroyLong = `
kpt live destroy [PKG_PATH | -]
Args:
PKG_PATH | -:
Path to the local package which should be deleted from the cluster. It must
contain a Kptfile with inventory information. Defaults to the current working
directory.
Using '-' as the package path will cause kpt to read resources from stdin.
Flags:
--dry-run:
It true, kpt will print the resources that will be removed from the cluster,
but no resources will be deleted.
--inventory-policy:
Determines how to handle overlaps between the package being currently applied
and existing resources in the cluster. The available options are:
* strict: If any of the resources already exist in the cluster, but doesn't
belong to the current package, it is considered an error.
* adopt: If a resource already exist in the cluster, but belongs to a
different package, it is considered an error. Resources that doesn't belong
to other packages are adopted into the current package.
The default value is ` + "`" + `strict` + "`" + `.
--output:
Determines the output format for the status information. Must be one of the following:
* events: The output will be a list of the status events as they become available.
* json: The output will be a list of the status events as they become available,
each formatted as a json object.
* table: The output will be presented as a table that will be updated inline
as the status of resources become available.
The default value is ‘events’.
`

// DestroyExamples contains usage examples for `kpt live destroy`.
var DestroyExamples = `
# remove all resources in the current package from the cluster.
$ kpt live destroy
`

// InitShort is the short CLI help text for `kpt live init`.
var InitShort = `Initialize a package with the information needed for inventory tracking.`

// InitLong is the long CLI help text for `kpt live init`.
var InitLong = `
kpt live init [PKG_PATH] [flags]
Args:
PKG_PATH:
Path to the local package which should be updated with inventory information.
It must contain a Kptfile. Defaults to the current working directory.
Flags:
--force:
Forces the inventory values to be updated, even if they are already set.
Defaults to false.
--inventory-id:
Inventory identifier for the package. This is used to detect overlap between
packages that might use the same name and namespace for the inventory object.
Defaults to an auto-generated value.
--name:
The name for the ResourceGroup resource that contains the inventory
for the package. Defaults to the name of the package.
--namespace:
The namespace for the ResourceGroup resource that contains the inventory
for the package. If not provided, kpt will check if all the resources
in the package belong in the same namespace. If they do, that namespace will
be used. If they do not, the namespace in the user's context will be chosen.
`

// InitExamples contains usage examples for `kpt live init`.
var InitExamples = `
# initialize a package in the current directory.
$ kpt live init
# initialize a package with a specific name for the group of resources.
$ kpt live init --namespace=test my-dir
`

// InstallResourceGroupShort is the short CLI help text for
// `kpt live install-resource-group`.
var InstallResourceGroupShort = `Install the ResourceGroup CRD in the cluster.`

// InstallResourceGroupLong is the long CLI help text for
// `kpt live install-resource-group`.
var InstallResourceGroupLong = `
kpt live install-resource-group
`

// InstallResourceGroupExamples contains usage examples for
// `kpt live install-resource-group`.
var InstallResourceGroupExamples = `
# install ResourceGroup CRD into the current cluster.
$ kpt live install-resource-group
`

// MigrateShort is the short CLI help text for `kpt live migrate`.
var MigrateShort = `Migrate a package and the inventory object to use the ResourceGroup CRD.`

// MigrateLong is the long CLI help text for `kpt live migrate`.
var MigrateLong = `
kpt live migrate [PKG_PATH] [flags]
Args:
PKG_PATH:
Path to the local package. It must have a Kptfile and an existing inventory
template in the root of the package. It defaults to the current directory.
Flags:
--dry-run:
Go through the steps of migration, but don't make any changes.
--force:
Forces the inventory values in the Kptfile to be updated, even if they are
already set. Defaults to false.
--name:
The name for the ResourceGroup resource that contains the inventory
for the package. Defaults to the same name as the existing ConfigMap
inventory object.
--namespace:
The namespace for the ResourceGroup resource that contains the inventory
for the package. If not provided, it defaults to the same namespace as the
existing ConfigMap inventory object.
`

// MigrateExamples contains usage examples for `kpt live migrate`.
var MigrateExamples = `
# Migrate the package in the current directory.
$ kpt live migrate
`
var StatusShort = `Display shows the status for the resources in the cluster`
var StatusLong = `
kpt live status [PKG_PATH | -] [flags]
Args:
PKG_PATH | -:
Path to the local package for which the status of the package in the cluster
should be displayed. It must contain a Kptfile with inventory information.
Defaults to the current working directory.
Using '-' as the package path will cause kpt to read resources from stdin.
Flags:
--output:
Determines the output format for the status information. Must be one of the following:
* events: The output will be a list of the status events as they become available.
* json: The output will be a list of the status events as they become available,
each formatted as a json object.
* table: The output will be presented as a table that will be updated inline
as the status of resources become available.
The default value is ‘events’.
--poll-period:
The frequency with which the cluster will be polled to determine the status
of the applied resources. The default value is 2 seconds.
--poll-until:
When to stop polling for status and exist. Must be one of the following:
* known: Exit when the status for all resources have been found.
* current: Exit when the status for all resources have reached the Current status.
* deleted: Exit when the status for all resources have reached the NotFound
status, i.e. all the resources have been deleted from the live state.
* forever: Keep polling for status until interrupted.
The default value is ‘known’.
--timeout:
Determines how long the command should run before exiting. This deadline will
be enforced regardless of the value of the --poll-until flag. The default is
to wait forever.
`
var StatusExamples = `
# Monitor status for the resources belonging to the package in the current
# directory. Wait until all resources have reconciled.
$ kpt live status
# Monitor status for the resources belonging to the package in the my-app
# directory. Output in table format:
$ kpt live status my-app --poll-until=forever --output=table
` | internal/docs/generated/livedocs/docs.go | 0.877437 | 0.683297 | docs.go | starcoder |
package input
import (
"github.com/Jeffail/benthos/v3/lib/input/reader"
"github.com/Jeffail/benthos/v3/lib/log"
"github.com/Jeffail/benthos/v3/lib/metrics"
"github.com/Jeffail/benthos/v3/lib/types"
)
//------------------------------------------------------------------------------
// init registers the redis_streams input constructor and its documentation
// in the global Constructors table.
func init() {
	Constructors[TypeRedisStreams] = TypeSpec{
		constructor: NewRedisStreams,
		description: `
Pulls messages from Redis (v5.0+) streams with the XREADGROUP command. The
` + "`client_id`" + ` should be unique for each consumer of a group.
The field ` + "`limit`" + ` specifies the maximum number of records to be
received per request. When more than one record is returned they are batched and
can be split into individual messages with the ` + "`split`" + ` processor.
Messages consumed by this input can be processed in parallel, meaning a single
instance of this input can utilise any number of threads within a
` + "`pipeline`" + ` section of a config.
Use the ` + "`batching`" + ` fields to configure an optional
[batching policy](../batching.md#batch-policy).
Redis stream entries are key/value pairs, as such it is necessary to specify the
key that contains the body of the message. All other keys/value pairs are saved
as metadata fields.`,
		// sanitiseConfigFunc merges the batching config into the sanitised
		// output so generated docs/config include the batch policy fields.
		sanitiseConfigFunc: func(conf Config) (interface{}, error) {
			return sanitiseWithBatch(conf.RedisStreams, conf.RedisStreams.Batching)
		},
	}
}
//------------------------------------------------------------------------------
// NewRedisStreams creates a new Redis List input type. The raw stream reader
// is wrapped with the configured batching policy, then with unack-bundling
// and message preservation before being exposed as an async reader.
func NewRedisStreams(conf Config, mgr types.Manager, log log.Modular, stats metrics.Type) (Type, error) {
	var rdr reader.Async
	var err error
	rdr, err = reader.NewRedisStreams(conf.RedisStreams, log, stats)
	if err != nil {
		return nil, err
	}
	rdr, err = reader.NewAsyncBatcher(conf.RedisStreams.Batching, rdr, mgr, log, stats)
	if err != nil {
		return nil, err
	}
	wrapped := reader.NewAsyncBundleUnacks(reader.NewAsyncPreserver(rdr))
	return NewAsyncReader(TypeRedisStreams, true, wrapped, log, stats)
}
//------------------------------------------------------------------------------ | lib/input/redis_streams.go | 0.695752 | 0.417628 | redis_streams.go | starcoder |
package entities
import "github.com/rpaloschi/dxf-go/core"
// LWPolyline Entity representation
type LWPolyline struct {
	BaseEntity
	Closed             bool    // polyline is closed (flag bit 0x1, group code 70)
	Plinegen           bool    // linetype is generated continuously (flag bit 0x80)
	ConstantWidth      float64 // constant width for all segments (group code 43)
	Elevation          float64 // OCS elevation (group code 38)
	Thickness          float64 // extrusion thickness (group code 39)
	Points             LWPolyLinePointSlice
	ExtrusionDirection core.Point // extrusion direction (codes 210/220/230)
}
// Equals tests equality against another LWPolyline.
func (p LWPolyline) Equals(other core.DxfElement) bool {
	o, ok := other.(*LWPolyline)
	if !ok {
		// A non-LWPolyline element can never be equal.
		return false
	}
	return p.BaseEntity.Equals(o.BaseEntity) &&
		p.Closed == o.Closed &&
		p.Plinegen == o.Plinegen &&
		core.FloatEquals(p.ConstantWidth, o.ConstantWidth) &&
		core.FloatEquals(p.Elevation, o.Elevation) &&
		core.FloatEquals(p.Thickness, o.Thickness) &&
		p.Points.Equals(o.Points) &&
		p.ExtrusionDirection.Equals(o.ExtrusionDirection)
}
// Bit masks for the LWPolyline flags value (group code 70).
const closedBit = 0x1
const plinegenBit = 0x80

// NewLWPolyline builds a new LWPolyline from a slice of Tags.
// Group code 90 announces the vertex count and pre-allocates Points; the
// per-vertex codes (10/20/91/40/41/42) then fill the vertex addressed by
// pointIndex, which is advanced each time a new X coordinate (code 10) opens
// the next vertex.
func NewLWPolyline(tags core.TagSlice) (*LWPolyline, error) {
	polyline := new(LWPolyline)

	// set defaults
	polyline.ExtrusionDirection = core.Point{X: 0.0, Y: 0.0, Z: 1.0}

	polyline.InitBaseEntityParser()
	// pointIndex tracks the vertex currently being populated; it starts at -1
	// so the first code-10 tag advances it to index 0.
	pointIndex := -1
	polyline.Update(map[int]core.TypeParser{
		70: core.NewIntTypeParser(func(flags int64) {
			polyline.Closed = flags&closedBit != 0
			polyline.Plinegen = flags&plinegenBit != 0
		}),
		90: core.NewIntTypeParser(func(value int64) {
			// Vertex count: allocate all points up front.
			polyline.Points = make(LWPolyLinePointSlice, value)
		}),
		38: core.NewFloatTypeParserToVar(&polyline.Elevation),
		39: core.NewFloatTypeParserToVar(&polyline.Thickness),
		43: core.NewFloatTypeParserToVar(&polyline.ConstantWidth),
		10: core.NewFloatTypeParser(func(x float64) {
			// A new X coordinate starts the next vertex.
			pointIndex++
			polyline.Points[pointIndex].Point.X = x
		}),
		20: core.NewFloatTypeParser(func(y float64) {
			polyline.Points[pointIndex].Point.Y = y
		}),
		91: core.NewIntTypeParser(func(value int64) {
			polyline.Points[pointIndex].Id = value
		}),
		40: core.NewFloatTypeParser(func(value float64) {
			polyline.Points[pointIndex].StartingWidth = value
		}),
		41: core.NewFloatTypeParser(func(value float64) {
			polyline.Points[pointIndex].EndWidth = value
		}),
		42: core.NewFloatTypeParser(func(value float64) {
			polyline.Points[pointIndex].Bulge = value
		}),
		210: core.NewFloatTypeParserToVar(&polyline.ExtrusionDirection.X),
		220: core.NewFloatTypeParserToVar(&polyline.ExtrusionDirection.Y),
		230: core.NewFloatTypeParserToVar(&polyline.ExtrusionDirection.Z),
	})
	err := polyline.Parse(tags)
	return polyline, err
}
// LWPolyLinePointSlice Slice of LWPolyLinePoint
type LWPolyLinePointSlice []LWPolyLinePoint

// Equals Compares two LWPolyLinePointSlices for equality.
func (p LWPolyLinePointSlice) Equals(other LWPolyLinePointSlice) bool {
	if len(p) != len(other) {
		return false
	}
	for i := range p {
		if !p[i].Equals(other[i]) {
			return false
		}
	}
	return true
}
// LWPolyLinePoint point and attributes in an LWPolyline.
type LWPolyLinePoint struct {
	Point         core.Point // vertex location (group codes 10/20)
	Id            int64      // vertex identifier (group code 91)
	StartingWidth float64    // starting segment width (group code 40)
	EndWidth      float64    // ending segment width (group code 41)
	Bulge         float64    // arc bulge factor (group code 42)
}

// Equals compares two LWPolyLinePoints for equality
func (p LWPolyLinePoint) Equals(other LWPolyLinePoint) bool {
	return p.Point.Equals(other.Point) &&
		p.Id == other.Id &&
		core.FloatEquals(p.StartingWidth, other.StartingWidth) &&
		core.FloatEquals(p.EndWidth, other.EndWidth) &&
		core.FloatEquals(p.Bulge, other.Bulge)
}
package automations
import (
"fmt"
"math"
"math/rand"
. "github.com/bspaans/bleep/sequencer/status"
"github.com/bspaans/bleep/theory"
)
// IntAutomation computes an int value from the sequencer status, the current
// automation counter and the current time step.
type IntAutomation func(s *Status, counter, t uint) int

// IntArrayAutomation is like IntAutomation but yields a slice of ints
// (e.g. a set of note numbers).
type IntArrayAutomation func(s *Status, counter, t uint) []int

// FloatAutomation computes a float64 value from the sequencer status, the
// current automation counter and the current time step.
type FloatAutomation func(s *Status, counter, t uint) float64
func IntIdAutomation(id int) IntAutomation {
return func(s *Status, counter, t uint) int {
return id
}
}
func FloatIdAutomation(id float64) FloatAutomation {
return func(s *Status, counter, t uint) float64 {
return id
}
}
func IntArrayIdAutomation(id []int) IntArrayAutomation {
return func(s *Status, counter, t uint) []int {
return id
}
}
func IntRangeAutomation(min, max, step int) IntAutomation {
if step == 0 {
step = 1
}
reverse := false
if max < min {
reverse = true
}
return func(s *Status, counter, t uint) int {
if reverse {
intRange := uint(min - max)
steps := uint(math.Ceil(float64(intRange) / float64(step)))
v := min - step*int(counter%steps)
return v
} else {
intRange := uint(max - min)
steps := uint(math.Ceil(float64(intRange) / float64(step)))
v := min + step*int(counter%steps)
return v
}
}
}
func IntTransposeAutomation(transpose int, automation IntAutomation) IntAutomation {
return func(s *Status, counter, t uint) int {
return transpose + automation(s, counter, t)
}
}
func FloatTransposeAutomation(transpose float64, automation FloatAutomation) FloatAutomation {
return func(s *Status, counter, t uint) float64 {
return transpose + automation(s, counter, t)
}
}
func IntArrayTransposeAutomation(transpose int, automation IntArrayAutomation) IntArrayAutomation {
return func(s *Status, counter, t uint) []int {
values := automation(s, counter, t)
result := make([]int, len(values))
for i, v := range values {
result[i] = v + transpose
}
return result
}
}
func IntFadeInAutomation(from, to, changeEvery int) IntAutomation {
if changeEvery == 0 {
changeEvery = 1
}
width := to - from
diff := 1.0 / float64(changeEvery)
r := make([]int, int(float64(width+1)/diff))
for i := 0; i < len(r); i++ {
r[i] = from + int(float64(i)*diff)
}
return func(s *Status, counter, t uint) int {
if counter >= uint(len(r)) {
return to
}
fmt.Println(r[counter])
return r[counter]
}
}
func IntRandomAutomation(min, max int) IntAutomation {
return func(s *Status, counter, t uint) int {
if min > max {
min, max = max, min
}
return rand.Intn(max-min) + min
}
}
func FloatRandomAutomation(min, max float64) FloatAutomation {
return func(s *Status, counter, t uint) float64 {
if min > max {
min, max = max, min
}
randomRange := max - min
return rand.Float64()*randomRange + min
}
}
func IntSweepAutomation(min, max, changeEvery, step int) IntAutomation {
if changeEvery == 0 {
changeEvery = 1
}
if step == 0 {
step = 1
}
diff := float64(math.Abs(float64(step)))
width := max - min
if min > max {
width = min - max
diff = -diff
}
diff *= 1.0 / float64(changeEvery)
r := make([]int, int((float64(width+1) / diff)))
for i := 0; i < len(r); i++ {
r[i] = min + int(float64(i)*diff)
}
return IntBackAndForthAutomation(r)
}
func IntCycleAutomation(ints []int) IntAutomation {
l := uint(len(ints))
return func(s *Status, counter, t uint) int {
ix := counter % l
v := ints[ix]
return v
}
}
func IntRegisterAutomation(register int) IntAutomation {
return func(s *Status, counter, t uint) int {
return s.IntRegisters[register]
}
}
func IntArrayRegisterAutomation(register int) IntArrayAutomation {
return func(s *Status, counter, t uint) []int {
return s.IntArrayRegisters[register]
}
}
func FloatRegisterAutomation(register int) FloatAutomation {
return func(s *Status, counter, t uint) float64 {
return s.FloatRegisters[register]
}
}
func IntArrayCycleAutomation(f IntArrayAutomation) IntAutomation {
return func(s *Status, counter, t uint) int {
ints := f(s, counter, t)
return IntCycleAutomation(ints)(s, counter, t)
}
}
func IntArrayIndexAutomation(ixF IntAutomation, f IntArrayAutomation) IntArrayAutomation {
return func(s *Status, counter, t uint) []int {
ints := f(s, counter, t)
if len(ints) == 0 {
return ints
}
ix := ixF(s, counter, t)
return []int{ints[ix%len(ints)]}
}
}
func IntBackAndForthAutomation(ints []int) IntAutomation {
l := uint(len(ints))
return func(s *Status, counter, t uint) int {
ix := counter % (l*2 - 2)
if ix < l {
return ints[ix]
} else {
return ints[l-((ix+2)-l)]
}
}
}
func FloatBackAndForthAutomation(floats []float64) FloatAutomation {
l := uint(len(floats))
return func(s *Status, counter, t uint) float64 {
ix := counter % (l*2 - 2)
if ix < l {
return floats[ix]
} else {
return floats[l-((ix+2)-l)]
}
}
}
func OffsetAutomation(offset uint, a IntAutomation) IntAutomation {
return func(s *Status, counter, t uint) int {
return a(s, counter, t+offset)
}
}
func IntNegativeOffsetAutomation(offset uint, a IntAutomation) IntAutomation {
return func(s *Status, counter, t uint) int {
return a(s, counter-1, t-offset)
}
}
func IntArrayNegativeOffsetAutomation(offset uint, a IntArrayAutomation) IntArrayAutomation {
return func(s *Status, counter, t uint) []int {
return a(s, counter-1, t-offset)
}
}
func ChordCycleArrayAutomation(changeEvery int, chords [][]int) IntArrayAutomation {
return func(s *Status, counter, t uint) []int {
ix := counter % (uint(changeEvery * len(chords)))
v := chords[ix/uint(changeEvery)]
return v
}
}
func Chord(chord string, baseNoteF, octavesF, inversionsF IntAutomation) IntArrayAutomation {
return func(s *Status, counter, t uint) []int {
baseNote := baseNoteF(s, counter, t)
inversions := inversionsF(s, counter, t)
octaves := octavesF(s, counter, t)
baseValues := theory.ChordOnNoteInt(baseNote, chord)
baseValues = theory.InvertChord(baseValues, inversions)
values := []int{}
for octaves >= 1 {
for _, note := range baseValues {
values = append(values, note)
}
for i, _ := range baseValues {
baseValues[i] += 12
}
octaves--
}
return values
}
}
func Scale(scale string, baseNoteF, octavesF, inversionsF IntAutomation) IntArrayAutomation {
return func(s *Status, counter, t uint) []int {
baseNote := baseNoteF(s, counter, t)
inversions := inversionsF(s, counter, t)
octaves := octavesF(s, counter, t)
baseValues := theory.ScaleOnNoteInt(baseNote, scale)
baseValues = theory.InvertChord(baseValues, inversions)
values := []int{}
for octaves >= 1 {
for _, note := range baseValues {
values = append(values, note)
}
for i, _ := range baseValues {
baseValues[i] += 12
}
octaves--
}
return values
}
}
func ChordOnScale(chord string, scaleF IntArrayAutomation, startF, octavesF, inversionsF IntAutomation) IntArrayAutomation {
return func(s *Status, counter, t uint) []int {
scale := scaleF(s, counter, t)
start := startF(s, counter, t)
inversions := inversionsF(s, counter, t)
octaves := octavesF(s, counter, t)
scaleChords := map[string][]int{
"triad": []int{2, 2},
"seventh": []int{2, 2, 2},
"sixth": []int{2, 2, 1},
"ninth": []int{2, 2, 4},
}
ix := start % len(scale)
prev := scale[ix]
baseValues := []int{prev}
add := 0
for _, interval := range scaleChords[chord] {
ix = (ix + interval)
if ix >= len(scale) {
add += 12
ix = ix % len(scale)
}
baseValues = append(baseValues, scale[ix]+add)
prev = scale[ix]
}
baseValues = theory.InvertChord(baseValues, inversions)
values := []int{}
for octaves >= 1 {
for _, note := range baseValues {
values = append(values, note)
}
for i, _ := range baseValues {
baseValues[i] += 12
}
octaves--
}
return values
}
} | sequencer/automations/automations.go | 0.591133 | 0.462594 | automations.go | starcoder |
package assert
import (
"fmt"
"reflect"
"testing"
)
// F represents a testing function.
type F func(testing.TB)
// Nop does nothing.
func Nop(tb testing.TB) {}
// Eval runs the given function and returns a Nop.
func Eval(f func()) F { f(); return Nop }
// C creates a test case for the given name and testing function.
func C(name string, tfs ...F) F {
return func(tb testing.TB) {
if h, ok := tb.(interface{ Helper() }); ok {
h.Helper()
}
switch v := tb.(type) {
case *testing.T:
v.Run(name, func(t *testing.T) { t.Helper(); Apply(t, tfs...) })
case *testing.B:
v.Run(name, func(b *testing.B) { b.Helper(); Apply(b, tfs...) })
default:
panic(fmt.Errorf("%T is not *testing.T nor *testing.B", v))
}
}
}
// Apply the given testing.TB object to testing functions as a helper.
func Apply(tb testing.TB, tfs ...F) {
if h, ok := tb.(interface{ Helper() }); ok {
h.Helper()
}
for _, tf := range tfs {
tf(tb)
}
}
// All combines the given testing functions into a single testing function.
func All(tfs ...F) F {
return func(tb testing.TB) {
if h, ok := tb.(interface{ Helper() }); ok {
h.Helper()
}
for _, tf := range tfs {
tf(tb)
}
}
}
// True expects the given condition to be true.
func True(cond bool) F {
return func(tb testing.TB) {
if h, ok := tb.(interface{ Helper() }); ok {
h.Helper()
}
if !cond {
tb.Fatal("expected true")
}
}
}
// False expects the given condition to be false.
func False(cond bool) F {
return func(tb testing.TB) {
if h, ok := tb.(interface{ Helper() }); ok {
h.Helper()
}
if cond {
tb.Fatal("expected false")
}
}
}
// NoError expects the given error to be nil.
func NoError(err error) F {
return func(tb testing.TB) {
if h, ok := tb.(interface{ Helper() }); ok {
h.Helper()
}
if err != nil {
tb.Fatalf("\nunexpected error: %s", err.Error())
}
}
}
// IsError expects the given error to be set.
func IsError(err error) F {
return func(tb testing.TB) {
if h, ok := tb.(interface{ Helper() }); ok {
h.Helper()
}
if err == nil {
tb.Fatal("expected error")
}
}
}
// Equal expects the given values to be equal.
func Equal(v, e interface{}) F {
return func(tb testing.TB) {
if h, ok := tb.(interface{ Helper() }); ok {
h.Helper()
}
if !reflect.DeepEqual(v, e) {
tb.Fatalf("\nexpected: %#v\n actual: %#v", e, v)
}
}
}
// Panic expects the given function to panic.
func Panic(f func()) F {
return func(tb testing.TB) {
if h, ok := tb.(interface{ Helper() }); ok {
h.Helper()
}
defer func() {
if recover() == nil {
tb.Fatal("expected panic")
}
}()
f()
}
} | assert.go | 0.708717 | 0.51379 | assert.go | starcoder |
package main
import (
"bufio"
"fmt"
"github.com/thompsonlabs/taskmaster"
"github.com/thompsonlabs/taskmaster/pool"
"os"
"strconv"
"time"
)
func main() {
var taskpool = buildTaskPool()
go launchTaskPool(30, &taskpool)
readUserInput(taskpool)
}
func buildTaskPool() pool.TaskPool {
/**
Here we build a task pool according to our requirements
for the purposes of this example we elect to build an
ElasticTaskPool which allows us to specify as parameters:
1)An inactivity timeout value (in milliseconds) for the worker threads in the pool.
2)The minimum number of core threads that must be present in the pool these threads
WILL NOT be removed even if their respective inactivity timeouts are reached.
The Builder will return the pool as an abstract TaskPool type irrespetive of the specific
TaskPool implementation you choose to build. (see the docs for more info)
*/
var taskpool = taskmaster.
Builder().
NewElasticTaskPool(60000, 5).
SetMaxQueueCount(50).
SetMaxWorkerCount(20).
SetCustomErrorFunction(TestCustomErrorFunction).
Build()
return taskpool
}
func launchTaskPool(numberOfTestTasksToLaunch int, taskpool *pool.TaskPool) {
fmt.Println("Launching " + strconv.Itoa(numberOfTestTasksToLaunch) + " Test Tasks in the Taskpool... ")
(*taskpool).StartUp()
for i := 0; i < numberOfTestTasksToLaunch; i++ {
(*taskpool).SubmitTask(NewTestTask(i))
}
//wait the specified time on the pool; a wait value of 0 may be entered to wait indefinitely
(*taskpool).Wait(30000)
fmt.Println("Pool has been successfully shutdown.")
}
func readUserInput(pool pool.TaskPool) {
/**
Here, for the purposes of the test we just read in commands from the
standard input (i.e command line) and execute them against the launched
pool instance. The supported commands are as follows:
1)quit - quit the example program which implicitly shutsdown the taskpool.
2)new - submits a new test task to the taskpool.
3)stats - displays the current pool stats: queued tasks,idle workers and active workers.
4)shutdown - shuts down the launched pool. "quit" will still need to be typed to exit.
5)new10 - Adds 10 new test tasks to the taskpool.
*/
scanner := bufio.NewScanner(os.Stdin)
for scanner.Scan() {
readText := scanner.Text()
if readText == "quit" {
break
} else if readText == "new" {
pool.SubmitTask(NewTestTask(110))
} else if readText == "stats" {
fmt.Println("Queue count: " + strconv.Itoa(pool.GetQueueCount()))
fmt.Println("Idle worker count: " + strconv.Itoa(pool.IdleWorkersCount()))
fmt.Println("Active worker count: " + strconv.Itoa(pool.ActiveWorkersCount()))
} else if readText == "shutdown" {
pool.ShutDown()
} else if readText == "new10" {
for i := 0; i < 10; i++ {
pool.SubmitTask(NewTestTask(110))
}
}
fmt.Println(scanner.Text())
}
}
//TestTask - A test task created for the purposes of this example. The task
// implements the requisite "Executable" interface (containing a single Execute() function)
// and simply print a string to the console on execution.
type TestTask struct {
exeIdx int
}
//NewTestTask Creates a and returns a new TestTask instance.
func NewTestTask(anIndex int) *TestTask {
return &TestTask{
exeIdx: anIndex}
}
//Execute - Overrriden from the Executable interface; here is where the operation
// the TaskPool is required to run should be defined.
func (tt *TestTask) Execute() {
//to test the pools panic recovery
if tt.exeIdx == 7 {
panic("7 Index is not allowed.")
}
//sleep to simulate some time taken to complete the task
time.Sleep(time.Millisecond * 3000)
//print success status to the console.
fmt.Println("Task: " + strconv.Itoa(tt.exeIdx) + " Successfully executed")
}
//TestCustomErrorFunction - A test custom error function
func TestCustomErrorFunction(panicError interface{}) {
fmt.Println("Error log from TestCustomErrorFunction: ", panicError)
} | example/taskpool-main.go | 0.520496 | 0.422386 | taskpool-main.go | starcoder |
package main
import (
"bytes"
"fmt"
"math/rand"
"time"
)
type maze struct {
c2 [][]byte // cells by row
h2 [][]byte // horizontal walls by row (ignore first row)
v2 [][]byte // vertical walls by row (ignore first of each column)
}
func newMaze(rows, cols int) *maze {
c := make([]byte, rows*cols) // all cells
h := bytes.Repeat([]byte{'-'}, rows*cols) // all horizontal walls
v := bytes.Repeat([]byte{'|'}, rows*cols) // all vertical walls
c2 := make([][]byte, rows) // cells by row
h2 := make([][]byte, rows) // horizontal walls by row
v2 := make([][]byte, rows) // vertical walls by row
for i := range h2 {
c2[i] = c[i*cols : (i+1)*cols]
h2[i] = h[i*cols : (i+1)*cols]
v2[i] = v[i*cols : (i+1)*cols]
}
return &maze{c2, h2, v2}
}
func (m *maze) String() string {
hWall := []byte("+---")
hOpen := []byte("+ ")
vWall := []byte("| ")
vOpen := []byte(" ")
rightCorner := []byte("+\n")
rightWall := []byte("|\n")
var b []byte
for r, hw := range m.h2 {
for _, h := range hw {
if h == '-' || r == 0 {
b = append(b, hWall...)
} else {
b = append(b, hOpen...)
if h != '-' && h != 0 {
b[len(b)-2] = h
}
}
}
b = append(b, rightCorner...)
for c, vw := range m.v2[r] {
if vw == '|' || c == 0 {
b = append(b, vWall...)
} else {
b = append(b, vOpen...)
if vw != '|' && vw != 0 {
b[len(b)-4] = vw
}
}
if m.c2[r][c] != 0 {
b[len(b)-2] = m.c2[r][c]
}
}
b = append(b, rightWall...)
}
for _ = range m.h2[0] {
b = append(b, hWall...)
}
b = append(b, rightCorner...)
return string(b)
}
func (m *maze) gen() {
m.g2(rand.Intn(len(m.c2)), rand.Intn(len(m.c2[0])))
}
const (
up = iota
dn
rt
lf
)
func (m *maze) g2(r, c int) {
m.c2[r][c] = ' '
for _, dir := range rand.Perm(4) {
switch dir {
case up:
if r > 0 && m.c2[r-1][c] == 0 {
m.h2[r][c] = 0
m.g2(r-1, c)
}
case lf:
if c > 0 && m.c2[r][c-1] == 0 {
m.v2[r][c] = 0
m.g2(r, c-1)
}
case dn:
if r < len(m.c2)-1 && m.c2[r+1][c] == 0 {
m.h2[r+1][c] = 0
m.g2(r+1, c)
}
case rt:
if c < len(m.c2[0])-1 && m.c2[r][c+1] == 0 {
m.v2[r][c+1] = 0
m.g2(r, c+1)
}
}
}
}
func main() {
rand.Seed(time.Now().UnixNano())
const height = 4
const width = 7
m := newMaze(height, width)
m.gen()
m.solve(
rand.Intn(height), rand.Intn(width),
rand.Intn(height), rand.Intn(width))
fmt.Print(m)
}
func (m *maze) solve(ra, ca, rz, cz int) {
var rSolve func(ra, ca, dir int) bool
rSolve = func(r, c, dir int) bool {
if r == rz && c == cz {
m.c2[r][c] = 'F'
return true
}
if dir != dn && m.h2[r][c] == 0 {
if rSolve(r-1, c, up) {
m.c2[r][c] = '^'
m.h2[r][c] = '^'
return true
}
}
if dir != up && r+1 < len(m.h2) && m.h2[r+1][c] == 0 {
if rSolve(r+1, c, dn) {
m.c2[r][c] = 'v'
m.h2[r+1][c] = 'v'
return true
}
}
if dir != lf && c+1 < len(m.v2[0]) && m.v2[r][c+1] == 0 {
if rSolve(r, c+1, rt) {
m.c2[r][c] = '>'
m.v2[r][c+1] = '>'
return true
}
}
if dir != rt && m.v2[r][c] == 0 {
if rSolve(r, c-1, lf) {
m.c2[r][c] = '<'
m.v2[r][c] = '<'
return true
}
}
return false
}
rSolve(ra, ca, -1)
m.c2[ra][ca] = 'S'
} | lang/Go/maze-solving.go | 0.567457 | 0.414247 | maze-solving.go | starcoder |
package entities
import "github.com/edanko/dxf-go/core"
// Vertex Entity representation
type Vertex struct {
BaseEntity
Location core.Point
StartingWidth float64
EndWidth float64
Bulge float64
CreatedByCurveFitting bool
CurveFitTangentDefined bool
SplineVertex bool
SplineFrameCtrlPoint bool
Is3dPolylineVertex bool
Is3dPolylineMesh bool
IsPolyfaceMeshVertex bool
CurveFitTangentDirection float64
Id int
}
// Equals tests equality against another Vertex.
func (c Vertex) Equals(other core.DxfElement) bool {
if otherVertex, ok := other.(*Vertex); ok {
return c.BaseEntity.Equals(otherVertex.BaseEntity) &&
c.Location.Equals(otherVertex.Location) &&
core.FloatEquals(c.StartingWidth, otherVertex.StartingWidth) &&
core.FloatEquals(c.EndWidth, otherVertex.EndWidth) &&
core.FloatEquals(c.Bulge, otherVertex.Bulge) &&
c.CreatedByCurveFitting == otherVertex.CreatedByCurveFitting &&
c.CurveFitTangentDefined == otherVertex.CurveFitTangentDefined &&
c.SplineVertex == otherVertex.SplineVertex &&
c.SplineFrameCtrlPoint == otherVertex.SplineFrameCtrlPoint &&
c.Is3dPolylineVertex == otherVertex.Is3dPolylineVertex &&
c.Is3dPolylineMesh == otherVertex.Is3dPolylineMesh &&
c.IsPolyfaceMeshVertex == otherVertex.IsPolyfaceMeshVertex &&
core.FloatEquals(c.CurveFitTangentDirection,
otherVertex.CurveFitTangentDirection) &&
c.Id == otherVertex.Id
}
return false
}
const extraVertexCurveFittingBit = 0x1
const curveFitTangentDefinedBit = 0x2
const splineVertexCreatedBit = 0x8
const splineFrameCtrlPointBit = 0x10
const polylineVertex3dBit = 0x20
const polygonMesh3dBit = 0x40
const polyfaceMeshVertexBit = 0x80
// NewVertex builds a new Vertex from a slice of Tags.
func NewVertex(tags core.TagSlice) (*Vertex, error) {
vertex := new(Vertex)
vertex.InitBaseEntityParser()
vertex.Update(map[int]core.TypeParser{
10: core.NewFloatTypeParserToVar(&vertex.Location.X),
20: core.NewFloatTypeParserToVar(&vertex.Location.Y),
30: core.NewFloatTypeParserToVar(&vertex.Location.Z),
40: core.NewFloatTypeParserToVar(&vertex.StartingWidth),
41: core.NewFloatTypeParserToVar(&vertex.EndWidth),
42: core.NewFloatTypeParserToVar(&vertex.Bulge),
50: core.NewFloatTypeParserToVar(&vertex.CurveFitTangentDirection),
70: core.NewIntTypeParser(func(flags int) {
vertex.CreatedByCurveFitting = flags&extraVertexCurveFittingBit != 0
vertex.CurveFitTangentDefined = flags&curveFitTangentDefinedBit != 0
vertex.SplineVertex = flags&splineVertexCreatedBit != 0
vertex.SplineFrameCtrlPoint = flags&splineFrameCtrlPointBit != 0
vertex.Is3dPolylineVertex = flags&polylineVertex3dBit != 0
vertex.Is3dPolylineMesh = flags&polygonMesh3dBit != 0
vertex.IsPolyfaceMeshVertex = flags&polyfaceMeshVertexBit != 0
}),
91: core.NewIntTypeParserToVar(&vertex.Id),
})
err := vertex.Parse(tags)
return vertex, err
}
// VertexSlice a slice of Vertex objects.
type VertexSlice []*Vertex
// Equals Compares two Vertices for equality.
func (v VertexSlice) Equals(other VertexSlice) bool {
if len(v) != len(other) {
return false
}
for i, vertex := range v {
otherVertex := other[i]
if !vertex.Equals(otherVertex) {
return false
}
}
return true
} | entities/vertex.go | 0.682785 | 0.471953 | vertex.go | starcoder |
package barchart
import (
"math"
)
// XY point in a 2D plot.
type XY struct{ X, Y int }
// XYf point in a 2D plot.
type XYf struct {
X float64
Y, ScaledY *float64
}
// BarChart of XY points.
type BarChart struct {
MinX, MaxX, MinY, MaxY int
xy []XY
}
// Add a XY point to the plot.
func (p *BarChart) Add(x, y int) { p.xy = append(p.xy, XY{x, y}) }
// XYs aggregates the XY values together in a dense form.
// Empty slots between two Xs are left nil to represent
// the absence of data.
func (p *BarChart) XYs() []*XY {
xys := make([]*XY, p.MaxX-p.MinX)
for _, xy := range p.xy {
slot := xys[xy.X-p.MinX]
if slot == nil {
slot = &XY{xy.X, xy.Y}
} else {
slot.Y += xy.Y
}
}
return xys
}
// ScaleXYs aggregates the XY values together in a dense form.
// Empty slots between two Xs are left nil to represent
// the absence of data. The values are scaled using s.
func (p *BarChart) ScaleXYs(xWidth int, s ScaleFunc) []XYf {
diff := p.MaxX - p.MinX
scaleX := float64(diff) / float64(xWidth-1)
buckets := make([]XYf, xWidth)
for i := range buckets {
buckets[i] = XYf{
X: float64(i)*scaleX + float64(p.MinX),
Y: nil,
ScaledY: nil,
}
}
miny, maxy := float64(p.xy[0].Y), float64(p.xy[0].Y)
for _, val := range p.xy {
minx := float64(p.MinX)
xdiff := float64(val.X) - minx
bi := int(xdiff / scaleX)
slot := buckets[bi]
if slot.Y == nil {
slot.Y = new(float64)
slot.ScaledY = new(float64)
}
y := float64(val.Y)
*slot.Y += y
miny = math.Min(*slot.Y, miny)
maxy = math.Max(*slot.Y, maxy)
buckets[bi] = slot
}
for _, val := range buckets {
if val.Y == nil {
continue
}
*val.ScaledY = s(miny, maxy, *val.Y)
}
return buckets
}
// BarChartXYs builds a BarChart using pairwise X and Y []float64.
func BarChartXYs(xys [][2]int) BarChart {
if len(xys) == 0 {
return BarChart{}
}
minx, maxx, miny, maxy := xys[0][0], xys[0][0], xys[0][1], xys[0][1]
plot := BarChart{
xy: make([]XY, len(xys)),
}
var x, y int
for i := range plot.xy {
x = xys[i][0]
y = xys[i][1]
minx = imin(x, minx)
maxx = imax(x, maxx)
miny = imin(y, miny)
maxy = imax(y, maxy)
plot.xy[i] = XY{x, y}
}
plot.MinX = minx
plot.MaxX = maxx
plot.MinY = miny
plot.MaxY = maxy
return plot
}
// ScaleFunc is the type to implement to scale an histogram.
type ScaleFunc func(min, max, value float64) float64
// Linear builds a ScaleFunc that will linearly scale the values of
// an histogram so that they do not exceed width.
func Linear(width int) ScaleFunc {
return func(min, max, value float64) float64 {
diff := max - min
offset := value - min
ratio := offset / diff
return ratio * float64(width)
}
}
func imin(a, b int) int {
if a < b {
return a
}
return b
}
func imax(a, b int) int {
if a > b {
return a
}
return b
} | barchart/barchart.go | 0.808219 | 0.594198 | barchart.go | starcoder |
package sunspec
import (
"encoding/binary"
"errors"
"fmt"
"math"
"net"
)
// Scalable defines the behavior of a point type which may be scaled using the definition:
// ScaledValue = PointValue * (10^ScaleFactor)
type Scalable interface {
	Scaled() bool
	Factor() int16
}

// scale is internally used to store a scale factor.
//
// The field f may hold one of:
//   - nil:    no scale factor attached
//   - int16:  a literal factor
//   - Sunssf: an already resolved scale-factor point
//   - string: the name of a scale-factor point, resolved lazily by factor()
type scale struct {
	f interface{}
}

// Scaled specifies whether the point is scaled using an optional factor.
func (s *scale) Scaled() bool {
	return s.f != nil
}

// factor returns the scale value for the point p.
//
// Depending on how the factor was declared it is either returned directly
// (int16), read from an already resolved scale-factor point (Sunssf), or
// looked up by name in p's chain of enclosing groups (string). A successful
// string lookup is cached back into s.f so subsequent calls skip the search.
// If no factor can be resolved, 0 is returned (10^0 == 1, i.e. unscaled).
func (s *scale) factor(p Point) int16 {
	switch sf := s.f.(type) {
	case int16:
		return sf
	case Sunssf:
		return sf.Get()
	case string:
		// Walk up the enclosing groups and search their points for one
		// whose name matches the configured factor name. The loop variable
		// is deliberately not named p to avoid shadowing the parameter.
		for g := p.Origin(); g != nil; g = g.Origin() {
			for _, candidate := range g.Points() {
				if candidate.Name() != sf {
					continue
				}
				if ssf, ok := candidate.(Sunssf); ok {
					s.f = ssf // cache the resolved point
					return ssf.Get()
				}
			}
		}
	}
	return 0
}
// ****************************************************************************
// Int16 represents the sunspec type int16.
//
// The value -0x8000 is reserved by the SunSpec specification as the
// "not implemented" sentinel.
type Int16 interface {
	// Point defines the generic behavior all sunspec types have in common.
	Point
	// Scalable defines the behavior of a point type which may be scaled.
	Scalable
	// Set sets the point's underlying value.
	Set(v int16) error
	// Get returns the point's underlying value.
	Get() int16
	// Value returns the scaled value as defined by the specification.
	Value() float64
}

type tInt16 struct {
	point
	data int16
	scale
}

// compile-time interface check
var _ Int16 = (*tInt16)(nil)

// Valid specifies whether the underlying value is implemented by the device.
func (t *tInt16) Valid() bool { return t.data != -0x8000 }

// String formats the point's value as string.
func (t *tInt16) String() string { return fmt.Sprintf("%d", t.data) }

// Quantity returns the number of modbus registers required to store the underlying value.
func (t *tInt16) Quantity() uint16 { return 1 }

// encode puts the point's value into a buffer in big-endian register order.
func (t *tInt16) encode(buf []byte) error {
	binary.BigEndian.PutUint16(buf, uint16(t.data))
	return nil
}

// decode sets the point's value from a buffer.
func (t *tInt16) decode(buf []byte) error {
	raw := binary.BigEndian.Uint16(buf)
	return t.Set(int16(raw))
}

// Set sets the point's underlying value.
func (t *tInt16) Set(v int16) error {
	t.data = v
	return nil
}

// Get returns the point's underlying value.
func (t *tInt16) Get() int16 { return t.data }

// Factor returns the scale value of the point.
func (t *tInt16) Factor() int16 { return t.factor(t) }

// Value returns the scaled value as defined by the specification:
// Value = data * 10^Factor.
func (t *tInt16) Value() float64 {
	sf := math.Pow10(int(t.Factor()))
	return sf * float64(t.data)
}
// ****************************************************************************
// Int32 represents the sunspec type int32.
//
// The value -0x80000000 is reserved by the SunSpec specification as the
// "not implemented" sentinel.
type Int32 interface {
	// Point defines the generic behavior all sunspec types have in common.
	Point
	// Scalable defines the behavior of a point type which may be scaled using the definition.
	Scalable
	// Set sets the point's underlying value.
	Set(v int32) error
	// Get returns the point's underlying value.
	Get() int32
	// Value returns the scaled value as defined by the specification.
	Value() float64
}

type tInt32 struct {
	point
	data int32
	scale
}

// Compile-time interface check; written without the redundant parentheses
// so it matches the sibling checks for the other types in this file.
var _ Int32 = (*tInt32)(nil)

// Valid specifies whether the underlying value is implemented by the device.
func (t *tInt32) Valid() bool { return t.Get() != -0x80000000 }

// String formats the point's value as string.
func (t *tInt32) String() string { return fmt.Sprintf("%v", t.Get()) }

// Quantity returns the number of modbus registers required to store the underlying value.
func (t *tInt32) Quantity() uint16 { return 2 }

// encode puts the point's value into a buffer.
func (t *tInt32) encode(buf []byte) error {
	binary.BigEndian.PutUint32(buf, uint32(t.Get()))
	return nil
}

// decode sets the point's value from a buffer.
func (t *tInt32) decode(buf []byte) error {
	return t.Set(int32(binary.BigEndian.Uint32(buf)))
}

// Set sets the point's underlying value.
func (t *tInt32) Set(v int32) error {
	t.data = v
	return nil
}

// Get returns the point's underlying value.
func (t *tInt32) Get() int32 { return t.data }

// Factor returns the scale value of the point.
func (t *tInt32) Factor() int16 { return t.factor(t) }

// Value returns the scaled value as defined by the specification:
// Value = data * 10^Factor.
func (t *tInt32) Value() float64 { return float64(t.Get()) * math.Pow10(int(t.Factor())) }
// ****************************************************************************
// Int64 represents the sunspec type int64.
//
// The value -0x8000000000000000 is reserved by the SunSpec specification
// as the "not implemented" sentinel.
type Int64 interface {
	// Point defines the generic behavior all sunspec types have in common.
	Point
	// Scalable defines the behavior of a point type which may be scaled.
	Scalable
	// Set sets the point's underlying value.
	Set(v int64) error
	// Get returns the point's underlying value.
	Get() int64
	// Value returns the scaled value as defined by the specification.
	Value() float64
}

type tInt64 struct {
	point
	data int64
	scale
}

// compile-time interface check
var _ Int64 = (*tInt64)(nil)

// Valid specifies whether the underlying value is implemented by the device.
func (t *tInt64) Valid() bool { return t.data != -0x8000000000000000 }

// String formats the point's value as string.
func (t *tInt64) String() string { return fmt.Sprintf("%d", t.data) }

// Quantity returns the number of modbus registers required to store the underlying value.
func (t *tInt64) Quantity() uint16 { return 4 }

// encode puts the point's value into a buffer in big-endian register order.
func (t *tInt64) encode(buf []byte) error {
	binary.BigEndian.PutUint64(buf, uint64(t.data))
	return nil
}

// decode sets the point's value from a buffer.
func (t *tInt64) decode(buf []byte) error {
	raw := binary.BigEndian.Uint64(buf)
	return t.Set(int64(raw))
}

// Set sets the point's underlying value.
func (t *tInt64) Set(v int64) error {
	t.data = v
	return nil
}

// Get returns the point's underlying value.
func (t *tInt64) Get() int64 { return t.data }

// Factor returns the scale value of the point.
func (t *tInt64) Factor() int16 { return t.factor(t) }

// Value returns the scaled value as defined by the specification:
// Value = data * 10^Factor.
func (t *tInt64) Value() float64 {
	sf := math.Pow10(int(t.Factor()))
	return sf * float64(t.data)
}
// ****************************************************************************
// Pad represents the sunspec type pad.
//
// A pad point only aligns a model to an even register boundary; it never
// carries device data.
type Pad interface {
	// Point defines the generic behavior all sunspec types have in common.
	Point
}

type tPad struct {
	point
}

// compile-time interface check
var _ Pad = (*tPad)(nil)

// Valid specifies whether the underlying value is implemented by the device.
// Padding never carries a value, so it is never valid.
func (t *tPad) Valid() bool { return false }

// String formats the point's value as string; padding has no value to show.
func (t *tPad) String() string { return "" }

// Quantity returns the number of modbus registers required to store the underlying value.
func (t *tPad) Quantity() uint16 { return 1 }

// encode writes the fixed pad pattern 0x8000 into the buffer.
func (t *tPad) encode(buf []byte) error {
	const pad = 0x8000
	binary.BigEndian.PutUint16(buf, pad)
	return nil
}

// decode is a no-op; pad registers carry no information.
func (t *tPad) decode(buf []byte) error { return nil }
// ****************************************************************************
// Sunssf represents the sunspec type sunssf (a scale factor).
//
// The value -0x8000 is reserved by the SunSpec specification as the
// "not implemented" sentinel; all other values must lie in [-10, 10].
type Sunssf interface {
	// Point defines the generic behavior all sunspec types have in common.
	Point
	// Get returns the point's underlying value.
	Get() int16
}

type tSunssf struct {
	point
	data int16
}

// compile-time interface check
var _ Sunssf = (*tSunssf)(nil)

// Valid specifies whether the underlying value is implemented by the device.
func (t *tSunssf) Valid() bool { return t.data != -0x8000 }

// String formats the point's value as string.
func (t *tSunssf) String() string { return fmt.Sprintf("%d", t.data) }

// Quantity returns the number of modbus registers required to store the underlying value.
func (t *tSunssf) Quantity() uint16 { return 1 }

// encode puts the point's value into a buffer in big-endian register order.
func (t *tSunssf) encode(buf []byte) error {
	binary.BigEndian.PutUint16(buf, uint16(t.data))
	return nil
}

// decode sets the point's value from a buffer.
func (t *tSunssf) decode(buf []byte) error {
	raw := binary.BigEndian.Uint16(buf)
	return t.set(int16(raw))
}

// set stores the point's underlying value after validating it.
// The sentinel -0x8000 is always accepted; any other value outside
// [-10, 10] is rejected.
func (t *tSunssf) set(v int16) error {
	switch {
	case v == -0x8000:
		// "not implemented" sentinel: always allowed
	case v < -10 || v > 10:
		return errors.New("sunspec: value out of boundary")
	}
	t.data = v
	return nil
}

// Get returns the point's underlying value.
func (t *tSunssf) Get() int16 { return t.data }
// ****************************************************************************
// Uint16 represents the sunspec type uint16.
//
// The value 0xFFFF is reserved by the SunSpec specification as the
// "not implemented" sentinel.
type Uint16 interface {
	// Point defines the generic behavior all sunspec types have in common.
	Point
	// Scalable defines the behavior of a point type which may be scaled.
	Scalable
	// Set sets the point's underlying value.
	Set(v uint16) error
	// Get returns the point's underlying value.
	Get() uint16
	// Value returns the scaled value as defined by the specification.
	Value() float64
}

type tUint16 struct {
	point
	data uint16
	scale
}

// compile-time interface check
var _ Uint16 = (*tUint16)(nil)

// Valid specifies whether the underlying value is implemented by the device.
func (t *tUint16) Valid() bool { return t.data != 0xFFFF }

// String formats the point's value as string.
func (t *tUint16) String() string { return fmt.Sprintf("%d", t.data) }

// Quantity returns the number of modbus registers required to store the underlying value.
func (t *tUint16) Quantity() uint16 { return 1 }

// encode puts the point's value into a buffer in big-endian register order.
func (t *tUint16) encode(buf []byte) error {
	binary.BigEndian.PutUint16(buf, t.data)
	return nil
}

// decode sets the point's value from a buffer.
func (t *tUint16) decode(buf []byte) error {
	raw := binary.BigEndian.Uint16(buf)
	return t.Set(raw)
}

// Set sets the point's underlying value.
func (t *tUint16) Set(v uint16) error {
	t.data = v
	return nil
}

// Get returns the point's underlying value.
func (t *tUint16) Get() uint16 { return t.data }

// Factor returns the scale value of the point.
func (t *tUint16) Factor() int16 { return t.factor(t) }

// Value returns the scaled value as defined by the specification:
// Value = data * 10^Factor.
func (t *tUint16) Value() float64 {
	sf := math.Pow10(int(t.Factor()))
	return sf * float64(t.data)
}
// ****************************************************************************
// Uint32 represents the sunspec type uint32.
type Uint32 interface {
// Point defines the generic behavior all sunspec types have in common.
Point
// Scalable defines the behavior of a point type which may be scaled using the definition.
Scalable
// Set sets the point´s underlying value.
Set(v uint32) error
// Get returns the point´s underlying value.
Get() uint32
// Value returns the scaled value as defined by the specification.
Value() float64
}
type tUint32 struct {
point
data uint32
scale
}
var _ Uint32 = (*tUint32)(nil)
// Valid specifies whether the underlying value is implemented by the device.
func (t *tUint32) Valid() bool { return t.Get() != 0xFFFFFFFF }
// String formats the point´s value as string.
func (t *tUint32) String() string { return fmt.Sprintf("%v", t.Get()) }
// Quantity returns the number of modbus registers required to store the underlying value.
func (t *tUint32) Quantity() uint16 { return 2 }
// encode puts the point´s value into a buffer.
func (t *tUint32) encode(buf []byte) error {
binary.BigEndian.PutUint32(buf, t.Get())
return nil
}
// decode sets the point´s value from a buffer.
func (t *tUint32) decode(buf []byte) error {
return t.Set(binary.BigEndian.Uint32(buf))
}
// Set sets the point´s underlying value.
func (t *tUint32) Set(v uint32) error {
t.data = v
return nil
}
// Get returns the point´s underlying value.
func (t *tUint32) Get() uint32 { return t.data }
// Factor returns the scale value of the point.
func (t *tUint32) Factor() int16 { return t.factor(t) }
// Value returns the scaled value as defined by the specification.
func (t *tUint32) Value() float64 { return float64(t.Get()) * math.Pow10(int(t.Factor())) }
// ****************************************************************************
// Uint64 represents the sunspec type uint64.
type Uint64 interface {
// Point defines the generic behavior all sunspec types have in common.
Point
// Scalable defines the behavior of a point type which may be scaled using the definition.
Scalable
// Set sets the point´s underlying value.
Set(v uint64) error
// Get returns the point´s underlying value.
Get() uint64
// Value returns the scaled value as defined by the specification.
Value() float64
}
type tUint64 struct {
point
data uint64
scale
}
var _ Uint64 = (*tUint64)(nil)
// Valid specifies whether the underlying value is implemented by the device.
func (t *tUint64) Valid() bool { return t.Get() != 0xFFFFFFFFFFFFFFFF }
// String formats the point´s value as string.
func (t *tUint64) String() string { return fmt.Sprintf("%v", t.Get()) }
// Quantity returns the number of modbus registers required to store the underlying value.
func (t *tUint64) Quantity() uint16 { return 4 }
// encode puts the point´s value into a buffer.
func (t *tUint64) encode(buf []byte) error {
binary.BigEndian.PutUint64(buf, t.Get())
return nil
}
// decode sets the point´s value from a buffer.
func (t *tUint64) decode(buf []byte) error {
return t.Set(binary.BigEndian.Uint64(buf))
}
// Set sets the point´s underlying value.
func (t *tUint64) Set(v uint64) error {
t.data = v
return nil
}
// Get returns the point´s underlying value.
func (t *tUint64) Get() uint64 { return t.data }
// Factor returns the scale value of the point.
func (t *tUint64) Factor() int16 { return t.factor(t) }
// Value returns the scaled value as defined by the specification.
func (t *tUint64) Value() float64 { return float64(t.Get()) * math.Pow10(int(t.Factor())) }
// ****************************************************************************
// Acc16 represents the sunspec type acc16.
type Acc16 interface {
// Point defines the generic behavior all sunspec types have in common.
Point
// Scalable defines the behavior of a point type which may be scaled using the definition.
Scalable
// Set sets the point´s underlying value.
Set(v uint16) error
// Get returns the point´s underlying value.
Get() uint16
}
type tAcc16 struct {
point
data uint16
scale
}
var _ Acc16 = (*tAcc16)(nil)
// Valid specifies whether the underlying value is implemented by the device.
func (t *tAcc16) Valid() bool { return t.Get() != 0 }
// String formats the point´s value as string.
func (t *tAcc16) String() string { return fmt.Sprintf("%v", t.Get()) }
// Quantity returns the number of modbus registers required to store the underlying value.
func (t *tAcc16) Quantity() uint16 { return 1 }
// encode puts the point´s value into a buffer.
func (t *tAcc16) encode(buf []byte) error {
binary.BigEndian.PutUint16(buf, t.Get())
return nil
}
// decode sets the point´s value from a buffer.
func (t *tAcc16) decode(buf []byte) error {
return t.Set(binary.BigEndian.Uint16(buf))
}
// Set sets the point´s underlying value.
func (t *tAcc16) Set(v uint16) error {
t.data = v
return nil
}
// Get returns the point´s underlying value.
func (t *tAcc16) Get() uint16 { return t.data }
// Factor returns the scale value of the point.
func (t *tAcc16) Factor() int16 { return t.factor(t) }
// ****************************************************************************
// Acc32 represents the sunspec type acc32.
type Acc32 interface {
// Point defines the generic behavior all sunspec types have in common.
Point
// Scalable defines the behavior of a point type which may be scaled using the definition.
Scalable
// Set sets the point´s underlying value.
Set(v uint32) error
// Get returns the point´s underlying value.
Get() uint32
}
type tAcc32 struct {
point
data uint32
scale
}
var _ (Acc32) = (*tAcc32)(nil)
// Valid specifies whether the underlying value is implemented by the device.
func (t *tAcc32) Valid() bool { return t.Get() != 0 }
// String formats the point´s value as string.
func (t *tAcc32) String() string { return fmt.Sprintf("%v", t.Get()) }
// Quantity returns the number of modbus registers required to store the underlying value.
func (t *tAcc32) Quantity() uint16 { return 2 }
// encode puts the point´s value into a buffer.
func (t *tAcc32) encode(buf []byte) error {
binary.BigEndian.PutUint32(buf, t.Get())
return nil
}
// decode sets the point´s value from a buffer.
func (t *tAcc32) decode(buf []byte) error {
return t.Set(binary.BigEndian.Uint32(buf))
}
// Set sets the point´s underlying value.
func (t *tAcc32) Set(v uint32) error {
t.data = v
return nil
}
// Get returns the point´s underlying value.
func (t *tAcc32) Get() uint32 { return t.data }
// Factor returns the scale value of the point.
func (t *tAcc32) Factor() int16 { return t.factor(t) }
// ****************************************************************************
// Acc64 represents the sunspec type acc64.
type Acc64 interface {
// Point defines the generic behavior all sunspec types have in common.
Point
// Scalable defines the behavior of a point type which may be scaled using the definition.
Scalable
// Set sets the point´s underlying value.
Set(v uint64) error
// Get returns the point´s underlying value.
Get() uint64
}
type tAcc64 struct {
point
data uint64
scale
}
var _ Acc64 = (*tAcc64)(nil)
// Valid specifies whether the underlying value is implemented by the device.
func (t *tAcc64) Valid() bool { return t.Get() != 0 }
// String formats the point´s value as string.
func (t *tAcc64) String() string { return fmt.Sprintf("%v", t.Get()) }
// Quantity returns the number of modbus registers required to store the underlying value.
func (t *tAcc64) Quantity() uint16 { return 4 }
// encode puts the point´s value into a buffer.
func (t *tAcc64) encode(buf []byte) error {
binary.BigEndian.PutUint64(buf, t.Get())
return nil
}
// decode sets the point´s value from a buffer.
func (t *tAcc64) decode(buf []byte) error {
return t.Set(binary.BigEndian.Uint64(buf))
}
// Set sets the point´s underlying value.
func (t *tAcc64) Set(v uint64) error {
t.data = v
return nil
}
// Get returns the point´s underlying value.
func (t *tAcc64) Get() uint64 { return t.data }
// Factor returns the scale value of the point.
func (t *tAcc64) Factor() int16 { return t.factor(t) }
// ****************************************************************************
// Count represents the sunspec type count.
type Count interface {
// Point defines the generic behavior all sunspec types have in common.
Point
// Get returns the point´s underlying value.
Get() uint16
}
type tCount struct {
point
data uint16
}
var _ Count = (*tCount)(nil)
// Valid specifies whether the underlying value is implemented by the device.
func (t *tCount) Valid() bool { return t.Get() != 0 }
// String formats the point´s value as string.
func (t *tCount) String() string { return fmt.Sprintf("%v", t.Get()) }
// Quantity returns the number of modbus registers required to store the underlying value.
func (t *tCount) Quantity() uint16 { return 1 }
// encode puts the point´s value into a buffer.
func (t *tCount) encode(buf []byte) error {
binary.BigEndian.PutUint16(buf, t.Get())
return nil
}
// decode sets the point´s value from a buffer.
func (t *tCount) decode(buf []byte) error {
return t.set(binary.BigEndian.Uint16(buf))
}
// Set sets the point´s underlying value.
func (t *tCount) set(v uint16) error {
t.data = v
return nil
}
// Get returns the point´s underlying value.
func (t *tCount) Get() uint16 { return t.data }
// ****************************************************************************
// Bitfield16 represents the sunspec type bitfield16.
type Bitfield16 interface {
// Point defines the generic behavior all sunspec types have in common.
Point
// Set sets the point´s underlying value.
Set(v uint16) error
// Get returns the point´s underlying value.
Get() uint16
// Flip sets the bit at position pos, starting at 0, to the value of v.
Flip(pos int, v bool) error
// Field returns the individual bit values as bool array.
Field() [16]bool
// States returns all active enumerated states, correlating the bit value to its symbol.
States() []string
}
type tBitfield16 struct {
point
data uint16
symbols Symbols
}
var _ Bitfield16 = (*tBitfield16)(nil)
// Valid specifies whether the underlying value is implemented by the device.
func (t *tBitfield16) Valid() bool { return t.Get() != 0xFFFF }
// String formats the point´s value as string.
func (t *tBitfield16) String() string { return fmt.Sprintf("%v", t.Get()) }
// Quantity returns the number of modbus registers required to store the underlying value.
func (t *tBitfield16) Quantity() uint16 { return 1 }
// encode puts the point´s value into a buffer.
func (t *tBitfield16) encode(buf []byte) error {
binary.BigEndian.PutUint16(buf, t.Get())
return nil
}
// decode sets the point´s value from a buffer.
func (t *tBitfield16) decode(buf []byte) error {
return t.Set(binary.BigEndian.Uint16(buf))
}
// Set sets the point´s underlying value.
func (t *tBitfield16) Set(v uint16) error {
t.data = v
return nil
}
// Get returns the point´s underlying value.
func (t *tBitfield16) Get() uint16 { return t.data }
// Flip sets the bit at position pos, starting at 0, to the value of v.
func (t *tBitfield16) Flip(pos int, v bool) error {
switch {
case pos < 0 || pos > 15:
return errors.New("sunspec: out of bounds bit position ")
case v:
return t.Set(t.Get() | (1 << pos))
}
return t.Set(t.Get() &^ (1 << pos))
}
// Field returns the individual bit values as bool array.
func (t *tBitfield16) Field() (f [16]bool) {
for v, b := t.Get(), 0; b < len(f); b++ {
f[b] = v&(1<<b) != 0
}
return f
}
// States returns all active enumerated states, correlating the bit value to its symbol.
func (t *tBitfield16) States() (s []string) {
if !t.Valid() {
return nil
}
for i, v := range t.Field() {
if v {
s = append(s, t.symbols[uint32(i)].Name())
}
}
return s
}
// ****************************************************************************
// Bitfield32 represents the sunspec type bitfield32.
type Bitfield32 interface {
// Point defines the generic behavior all sunspec types have in common.
Point
// Set sets the point´s underlying value.
Set(v uint32) error
// Get returns the point´s underlying value.
Get() uint32
// Flip sets the bit at position pos, starting at 0, to the value of v.
Flip(pos int, v bool) error
// Field returns the individual bit values as bool array.
Field() [32]bool
// States returns all active enumerated states, correlating the bit value to its symbol.
States() []string
}
type tBitfield32 struct {
point
data uint32
symbols Symbols
}
var _ Bitfield32 = (*tBitfield32)(nil)
// Valid specifies whether the underlying value is implemented by the device.
func (t *tBitfield32) Valid() bool { return t.Get() != 0xFFFFFFFF }
// String formats the point´s value as string.
func (t *tBitfield32) String() string { return fmt.Sprintf("%v", t.Get()) }
// Quantity returns the number of modbus registers required to store the underlying value.
func (t *tBitfield32) Quantity() uint16 { return 2 }
// encode puts the point´s value into a buffer.
func (t *tBitfield32) encode(buf []byte) error {
binary.BigEndian.PutUint32(buf, t.Get())
return nil
}
// decode sets the point´s value from a buffer.
func (t *tBitfield32) decode(buf []byte) error {
return t.Set(binary.BigEndian.Uint32(buf))
}
// Set sets the point´s underlying value.
func (t *tBitfield32) Set(v uint32) error {
t.data = v
return nil
}
// Get returns the point´s underlying value.
func (t *tBitfield32) Get() uint32 { return t.data }
// Flip sets the bit at position pos, starting at 0, to the value of v.
func (t *tBitfield32) Flip(pos int, v bool) error {
switch {
case pos < 0 || pos > 31:
return errors.New("sunspec: out of bounds bit position ")
case v:
return t.Set(t.Get() | (1 << pos))
}
return t.Set(t.Get() &^ (1 << pos))
}
// Field returns the individual bit values as bool array.
func (t *tBitfield32) Field() (f [32]bool) {
for v, b := t.Get(), 0; b < len(f); b++ {
f[b] = v&(1<<b) != 0
}
return f
}
// States returns all active enumerated states, correlating the bit value to its symbol.
func (t *tBitfield32) States() (s []string) {
if !t.Valid() {
return nil
}
for i, v := range t.Field() {
if v {
s = append(s, t.symbols[uint32(i)].Name())
}
}
return s
}
// ****************************************************************************
// Bitfield64 represents the sunspec type bitfield64.
type Bitfield64 interface {
// Point defines the generic behavior all sunspec types have in common.
Point
// Set sets the point´s underlying value.
Set(v uint64) error
// Get returns the point´s underlying value.
Get() uint64
// Flip sets the bit at position pos, starting at 0, to the value of v.
Flip(pos int, v bool) error
// Field returns the individual bit values as bool array.
Field() [64]bool
// States returns all active enumerated states, correlating the bit value to its symbol.
States() []string
}
type tBitfield64 struct {
point
data uint64
symbols Symbols
}
var _ Bitfield64 = (*tBitfield64)(nil)
// Valid specifies whether the underlying value is implemented by the device.
func (t *tBitfield64) Valid() bool { return t.Get() != 0xFFFFFFFFFFFFFFFF }
// String formats the point´s value as string.
func (t *tBitfield64) String() string { return fmt.Sprintf("%v", t.Get()) }
// Quantity returns the number of modbus registers required to store the underlying value.
func (t *tBitfield64) Quantity() uint16 { return 4 }
// encode puts the point´s value into a buffer.
func (t *tBitfield64) encode(buf []byte) error {
binary.BigEndian.PutUint64(buf, t.Get())
return nil
}
// decode sets the point´s value from a buffer.
func (t *tBitfield64) decode(buf []byte) error {
return t.Set(binary.BigEndian.Uint64(buf))
}
// Set sets the point´s underlying value.
func (t *tBitfield64) Set(v uint64) error {
t.data = v
return nil
}
// Get returns the point´s underlying value.
func (t *tBitfield64) Get() uint64 { return t.data }
// Flip sets the bit at position pos, starting at 0, to the value of v.
func (t *tBitfield64) Flip(pos int, v bool) error {
switch {
case pos < 0 || pos > 63:
return errors.New("sunspec: out of bounds bit position ")
case v:
return t.Set(t.Get() | (1 << pos))
}
return t.Set(t.Get() &^ (1 << pos))
}
// Field returns the individual bit values as bool array.
func (t *tBitfield64) Field() (f [64]bool) {
for v, b := t.Get(), 0; b < len(f); b++ {
f[b] = v&(1<<b) != 0
}
return f
}
// States returns all active enumerated states, correlating the bit value to its symbol.
func (t *tBitfield64) States() (s []string) {
if !t.Valid() {
return nil
}
for i, v := range t.Field() {
if v {
s = append(s, t.symbols[uint32(i)].Name())
}
}
return s
}
// ****************************************************************************
// Enum16 represents the sunspec type enum16.
type Enum16 interface {
// Point defines the generic behavior all sunspec types have in common.
Point
// Set sets the point´s underlying value.
Set(v uint16) error
// Get returns the point´s underlying value.
Get() uint16
// State returns the currently active enumerated state.
State() string
}
type tEnum16 struct {
point
data uint16
symbols Symbols
}
var _ Enum16 = (*tEnum16)(nil)
// Valid specifies whether the underlying value is implemented by the device.
func (t *tEnum16) Valid() bool { return t.Get() != 0xFFFF }
// String formats the point´s value as string.
func (t *tEnum16) String() string { return fmt.Sprintf("%v", t.Get()) }
// Quantity returns the number of modbus registers required to store the underlying value.
func (t *tEnum16) Quantity() uint16 { return 1 }
// encode puts the point´s value into a buffer.
func (t *tEnum16) encode(buf []byte) error {
binary.BigEndian.PutUint16(buf, t.Get())
return nil
}
// decode sets the point´s value from a buffer.
func (t *tEnum16) decode(buf []byte) error {
return t.Set(binary.BigEndian.Uint16(buf))
}
// Set sets the point´s underlying value.
func (t *tEnum16) Set(v uint16) error {
t.data = v
return nil
}
// Get returns the point´s underlying value.
func (t *tEnum16) Get() uint16 { return t.data }
// State returns the currently active enumerated state.
func (t *tEnum16) State() string { return t.symbols[uint32(t.Get())].Name() }
// ****************************************************************************
// Enum32 represents the sunspec type enum32.
type Enum32 interface {
// Point defines the generic behavior all sunspec types have in common.
Point
// Set sets the point´s underlying value.
Set(v uint32) error
// Get returns the point´s underlying value.
Get() uint32
// State returns the currently active enumerated state.
State() string
}
type tEnum32 struct {
point
data uint32
symbols Symbols
}
var _ Enum32 = (*tEnum32)(nil)
// Valid specifies whether the underlying value is implemented by the device.
func (t *tEnum32) Valid() bool { return t.Get() != 0xFFFFFFFF }
// String formats the point´s value as string.
func (t *tEnum32) String() string { return fmt.Sprintf("%v", t.Get()) }
// Quantity returns the number of modbus registers required to store the underlying value.
func (t *tEnum32) Quantity() uint16 { return 2 }
// encode puts the point´s value into a buffer.
func (t *tEnum32) encode(buf []byte) error {
binary.BigEndian.PutUint32(buf, t.Get())
return nil
}
// decode sets the point´s value from a buffer.
func (t *tEnum32) decode(buf []byte) error {
return t.Set(binary.BigEndian.Uint32(buf))
}
// Set sets the point´s underlying value.
func (t *tEnum32) Set(v uint32) error {
t.data = v
return nil
}
// Get returns the point´s underlying value.
func (t *tEnum32) Get() uint32 { return t.data }
// State returns the currently active enumerated state.
func (t *tEnum32) State() string { return t.symbols[t.Get()].Name() }
// ****************************************************************************
// String represents the sunspec type string.
type String interface {
// Point defines the generic behavior all sunspec types have in common.
Point
// Set sets the point´s underlying value.
Set(v string) error
// Get returns the point´s underlying value.
Get() string
}
type tString struct {
point
data []byte
}
var _ String = (*tString)(nil)
// Valid specifies whether the underlying value is implemented by the device.
func (t *tString) Valid() bool { return t.Get() != "" }
// String formats the point´s value as string.
func (t *tString) String() string { return t.Get() }
// Quantity returns the number of modbus registers required to store the underlying value.
func (t *tString) Quantity() uint16 { return uint16(cap(t.data) / 2) }
// encode puts the point´s value into a buffer.
func (t *tString) encode(buf []byte) error {
copy(buf, []byte(t.Get()))
return nil
}
// decode sets the point´s value from a buffer.
func (t *tString) decode(buf []byte) error {
return t.Set(string(buf[:2*t.Quantity()]))
}
// Set sets the point´s underlying value.
func (t *tString) Set(v string) error {
t.data = t.data[:cap(t.data)]
copy(t.data, v)
return nil
}
// Get returns the point´s underlying value.
func (t *tString) Get() string { return string(t.data) }
// ****************************************************************************
// Float32 represents the sunspec type float32.
type Float32 interface {
// Point defines the generic behavior all sunspec types have in common.
Point
// Set sets the point´s underlying value.
Set(v float32) error
// Get returns the point´s underlying value.
Get() float32
}
type tFloat32 struct {
point
data float32
}
var _ Float32 = (*tFloat32)(nil)
// Valid specifies whether the underlying value is implemented by the device.
func (t *tFloat32) Valid() bool { return t.Get() != 0x7FC00000 }
// String formats the point´s value as string.
func (t *tFloat32) String() string { return fmt.Sprintf("%v", t.Get()) }
// Quantity returns the number of modbus registers required to store the underlying value.
func (t *tFloat32) Quantity() uint16 { return 2 }
// encode puts the point´s value into a buffer.
func (t *tFloat32) encode(buf []byte) error {
binary.BigEndian.PutUint32(buf, math.Float32bits(t.Get()))
return nil
}
// decode sets the point´s value from a buffer.
func (t *tFloat32) decode(buf []byte) error {
return t.Set(math.Float32frombits(binary.BigEndian.Uint32(buf)))
}
// Set sets the point´s underlying value.
func (t *tFloat32) Set(v float32) error {
t.data = v
return nil
}
// Get returns the point´s underlying value.
func (t *tFloat32) Get() float32 { return t.data }
// ****************************************************************************
// Float64 represents the sunspec type float64.
type Float64 interface {
// Point defines the generic behavior all sunspec types have in common.
Point
// Set sets the point´s underlying value.
Set(v float64) error
// Get returns the point´s underlying value.
Get() float64
}
type tFloat64 struct {
point
data float64
}
var _ Float64 = (*tFloat64)(nil)
// Valid specifies whether the underlying value is implemented by the device.
func (t *tFloat64) Valid() bool { return t.Get() != 0x7FF8000000000000 }
// String formats the point´s value as string.
func (t *tFloat64) String() string { return fmt.Sprintf("%v", t.Get()) }
// Quantity returns the number of modbus registers required to store the underlying value.
func (t *tFloat64) Quantity() uint16 { return 4 }
// encode puts the point´s value into a buffer.
func (t *tFloat64) encode(buf []byte) error {
binary.BigEndian.PutUint64(buf, math.Float64bits(t.Get()))
return nil
}
// decode sets the point´s value from a buffer.
func (t *tFloat64) decode(buf []byte) error {
return t.Set(math.Float64frombits(binary.BigEndian.Uint64(buf)))
}
// Set sets the point´s underlying value.
func (t *tFloat64) Set(v float64) error {
t.data = v
return nil
}
// Get returns the point´s underlying value.
func (t *tFloat64) Get() float64 { return t.data }
// ****************************************************************************
// Ipaddr represents the sunspec type ipaddr.
type Ipaddr interface {
// Point defines the generic behavior all sunspec types have in common.
Point
// Set sets the point´s underlying value.
Set(v net.IP) error
// Get returns the point´s underlying value.
Get() net.IP
// Raw returns the point´s raw data.
Raw() [4]byte
}
type tIpaddr struct {
point
data [4]byte
}
var _ Ipaddr = (*tIpaddr)(nil)
// Valid specifies whether the underlying value is implemented by the device.
func (t *tIpaddr) Valid() bool { return t.data != [4]byte{} }
// String formats the point´s value as string.
func (t *tIpaddr) String() string { return fmt.Sprintf("%v", t.Get()) }
// Quantity returns the number of modbus registers required to store the underlying value.
func (t *tIpaddr) Quantity() uint16 { return uint16(len(t.data) / 2) }
// encode puts the point´s value into a buffer.
func (t *tIpaddr) encode(buf []byte) error {
copy(buf, t.Get())
return nil
}
// decode sets the point´s value from a buffer.
func (t *tIpaddr) decode(buf []byte) error {
return t.Set(buf)
}
// Set sets the point´s underlying value.
func (t *tIpaddr) Set(v net.IP) error {
copy(t.data[:len(t.data)], v)
return nil
}
// Get returns the point´s underlying value.
func (t *tIpaddr) Get() net.IP { return append(net.IP(nil), t.data[:]...) }
// Raw returns the point´s raw data.
func (t *tIpaddr) Raw() (r [4]byte) {
copy(r[:], t.Get())
return r
}
// ****************************************************************************
// Ipaddr represents the sunspec type ipaddr.
type Ipv6addr interface {
// Point defines the generic behavior all sunspec types have in common.
Point
// Set sets the point´s underlying value.
Set(v net.IP) error
// Get returns the point´s underlying value.
Get() net.IP
// Raw returns the point´s raw data.
Raw() [16]byte
}
type tIpv6addr struct {
point
data [16]byte
}
var _ Ipv6addr = (*tIpv6addr)(nil)
// Valid specifies whether the underlying value is implemented by the device.
func (t *tIpv6addr) Valid() bool { return t.data != [16]byte{} }
// String formats the point´s value as string.
func (t *tIpv6addr) String() string { return fmt.Sprintf("%v", t.Get()) }
// Quantity returns the number of modbus registers required to store the underlying value.
func (t *tIpv6addr) Quantity() uint16 { return uint16(len(t.data) / 2) }
// encode puts the point´s value into a buffer.
func (t *tIpv6addr) encode(buf []byte) error {
copy(buf, t.Get())
return nil
}
// decode sets the point´s value from a buffer.
func (t *tIpv6addr) decode(buf []byte) error {
return t.Set(buf)
}
// Set sets the point´s underlying value.
func (t *tIpv6addr) Set(v net.IP) error {
copy(t.data[:len(t.data)], v)
return nil
}
// Get returns the point´s underlying value.
func (t *tIpv6addr) Get() net.IP { return append(net.IP(nil), t.data[:]...) }
// Raw returns the point´s raw data.
func (t *tIpv6addr) Raw() (r [16]byte) {
copy(r[:], t.Get())
return r
}
// ****************************************************************************
// Eui48 represents the sunspec type eui48.
type Eui48 interface {
// Point defines the generic behavior all sunspec types have in common.
Point
// Set sets the point´s underlying value.
Set(v net.HardwareAddr) error
// Get returns the point´s underlying value.
Get() net.HardwareAddr
// Raw returns the point´s raw data.
Raw() [8]byte
}
type tEui48 struct {
point
data [8]byte
}
var _ Eui48 = (*tEui48)(nil)
// Valid specifies whether the underlying value is implemented by the device.
func (t *tEui48) Valid() bool { return true } //?
// String formats the point´s value as string.
func (t *tEui48) String() string { return fmt.Sprintf("%v", t.Get()) }
// Quantity returns the number of modbus registers required to store the underlying value.
func (t *tEui48) Quantity() uint16 { return uint16(len(t.data) / 2) }
// encode puts the point´s value into a buffer.
func (t *tEui48) encode(buf []byte) error {
copy(buf, t.Get())
return nil
}
// decode sets the point´s value from a buffer.
func (t *tEui48) decode(buf []byte) error {
return t.Set(buf)
}
// Set sets the point´s underlying value.
func (t *tEui48) Set(v net.HardwareAddr) error {
copy(t.data[:len(t.data)], v)
return nil
}
// Get returns the point´s underlying value.
func (t *tEui48) Get() net.HardwareAddr { return append(net.HardwareAddr(nil), t.data[:]...) }
// Raw returns the point´s raw data.
func (t *tEui48) Raw() (r [8]byte) {
copy(r[:], t.Get())
return r
} | types.go | 0.904091 | 0.502991 | types.go | starcoder |
package merkletree2
import (
"math/big"
)
// Position represents the position of a node in the tree. When converted to
// bytes, a Position can be interpreted as a 1 followed (from left to right) by
// a sequence of log2(Config.childrenPerNode)-bit symbols, where each such
// symbol identifies which child to descend to in a path from the root to a
// node. The leading 1 acts as a marker bit so that positions of different
// depths remain distinct integers. The sequence is padded with 0s on the left
// to the nearest byte. For example, in a binary tree the root has position
// 0x01 (i.e. 0b00000001), and the second child of the first child of the root
// has position 0x05 (0b00000101).
type Position big.Int
// getRootPosition returns the position of the root node, which is always
// encoded as the integer 1 (the lone leading marker bit, with no
// child-index symbols after it).
func (t *Tree) getRootPosition() *Position {
	root := big.NewInt(1)
	return (*Position)(root)
}
// getChild returns the position of the c-th child of the node at position p.
// The child index is appended to p as a new bitsPerIndex-wide symbol at the
// least significant end.
func (t *Tree) getChild(p *Position, c ChildIndex) *Position {
	var child big.Int
	child.Lsh((*big.Int)(p), uint(t.cfg.bitsPerIndex))
	child.Or(&child, big.NewInt(int64(c)))
	return (*Position)(&child)
}
// getBytes returns the big-endian byte representation of the position,
// including the leading marker bit.
func (p *Position) getBytes() []byte {
	asInt := (*big.Int)(p)
	return asInt.Bytes()
}
// isPositionOnPathToKey reports whether p lies on the path from the root to
// the deepest position at which key k would be stored, i.e. whether p is a
// prefix of k's deepest position.
func (t *Tree) isPositionOnPathToKey(p *Position, k Key) bool {
	pInt := (*big.Int)(p)
	// A position deeper than the key's deepest possible position cannot be
	// a prefix of it. pInt.BitLen()-1 is p's depth in bits (excluding the
	// leading marker bit).
	if len(k)*8 < pInt.BitLen()-1 {
		return false
	}
	// Build the deepest position for k (the key bits preceded by the marker
	// bit), then truncate it to the same depth as p by shifting right.
	var keyPos big.Int
	keyPos.SetBytes([]byte(k))
	keyPos.SetBit(&keyPos, len(k)*8, 1)
	keyPos.Rsh(&keyPos, uint(keyPos.BitLen()-pInt.BitLen()))
	return pInt.Cmp(&keyPos) == 0
}
// equals reports whether p and q denote the same tree position.
func (p *Position) equals(q *Position) bool {
	pInt, qInt := (*big.Int)(p), (*big.Int)(q)
	return pInt.Cmp(qInt) == 0
}
// getParent returns the position of p's parent, obtained by dropping p's
// last child-index symbol. It returns nil if p is the root.
func (t *Tree) getParent(p *Position) *Position {
	pInt := (*big.Int)(p)
	// Only the root (value 1) has a bit length below 2.
	if pInt.BitLen() < 2 {
		return nil
	}
	var parent big.Int
	parent.Rsh(pInt, uint(t.cfg.bitsPerIndex))
	return (*Position)(&parent)
}
// getAllSiblings returns the positions of all siblings of p (the other
// children of p's parent, in ascending child-index order) together with the
// parent itself. It returns nil, nil if p is the root.
func (t *Tree) getAllSiblings(p *Position) (siblings []Position, parent *Position) {
	parent = t.getParent(p)
	if parent == nil {
		return nil, nil
	}
	// firstChild is the parent's child with index 0; ORing a child index
	// into it yields the position of the corresponding sibling.
	var firstChild big.Int
	firstChild.Lsh((*big.Int)(parent), uint(t.cfg.bitsPerIndex))
	// p's own child index is its low-order symbol, recovered by XORing away
	// the shared prefix.
	var scratch big.Int
	ownIndex := scratch.Xor(&firstChild, (*big.Int)(p)).Int64()
	siblings = make([]Position, t.cfg.childrenPerNode-1)
	next := 0
	for idx := int64(0); idx < int64(t.cfg.childrenPerNode); idx++ {
		if idx == ownIndex {
			continue
		}
		(*big.Int)(&siblings[next]).Or(&firstChild, big.NewInt(idx))
		next++
	}
	return siblings, parent
}
// getDeepestPositionForKey converts the key into the position the key would
// be stored at if the tree was full with only one key per leaf: the key's
// bits preceded by the leading marker bit.
func (t *Tree) getDeepestPositionForKey(k Key) (*Position, error) {
	if len(k) != t.cfg.keysByteLength {
		return nil, NewInvalidKeyError()
	}
	var deepest big.Int
	deepest.SetBytes(k)
	deepest.SetBit(&deepest, len(k)*8, 1)
	return (*Position)(&deepest), nil
}
func (t *Tree) getSiblingPositionsOnPathToKey(k Key) ([][]Position, error) {
p, err := t.getDeepestPositionForKey(k)
if err != nil {
return nil, err
}
maxPathLength := ((*big.Int)(p).BitLen() - 1) / int(t.cfg.bitsPerIndex)
positions := make([][]Position, maxPathLength)
root := t.getRootPosition()
for i := 0; !p.equals(root); {
positions[i], p = t.getAllSiblings(p)
i++
}
return positions, nil
} | go/merkletree2/position.go | 0.763924 | 0.703912 | position.go | starcoder |
package set
import (
"context"
"fmt"
"math"
"github.com/pbanos/botanic/feature"
)
const (
	// sampleCountThresholdForSetImplementation is the sample count above
	// which New switches from the memory-intensive to the CPU-intensive
	// set implementation.
	sampleCountThresholdForSetImplementation = 1000
)
/*
Set represents a collection of samples.
Its Entropy method returns the entropy of the set for a given Feature: a
measure of the disinformation we have on the classes of samples that belong to
it.
Its SubsetWith method takes a feature.Criterion and returns a subset that only
contains samples that satisfy it.
Its Samples method returns the samples it contains.
*/
type Set interface {
	// Entropy returns the entropy of the set for the given feature, or an
	// error if it cannot be computed.
	Entropy(context.Context, feature.Feature) (float64, error)
	// SubsetWith returns the subset of samples satisfying the given
	// criterion.
	SubsetWith(context.Context, feature.Criterion) (Set, error)
	// FeatureValues returns the values the given feature takes across the
	// set's samples.
	FeatureValues(context.Context, feature.Feature) ([]interface{}, error)
	// CountFeatureValues returns, per value of the given feature, how many
	// samples in the set have that value.
	CountFeatureValues(context.Context, feature.Feature) (map[string]int, error)
	// Samples returns the samples the set contains.
	Samples(context.Context) ([]Sample, error)
	// Count returns the number of samples in the set.
	Count(context.Context) (int, error)
}
// memoryIntensiveSubsettingSet materializes its samples as a slice;
// subsetting copies the matching samples into a new slice (fast queries,
// more memory).
type memoryIntensiveSubsettingSet struct {
entropy *float64 // cached result of the first Entropy call
samples []Sample
}
// cpuIntensiveSubsettingSet keeps the original sample slice plus a list of
// criteria; every query re-filters the samples through the criteria (less
// memory, more CPU).
type cpuIntensiveSubsettingSet struct {
entropy *float64 // cached result of the first Entropy call
count *int // cached result of the first Count call
samples []Sample
criteria []feature.Criterion
}
/*
New takes a slice of samples and returns a set built with them.
A memory-intensive set is returned for up to
sampleCountThresholdForSetImplementation samples; above that threshold a
CPU-intensive set is returned instead.
*/
func New(samples []Sample) Set {
	if len(samples) <= sampleCountThresholdForSetImplementation {
		return &memoryIntensiveSubsettingSet{samples: samples}
	}
	return &cpuIntensiveSubsettingSet{samples: samples, criteria: []feature.Criterion{}}
}
/*
NewMemoryIntensive takes a slice of samples and returns a Set built with
them. A memory-intensive set replicates the slice of samples when
subsetting, trading memory for cheaper repeated calculations.
*/
func NewMemoryIntensive(samples []Sample) Set {
	return &memoryIntensiveSubsettingSet{samples: samples}
}
/*
NewCPUIntensive takes a slice of samples and returns a Set built with them.
A CPU-intensive set never copies the sample slice: subsetting records the
applying feature criteria instead, and every later calculation re-applies
those criteria over the original samples. This drastically reduces memory
use at the cost of CPU time.
*/
func NewCPUIntensive(samples []Sample) Set {
	return &cpuIntensiveSubsettingSet{samples: samples, criteria: []feature.Criterion{}}
}
// Count returns the number of samples in the set; it cannot fail for the
// in-memory implementation.
func (s *memoryIntensiveSubsettingSet) Count(ctx context.Context) (int, error) {
return len(s.samples), nil
}
// Count returns the number of samples that satisfy all of the set's
// criteria, caching the result for subsequent calls.
// Fix: the error returned by iterateOnSet (a criterion's SatisfiedBy can
// fail) was previously discarded, and a bogus partial count was cached.
func (s *cpuIntensiveSubsettingSet) Count(ctx context.Context) (int, error) {
	if s.count != nil {
		return *s.count, nil
	}
	var length int
	err := s.iterateOnSet(func(_ Sample) (bool, error) {
		length++
		return true, nil
	})
	if err != nil {
		return 0, err
	}
	s.count = &length
	return length, nil
}
// Entropy returns the Shannon entropy (natural log) of the distribution of
// values of feature f across the samples; samples whose value for f is nil
// are excluded from the distribution.
// NOTE(review): the result is cached on first call and returned for every
// later call regardless of f — confirm callers never reuse a set with a
// different feature before relying on this.
func (s *memoryIntensiveSubsettingSet) Entropy(ctx context.Context, f feature.Feature) (float64, error) {
if s.entropy != nil {
return *s.entropy, nil
}
var result float64
featureValueCounts := make(map[string]float64)
count := 0.0
for _, sample := range s.samples {
v, err := sample.ValueFor(f)
if err != nil {
return result, err
}
if v != nil {
// Bucket values by their string form (%v for non-strings).
vString, ok := v.(string)
if !ok {
vString = fmt.Sprintf("%v", v)
}
count += 1.0
featureValueCounts[vString] += 1.0
}
}
// H = -sum(p * ln p) over the observed values.
for _, v := range featureValueCounts {
probValue := v / count
result -= probValue * math.Log(probValue)
}
s.entropy = &result
return result, nil
}
// Entropy returns the Shannon entropy (natural log) of the distribution of
// values of feature f across the samples satisfying the set's criteria;
// samples whose value for f is nil are excluded.
// NOTE(review): like the memory-intensive variant, the cached result is
// returned regardless of f on later calls — verify callers use one feature
// per set.
func (s *cpuIntensiveSubsettingSet) Entropy(ctx context.Context, f feature.Feature) (float64, error) {
if s.entropy != nil {
return *s.entropy, nil
}
var result float64
featureValueCounts := make(map[string]float64)
count := 0.0
err := s.iterateOnSet(func(sample Sample) (bool, error) {
v, err := sample.ValueFor(f)
if err != nil {
return false, err
}
if v != nil {
// Bucket values by their string form (%v for non-strings).
vString, ok := v.(string)
if !ok {
vString = fmt.Sprintf("%v", v)
}
count += 1.0
featureValueCounts[vString] += 1.0
}
return true, nil
})
if err != nil {
return result, err
}
// H = -sum(p * ln p) over the observed values.
for _, v := range featureValueCounts {
probValue := v / count
result -= probValue * math.Log(probValue)
}
s.entropy = &result
return result, nil
}
// FeatureValues returns the distinct values feature f takes across the
// set's samples, in first-seen order; distinctness is judged on the value's
// %v string form.
func (s *memoryIntensiveSubsettingSet) FeatureValues(ctx context.Context, f feature.Feature) ([]interface{}, error) {
	values := []interface{}{}
	seen := map[string]bool{}
	for _, smpl := range s.samples {
		val, err := smpl.ValueFor(f)
		if err != nil {
			return nil, err
		}
		key := fmt.Sprintf("%v", val)
		if seen[key] {
			continue
		}
		seen[key] = true
		values = append(values, val)
	}
	return values, nil
}
// FeatureValues returns the distinct values feature f takes across the
// samples satisfying the set's criteria, in first-seen order; distinctness
// is judged on the value's %v string form.
func (s *cpuIntensiveSubsettingSet) FeatureValues(ctx context.Context, f feature.Feature) ([]interface{}, error) {
result := []interface{}{}
encountered := make(map[string]bool)
err := s.iterateOnSet(func(sample Sample) (bool, error) {
v, err := sample.ValueFor(f)
if err != nil {
return false, err
}
vString := fmt.Sprintf("%v", v)
if !encountered[vString] {
encountered[vString] = true
result = append(result, v)
}
return true, nil
})
if err != nil {
return nil, err
}
return result, nil
}
// SubsetWith returns a new memory-intensive set containing only the samples
// that satisfy fc, preserving their original order.
func (s *memoryIntensiveSubsettingSet) SubsetWith(ctx context.Context, fc feature.Criterion) (Set, error) {
	var matching []Sample
	for _, smpl := range s.samples {
		satisfied, err := fc.SatisfiedBy(smpl)
		if err != nil {
			return nil, err
		}
		if !satisfied {
			continue
		}
		matching = append(matching, smpl)
	}
	return &memoryIntensiveSubsettingSet{samples: matching}, nil
}
// SubsetWith returns a new CPU-intensive set over the same sample slice,
// with fc prepended to the existing criteria list; no samples are copied or
// filtered until a later query runs.
func (s *cpuIntensiveSubsettingSet) SubsetWith(ctx context.Context, fc feature.Criterion) (Set, error) {
	criteria := make([]feature.Criterion, 0, len(s.criteria)+1)
	criteria = append(criteria, fc)
	criteria = append(criteria, s.criteria...)
	return &cpuIntensiveSubsettingSet{samples: s.samples, criteria: criteria}, nil
}
// Samples returns the underlying sample slice directly; callers must not
// mutate it.
func (s *memoryIntensiveSubsettingSet) Samples(ctx context.Context) ([]Sample, error) {
return s.samples, nil
}
// Samples returns the samples satisfying all of the set's criteria, freshly
// collected on every call by filtering the original slice.
func (s *cpuIntensiveSubsettingSet) Samples(ctx context.Context) ([]Sample, error) {
var samples []Sample
err := s.iterateOnSet(func(sample Sample) (bool, error) {
samples = append(samples, sample)
return true, nil
})
if err != nil {
return nil, err
}
return samples, nil
}
// CountFeatureValues returns, for feature f, a map from each observed value
// (formatted with %v) to the number of samples carrying that value.
func (s *memoryIntensiveSubsettingSet) CountFeatureValues(ctx context.Context, f feature.Feature) (map[string]int, error) {
	counts := map[string]int{}
	for _, smpl := range s.samples {
		val, err := smpl.ValueFor(f)
		if err != nil {
			return nil, err
		}
		counts[fmt.Sprintf("%v", val)]++
	}
	return counts, nil
}
// CountFeatureValues returns, for feature f, a map from each observed value
// (formatted with %v) to the number of criteria-satisfying samples carrying
// that value.
func (s *cpuIntensiveSubsettingSet) CountFeatureValues(ctx context.Context, f feature.Feature) (map[string]int, error) {
result := make(map[string]int)
err := s.iterateOnSet(func(sample Sample) (bool, error) {
v, err := sample.ValueFor(f)
if err != nil {
return false, err
}
vString := fmt.Sprintf("%v", v)
result[vString]++
return true, nil
})
if err != nil {
return nil, err
}
return result, nil
}
// iterateOnSet calls lambda once for every sample that satisfies all of the
// set's criteria. Iteration stops early when lambda returns false, and any
// error from lambda or from a criterion's SatisfiedBy is propagated.
func (s *cpuIntensiveSubsettingSet) iterateOnSet(lambda func(Sample) (bool, error)) error {
for _, sample := range s.samples {
skip := false
for _, criterion := range s.criteria {
ok, err := criterion.SatisfiedBy(sample)
if err != nil {
return err
}
if !ok {
skip = true
break
}
}
if !skip {
ok, err := lambda(sample)
if err != nil {
return err
}
if !ok {
break
}
}
}
return nil
} | set/set.go | 0.703855 | 0.503479 | set.go | starcoder
package query
import (
"fmt"
"math/big"
"strings"
"time"
"github.com/hyperledger/burrow/logging/errors"
)
const (
// DateLayout defines a layout for all dates (`DATE date`)
DateLayout = "2006-01-02"
// TimeLayout defines a layout for all times (`TIME time`)
TimeLayout = time.RFC3339
)
// Operator is an operator that defines some kind of relation between tag and
// operand (equality, etc.).
type Operator uint8
const (
// OpTerminal marks a value-carrying instruction rather than an operation.
OpTerminal Operator = iota
// Logical connectives (binary).
OpAnd
OpOr
// Comparison operators (binary).
OpLessEqual
OpGreaterEqual
OpLess
OpGreater
OpEqual
OpContains
OpNotEqual
// OpNot is the only unary operator (arity 1).
OpNot
)
// opNames maps each operator to its display name; OpTerminal is deliberately
// absent and renders as the empty string.
var opNames = map[Operator]string{
OpAnd: "AND",
OpOr: "OR",
OpLessEqual: "<=",
OpGreaterEqual: ">=",
OpLess: "<",
OpGreater: ">",
OpEqual: "=",
OpContains: "CONTAINS",
OpNotEqual: "!=",
OpNot: "Not",
}
// String returns the display name of the operator ("" for OpTerminal).
func (op Operator) String() string {
return opNames[op]
}
// Arity returns how many operands the operator consumes: 1 for the unary
// OpNot, 2 for every other operator.
func (op Operator) Arity() int {
if op == OpNot {
return 1
}
return 2
}
// instruction is a container suitable for the code tape and the stack: it
// holds either an operator (op != OpTerminal) or exactly one terminal value
// (tag, string, time, number, or the boolean match result).
type instruction struct {
op Operator
tag *string
string *string
time *time.Time
number *big.Float
match bool
}
// String renders the instruction: the operator name for operations, the
// terminal's value otherwise (single-quoted for strings, true/false for the
// match flag).
func (in *instruction) String() string {
switch {
case in.op != OpTerminal:
return in.op.String()
case in.tag != nil:
return *in.tag
case in.string != nil:
return "'" + *in.string + "'"
case in.time != nil:
return in.time.String()
case in.number != nil:
return in.number.String()
default:
if in.match {
return "true"
}
return "false"
}
}
// Expression is a Boolean expression for the query grammar, compiled to a
// postfix instruction tape that Evaluate runs on a small stack machine.
type Expression struct {
// This is our 'bytecode'
code []*instruction
errors errors.MultipleErrors
explainer func(format string, args ...interface{})
}
// Evaluate expects an Execute() to have filled the code of the Expression so
// it can be run in the little stack machine below. getTagValue resolves a
// tag name to its value (ok=false means the tag is absent, which simply
// fails to match). It returns whether the whole expression matched, or an
// error if parsing left collected errors or the tape is malformed.
func (e *Expression) Evaluate(getTagValue func(tag string) (interface{}, bool)) (bool, error) {
if len(e.errors) > 0 {
return false, e.errors
}
var left, right *instruction
stack := make([]*instruction, 0, len(e.code))
var err error
for _, in := range e.code {
if in.op == OpTerminal {
// just push terminals on to the stack
stack = append(stack, in)
continue
}
stack, left, right, err = pop(stack, in.op)
if err != nil {
return false, fmt.Errorf("cannot process instruction %v in expression [%v]: %w", in, e, err)
}
ins := &instruction{}
switch in.op {
case OpNot:
ins.match = !right.match
case OpAnd:
ins.match = left.match && right.match
case OpOr:
ins.match = left.match || right.match
default:
// We have a non-terminal, non-connective operation: left is the
// tag, right is the literal to compare against.
tagValue, ok := getTagValue(*left.tag)
// No match if we can't get tag value
if ok {
// Dispatch on which literal kind the right terminal holds.
switch {
case right.string != nil:
ins.match = compareString(in.op, tagValue, *right.string)
case right.number != nil:
ins.match = compareNumber(in.op, tagValue, right.number)
case right.time != nil:
ins.match = compareTime(in.op, tagValue, *right.time)
}
}
// Uncomment this for a little bit of debug:
//e.explainf("%v := %v\n", left, tagValue)
}
// Uncomment this for a little bit of debug:
//e.explainf("%v %v %v => %v\n", left, in.op, right, ins.match)
// Push whether this was a match back on to stack
stack = append(stack, ins)
}
// A well-formed tape reduces to a single boolean instruction.
if len(stack) != 1 {
return false, fmt.Errorf("stack for query expression [%v] should have exactly one element after "+
"evaulation but has %d", e, len(stack))
}
return stack[0].match, nil
}
// explainf forwards a debug message to the configured explainer, if any.
func (e *Expression) explainf(fmt string, args ...interface{}) {
if e.explainer != nil {
e.explainer(fmt, args...)
}
}
// pop removes op's operands from the top of the stack and returns the
// shortened stack plus the operands; for unary operators left is nil. It
// errors when the stack holds fewer elements than op's arity.
func pop(stack []*instruction, op Operator) ([]*instruction, *instruction, *instruction, error) {
arity := op.Arity()
if len(stack) < arity {
return stack, nil, nil, fmt.Errorf("cannot pop arguments for arity %d operator %v from stack "+
"because stack has fewer than %d elements", arity, op, arity)
}
if arity == 1 {
return stack[:len(stack)-1], nil, stack[len(stack)-1], nil
}
return stack[:len(stack)-2], stack[len(stack)-2], stack[len(stack)-1], nil
}
// compareString compares the tag's value (converted to a string via
// StringFromValue) against a string literal. Only CONTAINS, = and != are
// meaningful for strings; any other operator yields false.
func compareString(op Operator, tagValue interface{}, value string) bool {
	str := StringFromValue(tagValue)
	if op == OpContains {
		return strings.Contains(str, value)
	}
	if op == OpEqual {
		return str == value
	}
	if op == OpNotEqual {
		return str != value
	}
	return false
}
// compareNumber coerces the tag's value into a big.Float (strings are parsed
// base-10; all Go numeric types are converted directly) and compares it to
// the literal. Unparseable strings and unsupported types yield false.
func compareNumber(op Operator, tagValue interface{}, value *big.Float) bool {
tagNumber := new(big.Float)
switch n := tagValue.(type) {
case string:
f, _, err := big.ParseFloat(n, 10, 64, big.ToNearestEven)
if err != nil {
return false
}
tagNumber.Set(f)
case *big.Float:
tagNumber.Set(n)
case *big.Int:
tagNumber.SetInt(n)
case float32:
tagNumber.SetFloat64(float64(n))
case float64:
tagNumber.SetFloat64(n)
case int:
tagNumber.SetInt64(int64(n))
case int32:
tagNumber.SetInt64(int64(n))
case int64:
tagNumber.SetInt64(n)
case uint:
tagNumber.SetUint64(uint64(n))
case uint32:
tagNumber.SetUint64(uint64(n))
case uint64:
tagNumber.SetUint64(n)
default:
return false
}
// Cmp returns -1, 0 or 1; translate that into the requested relation.
cmp := tagNumber.Cmp(value)
switch op {
case OpLessEqual:
return cmp < 1
case OpGreaterEqual:
return cmp > -1
case OpLess:
return cmp == -1
case OpGreater:
return cmp == 1
case OpEqual:
return cmp == 0
case OpNotEqual:
return cmp != 0
}
return false
}
// compareTime coerces the tag's value into a time.Time (time.Time passes
// through, int64 is taken as a Unix timestamp in seconds, strings are parsed
// as RFC3339 then as a plain date) and compares it to the literal.
// Unparseable strings and unsupported types yield false.
func compareTime(op Operator, tagValue interface{}, value time.Time) bool {
var tagTime time.Time
var err error
switch t := tagValue.(type) {
case time.Time:
tagTime = t
case int64:
// Hmm, should we?
tagTime = time.Unix(t, 0)
case string:
tagTime, err = time.Parse(TimeLayout, t)
if err != nil {
// Fall back to a bare date (midnight UTC).
tagTime, err = time.Parse(DateLayout, t)
if err != nil {
return false
}
}
default:
return false
}
switch op {
case OpLessEqual:
return tagTime.Before(value) || tagTime.Equal(value)
case OpGreaterEqual:
return tagTime.Equal(value) || tagTime.After(value)
case OpLess:
return tagTime.Before(value)
case OpGreater:
return tagTime.After(value)
case OpEqual:
return tagTime.Equal(value)
case OpNotEqual:
return !tagTime.Equal(value)
}
return false
}
// These methods implement the various visitors that are called in the PEG grammar with statements like
// { p.Operator(OpEqual) }
// String renders the instruction tape as a comma-separated list, for
// debugging and error messages.
func (e *Expression) String() string {
strs := make([]string, len(e.code))
for i, in := range e.code {
strs[i] = in.String()
}
return strings.Join(strs, ", ")
}
// Operator appends an operator instruction to the code tape.
func (e *Expression) Operator(operator Operator) {
e.code = append(e.code, &instruction{
op: operator,
})
}
// Terminals...
// Tag appends a tag-name terminal.
func (e *Expression) Tag(value string) {
e.code = append(e.code, &instruction{
tag: &value,
})
}
// Time parses an RFC3339 timestamp and appends it as a time terminal; parse
// errors are collected and surface when the expression is evaluated.
func (e *Expression) Time(value string) {
t, err := time.Parse(TimeLayout, value)
e.pushErr(err)
e.code = append(e.code, &instruction{
time: &t,
})
}
// Date parses a YYYY-MM-DD date and appends it as a time terminal.
func (e *Expression) Date(value string) {
date, err := time.Parse(DateLayout, value)
e.pushErr(err)
e.code = append(e.code, &instruction{
time: &date,
})
}
// Number parses a base-10 number and appends it as a numeric terminal.
func (e *Expression) Number(value string) {
number, _, err := big.ParseFloat(value, 10, 64, big.ToNearestEven)
e.pushErr(err)
e.code = append(e.code, &instruction{
number: number,
})
}
// Value appends a string-literal terminal.
func (e *Expression) Value(value string) {
e.code = append(e.code, &instruction{
string: &value,
})
}
// pushErr collects a non-nil error for reporting at evaluation time.
func (e *Expression) pushErr(err error) {
if err != nil {
e.errors = append(e.errors, err)
}
} | event/query/expression.go | 0.694717 | 0.414543 | expression.go | starcoder
package geometry
import (
"math"
"github.com/tab58/v1/spatial/pkg/numeric"
"gonum.org/v1/gonum/blas/blas64"
)
// Matrix3D is a row-major representation of a 3x3 matrix:
// elements[i*3+j] holds the entry at row i, column j.
type Matrix3D struct {
elements [9]float64
}
// Rows returns the number of rows in the matrix (always 3).
func (m *Matrix3D) Rows() uint { return 3 }
// Cols returns the number of columns in the matrix (always 3).
func (m *Matrix3D) Cols() uint { return 3 }
// Clone returns a deep copy of the matrix.
func (m *Matrix3D) Clone() *Matrix3D {
	// Arrays are value types in Go, so this assignment copies all nine
	// elements.
	dup := m.elements
	return &Matrix3D{elements: dup}
}
// Copy copies the elements of mat into this matrix.
func (m *Matrix3D) Copy(mat *Matrix3D) {
	// A single array assignment copies all nine elements by value.
	m.elements = mat.elements
}
// Identity sets the matrix to the identity matrix.
func (m *Matrix3D) Identity() {
// ignoring error since all elements will not overflow
m.SetElements(1, 0, 0, 0, 1, 0, 0, 0, 1)
}
// Scale multiplies the elements of the matrix by the given scalar. On
// overflow of any element the matrix is left unchanged and
// numeric.ErrOverflow is returned.
func (m *Matrix3D) Scale(z float64) error {
out := [9]float64{}
for i, v := range m.elements {
val := v * z
if numeric.IsOverflow(val) {
return numeric.ErrOverflow
}
out[i] = val
}
m.elements = out
return nil
}
// ElementAt returns the value of the element at row i, column j, or
// numeric.ErrMatrixOutOfRange when either index is outside the 3x3 bounds.
// Fix: the bounds check was inverted (`i <= m.Rows() || j <= cols`), which
// rejected every valid index and let out-of-range indices through to a
// panic on the array access.
func (m *Matrix3D) ElementAt(i, j uint) (float64, error) {
	cols := m.Cols()
	if i >= m.Rows() || j >= cols {
		return 0, numeric.ErrMatrixOutOfRange
	}
	return m.elements[i*cols+j], nil
}
// SetElements sets the nine elements in the matrix (row-major order). On
// overflow of any argument the matrix is left unchanged and
// numeric.ErrOverflow is returned.
func (m *Matrix3D) SetElements(m00, m01, m02, m10, m11, m12, m20, m21, m22 float64) error {
if numeric.AreAnyOverflow(m00, m01, m02, m10, m11, m12, m20, m21, m22) {
return numeric.ErrOverflow
}
m.elements[0] = m00
m.elements[1] = m01
m.elements[2] = m02
m.elements[3] = m10
m.elements[4] = m11
m.elements[5] = m12
m.elements[6] = m20
m.elements[7] = m21
m.elements[8] = m22
return nil
}
// Elements returns a copy of the matrix's elements in row-major order;
// mutating the returned array does not affect the matrix.
func (m *Matrix3D) Elements() [9]float64 {
	// Returning the array field copies it, since arrays are value types.
	return m.elements
}
// ToBlas64General returns a blas64.General with the same values as the
// matrix. The data slice is a fresh copy, so the result is independent of
// later changes to the matrix.
func (m *Matrix3D) ToBlas64General() blas64.General {
data := make([]float64, len(m.elements))
copy(data, m.elements[:])
return blas64.General{
Rows: int(m.Rows()),
Cols: int(m.Cols()),
Stride: int(m.Cols()),
Data: data,
}
}
// SetElementAt sets the value of the element at row i, column j, returning
// numeric.ErrMatrixOutOfRange when either index is outside the 3x3 bounds.
// Fix: the bounds check was inverted (`i <= m.Rows() || j <= cols`), which
// rejected every valid index and let out-of-range indices through to a
// panic on the array access.
func (m *Matrix3D) SetElementAt(i, j uint, value float64) error {
	cols := m.Cols()
	if i >= m.Rows() || j >= cols {
		return numeric.ErrMatrixOutOfRange
	}
	m.elements[i*cols+j] = value
	return nil
}
// Add adds the elements of mat to this matrix element-wise. On overflow of
// any sum the matrix is left unchanged and numeric.ErrOverflow is returned.
func (m *Matrix3D) Add(mat *Matrix3D) error {
	var sum [9]float64
	for i := range sum {
		sum[i] = m.elements[i] + mat.elements[i]
	}
	if numeric.AreAnyOverflow(sum[:]...) {
		return numeric.ErrOverflow
	}
	m.elements = sum
	return nil
}
// Sub subtracts the elements of mat from this matrix element-wise. On
// overflow of any difference the matrix is left unchanged and
// numeric.ErrOverflow is returned.
func (m *Matrix3D) Sub(mat *Matrix3D) error {
	var diff [9]float64
	for i := range diff {
		diff[i] = m.elements[i] - mat.elements[i]
	}
	if numeric.AreAnyOverflow(diff[:]...) {
		return numeric.ErrOverflow
	}
	m.elements = diff
	return nil
}
// multiply3DMatrices multiplies two row-major 3x3 matrices and returns the
// product, or numeric.ErrOverflow if any product element overflows.
// NOTE(review): from the index pattern (out[0] = row0(b)·col0(a), etc.) the
// result appears to be b×a rather than a×b — confirm the intended operand
// order against Premultiply/Postmultiply before relying on it.
func multiply3DMatrices(a, b [9]float64) ([9]float64, error) {
a00, a01, a02 := a[0], a[1], a[2]
a10, a11, a12 := a[3], a[4], a[5]
a20, a21, a22 := a[6], a[7], a[8]
b00, b01, b02 := b[0], b[1], b[2]
b10, b11, b12 := b[3], b[4], b[5]
b20, b21, b22 := b[6], b[7], b[8]
out := [9]float64{0, 0, 0, 0, 0, 0, 0, 0, 0}
out[0] = b00*a00 + b01*a10 + b02*a20
out[1] = b00*a01 + b01*a11 + b02*a21
out[2] = b00*a02 + b01*a12 + b02*a22
out[3] = b10*a00 + b11*a10 + b12*a20
out[4] = b10*a01 + b11*a11 + b12*a21
out[5] = b10*a02 + b11*a12 + b12*a22
out[6] = b20*a00 + b21*a10 + b22*a20
out[7] = b20*a01 + b21*a11 + b22*a21
out[8] = b20*a02 + b21*a12 + b22*a22
if numeric.AreAnyOverflow(out[:]...) {
return [9]float64{}, numeric.ErrOverflow
}
return out, nil
}
// Premultiply left-multiplies the given matrix with this one, storing the
// result in m. Propagates numeric.ErrOverflow from the multiplication, in
// which case m is unchanged.
func (m *Matrix3D) Premultiply(mat *Matrix3D) error {
res, err := multiply3DMatrices(mat.elements, m.elements)
if err != nil {
return err
}
m.elements = res
return nil
}
// Postmultiply right-multiplies the given matrix with this one, storing the
// result in m. Propagates numeric.ErrOverflow from the multiplication, in
// which case m is unchanged.
func (m *Matrix3D) Postmultiply(mat *Matrix3D) error {
res, err := multiply3DMatrices(m.elements, mat.elements)
if err != nil {
return err
}
m.elements = res
return nil
}
// Invert inverts this matrix in-place via the adjugate divided by the
// determinant. Returns numeric.ErrSingularMatrix (leaving m unchanged) when
// |det| < 1e-13.
func (m *Matrix3D) Invert() error {
a := m.elements
a00, a01, a02 := a[0], a[1], a[2]
a10, a11, a12 := a[3], a[4], a[5]
a20, a21, a22 := a[6], a[7], a[8]
// Cofactors of the first column, reused for the determinant expansion.
b01 := a22*a11 - a12*a21
b11 := -a22*a10 + a12*a20
b21 := a21*a10 - a11*a20
// Calculate the determinant
det := a00*b01 + a01*b11 + a02*b21
if math.Abs(det) < 1e-13 {
return numeric.ErrSingularMatrix
}
det = 1.0 / det
out := [9]float64{}
out[0] = b01 * det
out[1] = (-a22*a01 + a02*a21) * det
out[2] = (a12*a01 - a02*a11) * det
out[3] = b11 * det
out[4] = (a22*a00 - a02*a20) * det
out[5] = (-a12*a00 + a02*a10) * det
out[6] = b21 * det
out[7] = (-a21*a00 + a01*a20) * det
out[8] = (a11*a00 - a01*a10) * det
m.elements = out
return nil
}
// Determinant calculates the determinant of the matrix by cofactor
// expansion along the first row.
func (m *Matrix3D) Determinant() float64 {
a := m.elements
a00, a01, a02 := a[0], a[1], a[2]
a10, a11, a12 := a[3], a[4], a[5]
a20, a21, a22 := a[6], a[7], a[8]
res := a00*(a22*a11-a12*a21) +
a01*(-a22*a10+a12*a20) +
a02*(a21*a10-a11*a20)
return res
}
// Adjoint calculates the adjoint/adjugate matrix (transpose of the cofactor
// matrix) and returns it as a new matrix; m is not modified.
func (m *Matrix3D) Adjoint() *Matrix3D {
a := m.elements
a00, a01, a02 := a[0], a[1], a[2]
a10, a11, a12 := a[3], a[4], a[5]
a20, a21, a22 := a[6], a[7], a[8]
out := [9]float64{}
out[0] = a11*a22 - a12*a21
out[1] = a02*a21 - a01*a22
out[2] = a01*a12 - a02*a11
out[3] = a12*a20 - a10*a22
out[4] = a00*a22 - a02*a20
out[5] = a02*a10 - a00*a12
out[6] = a10*a21 - a11*a20
out[7] = a01*a20 - a00*a21
out[8] = a00*a11 - a01*a10
return &Matrix3D{
elements: out,
}
}
// Transpose transposes the matrix in-place by swapping the three
// off-diagonal element pairs; the diagonal is untouched.
func (m *Matrix3D) Transpose() {
	e := &m.elements
	e[1], e[3] = e[3], e[1]
	e[2], e[6] = e[6], e[2]
	e[5], e[7] = e[7], e[5]
}
// IsSingular returns true if the matrix determinant is exactly zero, false
// if not. For a tolerance-based check use IsNearSingular.
func (m *Matrix3D) IsSingular() bool {
return m.Determinant() == 0
}
// IsNearSingular returns true if the absolute value of the determinant is
// equal or below the given tolerance, false if not. Returns
// numeric.ErrInvalidTol for an invalid tolerance.
func (m *Matrix3D) IsNearSingular(tol float64) (bool, error) {
if numeric.IsInvalidTolerance(tol) {
return false, numeric.ErrInvalidTol
}
return math.Abs(m.Determinant()) <= tol, nil
} | pkg/geometry/matrix3d.go | 0.81457 | 0.729279 | matrix3d.go | starcoder
package nne
import (
"fmt"
"math"
"math/rand"
"time"
)
// sigmoid maps x onto the open interval (0, 1) via the logistic function
// 1/(1+e^-x), used as the network's activation function.
func sigmoid(x float64) float64 {
	ex := math.Exp(-x)
	return 1 / (1 + ex)
}
// Node for backpropagation training based feed forward network.
type Node struct {
Threshold float64 // threshold (bias added before the activation function)
Weights []float64 // outgoing weights, one per node in the next layer
activation float64 // activation value from the last forward pass
error float64 // backpropagated error from the last training step
}
// NewNode creates new backpropagation network node with wCount outgoing
// weights (all zero until initialized by the caller).
func NewNode(wCount int) *Node {
return &Node{Weights: make([]float64, wCount, wCount)}
}
// Network is feed forward backpropagation network.
// Public members can be persisted to json or database.
type Network struct {
Input []*Node
Hidden []*Node
Output []*Node
lhRate float64 // learning rate of the hidden layer
loRate float64 // learning rate of the output layer
netInput []float64 // current pattern's input values
desiredOut []float64 // current pattern's target output values
}
// NewNetwork creates new backpropagation network with input, hidden and
// output layers of the given sizes, randomizing all weights and thresholds.
// Default learning rates are 0.15 (hidden) and 0.2 (output); override with
// SetLearningRates.
// NOTE(review): rand.Seed is deprecated since Go 1.20 and seeding with the
// current time here affects the global generator — consider a local
// rand.Rand if this is revisited.
func NewNetwork(inCount, hideCount, outCount int) *Network {
n := &Network{
lhRate: 0.15,
loRate: 0.2,
Input: make([]*Node, inCount, inCount),
Hidden: make([]*Node, hideCount, hideCount),
Output: make([]*Node, outCount, outCount),
}
rand.Seed(time.Now().Unix())
for i := 0; i < inCount; i++ {
n.Input[i] = NewNode(hideCount)
for j := 0; j < hideCount; j++ {
// Input->hidden weights roughly in (-0.5, 0.5).
n.Input[i].Weights[j] = rand.Float64() - 0.49999
}
}
for i := 0; i < hideCount; i++ {
n.Hidden[i] = NewNode(outCount)
for j := 0; j < outCount; j++ {
// Hidden->output weights in [0, 1).
n.Hidden[i].Weights[j] = rand.Float64()
}
}
for i := 0; i < outCount; i++ {
n.Output[i] = NewNode(0)
}
// reset thresholds
for i := 0; i < len(n.Hidden); i++ {
n.Hidden[i].Threshold = rand.Float64()
}
for i := 0; i < len(n.Output); i++ {
n.Output[i].Threshold = rand.Float64()
}
return n
}
// TrainingData holds single block of inputs and outputs for the training to run.
// It is public for easier persistence to disk or database.
type TrainingData struct {
Input []float64
Output []float64
}
// TrainingSet is an ordered collection of training patterns.
type TrainingSet []*TrainingData
// Add appends a new input/output pattern to the set.
func (tr *TrainingSet) Add(input []float64, output []float64) {
*tr = append(*tr, &TrainingData{Input: input, Output: output})
}
// Train performs network training for number of iterations, usually over
// 2000 epochs; each epoch runs one backpropagation step per pattern in data.
// It panics when a pattern's input or output length does not match the
// network's layer sizes.
func (n *Network) Train(epochs int, data TrainingSet) {
inputLen := len(n.Input)
outputLen := len(n.Output)
for i := 0; i < epochs; i++ {
for _, tr := range data {
if inputLen != len(tr.Input) {
panic(fmt.Sprintf("expected training data input length %d got %d", inputLen, len(tr.Input)))
}
if outputLen != len(tr.Output) {
panic(fmt.Sprintf("expected traing data output length %d got %d", outputLen, len(tr.Output)))
}
n.netInput = tr.Input
n.desiredOut = tr.Output
n.trainOnePattern()
}
}
}
// trainOnePattern runs one full backpropagation step for the currently set
// pattern: forward pass, error computation, then threshold/weight updates.
func (n *Network) trainOnePattern() {
n.calcActivation()
n.calcErrorOutput()
n.calcErrorHidden()
n.calcNewThresholds()
n.calcNewWeightsHidden()
n.calcNewWeightsInput()
}
// SetLearningRates sets the learning rates used for the hidden (lhRate) and
// output (loRate) layer updates during backpropagation.
func (n *Network) SetLearningRates(lhRate, loRate float64) {
n.lhRate = lhRate
n.loRate = loRate
}
// calcActivation performs a forward pass: it computes the sigmoid activation
// of every hidden node from the current net input, then of every output node
// from the hidden activations.
// Fix: activations are now reset before accumulation. Nodes are reused
// across patterns and the sums below use +=, so without the reset each pass
// started from the previous pattern's sigmoid output.
func (n *Network) calcActivation() {
	for _, hid := range n.Hidden {
		hid.activation = 0
	}
	for _, out := range n.Output {
		out.activation = 0
	}
	// Weighted sum of the inputs into each hidden node.
	for h := 0; h < len(n.Hidden); h++ {
		for i := 0; i < len(n.Input); i++ {
			n.Hidden[h].activation += n.netInput[i] * n.Input[i].Weights[h]
		}
	}
	// Add threshold (bias) and squash.
	for _, hid := range n.Hidden {
		hid.activation += hid.Threshold
		hid.activation = sigmoid(hid.activation)
	}
	// Weighted sum of hidden activations into each output node.
	for j, val := range n.Output {
		for _, hid := range n.Hidden {
			val.activation += hid.activation * hid.Weights[j]
		}
	}
	// Add threshold (bias) and squash.
	for _, val := range n.Output {
		val.activation += val.Threshold
		val.activation = sigmoid(val.activation)
	}
}
// calcErrorOutput calculates error of each output neuron as the sigmoid
// derivative (a*(1-a)) times the difference between desired and actual
// output.
func (n *Network) calcErrorOutput() {
for j := 0; j < len(n.Output); j++ {
n.Output[j].error = n.Output[j].activation * (1 - n.Output[j].activation) *
(n.desiredOut[j] - n.Output[j].activation)
}
}
// calcErrorHidden computes the backpropagated error of each hidden neuron:
// the weighted sum of the output-layer errors times the sigmoid derivative
// of the hidden activation.
// Fix: the sum is now accumulated in a local and assigned with =. The
// previous += onto the reused node field carried the previous training
// pattern's error into this one.
func (n *Network) calcErrorHidden() {
	for h := 0; h < len(n.Hidden); h++ {
		sum := 0.0
		for j := 0; j < len(n.Output); j++ {
			sum += n.Hidden[h].Weights[j] * n.Output[j].error
		}
		n.Hidden[h].error = sum * n.Hidden[h].activation * (1 - n.Hidden[h].activation)
	}
}
// calcNewThresholds nudges each neuron's threshold (bias) along its error,
// scaled by the layer's learning rate.
func (n *Network) calcNewThresholds() {
// computing the thresholds for next iteration for hidden layer
for h := 0; h < len(n.Hidden); h++ {
n.Hidden[h].Threshold += n.Hidden[h].error * n.lhRate
}
// computing the thresholds for next iteration for output layer
for j := 0; j < len(n.Output); j++ {
n.Output[j].Threshold += n.Output[j].error * n.loRate
}
}
// calcNewWeightsHidden updates the hidden->output weights: each weight moves
// by hiddenActivation * outputError * loRate.
func (n *Network) calcNewWeightsHidden() {
for h := 0; h < len(n.Hidden); h++ {
temp := n.Hidden[h].activation * n.loRate
for j := 0; j < len(n.Output); j++ {
n.Hidden[h].Weights[j] += temp * n.Output[j].error
}
}
}
// calcNewWeightsInput updates the input->hidden weights: each weight moves
// by inputValue * hiddenError * lhRate.
func (n *Network) calcNewWeightsInput() {
for i := 0; i < len(n.netInput); i++ {
temp := n.netInput[i] * n.lhRate
for h := 0; h < len(n.Hidden); h++ {
n.Input[i].Weights[h] += temp * n.Hidden[h].error
}
}
}
// calcTotalError sums the output-layer errors of the last training step.
func (n *Network) calcTotalError() float64 {
temp := 0.0
for j := 0; j < len(n.Output); j++ {
temp += n.Output[j].error
}
return temp
}
// Results calculates network outputs and returns raw float64 activation
// values, one per output node, after a forward pass on the given input.
func (n *Network) Results(input []float64) []float64 {
n.netInput = input
n.calcActivation()
out := make([]float64, len(n.Output), len(n.Output))
for i, node := range n.Output {
out[i] = node.activation
}
return out
}
// Result calculates network output value for single output node networks;
// it panics when the network has more than one output node.
func (n *Network) Result(input []float64) float64 {
if len(n.Output) != 1 {
panic("nne: network output must have only 1 output node.")
}
n.netInput = input
n.calcActivation()
node := n.Output[0]
return node.activation
} | nne.go | 0.701202 | 0.503845 | nne.go | starcoder
package main
import (
"bufio"
"flag"
"fmt"
"os"
"strconv"
"strings"
)
// Point is an integer coordinate on the puzzle grid.
type Point struct {
x int
y int
}
// main dispatches to a puzzle part based on the -method flag.
func main() {
// Use Flags to run a part
methodP := flag.String("method", "p1", "The method/part that should be run, valid are p1,p2 and test")
flag.Parse()
switch *methodP {
case "p1":
PartOne()
break
case "p2":
PartTwo()
break
case "test":
break
}
}
// PartOne reads the input coordinates and prints the size of the largest
// finite closest-neighbour area.
func PartOne() {
input := readInput()
largestArea := FindLargestArea(input)
fmt.Printf("The largest area is %v\n", largestArea)
}
// PartTwo is not implemented yet.
func PartTwo() {
//input := readInput()
}
// FindLargestArea returns the size of the largest finite area: points whose
// region touches the grid edge (infinite area) are excluded first, then the
// closest-neighbour count of each remaining point is measured.
func FindLargestArea(points []Point) int {
largestArea := 0
checkPoints := FindInfinitePoints(points)
// Find number of closest neighbours for point
sizeMap := make(map[Point]int)
for _, point := range checkPoints {
areaSize := FindNumClosestNeighbours(point, points)
sizeMap[point] = areaSize
if areaSize > largestArea {
largestArea = areaSize
}
}
// Debug output: per-point area sizes.
fmt.Println(sizeMap)
return largestArea
}
// FindInfinitePoints returns the input points minus those with an
// "infinite area", i.e. points that are the closest neighbour of some cell
// on the edge of the bounding grid.
// Fix: the original mutated checkPoints with append-removal while ranging
// over the original slice header; since both share a backing array, that
// skipped/duplicated elements and could remove the wrong points. We now
// collect the infinite points in a set and filter in a second pass.
func FindInfinitePoints(points []Point) []Point {
	// Bounding grid dimensions.
	xmax := FindMax(1, points)
	ymax := FindMax(2, points)
	// Collect every coordinate on the border of the grid.
	edgePoints := make([]Point, 0)
	for i := 0; i <= xmax; i++ {
		for j := 0; j <= ymax; j++ {
			if i == 0 || j == 0 || i == xmax || j == ymax {
				edgePoints = append(edgePoints, Point{i, j})
			}
		}
	}
	// Any input point closest to a border cell owns an unbounded region.
	// (The tie sentinel {-1,-1} may land in the set; it matches nothing.)
	infinite := make(map[Point]bool)
	for _, point := range edgePoints {
		infinite[FindClosestNeighbour(point, points)] = true
	}
	checkPoints := make([]Point, 0, len(points))
	for _, p := range points {
		if !infinite[p] {
			checkPoints = append(checkPoints, p)
		}
	}
	return checkPoints
}
// FindMax returns the largest x (field==1) or y (field==2) coordinate among
// the points; any other field value, or an empty slice, yields 0.
func FindMax(field int, points []Point) int {
	highest := 0
	for _, point := range points {
		var coord int
		switch field {
		case 1:
			coord = point.x
		case 2:
			coord = point.y
		default:
			continue
		}
		if coord > highest {
			highest = coord
		}
	}
	return highest
}
// FindNumClosestNeighbours counts the grid cells whose unique closest input
// point is neighbour — i.e. the size of neighbour's region.
// NOTE(review): the grid loops here use `< xmax`/`< ymax`, while
// FindInfinitePoints iterates `<= xmax`/`<= ymax`; confirm whether the edge
// row/column should be counted.
func FindNumClosestNeighbours(neighbour Point, points []Point) int {
xmax := FindMax(1, points)
ymax := FindMax(2, points)
areaSize := 0
for i := 0; i < xmax; i++ {
for j := 0; j < ymax; j++ {
if (FindClosestNeighbour(Point{i, j}, points) == neighbour) {
areaSize++
}
}
}
return areaSize
}
// FindClosestNeighbour returns the neighbour with the smallest Manhattan
// distance to gridPoint, or the sentinel Point{-1,-1} when two or more
// neighbours tie for the smallest distance.
// Fix: the tie check previously used `for deltaL := range deltaList`, which
// iterates INDICES, so the closest distance was being compared against slice
// indices instead of the recorded distances.
func FindClosestNeighbour(gridPoint Point, neighbours []Point) Point {
	closestPoint := neighbours[0]
	closestDelta := ManhattenDistance(closestPoint, gridPoint)
	deltaList := make([]int, 0, len(neighbours))
	for _, point := range neighbours {
		delta := ManhattenDistance(point, gridPoint)
		if delta < closestDelta {
			closestPoint = point
			closestDelta = delta
		}
		deltaList = append(deltaList, delta)
	}
	// A unique minimum appears exactly once in the distance list.
	found := 0
	for _, delta := range deltaList {
		if delta == closestDelta {
			found++
		}
	}
	if found > 1 {
		return Point{-1, -1}
	}
	return closestPoint
}
// ManhattenDistance returns the Manhattan (taxicab) distance between the
// two points: |dx| + |dy|.
func ManhattenDistance(firstPoint Point, secondPoint Point) int {
	dx := abs(firstPoint.x - secondPoint.x)
	dy := abs(firstPoint.y - secondPoint.y)
	return dx + dy
}
// abs returns the absolute value of x.
// (Idiom fix: dropped the redundant else after a terminating return.)
func abs(x int) int {
	if x < 0 {
		return -x
	}
	return x
}
// readInput reads comma-separated "x, y" pairs from input.txt, one per line,
// and returns them as Points.
// NOTE(review): the errors from os.Open and strconv.Atoi are discarded — a
// missing file yields an empty result and malformed fields become 0;
// consider surfacing these.
func readInput() []Point {
var input []Point
f, _ := os.Open("input.txt")
scanner := bufio.NewScanner(f)
for scanner.Scan() {
if scanner.Text() != "" {
values := strings.Split(scanner.Text(), ",")
x, _ := strconv.Atoi(values[0])
y, _ := strconv.Atoi(strings.Trim(values[1], " "))
input = append(input, Point{x, y})
}
}
return input
} | 2018/6-SafeZone/main.go | 0.61878 | 0.450964 | main.go | starcoder
package math
import (
"unsafe"
)
// Transform is a utility type used to aggregate transformations. Transform
// concatenation, like matrix multiplication, is not commutative.
type Transform Mat4
// NewTransform returns a new transform initialized to the identity.
func NewTransform() Transform {
return Transform(Ident4())
}
// Iden sets this transform to the identity transform. You NEED to call this
// EXCEPT IF:
// - You get your transform from NewTransform
// - You're gonna call Set* BEFORE Translate* or Rotate*
func (t *Transform) Iden() {
*t = Transform(Ident4())
}
// Translate3f concatenates a translation to this transform of {x, y, z}.
func (t *Transform) Translate3f(x, y, z float32) {
tran := Translate3D(x, y, z)
((*Mat4)(t)).Mul4With(&tran)
}
// TranslateVec3 concatenates a translation to this transform of v.
func (t *Transform) TranslateVec3(v *Vec3) {
tran := Translate3D(v[0], v[1], v[2])
((*Mat4)(t)).Mul4With(&tran)
}
// SetTranslate3f sets (replaces) the transform to a translate transform of
// {x, y, z}.
func (t *Transform) SetTranslate3f(x, y, z float32) {
*t = Transform(Translate3D(x, y, z))
}
// SetTranslateVec3 sets (replaces) the transform to a translate transform
// of v.
func (t *Transform) SetTranslateVec3(v *Vec3) {
*t = Transform(Translate3D(v[0], v[1], v[2]))
}
// RotateQuat rotates this transform by q.
func (t *Transform) RotateQuat(q *Quaternion) {
m := q.Mat4()
((*Mat4)(t)).Mul4With(&m)
}
// SetRotateQuat rotates this transform by q.
func (t *Transform) SetRotateQuat(q *Quaternion) {
*t = Transform(q.Mat4())
}
// Concatenate Transform t2 into t.
func (t *Transform) Concatenate(t2 *Transform) {
((*Mat4)(t)).Mul4With((*Mat4)(t2))
}
// Scale3f scales this transform by {x, y, z}.
func (t *Transform) Scale3f(x, y, z float32) {
	tran := Scale3D(x, y, z)
	((*Mat4)(t)).Mul4With(&tran)
}

// ScaleVec3 scales this transform by v.
func (t *Transform) ScaleVec3(v *Vec3) {
	tran := Scale3D(v[0], v[1], v[2])
	((*Mat4)(t)).Mul4With(&tran)
}

// SetScale3f sets the transform to a scaling operation by {x, y, z}.
func (t *Transform) SetScale3f(x, y, z float32) {
	*t = Transform(Scale3D(x, y, z))
}

// SetScaleVec3 sets the transform to a scaling operation by v.
func (t *Transform) SetScaleVec3(v *Vec3) {
	*t = Transform(Scale3D(v[0], v[1], v[2]))
}

// LocalToWorld transforms a given point and returns the world point that this
// transform generates.
func (t *Transform) LocalToWorld(v *Vec3) Vec3 {
	v4 := v.Vec4(1)
	v4 = (*Mat4)(t).Mul4x1(&v4)
	return v4.Vec3()
}

// WorldToLocal transforms a given point and returns the local point that this
// transform generates.
func (t *Transform) WorldToLocal(v *Vec3) Vec3 {
	// BUG(hydroflame): the current implementation currently inverse the matrix
	// on every call ... that may not be the most efficient.
	inv := (*Mat4)(t).Inverse()
	v4 := v.Vec4(1)
	v4 = inv.Mul4x1(&v4)
	return v4.Vec3()
}

// Normal returns the normal matrix of this transform, this is used in most
// light shading algorithms.
func (t *Transform) Normal() Mat3 {
	// Since we prevent scaling we are guaranteed that the upper 3x3 matrix is
	// orthonormal, so we can return it directly and it's the correct transform
	// matrix.
	return ((*Mat4)(t)).Mat3()
}

// Mat4 simply returns the Mat4 associated with this Transform. This effectively
// makes a copy.
func (t *Transform) Mat4() Mat4 {
	return *((*Mat4)(t))
}

// AsMat4 returns a pointer to the Mat4 associated with this Transform.
func (t *Transform) AsMat4() *Mat4 {
	return (*Mat4)(t)
}

// Pointer returns the pointer to the first element of the underlying 4x4
// matrix. This can be passed directly to OpenGL functions.
func (t *Transform) Pointer() unsafe.Pointer {
	return unsafe.Pointer(t)
}

// String returns a string that represents this transform (a Mat4).
func (t *Transform) String() string {
	return (*Mat4)(t).String()
}
// Transform2D is a utility type used to aggregate transformations. Transform
// concatenation, like matrix multiplication, is not commutative.
type Transform2D Mat3

// NewTransform2D returns a new, initialized (identity) transform.
func NewTransform2D() Transform2D {
	return Transform2D(Ident3())
}

// Iden sets this transform to the identity transform. You NEED to call this
// EXCEPT IF:
// - You get your transform from NewTransform2D
// - You're gonna call Set* BEFORE Translate* or Rotate*
func (t *Transform2D) Iden() {
	*t = Transform2D(Ident3())
}

// Translate2f concatenates a translation to this transform of {x, y}.
func (t *Transform2D) Translate2f(x, y float32) {
	tran := Translate2D(x, y)
	((*Mat3)(t)).Mul3With(&tran)
}

// TranslateVec2 concatenates a translation to this transform of v.
func (t *Transform2D) TranslateVec2(v *Vec2) {
	tran := Translate2D(v[0], v[1])
	((*Mat3)(t)).Mul3With(&tran)
}

// SetTranslate2f sets the transform to a translate transform of {x, y}.
func (t *Transform2D) SetTranslate2f(x, y float32) {
	*t = Transform2D(Translate2D(x, y))
}

// SetTranslateVec2 sets the transform to a translate transform of v.
func (t *Transform2D) SetTranslateVec2(v *Vec2) {
	*t = Transform2D(Translate2D(v[0], v[1]))
}

// Rotate concatenates a rotation of angle (radian).
func (t *Transform2D) Rotate(angle float32) {
	rot := HomogRotate2D(angle)
	((*Mat3)(t)).Mul3With(&rot)
}

// SetRotate sets the transform to a rotate transform of angle (radian).
func (t *Transform2D) SetRotate(angle float32) {
	*t = Transform2D(HomogRotate2D(angle))
}

// LocalToWorld transforms a given point and returns the world point that this
// transform generates.
func (t *Transform2D) LocalToWorld(v *Vec2) Vec2 {
	v3 := v.Vec3(1)
	v3 = (*Mat3)(t).Mul3x1(&v3)
	return v3.Vec2()
}

// WorldToLocal transforms a given point and returns the local point that this
// transform generates.
func (t *Transform2D) WorldToLocal(v *Vec2) Vec2 {
	// BUG(hydroflame): the current implementation currently inverse the matrix
	// on every call ... that may not be the most efficient.
	inv := (*Mat3)(t).Inverse()
	v3 := v.Vec3(1)
	v3 = inv.Mul3x1(&v3)
	return v3.Vec2()
}

// Concatenate concatenates Transform2D t2 into t.
func (t *Transform2D) Concatenate(t2 *Transform2D) {
	((*Mat3)(t)).Mul3With((*Mat3)(t2))
}

// Mat3 simply returns the Mat3 associated with this Transform. This effectively
// makes a copy.
func (t *Transform2D) Mat3() Mat3 {
	return *((*Mat3)(t))
}

// Pointer returns the pointer to the first element of the underlying 3x3
// matrix. This can be passed directly to OpenGL functions.
func (t *Transform2D) Pointer() unsafe.Pointer {
	return unsafe.Pointer(t)
}

// String returns a string that represents this transform (a Mat3).
func (t *Transform2D) String() string {
	return (*Mat3)(t).String()
} | math/transform.go | 0.791821 | 0.683406 | transform.go | starcoder
package sg
import "fmt"
// Mat4Type classifies the contents of a Mat4 as bit flags so that MulM4
// can pick a cheaper multiplication fast path. Types of two operands are
// combined with bitwise OR in the product.
type Mat4Type int

const (
	IdentityType Mat4Type = 0x00
	Translation2DType = 0x01
	Scale2DType = 0x02
	Rotation2DType = 0x04
	ScaleAndRotate2DType = 0x07 // all of the above
	GenericType = 0xff
)

// Mat4 is a 4x4 float32 matrix stored row by row (row 0 is M[0..3]),
// tagged with the kind of transform it holds.
type Mat4 struct {
	M [16]float32
	Type Mat4Type
}

// NewIdentity returns the identity matrix, tagged IdentityType.
func NewIdentity() Mat4 {
	return Mat4{
		M: [16]float32{
			1, 0, 0, 0,
			0, 1, 0, 0,
			0, 0, 1, 0,
			0, 0, 0, 1,
		},
		Type: IdentityType,
	}
}

// NewMat4 builds a Mat4 from 16 elements given row by row, tagged mtype.
func NewMat4(
	m11 float32, m12 float32, m13 float32, m14 float32,
	m21 float32, m22 float32, m23 float32, m24 float32,
	m31 float32, m32 float32, m33 float32, m34 float32,
	m41 float32, m42 float32, m43 float32, m44 float32,
	mtype Mat4Type) Mat4 {
	return Mat4{
		M: [16]float32{
			m11, m12, m13, m14,
			m21, m22, m23, m24,
			m31, m32, m33, m34,
			m41, m42, m43, m44,
		},
		Type: mtype,
	}
}
// IsNil reports whether m is the zero Mat4: every element zero and the
// Type tag still IdentityType (the state of an uninitialized Mat4).
func (m Mat4) IsNil() bool {
	if m.Type != IdentityType {
		return false
	}
	for i := range m.M {
		if m.M[i] != 0 {
			return false
		}
	}
	return true
}
// Equals reports whether the two matrices contain exactly the same 16
// elements. The Type tag is deliberately not compared, matching the
// original element-wise loop.
func (this Mat4) Equals(other Mat4) bool {
	// Go compares arrays element-wise with ==, which is exactly what the
	// original hand-written loop did (including float semantics).
	return this.M == other.M
}
// MulM4 returns the matrix product this*o. The Type tags select cheaper
// fast paths (translation*translation, translation*generic,
// generic*translation, and the 2D affine case where rows 3 and 4 are
// identity); otherwise the full 4x4 product is computed. The result's
// Type is the bitwise OR of the operand types.
func (this Mat4) MulM4(o Mat4) Mat4 {
	m := this.M
	if this.Type == Translation2DType && o.Type == Translation2DType {
		// Two pure 2D translations compose by adding their offsets.
		return NewMat4(1, 0, 0, m[3]+o.M[3],
			0, 1, 0, m[7]+o.M[7],
			0, 0, 1, 0,
			0, 0, 0, 1,
			Translation2DType)
	} else if this.Type == Translation2DType {
		// Pure translation times a general matrix: only rows 1 and 2 pick
		// up contributions, scaled by o's bottom row.
		return NewMat4(
			o.M[0]+m[3]*o.M[12],
			o.M[1]+m[3]*o.M[13],
			o.M[2]+m[3]*o.M[14],
			o.M[3]+m[3]*o.M[15],
			o.M[4]+m[7]*o.M[12],
			o.M[5]+m[7]*o.M[13],
			o.M[6]+m[7]*o.M[14],
			o.M[7]+m[7]*o.M[15],
			o.M[8],
			o.M[9],
			o.M[10],
			o.M[11],
			o.M[12],
			o.M[13],
			o.M[14],
			o.M[15],
			this.Type|o.Type)
	} else if o.Type == Translation2DType {
		// General matrix times a pure translation: only the last column changes.
		return NewMat4(
			m[0], m[1], m[2], m[0]*o.M[3]+m[1]*o.M[7]+m[3],
			m[4], m[5], m[6], m[4]*o.M[3]+m[5]*o.M[7]+m[7],
			m[8], m[9], m[10], m[8]*o.M[3]+m[9]*o.M[7]+m[11],
			m[12], m[13], m[14], m[12]*o.M[3]+m[13]*o.M[7]+m[15],
			Mat4Type(this.Type|o.Type))
	} else if this.Type <= ScaleAndRotate2DType && o.Type <= ScaleAndRotate2DType {
		// Both operands are 2D affine (scale/rotate/translate only), so
		// rows 3 and 4 of the product stay identity.
		return NewMat4(
			m[0]*o.M[0]+m[1]*o.M[4],
			m[0]*o.M[1]+m[1]*o.M[5],
			0,
			m[0]*o.M[3]+m[1]*o.M[7]+m[3],
			m[4]*o.M[0]+m[5]*o.M[4],
			m[4]*o.M[1]+m[5]*o.M[5],
			0,
			m[4]*o.M[3]+m[5]*o.M[7]+m[7],
			0, 0, 1, 0,
			0, 0, 0, 1,
			Mat4Type(this.Type|o.Type))
	}
	// Generic full multiplication.
	return NewMat4(
		m[0]*o.M[0]+m[1]*o.M[4]+m[2]*o.M[8]+m[3]*o.M[12],
		m[0]*o.M[1]+m[1]*o.M[5]+m[2]*o.M[9]+m[3]*o.M[13],
		m[0]*o.M[2]+m[1]*o.M[6]+m[2]*o.M[10]+m[3]*o.M[14],
		m[0]*o.M[3]+m[1]*o.M[7]+m[2]*o.M[11]+m[3]*o.M[15],
		m[4]*o.M[0]+m[5]*o.M[4]+m[6]*o.M[8]+m[7]*o.M[12],
		m[4]*o.M[1]+m[5]*o.M[5]+m[6]*o.M[9]+m[7]*o.M[13],
		m[4]*o.M[2]+m[5]*o.M[6]+m[6]*o.M[10]+m[7]*o.M[14],
		m[4]*o.M[3]+m[5]*o.M[7]+m[6]*o.M[11]+m[7]*o.M[15],
		m[8]*o.M[0]+m[9]*o.M[4]+m[10]*o.M[8]+m[11]*o.M[12],
		m[8]*o.M[1]+m[9]*o.M[5]+m[10]*o.M[9]+m[11]*o.M[13],
		m[8]*o.M[2]+m[9]*o.M[6]+m[10]*o.M[10]+m[11]*o.M[14],
		m[8]*o.M[3]+m[9]*o.M[7]+m[10]*o.M[11]+m[11]*o.M[15],
		m[12]*o.M[0]+m[13]*o.M[4]+m[14]*o.M[8]+m[15]*o.M[12],
		m[12]*o.M[1]+m[13]*o.M[5]+m[14]*o.M[9]+m[15]*o.M[13],
		m[12]*o.M[2]+m[13]*o.M[6]+m[14]*o.M[10]+m[15]*o.M[14],
		m[12]*o.M[3]+m[13]*o.M[7]+m[14]*o.M[11]+m[15]*o.M[15],
		Mat4Type(this.Type|o.Type))
}
// MulV2 transforms v, treating it as the homogeneous vector (x, y, 0, 1)
// and returning the first two components of the product.
func (this Mat4) MulV2(v Vec2) Vec2 {
	m := this.M
	return Vec2{
		m[0]*v.X + m[1]*v.Y + m[3],
		m[4]*v.X + m[5]*v.Y + m[7]}
}

// MulV3 transforms v, treating it as the homogeneous vector (x, y, z, 1)
// and returning the first three components of the product.
func (this Mat4) MulV3(v Vec3) Vec3 {
	m := this.M
	return Vec3{
		m[0]*v.X + m[1]*v.Y + m[2]*v.Z + m[3],
		m[4]*v.X + m[5]*v.Y + m[6]*v.Z + m[7],
		m[8]*v.X + m[9]*v.Y + m[10]*v.Z + m[11]}
}

// MulV4 returns the full matrix-vector product this * v.
func (this Mat4) MulV4(v Vec4) Vec4 {
	m := this.M
	return Vec4{
		m[0]*v.X + m[1]*v.Y + m[2]*v.Z + m[3]*v.W,
		m[4]*v.X + m[5]*v.Y + m[6]*v.Z + m[7]*v.W,
		m[8]*v.X + m[9]*v.Y + m[10]*v.Z + m[11]*v.W,
		m[12]*v.X + m[13]*v.Y + m[14]*v.Z + m[15]*v.W}
}
// Transposed returns the transpose of this matrix: element (i, j) of the
// result is element (j, i) of the receiver.
// NOTE(review): the result is tagged IdentityType, matching the original
// code — a transposed translation/rotation is not reclassified.
func (this Mat4) Transposed() Mat4 {
	m := this.M
	return NewMat4(
		m[0], m[4], m[8], m[12],
		// Row 2 ends in m[13]; the original used m[12] here, duplicating
		// the last element of row 1 and breaking the transpose.
		m[1], m[5], m[9], m[13],
		m[2], m[6], m[10], m[14],
		m[3], m[7], m[11], m[15],
		IdentityType)
}
// Inverted returns the inverse of this matrix, computed via the adjugate
// (cofactor) method. If the determinant is zero the identity matrix is
// returned instead. When invertible is non-nil it is set to report
// whether the inversion succeeded.
// NOTE(review): the result's Type is left at its zero value
// (IdentityType) regardless of the input's type.
func (this Mat4) Inverted(invertible *bool) Mat4 {
	m := this.M
	var inv Mat4
	inv.M[0] = m[5]*m[10]*m[15] -
		m[5]*m[11]*m[14] -
		m[9]*m[6]*m[15] +
		m[9]*m[7]*m[14] +
		m[13]*m[6]*m[11] -
		m[13]*m[7]*m[10]
	inv.M[4] = -m[4]*m[10]*m[15] +
		m[4]*m[11]*m[14] +
		m[8]*m[6]*m[15] -
		m[8]*m[7]*m[14] -
		m[12]*m[6]*m[11] +
		m[12]*m[7]*m[10]
	inv.M[8] = m[4]*m[9]*m[15] -
		m[4]*m[11]*m[13] -
		m[8]*m[5]*m[15] +
		m[8]*m[7]*m[13] +
		m[12]*m[5]*m[11] -
		m[12]*m[7]*m[9]
	inv.M[12] = -m[4]*m[9]*m[14] +
		m[4]*m[10]*m[13] +
		m[8]*m[5]*m[14] -
		m[8]*m[6]*m[13] -
		m[12]*m[5]*m[10] +
		m[12]*m[6]*m[9]
	inv.M[1] = -m[1]*m[10]*m[15] +
		m[1]*m[11]*m[14] +
		m[9]*m[2]*m[15] -
		m[9]*m[3]*m[14] -
		m[13]*m[2]*m[11] +
		m[13]*m[3]*m[10]
	inv.M[5] = m[0]*m[10]*m[15] -
		m[0]*m[11]*m[14] -
		m[8]*m[2]*m[15] +
		m[8]*m[3]*m[14] +
		m[12]*m[2]*m[11] -
		m[12]*m[3]*m[10]
	inv.M[9] = -m[0]*m[9]*m[15] +
		m[0]*m[11]*m[13] +
		m[8]*m[1]*m[15] -
		m[8]*m[3]*m[13] -
		m[12]*m[1]*m[11] +
		m[12]*m[3]*m[9]
	inv.M[13] = m[0]*m[9]*m[14] -
		m[0]*m[10]*m[13] -
		m[8]*m[1]*m[14] +
		m[8]*m[2]*m[13] +
		m[12]*m[1]*m[10] -
		m[12]*m[2]*m[9]
	inv.M[2] = m[1]*m[6]*m[15] -
		m[1]*m[7]*m[14] -
		m[5]*m[2]*m[15] +
		m[5]*m[3]*m[14] +
		m[13]*m[2]*m[7] -
		m[13]*m[3]*m[6]
	inv.M[6] = -m[0]*m[6]*m[15] +
		m[0]*m[7]*m[14] +
		m[4]*m[2]*m[15] -
		m[4]*m[3]*m[14] -
		m[12]*m[2]*m[7] +
		m[12]*m[3]*m[6]
	inv.M[10] = m[0]*m[5]*m[15] -
		m[0]*m[7]*m[13] -
		m[4]*m[1]*m[15] +
		m[4]*m[3]*m[13] +
		m[12]*m[1]*m[7] -
		m[12]*m[3]*m[5]
	inv.M[14] = -m[0]*m[5]*m[14] +
		m[0]*m[6]*m[13] +
		m[4]*m[1]*m[14] -
		m[4]*m[2]*m[13] -
		m[12]*m[1]*m[6] +
		m[12]*m[2]*m[5]
	inv.M[3] = -m[1]*m[6]*m[11] +
		m[1]*m[7]*m[10] +
		m[5]*m[2]*m[11] -
		m[5]*m[3]*m[10] -
		m[9]*m[2]*m[7] +
		m[9]*m[3]*m[6]
	inv.M[7] = m[0]*m[6]*m[11] -
		m[0]*m[7]*m[10] -
		m[4]*m[2]*m[11] +
		m[4]*m[3]*m[10] +
		m[8]*m[2]*m[7] -
		m[8]*m[3]*m[6]
	inv.M[11] = -m[0]*m[5]*m[11] +
		m[0]*m[7]*m[9] +
		m[4]*m[1]*m[11] -
		m[4]*m[3]*m[9] -
		m[8]*m[1]*m[7] +
		m[8]*m[3]*m[5]
	inv.M[15] = m[0]*m[5]*m[10] -
		m[0]*m[6]*m[9] -
		m[4]*m[1]*m[10] +
		m[4]*m[2]*m[9] +
		m[8]*m[1]*m[6] -
		m[8]*m[2]*m[5]
	// Determinant via expansion along the first row of cofactors.
	var det float32 = m[0]*inv.M[0] + m[1]*inv.M[4] + m[2]*inv.M[8] + m[3]*inv.M[12]
	if det == 0 {
		if invertible != nil {
			*invertible = false
		}
		return NewIdentity()
	}
	det = 1.0 / det
	for i := 0; i < 16; i++ {
		inv.M[i] *= det
	}
	if invertible != nil {
		*invertible = true
	}
	return inv
}
// String formats the matrix as four rows of four %g-formatted values.
func (this Mat4) String() string {
	return fmt.Sprintf("%g %g %g %g\n%g %g %g %g\n%g %g %g %g\n%g %g %g %g",
		this.M[0], this.M[1], this.M[2], this.M[3],
		this.M[4], this.M[5], this.M[6], this.M[7],
		this.M[8], this.M[9], this.M[10], this.M[11],
		this.M[12], this.M[13], this.M[14], this.M[15])
} | sg/mat4.go | 0.538255 | 0.612918 | mat4.go | starcoder
3D Printable Nuts and Bolts
*/
//-----------------------------------------------------------------------------
package main
import . "github.com/deadsy/sdfx/sdf"
//-----------------------------------------------------------------------------
// Tolerance: Measured in mm. Typically 0.0 to 0.4. Larger is looser.
// Smaller is tighter. Heuristically it could be set to some fraction
// of an FDM nozzle size. It's worth experimenting to find out a good
// value for the specific application and printer.
// const MM_TOLERANCE = 0.4 // a bit loose
// const MM_TOLERANCE = 0.2 // very tight
// const MM_TOLERANCE = 0.3 // good plastic to plastic fit
const MM_TOLERANCE = 0.3

// INCH_TOLERANCE is the same clearance expressed in inches.
const INCH_TOLERANCE = MM_TOLERANCE / MM_PER_INCH

// Quality: The long axis of the model is rendered with N STL cells. A larger
// value will take longer to generate, give a better model resolution and a
// larger STL file size.
const QUALITY = 200
//-----------------------------------------------------------------------------
// Bolt returns a 3D bolt model: a hex or knurled head, an optional
// unthreaded shank, and an ISO external thread with a chamfered end.
// Negative lengths yield nil. Panics on an unknown head style.
func Bolt(
	name string, // name of thread
	style string, // head style hex,knurl
	tolerance float64, // subtract from external thread radius
	total_length float64, // threaded length + shank length
	shank_length float64, // non threaded length
) SDF3 {
	t := ThreadLookup(name)
	if total_length < 0 {
		return nil
	}
	if shank_length < 0 {
		return nil
	}
	threadLength := total_length - shank_length
	if threadLength < 0 {
		threadLength = 0
	}
	// Head.
	headRadius := t.Hex_Radius()
	headHeight := t.Hex_Height()
	var head SDF3
	switch style {
	case "hex":
		head = HexHead3D(headRadius, headHeight, "b")
	case "knurl":
		head = KnurledHead3D(headRadius, headHeight, headRadius*0.25)
	default:
		panic("unknown style")
	}
	// Shank: extended half-way into the head so the solids overlap.
	shankLength := shank_length + headHeight/2
	shankOfs := shankLength / 2
	shank := Cylinder3D(shankLength, t.Radius, headHeight*0.08)
	shank = Transform3D(shank, Translate3d(V3{0, 0, shankOfs}))
	// Thread, shrunk by the print tolerance and chamfered at the tip.
	r := t.Radius - tolerance
	screwOfs := threadLength/2 + shankLength
	screw := Screw3D(ISOThread(r, t.Pitch, "external"), threadLength, t.Pitch, 1)
	screw = Chamfered_Cylinder(screw, 0, 0.5)
	screw = Transform3D(screw, Translate3d(V3{0, 0, screwOfs}))
	return Union3D(head, screw, shank)
}
//-----------------------------------------------------------------------------
// Nut returns a 3D nut model: a hex or knurled body with an ISO internal
// thread cut through it. Panics on an unknown body style.
func Nut(
	name string, // name of thread
	style string, // head style hex,knurl
	tolerance float64, // add to internal thread radius
) SDF3 {
	t := ThreadLookup(name)
	nutRadius := t.Hex_Radius()
	nutHeight := t.Hex_Height()
	var body SDF3
	switch style {
	case "hex":
		body = HexHead3D(nutRadius, nutHeight, "tb")
	case "knurl":
		body = KnurledHead3D(nutRadius, nutHeight, nutRadius*0.25)
	default:
		panic("unknown style")
	}
	// Subtract the internal thread (grown by the print tolerance).
	thread := Screw3D(ISOThread(t.Radius+tolerance, t.Pitch, "internal"), nutHeight, t.Pitch, 1)
	return Difference3D(body, thread)
}
//-----------------------------------------------------------------------------
// inch renders an example UNC 5/8 bolt and nut (knurled heads) to STL,
// scaling the model from inches to mm for output.
func inch() {
	// bolt
	bolt_3d := Bolt("unc_5/8", "knurl", INCH_TOLERANCE, 2.0, 0.5)
	bolt_3d = Scale3D(bolt_3d, MM_PER_INCH)
	RenderSTL(bolt_3d, QUALITY, "bolt.stl")
	// nut
	nut_3d := Nut("unc_5/8", "knurl", INCH_TOLERANCE)
	nut_3d = Scale3D(nut_3d, MM_PER_INCH)
	RenderSTL(nut_3d, QUALITY, "nut.stl")
}

//-----------------------------------------------------------------------------

// metric renders an example M16x2 hex bolt and nut to STL (mm units).
func metric() {
	// bolt
	bolt_3d := Bolt("M16x2", "hex", MM_TOLERANCE, 50, 10)
	RenderSTL(bolt_3d, QUALITY, "bolt.stl")
	// nut
	nut_3d := Nut("M16x2", "hex", MM_TOLERANCE)
	RenderSTL(nut_3d, QUALITY, "nut.stl")
}

//-----------------------------------------------------------------------------

func main() {
	//inch()
	metric()
}
//----------------------------------------------------------------------------- | examples/3dp_nutbolt/main.go | 0.635901 | 0.412885 | main.go | starcoder |
package circuit
import (
"math"
"github.com/heustis/tsp-solver-go/model"
)
// ClosestGreedyByEdge is an O(n^3) greedy algorithm that:
// 1. creates a separate ClosestGreedy for each edge in the convex hull,
// 2. for each edge, update the corresponding ClosestGreedy by attaching that edge to its closest point,
// 3. updates each ClosestGreedy simultaneously, so that they all complete at the same time.
type ClosestGreedyByEdge struct {
	// circuits holds one ClosestGreedy sub-circuit per initial hull edge.
	circuits []model.Circuit
	// enableInteriorUpdates is forwarded to each ClosestGreedy.
	enableInteriorUpdates bool
}
// NewClosestGreedyByEdge creates a new Circuit that:
// 1. creates a separate ClosestGreedy for each edge in the convex hull,
// 2. for each edge, update the corresponding ClosestGreedy by attaching that edge to its closest point,
// 3. updates each ClosestGreedy simultaneously, so that they all complete at the same time.
// Complexity: O(n^3)
func NewClosestGreedyByEdge(vertices []model.CircuitVertex, perimeterBuilder model.PerimeterBuilder, enableInteriorUpdates bool) model.Circuit {
	circuitEdges, unattachedVertices := perimeterBuilder(vertices)
	// closestEdges tracks, per vertex, the globally closest hull edge;
	// toAttach records the first (edge, vertex) attachment per sub-circuit.
	closestEdges := make(map[model.CircuitVertex]*model.DistanceToEdge)
	toAttach := make(map[*ClosestGreedy]*model.DistanceToEdge)
	initLength := 0.0
	for _, edge := range circuitEdges {
		initLength += edge.GetLength()
	}
	circuits := make([]model.Circuit, len(circuitEdges))
	// Create a greedy circuit for each edge, with each circuit attaching that edge to its closest point.
	// This allows the greedy algorithm to detect scenarios where the points are individually closer to various edges, but are collectively closer to a different edge.
	// This increases the complexity of this circuit implementation to O(n^3), the unsmiplified form being O(e*(n-e)*(n-e)), since the greedy implementation is O(n^2) or O((n-e)^2).
	for i, e := range circuitEdges {
		circuit := &ClosestGreedy{
			circuitEdges: make([]model.CircuitEdge, len(circuitEdges)),
			closestEdges: model.NewHeap(model.GetDistanceToEdgeForHeap),
			unattachedVertices: make(map[model.CircuitVertex]bool),
			length: initLength,
			enableInteriorUpdates: enableInteriorUpdates,
		}
		copy(circuit.circuitEdges, circuitEdges)
		for k, v := range unattachedVertices {
			circuit.unattachedVertices[k] = v
		}
		// Find the unattached vertex closest to this edge, and update the
		// per-vertex global closest-edge map at the same time.
		vertexClosestToEdge := &model.DistanceToEdge{
			Distance: math.MaxFloat64,
		}
		for v := range unattachedVertices {
			d := e.DistanceIncrease(v)
			if d < vertexClosestToEdge.Distance {
				vertexClosestToEdge = &model.DistanceToEdge{
					Vertex: v,
					Edge: e,
					Distance: d,
				}
			}
			if prevClosest, okay := closestEdges[v]; !okay || d < prevClosest.Distance {
				closestEdges[v] = &model.DistanceToEdge{
					Vertex: v,
					Edge: e,
					Distance: d,
				}
			}
		}
		toAttach[circuit] = vertexClosestToEdge
		circuits[i] = circuit
	}
	// Seed each circuit's heap with every other vertex's closest edge,
	// then perform its first attachment.
	for circuit, closestToEdge := range toAttach {
		for _, dist := range closestEdges {
			if dist.Vertex != closestToEdge.Vertex {
				// Need to create a new model.DistanceToEdge for each circuit, due to how greedy circuits update DistanceToEdges
				circuit.closestEdges.Push(&model.DistanceToEdge{
					Vertex: dist.Vertex,
					Edge: dist.Edge,
					Distance: dist.Distance,
				})
			}
		}
		circuit.Update(closestToEdge.Vertex, closestToEdge.Edge)
	}
	return &ClosestGreedyByEdge{
		enableInteriorUpdates: enableInteriorUpdates,
		circuits: circuits,
	}
}
// FindNextVertexAndEdge returns the next vertex to attach and the edge to
// attach it to, taken from the currently shortest sub-circuit. Both
// return values are nil once that circuit has no unattached vertices.
func (c *ClosestGreedyByEdge) FindNextVertexAndEdge() (model.CircuitVertex, model.CircuitEdge) {
	// Early return replaces the original `else` after a terminating branch.
	shortest := c.getShortestCircuit()
	if shortest == nil || len(shortest.GetUnattachedVertices()) == 0 {
		return nil, nil
	}
	next := shortest.(*ClosestGreedy).closestEdges.Peek().(*model.DistanceToEdge)
	return next.Vertex, next.Edge
}
// GetAttachedVertices returns the ordered vertices of the current best
// (shortest) sub-circuit, or an empty slice when there is none.
func (c *ClosestGreedyByEdge) GetAttachedVertices() []model.CircuitVertex {
	shortest := c.getShortestCircuit()
	if shortest == nil {
		return []model.CircuitVertex{}
	}
	return shortest.GetAttachedVertices()
}

// GetLength returns the length of the current best sub-circuit, or 0.0
// when there is none.
func (c *ClosestGreedyByEdge) GetLength() float64 {
	shortest := c.getShortestCircuit()
	if shortest == nil {
		return 0.0
	}
	return shortest.GetLength()
}

// GetUnattachedVertices returns the vertices not yet attached to the
// current best sub-circuit, or an empty map when there is none.
func (c *ClosestGreedyByEdge) GetUnattachedVertices() map[model.CircuitVertex]bool {
	shortest := c.getShortestCircuit()
	if shortest == nil {
		return make(map[model.CircuitVertex]bool)
	}
	return shortest.GetUnattachedVertices()
}
// Update advances every sub-circuit by one greedy attachment. The two
// parameters are ignored: each circuit computes its own next move.
func (c *ClosestGreedyByEdge) Update(ignoredVertex model.CircuitVertex, ignoredEdge model.CircuitEdge) {
	for _, circuit := range c.circuits {
		circuit.Update(circuit.FindNextVertexAndEdge())
	}
}

// getShortestCircuit returns the sub-circuit with the smallest current
// length, or nil when there are no circuits.
func (c *ClosestGreedyByEdge) getShortestCircuit() model.Circuit {
	shortestLen := math.MaxFloat64
	var shortest model.Circuit
	for _, circuit := range c.circuits {
		if l := circuit.GetLength(); l < shortestLen {
			shortest = circuit
			shortestLen = l
		}
	}
	return shortest
}

// Compile-time check that ClosestGreedyByEdge satisfies model.Circuit.
var _ model.Circuit = (*ClosestGreedyByEdge)(nil) | circuit/closestgreedybyedge.go | 0.810329 | 0.706342 | closestgreedybyedge.go | starcoder
package main
import (
glmath "github.com/go-gl/mathgl/mgl64"
"math"
)
// World-space axis conventions used by the camera.
var (
	forwardUnit = glmath.Vec3{1.0, 0.0, 0.0}
	rightUnit = glmath.Vec3{0.0, 0.0, 1.0}
	upUnit = glmath.Vec3{0.0, 1.0, 0.0}
)

// Camera is a free-flying camera with simple acceleration/drag physics
// and an orientation stored as two rotation angles.
type Camera struct {
	position glmath.Vec3
	velocity glmath.Vec3
	acceleration glmath.Vec3
	// speed scales movement acceleration; drag damps velocity each Tick.
	speed float64
	drag float64
	// targetUnit is the unit look direction; target = position + targetUnit.
	targetUnit glmath.Vec3
	target glmath.Vec3
	// rotation holds the look angles in radians (see calculateTargetUnit).
	rotation glmath.Vec2
	rotationSpeed float64
}
// NewCamera returns a camera at the given position with default speed,
// drag, and rotation. Tick is called once to populate the derived
// targetUnit/target fields.
func NewCamera(position glmath.Vec3) *Camera {
	camera := &Camera{
		position: position,
		velocity: glmath.Vec3{0.0, 0.0, 0.0},
		acceleration: glmath.Vec3{0.0, 0.0, 0.0},
		speed: 0.5,
		drag: 0.5,
		targetUnit: glmath.Vec3{0.0, 0.0, 0.0},
		target: glmath.Vec3{0.0, 0.0, 0.0},
		rotation: glmath.Vec2{math.Pi / 2.0, math.Pi / 2.0},
		rotationSpeed: 0.001,
	}
	camera.Tick()
	return camera
}
// MoveForward accelerates the camera along its current look direction.
func (camera *Camera) MoveForward(amount float64) {
	camera.acceleration = camera.acceleration.Add(camera.targetUnit.Mul(amount * camera.speed))
}

// MoveRight accelerates the camera sideways along -(targetUnit x upUnit).
// NOTE(review): presumably that axis points to the camera's right —
// confirm the handedness against the rendering setup.
func (camera *Camera) MoveRight(amount float64) {
	camera.acceleration = camera.acceleration.Sub(camera.targetUnit.Cross(upUnit).Mul(amount * camera.speed))
}

// MoveUp accelerates the camera along the world up axis.
func (camera *Camera) MoveUp(amount float64) {
	camera.acceleration = camera.acceleration.Add(upUnit.Mul(amount * camera.speed))
}

// Rotate adds vec (scaled by rotationSpeed) to the camera's look angles.
func (camera *Camera) Rotate(vec glmath.Vec2) {
	camera.rotation = camera.rotation.Add(vec.Mul(camera.rotationSpeed))
}

// Tick updates all fields of the Camera: accumulated acceleration is
// folded into the velocity (damped by drag), the position advances by
// the velocity, and the look direction/target are recomputed.
func (camera *Camera) Tick() {
	camera.velocity = camera.velocity.Add(camera.acceleration).Mul(camera.drag)
	camera.acceleration = camera.acceleration.Mul(0.0)
	camera.position = camera.position.Add(camera.velocity)
	camera.targetUnit = camera.calculateTargetUnit()
	camera.target = camera.position.Add(camera.targetUnit)
}
// GetPosition returns the camera's current world position.
func (camera *Camera) GetPosition() glmath.Vec3 {
	return camera.position
}

// calculateTargetUnit converts the two rotation angles into a unit look
// direction via spherical coordinates: rotation.X is the polar angle
// measured from the y axis, rotation.Y the azimuth in the xz plane.
func (camera *Camera) calculateTargetUnit() glmath.Vec3 {
	x := camera.rotation.X()
	y := camera.rotation.Y()
	return glmath.Vec3{math.Cos(y) * math.Sin(x), math.Cos(x), math.Sin(y) * math.Sin(x)}
}

// GetTarget returns the world point the camera is looking at.
func (camera *Camera) GetTarget() glmath.Vec3 {
	return camera.target
} | camera.go | 0.884102 | 0.512876 | camera.go | starcoder
package histogram
import (
"io"
"sort"
"strconv"
"strings"
"github.com/grokify/mogo/type/maputil"
"github.com/olekukonko/tablewriter"
"github.com/grokify/gocharts/v2/data/point"
"github.com/grokify/gocharts/v2/data/table"
)
// Histogram stats is used to count how many times
// an item appears and how many times number of
// appearances appear.
type Histogram struct {
	Name string
	// Bins maps an item name to the number of times it was added.
	Bins map[string]int
	Counts map[string]int // how many items have counts.
	// Percentages maps each bin name to its share of Sum (set by Inflate).
	Percentages map[string]float64
	// BinCount is the number of distinct bins (set by Inflate).
	BinCount uint
	// Sum is the total of all bin counts (set by Inflate).
	Sum int
}

// NewHistogram returns an empty histogram with the given name.
func NewHistogram(name string) *Histogram {
	return &Histogram{
		Name: name,
		Bins: map[string]int{},
		Counts: map[string]int{},
		Percentages: map[string]float64{},
		BinCount: 0}
}

/*
func (hist *Histogram) AddInt(i int) {
	hist.Add(strconv.Itoa(i), 1)
}
*/

// Add increments the named bin by binCount.
func (hist *Histogram) Add(binName string, binCount int) {
	hist.Bins[binName] += binCount
}
// Inflate recomputes the derived fields (Counts, Percentages, BinCount,
// Sum) from the current contents of Bins. Call it after the last Add.
func (hist *Histogram) Inflate() {
	hist.Counts = map[string]int{}
	sum := 0
	for _, binCount := range hist.Bins {
		// Map reads return the zero value for missing keys, so the
		// original existence check before incrementing was redundant.
		hist.Counts[strconv.Itoa(binCount)]++
		sum += binCount
	}
	hist.BinCount = uint(len(hist.Bins))
	hist.Percentages = map[string]float64{}
	for binName, binCount := range hist.Bins {
		// NOTE(review): if every bin count is zero, sum is 0 and these
		// percentages are NaN — matching the original behavior.
		hist.Percentages[binName] = float64(binCount) / float64(sum)
	}
	hist.Sum = sum
}
// BinNames returns the names of all bins, sorted ascending.
func (hist *Histogram) BinNames() []string {
	// Pre-size to the known bin count to avoid repeated growth copies.
	binNames := make([]string, 0, len(hist.Bins))
	for binName := range hist.Bins {
		binNames = append(binNames, binName)
	}
	sort.Strings(binNames)
	return binNames
}
// BinNameExists reports whether a bin with the given name exists.
func (hist *Histogram) BinNameExists(binName string) bool {
	// Comma-ok map lookup replaces the original if/return-true/false.
	_, ok := hist.Bins[binName]
	return ok
}
// ValueSum returns the sum of the counts across all bins.
func (hist *Histogram) ValueSum() int {
	var total int
	for _, count := range hist.Bins {
		total += count
	}
	return total
}
// Stats converts the histogram bins into a point.PointSet, one point per
// bin with the bin count stored as its absolute integer value.
func (hist *Histogram) Stats() point.PointSet {
	pointSet := point.NewPointSet()
	for binName, binCount := range hist.Bins {
		pointSet.PointsMap[binName] = point.Point{
			Name: binName,
			AbsoluteInt: int64(binCount)}
	}
	pointSet.Inflate()
	return pointSet
}

// Sort orders accepted by ItemCounts and WriteTableASCII, re-exported
// from maputil for convenience.
const (
	SortNameAsc = maputil.SortNameAsc
	SortNameDesc = maputil.SortNameDesc
	SortValueAsc = maputil.SortValueAsc
	SortValueDesc = maputil.SortValueDesc
)

// ItemCounts returns sorted item names and values.
func (hist *Histogram) ItemCounts(sortBy string) []maputil.Record {
	msi := maputil.MapStringInt(hist.Bins)
	return msi.Sorted(sortBy)
}
// WriteTableASCII writes the histogram as an ASCII table. For CLI apps,
// pass `os.Stdout` for `io.Writer`. `header` supplies up to two column
// titles; missing or blank entries default to "Name" and "Value".
// `sortBy` is one of the Sort* constants; `inclTotal` adds a footer row
// containing the sum of all bin counts.
func (hist *Histogram) WriteTableASCII(writer io.Writer, header []string, sortBy string, inclTotal bool) {
	rows := [][]string{}
	sortedItems := hist.ItemCounts(sortBy)
	for _, sortedItem := range sortedItems {
		rows = append(rows, []string{
			sortedItem.Name, strconv.Itoa(sortedItem.Value)})
	}
	if len(header) == 0 {
		header = []string{"Name", "Value"}
	} else if len(header) == 1 {
		// Appending (rather than indexing header[1], as the original did)
		// avoids an out-of-range panic when only one title is supplied.
		header = append(header, "Value")
	}
	header[0] = strings.TrimSpace(header[0])
	header[1] = strings.TrimSpace(header[1])
	if len(header[0]) == 0 {
		header[0] = "Name"
	}
	if len(header[1]) == 0 {
		header[1] = "Value"
	}
	table := tablewriter.NewWriter(writer)
	table.SetHeader(header)
	if inclTotal {
		table.SetFooter([]string{
			"Total",
			strconv.Itoa(hist.ValueSum()),
		}) // Add Footer
	}
	table.SetBorder(false) // Set Border to false
	table.AppendBulk(rows) // Add Bulk Data
	table.Render()
}
// Table converts the histogram into a two-column table with the given
// column titles; column 1 is marked "int" via FormatMap.
// NOTE: rows are emitted in Go map-iteration order, so row order is
// nondeterministic.
func (hist *Histogram) Table(colNameBinName, colNameBinCount string) *table.Table {
	tbl := table.NewTable(hist.Name)
	tbl.Columns = []string{colNameBinName, colNameBinCount}
	for binName, binCount := range hist.Bins {
		tbl.Rows = append(tbl.Rows,
			[]string{binName, strconv.Itoa(binCount)})
	}
	tbl.FormatMap = map[int]string{1: "int"}
	return &tbl
}

// WriteXLSX writes the histogram to an XLSX file as a two-column sheet.
func (hist *Histogram) WriteXLSX(filename, sheetname, colNameBinName, colNameBinCount string) error {
	tbl := hist.Table(colNameBinName, colNameBinCount)
	return tbl.WriteXLSX(filename, sheetname)
} | data/histogram/histogram.go | 0.61659 | 0.469095 | histogram.go | starcoder
package desktop_parser
import (
"fmt"
)
// state identifies the parser state machine's current position.
type state uint8

// The parser works as a simple state machine.
// These are the states, and roughly what they are for.
const (
	// The entry state. Only valid transition is to state_section, or state_key
	// once a section has been identified.
	state_none state = iota
	// Transitions back to state_none when a section is found.
	state_section
	// Transitions to state_key_locale when a [ is found in a key.
	state_key
	// Transitions to state_key_locale_post when a ] is found after a locale.
	state_key_locale
	// Only allow equals or whitespace. Transitions to state_value_pre.
	state_key_locale_post
	// Eats whitespace. Transitions to state_value on anything else.
	state_value_pre
	// Reads a value. Transitions back to state_none once done.
	state_value
)

// parser accumulates completed sections plus the partially-read section
// name, key, locale and value as characters are consumed.
type parser struct {
	sections []DesktopSection
	// Current section name being read.
	sectionName string
	// Current key name being read.
	keyName string
	// Current key locale being read (if any).
	keyLocale string
	// Current value being read.
	value string
}

// parseStateNone dispatches at the start of a line: '[' begins a section
// header, newlines are skipped, and any other character starts a key —
// but only after at least one section name has been seen.
func (this *parser) parseStateNone(c rune) (state, error) {
	switch c {
	case '[': // [Desktop Entry
		this.sectionName = ""
		return state_section, nil
	case '\n':
		return state_none, nil
	default:
		if this.sectionName != "" {
			return this.parseStateKey(c)
		} else {
			return state_none, fmt.Errorf("Unexpected character outside a section: %c", c)
		}
	}
}
// parseStateSection accumulates a section name until the closing ']',
// at which point a new DesktopSection is appended.
func (this *parser) parseStateSection(c rune) (state, error) {
	switch c {
	case ']': // Desktop Entry]
		this.sections = append(this.sections, DesktopSection{Name: this.sectionName})
		return state_none, nil
	default:
		this.sectionName += string(c)
	}
	return state_section, nil
}

// parseStateKey accumulates a key name ([A-Za-z0-9-]; spaces are
// silently skipped). '[' begins a locale suffix (only one allowed) and
// '=' ends the (non-empty) key and moves on to the value.
func (this *parser) parseStateKey(c rune) (state, error) {
	switch {
	case c == '[':
		if this.keyLocale != "" {
			return state_none, fmt.Errorf("Already found a language code: %s", this.keyLocale)
		}
		return state_key_locale, nil
	case c == '=':
		if this.keyName == "" {
			return state_none, fmt.Errorf("Empty key found")
		}
		return state_value_pre, nil
	case c == ' ':
		// ignore
	case c == '-':
		fallthrough
	case c >= '0' && c <= '9':
		fallthrough
	case c >= 'a' && c <= 'z':
		fallthrough
	case c >= 'A' && c <= 'Z':
		this.keyName += string(c)
	default:
		return state_none, fmt.Errorf("Bad key character: %c", c)
	}
	return state_key, nil
}

// parseStateKeyLocale accumulates a locale/language code until the
// closing ']' returns control to the key state.
func (this *parser) parseStateKeyLocale(c rune) (state, error) {
	switch {
	case c == ']':
		return state_key, nil
	default:
		this.keyLocale += string(c)
	}
	return state_key_locale, nil
}
// parseStateKeyLocalePost consumes the characters between the closing
// ']' of a locale suffix and the '=' introducing the value. Only spaces
// and '=' are valid here; anything else is an error.
// The original trailing `return state_key_locale_post, nil` after the
// switch was unreachable (every branch returns) and has been removed.
func (this *parser) parseStateKeyLocalePost(c rune) (state, error) {
	switch {
	case c == ' ':
		return state_key_locale_post, nil
	case c == '=':
		return state_value_pre, nil
	default:
		return state_none, fmt.Errorf("Unexpected character after language code %s: %c", this.keyLocale, c)
	}
}
// parseStateValuePre skips spaces between '=' and the first value
// character; anything else begins the value.
// NOTE(review): the first non-space character is not stored here —
// presumably the caller re-feeds it once in state_value; confirm against
// the driving loop.
func (this *parser) parseStateValuePre(c rune) (state, error) {
	switch c {
	case ' ': // skip spaces
		return state_value_pre, nil
	default:
		return state_value, nil
	}
}

// endValue commits the accumulated key/locale/value to the most recent
// section and resets the accumulators. A no-op when no key was read.
func (this *parser) endValue() {
	if this.keyName == "" {
		return
	}
	i := len(this.sections) - 1
	this.sections[i].Values = append(this.sections[i].Values, DesktopValue{Key: this.keyName, Locale: this.keyLocale, Value: this.value})
	this.keyName = ""
	this.value = ""
	this.keyLocale = ""
}

// parseStateValue accumulates value characters until end of line, then
// commits the entry via endValue.
func (this *parser) parseStateValue(c rune) (state, error) {
	switch c {
	case '\n':
		this.endValue()
		return state_none, nil
	default:
		this.value += string(c)
	}
	return state_value, nil
} | lib/desktop_parser/parser.go | 0.521227 | 0.462412 | parser.go | starcoder
package sorting
// SortedSquares returns the squares of the elements of nums in
// non-decreasing order. nums itself must already be sorted in
// non-decreasing order (it may contain negatives).
// Time: O(N). Space: O(N) for the result, O(1) extra.
func SortedSquares(nums []int) []int {
	result := make([]int, len(nums))
	left, right := 0, len(nums)-1
	// The largest square always comes from one of the two ends, so fill
	// the result back-to-front, comparing absolute values at each end.
	for pos := len(nums) - 1; pos >= 0; pos-- {
		if abs(nums[right]) > abs(nums[left]) {
			result[pos] = nums[right] * nums[right]
			right--
		} else {
			result[pos] = nums[left] * nums[left]
			left++
		}
	}
	return result
}

// abs returns the absolute value of x.
func abs(x int) int {
	if x >= 0 {
		return x
	}
	return -x
}
// Rotate returns a copy of nums rotated right by k steps (k >= 0):
// element i of the input lands at index (i+k) mod len(nums).
func Rotate(nums []int, k int) []int {
	rotated := make([]int, len(nums))
	n := len(nums)
	for i, v := range nums {
		rotated[(i+k)%n] = v
	}
	return rotated
}
// MoveZeroes shifts all non-zero elements of nums toward the front
// (preserving their relative order) and all zeroes to the end, in place,
// returning the same slice.
func MoveZeroes(nums []int) []int {
	write := 0
	for i := 0; i+1 < len(nums); i++ {
		switch {
		case nums[i] != 0:
			// Already a non-zero in place; advance the write frontier.
			write++
		case nums[i+1] != 0:
			// A zero followed by a non-zero: pull the non-zero forward
			// into the frontier slot and leave a zero behind.
			nums[write] = nums[i+1]
			nums[i+1] = 0
			write++
		}
	}
	return nums
}
// Given a 1-indexed array of integers numbers that is already sorted in non-decreasing order, find two numbers such that they add up to a specific target number. Let these two numbers be numbers[index1] and numbers[index2] where 1 <= index1 < index2 <= numbers.length.
// Return the indices of the two numbers, index1 and index2, added by one as an integer array [index1, index2] of length 2.
// The tests are generated such that there is exactly one solution. You may not use the same element twice.
// Time complexity: O(n). The input array is traversed at most once. Thus the time complexity is O(n).
// Space complexity: O(1). We only use additional space to store two indices and the sum, so the space complexity is O(1).
func TwoSum(nums []int, target int) []int {
	l := 0
	r := len(nums) - 1
	// Two-pointer walk over the sorted array: a sum that is too small can
	// only grow by advancing l; one that is too large can only shrink by
	// retreating r.
	for l < r {
		s := nums[l] + nums[r]
		if s < target {
			l++
		} else if s > target {
			r--
		} else {
			return []int{l + 1, r + 1}
		}
	}
	// Sentinel for "no pair found" (only possible when the exactly-one-
	// solution precondition is violated).
	return []int{-1, -1}
} | algorithmsI/sorting/sorting.go | 0.869119 | 0.766687 | sorting.go | starcoder
package dex
import (
"fmt"
"math/big"
. "github.com/ethereum/go-ethereum/common"
)
// MAX_PRICE is the size of the OrderBook price array; prices index directly
// into it, so valid prices appear to be 0 .. MAX_PRICE-1 — an order priced
// at or above MAX_PRICE would index out of range (confirm callers bound it).
const MAX_PRICE = 10000000

// PricePoint contains pointers to the first and the last order entered at
// that price. Orders at one price form a FIFO singly-linked list through
// Order.next: orderHead is the oldest, orderTail the most recent.
type PricePoint struct {
	orderHead *Order
	orderTail *Order
}
// OrderBook keeps track of the maximum bid and minimum ask.
// orderIndex maps order hashes to orders so outstanding orders can be
// cancelled cheaply; prices is an array of all possible price points;
// actions is a channel reporting book events to a handler as they occur.
// NOTE(review): nothing in this file appends to logger — confirm it is
// populated elsewhere (see GetLogs).
type OrderBook struct {
	ask uint64
	bid uint64
	orderIndex map[Hash]*Order
	prices [MAX_PRICE]*PricePoint
	actions chan *Action
	logger []*Action
}
// NewOrderBook returns a book with an empty order index, the best bid at 0,
// the best ask at MAX_PRICE, and one allocated PricePoint per price level.
func NewOrderBook(actions chan *Action) *OrderBook {
	ob := &OrderBook{
		ask:        MAX_PRICE,
		actions:    actions,
		orderIndex: make(map[Hash]*Order),
	}
	for i := range ob.prices {
		ob.prices[i] = new(PricePoint)
	}
	return ob
}
// String implements fmt.Stringer, summarizing the best ask, best bid and
// the outstanding-order index.
func (ob *OrderBook) String() string {
	return fmt.Sprintf("Ask:%v, Bid:%v, orderIndex:%v", ob.ask, ob.bid, ob.orderIndex)
}
// GetLogs returns the accumulated action log.
// NOTE(review): nothing in this file writes to ob.logger — confirm it is
// populated elsewhere.
func (ob *OrderBook) GetLogs() []*Action {
	return ob.logger
}
// Insert appends order to the tail of this price point's FIFO queue.
func (p *PricePoint) Insert(order *Order) {
	if p.orderHead == nil {
		// first order at this price: it is both head and tail
		p.orderHead, p.orderTail = order, order
		return
	}
	p.orderTail.next = order
	p.orderTail = order
}
// AddOrder reports the incoming order, matches it against the opposite side
// of the book, and opens whatever amount remains unfilled.
func (ob *OrderBook) AddOrder(o *Order) {
	switch o.OrderType {
	case BUY:
		ob.actions <- NewBuyAction(o)
		ob.FillBuy(o)
	default: // SELL
		ob.actions <- NewSellAction(o)
		ob.FillSell(o)
	}
	if o.Amount > 0 {
		ob.openOrder(o)
	}
}
// openOrder places the (remaining) order on the book: it is appended to the
// FIFO queue at its limit price, marked OPEN, indexed by hash for later
// cancellation, and the best bid/ask is advanced if this order improves it.
// An "order placed" event is emitted first when the order has a channel.
// NOTE(review): o.Price indexes ob.prices directly; a price >= MAX_PRICE
// would panic — confirm callers bound it.
func (ob *OrderBook) openOrder(o *Order) {
	if o.events != nil {
		o.events <- o.NewOrderPlacedEvent()
	}
	pricePoint := ob.prices[o.Price]
	pricePoint.Insert(o)
	o.status = OPEN
	// a buy above the best bid / sell below the best ask becomes the new best
	if o.OrderType == BUY && o.Price > ob.bid {
		ob.bid = o.Price
	} else if o.OrderType == SELL && o.Price < ob.ask {
		ob.ask = o.Price
	}
	ob.orderIndex[o.Hash] = o
}
// CancelOrder zeroes out and marks CANCELLED the outstanding order with hash
// h, if the book knows about it, and reports a cancel action.
func (ob *OrderBook) CancelOrder(h Hash) {
	order, found := ob.orderIndex[h]
	if !found {
		return
	}
	order.Amount = 0
	order.status = CANCELLED
	ob.actions <- NewCancelAction(h)
}
// CancelTrade credits the traded amount back onto the order the trade
// references and reopens it, reporting a cancel-trade action.
func (ob *OrderBook) CancelTrade(t *Trade) {
	order, found := ob.orderIndex[t.OrderHash]
	if !found {
		return
	}
	order.Amount += t.Amount.Uint64()
	order.status = OPEN
	ob.actions <- NewCancelTradeAction()
}
// FillBuy matches an incoming BUY order against resting sell orders, walking
// price levels upward from the best ask while the order has amount left and
// the level is at or below the buyer's limit price.
// NOTE(review): the inner loop unconditionally advances the level's
// orderHead, so a resting order that is only partially consumed — and any
// orders visited after o.Amount reaches 0 — are unlinked from the book.
// Confirm this is intended.
// NOTE(review): o.Price indexes ob.prices; a price >= MAX_PRICE would panic.
func (ob *OrderBook) FillBuy(o *Order) {
	for ob.ask <= o.Price && o.Amount > 0 {
		pricePoint := ob.prices[ob.ask]
		pricePointOrderHead := pricePoint.orderHead
		for pricePointOrderHead != nil {
			ob.fill(o, pricePointOrderHead)
			pricePointOrderHead = pricePointOrderHead.next
			pricePoint.orderHead = pricePointOrderHead
		}
		ob.ask++
	}
}
// FillSell matches an incoming SELL order against resting buy orders,
// walking price levels downward from the best bid while the order has
// amount left and the level is at or above the seller's limit price.
// NOTE(review): the inner loop unconditionally advances the level's
// orderHead, so partially-consumed resting orders are unlinked — confirm.
// NOTE(review): ob.bid is uint64; ob.bid-- past zero wraps to a huge value,
// and the next iteration would index ob.prices out of range. Reachable for
// an unfillable order with o.Price == 0 — confirm callers prevent this.
func (ob *OrderBook) FillSell(o *Order) {
	for ob.bid >= o.Price && o.Amount > 0 {
		pricePoint := ob.prices[ob.bid]
		pricePointOrderHead := pricePoint.orderHead
		for pricePointOrderHead != nil {
			ob.fill(o, pricePointOrderHead)
			pricePointOrderHead = pricePointOrderHead.next //only of these two lines is necessary
			pricePoint.orderHead = pricePointOrderHead
		}
		ob.bid--
	}
}
// fill matches the incoming order o against one resting order: when the
// resting order holds at least o's remaining amount, o is filled completely;
// otherwise, if the resting order still has any amount, o is filled
// partially by it. A resting order with zero amount is skipped.
func (ob *OrderBook) fill(o, pricePointOrderHead *Order) {
	if pricePointOrderHead.Amount >= o.Amount {
		ob.fillCompletely(o, pricePointOrderHead)
		return
	}
	if pricePointOrderHead.Amount > 0 {
		ob.fillPartially(o, pricePointOrderHead)
	}
}
// fillCompletely fills the incoming order o in full against the resting
// order pricePointOrderHead (whose Amount must be >= o.Amount): reports a
// Filled action, records a trade for o's whole remaining amount, debits the
// resting order, zeroes o, and marks it FILLED, emitting a filled event when
// o carries an event channel.
// NOTE(review): when the resting order is larger, its residual amount stays
// positive, yet the fill loops still advance past it — confirm intended.
func (ob *OrderBook) fillCompletely(o, pricePointOrderHead *Order) {
	ob.actions <- NewFilledAction(o, pricePointOrderHead)
	amount := big.NewInt(int64(o.Amount))
	trade := NewTrade(pricePointOrderHead, amount, o.Maker)
	pricePointOrderHead.Amount -= o.Amount
	o.Amount = 0
	o.status = FILLED
	if o.events != nil {
		o.events <- o.NewOrderFilledEvent(trade)
	}
	return
}
// fillPartially consumes the whole resting order pricePointOrderHead
// (whose Amount is < o.Amount) against o: reports a PartialFilled action,
// debits o by the resting amount, and marks o PARTIAL_FILLED.
// NOTE(review): unlike fillCompletely, no Trade is recorded here and the
// resting order's Amount is not zeroed — confirm this is intended.
func (ob *OrderBook) fillPartially(o, pricePointOrderHead *Order) {
	ob.actions <- NewPartialFilledAction(o, pricePointOrderHead)
	o.Amount -= pricePointOrderHead.Amount
	o.status = PARTIAL_FILLED
	if o.events != nil {
		o.events <- o.NewOrderPartiallyFilledEvent()
	}
	return
}
// Done signals the action handler that order processing has finished.
func (ob *OrderBook) Done() {
	ob.actions <- NewDoneAction()
} | dex/orderbook.go | 0.645567 | 0.421254 | orderbook.go | starcoder |
package txqr
import (
"fmt"
"strings"
)
// Decoder represents protocol decode.
// It reassembles a payload from chunks of the form "<offset>/<total>|<data>".
type Decoder struct {
	buffer []byte // reassembled payload; allocated on the first chunk
	read, total int // bytes received so far / expected total
	frames []frameInfo // offset+size of every frame written so far
	cache map[string]struct{} // chunk headers already processed (dedup)
}
// frameInfo represents the information about read frames.
// As frames can change size dynamically, we keep size info as well.
type frameInfo struct {
	offset, size int // byte offset into the payload and frame length
}
// NewDecoder creates and inits a new decoder with an empty buffer and an
// empty header-deduplication cache.
func NewDecoder() *Decoder {
	d := new(Decoder)
	d.buffer = []byte{}
	d.cache = make(map[string]struct{})
	return d
}
// NewDecoderSize creates and inits a new decoder for the known size.
// Note, it doesn't limit the size of the input, but optimizes memory allocation.
func NewDecoderSize(size int) *Decoder {
	return &Decoder{
		buffer: make([]byte, size),
		// BUG FIX: cache was left nil, so the first Decode panicked inside
		// isCached when assigning into a nil map. NewDecoder initializes it.
		cache: make(map[string]struct{}),
		// NOTE(review): Decode reallocates the buffer whenever total == 0,
		// which discards this pre-allocation — confirm intended.
	}
}
// Decode takes a single chunk of data and decodes it.
// Chunk expected to be validated (see Validate) before.
//
// Chunk format: "<offset>/<total>|<payload>". The first chunk seen
// allocates the buffer for the whole transfer; duplicate chunks (same
// header) are skipped.
func (d *Decoder) Decode(chunk string) error {
	idx := strings.IndexByte(chunk, '|') // expected to be validated before
	if idx == -1 {
		return fmt.Errorf("invalid frame: \"%s\"", chunk)
	}

	header := chunk[:idx]

	// continuous QR reading often sends the same chunk in a row, skip it
	if d.isCached(header) {
		return nil
	}

	var offset, total int
	_, err := fmt.Sscanf(header, "%d/%d", &offset, &total)
	if err != nil {
		return fmt.Errorf("invalid header: %v (%s)", err, header)
	}

	// allocate enough memory at first total read
	if d.total == 0 {
		d.buffer = make([]byte, total)
		d.total = total
	}

	if total > d.total {
		return fmt.Errorf("total changed during sequence, aborting")
	}

	payload := chunk[idx+1:]
	size := len(payload)

	// BUG FIX: reject frames that would write past the allocated buffer;
	// the unchecked copy below used to panic on a corrupted or malicious
	// header (negative offset, or offset+size beyond total).
	if offset < 0 || offset+size > len(d.buffer) {
		return fmt.Errorf("frame out of bounds: offset %d, size %d, total %d", offset, size, d.total)
	}

	// TODO(divan): optmize memory allocation
	d.frames = append(d.frames, frameInfo{offset: offset, size: size})

	copy(d.buffer[offset:offset+size], payload)

	d.updateCompleted()

	return nil
}
// Validate checks if a given chunk of data is a valid txqr protocol packet:
// it must be at least 4 bytes long and contain a '|' header separator.
func (d *Decoder) Validate(chunk string) error {
	if len(chunk) < 4 || !strings.ContainsRune(chunk, '|') {
		return fmt.Errorf("invalid frame: \"%s\"", chunk)
	}
	return nil
}
// Data returns decoded data as a string (a copy of the internal buffer, so
// it stays valid after Reset).
func (d *Decoder) Data() string {
	return string(d.buffer)
}
// DataBytes returns decoded data as a byte slice.
// NOTE: this is the internal buffer itself, not a copy — callers must not
// mutate it while decoding is still in progress.
func (d *Decoder) DataBytes() []byte {
	return d.buffer
}
// Length returns length of the decoded data buffer (equal to the expected
// total once the first chunk has been seen).
func (d *Decoder) Length() int {
	return len(d.buffer)
}
// Read returns amount of currently read bytes (the sum of all recorded
// frame sizes).
func (d *Decoder) Read() int {
	return d.read
}
// Total returns total amount of data expected, as announced by the chunk
// headers (0 before the first chunk is decoded).
func (d *Decoder) Total() int {
	return d.total
}
// IsCompleted reports whether the read was completed successfully or not.
// NOTE(review): read is a sum of frame sizes, so frames that overlap in the
// buffer would be double-counted — confirm senders never overlap frames.
func (d *Decoder) IsCompleted() bool {
	return d.total > 0 && d.read >= d.total
}
// isCached reports whether the chunk header was seen before; a header seen
// for the first time is recorded, so its next occurrence is a duplicate.
func (d *Decoder) isCached(header string) bool {
	_, seen := d.cache[header]
	if !seen {
		d.cache[header] = struct{}{}
	}
	return seen
}
// Reset resets decoder, preparing it for the next run: counters go back to
// zero and the buffer, frame log and header cache are emptied.
func (d *Decoder) Reset() {
	d.read, d.total = 0, 0
	d.buffer = []byte{}
	d.frames = []frameInfo{}
	d.cache = make(map[string]struct{})
}
// TODO(divan): this will not work if frame size is dynamic. Rewrite it
// to support it.
// updateCompleted recomputes the read counter as the sum of all recorded
// frame sizes. NOTE(review): frames that overlap in the buffer (sent with
// distinct headers) would be double-counted.
func (d *Decoder) updateCompleted() {
	var cur int
	for _, frame := range d.frames {
		cur += frame.size
	}
	d.read = cur
} | decode.go | 0.637031 | 0.500671 | decode.go | starcoder |
package main
import (
"errors"
"fmt"
"os"
"regexp"
"strings"
"github.com/kirsle/goadvent2016/advent"
)
// Screen dimensions: the LCD is 50 pixels wide by 6 pixels tall.
const (
	ScreenWidth = 50
	ScreenHeight = 6
)
// Screen represents our 50x6 LCD screen. The pixels are stored as an array
// of rows (Y coordinate), each row holding the X coordinates; true means
// the pixel is lit.
type Screen struct {
	Pixels [ScreenHeight][ScreenWidth]bool
}
// Regexps for the three screen instructions; each captures the two numeric
// arguments (width/height for rect, index/shift for the rotations).
var (
	RectangleRegexp *regexp.Regexp = regexp.MustCompile(`^rect (\d+)x(\d+)$`)
	ColumnRegexp *regexp.Regexp = regexp.MustCompile(`^rotate column x=(\d+) by (\d+)$`)
	RowRegexp *regexp.Regexp = regexp.MustCompile(`^rotate row y=(\d+) by (\d+)$`)
)
// main reads the puzzle input (one instruction per line), applies each
// instruction to a fresh screen, and prints the final screen plus the
// number of lit pixels. Set the DEBUG env var to dump the screen after
// every instruction.
func main() {
	// Require the input file path as the only argument.
	if len(os.Args) < 2 {
		fmt.Println("Usage: main.go <input file>")
		os.Exit(1)
	}

	input, err := advent.ReadFile(os.Args[1])
	if err != nil {
		panic(err)
	}

	// Create our screen.
	screen := NewScreen()

	// Process the instructions; a bad instruction is reported and skipped.
	for _, instruction := range input {
		err = screen.ProcessInstruction(instruction)
		if err != nil {
			fmt.Printf("ERROR: %s\n", err)
			continue
		}

		// When debugging, print the screen after every update.
		if os.Getenv("DEBUG") != "" {
			screen.Print()
		}
	}

	// Print the final screen.
	fmt.Println("Final screen:")
	screen.Print()

	// Count the lit pixels.
	fmt.Printf("Number of pixels lit: %d\n", screen.LitCount())
}
// NewScreen creates a new LCD screen with every pixel switched off (the
// zero value of the pixel array).
func NewScreen() *Screen {
	return new(Screen)
}
// Light turns on a pixel at a given coordinate.
func (s *Screen) Light(x, y int) error {
	if err := s.BoundsCheck(x, y); err != nil {
		return err
	}
	s.Pixels[y][x] = true
	return nil
}
// Dark turns off a pixel at a given coordinate.
func (s *Screen) Dark(x, y int) error {
	if err := s.BoundsCheck(x, y); err != nil {
		return err
	}
	s.Pixels[y][x] = false
	return nil
}
// IsLit tells whether a pixel at a given coordinate is lit.
func (s *Screen) IsLit(x, y int) (bool, error) {
	err := s.BoundsCheck(x, y)
	if err != nil {
		return false, err
	}
	lit := s.Pixels[y][x]
	return lit, nil
}
// LitCount counts the number of lit pixels on the screen.
func (s *Screen) LitCount() int {
	count := 0
	for y := range s.Pixels {
		for x := range s.Pixels[y] {
			if s.Pixels[y][x] {
				count++
			}
		}
	}
	return count
}
// BoundsCheck checks whether an X and Y coordinate is valid, i.e.
// 0 <= x < ScreenWidth and 0 <= y < ScreenHeight.
func (s *Screen) BoundsCheck(x, y int) error {
	// BUG FIX: the comparisons were `>` instead of `>=`, so x == ScreenWidth
	// or y == ScreenHeight passed the check and later panicked when used to
	// index s.Pixels. The messages also no longer claim "darken" — this
	// check is shared by Light, Dark and IsLit.
	if y < 0 || y >= ScreenHeight {
		return errors.New("Y coordinate is out of bounds")
	} else if x < 0 || x >= ScreenWidth {
		return errors.New("X coordinate is out of bounds")
	} else {
		return nil
	}
}
// Print shows what the screen looks like in ASCII art: '#' for a lit pixel,
// '.' for a dark one, followed by a blank line.
func (s *Screen) Print() {
	for _, row := range s.Pixels {
		line := make([]byte, len(row))
		for i, lit := range row {
			if lit {
				line[i] = '#'
			} else {
				line[i] = '.'
			}
		}
		fmt.Println(string(line))
	}
	fmt.Println()
}
// ProcessInstruction parses and executes a pixel manipulation instruction,
// dispatching on its leading keyword.
func (s *Screen) ProcessInstruction(input string) error {
	advent.Debug("INSTRUCTION: %s\n", input)

	switch {
	case strings.HasPrefix(input, "rect"):
		return s.ProcessRect(input)
	case strings.HasPrefix(input, "rotate column"):
		return s.ProcessColumn(input)
	case strings.HasPrefix(input, "rotate row"):
		return s.ProcessRow(input)
	default:
		return fmt.Errorf("Invalid instruction: %s", input)
	}
}
// ProcessRect handles a "rect WxH" instruction by lighting a W-by-H block
// of pixels anchored at the top-left corner.
func (s *Screen) ProcessRect(input string) error {
	match := RectangleRegexp.FindStringSubmatch(input)
	if match == nil {
		return fmt.Errorf("Failed regexp for rectangle: %s", input)
	}

	// Turn the captured width/height into ints.
	dims, err := advent.StringsToInts(match[1:])
	if err != nil {
		return err
	}
	width, height := dims[0], dims[1]

	// Fill in the pixels.
	for x := 0; x < width; x++ {
		for y := 0; y < height; y++ {
			if err := s.Light(x, y); err != nil {
				return err
			}
		}
	}

	return nil
}
// ProcessColumn handles a "rotate column x=C by N" instruction: the
// column's pixels are shifted by N positions with wrap-around.
func (s *Screen) ProcessColumn(input string) error {
	match := ColumnRegexp.FindStringSubmatch(input)
	if len(match) == 0 {
		return fmt.Errorf("Failed regexp for column: %s", input)
	}

	// captured args: column index, then shift amount
	values, err := advent.StringsToInts(match[1:])
	if err != nil {
		return err
	}
	column, length := values[0], uint(values[1])

	// Get all the pixels of this column into a convenient array.
	var pixels []bool
	for y := 0; y < ScreenHeight; y++ {
		var lit bool
		lit, err = s.IsLit(column, y)
		if err != nil {
			return err
		}
		pixels = append(pixels, lit)
	}

	// Shift the pixels.
	pixels = ShiftRight(pixels, length)

	// Update their lit values.
	for y, lit := range pixels {
		if lit {
			err = s.Light(column, y)
		} else {
			err = s.Dark(column, y)
		}
		if err != nil {
			return err
		}
	}

	return nil
}
// ProcessRow handles a "rotate row y=R by N" instruction: the row's pixels
// are shifted right by N positions with wrap-around.
func (s *Screen) ProcessRow(input string) error {
	match := RowRegexp.FindStringSubmatch(input)
	if len(match) == 0 {
		// BUG FIX: message used to read "Failed regexp row row".
		return fmt.Errorf("Failed regexp for row: %s", input)
	}

	// captured args: row index, then shift amount
	values, err := advent.StringsToInts(match[1:])
	if err != nil {
		return err
	}
	row, length := values[0], uint(values[1])

	// Get all the pixels of this row into a convenient array.
	var pixels []bool
	for x := 0; x < ScreenWidth; x++ {
		var lit bool
		lit, err = s.IsLit(x, row)
		if err != nil {
			return err
		}
		pixels = append(pixels, lit)
	}

	// Shift the pixels.
	pixels = ShiftRight(pixels, length)

	// Update their lit pixels.
	for x, lit := range pixels {
		if lit {
			err = s.Light(x, row)
		} else {
			err = s.Dark(x, row)
		}
		if err != nil {
			return err
		}
	}

	return nil
}
// ShiftRight shifts an array of booleans forward (toward higher indices) by
// `steps` positions with wrap-around, mutating and returning the slice.
// NOTE(review): panics on an empty slice when steps > 0 (array[-1] access);
// callers here always pass a full screen row or column, but worth guarding.
// NOTE(review): cost is O(len*steps); `steps %= len(array)` would bound it.
func ShiftRight(array []bool, steps uint) []bool {
	var last bool
	var i uint
	for i = 0; i < steps; i++ {
		// remember the tail, move everything right one, wrap tail to front
		last = array[len(array)-1]
		for j := len(array) - 1; j > 0; j-- {
			array[j] = array[j-1]
		}
		array[0] = last
	}
	return array
} | day08/main.go | 0.655226 | 0.458288 | main.go | starcoder |
package bst
import "fmt"
// KeyType is the integer key type stored in tree nodes.
type KeyType int64

// maxKey and minKey are the extreme KeyType values, used as the initial
// interval bounds when verifying the BST property.
const (
	maxKey = KeyType(0x7fffffffffffffff)
	minKey = -maxKey - 1
)
// Node is a single tree node: a key and two child links (nil when absent).
type Node struct {
	Key KeyType
	Left, Right *Node
}
// Tree is a binary search tree referenced through its root (nil when empty).
type Tree struct {
	Root *Node
}
// makeNode allocates a leaf node holding k.
func makeNode(k KeyType) *Node {
	return &Node{Key: k}
}
// New returns an empty tree.
func New() *Tree {
	return &Tree{}
}
// Insert adds a new node with key k, keeping the BST property. Duplicate
// keys go to the left subtree (the descent below takes the left branch on
// k <= x.Key, so the attach condition must mirror it).
func (t *Tree) Insert(k KeyType) {
	// Descend to the leaf position for k, remembering the parent.
	var p *Node = nil
	x := t.Root
	for x != nil {
		p = x
		if k <= x.Key {
			x = x.Left
		} else {
			x = x.Right
		}
	}
	z := makeNode(k)
	if p == nil {
		t.Root = z
	} else if k <= p.Key {
		// BUG FIX: this was `k < p.Key`, which disagreed with the descent
		// condition above. Inserting a key equal to p.Key ended the descent
		// on p's (nil) LEFT child but then assigned to p.Right, clobbering
		// any existing right subtree and losing all of its nodes.
		p.Left = z
	} else {
		p.Right = z
	}
}
// Delete removes one node whose key equals k (a no-op when k is absent).
// Standard BST deletion with three cases on the found node x.
func (t *Tree) Delete(k KeyType) {
	// Find node with the given key, tracking its parent p.
	var p *Node = nil
	x := t.Root
	for x != nil && x.Key != k {
		p = x
		if k < x.Key {
			x = x.Left
		} else {
			x = x.Right
		}
	}
	switch {
	// Key not found.
	case x == nil: /* nothing */;
	// Node to delete has no left child: splice in its right subtree.
	case x.Left == nil:
		t.transplant(p, x, x.Right)
	// Node to delete has left, but no right child: splice in its left subtree.
	case x.Right == nil:
		t.transplant(p, x, x.Left)
	default:
		// Node to delete has two children: replace x with its in-order
		// successor (the minimum of the right subtree), which detachMin
		// unlinks from its old position first.
		z := detachMin(x, x.Right)
		z.Left = x.Left
		z.Right = x.Right
		t.transplant(p, x, z)
	}
}
// transplant replaces u — a child of p, or the root when p is nil — with
// the subtree rooted at v.
func (t *Tree) transplant(p, u, v *Node) {
	switch {
	case p == nil:
		t.Root = v
	case u == p.Left:
		p.Left = v
	default: // u == p.Right
		p.Right = v
	}
}
// detachMin unlinks and returns the smallest node of the subtree rooted at
// x, where p is x's parent. Assumes x != nil (the caller, Delete, only
// invokes it with a non-nil right child).
func detachMin(p, x *Node) *Node {
	for x.Left != nil {
		p = x
		x = x.Left
	}
	// x is the minimum: splice its right subtree into its parent.
	if x == p.Right {
		p.Right = x.Right
	} else {
		p.Left = x.Right
	}
	return x
}
// WriteDot renders the tree as a Graphviz "digraph" description named name.
func (t *Tree) WriteDot(name string) string {
	_, nodes := writeDotNodes(1, t.Root)
	_, edges := writeDotEdges(1, t.Root)
	return "digraph " + name + "{\n" + nodes + edges + "}\n"
}
// writeDotNodes emits one dot node declaration per tree node in pre-order:
// real nodes are labelled with their key, nil children render as points.
// n is the next free node number; it returns the number left free after the
// subtree and the accumulated dot text.
func writeDotNodes(n uint, nd *Node) (uint, string) {
	if nd == nil {
		return n + 1, fmt.Sprintf("n%d[shape=point]\n", n)
	}
	var s, sl, sr string
	s = fmt.Sprintf("n%d[label=\"%d\"]\n", n, int64(nd.Key))
	n, sl = writeDotNodes(n + 1, nd.Left)
	n, sr = writeDotNodes(n, nd.Right)
	return n, s + sl + sr
}
// writeDotEdges emits the parent->child edges using the same pre-order
// numbering scheme as writeDotNodes: node n's left child is n+1, and its
// right child is the first number left free after the left subtree (nl).
func writeDotEdges(n uint, nd *Node) (uint, string) {
	if nd == nil {
		return n + 1, ""
	}
	nl, sl := writeDotEdges(n + 1, nd.Left)
	nr, sr := writeDotEdges(nl, nd.Right)
	s := fmt.Sprintf("n%d -> n%d[arrowhead=none]\n", n, n + 1)
	s += fmt.Sprintf("n%d -> n%d[arrowhead=none]\n", n, nl)
	return nr, s + sl + sr
}
// verifyBSTProperty reports whether the whole tree satisfies the BST
// invariant, checking keys against the full KeyType range.
func (t *Tree) verifyBSTProperty() bool {
	return verifyBSTProperty(t.Root, minKey, maxKey)
}
// verifyBSTProperty checks that every key in the subtree rooted at p lies
// within [low, high]. The left child recurses with [low, p.Key] (duplicates
// are allowed on the left) and the right with [p.Key+1, high].
func verifyBSTProperty(p *Node, low, high KeyType) bool {
	if p == nil {
		return true
	}
	if p.Key < low || p.Key > high {
		return false
	}
	return verifyBSTProperty(p.Left, low, p.Key) && verifyBSTProperty(p.Right, p.Key + 1, high)
} | Lecture 10- Red-black trees/src/bst/bst.go | 0.690142 | 0.451568 | bst.go | starcoder |
package types
import (
"stashware/oo"
)
// TbBlock converts an in-memory Block into its database row representation,
// JSON-encoding the Txs and WalletList slices into string columns and
// tagging the row with the mainnet network id.
func (this *Block) TbBlock() (ret *TbBlock) {
	ret = &TbBlock{
		IndepHash: this.IndepHash,
		Hash: this.Hash,
		Height: this.Height,
		PreviousBlock: this.PreviousBlock,
		Nonce: this.Nonce,
		Timestamp: this.Timestamp,
		LastRetarget: this.LastRetarget,
		Diff: this.Diff,
		CumulativeDiff: this.CumulativeDiff,
		RewardAddr: this.RewardAddr,
		RewardFee: this.RewardFee,
		RewardPool: this.RewardPool,
		WeaveSize: this.WeaveSize,
		BlockSize: this.BlockSize,
		Txs: string(oo.JsonData(this.Txs)),
		TxRoot: this.TxRoot,
		WalletList: string(oo.JsonData(this.WalletList)),
		WalletListHash: this.WalletListHash,
		Network: NETWORK_MAINNET,
	}
	return
}
// TbTransaction converts an in-memory Transaction into its database row
// form, JSON-encoding the tags. The raw payload (Data) is not copied — only
// DataHash is stored — and BlockIndepHash is left unset; both appear to be
// omitted on purpose (see the commented-out fields).
func (this *Transaction) TbTransaction() (ret *TbTransaction) {
	ret = &TbTransaction{
		ID: this.ID,
		// BlockIndepHash: this.BlockIndepHash,
		LastTx: this.LastTx,
		Owner: this.Owner,
		FromAddress: this.From,
		Target: this.Target,
		Quantity: this.Quantity,
		Reward: this.Reward,
		Tags: string(oo.JsonData(this.Tags)),
		// this.Data,
		DataHash: this.DataHash,
		Signature: this.Signature,
	}
	return
}
// Block converts a database row back into the in-memory Block, decoding the
// JSON-encoded Txs and WalletList columns when non-empty. Decode errors are
// discarded (`_ =`), leaving the corresponding field empty.
func (this *TbBlock) Block() (ret *Block) {
	ret = &Block{
		IndepHash: this.IndepHash,
		Hash: this.Hash,
		Height: this.Height,
		PreviousBlock: this.PreviousBlock,
		Nonce: this.Nonce,
		Timestamp: this.Timestamp,
		LastRetarget: this.LastRetarget,
		Diff: this.Diff,
		CumulativeDiff: this.CumulativeDiff,
		RewardAddr: this.RewardAddr,
		RewardFee: this.RewardFee,
		RewardPool: this.RewardPool,
		WeaveSize: this.WeaveSize,
		BlockSize: this.BlockSize,
		TxRoot: this.TxRoot,
		WalletListHash: this.WalletListHash,
	}
	if len(this.Txs) > 0 {
		_ = oo.JsonUnmarshal([]byte(this.Txs), &ret.Txs)
	}
	if len(this.WalletList) > 0 {
		_ = oo.JsonUnmarshal([]byte(this.WalletList), &ret.WalletList)
	}
	return
}
// Transaction converts a database row back into the in-memory Transaction,
// decoding the JSON-encoded Tags column when non-empty. Decode errors are
// discarded (`_ =`), leaving Tags empty. The payload (Data) is not restored
// here — see FullTx.Transaction.
func (this *TbTransaction) Transaction() (ret *Transaction) {
	ret = &Transaction{
		ID: this.ID,
		LastTx: this.LastTx,
		Owner: this.Owner,
		From: this.FromAddress,
		Target: this.Target,
		Quantity: this.Quantity,
		Reward: this.Reward,
		Signature: this.Signature,
		DataHash: this.DataHash,
	}
	if len(this.Tags) > 0 {
		_ = oo.JsonUnmarshal([]byte(this.Tags), &ret.Tags)
	}
	return
}
// Transaction builds the complete in-memory transaction from a FullTx row,
// additionally hex-encoding the stored raw payload into Data when present.
func (this *FullTx) Transaction() (ret *Transaction) {
	ret = this.TbTransaction.Transaction()
	if len(this.Data) > 0 {
		ret.Data = oo.HexEncodeToString(this.Data)
	}
	return
} | types/transform.go | 0.541894 | 0.438304 | transform.go | starcoder |
package axcelerate
import jsontime "github.com/liamylian/jsontime/v2/v2"
/*
GetCoursesInstanceSearch Advanced Course Instance Search - Returns instances.
Request Parameters
ID
The Activity Type ID.
InstanceID
The Instance ID.
type
The type of the activity. w = workshop, p = accredited program, el = e-learning, all = workshops, accredited programs and e-learning.
trainingCategory
The Training Category to Search - Uses Like operator %name%
location
The course location to search- Uses Like operator %name%. Only works with type 'w' instances.
state
The course State to search - works with type 'w' & 'p' instances.
code
The course code to search - Uses Like operator: code%
name
The course name to search - Uses Like operator %name%
searchTerm
For a general search use this param
enrolmentOpen
Return Course Instances that are open for enrolment.
startDate_min
The course start date must be greater than this date. Null values will also be returned for el and p types. Ignored if instanceID is passed.
startDate_max
The course start date must be less than this date. Null values will also be returned for el and p types. Ignored if instanceID is passed.
finishDate_min
The course finish date must be greater than this date. Null values will also be returned for el and p types.
finishDate_max
The course finish date must be less than this date. Null values will also be returned for el and p types.
lastUpdated_min
In 'YYYY-MM-DD hh:mm' format with time optional. The course instance last updated date must be greater than or equal to this datetime. NOTE: lastUpdated_min & max must be used together (unless ID is passed) and can be up to 90 days apart. These fields are mutually exclusive with start and finish date min/max searches and are both ignored if instanceID is passed.
lastUpdated_max
In 'YYYY-MM-DD hh:mm' format with time optional. The course instance last updated date must be less than or equal to this datetime.
trainerContactID
The ContactID of the Trainer/Consultant the instance is assigned to.
domainID
The DomainID the instance belongs to (the domainID of the user).
deliveryLocationID
For type = p only. The unique ID of an accredited delivery location, reported to NCVER. Refers to locations listed under the course/deliveryLocations endpoint.
orgID
The organisation ID of the Client Contact of the course.
orgIDTree
The Client Contact of the course is either this Organisation ID or a child organisation of this Organisation ID.
offset
Used for paging - start at record.
displayLength
Used for paging - total records to retrieve.
sortColumn
The column index to sort by.
sortDirection
The sort by direction 'ASC' OR 'DESC'.
public
Whether to include public courses. If false, returns only In-House course instances.
isActive
You can choose to include or exclude Deleted / Archived and Inactive courses.
purgeCache
Currently the API will cache the query for 30 seconds - Setting this flag to true gets the latest data.
groupedCourseName
If the Grouped Workshop Data flag is on in your account, you can search by the Grouped Course Name (Type W Only)
groupedCourseID
If the Grouped Workshop Data flag is on in your account, you can search by the Grouped Course ID (Type W Only)
*/
// GetCoursesInstanceSearch POSTs the given parameters to
// "/course/instance/search" and decodes the response body into []Instance,
// registering the custom "axc_datetime" time layout first. See the
// parameter list documented above.
func (s *CoursesService) GetCoursesInstanceSearch(parms map[string]string) ([]Instance, *Response, error) {
	var obj []Instance

	if len(parms) == 0 {
		parms = map[string]string{}
	}

	resp, err := do(s.client, "POST", Params{parms: parms, u: "/course/instance/search"}, obj)
	if err != nil {
		return obj, resp, err
	}

	var json = jsontime.ConfigWithCustomTimeFormat
	jsontime.AddTimeFormatAlias("axc_datetime", "2006-01-02 15:04:05")

	// BUG FIX: the unmarshal error used to be silently discarded, so a
	// malformed response yielded an empty result with a nil error.
	if err := json.Unmarshal([]byte(resp.Body), &obj); err != nil {
		return obj, resp, err
	}

	return obj, resp, nil
}
// 2020-11-30 13:00:00 | courseInstanceSearch.go | 0.695441 | 0.445107 | courseInstanceSearch.go | starcoder |
package api
// Address is a textual range reference. The format is not constructed in
// this file — presumably A1-style notation; confirm against callers.
type Address = string

// Range represents a set of one or more contiguous cells such as a cell, a row, a column, or a block of cells.
// https://docs.microsoft.com/en-us/javascript/api/excel/excel.range?view=excel-js-preview\
// Currently, only rectangular, single-set ranges are supported.
type Range struct {
	address Address
	columnCount uint // number of columns in the range
	columnIndex uint // zero-based index of the first column
	rowCount uint // number of rows in the range
	rowIndex uint // zero-based index of the first row
	// Represents the raw values of the specified range. The data returned could be a string,
	// number, or boolean. Cells that contain an error will return the error string. If the
	// returned value starts with a plus ("+"), minus ("-"), or equal sign ("="), Excel interprets
	// this value as a formula.
	// https://docs.microsoft.com/en-us/javascript/api/excel/excel.range?view=excel-js-preview#values
	values [][]interface{}
	valueTypes [][]ValueType
	worksheet *Worksheet
}
// ByWorksheetAndIndexes builds a Range over the rectangular cell block
// [startRow, startRow+rowCount) x [startColumn, startColumn+columnCount) of
// the given worksheet. Cells missing from the underlying row data are left
// as zero values in the result.
func ByWorksheetAndIndexes(worksheet *Worksheet, startRow, startColumn, rowCount, columnCount uint) Range {
	rows := worksheet.Impl().Rows[startRow : startRow+rowCount]

	values := make([][]interface{}, rowCount)
	valueTypes := make([][]ValueType, rowCount)
	for iR, row := range rows {
		values[iR] = make([]interface{}, columnCount)
		valueTypes[iR] = make([]ValueType, columnCount)
		if row.Cells == nil || uint(len(row.Cells)) <= startColumn {
			continue
		}
		// number of cells actually available in this row, capped at columnCount
		avail := columnCount
		if startColumn+columnCount > uint(len(row.Cells)) {
			avail = uint(len(row.Cells)) - startColumn
		}
		// BUG FIX: the slice end index used to be the COUNT itself, i.e.
		// row.Cells[startColumn:avail], which returned the wrong cells for
		// any startColumn > 0 and panicked whenever startColumn > avail.
		window := row.Cells[startColumn : startColumn+avail]
		for iC, c := range window {
			values[iR][iC] = ValueFromImpl(c)
			valueTypes[iR][iC] = TypeFromImpl(c)
		}
	}

	return Range{
		"",
		columnCount,
		startColumn,
		rowCount,
		startRow,
		values,
		valueTypes,
		worksheet,
	}
}
// Accessors for the range's geometry and contents. GetValues and
// GetValueTypes return the internal slices (not copies), so callers share
// the backing data. NOTE(review): there is no accessor for the rowCount
// field — possibly an omission.
func (r *Range) GetAddress() Address { return r.address }
func (r *Range) GetColumnCount() uint { return r.columnCount }
func (r *Range) GetColumnIndex() uint { return r.columnIndex }
func (r *Range) GetRowIndex() uint { return r.rowIndex }
func (r *Range) GetValues() [][]interface{} { return r.values }
func (r *Range) GetValueTypes() [][]ValueType { return r.valueTypes }
//func (r *Range) SetValues(v [][]interface{}) { r.values = v } | pkg/api/range.go | 0.841663 | 0.67112 | range.go | starcoder |
package myqlib
import (
`bytes`
`fmt`
`sort`
`strconv`
`strings`
`time`
)
// Time Columns
var (
	// Timestamp_col renders the wall-clock time (HH:MM:SS) at print time,
	// fitted to the column width.
	Timestamp_col Col = NewFuncCol(`time`, `Time data was printed`, 8,
		func(state *MyqState, c Col) chan string {
			ch := make(chan string, 1)
			defer close(ch)
			ch <- fit_string(time.Now().Format(`15:04:05`), c.Width())
			return ch
		})
	// Runtime_col renders whole seconds elapsed since sampling started,
	// derived from the server uptime delta. Note it shares the `time`
	// header with Timestamp_col — presumably only one is shown at a time.
	Runtime_col Col = NewFuncCol(`time`, `Interval since data started`, 8,
		func(state *MyqState, c Col) chan string {
			ch := make(chan string, 1)
			defer close(ch)
			runtime := time.Duration(state.Cur.getI(`uptime`)-state.FirstUptime) * time.Second
			ch <- fit_string(fmt.Sprintf("%.0fs", runtime.Seconds()), c.Width())
			return ch
		})
)
func DefaultViews() map[string]View {
return map[string]View{
`cttf`: NewNormalView(`Connections, Threads, Tables, and Files`,
NewGroupCol(`Connects`, `Collection related metrics`,
NewRateCol(`cons`, `Connections per second`, 4, `connections`, 0, NumberUnits),
NewRateCol(`acns`, `Aborted connections per second`, 4, `aborted_connects`, 0, NumberUnits),
NewRateCol(`acls`, `Aborted Clients (those with existing connections) per second`, 4, `aborted_clients`, 0, NumberUnits),
),
NewGroupCol(`Threads`, `Thread related metrics`,
NewGaugeCol(`conn`, `Threads Connected`, 4, `threads_connected`, 0, NumberUnits),
NewGaugeCol(`run`, `Threads running`, 4, `threads_running`, 0, NumberUnits),
NewGaugeCol(`cach`, `Threads Cached`, 4, `threads_cached`, 0, NumberUnits),
NewRateCol(`crtd`, `Threads Created per second`, 4, `threads_created`, 0, NumberUnits),
NewRateCol(`slow`, `Threads that were slow to launch per second`, 4, `slow_launch_threads`, 0, NumberUnits),
),
NewGroupCol(`Pool`, `Thread Pool metrics`,
NewGaugeCol(`tot`, `Threadpool Threads`, 4, `threadpool_threads`, 0, NumberUnits),
NewGaugeCol(`idle`, `Threadpool Idle Threads`, 4, `threadpool_idle_threads`, 0, NumberUnits),
),
NewGroupCol(`Tables`, `Table metrics`,
NewGaugeCol(`open`, `Open Tables`, 4, `open_tables`, 0, NumberUnits),
NewRateCol(`opns`, `Opened Tables per Second`, 4, `opened_tables`, 0, NumberUnits),
NewRateCol(`immd`, `Immediate Table locks`, 4, `table_locks_immediate`, 0, NumberUnits),
NewRateCol(`wait`, `Table locks Waited`, 4, `table_locks_waited`, 0, NumberUnits),
),
NewGroupCol(`Defs`, `Table Definition Metrics`,
NewGaugeCol(`open`, `Open Table Definitions`, 4, `open_table_definitions`, 0, NumberUnits),
NewRateCol(`opns`, `Opened Table Definitions per Second`, 4, `opened_table_definitions`, 0, NumberUnits),
),
NewGroupCol(`Files`, `File Metrics`,
NewGaugeCol(`open`, `Open Files`, 4, `open_files`, 0, NumberUnits),
NewRateCol(`opns`, `Opened Files per Second`, 4, `opened_files`, 0, NumberUnits),
),
),
`coms`: NewNormalView(`MySQL Commands`,
NewRateCol(`sel`, `Selects per second`, 5, `com_select`, 0, NumberUnits),
NewRateSumCol(`dml`, `Inserts, Updates + Deletes (and other various DML) / Second`, 5, 0, NumberUnits, `com_insert.*`, `com_update.*`, `com_delete.*`, `com_load`, `com_replace.*`, `com_truncate`),
NewRateSumCol(`ddl`, `Data Definition commands / Second`, 5, 0, NumberUnits, `com_alter.*`, `com_create.*`, `com_drop.*`, `com_rename_table`),
NewRateSumCol(`admin`, `Admin commands / Second`, 5, 0, NumberUnits, `com_admin.*`),
NewRateSumCol(`show`, `SHOW commands / Second`, 5, 0, NumberUnits, `com_show.*`),
NewRateSumCol(`set`, `SET commands / Second`, 5, 0, NumberUnits, `com_set.*`),
NewRateSumCol(`lock`, `LOCK commands / Second`, 5, 0, NumberUnits, `com_lock.*`, `com_unlock.*`),
NewRateSumCol(`trx`, `Transactional commands / Second`, 5, 0, NumberUnits, `com_begin`, `com_commit`, `com_rollback.*`, `com_savepoint`),
NewRateSumCol(`xa`, `XA commands / Second`, 5, 0, NumberUnits, `com_xa.*`),
NewRateSumCol(`prep`, `Prepared Statement commands / Second`, 5, 0, NumberUnits, `Com_stmt.*`, `Com_.*_sql`),
),
`throughput`: NewNormalView(`MySQL Server Throughput`,
NewGroupCol(`Throughput`, `Bytes in/out of the server`,
NewDiffCol(`recv`, `Data received since last sample`, 6, `bytes_received`, 0, MemoryUnits),
NewRateCol(`recv/s`, `Bytes received / sec`, 6, `bytes_received`, 0, MemoryUnits),
NewDiffCol(`sent`, `Data sent since last sample`, 6, `bytes_sent`, 0, MemoryUnits),
NewRateCol(`sent/s`, `Bytes sent / sec`, 6, `bytes_sent`, 0, MemoryUnits),
),
),
`query`: NewNormalView(`Query types and sorts`,
NewRateCol(`slow`, `Slow queries per second`, 4, `slow_queries`, 0, NumberUnits),
NewGroupCol(`Selects`, `Select Types`,
NewRateCol(`fjn`, `Full Joins / sec`, 5, `select_full_join`, 0, NumberUnits),
NewRateCol(`frj`, `Full Range Joins / sec`, 5, `select_full_range_join`, 0, NumberUnits),
NewRateCol(`rang`, `Range / sec`, 5, `select_range`, 0, NumberUnits),
NewRateCol(`rchk`, `Range Check / sec`, 5, `select_range_check`, 0, NumberUnits),
NewRateCol(`scan`, `Scan / sec`, 5, `select_scan`, 0, NumberUnits),
),
NewGroupCol(`Sorts`, `Sort Types`,
NewRateCol(`pass`, `Merge Passes / sec`, 5, `sort_merge_passes`, 0, NumberUnits),
NewRateCol(`rang`, `Range / sec`, 5, `sort_range`, 0, NumberUnits),
NewRateCol(`rows`, `Rows / sec`, 5, `sort_rows`, 0, NumberUnits),
NewRateCol(`scan`, `Scan / sec`, 5, `sort_scan`, 0, NumberUnits),
),
),
`temp`: NewNormalView(`Internal Temporary Tables`,
NewRateCol(`tmps`, `Temporary Tables / second`, 5, `created_tmp_tables`, 0, NumberUnits),
NewRateCol(`disk`, `On Disk Temp Tables / second`, 5, `created_tmp_disk_tables`, 0, NumberUnits),
NewRateCol(`files`, `Temp Files / second`, 5, `created_tmp_files`, 0, NumberUnits),
),
`handler`: NewNormalView(`Storage Engine Handler metrics`,
NewGroupCol(`Reads`, `Handler read stats`,
NewRateCol(`rfst`, `Read First / s`, 5, `handler_read_first`, 0, NumberUnits),
NewRateCol(`rkey`, `Read Key / s`, 5, `handler_read_key`, 0, NumberUnits),
NewRateCol(`rnex`, `Read Next / s`, 5, `handler_read_next`, 0, NumberUnits),
NewRateCol(`rprv`, `Read Prev / s`, 5, `handler_read_prev`, 0, NumberUnits),
NewRateCol(`rrd`, `Random reads / s`, 5, `handler_read_rnd`, 0, NumberUnits),
NewRateCol(`rrdn`, `Read First / s`, 5, `handler_read_rnd_next`, 0, NumberUnits),
),
NewGroupCol(`Other`, `Other handler stats`,
NewRateCol(`ins`, `Inserts / s`, 5, `handler_write`, 0, NumberUnits),
NewRateCol(`upd`, `Updates / s`, 5, `handler_update`, 0, NumberUnits),
NewRateCol(`del`, `Deletes / s`, 5, `handler_delete`, 0, NumberUnits),
NewRateCol(`cmt`, `Commits / s`, 5, `handler_commit`, 0, NumberUnits),
NewRateCol(`rbk`, `Rollbacks / s`, 5, `handler_rollback`, 0, NumberUnits),
NewRateCol(`disc`, `Discovers / s`, 5, `handler_discover`, 0, NumberUnits),
),
),
`innodb`: NewNormalView(`Innodb metrics`,
NewGroupCol(`Row Ops`, `Row-level operations`,
NewRateCol(`read`, `Reads / s`, 5, `innodb_rows_read`, 0, NumberUnits),
NewRateSumCol(`dml`, `Inserts, Updates + Deletes / Second`, 5, 0, NumberUnits, `innodb_rows_inserted`, `innodb_rows_updated`, `innodb_rows_deleted`),
),
NewGroupCol(`Buffer Pool`, `Buffer Pool Stats`,
NewGaugeCol(`data`, `Data Buffered`, 5, `innodb_buffer_pool_bytes_data`, 0, MemoryUnits),
NewPercentCol(`dirt`, `Buffer pool %dirty`, 4, `innodb_buffer_pool_pages_dirty`, `innodb_buffer_pool_pages_total`, 0),
NewRateCol(`rreq`, `Read Requests (Logical) / s`, 5, `innodb_buffer_pool_read_requests`, 0, NumberUnits),
NewRateCol(`read`, `Reads (Physical) / s`, 4, `innodb_buffer_pool_reads`, 0, NumberUnits),
NewRateCol(`wreq`, `Write Requests / s`, 5, `innodb_buffer_pool_write_requests`, 0, NumberUnits),
NewRateCol(`write`, `Writes (Physical) / s`, 4, `innodb_buffer_pool_pages_flushed`, 0, NumberUnits),
),
NewGroupCol(`Log`, `Log Information`,
NewGaugeCol(`Chkpt`, `Checkpoint age`, 5, `innodb_checkpoint_age`, 0, MemoryUnits),
NewPercentCol(`%`, `% of Checkpoint age target`, 4, `innodb_checkpoint_age`, `innodb_checkpoint_max_age`, 0),
NewRateCol(`lsn`, `Log growth (LSN)`, 5, `innodb_lsn_current`, 0, MemoryUnits),
),
NewGroupCol(`Data`, `Data Operations`,
NewRateCol(`read`, `Bytes Read / s`, 5, `innodb_data_read`, 0, MemoryUnits),
NewRateCol(`writes`, `Bytes Written / s`, 5, `innodb_data_written`, 0, MemoryUnits),
),
NewGaugeCol(`Hist`, `History List Length`, 5, `innodb_history_list_length`, 0, NumberUnits),
),
`innodb_buffer_pool`: NewNormalView(`Innodb Buffer Pool stats`,
NewGroupCol(`Buffer Pool Pages`, `Innodb Buffer Pool Pages stats`,
NewGaugeCol(`data`, `BP data pages`, 4, `innodb_buffer_pool_pages_data`, 0, NumberUnits),
NewGaugeCol(`old`, `BP old pages`, 4, `innodb_buffer_pool_pages_old`, 0, NumberUnits),
NewGaugeCol(`dirty`, `BP dirty pages`, 4, `innodb_buffer_pool_pages_dirty`, 0, NumberUnits),
NewGaugeCol(`free`, `BP free pages`, 4, `innodb_buffer_pool_pages_free`, 0, NumberUnits),
NewGaugeCol(`latched`, `BP latched pages`, 4, `innodb_buffer_pool_pages_latched`, 0, NumberUnits),
NewGaugeCol(`misc`, `BP misc pages`, 4, `innodb_buffer_pool_pages_misc`, 0, NumberUnits),
NewGaugeCol(`total`, `BP total pages`, 4, `innodb_buffer_pool_pages_total`, 0, NumberUnits),
),
NewGroupCol(`Read Ahead`, `Read ahead stats`,
NewRateCol(`Reads`, `Read-ahead operations`, 4, `innodb_buffer_pool_read_ahead`, 0, NumberUnits),
NewRateCol(`Evicted`, `Read-ahead evictions`, 4, `innodb_buffer_pool_read_ahead_evicted`, 0, NumberUnits),
),
NewGroupCol(`Reads`, `Read stats`,
NewRateCol(`reqs`, `Read requests`, 4, `innodb_buffer_pool_read_requests`, 0, NumberUnits),
NewRateCol(`phys`, `Physical Reads`, 4, `innodb_buffer_pool_reads`, 0, NumberUnits),
),
NewRateCol(`wait`, `Page waits`, 4, `innodb_buffer_pool_wait_free`, 0, NumberUnits),
NewGroupCol(`Writes`, `Write stats`,
NewRateCol(`reqs`, `Write requests`, 4, `innodb_buffer_pool_write_requests`, 0, NumberUnits),
NewRateCol(`phys`, `Physical Writes`, 4, `innodb_buffer_pool_pages_flushed`, 0, NumberUnits),
NewRateCol(`lruf`, `LRU flushed`, 4, `innodb_buffer_pool_pages_lru_flushed`, 0, NumberUnits),
),
NewGroupCol(`Midpoint`, `Midpoint Insertion stats`,
NewRateCol(`old`, `Old pages inserted`, 4, `innodb_buffer_pool_pages_made_not_young`, 0, NumberUnits),
NewRateCol(`new`, `New pages inserted`, 4, `innodb_buffer_pool_pages_made_young`, 0, NumberUnits),
),
),
`innodb_flush`: NewNormalView(`Innodb flushing metrics`,
NewGroupCol(`Pages`, `Checkpoint info`,
NewPercentCol(`dirt`, `Buffer pool %dirty`, 4, `innodb_buffer_pool_pages_dirty`, `innodb_buffer_pool_pages_total`, 0),
NewRateCol(`flush`, `All pages flushed`, 5, `innodb_buffer_pool_pages_flushed`, 0, NumberUnits),
NewRateCol(`lruf`, `LRU flushes`, 5, `innodb_buffer_pool_pages_lru_flushed`, 0, NumberUnits),
),
NewGroupCol(`Checkpoint`, `Checkpoint info`,
NewGaugeCol(`age`, `Checkpoint Age`, 5, `innodb_checkpoint_age`, 0, MemoryUnits),
NewPercentCol(`max`, `Percent of checkpoint age out of max`, 5, `innodb_checkpoint_age`, `innodb_checkpoint_max_age`, 0),
),
NewGroupCol(`Data`, `Data stats`,
NewRateCol(`pages`, `Pages written`, 5, `innodb_pages_written`, 0, NumberUnits),
NewRateCol(`wops`, `Write operations`, 5, `innodb_data_writes`, 0, NumberUnits),
NewRateCol(`bytes`, `Write data`, 5, `innodb_data_written`, 0, MemoryUnits),
),
NewGroupCol(`Log`, `Log Sequence Number stats`,
NewRateCol(`lsn`, `Log growth (LSN)`, 5, `innodb_lsn_current`, 0, MemoryUnits),
NewRateCol(`chkpt`, `Log checkpoints`, 5, `innodb_lsn_last_checkpoint`, 0, MemoryUnits),
),
),
`wsrep`: NewExtraHeaderView(`Galera Wsrep statistics`,
func(state *MyqState) chan string {
ch := make(chan string, 1)
defer close(ch)
ch <- fmt.Sprintf("%s / %s (idx: %d) / %s %s",
state.Cur.getStr(`V_wsrep_cluster_name`),
state.Cur.getStr(`V_wsrep_node_name`),
state.Cur.getI(`wsrep_local_index`),
state.Cur.getStr(`wsrep_provider_name`),
state.Cur.getStr(`wsrep_provider_version`))
return ch
},
NewGroupCol(`Cluster`, `Cluster-wide stats (at least according to this node)`,
NewStringCol(`P`, `Primary (P) or Non-primary (N)`, 1, `wsrep_cluster_status`),
NewRightmostCol(`cnf`, `Cluster configuration id (increments every time a node joins/leaves the cluster)`, 3, `wsrep_cluster_conf_id`),
NewGaugeCol(`#`, `Cluster size`, 2, `wsrep_cluster_size`, 0, NumberUnits),
),
NewGroupCol(`Node`, `Node's specific state`,
// NewStringCol(`state`, `State of this node`, 4, `wsrep_local_state_comment`),
NewFuncCol(`state`, `State of this node`, 4, func(state *MyqState, c Col) chan string {
ch := make( chan string, 1)
defer close(ch)
st := state.Cur.getStr(`wsrep_local_state_comment`)
if strings.HasPrefix(st, `Join`) {
switch st {
case `Joining`:
ch <- `Jing`
case `Joining: preparing for State Transfer`:
ch <- `J:Pr`
case `Joining: requested State Transfer`:
ch <- `J:Rq`
case `Joining: receiving State Transfer`:
ch <- `J:Rc`
case `Joining: State Transfer request failed`:
ch <- `J:RF`
case `Joining: State Transfer failed`:
ch <- `J:F`
case `Joined`:
ch <- `Jned`
default:
ch <- st[0:4]
}
} else {
ch <- st[0:4]
}
return ch
}),
),
NewFuncCol(`laten`, `Average replication latency`, 5, func(state *MyqState, c Col) chan string {
ch := make(chan string, 1)
defer close(ch)
vals := strings.Split(state.Cur.getStr(`wsrep_evs_repl_latency`), `/`)
// Expecting 5 vals here, filler if not
if len(vals) != 5 {
ch <- column_filler(c)
} else {
if avg, err := strconv.ParseFloat(vals[1], 64); err == nil {
cv := collapse_number(avg, c.Width(), 2, SecondUnits)
ch <- fmt.Sprintf(fmt.Sprint(`%`, c.Width(), `s`), cv)
} else {
ch <- column_filler(c)
}
}
return ch
}),
NewGroupCol(`Outbound`, `Sent replication events`,
NewRateCol(`msgs`, `Replicated messages (usually transactions) per second`, 4, `wsrep_replicated`, 0, NumberUnits),
NewRateCol(`data`, `Replicated bytes per second`, 4, `wsrep_replicated_bytes`, 0, MemoryUnits),
NewGaugeCol(`queue`, `Outbound replication queue`, 3, `wsrep_local_send_queue`, 0, NumberUnits),
),
NewGroupCol(`Inbound`, `Received replication events`,
NewRateCol(`msgs`, `Received messages (usually transactions) per second`, 4, `wsrep_received`, 0, NumberUnits),
NewRateCol(`data`, `Received bytes per second`, 4, `wsrep_received_bytes`, 0, MemoryUnits),
NewGaugeCol(`queue`, `Received replication apply queue`, 3, `wsrep_local_recv_queue`, 0, NumberUnits),
),
NewGroupCol(`FlowC`, `Flow control stats`,
NewDiffCol(`paused`, `Flow control paused (could be from anywhere in the cluster)`, 5, `wsrep_flow_control_paused_ns`, 0, NanoSecondUnits),
NewDiffCol(`snt`, `Flow control sent messages (could be starting or stopping FC)`, 3, `wsrep_flow_control_sent`, 0, NumberUnits),
),
NewGroupCol(`Conflcts`, `Galera replication conflicts (on this node)`,
NewDiffCol(`lcf`, `Local certification failures since last sample`, 3, `wsrep_local_cert_failures`, 0, NumberUnits),
NewDiffCol(`bfa`, `Brute force aborts since last sample`, 3, `wsrep_local_bf_aborts`, 0, NumberUnits),
),
NewGroupCol(`Gcache`, `Galera cache (gcache) information`,
NewCurDiffCol(`ist`, `Gcached transactions`, 5, `wsrep_last_committed`, `wsrep_local_cached_downto`, 0, NumberUnits),
NewGaugeCol(`idx`, `Certification index size (keys)`, 4, `wsrep_cert_index_size`, 0, NumberUnits),
),
NewGroupCol(`Apply`, `Theoretical and actual apply efficiency`,
NewPercentCol(`%ef`, `Percent of threads being used`, 4, `wsrep_apply_window`, `V_wsrep_slave_threads`, 0),
),
),
`qcache`: NewNormalView(`Query cache stats`,
NewStringCol(`type`, `Query cache type`, 6, `V_query_cache_type`),
NewRateSumCol(`sel`, `Total Selects + Qcache Hits per second`, 4, 0, NumberUnits, `com_select`, `qcache_hits`),
NewRateCol(`hits`, `Query cache hits per second`, 4, `qcache_hits`, 0, NumberUnits),
NewRateCol(`ins`, `Query inserts per second (new entries to the cache)`, 4, `qcache_inserts`, 0, NumberUnits),
NewRateCol(`notc`, `Queries not cached per second (either can not be cached, or SELECT SQL_NO_CACHE)`, 4, `qcache_not_cached`, 0, NumberUnits),
NewGaugeCol(`tot`, `Total queries in the cache`, 4, `qcache_queries_in_cache`, 0, NumberUnits),
NewRateCol(`lowm`, `Low memory prunes (cache entries removed due to memory limit)`, 4, `qcache_lowmem_prunes`, 0, NumberUnits),
NewPercentCol(`%free`, `Percent of cache memory free`, 5, `qcache_free_blocks`, `qcache_total_blocks`, 0),
),
`myisam`: NewNormalView(`MyISAM stats`,
NewGroupCol(`Key Buffer`, `Key Buffer Stats`,
NewGaugeCol(`used`, `Current Key Buffer blocks unused`, 6, `key_blocks_unused`, 0, NumberUnits),
NewGaugeCol(`maxu`, `Maxiumum Key Buffer blocks used`, 6, `key_blocks_used`, 0, NumberUnits),
),
NewGroupCol(`I/O`, `MyISAM Key Buffer IO Stats (not data)`,
NewRateCol(`logr`, `Logical read requests`, 5, `key_read_requests`, 0, NumberUnits),
NewRateCol(`phyr`, `Physical reads (cache misses)`, 5, `key_reads`, 0, NumberUnits),
NewRateCol(`logw`, `Logical write requests`, 5, `key_write_requests`, 0, NumberUnits),
NewRateCol(`phyw`, `Physical writes`, 5, `key_writes`, 0, NumberUnits),
),
),
`commands`: NewNormalView(`Sorted list of all commands run in a given interval`,
NewFuncCol(`Counts`, `All commands tracked by the Com_* counters`, 4, func(state *MyqState, c Col) chan string {
var all_diffs []float64
diff_variables := map[float64][]string{}
// Get the rate for every ^com* variable
for _, variable := range expand_variables([]string{`^com.*`}, state.Cur) {
diff := calculate_diff(state.Cur.getF(variable), state.Prev.getF(variable))
// Skip those without activity
if diff <= 0 {
continue
}
// Create the [] slice for a rate we haven't seen yet
if _, ok := diff_variables[diff]; ok == false {
diff_variables[diff] = make([]string, 0)
all_diffs = append(all_diffs, diff) // record the diff the first time
}
// Push the variable name onto the rate slice
diff_variables[diff] = append(diff_variables[diff], variable)
}
// Sort all the rates so we can iterate through them from big to small
sort.Sort(sort.Reverse(sort.Float64Slice(all_diffs)))
// Each rate
ch := make(chan string)
go func() {
defer close(ch)
for _, diff := range all_diffs {
var out bytes.Buffer
out.WriteString(fit_string(collapse_number(diff, c.Width(), 0, NumberUnits), c.Width()))
out.WriteString(fmt.Sprintf(" %v", diff_variables[diff]))
ch <- out.String()
}
}()
return ch
}),
),
}
} | myqlib/view_defaults.go | 0.566019 | 0.552479 | view_defaults.go | starcoder |
package texture
import (
"github.com/jphsd/graphics2d/util"
"math"
)
// Generator is used to construct a one dimensional pattern with a fixed wavelength, center and phase, rotated
// by theta. It has an optional filter. The generator function takes a value in the range [0,1] and returns a
// value in [-1,1].
type Generator struct {
	Lambda float64 // wavelength, [1,...)
	Center float64 // [0,1]
	Phase float64 // [0,1]
	GFunc func(float64) float64 // generator function: maps [0,1] to [-1,1]
	FFunc func(float64) float64 // optional filter applied to GFunc's output; nil means none
	CosTh, SinTh float64 // cached cos/sin of the rotation angle theta, snapped to exact 0/±1
}
// NewGenerator constructs a new Generator instance with wavelength lambda and rotation theta using the
// supplied generator function. lambda is clamped to a minimum of 1; center
// defaults to 0.5, phase to 0 and no filter is installed.
func NewGenerator(lambda, theta float64, f func(float64) float64) *Generator {
	if lambda < 1 {
		lambda = 1
	}
	// Snap to quad — previously the same three-way snap was written out
	// twice (once for cos, once for sin); factored into snapToQuad.
	ct := snapToQuad(math.Cos(theta))
	st := snapToQuad(math.Sin(theta))
	return &Generator{lambda, 0.5, 0, f, nil, ct, st}
}

// snapToQuad rounds v to exactly 0, 1 or -1 when it is within closeTo's
// tolerance of that value, so axis-aligned rotations stay exact.
func snapToQuad(v float64) float64 {
	switch {
	case closeTo(0, v):
		return 0
	case closeTo(1, v):
		return 1
	case closeTo(-1, v):
		return -1
	}
	return v
}
// Eval2 implements the Field interface. The point (x,y) is projected onto
// the generator's orientation axis; the projection is mapped to [0,1] by
// VtoT and fed through the generator (and optional filter) function.
func (g *Generator) Eval2(x, y float64) float64 {
	v := x*g.CosTh + y*g.SinTh
	if g.FFunc == nil {
		return g.GFunc(g.VtoT(v))
	}
	return g.FFunc(g.GFunc(g.VtoT(v)))
}
// VtoT converts a value in (-inf,inf) to [0,1] based on the generator's orientation, lambda and phase values.
func (g *Generator) VtoT(v float64) float64 {
	// Vs Div and Floor ...
	// Wrap v into [0,Lambda] by repeated addition/subtraction. Cheap when v
	// is already near range; NOTE(review): iteration count grows with |v|.
	for v < 0 {
		v += g.Lambda
	}
	for v > g.Lambda {
		v -= g.Lambda
	}
	// Normalize to [0,1] and shift by phase; t <= 2 here, so one wrap suffices.
	t := v/g.Lambda + g.Phase
	if t > 1 {
		t -= 1
	}
	// Piecewise-linear remap so that t == Center maps to exactly 0.5.
	if t <= g.Center {
		return t * 0.5 / g.Center
	}
	return 0.5*(t-g.Center)/(1-g.Center) + 0.5
}
// FlatGF is a generator function that always produces the same value.
type FlatGF struct {
	Val float64
}

// Flat ignores t and yields the stored constant value.
func (f *FlatGF) Flat(t float64) float64 {
	return f.Val
}
// Sin returns a sine wave offset by -90 degrees, so t=0 maps to -1,
// t=0.5 to +1 and t=1 back to -1.
func Sin(t float64) float64 {
	phase := t*(2*math.Pi) - math.Pi/2
	return math.Sin(phase)
}

// Square returns a square wave: -1 for t <= 0.5, +1 otherwise.
func Square(t float64) float64 {
	v := -1.0
	if t > 0.5 {
		v = 1
	}
	return v
}

// Triangle returns a triangle wave rising linearly from -1 at t=0 to +1
// at t=0.5 and falling back to -1 at t=1.
func Triangle(t float64) float64 {
	if t < 0.5 {
		return t*2*2 - 1
	}
	return (1-t)*2*2 - 1
}

// Saw returns a sawtooth wave rising linearly from -1 at t=0 to +1 at t=1.
func Saw(t float64) float64 {
	return 2*t - 1
}
// NLGF captures a non-linear function.
type NLGF struct {
	NL util.NonLinear
}

// NL1 applies the captured non-linearity on the rising half of the period
// and, mirrored, on the falling half, producing a symmetric wave form.
func (g *NLGF) NL1(t float64) float64 {
	u := 2 * t
	if t >= 0.5 {
		u = 2 * (1 - t)
	}
	return g.NL.Transform(u)*2 - 1
}

// NL2GF captures two non-linear functions, one for the rising half of the
// period (NLU) and one for the falling half (NLD).
type NL2GF struct {
	NLU, NLD util.NonLinear
}

// NL2 applies NLU over the first half of the period and NLD, mirrored, over
// the second half.
func (g *NL2GF) NL2(t float64) float64 {
	if t < 0.5 {
		return g.NLU.Transform(2*t)*2 - 1
	}
	return g.NLD.Transform(2*(1-t))*2 - 1
}
// Noise1DGF captures a scaled Perlin noise instance.
type Noise1DGF struct {
	Scale float64 // multiplier applied to t before sampling the noise
	Noise *Perlin // underlying 2-D noise source, sampled along a fixed row
	OffsX, OffsY float64 // offsets into the noise domain
}
// NewNoise1DGF returns a new Noise1DGF instance with a fresh Perlin source
// and zero offsets.
func NewNoise1DGF(scale float64) *Noise1DGF {
	return &Noise1DGF{scale, NewPerlin(), 0, 0}
}
// Noise1D returns a wave derived from a Perlin noise function by sampling
// the 2-D noise along the horizontal line y = OffsY.
func (n *Noise1DGF) Noise1D(t float64) float64 {
	return n.Noise.Eval2(t*n.Scale+n.OffsX, n.OffsY)
}
// closeTo reports whether a and b differ by less than 1e-6. Uses math.Abs
// instead of the previous hand-rolled negation; behavior (including for NaN,
// where both forms return false) is unchanged.
func closeTo(a, b float64) bool {
	return math.Abs(a-b) < 0.000001
}
package e2e
import (
"time"
v1alpha1 "github.com/interconnectedcloud/qdr-operator/pkg/apis/interconnectedcloud/v1alpha1"
"github.com/interconnectedcloud/qdr-operator/test/e2e/framework"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
// Registers the ginkgo suite: a single spec that deploys an Interconnect in
// edge mode and verifies the operator-created resources (see testEdgeDefaults).
var _ = Describe("[edge] Interconnect edge deployment tests", func() {
	f := framework.NewFramework("basic-edge", nil)
	It("Should be able to create a default edge deployment", func() {
		testEdgeDefaults(f)
	})
})
// testEdgeDefaults deploys a default-sized edge-role Interconnect and checks,
// in order: the Deployment reaches 1 replica, the Interconnect spec defaults,
// the owning Service, and the router pod logs for the expected listeners.
// Statement order matters: resources are created, awaited, then inspected.
func testEdgeDefaults(f *framework.Framework) {
	By("Creating an edge interconnect with default size")
	ei, err := f.CreateInterconnect(f.Namespace, 0, func(ei *v1alpha1.Interconnect) {
		ei.Name = "edge-interconnect"
		ei.Spec.DeploymentPlan.Role = "edge"
	})
	Expect(err).NotTo(HaveOccurred())
	// Make sure we cleanup the Interconnect resource after we're done testing.
	defer func() {
		err = f.DeleteInterconnect(ei)
		Expect(err).NotTo(HaveOccurred())
	}()
	By("Creating a Deployment with 1 replicas")
	err = framework.WaitForDeployment(f.KubeClient, f.Namespace, "edge-interconnect", 1, framework.RetryInterval, framework.Timeout)
	Expect(err).NotTo(HaveOccurred())
	By("Creating an Interconnect resource in the namespace")
	ei, err = f.GetInterconnect("edge-interconnect")
	Expect(err).NotTo(HaveOccurred())
	By("Verifying the deployment plan")
	Expect(ei.Name).To(Equal("edge-interconnect"))
	Expect(ei.Spec.DeploymentPlan.Size).To(Equal(int32(1)))
	Expect(ei.Spec.DeploymentPlan.Role).To(Equal(v1alpha1.RouterRoleType("edge")))
	Expect(ei.Spec.DeploymentPlan.Placement).To(Equal(v1alpha1.PlacementAny))
	// NOTE(review): this By() text duplicates the previous step; it likely
	// was meant to read "Creating a Deployment in the namespace".
	By("Creating an Interconnect resource in the namespace")
	dep, err := f.GetDeployment("edge-interconnect")
	Expect(err).NotTo(HaveOccurred())
	Expect(*dep.Spec.Replicas).To(Equal(int32(1)))
	By("Creating a service for the interconnect default listeners")
	svc, err := f.GetService("edge-interconnect")
	Expect(err).NotTo(HaveOccurred())
	By("Verifying the owner reference for the service")
	Expect(svc.OwnerReferences[0].APIVersion).To(Equal(framework.GVR))
	Expect(svc.OwnerReferences[0].Name).To(Equal("edge-interconnect"))
	Expect(*svc.OwnerReferences[0].Controller).To(Equal(true))
	By("Setting up default listener on qdr instances")
	pods, err := f.ListPodsForDeployment(dep)
	Expect(err).NotTo(HaveOccurred())
	Expect(len(pods.Items)).To(Equal(1))
	for _, pod := range pods.Items {
		_, err = framework.LookForStringInLog(f.Namespace, pod.Name, "edge-interconnect", "Router started in Edge mode", time.Second*5)
		Expect(err).NotTo(HaveOccurred())
		_, err = framework.LookForRegexpInLog(f.Namespace, pod.Name, "edge-interconnect", `Version:.*1\.8\.0`, time.Second*5)
		Expect(err).NotTo(HaveOccurred())
		_, err = framework.LookForStringInLog(f.Namespace, pod.Name, "edge-interconnect", "Configured Listener: 0.0.0.0:5672 proto=any, role=normal", time.Second*1)
		Expect(err).NotTo(HaveOccurred())
		// The TLS listener only exists when cert-manager provisioned certs.
		if f.CertManagerPresent {
			_, err = framework.LookForStringInLog(f.Namespace, pod.Name, "edge-interconnect", "Configured Listener: 0.0.0.0:5671 proto=any, role=normal, sslProfile=default", time.Second*1)
			Expect(err).NotTo(HaveOccurred())
		}
		_, err = framework.LookForStringInLog(f.Namespace, pod.Name, "edge-interconnect", "Configured Listener: 0.0.0.0:8080 proto=any, role=normal, http", time.Second*1)
		Expect(err).NotTo(HaveOccurred())
		_, err = framework.LookForStringInLog(f.Namespace, pod.Name, "edge-interconnect", "Configured Listener: :8888 proto=any, role=normal, http", time.Second*1)
		Expect(err).NotTo(HaveOccurred())
		// Negative checks: an edge-mode router must NOT configure the
		// inter-router (55672) or edge (45672) listeners, so the log greps
		// below are expected to fail.
		_, err = framework.LookForStringInLog(f.Namespace, pod.Name, "edge-interconnect", "Configured Listener: 0.0.0.0:55672 proto=any, role=inter-router", time.Second*1)
		Expect(err).To(HaveOccurred())
		_, err = framework.LookForStringInLog(f.Namespace, pod.Name, "edge-interconnect", "Configured Listener: 0.0.0.0:45672 proto=any, role=edge", time.Second*1)
		Expect(err).To(HaveOccurred())
	}
}
package pass
import (
"errors"
"math"
"github.com/mmcloughlin/avo/reg"
)
// edge is an edge of the interference graph, indicating that registers X and Y
// must be in non-conflicting registers. Endpoints may be virtual or physical;
// update() rewrites them through the current allocation as assignments are made.
type edge struct {
	X, Y reg.Register
}
// Allocator is a graph-coloring register allocator.
type Allocator struct {
	registers []reg.Physical // physical registers available for assignment
	allocation reg.Allocation // virtual -> physical assignments made so far
	edges []*edge // interference edges not yet fully resolved
	possible map[reg.Virtual][]reg.Physical // remaining candidate registers per virtual
	vidtopid map[reg.VID]reg.PID // physical IDs already bound to a virtual ID
}
// NewAllocator builds an allocator for the given physical registers.
// It fails when no registers are supplied.
func NewAllocator(rs []reg.Physical) (*Allocator, error) {
	if len(rs) == 0 {
		return nil, errors.New("no registers")
	}
	a := &Allocator{
		registers:  rs,
		allocation: reg.NewEmptyAllocation(),
		possible:   make(map[reg.Virtual][]reg.Physical),
		vidtopid:   make(map[reg.VID]reg.PID),
	}
	return a, nil
}

// NewAllocatorForKind builds an allocator for the given kind of registers.
func NewAllocatorForKind(k reg.Kind) (*Allocator, error) {
	family := reg.FamilyOfKind(k)
	if family == nil {
		return nil, errors.New("unknown register family")
	}
	return NewAllocator(family.Registers())
}
// AddInterferenceSet records that r interferes with every register in s.
// Convenience wrapper around AddInterference.
func (a *Allocator) AddInterferenceSet(r reg.Register, s reg.Set) {
	for member := range s {
		a.AddInterference(r, member)
	}
}

// AddInterference records that x and y must be assigned to non-conflicting
// physical registers. Both registers are registered for allocation.
func (a *Allocator) AddInterference(x, y reg.Register) {
	a.Add(x)
	a.Add(y)
	e := &edge{X: x, Y: y}
	a.edges = append(a.edges, e)
}

// Add adds a register to be allocated. Does nothing if the register has
// already been added or is not virtual.
func (a *Allocator) Add(r reg.Register) {
	virt, isVirtual := r.(reg.Virtual)
	if !isVirtual {
		// Physical registers never need an assignment.
		return
	}
	if _, seen := a.possible[virt]; !seen {
		a.possible[virt] = a.possibleregisters(virt)
	}
}
// Allocate allocates physical registers. It alternates constraint
// propagation (update) with assigning the most constrained virtual
// register, until every virtual register has a physical home.
func (a *Allocator) Allocate() (reg.Allocation, error) {
	for {
		if err := a.update(); err != nil {
			return nil, err
		}
		if a.remaining() == 0 {
			return a.allocation, nil
		}
		if err := a.alloc(a.mostrestricted()); err != nil {
			return nil, err
		}
	}
}
// update possible allocations based on edges. It first narrows candidates of
// virtuals whose VID is already bound to a physical ID, then re-examines every
// interference edge, resolving endpoints through the current allocation.
func (a *Allocator) update() error {
	for v := range a.possible {
		pid, found := a.vidtopid[v.VirtualID()]
		if !found {
			continue
		}
		// Only registers with the bound physical ID remain viable.
		a.possible[v] = filterregisters(a.possible[v], func(r reg.Physical) bool {
			return r.PhysicalID() == pid
		})
	}
	var rem []*edge
	for _, e := range a.edges {
		e.X, e.Y = a.allocation.LookupDefault(e.X), a.allocation.LookupDefault(e.Y)
		px, py := reg.ToPhysical(e.X), reg.ToPhysical(e.Y)
		vx, vy := reg.ToVirtual(e.X), reg.ToVirtual(e.Y)
		switch {
		case vx != nil && vy != nil:
			// Both endpoints still virtual: keep the edge for a later pass.
			rem = append(rem, e)
			continue
		case px != nil && py != nil:
			// Both physical: the constraint is now fully checkable.
			if reg.AreConflicting(px, py) {
				return errors.New("impossible register allocation")
			}
		case px != nil && vy != nil:
			// One side fixed: prune the other side's candidates.
			a.discardconflicting(vy, px)
		case vx != nil && py != nil:
			a.discardconflicting(vx, py)
		default:
			panic("unreachable")
		}
	}
	a.edges = rem
	return nil
}
// mostrestricted returns the virtual register with the least possibilities.
// Ties are broken by the lowest virtual ID, keeping the result deterministic
// despite Go's randomized map iteration order.
func (a *Allocator) mostrestricted() reg.Virtual {
	n := int(math.MaxInt32)
	var v reg.Virtual
	for r, p := range a.possible {
		if len(p) < n || (len(p) == n && v != nil && r.VirtualID() < v.VirtualID()) {
			n = len(p)
			v = r
		}
	}
	return v
}
// discardconflicting removes registers from vs possible list that conflict with p.
func (a *Allocator) discardconflicting(v reg.Virtual, p reg.Physical) {
	a.possible[v] = filterregisters(a.possible[v], func(r reg.Physical) bool {
		// If v's VID is already pinned to p's physical ID, keep every
		// candidate here; the vidtopid narrowing in update() covers it.
		if pid, found := a.vidtopid[v.VirtualID()]; found && pid == p.PhysicalID() {
			return true
		}
		return !reg.AreConflicting(r, p)
	})
}
// alloc attempts to allocate a physical register to v, recording the
// assignment and removing v from the pending set.
func (a *Allocator) alloc(v reg.Virtual) error {
	candidates := a.possible[v]
	if len(candidates) == 0 {
		return errors.New("failed to allocate registers")
	}
	// Candidates have already been filtered, so the first is acceptable.
	chosen := candidates[0]
	a.allocation[v] = chosen
	a.vidtopid[v.VirtualID()] = chosen.PhysicalID()
	delete(a.possible, v)
	return nil
}

// remaining returns the number of unallocated registers.
func (a *Allocator) remaining() int {
	return len(a.possible)
}
// possibleregisters returns all allocate-able registers for the given virtual.
func (a *Allocator) possibleregisters(v reg.Virtual) []reg.Physical {
return filterregisters(a.registers, func(r reg.Physical) bool {
return v.SatisfiedBy(r) && (r.Info()®.Restricted) == 0
})
}
func filterregisters(in []reg.Physical, predicate func(reg.Physical) bool) []reg.Physical {
var rs []reg.Physical
for _, r := range in {
if predicate(r) {
rs = append(rs, r)
}
}
return rs
} | vendor/github.com/mmcloughlin/avo/pass/alloc.go | 0.735926 | 0.433442 | alloc.go | starcoder |
package algorithms
import (
"github.com/dploop/golib/stl/iterators"
"github.com/dploop/golib/stl/types"
)
// MakeHeap arranges [first,last) into a heap ordered by less, by sifting
// down every internal node starting from the last parent.
func MakeHeap(first, last iterators.MutableRandomAccessIterator, less types.BinaryPredicate) {
	n := first.Distance(last)
	if n <= 1 {
		return
	}
	for start := (n - 2) / 2; start >= 0; start-- {
		siftDown(first, last, less, n, first.Advance(start).(iterators.MutableRandomAccessIterator))
	}
}
// PushHeap inserts the element at last-1 into the heap [first,last-1),
// restoring the heap property over [first,last).
func PushHeap(first, last iterators.MutableRandomAccessIterator, less types.BinaryPredicate) {
	siftUp(first, last, less, first.Distance(last))
}
// PopHeap moves the top of the heap [first,last) to position last-1 and
// restores the heap property over [first,last-1).
func PopHeap(first, last iterators.MutableRandomAccessIterator, less types.BinaryPredicate) {
	popHeap(first, last, less, first.Distance(last))
}
// popHeap implements PopHeap for a known length: swap the root with the
// final element, then sift the new root down over the shortened range.
func popHeap(first, last iterators.MutableRandomAccessIterator, less types.BinaryPredicate,
	length types.Size) {
	if length > 1 {
		last = last.Prev().(iterators.MutableRandomAccessIterator)
		firstData, lastData := first.Read(), last.Read()
		first.Write(lastData)
		last.Write(firstData)
		siftDown(first, last, less, length-1, first)
	}
}
// SortHeap sorts the heap [first,last) in place (heapsort's final phase).
func SortHeap(first, last iterators.MutableRandomAccessIterator, less types.BinaryPredicate) {
	sortHeap(first, last, less)
}
// sortHeap repeatedly pops the heap's top to the end of the shrinking range.
func sortHeap(first, last iterators.MutableRandomAccessIterator, less types.BinaryPredicate) {
	for n := first.Distance(last); n > 1; n-- {
		popHeap(first, last, less, n)
		last = last.Prev().(iterators.MutableRandomAccessIterator)
	}
}
// siftDown restores the heap property below start: the out-of-place value at
// start is saved, larger children are promoted into the hole, and the saved
// value is written into the hole's final position. The second parameter
// (last) is unused; the subtree is bounded by length instead.
//
// BUG FIX: the original never wrote the saved value back after the loop,
// leaving a duplicate of the promoted child in the hole and losing the
// displaced value (compare the final last.Write(t) in siftUp, and libc++'s
// __sift_down which ends with *__start = top).
//nolint:cyclop
func siftDown(first, _ iterators.MutableRandomAccessIterator, less types.BinaryPredicate,
	length types.Size, start iterators.MutableRandomAccessIterator) {
	child := first.Distance(start)
	if length < 2 || (length-2)/2 < child {
		return
	}
	child = child*2 + 1
	childIt := first.Advance(child).(iterators.MutableRandomAccessIterator)
	if (child+1) < length && less(childIt.Read(), childIt.Next().(iterators.MutableRandomAccessIterator).Read()) {
		// Right child exists and is greater than left child.
		childIt = childIt.Next().(iterators.MutableRandomAccessIterator)
		child++
	}
	// Check if we are in heap-order.
	if less(childIt.Read(), start.Read()) {
		return
	}
	// Save the displaced value and shift children up into the hole.
	top := start.Read()
	for {
		// We are not in heap-order, swap the parent with its largest child.
		start.Write(childIt.Read())
		start = childIt
		if (length-2)/2 < child {
			break
		}
		// Recompute the child based off of the updated parent.
		child = child*2 + 1
		childIt = first.Advance(child).(iterators.MutableRandomAccessIterator)
		if (child+1) < length && less(childIt.Read(), childIt.Next().(iterators.MutableRandomAccessIterator).Read()) {
			// Right child exists and is greater than left child.
			childIt = childIt.Next().(iterators.MutableRandomAccessIterator)
			child++
		}
		if less(childIt.Read(), top) {
			break
		}
	}
	// Place the saved value into its resting slot.
	start.Write(top)
}
// siftUp restores the heap property after an element was appended at last-1:
// the new element's value is saved in t, smaller ancestors are shifted down
// along the root path, and t is finally written into its resting slot.
//nolint:nestif
func siftUp(first, last iterators.MutableRandomAccessIterator, less types.BinaryPredicate,
	length types.Size) {
	if length > 1 {
		// length is reused as the parent index of the appended element.
		length = (length - 2) / 2
		ptr := first.Advance(length).(iterators.MutableRandomAccessIterator)
		last = last.Prev().(iterators.MutableRandomAccessIterator)
		if less(ptr.Read(), last.Read()) {
			t := last.Read()
			for {
				// Shift the smaller parent down into the hole.
				last.Write(ptr.Read())
				last = ptr
				if length == 0 {
					break
				}
				length = (length - 1) / 2
				ptr = first.Advance(length).(iterators.MutableRandomAccessIterator)
				if !less(ptr.Read(), t) {
					break
				}
			}
			// Drop the saved value into its final position.
			last.Write(t)
		}
	}
}
// IsHeap reports whether [first,last) satisfies the heap property under less.
func IsHeap(first, last iterators.RandomAccessIterator, less types.BinaryPredicate) bool {
	return IsHeapUntil(first, last, less) == last
}
// IsHeapUntil returns the first child that compares greater than its parent,
// or last if the whole range is a valid heap. p/pp walk the parent index and
// iterator; c/cp walk the children (left child first, then its sibling).
func IsHeapUntil(first, last iterators.RandomAccessIterator, less types.BinaryPredicate,
) iterators.RandomAccessIterator {
	length := first.Distance(last)
	p, c := 0, 1
	pp := first
	for c < length {
		cp := first.Advance(c)
		// Left child must not order above its parent.
		if less(pp.Read(), cp.Read()) {
			return cp
		}
		c++
		cp = cp.Next().(iterators.RandomAccessIterator)
		if c == length {
			return last
		}
		// Right child must not order above its parent either.
		if less(pp.Read(), cp.Read()) {
			return cp
		}
		p++
		pp = pp.Next().(iterators.RandomAccessIterator)
		c = 2*p + 1
	}
	return last
}
package polyline
// ChartPoint is a concrete 2-D point with float64 coordinates.
type ChartPoint struct {
	X, Y float64
}

// Point is the read-only view of a 2-D point consumed by the
// simplification routines.
type Point interface {
	GetX() float64
	GetY() float64
}

// GetX returns the x coordinate.
func (p ChartPoint) GetX() float64 { return p.X }

// GetY returns the y coordinate.
func (p ChartPoint) GetY() float64 { return p.Y }
// getSqDist returns the squared Euclidean distance between p1 and p2.
func getSqDist(p1 Point, p2 Point) float64 {
	dx, dy := p1.GetX()-p2.GetX(), p1.GetY()-p2.GetY()
	return dx*dx + dy*dy
}
// getSqSegDist returns the squared distance from p to the line segment p1-p2.
func getSqSegDist(p Point, p1 Point, p2 Point) float64 {
	x := p1.GetX()
	y := p1.GetY()
	dx := p2.GetX() - x
	dy := p2.GetY() - y
	if dx != 0 || dy != 0 {
		// t is the normalized projection of p onto the segment's direction.
		t := ((p.GetX()-x)*dx + (p.GetY()-y)*dy) / (dx*dx + dy*dy)
		if t > 1 {
			// Past the far end: closest point is p2.
			x = p2.GetX()
			y = p2.GetY()
		} else if t > 0 {
			// Within the segment: closest point is the projection.
			x += dx * t
			y += dy * t
		}
		// t <= 0 falls through: closest point is p1 (x,y unchanged).
	}
	dx = p.GetX() - x
	dy = p.GetY() - y
	return dx*dx + dy*dy
}
// simplifyRadialDist performs the cheap first pass: every point within
// sqTolerance (squared distance) of the previously kept point is dropped.
// The first point is always kept; the last point is appended if the loop
// did not already keep it.
func simplifyRadialDist(points []Point, sqTolerance float64) []Point {
	prevPoint := points[0]
	newPoints := []Point{prevPoint}
	var point Point
	for i := 1; i < len(points); i++ {
		point = points[i]
		if getSqDist(point, prevPoint) > sqTolerance {
			newPoints = append(newPoints, point)
			prevPoint = point
		}
	}
	// BUG FIX: the original compared the ADDRESSES of the two local
	// variables (&prevPoint != &point), which is always true, so the last
	// point was duplicated whenever the loop had already kept it. Comparing
	// the interface values (as in the reference simplify-js) fixes that;
	// Point implementations must be comparable, as ChartPoint is.
	if prevPoint != point {
		newPoints = append(newPoints, point)
	}
	return newPoints
}
// simplifyDPStep is the recursive core of Douglas-Peucker: it finds the
// interior point farthest from the chord points[first]-points[last]; if that
// distance exceeds the tolerance, the point is kept and both halves are
// processed recursively. Kept points are appended to simplified in
// polyline order.
func simplifyDPStep(points []Point, first int, last int, sqTolerance float64, simplified []Point) []Point {
	maxSqDist := sqTolerance
	var index int
	for i := first + 1; i < last; i++ {
		sqDist := getSqSegDist(points[i], points[first], points[last])
		if sqDist > maxSqDist {
			index = i
			maxSqDist = sqDist
		}
	}
	// If no interior point exceeded the tolerance the chord stands and
	// nothing is appended (index is only meaningful when maxSqDist grew).
	if maxSqDist > sqTolerance {
		if index-first > 1 {
			simplified = simplifyDPStep(points, first, index, sqTolerance, simplified)
		}
		simplified = append(simplified, points[index])
		if last-index > 1 {
			simplified = simplifyDPStep(points, index, last, sqTolerance, simplified)
		}
	}
	return simplified
}
func simplifyDouglasPeucker(points []Point, sqTolerance float64) []Point {
last := len(points) - 1
simplified := []Point{points[0]}
simplified = simplifyDPStep(points, 0, last, sqTolerance, simplified)
simplified = append(simplified, points[last])
return simplified
}
func Simplify(points *[]Point, tolerance float64, highestQuality bool) []Point {
arr := *points
if len(arr) <= 2 {
return arr
}
var sqTolerance float64
if tolerance == 0 {
sqTolerance = 1
} else {
sqTolerance = tolerance * tolerance
}
if !highestQuality {
arr = simplifyRadialDist(arr, sqTolerance)
}
arr = simplifyDouglasPeucker(arr, sqTolerance)
return arr
} | simplify.go | 0.744099 | 0.565419 | simplify.go | starcoder |
package gorgonia
import (
"fmt"
"hash"
"github.com/chewxy/hm"
"github.com/pkg/errors"
"gorgonia.org/tensor"
)
// byIndicesOp selects values from a tensor along a single axis using an
// integer index vector.
type byIndicesOp struct {
	axis int
}

// newByIndicesOp builds a byIndicesOp selecting along axis; negative axes
// are clamped to 0 (the default).
func newByIndicesOp(axis int) *byIndicesOp {
	selected := axis
	if selected < 0 {
		selected = 0
	}
	return &byIndicesOp{axis: selected}
}
// ByIndices selects values from x along the given axis at the positions
// named by indices, returning the resulting symbolic node. A negative axis
// defaults to 0 (see newByIndicesOp).
func ByIndices(x *Node, indices *Node, axis int) (*Node, error) {
	op := newByIndicesOp(axis)
	return ApplyOp(op, x, indices)
}
// Arity returns the number of inputs the op expects: the data tensor and the indices.
func (op *byIndicesOp) Arity() int { return 2 }

// ReturnsPtr reports whether the op returns a pointer to its internal value.
func (op *byIndicesOp) ReturnsPtr() bool { return false }

// CallsExtern reports whether the op calls out to an external implementation.
func (op *byIndicesOp) CallsExtern() bool { return false }

// WriteHash writes the op's string representation into the hasher.
// Uses Fprint, not Fprintf: the op string is data, never a format string
// (passing it to Fprintf trips go vet's printf check).
func (op *byIndicesOp) WriteHash(h hash.Hash) { fmt.Fprint(h, op.String()) }

// Hashcode returns a hash of the op for structural comparison.
func (op *byIndicesOp) Hashcode() uint32 { return simpleHash(op) }
// String returns a human-readable description of the op, including its axis.
func (op *byIndicesOp) String() string {
	return fmt.Sprintf("ByIndicesOp{axis=%d}", op.axis)
}
// InferShape computes the output shape: the input shape with the size along
// op.axis replaced by the number of indices.
// NOTE(review): op.axis is not bounds-checked against len(s) here; an
// out-of-range axis would panic — confirm callers validate it.
func (op *byIndicesOp) InferShape(inputs ...DimSizer) (tensor.Shape, error) {
	s := inputs[0].(tensor.Shape).Clone()
	i := inputs[1].(tensor.Shape).Clone()
	if !i.IsVectorLike() {
		return nil, errors.Errorf("Expected indices to be a vector-like. Got %v instead", i)
	}
	s[op.axis] = i.TotalSize()
	return s, nil
}
// Type returns the op's type: (a, Vector Int) -> a, i.e. the output shares
// the data input's type.
func (op *byIndicesOp) Type() hm.Type {
	a := hm.TypeVariable('a')
	b := makeTensorType(1, tensor.Int)
	return hm.NewFnType(a, b, a)
}
// OverwritesInput returns -1: no input's storage is reused for the output.
func (op *byIndicesOp) OverwritesInput() int { return -1 }
// checkInput validates arity and types: inputs[0] must be a tensor and
// inputs[1] an Int-typed tensor of indices.
func (op *byIndicesOp) checkInput(inputs ...Value) (x, indices tensor.Tensor, err error) {
	if err := checkArity(op, len(inputs)); err != nil {
		return nil, nil, err
	}
	var ok bool
	if x, ok = inputs[0].(tensor.Tensor); !ok {
		return nil, nil, errors.Errorf("Expected input to be a tensor, got %T", inputs[0])
	}
	if indices, ok = inputs[1].(tensor.Tensor); !ok {
		return nil, nil, errors.Errorf("Expected indices to be a tensor. Got %T instead", inputs[1])
	}
	if indices.Dtype() != tensor.Int {
		return nil, nil, errors.Errorf("Expected indices to have tensor.Int as a Dtype. Got %T instead", indices.Dtype())
	}
	return x, indices, nil
}
// Do performs the selection eagerly via tensor.ByIndices.
func (op *byIndicesOp) Do(inputs ...Value) (Value, error) {
	inputTensor, indices, err := op.checkInput(inputs...)
	if err != nil {
		return nil, fmt.Errorf("Can't check ByIndicesOp input: %w", err)
	}
	return tensor.ByIndices(inputTensor, indices, op.axis)
}
// DoDiff calculates the diff and sets its value to the output node. Implementation for ADOp interface.
func (op *byIndicesOp) DoDiff(ctx ExecutionContext, inputs Nodes, output *Node) error {
if len(inputs) != 2 {
return fmt.Errorf("byIndicesOp.DoDiff needs 2 arguments")
}
odv := output.boundTo.(*dualValue)
odvd := odv.Value.(tensor.Tensor)
diffOp := &byIndicesOpDiffOp{op}
result, err := diffOp.Do(inputs[0].boundTo, inputs[1].boundTo)
if err != nil {
return err
}
err = result.(*tensor.Dense).Reshape(odvd.Shape()...)
if err != nil {
return err
}
sum, err := odvd.(*tensor.Dense).Add(result.(*tensor.Dense), tensor.UseUnsafe())
if err != nil {
return err
}
odv.d = sum
return nil
}
// SymDiff applies the diff op. Implementation for SDOp interface.
func (op *byIndicesOp) SymDiff(inputs Nodes, output, grad *Node) (Nodes, error) {
	if err := checkArity(op, len(inputs)); err != nil {
		return nil, err
	}
	diffOp := &byIndicesOpDiffOp{op}
	retVal := make(Nodes, op.Arity())
	// Only the first input (the data tensor) receives a gradient node; the
	// slot for the indices stays nil.
	var err error
	retVal[0], err = ApplyOp(diffOp, inputs[0], grad, inputs[1])
	return retVal, err
}
// DiffWRT is an implementation for the SDOp interface
func (op *byIndicesOp) DiffWRT(inputs int) []bool {
	if want := op.Arity(); inputs != want {
		panic(fmt.Sprintf("ByIndicesOp operator needs %d inputs, got %d instead", want, inputs))
	}
	// The gradient flows to the data tensor only, never to the indices.
	return []bool{true, false}
}
// byIndicesOpDiffOp is the gradient op for byIndicesOp: it routes the
// incoming gradient back to the positions that were selected in the
// forward pass (see Do, which calls tensor.ByIndicesB).
type byIndicesOpDiffOp struct {
	*byIndicesOp
}
// Arity returns 3: per Type(), the diff op takes the forward input, the
// incoming gradient, and the index vector.
func (op *byIndicesOpDiffOp) Arity() int { return 3 }

// ReturnsPtr reports that the op does not return a pointer into its inputs.
func (op *byIndicesOpDiffOp) ReturnsPtr() bool { return false }

// CallsExtern reports that the op makes no external (e.g. device) calls.
func (op *byIndicesOpDiffOp) CallsExtern() bool { return false }
// WriteHash writes the op's string representation into the hasher so that
// structurally identical ops hash equally.
func (op *byIndicesOpDiffOp) WriteHash(h hash.Hash) {
	// Fprint, not Fprintf: op.String() is data, not a format string.
	// Passing it as a format is flagged by go vet's printf analyzer and
	// would misbehave if the representation ever contained a '%'.
	fmt.Fprint(h, op.String())
}
// Hashcode returns a hash identifying this op instance.
func (op *byIndicesOpDiffOp) Hashcode() uint32 {
	return simpleHash(op)
}

// String implements fmt.Stringer, embedding the axis the op works on.
func (op *byIndicesOpDiffOp) String() string {
	return fmt.Sprintf("ByIndicesOpDiff{}(%d)", op.axis)
}
// InferShape returns a clone of the first input's shape: the gradient has
// the same shape as the original (pre-selection) input.
func (op *byIndicesOpDiffOp) InferShape(inputs ...DimSizer) (tensor.Shape, error) {
	return inputs[0].(tensor.Shape).Clone(), nil
}
// Type returns the signature (a, a, Vector Int) → a: the forward input,
// the incoming gradient, and the index vector used in the forward pass.
func (op *byIndicesOpDiffOp) Type() hm.Type {
	t := hm.TypeVariable('a')
	idx := makeTensorType(1, tensor.Int)
	return hm.NewFnType(t, t, idx, t)
}

// OverwritesInput returns -1: the op allocates a fresh output tensor.
func (op *byIndicesOpDiffOp) OverwritesInput() int {
	return -1
}
// checkInput validates the diff op's three runtime inputs. Per the call in
// SymDiff (ApplyOp(diffOp, x, grad, indices)), the op's Type
// ((a, a, Vector Int) → a), and the destructuring in Do, the positional
// meaning is: inputs[0] = forward input, inputs[1] = incoming gradient,
// inputs[2] = indices. The named results were previously labelled
// "indices"/"gradient" in swapped order; they are renamed here to match
// the real semantics. Positions, accepted types, and return order are
// unchanged.
func (op *byIndicesOpDiffOp) checkInput(inputs ...Value) (in, grad, indices *tensor.Dense, err error) {
	if err := checkArity(op, len(inputs)); err != nil {
		return nil, nil, nil, err
	}
	var ok bool

	// inputs[0]: the forward input, possibly wrapped in a dual value.
	switch t := inputs[0].(type) {
	case *dualValue:
		if in, ok = t.Value.(*tensor.Dense); !ok {
			return nil, nil, nil, errors.Errorf("input should be a tensor.Tensor, got %T", inputs[0])
		}
	case *tensor.Dense:
		in = t
	default:
		return nil, nil, nil, errors.Errorf("input type is not supported, got %T", inputs[0])
	}

	// inputs[1]: the incoming gradient; only a dense tensor is accepted here.
	switch t := inputs[1].(type) {
	case *tensor.Dense:
		grad = t
	default:
		return nil, nil, nil, errors.Errorf("gradient type %T is not supported", inputs[1])
	}

	// inputs[2]: the indices, possibly wrapped in a dual value.
	switch t := inputs[2].(type) {
	case *dualValue:
		if indices, ok = t.Value.(*tensor.Dense); !ok {
			return nil, nil, nil, errors.Errorf("indices should be a tensor, got %T", inputs[2])
		}
	case *tensor.Dense:
		indices = t
	default:
		return nil, nil, nil, errors.Errorf("indices type is not supported, got %T", inputs[2])
	}

	return in, grad, indices, nil
}
// Do computes the gradient with respect to the forward input by scattering
// the incoming gradient back along op.axis at the selected indices
// (tensor.ByIndicesB is the backward counterpart of tensor.ByIndices).
func (op *byIndicesOpDiffOp) Do(inputs ...Value) (Value, error) {
	in, grad, idx, err := op.checkInput(inputs...)
	if err != nil {
		return nil, fmt.Errorf("Can't check ByIndicesOpDiff input: %w", err)
	}
	return tensor.ByIndicesB(in, grad, idx, op.axis)
}
// Compile-time assertions that both ops satisfy the interfaces they are
// used through: Op for both, plus SDOp and ADOp for the forward op.
var (
	_ Op = &byIndicesOpDiffOp{}
	_ Op = &byIndicesOp{}
	_ SDOp = &byIndicesOp{}
	_ ADOp = &byIndicesOp{}
)