code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1 value |
|---|---|---|---|---|---|
package common
import (
"fmt"
)
// Vector is a resizeable array. It takes its name from the C++ std::vector class.
type Vector struct {
	array        []interface{} // backing storage; may contain nil holes left by Erase
	emptyIndices Queue         // indices of holes available for reuse by Insert
	Length       int           // element count maintained by Push_back/Erase; NOTE(review): InsertInto overwrites it with len(array) — confirm intent
}
// MakeVector returns a pointer to a new, empty Vector.
func MakeVector() *Vector {
	return &Vector{
		array:        []interface{}{},
		emptyIndices: Queue{},
		Length:       0,
	}
}
// Array returns the underlying array used by the vector.
// The returned slice shares storage with the vector: mutations through it
// are visible to the vector and vice versa.
func (vc *Vector) Array() []interface{} {
	return vc.array
}
// Push_back will append a data structure to the end of the vector.
// If the backing array has fewer than checkDistance free slots, it is grown
// by resizeStep (but always by at least one slot, so the append below is
// guaranteed to be in range) and the existing contents are copied over.
func (vc *Vector) Push_back(data interface{}, resizeStep, checkDistance int) {
	if cap(vc.array) <= vc.Length+checkDistance {
		newSize := vc.Length + resizeStep
		if newSize <= vc.Length {
			// Guard against a non-positive resizeStep, which previously
			// produced an array too small for the write at vc.Length.
			newSize = vc.Length + 1
		}
		grown := make([]interface{}, newSize)
		copy(grown, vc.array)
		vc.array = grown
	}
	vc.array[vc.Length] = data
	vc.Length++
}
// Insert will attempt to insert a value into the vector using an empty slot.
// If no empty space is found, Insert resorts to Push_back to append the data
// structure to the end of the vector. The index the value landed at is returned.
func (vc *Vector) Insert(data interface{}) int {
	slot, err := vc.emptyIndices.Dequeue()
	if err != nil {
		// No reusable hole: append at the end instead.
		vc.Push_back(data, 1, 1)
		return vc.Length - 1
	}
	idx := slot.(int)
	vc.array[idx] = data
	return idx
}
// InsertInto writes data at the given index, growing the backing array to
// Length+index elements when cap(array) <= Length+index.
// NOTE(review): the growth target and the final `Length = len(array)` look
// suspicious — inserting at a small index on a non-empty vector inflates
// Length to the backing-array size, and inserting at a large index into an
// empty vector grows the array to `index` elements, so array[index] can
// still be out of range. Confirm intended semantics before relying on this.
func (vc *Vector) InsertInto(index int, data interface{}) {
	if cap(vc.array) <= vc.Length+index {
		tmp := vc.array
		vc.array = make([]interface{}, vc.Length+index)
		for i := range tmp {
			vc.array[i] = tmp[i]
		}
	}
	vc.array[index] = data
	vc.Length = len(vc.array)
}
// Difference will return a vector composed of the data structures that appear
// in vec2 but not in vec1 (note: the reverse direction is not included).
func (vec1 *Vector) Difference(vec2 *Vector) *Vector {
	result := MakeVector()
	base := vec1.Array()
	for _, candidate := range vec2.Array() {
		found := false
		for _, existing := range base {
			if existing == candidate {
				found = true
				break
			}
		}
		if !found {
			result.Insert(candidate)
		}
	}
	return result
}
// Erase will delete the data from an array position in the vector.
// The slot is nilled out and its index queued for reuse by Insert.
// NOTE(review): index is not bounds-checked, and since the backing array
// keeps its size, after an Erase the Length field is an element count, not
// a valid indexing bound — confirm callers account for that.
func (vc *Vector) Erase(index int) {
	vc.array[index] = nil
	vc.emptyIndices.Queue(index)
	vc.Length--
}
// GetValueOfIndex returns the value at the index requested, or an error when
// the index is out of range (negative indices are rejected as well).
func (vc *Vector) GetValueOfIndex(index int) (interface{}, error) {
	if index < 0 || index >= len(vc.array) {
		// Use %d: index is an int; the old %s verb printed "%!s(int=...)".
		return nil, fmt.Errorf("invalid index into vector: %d", index)
	}
	return vc.array[index], nil
}
// Empty will delete all data in the vector and essentially create a new vector.
// The queue of reusable hole indices is reset too; otherwise a later Insert
// could dequeue a stale index pointing past the new, zero-length array.
func (vc *Vector) Empty() {
	vc.array = make([]interface{}, 0)
	vc.emptyIndices = Queue{}
	vc.Length = 0
}
func (vc *Vector) IsEmpty() bool {
return vc.Length < 1
} | common/vector.go | 0.734405 | 0.770292 | vector.go | starcoder |
package main
/*
// Example of interfacing between Go and C programs.
*/
import (
"C"
)
import (
"fmt"
"strconv"
"strings"
"unsafe"
)
// A main function must be present, even if empty.
// (Presumably this file is built with a cgo buildmode such as c-shared or
// c-archive, which require a main package — TODO confirm the build setup.)
func main() {}
// All exported functions must have a '//export [name]' comment.
//export add
// add adds two integers
func add(x, y C.int) C.int { // This function when used in C takes as input int and returns int
	return x + y
}
// We can either use C types directly or use Go types that are mapped to C types.
// GoInt is a such a mapping defined in go2c.h
//export square
// square returns the square of an integer
func square(x int) int { // This function when used in C takes as input GoInt and returns GoInt
	result := x * x
	return result
}
//export printBits
// printBits prints an integer in binary format (base-2, via strconv.FormatInt)
// followed by a newline on standard output.
func printBits(x C.int) { // This function when used in C takes as input int and returns void
	fmt.Println(strconv.FormatInt(int64(x), 2))
}
//export toBits
// toBits returns a string with the binary representation of an integer.
// Returned value must be freed with free() from C or with C.free() from Go.
func toBits(x C.int) *C.char { // This function when used in C takes as input int and returns char*
	// strconv.FormatInt already produces the final string; the previous
	// fmt.Sprintf wrapper used it as a (non-constant) format string for
	// no benefit and tripped `go vet`.
	return C.CString(strconv.FormatInt(int64(x), 2))
}
//export conCat
// conCat concatenates 2 strings.
// Returned value must be freed with free() from C or with C.free() from Go.
// C.GoString copies each C string into Go-managed memory before concatenating.
func conCat(a, b *C.char) *C.char { // This function when used in C takes as input char* and returns char*
	return C.CString(C.GoString(a) + C.GoString(b))
}
//export toUpper
// toUpper converts a string to upper case.
// Returned value must be freed with free() from C or with C.free() from Go.
func toUpper(a string) *C.char { // This function when used in C takes as input a GoString struct and returns char*
	return C.CString(strings.ToUpper(a))
}
//export toString
// toString takes an integer and returns its sign and absolute value as strings.
// Multiple return values are represented in C as structures.
// Returned values must be freed with free() from C or with C.free() from Go.
func toString(x int) (*C.char, *C.char) { // This function when used in C takes as input GoInt and returns a structure.
	sign := "+"
	if x < 0 {
		sign = "-"
		x = -x
	}
	return C.CString(sign), C.CString(strconv.Itoa(x))
}
//export toUpper2
// toUpper2 converts a string to upper case.
// We cannot use this function from C safely, Go will panic at runtime unless
// we disable cgo runtime checks. The checks can be disabled by setting the environment
// variable GODEBUG=cgocheck=0, but it's not advised to. A Go function called by C code
// may not return a Go pointer (a Go string contains one internally).
func toUpper2(a string) string { // This function when used in C takes as input a GoString struct and returns a GoString
	return strings.ToUpper(a)
}
//export toUpper3
// toUpper3 converts a string to upper case.
// This beats the above restrictions, but it is really a recipe for disaster :)
// Don't write or use code similar to this!
func toUpper3(a string) unsafe.Pointer { // This function when used in C returns a void* that points to a Go string
	upper := strings.ToUpper(a)
	return unsafe.Pointer(&upper)
}
package color
/*
*/
import (
"math"
"strconv"
)
// Custom types to hold return values

// The RGB type holds three values: one for red (R), green (G) and
// blue (B). Each of these colors are on the domain of [0, 255].
type RGB struct {
	R int `json:"R"`
	G int `json:"G"`
	B int `json:"B"`
}

// HSV type holds three values: one for the hue (H), one for the
// saturation (S), and one for the value (V). The value for hue is on the
// domain of [0, 360] and the saturation and value values are on the
// domain of [0, 100].
type HSV struct {
	H int `json:"H"`
	S int `json:"S"`
	V int `json:"V"`
}

// HSL type holds three values: one for the hue (H), one for the
// saturation (S), and one for the length (L). The value for hue is on the
// domain of [0, 360] and the saturation and length values are on the
// domain of [0, 100].
type HSL struct {
	H int `json:"H"`
	S int `json:"S"`
	L int `json:"L"`
}

// CMYK type holds four values: one for the cyan percentage (C), one for the
// magenta percentage (M), one for the yellow percentage (Y), and one for
// the black percentage (K). All values are on the domain [0, 100].
type CMYK struct {
	C int `json:"C"`
	M int `json:"M"`
	Y int `json:"Y"`
	K int `json:"K"`
}

// Hex type holds a hexadecimal value as a string. This can be any
// hexadecimal value from 000000 to FFFFFF. The '#' is left off for
// simplicity's sake, as it can be added later easily by the user.
type Hex string

// Decimal type holds a converted hexadecimal value to a computer-readable
// decimal value (EX: 0xFFFFFF). The converted decimal value is on the
// domain [0, 16777215].
type Decimal int

// Ansi type holds an ansi escape code. It uses the escape code format
// \x1b[38;2;R;G;Bm, where R, G, and B are the RGB values, respectively.
// Each value is on the domain [0, 255], but will always work as long as
// the input value is in the correct domain.
type Ansi string

// Color type holds all of the different color types. This is useful if
// a program is returning a certain color in all of its different types.
type Color struct {
	RGB     `json:"RGB"`
	HSV     `json:"HSV"`
	HSL     `json:"HSL"`
	CMYK    `json:"CMYK"`
	Hex     `json:"Hex"`
	Decimal `json:"Decimal"`
	Ansi    `json:"Ansi"`
}
// TYPEtoRGB conversion functions
// HSVtoRGB converts HSV values to RGB values.
func HSVtoRGB(hsv HSV) RGB {
	s := float64(hsv.S) / 100
	v := float64(hsv.V) / 100
	c := s * v
	// Normalize the hue so H == 360 (and negative inputs) behave like 0;
	// previously H == 360 matched no switch case and yielded black.
	h := ((hsv.H % 360) + 360) % 360
	x := c * (1 - math.Abs(math.Mod(float64(h)/60, 2)-1))
	m := v - c
	var r, g, b float64
	switch {
	case h < 60:
		r, g, b = c, x, 0
	case h < 120:
		r, g, b = x, c, 0
	case h < 180:
		r, g, b = 0, c, x
	case h < 240:
		r, g, b = 0, x, c
	case h < 300:
		r, g, b = x, 0, c
	default:
		r, g, b = c, 0, x
	}
	return RGB{
		int(math.Round((r + m) * 255)),
		int(math.Round((g + m) * 255)),
		int(math.Round((b + m) * 255)),
	}
}
// HSLtoRGB converts HSL values to RGB values.
func HSLtoRGB(hsl HSL) RGB {
	s := float64(hsl.S) / 100
	l := float64(hsl.L) / 100
	c := (1 - math.Abs(2*l-1)) * s
	// Normalize the hue so H == 360 (and negative inputs) behave like 0;
	// previously H == 360 matched no switch case and yielded an off color.
	h := ((hsl.H % 360) + 360) % 360
	x := c * (1 - math.Abs(math.Mod(float64(h)/60, 2)-1))
	m := l - c/2
	var r, g, b float64
	switch {
	case h < 60:
		r, g, b = c, x, 0
	case h < 120:
		r, g, b = x, c, 0
	case h < 180:
		r, g, b = 0, c, x
	case h < 240:
		r, g, b = 0, x, c
	case h < 300:
		r, g, b = x, 0, c
	default:
		r, g, b = c, 0, x
	}
	return RGB{
		int(math.Round((r + m) * 255)),
		int(math.Round((g + m) * 255)),
		int(math.Round((b + m) * 255)),
	}
}
// HextoRGB converts a hexadecimal string to RGB values.
// A leading '#' is tolerated. Input shorter than six hex digits returns
// RGB{0, 0, 0} instead of panicking on an out-of-range slice; unparsable
// digit pairs fall back to 0 for that channel.
func HextoRGB(hex Hex) RGB {
	s := string(hex)
	if len(s) > 0 && s[0] == '#' {
		s = s[1:]
	}
	if len(s) < 6 {
		return RGB{}
	}
	r, _ := strconv.ParseInt(s[0:2], 16, 0)
	g, _ := strconv.ParseInt(s[2:4], 16, 0)
	b, _ := strconv.ParseInt(s[4:6], 16, 0)
	return RGB{int(r), int(g), int(b)}
}
// DecimaltoRGB converts a decimal value to RGB values by formatting it as a
// zero-padded six-digit hex string and delegating to HextoRGB.
func DecimaltoRGB(decimal Decimal) RGB {
	// Build the intermediate value as a plain string (the old code used the
	// unrelated Ansi type here) and return HextoRGB's result directly.
	hex := strconv.FormatInt(int64(decimal), 16)
	for len(hex) < 6 {
		hex = "0" + hex
	}
	return HextoRGB(Hex(hex))
}
// CMYKtoRGB converts CMYK values to RGB Values.
func CMYKtoRGB(cmyk CMYK) RGB {
	c := float64(cmyk.C) / 100
	m := float64(cmyk.M) / 100
	y := float64(cmyk.Y) / 100
	k := float64(cmyk.K) / 100
	red := math.Round(255 * (1 - c) * (1 - k))
	green := math.Round(255 * (1 - m) * (1 - k))
	blue := math.Round(255 * (1 - y) * (1 - k))
	return RGB{int(red), int(green), int(blue)}
}
//RGBtoTYPE conversion functions
// RGBtoHSV converts RGB values to HSV values.
func RGBtoHSV(rgb RGB) HSV {
	r := float64(rgb.R) / 255
	g := float64(rgb.G) / 255
	b := float64(rgb.B) / 255
	cmax := math.Max(r, math.Max(g, b))
	cmin := math.Min(r, math.Min(g, b))
	d := cmax - cmin
	// Hue depends on which channel dominates; d == 0 means an achromatic
	// (grey) color, so hue is defined as 0.
	var h float64
	switch {
	case d == 0:
		h = 0
	case cmax == r:
		h = math.Round(60 * math.Mod((g-b)/d, 6))
	case cmax == g:
		h = math.Round(60 * ((b-r)/d + 2))
	default:
		h = math.Round(60 * ((r-g)/d + 4))
	}
	if h < 0 {
		h += 360
	}
	s := 0.0
	if cmax != 0 {
		s = math.Round(d / cmax * 100)
	}
	v := math.Round(cmax * 100)
	return HSV{int(h), int(s), int(v)}
}
// RGBtoHSL converts RGB values to HSL values.
func RGBtoHSL(rgb RGB) HSL {
	return HSVtoHSL(RGBtoHSV(rgb))
}

// RGBtoHex converts RGB values to a hexadecimal value in a string
// (six digits, zero-padded, no leading '#').
func RGBtoHex(rgb RGB) Hex {
	packed := rgb.R*65536 + rgb.G*256 + rgb.B
	hex := strconv.FormatInt(int64(packed), 16)
	for len(hex) < 6 {
		hex = "0" + hex
	}
	return Hex(hex)
}

// RGBtoDecimal converts RGB values to a decimal value (R<<16 | G<<8 | B).
func RGBtoDecimal(rgb RGB) Decimal {
	return Decimal(rgb.R*65536 + rgb.G*256 + rgb.B)
}
// RGBtoCMYK converts RGB values to CMYK values.
func RGBtoCMYK(rgb RGB) CMYK {
	r := float64(rgb.R) / 255
	g := float64(rgb.G) / 255
	b := float64(rgb.B) / 255
	// Black is one minus the brightest channel.
	k := 1 - math.Max(r, math.Max(g, b))
	var c, m, y float64
	if k != 1 {
		c = (1 - r - k) / (1 - k)
		m = (1 - g - k) / (1 - k)
		y = (1 - b - k) / (1 - k)
	}
	return CMYK{
		int(math.Round(c * 100)),
		int(math.Round(m * 100)),
		int(math.Round(y * 100)),
		int(math.Round(k * 100)),
	}
}
// Other type conversions using RGB as a standard (except HSVtoHSL for
// simplicity and convenience)

// HSVtoHSL converts HSV values to HSL values.
func HSVtoHSL(hsv HSV) HSL {
	s := float64(hsv.S) / 100
	v := float64(hsv.V) / 100
	l := (2 - s) * v / 2
	if l != 0 {
		if l == 1 {
			s = 0
		} else if l < 0.5 {
			s = s * v / (l * 2)
		} else {
			s = s * v / (2 - l*2)
		}
	}
	return HSL{hsv.H, int(math.Round(s * 100)), int(math.Round(l * 100))}
}
// The remaining conversions compose the RGB-based primitives above: each one
// converts its input to RGB first and then to the target representation.

// HSVtoCMYK converts HSV values to CMYK values.
func HSVtoCMYK(hsv HSV) CMYK {
	return RGBtoCMYK(HSVtoRGB(hsv))
}

// HSVtoHex converts HSV values to a hexadecimal string.
func HSVtoHex(hsv HSV) Hex {
	return RGBtoHex(HSVtoRGB(hsv))
}

// HSVtoDecimal converts HSV values to a decimal value.
func HSVtoDecimal(hsv HSV) Decimal {
	return RGBtoDecimal(HSVtoRGB(hsv))
}

// HSLtoHSV converts HSL values to HSV values.
func HSLtoHSV(hsl HSL) HSV {
	return RGBtoHSV(HSLtoRGB(hsl))
}

// HSLtoCMYK converts HSL values to CMYK values.
func HSLtoCMYK(hsl HSL) CMYK {
	return RGBtoCMYK(HSLtoRGB(hsl))
}

// HSLtoHex converts HSL values to a hexadecimal string.
func HSLtoHex(hsl HSL) Hex {
	return RGBtoHex(HSLtoRGB(hsl))
}

// HSLtoDecimal converts HSL values to a decimal value.
func HSLtoDecimal(hsl HSL) Decimal {
	return RGBtoDecimal(HSLtoRGB(hsl))
}

// HextoHSV converts a hexadecimal string to HSV values.
func HextoHSV(hex Hex) HSV {
	return RGBtoHSV(HextoRGB(hex))
}

// HextoHSL converts a hexadecimal string to HSL values.
func HextoHSL(hex Hex) HSL {
	return RGBtoHSL(HextoRGB(hex))
}

// HextoCMYK converts a hexadecimal string to CMYK values.
func HextoCMYK(hex Hex) CMYK {
	return RGBtoCMYK(HextoRGB(hex))
}

// HextoDecimal converts a hexadecimal string to a decimal value.
func HextoDecimal(hex Hex) Decimal {
	return RGBtoDecimal(HextoRGB(hex))
}

// DecimaltoHSV converts a decimal value to HSV values.
func DecimaltoHSV(decimal Decimal) HSV {
	return RGBtoHSV(DecimaltoRGB(decimal))
}

// DecimaltoHSL converts a decimal value to HSL values.
func DecimaltoHSL(decimal Decimal) HSL {
	return RGBtoHSL(DecimaltoRGB(decimal))
}

// DecimaltoCMYK converts a decimal value to CMYK values.
func DecimaltoCMYK(decimal Decimal) CMYK {
	return RGBtoCMYK(DecimaltoRGB(decimal))
}

// DecimaltoHex converts a decimal value to a hexadecimal string.
func DecimaltoHex(decimal Decimal) Hex {
	return RGBtoHex(DecimaltoRGB(decimal))
}

// CMYKtoHSV converts CMYK values to HSV values.
func CMYKtoHSV(cmyk CMYK) HSV {
	return RGBtoHSV(CMYKtoRGB(cmyk))
}

// CMYKtoHSL converts CMYK values to HSL values.
func CMYKtoHSL(cmyk CMYK) HSL {
	return RGBtoHSL(CMYKtoRGB(cmyk))
}

// CMYKtoHex converts CMYK values to a hexadecimal string.
func CMYKtoHex(cmyk CMYK) Hex {
	return RGBtoHex(CMYKtoRGB(cmyk))
}

// CMYKtoDecimal converts CMYK values to a decimal value.
func CMYKtoDecimal(cmyk CMYK) Decimal {
	return RGBtoDecimal(CMYKtoRGB(cmyk))
}
// Ansi Conversions
// RGBtoAnsi converts RGB values to an Ansi escape code
func RGBtoAnsi(rgb RGB) Ansi {
str := "\x1b[38;2;" + strconv.FormatInt(int64(rgb.R), 10) + ";" + strconv.FormatInt(int64(rgb.G), 10) + ";" + strconv.FormatInt(int64(rgb.B), 10) + "m"
return Ansi(str)
}
// HSVtoAnsi converts HSV values to an Ansi escape code
func HSVtoAnsi(hsv HSV) Ansi {
rgb := HSVtoRGB(hsv)
str := "\x1b[38;2;" + strconv.FormatInt(int64(rgb.R), 10) + ";" + strconv.FormatInt(int64(rgb.G), 10) + ";" + strconv.FormatInt(int64(rgb.B), 10) + "m"
return Ansi(str)
}
// HSLtoAnsi converts HSL values to an Ansi escape code
func HSLtoAnsi(hsl HSL) Ansi {
rgb := HSLtoRGB(hsl)
str := "\x1b[38;2;" + strconv.FormatInt(int64(rgb.R), 10) + ";" + strconv.FormatInt(int64(rgb.G), 10) + ";" + strconv.FormatInt(int64(rgb.B), 10) + "m"
return Ansi(str)
}
// DecimaltoAnsi converts a decimal value to an Ansi escape code
func DecimaltoAnsi(decimal Decimal) Ansi {
rgb := DecimaltoRGB(decimal)
str := "\x1b[38;2;" + strconv.FormatInt(int64(rgb.R), 10) + ";" + strconv.FormatInt(int64(rgb.G), 10) + ";" + strconv.FormatInt(int64(rgb.B), 10) + "m"
return Ansi(str)
}
// HextoAnsi converts a hexadecimal string to an Ansi escape code
func HextoAnsi(hex Hex) Ansi {
rgb := HextoRGB(hex)
str := "\x1b[38;2;" + strconv.FormatInt(int64(rgb.R), 10) + ";" + strconv.FormatInt(int64(rgb.G), 10) + ";" + strconv.FormatInt(int64(rgb.B), 10) + "m"
return Ansi(str)
}
// CMYKtoAnsi converts CMYK values to an Ansi escape code
func CMYKtoAnsi(cmyk CMYK) Ansi {
rgb := CMYKtoRGB(cmyk)
str := "\x1b[38;2;" + strconv.FormatInt(int64(rgb.R), 10) + ";" + strconv.FormatInt(int64(rgb.G), 10) + ";" + strconv.FormatInt(int64(rgb.B), 10) + "m"
return Ansi(str)
} | types.go | 0.83825 | 0.601418 | types.go | starcoder |
package fsm
import (
"bytes"
"fmt"
)
const highlightingColor = "#00AA00"
// MermaidDiagramType identifies which style of Mermaid diagram to generate.
type MermaidDiagramType string

const (
	// FlowChart the diagram type for output in flowchart style (https://mermaid-js.github.io/mermaid/#/flowchart) (including current state)
	FlowChart MermaidDiagramType = "flowChart"
	// StateDiagram the diagram type for output in stateDiagram style (https://mermaid-js.github.io/mermaid/#/stateDiagram)
	StateDiagram MermaidDiagramType = "stateDiagram"
)
// VisualizeForMermaidWithGraphType outputs a visualization of a FSM in Mermaid
// format as specified by the graphType. Unknown graph types yield an error.
func VisualizeForMermaidWithGraphType(fsm *FSM, graphType MermaidDiagramType) (string, error) {
	switch graphType {
	case FlowChart:
		return visualizeForMermaidAsFlowChart(fsm), nil
	case StateDiagram:
		return visualizeForMermaidAsStateDiagram(fsm), nil
	}
	return "", fmt.Errorf("unknown MermaidDiagramType: %s", graphType)
}
// visualizeForMermaidAsStateDiagram renders the FSM as a Mermaid
// stateDiagram-v2: an initial arrow into the current state, then one line
// per transition in sorted order.
func visualizeForMermaidAsStateDiagram(fsm *FSM) string {
	var out bytes.Buffer
	out.WriteString("stateDiagram-v2\n")
	out.WriteString(fmt.Sprintln(` [*] -->`, fsm.current))
	for _, key := range getSortedTransitionKeys(fsm.transitions) {
		dst := fsm.transitions[key]
		fmt.Fprintf(&out, ` %s --> %s: %s`+"\n", key.src, dst, key.event)
	}
	return out.String()
}
// visualizeForMermaidAsFlowChart outputs a visualization of a FSM in Mermaid format (including highlighting of current state).
// The output is assembled in four sections: the graph header, node
// declarations for every state, labeled edges for every transition, and a
// style line highlighting the current state.
func visualizeForMermaidAsFlowChart(fsm *FSM) string {
	var buf bytes.Buffer
	sortedTransitionKeys := getSortedTransitionKeys(fsm.transitions)
	sortedStates, statesToIDMap := getSortedStates(fsm.transitions)
	writeFlowChartGraphType(&buf)
	writeFlowChartStates(&buf, sortedStates, statesToIDMap)
	writeFlowChartTransitions(&buf, fsm.transitions, sortedTransitionKeys, statesToIDMap)
	writeFlowChartHightlightCurrent(&buf, fsm.current, statesToIDMap)
	return buf.String()
}
func writeFlowChartGraphType(buf *bytes.Buffer) {
buf.WriteString("graph LR\n")
}
func writeFlowChartStates(buf *bytes.Buffer, sortedStates []string, statesToIDMap map[string]string) {
for _, state := range sortedStates {
buf.WriteString(fmt.Sprintf(` %s[%s]`, statesToIDMap[state], state))
buf.WriteString("\n")
}
buf.WriteString("\n")
}
// writeFlowChartTransitions emits one labeled edge per transition, in the
// given sorted order, mapping source and target state names to their node IDs,
// followed by a separating blank line.
func writeFlowChartTransitions(buf *bytes.Buffer, transitions map[eKey]string, sortedTransitionKeys []eKey, statesToIDMap map[string]string) {
	for _, key := range sortedTransitionKeys {
		dst := transitions[key]
		fmt.Fprintf(buf, ` %s --> |%s| %s`+"\n", statesToIDMap[key.src], key.event, statesToIDMap[dst])
	}
	buf.WriteString("\n")
}
func writeFlowChartHightlightCurrent(buf *bytes.Buffer, current string, statesToIDMap map[string]string) {
buf.WriteString(fmt.Sprintf(` style %s fill:%s`, statesToIDMap[current], highlightingColor))
buf.WriteString("\n")
} | mermaid_visualizer.go | 0.672224 | 0.602442 | mermaid_visualizer.go | starcoder |
package discovery
import "fmt"
// Sensor describes an MQTT sensor entity for Home Assistant MQTT discovery.
// Each field comment documents the corresponding discovery-payload option and
// its Home Assistant default.
type Sensor struct {
	// A list of MQTT topics subscribed to receive availability (online/offline) updates. Must not be used together with `availability_topic`
	// Default: <no value>
	Availability []Availability `json:"availability,omitempty"`
	// When `availability` is configured, this controls the conditions needed to set the entity to `available`. Valid entries are `all`, `any`, and `latest`. If set to `all`, `payload_available` must be received on all configured availability topics before the entity is marked as online. If set to `any`, `payload_available` must be received on at least one configured availability topic before the entity is marked as online. If set to `latest`, the last `payload_available` or `payload_not_available` received on any configured availability topic controls the availability
	// Default: latest
	AvailabilityMode string `json:"availability_mode,omitempty"`
	// Defines a [template](/docs/configuration/templating/#processing-incoming-data) to extract device's availability from the `availability_topic`. To determine the devices's availability result of this template will be compared to `payload_available` and `payload_not_available`
	// Default: <no value>
	AvailabilityTemplate string `json:"availability_template,omitempty"`
	// The MQTT topic subscribed to receive availability (online/offline) updates
	// Default: <no value>
	AvailabilityTopic string `json:"availability_topic,omitempty"`
	// Information about the device this sensor is a part of to tie it into the [device registry](https://developers.home-assistant.io/docs/en/device_registry_index.html). Only works through [MQTT discovery](/docs/mqtt/discovery/) and when [`unique_id`](#unique_id) is set. At least one of identifiers or connections must be present to identify the device
	// Default: <no value>
	Device *Device `json:"device,omitempty"`
	// The [type/class](/integrations/sensor/#device-class) of the sensor to set the icon in the frontend
	// Default: None
	DeviceClass string `json:"device_class,omitempty"`
	// Flag which defines if the entity should be enabled when first added
	// Default: true
	EnabledByDefault bool `json:"enabled_by_default,omitempty"`
	// The encoding of the payload received at `state_topic` and availability topics `availability_topic` and `topic`. Set to `""` to disable decoding
	// Default: utf-8
	Encoding string `json:"encoding,omitempty"`
	// The [category](https://developers.home-assistant.io/docs/core/entity#generic-properties) of the entity
	// Default: None
	EntityCategory string `json:"entity_category,omitempty"`
	// Defines the number of seconds after the sensor's state expires, if it's not updated. After expiry, the sensor's state becomes `unavailable`
	// Default: 0
	ExpireAfter int `json:"expire_after,omitempty"`
	// Sends update events even if the value hasn't changed. Useful if you want to have meaningful value graphs in history
	// Default: false
	ForceUpdate bool `json:"force_update,omitempty"`
	// [Icon](/docs/configuration/customizing-devices/#icon) for the entity
	// Default: <no value>
	Icon string `json:"icon,omitempty"`
	// Defines a [template](/docs/configuration/templating/#processing-incoming-data) to extract the JSON dictionary from messages received on the `json_attributes_topic`
	// Default: <no value>
	JsonAttributesTemplate string `json:"json_attributes_template,omitempty"`
	// The MQTT topic subscribed to receive a JSON dictionary payload and then set as sensor attributes. Implies `force_update` of the current sensor state when a message is received on this topic
	// Default: <no value>
	JsonAttributesTopic string `json:"json_attributes_topic,omitempty"`
	// Defines a [template](/docs/configuration/templating/#processing-incoming-data) to extract the last_reset. Available variables: `entity_id`. The `entity_id` can be used to reference the entity's attributes
	// Default: <no value>
	LastResetValueTemplate string `json:"last_reset_value_template,omitempty"`
	// The name of the MQTT sensor
	// Default: MQTT Sensor
	Name string `json:"name,omitempty"`
	// Used instead of `name` for automatic generation of `entity_id`
	// Default: <no value>
	ObjectId string `json:"object_id,omitempty"`
	// The payload that represents the available state
	// Default: online
	PayloadAvailable string `json:"payload_available,omitempty"`
	// The payload that represents the unavailable state
	// Default: offline
	PayloadNotAvailable string `json:"payload_not_available,omitempty"`
	// The maximum QoS level of the state topic
	// Default: 0
	Qos int `json:"qos,omitempty"`
	// The [state_class](https://developers.home-assistant.io/docs/core/entity/sensor#available-state-classes) of the sensor
	// Default: None
	StateClass string `json:"state_class,omitempty"`
	// The MQTT topic subscribed to receive sensor values (required; no omitempty)
	// Default: <no value>
	StateTopic string `json:"state_topic"`
	// An ID that uniquely identifies this sensor. If two sensors have the same unique ID, Home Assistant will raise an exception
	// Default: <no value>
	UniqueId string `json:"unique_id,omitempty"`
	// Defines the units of measurement of the sensor, if any
	// Default: <no value>
	UnitOfMeasurement string `json:"unit_of_measurement,omitempty"`
	// Defines a [template](/docs/configuration/templating/#processing-incoming-data) to extract the value. Available variables: `entity_id`. The `entity_id` can be used to reference the entity's attributes
	// Default: <no value>
	ValueTemplate string `json:"value_template,omitempty"`
}
// AnnounceTopic returns the topic to announce the discoverable Sensor
// Topic has the format below:
// <discovery_prefix>/<component>/<object_id>/config
// 'object_id' is either the UniqueId, the Name, or a hash of the Sensor
func (d *Sensor) AnnounceTopic(prefix string) string {
topicFormat := "%s/sensor/%s/config"
objectID := ""
switch {
case d.UniqueId != "":
objectID = d.UniqueId
case d.Name != "":
objectID = d.Name
default:
objectID = hash(d)
}
return fmt.Sprintf(topicFormat, prefix, objectID)
} | sensor.go | 0.813164 | 0.422028 | sensor.go | starcoder |
package golassert
import (
"fmt"
"reflect"
)
/*
AssertType asserts that expected and result have the same dynamic type,
panicking with a descriptive message when they differ.
*/
func AssertType(expected interface{}, result interface{}) {
	expectedType := reflect.TypeOf(expected)
	resultType := reflect.TypeOf(result)
	if expectedType != resultType {
		// Name the actual function in the message (it previously said
		// [AssertEqual], copied from the sibling assertion).
		err := "Error: [AssertType] Mismatched Types"
		err = fmt.Sprintf("%s\nExpected Value Type: %v\nResult: %v", err, expectedType, resultType)
		panic(err)
	}
}
// Equal is a convenience alias for AssertEqual.
func Equal(expected interface{}, result interface{}) {
	AssertEqual(expected, result)
}

// EqualStringArray is a convenience alias for AssertEqualStringArray.
// Both arguments must be []string or the type assertion panics.
func EqualStringArray(expected interface{}, result interface{}) {
	AssertEqualStringArray(expected.([]string), result.([]string))
}

// EqualStringMap is a convenience alias for AssertEqualStringMap.
// Both arguments must be map[string]string or the type assertion panics.
func EqualStringMap(expected interface{}, result interface{}) {
	AssertEqualStringMap(expected.(map[string]string), result.(map[string]string))
}
/*
AssertEqual asserts if expected result is same as returned result, panicking
with a descriptive message on any mismatch. Only the listed comparable scalar
types are supported; anything else panics as unhandled.
*/
func AssertEqual(expected interface{}, result interface{}) {
	AssertType(expected, result)
	if expected == nil && result == nil {
		return
	}
	switch result.(type) {
	case string, uint, uint64, int, int64, error, bool:
		if expected != result {
			// Fill in the previously empty [] placeholder with the function name.
			panic(fmt.Sprintf("Error: [AssertEqual] Mismatched Values\nExpected value: %v\nResult: %v", expected, result))
		}
	default:
		panic("Error: AssertEqual doesn't handles this type yet.")
	}
}
/*
AssertEqualStringArray asserts two string arrays, treating them as unordered
multisets: same length and same elements with the same multiplicities.
*/
func AssertEqualStringArray(expected []string, result []string) {
	AssertType(expected, result)
	if expected == nil && result == nil {
		return
	}
	if len(expected) != len(result) {
		panic(fmt.Sprintf("Error: [AssertEqualStringArray] Different count of items\nExpected Value: %v\nResult: %v", expected, result))
	}
	// Count occurrences so duplicates are compared correctly; the previous
	// per-element membership scan accepted e.g. [a,a] vs [a,b].
	counts := make(map[string]int, len(result))
	for _, item := range result {
		counts[item]++
	}
	for _, item := range expected {
		if counts[item] == 0 {
			panic(fmt.Sprintf("Error: [AssertEqualStringArray] Item missing: %v.\nExpected Value: %v\nResult: %v", item, expected, result))
		}
		counts[item]--
	}
}
/*
AssertEqualStringMap asserts two string:string maps.
*/
func AssertEqualStringMap(expected map[string]string, result map[string]string) {
AssertType(expected, result)
if expected == nil && result == nil {
return
}
if len(expected) != len(result) {
panic(fmt.Sprintf("Error: [] Different count of items\nExpected Value: %v\nResult: %v", expected, result))
}
for expectedKey := range expected {
if result[expectedKey] != expected[expectedKey] {
panic(fmt.Sprintf("Error: [] Item missing: %v.\nExpected Value: %v\nResult: %v", expected[expectedKey], expected, result))
}
}
} | golassert/equal.go | 0.583203 | 0.651064 | equal.go | starcoder |
package preprocessor
import (
"bytes"
"encoding/json"
"errors"
"github.com/armory/dinghy/pkg/git"
"strconv"
"strings"
"text/template"
"unicode"
)
// parseWhitespace consumes a run of whitespace characters and collapses it
// into a single space in the output.
func parseWhitespace(it *iterator) string {
	for !it.end() {
		if !unicode.IsSpace(it.get()) {
			break
		}
		it.pos++
	}
	return " "
}
// parseString consumes a double-quoted string starting at the current
// position (which must be the opening '"') and returns it, quotes included.
func parseString(it *iterator) string {
	begin := it.pos
	it.pos++
	for !it.end() && it.get() != '"' {
		if it.get() == '\\' {
			// Skip the escaped character so an escaped quote does not end the string.
			it.pos++
		}
		it.pos++
	}
	it.pos++
	return it.slice(begin)
}
// parseToken consumes a run of non-whitespace characters and returns it.
// It stops just before a closing "}}" so the action delimiter is not
// swallowed into the token: when the previous character was '}' and the
// current one is '}', the position is backed up one step before breaking.
func parseToken(it *iterator) string {
	begin := it.pos
	var prevstr string
	for !it.end() && !unicode.IsSpace(it.get()) {
		if prevstr+string(it.get()) == "}}" {
			it.pos--
			break
		}
		prevstr = string(it.get())
		it.pos++
	}
	return it.slice(begin)
}
// parseJSONObject consumes a balanced JSON object or array starting at the
// current position (which must be '{' or '[') and returns it as a quoted
// string literal (strconv.Quote), ready to be substituted back into the
// template as a single string argument.
// A stack of open brackets tracks nesting; string contents are skipped via
// parseString so brackets inside strings do not affect the balance.
func parseJSONObject(it *iterator) string {
	begin := it.pos
	stack := []rune{it.get()}
	it.pos++
	for !it.end() && len(stack) > 0 {
		switch it.get() {
		case '"':
			parseString(it)
		case '[', '{':
			stack = append(stack, it.get())
			it.pos++
		case ']':
			if stack[len(stack)-1] == '[' {
				stack = stack[:len(stack)-1]
			}
			it.pos++
		case '}':
			if stack[len(stack)-1] == '{' {
				stack = stack[:len(stack)-1]
			}
			it.pos++
		default:
			it.pos++
		}
	}
	return strconv.Quote(it.slice(begin))
}
// isElvisOperator reports whether the iterator is positioned at an elvis
// operator: the two characters "?:" followed by a whitespace character.
func isElvisOperator(it *iterator) bool {
	if it.pos+2 >= it.length {
		return false
	}
	if it.text[it.pos:it.pos+2] != "?:" {
		return false
	}
	return unicode.IsSpace(rune(it.text[it.pos+2]))
}
// parseElvisOperator consumes the "?:" plus any following whitespace and
// returns the empty string: the elvis operator exists only for readability
// and is dropped from the pre-processed output.
func parseElvisOperator(it *iterator) string {
	it.pos += 2
	for !it.end() {
		if !unicode.IsSpace(it.get()) {
			break
		}
		it.pos++
	}
	return ""
}
// Preprocess makes a first pass at the dinghyfile and stringifies the JSON
// args to a module: inside every {{ ... }} action, whitespace runs are
// collapsed to a single space, JSON objects/arrays become quoted string
// literals, and elvis operators (?:) are dropped. Text outside actions is
// left untouched. An error is returned when an action is missing its
// closing '}}'.
func Preprocess(text string) (string, error) {
	length := len(text)
	for i := 0; i < length-1 && length >= 2; i++ {
		if text[i:i+2] != "{{" {
			continue
		}
		// Found an action opener: tokenize everything up to the matching '}}'.
		it := newIterator(text)
		it.pos = i + 2
		parts := []string{"{{"}
		for !it.end() {
			if it.pos+2 > length {
				return text, errors.New("Index out of bounds while pre-processing template action, possibly a missing '}}'")
			}
			if it.text[it.pos:it.pos+2] == "}}" {
				parts = append(parts, "}}")
				it.pos += 2
				break
			}
			ch := it.get()
			var part string
			if unicode.IsSpace(ch) {
				part = parseWhitespace(it)
			} else if ch == '"' {
				part = parseString(it)
			} else if ch == '{' || ch == '[' {
				part = parseJSONObject(it)
			} else if isElvisOperator(it) {
				part = parseElvisOperator(it)
			} else {
				part = parseToken(it)
			}
			parts = append(parts, part)
		}
		// Recurse on the remainder so later actions are processed too.
		remaining, err := Preprocess(text[it.pos:])
		if err != nil {
			return text, err
		}
		return text[:i] + strings.Join(parts, "") + remaining, nil
	}
	return text, nil
}
// ParseGlobalVars returns the map of global variables in the dinghyfile.
// Template actions are blanked out by removeModules first so the remaining
// text parses as plain JSON; the "globals" key is returned when present,
// otherwise an empty map.
func ParseGlobalVars(input string, gitInfo git.GitInfo) (interface{}, error) {
	parsed := map[string]interface{}{}
	if err := json.Unmarshal([]byte(removeModules(input, gitInfo)), &parsed); err != nil {
		return nil, err
	}
	if globals, ok := parsed["globals"]; ok {
		return globals, nil
	}
	return map[string]interface{}{}, nil
}
// dummySubstitute stands in for the module-rendering template funcs; it
// returns a tiny valid JSON object so the blanked-out template still parses.
func dummySubstitute(args ...interface{}) string {
	return `{ "a": "b" }`
}

// dummyKV stands in for appModule; it returns a bare key/value pair.
func dummyKV(args ...interface{}) string {
	return `"a": "b"`
}

// dummyVar stands in for var/pipelineID,
// since {{ var ... }} can be a string or an int!
func dummyVar(args ...interface{}) string {
	return "1"
}

// dummySlice stands in for makeSlice; it returns an empty string slice.
func dummySlice(args ...interface{}) []string {
	return make([]string, 0)
}
// removeModules replaces all template function calls ({{ ... }}) in the dinghyfile with
// the JSON: { "a": "b" } so that we can extract the global vars using JSON.Unmarshal
func removeModules(input string, gitInfo git.GitInfo) string {
	// Map every known template function to a harmless stub that emits
	// syntactically valid JSON in its place.
	funcMap := template.FuncMap{
		"module": dummySubstitute,
		"local_module": dummySubstitute,
		"appModule": dummyKV,
		"var": dummyVar,
		"pipelineID": dummyVar,
		"makeSlice": dummySlice,
		"if": dummySlice,
	}
	tmpl, err := template.New("blank-out").Funcs(funcMap).Parse(input)
	if err != nil {
		// Best-effort: on parse failure return the input unmodified.
		return input
	}
	buf := new(bytes.Buffer)
	err = tmpl.Execute(buf, gitInfo)
	if err != nil {
		// Best-effort: on execution failure return the input unmodified.
		return input
	}
	return buf.String()
}
package contnet
import (
"github.com/asaskevich/EventBus"
"sort"
"sync"
)
// Trend couples a topic with its accumulated popularity score.
type Trend struct {
	Topic      Topic
	Popularity float64
}

// trendPopularityCriteria orders trends from most to least popular.
var trendPopularityCriteria = func(t1, t2 *Trend) bool {
	return t1.Popularity > t2.Popularity
}

// TrendBy is a comparison function defining an ordering between trend objects.
type TrendBy func(t1, t2 *Trend) bool

// Sort sorts the argument slice in place according to the receiver's ordering.
func (trendBy TrendBy) Sort(trends []*Trend) {
	sort.Sort(&trendSorter{trends: trends, trendBy: trendBy})
}

// trendSorter adapts a trend slice plus an ordering to sort.Interface.
type trendSorter struct {
	trends  []*Trend
	trendBy func(t1, t2 *Trend) bool
}

// Len is part of sort.Interface.
func (ts *trendSorter) Len() int {
	return len(ts.trends)
}

// Swap is part of sort.Interface.
func (ts *trendSorter) Swap(i, j int) {
	ts.trends[j], ts.trends[i] = ts.trends[i], ts.trends[j]
}

// Less is part of sort.Interface; it delegates to the supplied ordering.
func (ts *trendSorter) Less(i, j int) bool {
	return ts.trendBy(ts.trends[i], ts.trends[j])
}
// TrendDescription is a flattened, serializable view of a Trend: the topic's
// two keywords plus the popularity score.
type TrendDescription struct {
	Keyword1 Keyword
	Keyword2 Keyword
	Popularity float64
}
// Describe converts the trend into its flattened TrendDescription form.
func (trend *Trend) Describe() *TrendDescription {
	k1, k2 := trend.Topic.GetKeywords()
	return &TrendDescription{
		Keyword1: k1,
		Keyword2: k2,
		Popularity: trend.Popularity,
	}
}
// TrendStore tracks trending topics: cache maps each topic to its trend, and
// trends keeps the same objects sorted by popularity. The embedded RWMutex
// guards both containers.
type TrendStore struct {
	sync.RWMutex
	bus *EventBus.EventBus
	cache map[Topic]*Trend
	trends []*Trend
}
// TrendStoreFactory creates TrendStore instances.
type TrendStoreFactory struct{}
// New builds an empty TrendStore and subscribes its Register/Unregister
// handlers to topic mention events on the given bus.
// NOTE(review): the final SubscribeAsync argument (false) is presumably the
// EventBus "transactional" flag -- confirm against the EventBus docs.
func (factory TrendStoreFactory) New(bus *EventBus.EventBus) *TrendStore {
	store := &TrendStore{
		bus: bus,
		cache: map[Topic]*Trend{},
		trends: []*Trend{},
	}
	bus.SubscribeAsync("topics:mentioned", store.Register, false)
	bus.SubscribeAsync("topics:unmentioned", store.Unregister, false)
	return store
}
// Describe returns a description of every tracked trend, in the store's
// current (popularity-sorted) order.
func (store *TrendStore) Describe() []*TrendDescription {
	store.RLock()
	defer store.RUnlock()
	out := []*TrendDescription{}
	for i := 0; i < len(store.trends); i++ {
		out = append(out, store.trends[i].Describe())
	}
	return out
}
// Snapshot persists the trends slice to path/filename via __snapshot.
func (store *TrendStore) Snapshot(path, filename string) error {
	store.RLock()
	defer store.RUnlock()
	return __snapshot(path, filename, &store.trends)
}
// RestoreFromSnapshot loads the trends slice from disk and rebuilds the
// topic->trend cache from it.
// NOTE(review): the cache is filled even when the restore returned an error
// -- confirm __restoreFromSnapshot leaves trends empty on failure.
func (store *TrendStore) RestoreFromSnapshot(path, filename string) error {
	store.Lock()
	defer store.Unlock()
	_, err := __restoreFromSnapshot(path, filename, &store.trends)
	// fill the cache
	for i := 0; i < len(store.trends); i++ {
		store.cache[store.trends[i].Topic] = store.trends[i]
	}
	return err
}
// GetTopN returns (up to) the n most popular topics.
// NOTE(review): the returned pointers alias the store's internal Trend
// structs, so callers observe later mutations -- confirm this is intended.
func (store *TrendStore) GetTopN(n int) Topics {
	store.RLock()
	defer store.RUnlock()
	// Clamp n to the number of tracked trends.
	bound := n
	if n > len(store.trends) {
		bound = len(store.trends)
	}
	topNTrends := store.trends[0:bound]
	out := Topics{}
	for i := 0; i < len(topNTrends); i++ {
		out = append(out, &topNTrends[i].Topic)
	}
	return out
}
// Register records a mention of each topic, adding popularity to known
// topics and creating a new trend entry for unseen ones, then re-sorts the
// trends slice by popularity.
func (store *TrendStore) Register(topics Topics, popularity float64) {
	store.Lock()
	defer store.Unlock()
	// foreach topic
	for i := 0; i < len(topics); i++ {
		// check if topic is cached. If not cached, this is this topic's first mention..
		if trend, exists := store.cache[*topics[i]]; !exists {
			trend = &Trend{
				Topic: *topics[i],
				Popularity: popularity,
			}
			// Track the new trend in both the cache and the sorted slice.
			store.cache[*topics[i]] = trend
			store.trends = append(store.trends, trend)
		} else {
			trend.Popularity += popularity
		}
	}
	// sort trends
	TrendBy(trendPopularityCriteria).Sort(store.trends)
}
// Unregister subtracts popularity from each known topic, dropping any trend
// whose popularity falls to zero or below, then re-sorts the trends slice.
func (store *TrendStore) Unregister(topics Topics, popularity float64) {
	store.Lock()
	defer store.Unlock()
	// foreach topic
	for i := 0; i < len(topics); i++ {
		// check if topic is cached. If cached, decrease popularity
		if trend, exists := store.cache[*topics[i]]; exists {
			trend.Popularity -= popularity
			// remove if popularity becomes too low
			if trend.Popularity <= 0 {
				// delete from cache
				delete(store.cache, *topics[i])
				// delete from trends (linear scan for the matching topic)
				for j := 0; j < len(store.trends); j++ {
					if store.trends[j].Topic == *topics[i] {
						store.trends = append(store.trends[:j], store.trends[j+1:]...)
						break
					}
				}
			}
		}
	}
	// sort trends
	TrendBy(trendPopularityCriteria).Sort(store.trends)
}
package gmgmap
import (
"errors"
"math"
"math/rand"
)
// vec2 is a 2D integer coordinate.
type vec2 struct {
	x, y int
}

// rect is an axis-aligned rectangle with origin (x, y) and size (w, h).
type rect struct {
	x, y, w, h int
}

// IsAdjacent reports whether r and r2 share an edge, with their spans along
// that edge overlapping by strictly more than overlapSize cells.
func (r rect) IsAdjacent(r2 rect, overlapSize int) bool {
	// If left/right edges adjacent
	if r.x == r2.x+r2.w || r2.x == r.x+r.w {
		return r.y+overlapSize < r2.y+r2.h && r2.y+overlapSize < r.y+r.h
	}
	// If top/bottom edges adjacent
	if r.y == r2.y+r2.h || r2.y == r.y+r.h {
		return r.x+overlapSize < r2.x+r2.w && r2.x+overlapSize < r.x+r.w
	}
	return false
}

// Overlaps reports whether r and r2 intersect in a region of positive area
// (rectangles that merely touch do not overlap).
func (r rect) Overlaps(r2 rect) bool {
	if r.x >= r2.x+r2.w || r.x+r.w <= r2.x {
		return false
	}
	return r.y < r2.y+r2.h && r.y+r.h > r2.y
}

// isIn reports whether the point (x, y) lies inside r.
func (r rect) isIn(x, y int) bool {
	return x >= r.x && x < r.x+r.w && y >= r.y && y < r.y+r.h
}
// randomWalk returns a position one step away from (x, y) in a uniformly
// random direction, retrying until the chosen step stays within w×h bounds.
func randomWalk(x, y, w, h int) (int, int) {
	for {
		// 0=up, 1=right, 2=down, 3=left
		switch d := rand.Intn(4); d {
		case 0:
			if y > 0 {
				return x, y - 1
			}
		case 1:
			if x < w-1 {
				return x + 1, y
			}
		case 2:
			if y < h-1 {
				return x, y + 1
			}
		case 3:
			if x > 0 {
				return x - 1, y
			}
		}
	}
}
// imin returns the smaller of two ints.
func imin(i1, i2 int) int {
	if i2 < i1 {
		return i2
	}
	return i1
}

// imax returns the larger of two ints.
func imax(i1, i2 int) int {
	if i2 > i1 {
		return i2
	}
	return i1
}

// iclamp restricts v to the inclusive range [min, max].
func iclamp(v, min, max int) int {
	if v < min {
		return min
	}
	if v > max {
		return max
	}
	return v
}

// irand returns a uniformly random int in [min, max); when min == max it
// simply returns min.
func irand(min, max int) int {
	if max == min {
		return min
	}
	return min + rand.Intn(max-min)
}
// bspRoom is one node of a binary-space-partition tree: its rectangle, the
// indices of its parent and children (-1 when absent), its depth, and
// whether it was produced by a horizontal split.
type bspRoom struct {
	r rect
	parent int
	child1, child2 int
	level int
	horizontal bool
}
// bspRoomRoot returns a parentless, childless root node covering the whole
// width×height area.
func bspRoomRoot(width, height int) bspRoom {
	return bspRoom{rect{0, 0, width, height}, -1, -1, -1, 0, false}
}
// Split tries to divide the room into two children (i is the room's index,
// recorded as the children's parent). It refuses when the room is too small,
// and randomly declines when the room is already within maxRoomSize.
func (room *bspRoom) Split(i, minRoomSize, maxRoomSize int) (bspRoom, bspRoom, error) {
	// If the room is too small, then don't split
	// NOTE(review): the width term uses minRoomSize*2 but the height term
	// only minRoomSize -- asymmetric, possibly a bug; confirm the intent.
	if room.r.w-minRoomSize*2 < 0 && room.r.h-minRoomSize < 0 {
		return bspRoom{}, bspRoom{}, errors.New("room too small")
	}
	// If the room is small enough already, consider not splitting
	if room.r.w <= maxRoomSize && room.r.h <= maxRoomSize && rand.Intn(2) == 0 {
		return bspRoom{}, bspRoom{}, errors.New("room is small enough")
	}
	// If more than 2:1, split the long dimension, otherwise randomise
	// NOTE(review): "room.r.w*2 > room.r.h" is true for most aspect ratios,
	// which biases toward horizontal splits -- verify this matches the
	// comment's 2:1 intent.
	if room.r.w*2 > room.r.h || (room.r.h*2 < room.r.w && rand.Intn(2) == 0) {
		return room.SplitHorizontal(i, minRoomSize)
	}
	return room.SplitVertical(i, minRoomSize)
}
// Split rooms horizontally (left + right children); the split x offset is
// chosen at random while keeping both halves at least minRoomSize wide.
func (room *bspRoom) SplitHorizontal(i, minRoomSize int) (bspRoom, bspRoom, error) {
	r := room.r.w - minRoomSize*2
	var x int
	if r < 0 {
		return bspRoom{}, bspRoom{}, errors.New("room too small")
	} else if r == 0 {
		// Exactly enough space: the only valid split point.
		x = minRoomSize
	} else {
		x = rand.Intn(r) + minRoomSize
	}
	return bspRoom{rect{room.r.x, room.r.y, x, room.r.h}, i, -1, -1, room.level + 1, true},
		bspRoom{rect{room.r.x + x, room.r.y, room.r.w - x, room.r.h}, i, -1, -1, room.level + 1, true},
		nil
}
// Split rooms horizontally (top + bottom children); the split y offset is
// chosen at random while keeping both halves at least minRoomSize tall.
func (room *bspRoom) SplitVertical(i, minRoomSize int) (bspRoom, bspRoom, error) {
	r := room.r.h - minRoomSize*2
	var y int
	if r < 0 {
		return bspRoom{}, bspRoom{}, errors.New("room too small")
	} else if r == 0 {
		// Exactly enough space: the only valid split point.
		y = minRoomSize
	} else {
		y = rand.Intn(r) + minRoomSize
	}
	return bspRoom{rect{room.r.x, room.r.y, room.r.w, y}, i, -1, -1, room.level + 1, false},
		bspRoom{rect{room.r.x, room.r.y + y, room.r.w, room.r.h - y}, i, -1, -1, room.level + 1, false},
		nil
}
// IsLeaf reports whether the room has no children.
func (room *bspRoom) IsLeaf() bool {
	return room.child1 < 0 && room.child2 < 0
}
// Abs - absolute value, integer
func Abs(x int) int {
	if x >= 0 {
		return x
	}
	return -x
}

// manhattanDistance returns the L1 distance between (x1, y1) and (x2, y2).
func manhattanDistance(x1, y1, x2, y2 int) int {
	return Abs(x2-x1) + Abs(y2-y1)
}

// euclideanDistance returns the L2 distance between (x1, y1) and (x2, y2).
func euclideanDistance(x1, y1, x2, y2 int) float64 {
	dx := float64(x1 - x2)
	dy := float64(y1 - y2)
	return math.Sqrt(dx*dx + dy*dy)
}
package token
// LexItr represents an iterator of Lexemes. Idx points at the current item;
// the value -1 means "before the first item".
type LexItr struct {
	Items []Lexeme
	Idx int
}
// NewLexItr returns a new initialised LexItr positioned before the first item
// (call Next before Get).
func NewLexItr(items []Lexeme) *LexItr {
	return &LexItr{
		Items: items,
		Idx: -1,
	}
}
// More returns true if the end of iterator has not been reached yet.
func (itr *LexItr) More() bool {
	return itr.Idx+1 < len(itr.Items)
}
// Less returns true if the start of iterator has not been reached yet.
func (itr *LexItr) Less() bool {
	return itr.Idx > 0
}
// Get returns th current lexeme referenced by the iterators pointer.
// Panics when the iterator is still positioned before the first item.
func (itr *LexItr) Get() Lexeme {
	if itr.Idx < 0 {
		panic("Beyond start of iterator, call LexItr.Next first")
	}
	return itr.Items[itr.Idx]
}
// Back decrements the iterators index if the index is not referencing the point
// before the first item.
func (itr *LexItr) Back() {
	if itr.Idx != -1 {
		itr.Idx--
	}
}
// Next returns the next lexeme in the iterator incrementing the iterators
// index accordingly. If the end of the iterator has already been reached then
// a panic insues.
func (itr *LexItr) Next() Lexeme {
	if !itr.More() {
		panic("End of iterator reached, check using LexItr.More first")
	}
	itr.Idx++
	return itr.Items[itr.Idx]
}
// Prev returns the previous lexeme in the iterator decrementing the iterators
// index accordingly. If the start of the iterator has already been reached
// then a panic insues.
func (itr *LexItr) Prev() Lexeme {
	if !itr.Less() {
		panic("Start of iterator reached, check using LexItr.Less first")
	}
	itr.Idx--
	return itr.Items[itr.Idx]
}
// Peek returns the next lexeme in the iterator or the zero lexeme if the end
// of the iterator has been reached. Unlike Next, it never moves the index.
func (itr *LexItr) Peek() Lexeme {
	if !itr.More() {
		return Lexeme{}
	}
	return itr.Items[itr.Idx+1]
}
// Window returns the lexeme indicated by the iterators pointer along with the
// lexemes before and after it. If any index is out of bounds then the zero
// Lexeme is returned in their place.
func (itr *LexItr) Window() (prev, curr, next Lexeme) {
	if itr.Less() {
		prev = itr.Items[itr.Idx-1]
	}
	if itr.Idx >= 0 {
		curr = itr.Items[itr.Idx]
	}
	if itr.More() {
		next = itr.Items[itr.Idx+1]
	}
	return
}
// End returns the UTF8Pos in the last item, or the zero position when the
// iterator holds no items.
func (itr *LexItr) End() UTF8Pos {
	size := len(itr.Items)
	if size == 0 {
		return UTF8Pos{}
	}
	return itr.Items[size-1].End
}
package gubrak
import (
"errors"
"fmt"
"reflect"
)
// inspectFunc unwraps data (dereferencing one level of pointer if present)
// and returns its reflect value and type, setting *err when the unwrapped
// value is not a function.
// NOTE(review): on the error path the returned reflect.Type is nil --
// callers must check *err before using it.
func inspectFunc(err *error, data interface{}) (reflect.Value, reflect.Type) {
	var dataValue reflect.Value
	var dataValueType reflect.Type
	dataValue = reflect.ValueOf(data)
	if dataValue.Kind() == reflect.Ptr {
		dataValue = dataValue.Elem()
	}
	if dataValue.Kind() != reflect.Func {
		*err = errors.New("callback should be function")
		return dataValue, dataValueType
	}
	dataValueType = dataValue.Type()
	return dataValue, dataValueType
}
// inspectData returns data's reflect value, type, kind and element count
// (length for slices/arrays, key count for maps, 0 otherwise).
// NOTE(review): the kind is captured before the pointer dereference, so for
// a *[]T argument the reported kind is Ptr and the length stays 0 -- confirm
// whether pointer-to-collection inputs are expected here.
func inspectData(data interface{}) (reflect.Value, reflect.Type, reflect.Kind, int) {
	var dataValue reflect.Value
	var dataValueType reflect.Type
	var dataValueKind reflect.Kind
	dataValueLen := 0
	if data != nil {
		dataValue = reflect.ValueOf(data)
		dataValueType = dataValue.Type()
		dataValueKind = dataValue.Kind()
		if dataValueKind == reflect.Ptr {
			dataValue = dataValue.Elem()
		}
		// Count elements for the collection kinds.
		if dataValueKind == reflect.Slice || dataValueKind == reflect.Array {
			dataValueLen = dataValue.Len()
		} else if dataValueKind == reflect.Map {
			dataValueLen = len(dataValue.MapKeys())
		}
	}
	return dataValue, dataValueType, dataValueKind, dataValueLen
}
func makeSlice(valueType reflect.Type, args ...int) reflect.Value {
sliceLen := 0
sliceCap := 0
if len(args) > 0 {
sliceLen = args[0]
if len(args) > 1 {
sliceCap = args[1]
}
}
if valueType.Kind() == reflect.Array {
sliceUnaddresable := reflect.MakeSlice(reflect.SliceOf(valueType.Elem()), 0, 0)
sliceAddressable := reflect.New(sliceUnaddresable.Type())
sliceAddressable.Elem().Set(sliceUnaddresable)
return sliceAddressable.Elem()
}
return reflect.MakeSlice(valueType, sliceLen, sliceCap)
}
// validateFuncInputForSliceLoop checks that a slice-iteration callback takes
// either (element) or (element, index int), setting *err on violation.
// It always returns the callback's parameter count.
func validateFuncInputForSliceLoop(err *error, funcType reflect.Type, data reflect.Value) int {
	funcTypeNumIn := funcType.NumIn()
	if funcTypeNumIn == 0 || funcTypeNumIn >= 3 {
		*err = errors.New("callback must only have one or two parameters")
		return funcTypeNumIn
	}
	// 1st parameter must match the slice's element kind.
	if funcType.In(0).Kind() != data.Type().Elem().Kind() {
		*err = errors.New("callback 1st parameter's data type should be same with slice element data type")
		return funcTypeNumIn
	}
	if funcTypeNumIn == 2 {
		// Optional 2nd parameter is the element index.
		if funcType.In(1).Kind() != reflect.Int {
			*err = errors.New("callback 2nd parameter's data type should be int")
			return funcTypeNumIn
		}
	}
	return funcTypeNumIn
}
// validateFuncInputForSliceLoopWithoutIndex checks that the callback takes
// exactly one parameter matching the slice's element kind.
func validateFuncInputForSliceLoopWithoutIndex(err *error, funcType reflect.Type, data reflect.Value) {
	if funcType.NumIn() != 1 {
		*err = errors.New("callback must only have one parameters")
		return
	}
	if funcType.In(0).Kind() != data.Type().Elem().Kind() {
		*err = errors.New("callback parameter's data type should be same with slice data type")
	}
}
// validateFuncInputForCollectionLoop checks that a map-iteration callback
// takes either (value) or (value, key), setting *err on violation.
// It always returns the callback's parameter count.
func validateFuncInputForCollectionLoop(err *error, funcType reflect.Type, data reflect.Value) int {
	funcTypeNumIn := funcType.NumIn()
	if funcTypeNumIn == 0 || funcTypeNumIn >= 3 {
		*err = errors.New("callback must only have one or two parameters")
		return funcTypeNumIn
	}
	// 1st parameter must match the map's value kind.
	if funcType.In(0).Kind() != data.Type().Elem().Kind() {
		*err = errors.New("callback 1st parameter's data type should be same with map value data type")
		return funcTypeNumIn
	}
	if funcTypeNumIn == 2 {
		// Optional 2nd parameter must match the map's key kind.
		if funcType.In(1).Kind() != data.Type().Key().Kind() {
			*err = errors.New("callback 2nd parameter's data type should be same with map key type")
			return funcTypeNumIn
		}
	}
	return funcTypeNumIn
}
// validateFuncOutputNone sets *err unless the callback returns nothing.
func validateFuncOutputNone(err *error, funcType reflect.Type) {
	callbackTypeNumOut := funcType.NumOut()
	if callbackTypeNumOut != 0 {
		*err = errors.New("callback should not have return value")
	}
}
// validateFuncOutputOneVarDynamic sets *err unless the callback returns
// exactly one value (of any type); it returns the callback's result count.
func validateFuncOutputOneVarDynamic(err *error, funcType reflect.Type) int {
	callbackTypeNumOut := funcType.NumOut()
	if callbackTypeNumOut != 1 {
		*err = errors.New("callback return value should only be 1 variable")
		return callbackTypeNumOut
	}
	return callbackTypeNumOut
}
// validateFuncOutputOneVarBool checks the callback's return values: when
// isMust it must return exactly one bool; otherwise a bool return is
// optional but anything else is rejected. Returns the result count.
func validateFuncOutputOneVarBool(err *error, callbackType reflect.Type, isMust bool) int {
	isOptional := !isMust
	message := "callback return value should be one variable with bool type"
	if isOptional {
		message = "callback return value data type should be bool, ... or no return value at all"
	}
	callbackTypeNumOut := callbackType.NumOut()
	if callbackTypeNumOut == 1 {
		if callbackType.Out(0).Kind() != reflect.Bool {
			*err = errors.New(message)
			return callbackTypeNumOut
		}
	} else {
		if isOptional {
			// Zero results are fine when optional; more than one never is.
			if callbackTypeNumOut > 1 {
				*err = errors.New(message)
				return callbackTypeNumOut
			}
		} else {
			*err = errors.New(message)
			return callbackTypeNumOut
		}
	}
	return callbackTypeNumOut
}
// forEachSlice invokes eachCallback for every element of the slice, in order.
// NOTE(review): the wrapper ignores the `each` value handed to it and
// re-reads slice.Index(i) -- harmless but redundant; confirm before tidying.
func forEachSlice(slice reflect.Value, sliceLen int, eachCallback func(reflect.Value, int)) {
	forEachSliceStoppable(slice, sliceLen, func(each reflect.Value, i int) bool {
		eachDataValue := slice.Index(i)
		eachCallback(eachDataValue, i)
		return true
	})
}
// forEachSliceStoppable invokes eachCallback for each element until the
// callback returns false.
func forEachSliceStoppable(slice reflect.Value, sliceLen int, eachCallback func(reflect.Value, int) bool) {
	for i := 0; i < sliceLen; i++ {
		eachDataValue := slice.Index(i)
		shouldContinue := eachCallback(eachDataValue, i)
		if !shouldContinue {
			return
		}
	}
}
// forEachCollection invokes eachCallback with (value, key, position) for
// every key in keys.
func forEachCollection(collection reflect.Value, keys []reflect.Value, eachCallback func(reflect.Value, reflect.Value, int)) {
	forEachCollectionStoppable(collection, keys, func(value, key reflect.Value, i int) bool {
		eachCallback(value, key, i)
		return true
	})
}
// forEachCollectionStoppable invokes eachCallback for each key until the
// callback returns false.
func forEachCollectionStoppable(collection reflect.Value, keys []reflect.Value, eachCallback func(reflect.Value, reflect.Value, int) bool) {
	for i, key := range keys {
		shouldContinue := eachCallback(collection.MapIndex(key), key, i)
		if !shouldContinue {
			return
		}
	}
}
// callFuncSliceLoop calls the callback with (param) or (param, index)
// depending on its declared parameter count.
func callFuncSliceLoop(funcToCall, param reflect.Value, i int, numIn int) []reflect.Value {
	if numIn == 1 {
		return funcToCall.Call([]reflect.Value{param})
	}
	return funcToCall.Call([]reflect.Value{param, reflect.ValueOf(i)})
}
// callFuncCollectionLoop calls the callback with (value) or (value, key)
// depending on its declared parameter count.
func callFuncCollectionLoop(funcToCall, value, key reflect.Value, numIn int) []reflect.Value {
	if numIn == 1 {
		return funcToCall.Call([]reflect.Value{value})
	}
	return funcToCall.Call([]reflect.Value{value, key})
}
// isSlice validates that the given value(s) are slices or arrays, setting
// *err for the empty and single-value failure cases.
// NOTE(review): with multiple values the result is an OR -- true if ANY of
// them is a slice/array, and *err is never set -- which reads inconsistent
// with the single-value branch; confirm the intended semantics.
func isSlice(err *error, label string, dataValue ...reflect.Value) bool {
	if len(dataValue) == 0 {
		*err = fmt.Errorf("%s cannot be empty", label)
		return false
	} else if len(dataValue) == 1 {
		if dataValue[0].Kind() == reflect.Slice || dataValue[0].Kind() == reflect.Array {
			return true
		}
		*err = fmt.Errorf("%s must be slice", label)
		return false
	} else {
		res := dataValue[0].Kind() == reflect.Slice || dataValue[0].Kind() == reflect.Array
		for i, each := range dataValue {
			if i > 0 {
				res = res || (each.Kind() == reflect.Slice) || (each.Kind() == reflect.Array)
			}
		}
		return res
	}
}
// isNonNilData reports whether data is usable: neither a nil interface nor a
// nil value of a nilable kind. On failure *err is set and false is returned.
func isNonNilData(err *error, label string, data interface{}) bool {
	if data == nil {
		*err = fmt.Errorf("%s cannot be nil", label)
		return false
	}
	valueOfData := reflect.ValueOf(data)
	switch valueOfData.Kind() {
	// Only these kinds support reflect.Value.IsNil. The previous version
	// also listed reflect.Struct, which made IsNil panic for any struct
	// argument (structs can never be nil anyway).
	case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice, reflect.UnsafePointer:
		if valueOfData.IsNil() {
			*err = fmt.Errorf("%s cannot be nil", label)
			return false
		}
	}
	return true
}
// isZeroOrPositiveNumber reports whether size is >= 0, setting *err when it
// is negative. (The previous version had a redundant `size == 0` branch that
// duplicated the fallthrough `return true`.)
func isZeroOrPositiveNumber(err *error, label string, size int) bool {
	if size < 0 {
		*err = fmt.Errorf("%s must not be negative number", label)
		return false
	}
	return true
}
// isPositiveNumber reports whether size is > 0, setting *err when it is zero
// or negative. (The previous version tested < 0 and == 0 in two branches
// with identical bodies; they collapse to a single <= 0 check.)
func isPositiveNumber(err *error, label string, size int) bool {
	if size <= 0 {
		*err = fmt.Errorf("%s must be positive number", label)
		return false
	}
	return true
}
func isLeftShouldBeGreaterOrEqualThanRight(err *error, labelLeft string, valueLeft int, labelRight string, valueRight int) bool {
if valueLeft < valueRight {
*err = fmt.Errorf("%s should be greater than %s", labelLeft, labelRight)
return false
}
return true
}
func isTypeEqual(err *error, labelLeft string, typeLeft reflect.Type, labelRight string, typeRight reflect.Type) bool {
if typeLeft != typeRight {
*err = fmt.Errorf("type of %s should be same with type of %s", labelLeft, labelRight)
return false
}
return true
}
// catch recovers from a panic and stores it in *err. It must be invoked via
// defer from the function whose panics it should absorb.
func catch(err *error) {
	r := recover()
	if r == nil {
		return
	}
	*err = fmt.Errorf("%v", r)
}

// catchWithCustomErrorMessage recovers from a panic and stores an error
// whose message is produced by passing the panic text through callback.
func catchWithCustomErrorMessage(err *error, callback func(string) string) {
	r := recover()
	if r == nil {
		return
	}
	*err = errors.New(callback(fmt.Sprintf("%v", r)))
}
package gocomplex
import "math"
// Complex128 is the float structure representing the complex number
type Complex128 struct {
real float64
imaginary float64
}
// CreateComplex128 Returns a Complex 128 structure
func CreateComplex128(real float64, imaginary float64) Complex128 {
c := Complex128{real: real, imaginary: imaginary}
return c
}
// Real Returns Real Part of Complex number
func (c Complex128) Real() float64 {
return c.real
}
// Imaginary returns Imaginary part of complex number
func (c Complex128) Imaginary() float64 {
return c.imaginary
}
// Conjugate Returns the conjugate of a complex number
func (c Complex128) Conjugate() Complex128 {
return Complex128{c.real, -c.imaginary}
}
// Phase Returns the phase of complex number
func (c Complex128) Phase() float64 {
return math.Atan(c.imaginary / c.real)
}
// Magnitude Returns the magnitude of complex numeber
func (c Complex128) Magnitude() float64 {
return math.Hypot(c.real, c.imaginary)
}
// Polar Returns the magnitude and phase of complex number
func (c Complex128) Polar() (float64, float64) {
return c.Magnitude(), c.Phase()
}
// PolarToArgand Converts Polar Representation of Complex number to a + ib form
func PolarToArgand(magnitude, phase float64) Complex128 {
return Complex128{real: magnitude * math.Cos(phase), imaginary: magnitude * math.Sin(phase)}
}
// NthPower Returns Nth Power of complex number
func (c Complex128) NthPower(n float64) Complex128 {
magnitude, phase := c.Polar()
newPhase := phase * n
newMagnitude := math.Pow(magnitude, n)
return PolarToArgand(newMagnitude, newPhase)
}
// Square Returns Square of complex number
func (c Complex128) Square() Complex128 {
return c.NthPower(2)
}
// NthRoot Returns Nth root of complex number
func (c Complex128) NthRoot(n float64) Complex128 {
magnitude, phase := c.Polar()
newPhase := phase / n
newMagnitude := math.Pow(magnitude, 1.0/n)
return PolarToArgand(newMagnitude, newPhase)
}
//SquareRoot Returns Square root of complex number
func (c Complex128) SquareRoot() Complex128 {
return c.NthRoot(2)
}
// Log Return Natural Logarithm of complex number
func (c Complex128) Log() Complex128 {
return Complex128{real: math.Log(c.Magnitude()), imaginary: c.Phase()}
}
// Negate Returns a complex number with same magnitude but opposite sign
func (c Complex128) Negate() Complex128 {
return Complex128{real: -c.real, imaginary: -c.imaginary}
}
// Compare Checks if both complex numbers are equal
func (c Complex128) Compare(c2 Complex128) bool {
return c.real == c2.real && c.imaginary == c2.imaginary
}
// Vector Returns an array of two elements where 1st element is the real part and 2nd the imaginary part
func (c Complex128) Vector() []float64 {
return []float64{c.real, c.imaginary}
}
// Rotate Rotates a Complex Number by angle theta in radians
func (c Complex128) Rotate(angleInRadians float64) Complex128 {
magnitude, phase := c.Polar()
newPhase := phase + angleInRadians
return PolarToArgand(magnitude, newPhase)
}
// TranslateWithComplexNumber Translates a complex number with another Complex number
func (c Complex128) TranslateWithComplexNumber(c1 Complex128) Complex128 {
return Complex128{c.real + c1.real, c.imaginary + c1.imaginary}
}
// TranslateeWithVector Translates a complex number with a Vector
func (c Complex128) TranslateeWithVector(v [2]float64) Complex128 {
return Complex128{c.real + v[0], c.imaginary + v[1]}
}
// Scale Scales a complex number with mentioned scaling factor
func (c Complex128) Scale(scalingFactor float64) Complex128 {
return Complex128{scalingFactor * c.real, scalingFactor * c.imaginary}
}
// MultiplyBy multiplies the original complex number with the complex number passed as argument
func (c Complex128) MultiplyBy(c1 Complex128) Complex128 {
return Complex128{real: c.real*c1.real - c.imaginary*c1.imaginary, imaginary: c.real*c1.imaginary + c.imaginary*c1.real}
}
// Multiply multiplies the two complex numbers passed as argument
func Multiply(c, c1 Complex128) Complex128 {
return Complex128{real: c.real*c1.real - c.imaginary*c1.imaginary, imaginary: c.real*c1.imaginary + c.imaginary*c1.real}
}
// TODO (tan)
// TODO (sin)
// TODO (rect)
// TODO (isnan.go)
// TODO (isinf.go)
// TODO (exp.go)s
// TODO (asin.go) | complex.go | 0.877962 | 0.555857 | complex.go | starcoder |
package golispy
import (
"errors"
)
// greaterThanCallable implements the ">" operator.
type greaterThanCallable struct {}
// Call evaluates both operands and returns an integer Exp holding 1 when the
// first is strictly greater than the second, 0 otherwise.
// NOTE(review): exps[0] and exps[1] are indexed before the len(exps) == 2
// check, so fewer than two arguments panics -- confirm callers guarantee
// arity. A mixed int/float comparison silently yields 0.
func (g greaterThanCallable) Call(exps []Exp, env Env) (Exp, error) {
	exp := Exp{}
	// ret doubles as the boolean result: 1 = true, 0 = false.
	var ret int64
	atom := Atom{integer: &ret}
	exp.atom = &atom
	if op1, op2 := Eval(exps[0], env), Eval(exps[1], env); len(exps) == 2 && op1.IsNumber() && op2.IsNumber() {
		if op1.atom.integer != nil && op2.atom.integer != nil {
			if *op1.atom.integer > *op2.atom.integer {
				ret = 1
			}
		}
		if op1.atom.float != nil && op2.atom.float != nil {
			if *op1.atom.float > *op2.atom.float {
				ret = 1
			}
		}
		return exp, nil
	}
	return exp, errors.New("operands to '>' must be 2 numbers")
}
// lessThanCallable implements the "<" operator.
type lessThanCallable struct {}

// Call evaluates both operands and returns an integer Exp holding 1 when the
// first is strictly less than the second, 0 otherwise. Both operands must be
// integers or both floats.
func (l lessThanCallable) Call(exps []Exp, env Env) (Exp, error) {
	exp := Exp{}
	// ret doubles as the boolean result: 1 = true, 0 = false.
	var ret int64
	atom := Atom{integer: &ret}
	exp.atom = &atom
	if op1, op2 := Eval(exps[0], env), Eval(exps[1], env); len(exps) == 2 && op1.IsNumber() && op2.IsNumber() {
		if op1.atom.integer != nil && op2.atom.integer != nil {
			if *op1.atom.integer < *op2.atom.integer {
				ret = 1
			}
		}
		if op1.atom.float != nil && op2.atom.float != nil {
			if *op1.atom.float < *op2.atom.float {
				ret = 1
			}
		}
		return exp, nil
	}
	// The message previously said '>' -- a copy/paste slip from
	// greaterThanCallable.
	return exp, errors.New("operands to '<' must be 2 numbers")
}
// addCallable implements "+" via the shared numeric fold helper.
type addCallable struct {}
func (a addCallable) Call(exps []Exp, env Env) (Exp, error) {
	opWithFloats := func(a float64,b float64) float64{
		return a + b
	}
	opWithInts := func(a int64,b int64) int64{
		return a + b
	}
	return numberOperandFunc(opWithFloats, opWithInts, exps, env)
}
// multCallable implements "*" via the shared numeric fold helper.
type multCallable struct {}
func (m multCallable) Call(exps []Exp, env Env) (Exp, error) {
	opWithFloats := func(a float64,b float64) float64{
		return a * b
	}
	opWithInts := func(a int64,b int64) int64{
		return a * b
	}
	return numberOperandFunc(opWithFloats, opWithInts, exps, env)
}
// subtractCallable implements "-" via the shared numeric fold helper.
type subtractCallable struct {}
func (m subtractCallable) Call(exps []Exp, env Env) (Exp, error) {
	opWithFloats := func(a float64,b float64) float64{
		return a - b
	}
	opWithInts := func(a int64,b int64) int64{
		return a - b
	}
	return numberOperandFunc(opWithFloats, opWithInts, exps, env)
}
// divideCallable implements "/" via the shared numeric fold helper.
// NOTE(review): all-integer division truncates, and an integer divisor of 0
// panics (float 0 yields ±Inf) -- confirm this matches the intended
// interpreter semantics.
type divideCallable struct {}
func (d divideCallable) Call(exps []Exp, env Env) (Exp, error) {
	opWithFloats := func(a float64,b float64) float64{
		return a / b
	}
	opWithInts := func(a int64,b int64) int64{
		return a / b
	}
	return numberOperandFunc(opWithFloats, opWithInts, exps, env)
}
// numberOperandFunc folds the operands of a numeric operator left to right.
// The first operand seeds the accumulator; each further operand is combined
// via opWithInts while everything is integral, or opWithFloats once either
// side is a float (promoting the accumulator to float as needed).
func numberOperandFunc(opWithFloats func(float64, float64) float64,
	opWithInts func(int64, int64) int64, exps []Exp, env Env) (Exp, error) {
	// DeepAtomCopy keeps the fold from mutating the caller's expression.
	exp := Eval(exps[0].DeepAtomCopy(), env)
	if !exp.IsNumber() {
		return exp, errors.New("invalid operand type, must be number")
	}
	atom := exp.atom
	for _, op := range exps[1:] {
		evaled := Eval(op, env)
		if evaled.atom == nil || evaled.atom.symbol != nil {
			return exp, errors.New("invalid operand type")
		}
		if evaled.atom.integer != nil && atom.integer != nil {
			*atom.integer = opWithInts(*atom.integer, *evaled.atom.integer)
		} else if evaled.atom.float != nil && atom.integer != nil {
			// Promote the integer accumulator to float. The accumulator
			// stays the left-hand operand: the previous version swapped
			// the arguments here, which broke non-commutative operators
			// such as subtraction and division (e.g. (- 10 2.0) computed
			// 2.0 - 10).
			var f float64
			f = opWithFloats(float64(*atom.integer), *evaled.atom.float)
			atom.float = &f
			atom.integer = nil
		} else if evaled.atom.integer != nil && atom.float != nil {
			*atom.float = opWithFloats(*atom.float, float64(*evaled.atom.integer))
		} else if evaled.atom.float != nil && atom.float != nil {
			*atom.float = opWithFloats(*atom.float, *evaled.atom.float)
		}
	}
	return exp, nil
}
// listCallable implements list construction.
// NOTE(review): the elements are appended as-is, without Eval -- confirm the
// caller evaluates arguments beforehand.
type listCallable struct {}
// Call gathers its arguments into a new List expression.
func (l listCallable) Call(exps[]Exp, env Env) (Exp, error) {
	var list List
	for _, v := range exps {
		list = append(list, v)
	}
	return Exp{list: &list}, nil
}
// carCallable implements car, returning the first element of a list.
type carCallable struct {}

// Call returns the head of the single list argument. It errors (instead of
// panicking) when the argument count is wrong, the argument's list pointer
// is nil, or the list is empty -- the previous version dereferenced a
// possibly-nil *List and indexed element 0 of a possibly-empty list.
func (l carCallable) Call(exps []Exp, env Env) (Exp, error) {
	if len(exps) != 1 {
		return Exp{}, errors.New("car takes a single argument")
	}
	if exps[0].list == nil || len(*exps[0].list) == 0 {
		return Exp{}, errors.New("argument to car must be a list")
	}
	return (*exps[0].list)[0], nil
}
// cdrCallable implements cdr, returning a list's tail.
type cdrCallable struct {}

// Call returns a fresh list holding everything after the first element of
// the single list argument. Error messages previously said "car" (a
// copy/paste slip), and a nil list pointer or empty list caused a panic;
// both now error cleanly.
func (c cdrCallable) Call(exps []Exp, env Env) (Exp, error) {
	if len(exps) != 1 {
		return Exp{}, errors.New("cdr takes a single argument")
	}
	if exps[0].list == nil || len(*exps[0].list) == 0 {
		return Exp{}, errors.New("argument to cdr must be a list")
	}
	// Copy the tail so the result does not alias the argument's backing array.
	var newList List
	for _, v := range (*exps[0].list)[1:] {
		newList = append(newList, v)
	}
	return Exp{list: &newList}, nil
}
// beginCallable implements begin, which yields its final argument.
// NOTE(review): the last expression is returned as-is, not passed through
// Eval -- confirm the caller evaluates arguments beforehand. An empty
// argument list panics on the index below.
type beginCallable struct {}
// Call returns the last argument unchanged.
func (b beginCallable) Call(exps[] Exp, env Env) (Exp, error) {
	return exps[len(exps)-1], nil
}
package scenario
import (
"errors"
"fmt"
"math"
"reflect"
"strconv"
"github.com/isucon/isucon11-final/benchmarker/api"
"github.com/isucon/isucon11-final/benchmarker/model"
)
// AssertEqual checks that expected equals actual (after type conversion via
// assertEqual), logging a diagnostic tagged with msg when they differ.
func AssertEqual(msg string, expected interface{}, actual interface{}) bool {
	r := assertEqual(expected, actual)
	if !r {
		AdminLogger.Printf("%s: expected: %v / actual: %v", msg, expected, actual)
	}
	return r
}
// assertEqual reports whether expected and actual are deeply equal after
// converting expected to actual's type (so e.g. int(1) matches int64(1)).
func assertEqual(expected interface{}, actual interface{}) bool {
	// A nil on either side only matches a nil on the other.
	if expected == nil || actual == nil {
		return expected == actual
	}
	actualType := reflect.TypeOf(actual)
	if actualType == nil {
		return false
	}
	expectedValue := reflect.ValueOf(expected)
	if !expectedValue.IsValid() || !expectedValue.Type().ConvertibleTo(actualType) {
		return false
	}
	converted := expectedValue.Convert(actualType).Interface()
	return reflect.DeepEqual(converted, actual)
}
// AssertGreaterOrEqual checks actual >= expectMin, logging a diagnostic
// tagged with msg when the check fails.
func AssertGreaterOrEqual(msg string, expectMin, actual int) bool {
	ok := actual >= expectMin
	if !ok {
		AdminLogger.Printf("%s: expected: >= %d / actual: %d", msg, expectMin, actual)
	}
	return ok
}

// AssertWithinTolerance checks that actual lies within ±tolerance of expect,
// logging a diagnostic tagged with msg when the check fails.
func AssertWithinTolerance(msg string, expect, actual, tolerance float64) bool {
	ok := math.Abs(expect-actual) <= tolerance
	if !ok {
		AdminLogger.Printf("%s: expected: %f ± %.2f / actual: %f", msg, expect, tolerance, actual)
	}
	return ok
}
// errMismatch builds a validation error that appends the expected and actual
// values to the given message.
func errMismatch(message string, expected interface{}, actual interface{}) error {
	detail := fmt.Sprintf("(expected: %v, actual: %v)", expected, actual)
	return fmt.Errorf("%s %s", message, detail)
}
func AssertEqualUserAccount(expected *model.UserAccount, actual *api.GetMeResponse) error {
if !AssertEqual("account code", expected.Code, actual.Code) {
return errMismatch("ユーザ情報の code が期待する値と一致しません", expected.Code, actual.Code)
}
if !AssertEqual("account name", expected.Name, actual.Name) {
return errMismatch("ユーザ情報の name が期待する値と一致しません", expected.Name, actual.Name)
}
if !AssertEqual("account is_admin", expected.IsAdmin, actual.IsAdmin) {
return errMismatch("ユーザ情報の is_admin が期待する値と一致しません", expected.IsAdmin, actual.IsAdmin)
}
return nil
}
func AssertEqualRegisteredCourse(expected *model.Course, actual *api.GetRegisteredCourseResponseContent) error {
if !AssertEqual("registered_course id", expected.ID, actual.ID) {
return errMismatch("科目の id が期待する値と一致しません", expected.ID, actual.ID)
}
if !AssertEqual("registered_course name", expected.Name, actual.Name) {
return errMismatch("科目の name が期待する値と一致しません", expected.Name, actual.Name)
}
if !AssertEqual("registered_course teacher", expected.Teacher().Name, actual.Teacher) {
return errMismatch("科目の teacher が期待する値と一致しません", expected.Teacher().Name, actual.Teacher)
}
if !AssertEqual("registered_course period", uint8(expected.Period+1), actual.Period) {
return errMismatch("科目の period が期待する値と一致しません", uint8(expected.Period+1), actual.Period)
}
if !AssertEqual("registered_course day_of_weeek", api.DayOfWeekTable[expected.DayOfWeek], actual.DayOfWeek) {
return errMismatch("科目の day_of_week が期待する値と一致しません", api.DayOfWeekTable[expected.DayOfWeek], actual.DayOfWeek)
}
return nil
}
func AssertEqualGrade(expected *model.GradeRes, actual *api.GetGradeResponse) error {
if !AssertEqual("grade courses length", len(expected.CourseResults), len(actual.CourseResults)) {
return errMismatch("成績取得の courses の数が期待する値と一致しません", len(expected.CourseResults), len(actual.CourseResults))
}
err := AssertEqualSummary(&expected.Summary, &actual.Summary)
if err != nil {
return err
}
for _, courseResult := range actual.CourseResults {
if _, ok := expected.CourseResults[courseResult.Code]; !ok {
return errors.New("成績取得の courses に期待しない科目が含まれています")
}
expected := expected.CourseResults[courseResult.Code]
err := AssertEqualCourseResult(expected, &courseResult)
if err != nil {
return err
}
}
return nil
}
// AssertEqualSummary compares an expected grade summary against the actual
// summary in a grade API response. GPA statistics are compared within
// validateGPAErrorTolerance rather than exactly.
func AssertEqualSummary(expected *model.Summary, actual *api.Summary) error {
	if !AssertEqual("grade summary credits", expected.Credits, actual.Credits) {
		return errMismatch("成績取得の summary の credits が期待する値と一致しません", expected.Credits, actual.Credits)
	}
	if !AssertWithinTolerance("grade summary gpa", expected.GPA, actual.GPA, validateGPAErrorTolerance) {
		return errMismatch("成績取得の summary の gpa が期待する値と一致しません", expected.GPA, actual.GPA)
	}
	if !AssertWithinTolerance("grade summary gpa_max", expected.GpaMax, actual.GpaMax, validateGPAErrorTolerance) {
		return errMismatch("成績取得の summary の gpa_max が期待する値と一致しません", expected.GpaMax, actual.GpaMax)
	}
	if !AssertWithinTolerance("grade summary gpa_min", expected.GpaMin, actual.GpaMin, validateGPAErrorTolerance) {
		return errMismatch("成績取得の summary の gpa_min が期待する値と一致しません", expected.GpaMin, actual.GpaMin)
	}
	if !AssertWithinTolerance("grade summary gpa_avg", expected.GpaAvg, actual.GpaAvg, validateGPAErrorTolerance) {
		return errMismatch("成績取得の summary の gpa_avg が期待する値と一致しません", expected.GpaAvg, actual.GpaAvg)
	}
	if !AssertWithinTolerance("grade summary gpa_t_score", expected.GpaTScore, actual.GpaTScore, validateGPAErrorTolerance) {
		return errMismatch("成績取得の summary の gpa_t_score が期待する値と一致しません", expected.GpaTScore, actual.GpaTScore)
	}
	return nil
}
// AssertEqualCourseResult compares an expected per-course result against the
// actual course result in a grade API response. Averages and T-scores are
// compared within validateTotalScoreErrorTolerance; it also checks that the
// class scores are ordered newest-first.
func AssertEqualCourseResult(expected *model.CourseResult, actual *api.CourseResult) error {
	if !AssertEqual("grade courses name", expected.Name, actual.Name) {
		return errMismatch("成績取得の科目の name が期待する値と一致しません", expected.Name, actual.Name)
	}
	if !AssertEqual("grade courses code", expected.Code, actual.Code) {
		return errMismatch("成績取得の科目の code が期待する値と一致しません", expected.Code, actual.Code)
	}
	if !AssertEqual("grade courses total_score", expected.TotalScore, actual.TotalScore) {
		return errMismatch("成績取得の科目の total_score が期待する値と一致しません", expected.TotalScore, actual.TotalScore)
	}
	if !AssertEqual("grade courses total_score_max", expected.TotalScoreMax, actual.TotalScoreMax) {
		return errMismatch("成績取得の科目の total_score_max が期待する値と一致しません", expected.TotalScoreMax, actual.TotalScoreMax)
	}
	if !AssertEqual("grade courses total_score_min", expected.TotalScoreMin, actual.TotalScoreMin) {
		return errMismatch("成績取得の科目の total_score_min が期待する値と一致しません", expected.TotalScoreMin, actual.TotalScoreMin)
	}
	if !AssertWithinTolerance("grade courses total_score_avg", expected.TotalScoreAvg, actual.TotalScoreAvg, validateTotalScoreErrorTolerance) {
		return errMismatch("成績取得の科目の total_score_avg が期待する値と一致しません", expected.TotalScoreAvg, actual.TotalScoreAvg)
	}
	if !AssertWithinTolerance("grade courses total_score_t_score", expected.TotalScoreTScore, actual.TotalScoreTScore, validateTotalScoreErrorTolerance) {
		return errMismatch("成績取得の科目の total_score_t_score が期待する値と一致しません", expected.TotalScoreTScore, actual.TotalScoreTScore)
	}
	if !AssertEqual("grade courses class_scores length", len(expected.ClassScores), len(actual.ClassScores)) {
		return errMismatch("成績取得の科目の class_scores の数が期待する値と一致しません", len(expected.ClassScores), len(actual.ClassScores))
	}
	// Verify that class_scores are ordered by descending part.
	for i := 0; i < len(actual.ClassScores)-1; i++ {
		if actual.ClassScores[i].Part < actual.ClassScores[i+1].Part {
			return errors.New("成績取得の科目の class_scores の順序が part の降順になっていません")
		}
	}
	for i := 0; i < len(expected.ClassScores); i++ {
		// The webapp returns classes newest-first (largest part first),
		// so iterate the actual list from the end to compare oldest-first.
		err := AssertEqualClassScore(expected.ClassScores[i], &actual.ClassScores[len(actual.ClassScores)-i-1])
		if err != nil {
			return err
		}
	}
	return nil
}
// AssertEqualClassScore compares an expected class score against the actual
// class score in a grade API response, including the submitter count.
func AssertEqualClassScore(expected *model.ClassScore, actual *api.ClassScore) error {
	if !AssertEqual("grade courses class_scores class_id", expected.ClassID, actual.ClassID) {
		return errMismatch("成績取得の講義の class_id が期待する値と一致しません", expected.ClassID, actual.ClassID)
	}
	if !AssertEqual("grade courses class_scores title", expected.Title, actual.Title) {
		return errMismatch("成績取得の講義の title が期待する値と一致しません", expected.Title, actual.Title)
	}
	if !AssertEqual("grade courses class_scores part", expected.Part, actual.Part) {
		return errMismatch("成績取得の講義の part が期待する値と一致しません", expected.Part, actual.Part)
	}
	if !AssertEqual("grade courses class_scores score", expected.Score, actual.Score) {
		// scoreToString renders the nullable score pointers for the message.
		return errMismatch("成績取得での講義の score が期待する値と一致しません", scoreToString(expected.Score), scoreToString(actual.Score))
	}
	if !AssertEqual("grade courses class_scores submitters", expected.SubmitterCount, actual.Submitters) {
		return errMismatch("成績取得の講義の submitters が期待する値と一致しません", expected.SubmitterCount, actual.Submitters)
	}
	return nil
}
// AssertEqualSimpleClassScore compares an expected simple class score (no
// submitter count) against the actual class score in a grade API response.
func AssertEqualSimpleClassScore(expected *model.SimpleClassScore, actual *api.ClassScore) error {
	if !AssertEqual("grade courses class_scores class_id", expected.ClassID, actual.ClassID) {
		return errMismatch("成績取得での講義の class_id が期待する値と一致しません", expected.ClassID, actual.ClassID)
	}
	if !AssertEqual("grade courses class_scores title", expected.Title, actual.Title) {
		return errMismatch("成績取得での講義の title が期待する値と一致しません", expected.Title, actual.Title)
	}
	if !AssertEqual("grade courses class_scores part", expected.Part, actual.Part) {
		return errMismatch("成績取得での講義の part が期待する値と一致しません", expected.Part, actual.Part)
	}
	if !AssertEqual("grade courses class_scores score", expected.Score, actual.Score) {
		// scoreToString renders the nullable score pointers for the message.
		return errMismatch("成績取得での講義の score が期待する値と一致しません", scoreToString(expected.Score), scoreToString(actual.Score))
	}
	return nil
}
// AssertEqualCourse compares an expected course model against the actual
// course detail API response. The status field is only checked when
// verifyStatus is true.
func AssertEqualCourse(expected *model.Course, actual *api.GetCourseDetailResponse, verifyStatus bool) error {
	if !AssertEqual("course id", expected.ID, actual.ID) {
		return errMismatch("科目の id が期待する値と一致しません", expected.ID, actual.ID)
	}
	if !AssertEqual("course code", expected.Code, actual.Code) {
		return errMismatch("科目の code が期待する値と一致しません", expected.Code, actual.Code)
	}
	if !AssertEqual("course type", api.CourseType(expected.Type), actual.Type) {
		return errMismatch("科目の type が期待する値と一致しません", api.CourseType(expected.Type), actual.Type)
	}
	if !AssertEqual("course name", expected.Name, actual.Name) {
		return errMismatch("科目の name が期待する値と一致しません", expected.Name, actual.Name)
	}
	if !AssertEqual("course description", expected.Description, actual.Description) {
		return errMismatch("科目の description が期待する値と一致しません", expected.Description, actual.Description)
	}
	if !AssertEqual("course credit", uint8(expected.Credit), actual.Credit) {
		return errMismatch("科目の credit が期待する値と一致しません", uint8(expected.Credit), actual.Credit)
	}
	// The API period is the model period + 1 (the API appears to be 1-based).
	if !AssertEqual("course period", uint8(expected.Period+1), actual.Period) {
		return errMismatch("科目の period が期待する値と一致しません", uint8(expected.Period+1), actual.Period)
	}
	if !AssertEqual("course day_of_week", api.DayOfWeekTable[expected.DayOfWeek], actual.DayOfWeek) {
		return errMismatch("科目の day_of_week が期待する値と一致しません", api.DayOfWeekTable[expected.DayOfWeek], actual.DayOfWeek)
	}
	if !AssertEqual("course teacher", expected.Teacher().Name, actual.Teacher) {
		return errMismatch("科目の teacher が期待する値と一致しません", expected.Teacher().Name, actual.Teacher)
	}
	if verifyStatus && !AssertEqual("course status", expected.Status(), actual.Status) {
		return errMismatch("科目の status が期待する値と一致しません", expected.Status(), actual.Status)
	}
	if !AssertEqual("course keywords", expected.Keywords, actual.Keywords) {
		return errMismatch("科目の keywords が期待する値と一致しません", expected.Keywords, actual.Keywords)
	}
	return nil
}
// AssertEqualClass compares an expected class model against the actual class
// API response. The student is used to compute whether a submission from
// that student is expected to exist.
func AssertEqualClass(expected *model.Class, actual *api.GetClassResponse, student *model.Student) error {
	if !AssertEqual("class id", expected.ID, actual.ID) {
		return errMismatch("講義の id が期待する値と一致しません", expected.ID, actual.ID)
	}
	if !AssertEqual("class part", expected.Part, actual.Part) {
		return errMismatch("講義の part が期待する値と一致しません", expected.Part, actual.Part)
	}
	if !AssertEqual("class title", expected.Title, actual.Title) {
		return errMismatch("講義の title が期待する値と一致しません", expected.Title, actual.Title)
	}
	if !AssertEqual("class description", expected.Desc, actual.Description) {
		return errMismatch("講義の description が期待する値と一致しません", expected.Desc, actual.Description)
	}
	if !AssertEqual("class submission_closed", expected.IsSubmissionClosed(), actual.SubmissionClosed) {
		return errMismatch("講義の submission_closed が期待する値と一致しません", expected.IsSubmissionClosed(), actual.SubmissionClosed)
	}
	// The student is expected to appear as submitted iff a submission
	// exists for their code in the expected model.
	isSubmitted := expected.GetSubmissionByStudentCode(student.Code) != nil
	if !AssertEqual("class submitted", isSubmitted, actual.Submitted) {
		return errMismatch("講義の submitted が期待する値と一致しません", isSubmitted, actual.Submitted)
	}
	return nil
}
// AssertEqualAnnouncementListContent compares an expected announcement status
// against one entry of an announcement-list API response. The unread flag is
// only checked when verifyUnread is true.
func AssertEqualAnnouncementListContent(expected *model.AnnouncementStatus, actual *api.AnnouncementResponse, verifyUnread bool) error {
	if !AssertEqual("announcement_list announcements id", expected.Announcement.ID, actual.ID) {
		return errMismatch("お知らせの id が期待する値と一致しません", expected.Announcement.ID, actual.ID)
	}
	if !AssertEqual("announcement_list announcements course_id", expected.Announcement.CourseID, actual.CourseID) {
		return errMismatch("お知らせの course_id が期待する値と一致しません", expected.Announcement.CourseID, actual.CourseID)
	}
	if !AssertEqual("announcement_list announcements course_name", expected.Announcement.CourseName, actual.CourseName) {
		return errMismatch("お知らせの course_name が期待する値と一致しません", expected.Announcement.CourseName, actual.CourseName)
	}
	if !AssertEqual("announcement_list announcements title", expected.Announcement.Title, actual.Title) {
		return errMismatch("お知らせの title が期待する値と一致しません", expected.Announcement.Title, actual.Title)
	}
	if verifyUnread && !AssertEqual("announcement_list announcements unread", expected.Unread, actual.Unread) {
		return errMismatch("お知らせの unread が期待する値と一致しません", expected.Unread, actual.Unread)
	}
	return nil
}
// AssertEqualAnnouncementDetail compares an expected announcement status
// against the actual announcement-detail API response (which, unlike the
// list entry, also includes the message body). The unread flag is only
// checked when verifyUnread is true.
func AssertEqualAnnouncementDetail(expected *model.AnnouncementStatus, actual *api.GetAnnouncementDetailResponse, verifyUnread bool) error {
	if !AssertEqual("announcement_detail id", expected.Announcement.ID, actual.ID) {
		return errMismatch("お知らせの id が期待する値と一致しません", expected.Announcement.ID, actual.ID)
	}
	if !AssertEqual("announcement_detail course_id", expected.Announcement.CourseID, actual.CourseID) {
		return errMismatch("お知らせの course_id が期待する値と一致しません", expected.Announcement.CourseID, actual.CourseID)
	}
	if !AssertEqual("announcement_detail course_name", expected.Announcement.CourseName, actual.CourseName) {
		return errMismatch("お知らせの course_name が期待する値と一致しません", expected.Announcement.CourseName, actual.CourseName)
	}
	if !AssertEqual("announcement_detail title", expected.Announcement.Title, actual.Title) {
		return errMismatch("お知らせの title が期待する値と一致しません", expected.Announcement.Title, actual.Title)
	}
	if !AssertEqual("announcement_detail message", expected.Announcement.Message, actual.Message) {
		return errMismatch("お知らせの message が期待する値と一致しません", expected.Announcement.Message, actual.Message)
	}
	if verifyUnread && !AssertEqual("announcement_detail unread", expected.Unread, actual.Unread) {
		return errMismatch("お知らせの unread が期待する値と一致しません", expected.Unread, actual.Unread)
	}
	return nil
}
// scoreToString renders an optional score for error messages:
// "null" when the score is absent, otherwise its decimal representation.
func scoreToString(score *int) string {
	if score == nil {
		return "null"
	}
	return strconv.Itoa(*score)
}
package runtime
import (
"strings"
"sync/atomic"
"github.com/apmckinlay/gsuneido/util/pack"
)
/*
Record is an immutable record stored in a string
using the same format as cSuneido and jSuneido.
NOTE: This is the post 2019 format using a two byte header.
It is used for storing data records in the database
and for transferring data records across the client-server protocol.
An empty Record is a single zero byte.
First two bytes are the type and the count of values, high two bits are the type
followed by the total length (uint8, uint16, or uint32)
followed by the offsets of the fields (uint8, uint16, or uint32)
followed by the contents of the fields
integers are stored big endian (most significant first)
*/
type Record string

// Record mode tags, stored in the top two bits of the two-byte header.
// They select how wide (1, 2, or 4 bytes) the length and offset entries are.
const (
	type8 = iota + 1 // 1-byte length and offsets (total length < 0x100)
	type16           // 2-byte length and offsets (total length < 0x10000)
	type32           // 4-byte length and offsets
)

// sizeMask extracts the 14-bit field count from the two-byte header.
const sizeMask = 0x3fff

// hdrlen is the size in bytes of the type/count header.
const hdrlen = 2
// Count returns the number of values in the record,
// decoded from the low 14 bits of the big-endian header.
func (r Record) Count() int {
	if len(r) == 0 || r[0] == 0 {
		return 0
	}
	header := int(r[0])<<8 | int(r[1])
	return header & sizeMask
}
// Len returns the total length in bytes of the encoded record.
// The length is stored big-endian immediately after the header,
// with a width determined by the record's mode.
func (r Record) Len() int {
	if r[0] == 0 {
		return 1 // an empty record is a single zero byte
	}
	switch r.mode() {
	case type8:
		return int(r[hdrlen])
	case type16:
		return int(r[hdrlen])<<8 | int(r[hdrlen+1])
	case type32:
		return int(r[hdrlen])<<24 | int(r[hdrlen+1])<<16 |
			int(r[hdrlen+2])<<8 | int(r[hdrlen+3])
	}
	panic("invalid record type")
}
// RecLen returns the total length in bytes of an encoded record held in a
// byte slice, mirroring Record.Len without converting to a string.
func RecLen(r []byte) int {
	if r[0] == 0 {
		return 1 // an empty record is a single zero byte
	}
	switch r[0] >> 6 {
	case type8:
		return int(r[hdrlen])
	case type16:
		return int(r[hdrlen])<<8 | int(r[hdrlen+1])
	case type32:
		return int(r[hdrlen])<<24 | int(r[hdrlen+1])<<16 |
			int(r[hdrlen+2])<<8 | int(r[hdrlen+3])
	}
	panic("invalid record type")
}
// GetVal is a convenience method that gets the i'th raw value
// and unpacks it into a Value.
func (r Record) GetVal(i int) Value {
	return Unpack(r.GetRaw(i))
}
// GetStr is a more direct method to get a packed string.
// A zero-length raw value is returned as the empty string
// (NOTE(review): an empty string is assumed to pack to zero bytes —
// previously this case indexed s[0] and panicked out of range).
// It panics if the value is not a packed string.
func (r Record) GetStr(i int) string {
	s := r.GetRaw(i)
	if s == "" {
		return ""
	}
	if s[0] != PackString {
		panic("Record GetStr not string")
	}
	return s[1:]
}
// GetRaw returns one of the (usually packed) values.
// It returns "" if i is out of range.
// The offset table entry at index i is the end of field i and the entry at
// i+1 is its start; fields are stored back-to-front at the end of the record
// (see RecordBuilder.buildOffsets / build).
func (r Record) GetRaw(i int) string {
	if i < 0 || r.Count() <= i {
		return ""
	}
	var pos, end int
	switch r.mode() {
	case type8:
		j := hdrlen + i
		end = int(r[j])
		pos = int(r[j+1])
	case type16:
		j := hdrlen + 2*i
		end = (int(r[j]) << 8) | int(r[j+1])
		pos = (int(r[j+2]) << 8) | int(r[j+3])
	case type32:
		j := hdrlen + 4*i
		end = (int(r[j]) << 24) | (int(r[j+1]) << 16) |
			(int(r[j+2]) << 8) | int(r[j+3])
		pos = (int(r[j+4]) << 24) | (int(r[j+5]) << 16) |
			(int(r[j+6]) << 8) | int(r[j+7])
	default:
		panic("invalid record type")
	}
	return string(r)[pos:end]
}
// mode returns the record's type tag (type8, type16, or type32),
// stored in the high two bits of the first header byte.
func (r Record) mode() byte {
	return r[0] >> 6
}
// String returns a human-readable representation of the record's
// values, e.g. "<1, 2>", for debugging.
func (r Record) String() string {
	if r == "" {
		return "<nil>"
	}
	var sb strings.Builder
	delim := "<"
	for i, n := 0, r.Count(); i < n; i++ {
		sb.WriteString(delim)
		delim = ", "
		sb.WriteString(r.GetVal(i).String())
	}
	sb.WriteString(">")
	return sb.String()
}
// ------------------------------------------------------------------

// RecordBuilder is used to construct records. Zero value is ready to use.
type RecordBuilder struct {
	vals []Packable // the record's values in field order; packed by Build
}

// MaxValues is the maximum number of fields a record can hold,
// limited by the 14-bit count in the record header (see sizeMask).
const MaxValues = 0x3fff
// Add appends a Packable value and returns the builder for chaining.
func (b *RecordBuilder) Add(p Packable) *RecordBuilder {
	b.vals = append(b.vals, p)
	return b
}
// AddRaw appends a string containing an already packed value.
// An empty string is added as the packed empty string.
// It returns the builder for chaining.
func (b *RecordBuilder) AddRaw(s string) *RecordBuilder {
	if s == "" {
		return b.Add(SuStr(""))
	}
	return b.Add(Packed(s))
}
// Packed is a Packable wrapper for an already packed value.
type Packed string

// Pack writes the already-packed bytes directly to the encoder.
func (p Packed) Pack(_ int32, buf *pack.Encoder) {
	buf.PutStr(string(p))
}

// PackSize returns the packed size, which is simply the string's length.
func (p Packed) PackSize(*int32) int {
	return len(p)
}

// PackSize2 returns the packed size; the clock and stack are unused
// since the value is already packed.
func (p Packed) PackSize2(int32, packStack) int {
	return len(p)
}

// PackSize3 returns the packed size.
func (p Packed) PackSize3() int {
	return len(p)
}
// Trim removes trailing empty-string fields from the record being built.
// It returns the builder for chaining.
func (b *RecordBuilder) Trim() *RecordBuilder {
	vals := b.vals
	for len(vals) > 0 && vals[len(vals)-1] == SuStr("") {
		vals = vals[:len(vals)-1]
	}
	b.vals = vals
	return b
}
// Build constructs and returns the Record from the values added so far.
// An empty builder produces the single-zero-byte empty record.
// It panics if more than MaxValues values have been added.
func (b *RecordBuilder) Build() Record {
	// NOTE(review): packClock/packStack appear to coordinate PackSize2 with
	// the subsequent Pack calls — confirm against the Packable definition.
	clock := atomic.AddInt32(&packClock, 1)
	stack := newPackStack()
	if len(b.vals) > MaxValues {
		panic("too many values for record")
	}
	if len(b.vals) == 0 {
		// An empty record is a single zero byte.
		return Record("\x00")
	}
	// Measure every value first so the header and offsets can be sized.
	sizes := make([]int, len(b.vals))
	for i, v := range b.vals {
		sizes[i] = v.PackSize2(clock, stack)
	}
	length := b.recSize(sizes)
	buf := pack.NewEncoder(length)
	b.build(clock, buf, length, sizes)
	//assert.That(len(buf.String()) == length)
	return Record(buf.String())
}
// recSize returns the total encoded size of the record: header, offset
// table, and the packed data whose per-field sizes are given.
func (b *RecordBuilder) recSize(sizes []int) int {
	datasize := 0
	for _, n := range sizes {
		datasize += n
	}
	return tblength(len(b.vals), datasize)
}
// tblength returns the smallest encoded size for a record with nfields
// fields and datasize bytes of packed data, choosing 1-, 2-, or 4-byte
// offset entries as needed. The offset table has one entry per field plus
// one for the total length.
func tblength(nfields, datasize int) int {
	if nfields == 0 {
		return 1
	}
	offsets := 1 + nfields
	if n := hdrlen + offsets + datasize; n < 0x100 {
		return n
	}
	if n := hdrlen + 2*offsets + datasize; n < 0x10000 {
		return n
	}
	return hdrlen + 4*offsets + datasize
}
// build writes the header and then packs the values in reverse order, so
// that field 0 ends up at the end of the record — matching the decreasing
// offsets written by buildOffsets.
func (b *RecordBuilder) build(clock int32, dst *pack.Encoder, length int, sizes []int) {
	b.buildHeader(dst, length, sizes)
	nfields := len(b.vals)
	for i := nfields - 1; i >= 0; i-- {
		b.vals[i].Pack(clock, dst)
	}
}
// buildHeader writes the two-byte header — the mode tag in the high two
// bits and the field count in the low 14 — followed by the offset table.
func (b *RecordBuilder) buildHeader(dst *pack.Encoder, length int, sizes []int) {
	mode := mode(length)
	nfields := len(b.vals)
	dst.Uint16(uint16(mode<<14 | nfields))
	b.buildOffsets(dst, length, sizes)
}
// buildOffsets writes the offset table: first the total record length, then
// one entry per field giving where that field's data starts. Because the
// data is packed back-to-front (see build), the offsets decrease. The entry
// width (1, 2, or 4 bytes) is chosen by mode from the total length.
func (b *RecordBuilder) buildOffsets(dst *pack.Encoder, length int, sizes []int) {
	nfields := len(b.vals)
	offset := length
	switch mode(length) {
	case type8:
		dst.Put1(byte(offset))
		for i := 0; i < nfields; i++ {
			offset -= sizes[i]
			dst.Put1(byte(offset))
		}
	case type16:
		dst.Uint16(uint16(offset))
		for i := 0; i < nfields; i++ {
			offset -= sizes[i]
			dst.Uint16(uint16(offset))
		}
	case type32:
		dst.Uint32(uint32(offset))
		for i := 0; i < nfields; i++ {
			offset -= sizes[i]
			dst.Uint32(uint32(offset))
		}
	}
}
func mode(length int) int { // length must include header
if length == 0 {
return 0
} else if length < 0x100 {
return type8
} else if length < 0x10000 {
return type16
} else {
return type32
}
} | runtime/record.go | 0.616012 | 0.522507 | record.go | starcoder |
package circuit
import (
"math"
"math/rand"
"time"
"github.com/heustis/tsp-solver-go/model"
)
// SimulatedAnnealing implements [simulated annealing](https://en.wikipedia.org/wiki/Simulated_annealing) to stochastically approximate the optimum circuit through a set of points.
// Unlike the convex-concave algorithms (both closest and disparity variants) this does not start from a convex hull and work towards a completed circuit.
// Rather this treats the supplied set of points as the initial circuit, or uses another algorithm to create an initial circuit, and mutates its to try to find a better sequencing of points for the circuit.
// During each iteration (up to "maxIterations" times) this:
// 1. Randomly selects 2 points.
// * If enabled, when selecting a second point it will prefer points that are close to the first selected point.
// 2. Determine how swapping the 2 points impacts the circuit length.
// * i.e. how much does swapping the points lengthen or shorten the circuit?
// 3. Scale this value based on the size of the coordinate space being used, so that it is meaningful regardless of if the coordinates are from -100 to +100 or -100000 to +100000
// 4. Use the configured temperature function to determine the acceptance value (based on the number of iterations, max iterations, and impact of the swap)
// 5. Generate a random number in [0.0, 1.0)
// 6. Swap the points the random number it is less than the acceptance value, or if the swap would shorten the circuit.
type SimulatedAnnealing struct {
	circuit              []model.CircuitVertex // current vertex ordering; mutated in place by Update
	farthestDistance     float64               // largest pairwise vertex distance, used to scale length deltas
	maxIterations        float64               // number of mutation attempts before the algorithm stops
	numIterations        float64               // number of mutation attempts performed so far
	preferCloseNeighbors bool                  // if true, the second swap candidate is biased toward nearby vertices
	random               *rand.Rand            // randomness for vertex selection and the acceptance test
	// temperatureFunction maps (currentIteration, maxIterations) to the
	// annealing temperature; defaults to CalculateTemperatureLinear.
	temperatureFunction func(currentIteration float64, maxIterations float64) float64
}
// NewSimulatedAnnealing creates a SimulatedAnnealing that uses the supplied
// vertices, in their given order, as the initial circuit. It performs at
// most maxIterations mutation attempts and uses the linear temperature
// function by default (see SetTemperatureFunction).
func NewSimulatedAnnealing(circuit []model.CircuitVertex, maxIterations int, preferCloseNeighbors bool) *SimulatedAnnealing {
	return &SimulatedAnnealing{
		circuit:              circuit,
		farthestDistance:     computeFarthestDistance(circuit),
		maxIterations:        float64(maxIterations),
		numIterations:        0.0,
		preferCloseNeighbors: preferCloseNeighbors,
		// Time-seeded math/rand is fine here: randomness is for search, not security.
		random:              rand.New(rand.NewSource(time.Now().UnixNano())),
		temperatureFunction: CalculateTemperatureLinear,
	}
}
// NewSimulatedAnnealingFromCircuit creates a SimulatedAnnealing whose initial
// circuit is produced by the supplied circuit algorithm: the circuit is first
// completed by repeatedly attaching its next vertex, then its attached
// vertices become the starting point for annealing.
func NewSimulatedAnnealingFromCircuit(circuit model.Circuit, maxIterations int, preferCloseNeighbors bool) *SimulatedAnnealing {
	// Complete the supplied circuit so that every vertex is attached.
	for nextVertex, nextEdge := circuit.FindNextVertexAndEdge(); nextVertex != nil; nextVertex, nextEdge = circuit.FindNextVertexAndEdge() {
		circuit.Update(nextVertex, nextEdge)
	}
	// Delegate to NewSimulatedAnnealing rather than duplicating its
	// initialization (the two constructors previously repeated it verbatim).
	return NewSimulatedAnnealing(circuit.GetAttachedVertices(), maxIterations, preferCloseNeighbors)
}
// FindNextVertexAndEdge returns a placeholder (non-nil) vertex while
// iterations remain, and (nil, nil) once maxIterations has been reached.
// The vertices to swap are chosen randomly inside Update, which ignores
// the values returned here.
func (s *SimulatedAnnealing) FindNextVertexAndEdge() (model.CircuitVertex, model.CircuitEdge) {
	// If we have reached the number of iterations we are done, so return (nil,nil)
	if s.numIterations >= s.maxIterations {
		return nil, nil
	}
	// We will determine the next vertex in Update(), so just return the first vertex in the circuit since it will be ignored by Update().
	return s.circuit[0], nil
}
// GetAttachedVertices returns the current circuit; in this algorithm every
// vertex is always part of the circuit.
func (s *SimulatedAnnealing) GetAttachedVertices() []model.CircuitVertex {
	return s.circuit
}
// GetLength returns the total length of the current circuit.
func (s *SimulatedAnnealing) GetLength() float64 {
	return model.Length(s.circuit)
}
// GetUnattachedVertices returns an empty map: simulated annealing starts
// with every vertex already in the circuit.
func (s *SimulatedAnnealing) GetUnattachedVertices() map[model.CircuitVertex]bool {
	return make(map[model.CircuitVertex]bool)
}
// SetSeed sets the seed used by the SimulatedAnnealing for random number
// generation. This is to facilitate consistent unit tests.
func (s *SimulatedAnnealing) SetSeed(seed int64) {
	s.random = rand.New(rand.NewSource(seed))
}
// SetTemperatureFunction updates the function used in each iteration of
// Update() to calculate the temperature. By default SimulatedAnnealing uses
// a linear temperature function (CalculateTemperatureLinear), but this
// package also provides a geometric temperature function, and enables
// custom temperature functions.
func (s *SimulatedAnnealing) SetTemperatureFunction(temperatureFunction func(currentIteration float64, maxIterations float64) float64) {
	s.temperatureFunction = temperatureFunction
}
// Update performs one annealing iteration: two vertices are chosen at
// random, the change in circuit length from swapping them is computed, and
// the swap is applied if it shortens the circuit or passes the
// temperature-based acceptance test. The vertexToAdd and edgeToSplit
// parameters are ignored; they exist to satisfy the model.Circuit interface.
func (s *SimulatedAnnealing) Update(vertexToAdd model.CircuitVertex, edgeToSplit model.CircuitEdge) {
	if s.numIterations >= s.maxIterations {
		return
	}
	s.numIterations++

	// This section could be included in FindNextVertexAndEdge, but it is more performant to have the indices here.
	// Select two random vertices to check if they should be swapped.
	numVertices := len(s.circuit)
	indexA := s.random.Intn(numVertices)
	indexAPrev := (indexA + numVertices - 1) % numVertices
	indexANext := (indexA + 1) % numVertices

	var indexB int
	if s.preferCloseNeighbors {
		indexB = s.getRandomNeighbor(indexA)
	} else {
		// Select a random vertex for B, but don't allow it to be the same as A.
		indexB = s.random.Intn(numVertices)
		for indexA == indexB {
			indexB = s.random.Intn(numVertices)
		}
	}
	indexBPrev := (indexB + numVertices - 1) % numVertices
	indexBNext := (indexB + 1) % numVertices

	// Calculate the effect swapping the two vertices will have on the length of the circuit.
	lengthACurrent := s.circuit[indexAPrev].DistanceTo(s.circuit[indexA]) + s.circuit[indexA].DistanceTo(s.circuit[indexANext])
	lengthANew := s.circuit[indexAPrev].DistanceTo(s.circuit[indexB]) + s.circuit[indexB].DistanceTo(s.circuit[indexANext])

	lengthBCurrent := s.circuit[indexBPrev].DistanceTo(s.circuit[indexB]) + s.circuit[indexB].DistanceTo(s.circuit[indexBNext])
	lengthBNew := s.circuit[indexBPrev].DistanceTo(s.circuit[indexA]) + s.circuit[indexA].DistanceTo(s.circuit[indexBNext])

	// If the two vertices are adjacent, need to add the length of the edge A->B to each new length.
	if indexA == indexBPrev || indexA == indexBNext {
		distAToB := s.circuit[indexA].DistanceTo(s.circuit[indexB])
		lengthANew += distAToB
		lengthBNew += distAToB
	}

	edgeADelta := lengthANew - lengthACurrent
	edgeBDelta := lengthBNew - lengthBCurrent

	// Scale delta so that it has a meaningful value in the acceptance function, since cooridinates from -100 to +100 will produce different deltas than coordinates from -10000 to +10000.
	// The temperature is always between 0 and 1, decreasing from near 1 to near 0 as annealing progresses.
	// The delta could be limited between 0 and 1 as well, so that all posibilities are feasible at a temperature of 1.
	// However, we know that any intersecting edges are not optimal, so we can optimize this by allowing the delta to exceed 1 in bad use cases.
	// The worst case delta is is if B and A are the farthest vertices from each other and both go from their closest vertices to their farthest vertices, and the best case is the reverse.
	// This worst case is guaranteed to be less than 4*|B-A|, but we will use |B-A| since it is okay if we ignore the possibilities that are close to the worst case.
	deltaIncrease := (edgeADelta + edgeBDelta) / s.farthestDistance
	temperature := s.temperatureFunction(s.numIterations, s.maxIterations)

	// Swap the two vertices if it would decrease the size of the circuit, or if the increase is within the acceptable bounds defined by the acceptance function.
	if testValue, acceptanceThreshold := s.random.Float64(), math.Exp(-deltaIncrease/temperature); deltaIncrease <= 0.0 || testValue < acceptanceThreshold {
		s.circuit[indexA], s.circuit[indexB] = s.circuit[indexB], s.circuit[indexA]
	}
}
// getRandomNeighbor weighs vertices based on their distance from the vertex
// at the supplied index, then randomly selects a vertex based on the weights,
// so that closer vertices are more likely to be chosen.
// NOTE(review): if two vertices could share a location, DistanceTo would
// return 0 and the weight would be +Inf — confirm duplicate points cannot
// occur in the circuit.
func (s *SimulatedAnnealing) getRandomNeighbor(index int) (neighborIndex int) {
	// len-1 to ignore the vertex at the supplied index.
	weights := make([]*weightedVertex, len(s.circuit)-1)
	totalWeight := 0.0

	for i, weightIndex := 0, 0; i < len(s.circuit); i++ {
		if i != index {
			// Invert the distance between the points, so that closer points have larger weights than farther points (e.g. 1/5 > 1/500).
			weight := 1.0 / s.circuit[index].DistanceTo(s.circuit[i])
			weights[weightIndex] = &weightedVertex{
				weight:      weight,
				vertexIndex: i,
			}
			totalWeight += weight
			weightIndex++
		}
	}

	// Select a random index by weight:
	// 1) Select a random value between [0,totalWeight)
	// 2) Iterate through the weighted values, subtracting their weight from the random weight
	// 3) Once the random weight has a value of 0 or less, select the vertex with the weight that caused it to transition to 0 or negative.
	randomWeight := s.random.Float64() * totalWeight
	for _, w := range weights {
		randomWeight -= w.weight
		if randomWeight <= 0 {
			return w.vertexIndex
		}
	}

	// This should never be reached, since the random weight should never be greater than the total weight.
	return weights[len(weights)-1].vertexIndex
}
// weightedVertex pairs a circuit index with its selection weight
// (the inverse of its distance from the reference vertex).
type weightedVertex struct {
	vertexIndex int
	weight      float64
}
// CalculateTemperatureGeometric calculates temperature according to the
// equation t'=t*X, so that it decreases geometrically as the model iterates.
// For this implementation we are using 5.0 since it produces selective
// acceptance without making the later iterations useless:
// * 1.0 -> 0.99^99=0.3697, 0.999^999=0.3681,
// * 5.0 -> 0.95^99=0.0062, 0.995^999=0.0066,
// * 10.0 -> 0.90^99=0.000059, 0.990^999=0.000044,
func CalculateTemperatureGeometric(currentIteration float64, maxIterations float64) float64 {
	decayBase := 1.0 - 5.0/maxIterations
	return math.Pow(decayBase, currentIteration)
}
// CalculateTemperatureLinear calculates temperature according to the
// function t'=t-X, so that it decreases linearly as the model iterates.
func CalculateTemperatureLinear(currentIteration float64, maxIterations float64) float64 {
	progress := currentIteration / maxIterations
	return 1.0 - progress
}
// computeFarthestDistance returns the largest pairwise distance between any
// two vertices in the circuit; it is used to normalize length deltas in the
// acceptance test. NOTE(review): this makes one FindFarthestPoint call per
// vertex, presumably O(n^2) overall — acceptable for one-time setup; confirm.
func computeFarthestDistance(circuit []model.CircuitVertex) float64 {
	farthestDistance := 0.0
	// Find the distance between the two farthest vertices, for scaling the delta
	for _, v := range circuit {
		farthestFromV := model.FindFarthestPoint(v, circuit)
		if testDistance := farthestFromV.DistanceTo(v); testDistance > farthestDistance {
			farthestDistance = testDistance
		}
	}
	return farthestDistance
}
var _ model.Circuit = (*SimulatedAnnealing)(nil) | circuit/simulatedannealing.go | 0.864968 | 0.7586 | simulatedannealing.go | starcoder |
package mlpack
/*
#cgo CFLAGS: -I./capi -Wall
#cgo LDFLAGS: -L. -lmlpack_go_lmnn
#include <capi/lmnn.h>
#include <stdlib.h>
*/
import "C"
import "gonum.org/v1/gonum/mat"
// LmnnOptionalParam holds the optional parameters for Lmnn().
// Construct it with LmnnOptions() to get the documented defaults; see the
// Lmnn() documentation for the meaning of each field.
type LmnnOptionalParam struct {
	BatchSize      int
	Center         bool
	Distance       *mat.Dense
	K              int
	Labels         *mat.Dense
	LinearScan     bool
	MaxIterations  int
	Normalize      bool
	Optimizer      string
	Passes         int
	PrintAccuracy  bool
	Range          int
	Rank           int
	Regularization float64
	Seed           int
	StepSize       float64
	Tolerance      float64
	Verbose        bool
}
// LmnnOptions returns a new LmnnOptionalParam populated with the default
// value for every optional parameter of Lmnn().
func LmnnOptions() *LmnnOptionalParam {
	return &LmnnOptionalParam{
		BatchSize:      50,
		Center:         false,
		Distance:       nil,
		K:              1,
		Labels:         nil,
		LinearScan:     false,
		MaxIterations:  100000,
		Normalize:      false,
		Optimizer:      "amsgrad",
		Passes:         50,
		PrintAccuracy:  false,
		Range:          1,
		Rank:           0,
		Regularization: 0.5,
		Seed:           0,
		StepSize:       0.01,
		Tolerance:      1e-07,
		Verbose:        false,
	}
}
/*
This program implements Large Margin Nearest Neighbors, a distance learning
technique. The method seeks to improve k-nearest-neighbor classification on a
dataset. The method employs the strategy of reducing distance between
similar labeled data points (a.k.a target neighbors) and increasing distance
between differently labeled points (a.k.a impostors) using standard
optimization techniques over the gradient of the distance between data points.
To work, this algorithm needs labeled data. It can be given as the last row
of the input dataset (specified with "Input"), or alternatively as a separate
matrix (specified with "Labels"). Additionally, a starting point for
optimization (specified with "Distance") can be given, having (r x d)
dimensionality. Here r should satisfy 1 <= r <= d, Consequently a Low-Rank
matrix will be optimized. Alternatively, Low-Rank distance can be learned by
specifying the "Rank"parameter (A Low-Rank matrix with uniformly distributed
values will be used as initial learning point).
The program also requires number of targets neighbors to work with ( specified
with "K"). A regularization parameter can also be passed; it acts as a
trade-off between the pulling and pushing terms (specified with
"Regularization"). In addition, this implementation of LMNN includes a
parameter to decide the
interval after which impostors must be re-calculated (specified with "Range").
Output can either be the learned distance matrix (specified with "Output"), or
the transformed dataset (specified with "TransformedData"), or both.
Additionally mean-centered dataset (specified with "CenteredData") can be
accessed given mean-centering (specified with "Center") is performed on the
dataset. Accuracy on initial dataset and final transformed dataset can be
printed by specifying the "PrintAccuracy"parameter.
This implementation of LMNN uses AdaGrad, BigBatch_SGD, stochastic gradient
descent, mini-batch stochastic gradient descent, or the L_BFGS optimizer.
AdaGrad, specified by the value 'adagrad' for the parameter "Optimizer", uses
maximum of past squared gradients. It primarily on six parameters: the step
size (specified with "StepSize"), the batch size (specified with "BatchSize"),
the maximum number of passes (specified with "Passes"). Inaddition, a
normalized starting point can be used by specifying the "Normalize" parameter.
BigBatch_SGD, specified by the value 'bbsgd' for the parameter "Optimizer",
depends primarily on four parameters: the step size (specified with
"StepSize"), the batch size (specified with "BatchSize"), the maximum number
of passes (specified with "Passes"). In addition, a normalized starting point
can be used by specifying the "Normalize" parameter.
Stochastic gradient descent, specified by the value 'sgd' for the parameter
"Optimizer", depends primarily on three parameters: the step size (specified
with "StepSize"), the batch size (specified with "BatchSize"), and the maximum
number of passes (specified with "Passes"). In addition, a normalized
starting point can be used by specifying the "Normalize" parameter.
Furthermore, mean-centering can be performed on the dataset by specifying the
"Center"parameter.
The L-BFGS optimizer, specified by the value 'lbfgs' for the parameter
"Optimizer", uses a back-tracking line search algorithm to minimize a
function. The following parameters are used by L-BFGS: "MaxIterations",
"Tolerance"(the optimization is terminated when the gradient norm is below
this value). For more details on the L-BFGS optimizer, consult either the
mlpack L-BFGS documentation (in lbfgs.hpp) or the vast set of published
literature on L-BFGS. In addition, a normalized starting point can be used by
specifying the "Normalize" parameter.
By default, the AMSGrad optimizer is used.
Example - Let's say we want to learn distance on iris dataset with number of
targets as 3 using BigBatch_SGD optimizer. A simple call for the same will
look like:
// Initialize optional parameters for MlpackLmnn().
param := mlpack.MlpackLmnnOptions()
param.Labels = iris_labels
param.K = 3
param.Optimizer = "bbsgd"
_, output, _ := mlpack.MlpackLmnn(iris, param)
Another program call making use of the range & regularization parameters with a
dataset having labels as last column can be made as:
// Initialize optional parameters for MlpackLmnn().
param := mlpack.MlpackLmnnOptions()
param.K = 5
param.Range = 10
param.Regularization = 0.4
_, output, _ := mlpack.MlpackLmnn(letter_recognition, param)
Input parameters:
- input (mat.Dense): Input dataset to run LMNN on.
- BatchSize (int): Batch size for mini-batch SGD. Default value 50.
- Center (bool): Perform mean-centering on the dataset. It is useful
when the centroid of the data is far from the origin.
- Distance (mat.Dense): Initial distance matrix to be used as starting
point
- K (int): Number of target neighbors to use for each datapoint.
Default value 1.
- Labels (mat.Dense): Labels for input dataset.
- LinearScan (bool): Don't shuffle the order in which data points are
visited for SGD or mini-batch SGD.
- MaxIterations (int): Maximum number of iterations for L-BFGS (0
indicates no limit). Default value 100000.
- Normalize (bool): Use a normalized starting point for optimization.
It is useful when points are far apart, or when SGD is returning
NaN.
- Optimizer (string): Optimizer to use; 'amsgrad', 'bbsgd', 'sgd', or
'lbfgs'. Default value 'amsgrad'.
- Passes (int): Maximum number of full passes over dataset for AMSGrad,
BB_SGD and SGD. Default value 50.
- PrintAccuracy (bool): Print accuracies on initial and transformed
dataset
- Range (int): Number of iterations after which impostors needs to be
recalculated Default value 1.
- Rank (int): Rank of distance matrix to be optimized. Default value
0.
- Regularization (float64): Regularization for LMNN objective function
Default value 0.5.
- Seed (int): Random seed. If 0, 'std::time(NULL)' is used. Default
value 0.
- StepSize (float64): Step size for AMSGrad, BB_SGD and SGD (alpha).
Default value 0.01.
- Tolerance (float64): Maximum tolerance for termination of AMSGrad,
BB_SGD, SGD or L-BFGS. Default value 1e-07.
- Verbose (bool): Display informational messages and the full list of
parameters and timers at the end of execution.
Output parameters:
- centeredData (mat.Dense): Output matrix for mean-centered dataset.
- output (mat.Dense): Output matrix for learned distance matrix.
- transformedData (mat.Dense): Output matrix for transformed dataset.
*/
// Lmnn runs the mlpack LMNN (Large Margin Nearest Neighbors) binding on the
// given input dataset, forwarding every optional parameter that differs from
// its documented default, and returns (centeredData, output, transformedData).
// See the doc comment above for the meaning of each parameter.
func Lmnn(input *mat.Dense, param *LmnnOptionalParam) (*mat.Dense, *mat.Dense, *mat.Dense) {
  resetTimers()
  enableTimers()
  disableBacktrace()
  disableVerbose()
  restoreSettings("Large Margin Nearest Neighbors (LMNN)")
  // Detect if the parameter was passed; set if so.
  gonumToArmaMat("input", input)
  setPassed("input")
  // Detect if the parameter was passed; set if so.
  if param.BatchSize != 50 {
    setParamInt("batch_size", param.BatchSize)
    setPassed("batch_size")
  }
  // Detect if the parameter was passed; set if so.
  if param.Center != false {
    setParamBool("center", param.Center)
    setPassed("center")
  }
  // Detect if the parameter was passed; set if so.
  if param.Distance != nil {
    gonumToArmaMat("distance", param.Distance)
    setPassed("distance")
  }
  // Detect if the parameter was passed; set if so.
  if param.K != 1 {
    setParamInt("k", param.K)
    setPassed("k")
  }
  // Detect if the parameter was passed; set if so.
  if param.Labels != nil {
    gonumToArmaUrow("labels", param.Labels)
    setPassed("labels")
  }
  // Detect if the parameter was passed; set if so.
  if param.LinearScan != false {
    setParamBool("linear_scan", param.LinearScan)
    setPassed("linear_scan")
  }
  // Detect if the parameter was passed; set if so.
  if param.MaxIterations != 100000 {
    setParamInt("max_iterations", param.MaxIterations)
    setPassed("max_iterations")
  }
  // Detect if the parameter was passed; set if so.
  if param.Normalize != false {
    setParamBool("normalize", param.Normalize)
    setPassed("normalize")
  }
  // Detect if the parameter was passed; set if so.
  if param.Optimizer != "amsgrad" {
    setParamString("optimizer", param.Optimizer)
    setPassed("optimizer")
  }
  // Detect if the parameter was passed; set if so.
  if param.Passes != 50 {
    setParamInt("passes", param.Passes)
    setPassed("passes")
  }
  // Detect if the parameter was passed; set if so.
  if param.PrintAccuracy != false {
    setParamBool("print_accuracy", param.PrintAccuracy)
    setPassed("print_accuracy")
  }
  // Detect if the parameter was passed; set if so.
  if param.Range != 1 {
    setParamInt("range", param.Range)
    setPassed("range")
  }
  // Detect if the parameter was passed; set if so.
  if param.Rank != 0 {
    setParamInt("rank", param.Rank)
    setPassed("rank")
  }
  // Detect if the parameter was passed; set if so.
  if param.Regularization != 0.5 {
    setParamDouble("regularization", param.Regularization)
    setPassed("regularization")
  }
  // Detect if the parameter was passed; set if so.
  if param.Seed != 0 {
    setParamInt("seed", param.Seed)
    setPassed("seed")
  }
  // Detect if the parameter was passed; set if so.
  if param.StepSize != 0.01 {
    setParamDouble("step_size", param.StepSize)
    setPassed("step_size")
  }
  // Detect if the parameter was passed; set if so.
  if param.Tolerance != 1e-07 {
    setParamDouble("tolerance", param.Tolerance)
    setPassed("tolerance")
  }
  // Detect if the parameter was passed; set if so.
  if param.Verbose != false {
    setParamBool("verbose", param.Verbose)
    setPassed("verbose")
    enableVerbose()
  }
  // Mark all output options as passed.
  setPassed("centered_data")
  setPassed("output")
  setPassed("transformed_data")
  // Call the mlpack program.
  C.mlpackLmnn()
  // Initialize result variable and get output.
  var centeredDataPtr mlpackArma
  centeredData := centeredDataPtr.armaToGonumMat("centered_data")
  var outputPtr mlpackArma
  output := outputPtr.armaToGonumMat("output")
  var transformedDataPtr mlpackArma
  transformedData := transformedDataPtr.armaToGonumMat("transformed_data")
  // Clear settings.
  clearSettings()
  // Return output(s).
  return centeredData, output, transformedData
}
package hyperscan
import (
"bufio"
"fmt"
"io"
"strconv"
"strings"
"github.com/flier/gohs/internal/hs"
)
// ExprInfo contains information related to an expression.
type ExprInfo = hs.ExprInfo

// ExtFlag values are used in ExprExt.Flags to indicate which fields are used.
type ExtFlag = hs.ExtFlag

const (
	// ExtMinOffset is a flag indicating that the ExprExt.MinOffset field is used.
	ExtMinOffset ExtFlag = hs.ExtMinOffset
	// ExtMaxOffset is a flag indicating that the ExprExt.MaxOffset field is used.
	ExtMaxOffset ExtFlag = hs.ExtMaxOffset
	// ExtMinLength is a flag indicating that the ExprExt.MinLength field is used.
	ExtMinLength ExtFlag = hs.ExtMinLength
	// ExtEditDistance is a flag indicating that the ExprExt.EditDistance field is used.
	ExtEditDistance ExtFlag = hs.ExtEditDistance
	// ExtHammingDistance is a flag indicating that the ExprExt.HammingDistance field is used.
	ExtHammingDistance ExtFlag = hs.ExtHammingDistance
)

// Ext is an option that sets additional parameters on an ExprExt.
type Ext func(ext *ExprExt)
// MinOffset returns an option that sets the minimum end offset in the data
// stream at which this expression should match successfully.
func MinOffset(n uint64) Ext {
	return func(e *ExprExt) {
		e.MinOffset = n
		e.Flags |= ExtMinOffset
	}
}

// MaxOffset returns an option that sets the maximum end offset in the data
// stream at which this expression should match successfully.
func MaxOffset(n uint64) Ext {
	return func(e *ExprExt) {
		e.MaxOffset = n
		e.Flags |= ExtMaxOffset
	}
}

// MinLength returns an option that sets the minimum match length (from start
// to end) required to successfully match this expression.
func MinLength(n uint64) Ext {
	return func(e *ExprExt) {
		e.MinLength = n
		e.Flags |= ExtMinLength
	}
}

// EditDistance returns an option allowing patterns to approximately match
// within this edit distance.
func EditDistance(n uint32) Ext {
	return func(e *ExprExt) {
		e.EditDistance = n
		e.Flags |= ExtEditDistance
	}
}

// HammingDistance returns an option allowing patterns to approximately match
// within this Hamming distance.
func HammingDistance(n uint32) Ext {
	return func(e *ExprExt) {
		e.HammingDistance = n
		e.Flags |= ExtHammingDistance
	}
}
// ExprExt is a structure containing additional parameters related to an expression.
type ExprExt hs.ExprExt

// NewExprExt builds an ExprExt from the given options.
// It returns nil when no options are supplied.
func NewExprExt(exts ...Ext) *ExprExt {
	if len(exts) == 0 {
		return nil
	}
	return new(ExprExt).With(exts...)
}
// With applies the given options to the ExprExt and returns it.
func (ext *ExprExt) With(exts ...Ext) *ExprExt {
	for i := range exts {
		exts[i](ext)
	}
	return ext
}
// String renders the set fields as "{key=value,...}", the same form that
// ParseExprExt accepts.
func (ext *ExprExt) String() string {
	type field struct {
		flag  ExtFlag
		name  string
		value uint64
	}
	// The 32-bit distances are widened to uint64; decimal formatting is unchanged.
	fields := []field{
		{ExtMinOffset, "min_offset", ext.MinOffset},
		{ExtMaxOffset, "max_offset", ext.MaxOffset},
		{ExtMinLength, "min_length", ext.MinLength},
		{ExtEditDistance, "edit_distance", uint64(ext.EditDistance)},
		{ExtHammingDistance, "hamming_distance", uint64(ext.HammingDistance)},
	}
	var values []string
	for _, f := range fields {
		if ext.Flags&f.flag == f.flag {
			values = append(values, fmt.Sprintf("%s=%d", f.name, f.value))
		}
	}
	return "{" + strings.Join(values, ",") + "}"
}
const keyValuePair = 2

// ParseExprExt parses additional expression parameters from a string of the
// form "{key=value,key=value}"; the surrounding braces are optional.
// Recognized keys are min_offset, max_offset, min_length, edit_distance and
// hamming_distance. Segments without '=' (and unknown keys) are skipped, but
// a non-numeric value yields an error with the ExprExt parsed so far.
func ParseExprExt(s string) (ext *ExprExt, err error) {
	ext = new(ExprExt)

	if strings.HasPrefix(s, "{") && strings.HasSuffix(s, "}") {
		s = strings.TrimSuffix(strings.TrimPrefix(s, "{"), "}")
	}

	for _, pair := range strings.Split(s, ",") {
		parts := strings.SplitN(pair, "=", keyValuePair)
		if len(parts) != keyValuePair {
			continue
		}

		// Be lenient about whitespace, e.g. "{min_offset = 1, max_offset = 2}".
		key := strings.ToLower(strings.TrimSpace(parts[0]))
		value := strings.TrimSpace(parts[1])

		var n int
		if n, err = strconv.Atoi(value); err != nil {
			return
		}

		switch key {
		case "min_offset":
			ext.Flags |= ExtMinOffset
			ext.MinOffset = uint64(n)
		case "max_offset":
			ext.Flags |= ExtMaxOffset
			ext.MaxOffset = uint64(n)
		case "min_length":
			ext.Flags |= ExtMinLength
			ext.MinLength = uint64(n)
		case "edit_distance":
			ext.Flags |= ExtEditDistance
			ext.EditDistance = uint32(n)
		case "hamming_distance":
			ext.Flags |= ExtHammingDistance
			ext.HammingDistance = uint32(n)
		}
	}

	return // nolint: nakedret
}
// Pattern is a matching pattern.
// nolint: golint,revive,stylecheck
type Pattern struct {
	Expression string      // The expression to parse.
	Flags      CompileFlag // Flags which modify the behaviour of the expression.
	Id         int         // The ID number to be associated with the corresponding pattern
	info       *ExprInfo   // cached expression info; populated lazily by Info()
	ext        *ExprExt    // additional parameters; nil until set or queried via Ext()
}
// NewPattern builds a pattern from a regular expression, compile flags and
// optional extended parameters.
func NewPattern(expr string, flags CompileFlag, exts ...Ext) *Pattern {
	p := &Pattern{Expression: expr, Flags: flags}
	p.ext = NewExprExt(exts...)
	return p
}
// Pattern converts to the internal hs.Pattern representation.
func (p *Pattern) Pattern() *hs.Pattern {
	converted := hs.Pattern{
		Expr:  p.Expression,
		Flags: p.Flags,
		ID:    p.Id,
		Ext:   (*hs.ExprExt)(p.ext),
	}
	return &converted
}

// Patterns wraps the pattern in a single-element slice.
func (p *Pattern) Patterns() []*hs.Pattern {
	list := make([]*hs.Pattern, 1)
	list[0] = p.Pattern()
	return list
}
// IsValid reports whether the pattern contains a valid regular expression.
func (p *Pattern) IsValid() bool {
	if _, err := p.Info(); err != nil {
		return false
	}
	return true
}

// Info provides information about the regular expression, computing and
// caching it on first use.
func (p *Pattern) Info() (*ExprInfo, error) {
	if p.info != nil {
		return p.info, nil
	}
	info, err := hs.ExpressionInfo(p.Expression, p.Flags)
	if err != nil {
		return nil, err // nolint: wrapcheck
	}
	p.info = info
	return info, nil
}
// WithExt sets additional parameters related to the expression and returns
// the pattern for chaining.
func (p *Pattern) WithExt(exts ...Ext) *Pattern {
	ext := p.ext
	if ext == nil {
		ext = new(ExprExt)
		p.ext = ext
	}
	ext.With(exts...)
	return p
}

// Ext provides additional parameters related to the expression, querying and
// caching them (together with the expression info) on first use.
func (p *Pattern) Ext() (*ExprExt, error) {
	if p.ext != nil {
		return p.ext, nil
	}
	ext, info, err := hs.ExpressionExt(p.Expression, p.Flags)
	if err != nil {
		return nil, err // nolint: wrapcheck
	}
	p.ext = (*ExprExt)(ext)
	p.info = info
	return p.ext, nil
}
// String renders the pattern in the "<id>:/<expression>/<flags>{ext}" form
// accepted by ParsePattern.
func (p *Pattern) String() string {
	s := fmt.Sprintf("/%s/%s", p.Expression, p.Flags)
	if p.Id > 0 {
		s = strconv.Itoa(p.Id) + ":" + s
	}
	if p.ext != nil {
		s += p.ext.String()
	}
	return s
}
/*
ParsePattern parses a pattern from a formatted string:

	<integer id>:/<expression>/<flags>

For example, the following pattern will match `test` in the caseless and multi-lines mode:

	/test/im
*/
func ParsePattern(s string) (*Pattern, error) {
	var p Pattern
	// Split off an optional "<id>:" prefix; it is only honoured when followed
	// by a "/.../"-delimited expression.
	i := strings.Index(s, ":/")
	j := strings.LastIndex(s, "/")
	if i > 0 && j > i+1 {
		id, err := strconv.Atoi(s[:i])
		if err != nil {
			return nil, fmt.Errorf("pattern id `%s`, %w", s[:i], ErrInvalid)
		}
		p.Id = id
		s = s[i+1:]
	}
	if n := strings.LastIndex(s, "/"); n > 1 && strings.HasPrefix(s, "/") {
		p.Expression = s[1:n]
		s = s[n+1:]
		// An optional "{...}" suffix after the flags carries extended parameters.
		if n = strings.Index(s, "{"); n > 0 && strings.HasSuffix(s, "}") {
			ext, err := ParseExprExt(s[n:])
			if err != nil {
				return nil, fmt.Errorf("expression extensions `%s`, %w", s[n:], err)
			}
			p.ext = ext
			s = s[:n]
		}
		flags, err := ParseCompileFlag(s)
		if err != nil {
			return nil, fmt.Errorf("pattern flags `%s`, %w", s, err)
		}
		p.Flags = flags
	} else {
		// No "/.../" delimiters: treat the whole string as a bare expression.
		p.Expression = s
	}
	// Validate the expression eagerly and cache the result for Info().
	info, err := hs.ExpressionInfo(p.Expression, p.Flags)
	if err != nil {
		return nil, fmt.Errorf("pattern `%s`, %w", p.Expression, err)
	}
	p.info = info
	return &p, nil
}
// Patterns is a set of matching patterns.
type Patterns []*Pattern

// ParsePatterns parses the lines read from r as a set of patterns.
// Blank lines and lines starting with '#' are skipped.
func ParsePatterns(r io.Reader) (Patterns, error) {
	var patterns Patterns
	scanner := bufio.NewScanner(r)
	for scanner.Scan() {
		line := strings.TrimSpace(scanner.Text())
		switch {
		case line == "":
			continue // skip empty line
		case strings.HasPrefix(line, "#"):
			continue // skip comment
		}
		p, err := ParsePattern(line)
		if err != nil {
			return nil, err
		}
		patterns = append(patterns, p)
	}
	return patterns, nil
}
func (p Patterns) Patterns() (r []*hs.Pattern) {
r = make([]*hs.Pattern, len(p))
for i, pat := range p {
r[i] = pat.Pattern()
}
return
} | hyperscan/pattern.go | 0.753693 | 0.543227 | pattern.go | starcoder |
package storage
import (
"math"
"github.com/flowmatters/openwater-core/data"
"github.com/flowmatters/openwater-core/models/routing"
)
/* OW-SPEC
StorageDissolvedDecay:
inputs:
inflowMass: kg.s^-1
inflow: m^3.s^-1
outflow: m^3.s^-1
storageVolume: m^3
states:
storedMass: kg
parameters:
DeltaT: '[1,86400] Timestep, default=86400'
doStorageDecay:
annualReturnInterval:
bankFullFlow:
medianFloodResidenceTime:
outputs:
decayedMass: kg
outflowMass: kg.s^-1
implementation:
function: storageDissolvedDecay
type: scalar
lang: go
outputs: params
init:
zero: true
lang: go
tags:
storage, sediment
*/
func storageDissolvedDecay(inflowMass, storageInflow, storageOutflow, storageVolume data.ND1Float64, // inputs
initialStoredMass float64,
deltaT, doStorageDecay, annualReturnInterval, bankFullFlow, medianFloodResidenceTime float64,
decayedMass, outflowMass data.ND1Float64) (storedMass float64) {
if doStorageDecay < 0.5 {
storedMass = routing.LumpedConstituentTransport(
inflowMass, nil, storageOutflow, storageVolume,
initialStoredMass,
0.0, 0.0, deltaT,
outflowMass)
return
}
storedMass = initialStoredMass
nDays := inflowMass.Len1()
idx := []int{0}
for i := 0; i < nDays; i++ {
idx[0] = i
upstreamFlowMass := inflowMass.Get(idx) * deltaT
storageVol := storageVolume.Get(idx)
outflowRate := storageOutflow.Get(idx)
availLoadForOutflow := 0.0
dailyDecayedConstituentLoad := 0.0
if outflowRate < bankFullFlow {
dailyDecayedConstituentLoad = storedMass
availLoadForOutflow = upstreamFlowMass
} else {
totalConstsituentLoad := upstreamFlowMass + storedMass
if medianFloodResidenceTime <= 0 {
dailyDecayedConstituentLoad = 0
availLoadForOutflow = upstreamFlowMass + storedMass
} else {
propLost := math.Min(1, medianFloodResidenceTime/5)
dailyDecayedConstituentLoad = propLost * totalConstsituentLoad
availLoadForOutflow = totalConstsituentLoad - dailyDecayedConstituentLoad
}
}
concentration := availLoadForOutflow / storageVol
constituentRateInOutflow := concentration * outflowRate
outflowMass.Set(idx, constituentRateInOutflow)
decayedMass.Set(idx, dailyDecayedConstituentLoad)
storedMass -= dailyDecayedConstituentLoad
storedMass -= deltaT * constituentRateInOutflow
}
return
} | models/storage/dissolved_decay.go | 0.656218 | 0.443962 | dissolved_decay.go | starcoder |
package msgraph
// RatingGermanyTelevisionType undocumented
type RatingGermanyTelevisionType string

const (
	// RatingGermanyTelevisionTypeVAllAllowed undocumented
	RatingGermanyTelevisionTypeVAllAllowed RatingGermanyTelevisionType = "AllAllowed"
	// RatingGermanyTelevisionTypeVAllBlocked undocumented
	RatingGermanyTelevisionTypeVAllBlocked RatingGermanyTelevisionType = "AllBlocked"
	// RatingGermanyTelevisionTypeVGeneral undocumented
	RatingGermanyTelevisionTypeVGeneral RatingGermanyTelevisionType = "General"
	// RatingGermanyTelevisionTypeVAgesAbove6 undocumented
	RatingGermanyTelevisionTypeVAgesAbove6 RatingGermanyTelevisionType = "AgesAbove6"
	// RatingGermanyTelevisionTypeVAgesAbove12 undocumented
	RatingGermanyTelevisionTypeVAgesAbove12 RatingGermanyTelevisionType = "AgesAbove12"
	// RatingGermanyTelevisionTypeVAgesAbove16 undocumented
	RatingGermanyTelevisionTypeVAgesAbove16 RatingGermanyTelevisionType = "AgesAbove16"
	// RatingGermanyTelevisionTypeVAdults undocumented
	RatingGermanyTelevisionTypeVAdults RatingGermanyTelevisionType = "Adults"
)

// RatingGermanyTelevisionTypePAllAllowed returns a pointer to RatingGermanyTelevisionTypeVAllAllowed
func RatingGermanyTelevisionTypePAllAllowed() *RatingGermanyTelevisionType {
	value := RatingGermanyTelevisionTypeVAllAllowed
	return &value
}

// RatingGermanyTelevisionTypePAllBlocked returns a pointer to RatingGermanyTelevisionTypeVAllBlocked
func RatingGermanyTelevisionTypePAllBlocked() *RatingGermanyTelevisionType {
	value := RatingGermanyTelevisionTypeVAllBlocked
	return &value
}

// RatingGermanyTelevisionTypePGeneral returns a pointer to RatingGermanyTelevisionTypeVGeneral
func RatingGermanyTelevisionTypePGeneral() *RatingGermanyTelevisionType {
	value := RatingGermanyTelevisionTypeVGeneral
	return &value
}

// RatingGermanyTelevisionTypePAgesAbove6 returns a pointer to RatingGermanyTelevisionTypeVAgesAbove6
func RatingGermanyTelevisionTypePAgesAbove6() *RatingGermanyTelevisionType {
	value := RatingGermanyTelevisionTypeVAgesAbove6
	return &value
}

// RatingGermanyTelevisionTypePAgesAbove12 returns a pointer to RatingGermanyTelevisionTypeVAgesAbove12
func RatingGermanyTelevisionTypePAgesAbove12() *RatingGermanyTelevisionType {
	value := RatingGermanyTelevisionTypeVAgesAbove12
	return &value
}

// RatingGermanyTelevisionTypePAgesAbove16 returns a pointer to RatingGermanyTelevisionTypeVAgesAbove16
func RatingGermanyTelevisionTypePAgesAbove16() *RatingGermanyTelevisionType {
	value := RatingGermanyTelevisionTypeVAgesAbove16
	return &value
}

// RatingGermanyTelevisionTypePAdults returns a pointer to RatingGermanyTelevisionTypeVAdults
func RatingGermanyTelevisionTypePAdults() *RatingGermanyTelevisionType {
	value := RatingGermanyTelevisionTypeVAdults
	return &value
}
package function
import (
"fmt"
"regexp"
"strconv"
"time"
"github.com/square/metrics/api"
)
// Value is the result of evaluating an expression.
// They can be floating point values, strings, or series lists.
// Each ToXxx method either converts the value to the requested type or
// reports a ConversionFailure describing the unsupported conversion.
type Value interface {
	ToSeriesList(api.Timerange) (api.SeriesList, *ConversionFailure)
	ToString() (string, *ConversionFailure)
	ToScalar() (float64, *ConversionFailure)
	ToScalarSet() (ScalarSet, *ConversionFailure)
	ToDuration() (time.Duration, *ConversionFailure)
}

// ConversionFailure records an unsupported conversion between two value types.
type ConversionFailure struct {
	From string // the original data type
	To   string // the type that it attempted to convert to
}
// WithContext adds enough context to make the ConversionFailure into an error.
func (c *ConversionFailure) WithContext(context string) ConversionError {
	var err ConversionError
	err.From = c.From
	err.To = c.To
	err.Context = context
	return err
}
// ConversionError represents an error converting between two items of different types.
type ConversionError struct {
	From    string // the original data type
	To      string // the type that attempted to convert to
	Context string // a short string representation of the value
}

// Error gives a readable description of the error.
func (e ConversionError) Error() string {
	return fmt.Sprintf("cannot convert %v (type %v) to type %v", e.Context, e.From, e.To)
}
// A SeriesListValue holds a SeriesList.
type SeriesListValue api.SeriesList

// ToSeriesList is an identity conversion that lets SeriesListValue implement Value.
func (list SeriesListValue) ToSeriesList(time api.Timerange) (api.SeriesList, *ConversionFailure) {
	return api.SeriesList(list), nil
}

// ToString always fails: a series list has no string form.
func (list SeriesListValue) ToString() (string, *ConversionFailure) {
	return "", &ConversionFailure{From: "series list", To: "string"}
}

// ToScalar always fails: a series list has no scalar form.
func (list SeriesListValue) ToScalar() (float64, *ConversionFailure) {
	return 0, &ConversionFailure{From: "series list", To: "scalar"}
}

// ToScalarSet always fails: a series list has no scalar-set form.
func (list SeriesListValue) ToScalarSet() (ScalarSet, *ConversionFailure) {
	return nil, &ConversionFailure{From: "series list", To: "scalar set"}
}

// ToDuration always fails: a series list has no duration form.
func (list SeriesListValue) ToDuration() (time.Duration, *ConversionFailure) {
	return 0, &ConversionFailure{From: "series list", To: "duration"}
}
// A StringValue holds a string
type StringValue string

// ToSeriesList always fails: a string has no series-list form.
func (value StringValue) ToSeriesList(time api.Timerange) (api.SeriesList, *ConversionFailure) {
	return api.SeriesList{}, &ConversionFailure{From: "string", To: "SeriesList"}
}

// ToString unwraps the underlying string.
func (value StringValue) ToString() (string, *ConversionFailure) {
	return string(value), nil
}

// ToScalar always fails: a string has no scalar form.
func (value StringValue) ToScalar() (float64, *ConversionFailure) {
	return 0, &ConversionFailure{From: "string", To: "scalar"}
}

// ToScalarSet always fails: a string has no scalar-set form.
func (value StringValue) ToScalarSet() (ScalarSet, *ConversionFailure) {
	return nil, &ConversionFailure{From: "string", To: "scalar set"}
}

// ToDuration always fails: a string has no duration form.
func (value StringValue) ToDuration() (time.Duration, *ConversionFailure) {
	return 0, &ConversionFailure{From: "string", To: "duration"}
}
// A ScalarValue holds a float and can be converted to a serieslist
type ScalarValue float64

// ToSeriesList converts the scalar into a single constant-valued series
// spanning the requested timerange.
func (value ScalarValue) ToSeriesList(timerange api.Timerange) (api.SeriesList, *ConversionFailure) {
	slots := timerange.Slots()
	series := make([]float64, slots)
	for i := 0; i < slots; i++ {
		series[i] = float64(value)
	}
	return api.SeriesList{
		Series: []api.Timeseries{{Values: series, TagSet: api.NewTagSet()}},
	}, nil
}

// ToString always fails: scalars are not converted to strings.
func (value ScalarValue) ToString() (string, *ConversionFailure) {
	return "", &ConversionFailure{From: "scalar", To: "string"}
}

// ToScalar unwraps the underlying float.
func (value ScalarValue) ToScalar() (float64, *ConversionFailure) {
	return float64(value), nil
}

// ToScalarSet wraps the scalar in a one-element set with an empty tag set.
func (value ScalarValue) ToScalarSet() (ScalarSet, *ConversionFailure) {
	return ScalarSet{
		TaggedScalar{
			Value:  float64(value),
			TagSet: api.TagSet{},
		},
	}, nil
}

// ToDuration always fails: scalars cannot be converted to durations.
func (value ScalarValue) ToDuration() (time.Duration, *ConversionFailure) {
	return 0, &ConversionFailure{From: "scalar", To: "duration"}
}
// DurationValue is a duration with a (usually) human-written name.
type DurationValue struct {
	name     string
	duration time.Duration
}

// NewDurationValue creates a duration value with the given name and duration.
func NewDurationValue(name string, duration time.Duration) DurationValue {
	return DurationValue{name: name, duration: duration}
}

// ToSeriesList always fails: a duration has no series-list form.
func (value DurationValue) ToSeriesList(timerange api.Timerange) (api.SeriesList, *ConversionFailure) {
	return api.SeriesList{}, &ConversionFailure{From: "duration", To: "SeriesList"}
}

// ToString always fails: a duration is not converted to a string.
func (value DurationValue) ToString() (string, *ConversionFailure) {
	return "", &ConversionFailure{From: "duration", To: "string"}
}

// ToScalar always fails: a duration has no scalar form.
func (value DurationValue) ToScalar() (float64, *ConversionFailure) {
	return 0, &ConversionFailure{From: "duration", To: "scalar"}
}

// ToScalarSet always fails: a duration has no scalar-set form.
func (value DurationValue) ToScalarSet() (ScalarSet, *ConversionFailure) {
	return nil, &ConversionFailure{From: "duration", To: "scalar set"}
}

// ToDuration unwraps the underlying duration.
func (value DurationValue) ToDuration() (time.Duration, *ConversionFailure) {
	return value.duration, nil
}
var durationRegexp = regexp.MustCompile(`^([+-]?[0-9]+)([smhdwMy]|ms|hr|mo|yr)$`)

// StringToDuration parses strings into durations by examining their suffixes.
// Months are treated as 30 days and years as 365 days.
func StringToDuration(timeString string) (time.Duration, error) {
	matches := durationRegexp.FindStringSubmatch(timeString)
	if matches == nil {
		return -1, fmt.Errorf("expected duration to be of the form `%s`", durationRegexp.String())
	}
	count, err := strconv.ParseInt(matches[1], 10, 0)
	if err != nil {
		return -1, err
	}
	var unit time.Duration
	switch matches[2] {
	case "ms":
		unit = time.Millisecond
	case "s":
		unit = time.Second
	case "m":
		unit = time.Minute
	case "h", "hr":
		unit = time.Hour
	case "d":
		unit = 24 * time.Hour
	case "w":
		unit = 7 * 24 * time.Hour
	case "M", "mo":
		unit = 30 * 24 * time.Hour
	case "y", "yr":
		unit = 365 * 24 * time.Hour
	}
	return time.Duration(count) * unit, nil
}
// Node is a DP state: number of chosen substrings and their total length.
type Node struct {
	cnt, length int
}

// Interval is a 1-based [begin, end] character range.
type Interval struct {
	begin, end int
}

// ByEnd sorts intervals by ascending end position.
type ByEnd []Interval

func (a ByEnd) Len() int           { return len(a) }
func (a ByEnd) Swap(i, j int)      { a[i], a[j] = a[j], a[i] }
func (a ByEnd) Less(i, j int) bool { return a[i].end < a[j].end }

// max picks the better of two DP states: higher count wins; on a count tie
// the smaller total length wins.
func max(a, b Node) Node {
	switch {
	case a.cnt > b.cnt:
		return a
	case a.cnt < b.cnt:
		return b
	case a.length < b.length:
		return a
	default:
		return b
	}
}
// maxNumOfSubstrings returns the maximum number of non-overlapping substrings
// of s such that each substring contains all occurrences of every character
// it contains; among answers with that maximum count, total length is minimal.
func maxNumOfSubstrings(s string) []string {
	// A(i) is the best <cnt, len> achievable over the first i characters of s
	// (positions are 1-based); for equal cnt the smaller total length wins.
	// A(i) = max(A(i-1), Node{A(interval(s[i]).begin-1).cnt + 1, A(interval(s[i]).begin-1).length + len(interval(s[i]))}
	var begin, end [26]int
	n := len(s)
	// Record the 1-based first and last occurrence of each letter.
	for i := 0; i < n; i++ {
		ch := int(s[i] - 'a')
		if begin[ch] == 0 {
			begin[ch] = i+1
		}
		end[ch] = i+1
	}
	// Build one candidate interval per letter: extend its end until it is
	// closed (covers all occurrences of every letter inside it); discard it if
	// some inner letter first occurs before the interval's start.
	intervals := make([]Interval, 0)
	for i := 0; i < 26; i++ {
		if begin[i] == 0 { continue }
		interval := Interval{begin[i], end[i]}
		valid := true
		for j := interval.begin+1; j < interval.end; j++ {
			ch := int(s[j-1] - 'a')
			if begin[ch] < interval.begin {
				valid = false
				break
			}
			if end[ch] > interval.end {
				interval.end = end[ch]
			}
		}
		if valid {
			intervals = append(intervals, interval)
		}
	}
	sort.Sort(ByEnd(intervals))
	// fmt.Println(intervals)
	// DP over positions 1..n; j walks the end-sorted candidate intervals.
	A := make([]Node, n+1)
	for i, j := 1, 0; i <= n; i++ {
		A[i] = A[i-1]
		if i == intervals[j].end {
			A[i] = max(A[i], Node{
				cnt: A[intervals[j].begin-1].cnt + 1,
				length: A[intervals[j].begin-1].length + (intervals[j].end - intervals[j].begin + 1),
			})
			j++
		}
	}
	// Walk backwards to recover which intervals the optimal solution used:
	// a change between A[i-1] and A[i] means the interval ending at i was taken.
	var result []string
	for i, j := n, len(intervals)-1; i >= 1; {
		if A[i] != A[i-1] {
			for j >= 0 && i != intervals[j].end { j-- }
			result = append(result, s[intervals[j].begin-1:intervals[j].end])
			i = intervals[j].begin-1
		} else {
			i--
		}
	}
	return result
}
package bactract
import (
	"encoding/binary"
	"encoding/hex"
	"fmt"
	"math"
)
// readGeography reads the value for a geography column: it reads the stored
// size, handles NULL, and decodes what it can of the raw payload into ec.Str.
// Currently only 22-byte payloads (apparently points) are decoded to an
// EWKT-like "SRID=n;POINT(long lat)" string; longer payloads are emitted as
// "SRID=n;(<hex>)". Payloads of other sizes leave ec.Str empty.
func readGeography(r *tReader, tc TableColumn) (ec ExtractedColumn, err error) {
	fn := "readGeography"
	if debugFlag {
		debOut(fmt.Sprintf("Func %s", fn))
	}
	// Determine how many bytes to read
	ss, err := r.readStoredSize(tc, 8, 0)
	if err != nil {
		return
	}
	// Check for nulls
	if ss.isNull {
		ec.IsNull = ss.isNull
		return
	}
	// Read and translate the geography
	/* TODO: determine the actual format and translate to well known text (WKT) or something similar
	This could be:
	- point: Point ( Lat, Long, SRID )
	- linestring: Linestring ( Lat, Long, Lat, Long ), SRID
	- polygon: Polygon ( Lat, Long, Lat, Long, Lat, Long, ...), SRID
	The first 2 (possibly 4) bytes appear to be the SRID
	The 5th byte is the geometry type? Could this be the same as the WKT geometry integer code
	https://en.wikipedia.org/wiki/Well-known_text_representation_of_geometry
	Geometry types, and WKB integer codes
	| Type               | 2D   | Z    | M    | ZM   |
	| ------------------ | ---- | ---- | ---- | ---- |
	| Geometry           | 0    | 1000 | 2000 | 3000 |
	| Point              | 1    | 1001 | 2001 | 3001 |
	| LineString         | 2    | 1002 | 2002 | 3002 |
	| Polygon            | 3    | 1003 | 2003 | 3003 |
	| MultiPoint         | 4    | 1004 | 2004 | 3004 |
	| MultiLineString    | 5    | 1005 | 2005 | 3005 |
	| MultiPolygon       | 6    | 1006 | 2006 | 3006 |
	| GeometryCollection | 7    | 1007 | 2007 | 3007 |
	| CircularString     | 8    | 1008 | 2008 | 3008 |
	| CompoundCurve      | 9    | 1009 | 2009 | 3009 |
	| CurvePolygon       | 10   | 1010 | 2010 | 3010 |
	| MultiCurve         | 11   | 1011 | 2011 | 3011 |
	| MultiSurface       | 12   | 1012 | 2012 | 3012 |
	| Curve              | 13   | 1013 | 2013 | 3013 |
	| Surface            | 14   | 1014 | 2014 | 3014 |
	| PolyhedralSurface  | 15   | 1015 | 2015 | 3015 |
	| TIN                | 16   | 1016 | 2016 | 3016 |
	| Triangle           | 17   | 1017 | 2017 | 3017 |
	| Circle             | 18   | 1018 | 2018 | 3018 |
	| GeodesicString     | 19   | 1019 | 2019 | 3019 |
	| EllipticalCurve    | 20   | 1020 | 2020 | 3020 |
	| NurbsCurve         | 21   | 1021 | 2021 | 3021 |
	| Clothoid           | 22   | 1022 | 2022 | 3022 |
	| SpiralCurve        | 23   | 1023 | 2023 | 3023 |
	| CompoundSurface    | 24   | 1024 | 2024 | 3024 |
	| BrepSolid          |      | 1025 |      |      |
	| AffinePlacement    | 102  | 1102 |      |      |
	*/
	b, err := r.readBytes(fn, ss.byteCount)
	if err != nil {
		return
	}
	// Assemble the (assumed little-endian) SRID from the first four bytes.
	var srid int32
	if ss.byteCount > 4 {
		for i, sb := range stripTrailingNulls(b[0:4]) {
			srid |= int32(sb) << uint(8*i)
		}
	}
	// Currently only have data for points...
	switch {
	case ss.byteCount == 22:
		// Bytes 6..13 and 14..21 decode as two float64s, taken here as
		// lat then long. NOTE(review): %f keeps only 6 decimal places,
		// which loses coordinate precision — confirm whether %g is wanted.
		lat := tcord(b[6:14])
		long := tcord(b[14:])
		ec.Str = fmt.Sprintf("SRID=%d;POINT(%f %f)", srid, long, lat)
	case ss.byteCount > 22:
		//	case ( ss.ByteCount - 6 ) % 16 == 0: // list of points ?
		ec.Str = fmt.Sprintf("SRID=%d;(%s)", srid, hex.EncodeToString(b[6:]))
	}
	return
}
// tcord decodes an 8-byte, little-endian IEEE-754 float64 coordinate.
// The slice must hold at least 8 bytes.
func tcord(b []byte) float64 {
	// Idiomatic replacement for the previous hand-rolled byte assembly.
	return math.Float64frombits(binary.LittleEndian.Uint64(b))
}
package mlpack
/*
#cgo CFLAGS: -I./capi -Wall
#cgo LDFLAGS: -L. -lmlpack_go_krann
#include <capi/krann.h>
#include <stdlib.h>
*/
import "C"
import "gonum.org/v1/gonum/mat"
// KrannOptionalParam bundles the optional parameters accepted by Krann().
type KrannOptionalParam struct {
  Alpha float64
  FirstLeafExact bool
  InputModel *rannModel
  K int
  LeafSize int
  Naive bool
  Query *mat.Dense
  RandomBasis bool
  Reference *mat.Dense
  SampleAtLeaves bool
  Seed int
  SingleMode bool
  SingleSampleLimit int
  Tau float64
  TreeType string
  Verbose bool
}

// KrannOptions returns a KrannOptionalParam populated with the defaults
// listed in the parameter documentation below; every other field keeps its
// zero value.
func KrannOptions() *KrannOptionalParam {
  params := new(KrannOptionalParam)
  params.Alpha = 0.95
  params.LeafSize = 20
  params.SingleSampleLimit = 20
  params.Tau = 5
  params.TreeType = "kd"
  return params
}
/*
This program will calculate the k rank-approximate-nearest-neighbors of a set
of points. You may specify a separate set of reference points and query
points, or just a reference set which will be used as both the reference and
query set. You must specify the rank approximation (in %) (and optionally the
success probability).
For example, the following will return 5 neighbors from the top 0.1% of the
data (with probability 0.95) for each point in input and store the distances
in distances and the neighbors in neighbors.csv:
// Initialize optional parameters for Krann().
param := mlpack.KrannOptions()
param.Reference = input
param.K = 5
param.Tau = 0.1
distances, neighbors, _ := mlpack.Krann(param)
Note that tau must be set such that the number of points in the corresponding
percentile of the data is greater than k. Thus, if we choose tau = 0.1 with a
dataset of 1000 points and k = 5, then we are attempting to choose 5 nearest
neighbors out of the closest 1 point -- this is invalid and the program will
terminate with an error message.
The output matrices are organized such that row i and column j in the
neighbors output file corresponds to the index of the point in the reference
set which is the i'th nearest neighbor from the point in the query set with
index j. Row i and column j in the distances output file corresponds to the
distance between those two points.
Input parameters:
- Alpha (float64): The desired success probability. Default value
0.95.
- FirstLeafExact (bool): The flag to trigger sampling only after
exactly exploring the first leaf.
- InputModel (rannModel): Pre-trained kNN model.
- K (int): Number of nearest neighbors to find. Default value 0.
- LeafSize (int): Leaf size for tree building (used for kd-trees, UB
trees, R trees, R* trees, X trees, Hilbert R trees, R+ trees, R++ trees,
and octrees). Default value 20.
- Naive (bool): If true, sampling will be done without using a tree.
- Query (mat.Dense): Matrix containing query points (optional).
- RandomBasis (bool): Before tree-building, project the data onto a
random orthogonal basis.
- Reference (mat.Dense): Matrix containing the reference dataset.
- SampleAtLeaves (bool): The flag to trigger sampling at leaves.
- Seed (int): Random seed (if 0, std::time(NULL) is used). Default
value 0.
- SingleMode (bool): If true, single-tree search is used (as opposed to
dual-tree search.
- SingleSampleLimit (int): The limit on the maximum number of samples
(and hence the largest node you can approximate). Default value 20.
- Tau (float64): The allowed rank-error in terms of the percentile of
the data. Default value 5.
- TreeType (string): Type of tree to use: 'kd', 'ub', 'cover', 'r',
'x', 'r-star', 'hilbert-r', 'r-plus', 'r-plus-plus', 'oct'. Default
value 'kd'.
- Verbose (bool): Display informational messages and the full list of
parameters and timers at the end of execution.
Output parameters:
- distances (mat.Dense): Matrix to output distances into.
- neighbors (mat.Dense): Matrix to output neighbors into.
- outputModel (rannModel): If specified, the kNN model will be output
here.
*/
// Krann invokes the mlpack kRANN program through cgo; see the long comment
// above for the full parameter documentation. It returns the distances
// matrix, the neighbors matrix and the (optionally trained) model.
//
// NOTE(review): a parameter explicitly set to its default value (e.g.
// Alpha = 0.95) is indistinguishable from "not passed" and will not be
// forwarded to mlpack.
func Krann(param *KrannOptionalParam) (*mat.Dense, *mat.Dense, rannModel) {
  resetTimers()
  enableTimers()
  disableBacktrace()
  disableVerbose()
  restoreSettings("K-Rank-Approximate-Nearest-Neighbors (kRANN)")

  // Detect if the parameter was passed; set if so.
  if param.Alpha != 0.95 {
    setParamDouble("alpha", param.Alpha)
    setPassed("alpha")
  }

  // Detect if the parameter was passed; set if so.
  if param.FirstLeafExact != false {
    setParamBool("first_leaf_exact", param.FirstLeafExact)
    setPassed("first_leaf_exact")
  }

  // Detect if the parameter was passed; set if so.
  if param.InputModel != nil {
    setRANNModel("input_model", param.InputModel)
    setPassed("input_model")
  }

  // Detect if the parameter was passed; set if so.
  if param.K != 0 {
    setParamInt("k", param.K)
    setPassed("k")
  }

  // Detect if the parameter was passed; set if so.
  if param.LeafSize != 20 {
    setParamInt("leaf_size", param.LeafSize)
    setPassed("leaf_size")
  }

  // Detect if the parameter was passed; set if so.
  if param.Naive != false {
    setParamBool("naive", param.Naive)
    setPassed("naive")
  }

  // Detect if the parameter was passed; set if so.
  if param.Query != nil {
    gonumToArmaMat("query", param.Query)
    setPassed("query")
  }

  // Detect if the parameter was passed; set if so.
  if param.RandomBasis != false {
    setParamBool("random_basis", param.RandomBasis)
    setPassed("random_basis")
  }

  // Detect if the parameter was passed; set if so.
  if param.Reference != nil {
    gonumToArmaMat("reference", param.Reference)
    setPassed("reference")
  }

  // Detect if the parameter was passed; set if so.
  if param.SampleAtLeaves != false {
    setParamBool("sample_at_leaves", param.SampleAtLeaves)
    setPassed("sample_at_leaves")
  }

  // Detect if the parameter was passed; set if so.
  if param.Seed != 0 {
    setParamInt("seed", param.Seed)
    setPassed("seed")
  }

  // Detect if the parameter was passed; set if so.
  if param.SingleMode != false {
    setParamBool("single_mode", param.SingleMode)
    setPassed("single_mode")
  }

  // Detect if the parameter was passed; set if so.
  if param.SingleSampleLimit != 20 {
    setParamInt("single_sample_limit", param.SingleSampleLimit)
    setPassed("single_sample_limit")
  }

  // Detect if the parameter was passed; set if so.
  if param.Tau != 5 {
    setParamDouble("tau", param.Tau)
    setPassed("tau")
  }

  // Detect if the parameter was passed; set if so.
  if param.TreeType != "kd" {
    setParamString("tree_type", param.TreeType)
    setPassed("tree_type")
  }

  // Detect if the parameter was passed; set if so.
  if param.Verbose != false {
    setParamBool("verbose", param.Verbose)
    setPassed("verbose")
    enableVerbose()
  }

  // Mark all output options as passed.
  setPassed("distances")
  setPassed("neighbors")
  setPassed("output_model")

  // Call the mlpack program.
  C.mlpackKrann()

  // Initialize result variable and get output.
  var distancesPtr mlpackArma
  distances := distancesPtr.armaToGonumMat("distances")
  var neighborsPtr mlpackArma
  neighbors := neighborsPtr.armaToGonumUmat("neighbors")
  var outputModel rannModel
  outputModel.getRANNModel("output_model")

  // Clear settings.
  clearSettings()

  // Return output(s).
  return distances, neighbors, outputModel
}
package day3
import (
"adventofcode/utils"
"fmt"
"log"
"strconv"
"strings"
)
// Point is a 2-D integer grid coordinate on the wire panel.
type Point struct {
	X int
	Y int
}
// MaxUint is the largest value representable by uint.
const MaxUint = ^uint(0)
// MaxInt is the largest value representable by int; used as a sentinel
// for "no intersection found yet".
const MaxInt = int(MaxUint >> 1)
// Approach:
// 1. index all points in wire 1
// 2. index points in wire 2
// nextPointInDirection returns the neighbor of p one step away in the
// given direction ('U', 'D', 'L' or 'R'). An unknown direction aborts
// the program via log.Fatal.
func nextPointInDirection(p Point, direction byte) Point {
	dx, dy := 0, 0
	switch direction {
	case 'D':
		dy = -1
	case 'U':
		dy = 1
	case 'L':
		dx = -1
	case 'R':
		dx = 1
	default:
		log.Fatal("unknown point")
	}
	return Point{p.X + dx, p.Y + dy}
}
// manhattanDistance returns |p.X| + |p.Y|, the taxicab distance of p
// from the origin.
func manhattanDistance(p Point) int {
	abs := func(v int) int {
		if v < 0 {
			return -v
		}
		return v
	}
	return abs(p.X) + abs(p.Y)
}
// computeClosestIntersection traces both wires from the origin and
// returns the Manhattan distance of the wire intersection closest to the
// origin (Advent of Code 2019 day 3, part 1).
func computeClosestIntersection(wire1 []string, wire2 []string) int {
	minDistance := MaxInt
	// Record every point visited by wire1.
	wire1Points := make(map[Point]bool)
	wire1current := Point{0, 0}
	for _, w := range wire1 {
		// Each segment looks like "R75": a direction byte plus a length.
		direction := w[0]
		distance, _ := strconv.Atoi(w[1:])
		for i := 0; i < distance; i++ {
			wire1current = nextPointInDirection(wire1current, direction)
			wire1Points[wire1current] = true
		}
	}
	// Trace wire2 and test each visited point against wire1's set.
	wire2current := Point{0, 0}
	for _, w := range wire2 {
		direction := w[0]
		distance, _ := strconv.Atoi(w[1:])
		for i := 0; i < distance; i++ {
			wire2current = nextPointInDirection(wire2current, direction)
			if wire1Points[wire2current] {
				distanceFromOrigin := manhattanDistance(wire2current)
				if distanceFromOrigin < minDistance {
					minDistance = distanceFromOrigin
				}
			}
		}
	}
	return minDistance
}
// computeMinNumerOfSteps returns the smallest combined number of wire
// steps (wire1 steps + wire2 steps) needed to reach any intersection of
// the two wires (Advent of Code 2019 day 3, part 2).
func computeMinNumerOfSteps(wire1 []string, wire2 []string) int {
	best := MaxInt
	// Walk wire1, remembering the step count at which each point was
	// first visited.
	stepsAt := make(map[Point]int)
	pos := Point{0, 0}
	steps := 1
	for _, seg := range wire1 {
		dir := seg[0]
		length, _ := strconv.Atoi(seg[1:])
		for i := 0; i < length; i++ {
			pos = nextPointInDirection(pos, dir)
			if _, seen := stepsAt[pos]; !seen {
				stepsAt[pos] = steps
			}
			steps++
		}
	}
	// Walk wire2; at every intersection combine both wires' step counts.
	pos = Point{0, 0}
	steps = 1
	for _, seg := range wire2 {
		dir := seg[0]
		length, _ := strconv.Atoi(seg[1:])
		for i := 0; i < length; i++ {
			pos = nextPointInDirection(pos, dir)
			if wire1Steps, ok := stepsAt[pos]; ok {
				if total := steps + wire1Steps; total < best {
					best = total
				}
			}
			steps++
		}
	}
	return best
}
// PrintSolution reads the day-3 puzzle input from disk and prints the
// answers for both parts.
func PrintSolution() {
	lines := utils.ParseLines("./inputs/day3.txt")
	// Line 0 is wire1, line 1 is wire2; segments are comma-separated.
	distClosestIntersect := computeClosestIntersection(strings.Split(lines[0], ","), strings.Split(lines[1], ","))
	fmt.Println("Min distance intersection (Part 1): ", distClosestIntersect)
	minNumerOfSteps := computeMinNumerOfSteps(strings.Split(lines[0], ","), strings.Split(lines[1], ","))
	fmt.Println("Min wire steps intersection (Part 2): ", minNumerOfSteps)
}
package topojson
import (
geojson "github.com/paulmach/orb/geojson"
)
// Filter topology into a new topology that only contains features with the given IDs.
// Arcs are compacted: only arcs referenced by the kept features are copied,
// renumbered densely via arcMap.
func (t *Topology) Filter(ids []string) *Topology {
	result := &Topology{
		Type:      t.Type,
		Transform: t.Transform,
		BBox:      t.BBox,
		Objects:   make(map[string]*Geometry),
	}
	// arcMap maps old arc index -> new (compacted) arc index.
	arcMap := make(map[int]int)
	for _, g := range t.Objects {
		geom := remapGeometry(arcMap, ids, g)
		if geom != nil {
			result.Objects[geom.ID] = geom
		}
	}
	// Copy only the referenced arcs, in their new positions.
	result.Arcs = make([][][]float64, len(arcMap))
	for k, v := range arcMap {
		result.Arcs[v] = t.Arcs[k]
	}
	return result
}
// remapGeometry returns a copy of g with all arc indices remapped through
// arcMap, or nil if g's ID is not in ids. GeometryCollections are filtered
// recursively.
func remapGeometry(arcMap map[int]int, ids []string, g *Geometry) *Geometry {
	// Keep only geometries whose ID was requested.
	found := false
	for _, id := range ids {
		if g.ID == id {
			found = true
			break
		}
	}
	if !found {
		return nil
	}
	geom := &Geometry{
		ID:         g.ID,
		Type:       g.Type,
		Properties: g.Properties,
		BBox:       g.BBox,
	}
	switch g.Type {
	case geojson.TypePoint:
		// Points carry coordinates directly; no arcs to remap.
		geom.Point = g.Point
	case geojson.TypeMultiPoint:
		geom.MultiPoint = g.MultiPoint
	case geojson.TypeLineString:
		geom.LineString = remapLineString(arcMap, g.LineString)
	case geojson.TypeMultiLineString:
		geom.MultiLineString = remapMultiLineString(arcMap, g.MultiLineString)
	case geojson.TypePolygon:
		// A polygon is a list of rings, structurally a multi-line-string.
		geom.Polygon = remapMultiLineString(arcMap, g.Polygon)
	case geojson.TypeMultiPolygon:
		polygons := make([][][]int, len(g.MultiPolygon))
		for i, poly := range g.MultiPolygon {
			polygons[i] = remapMultiLineString(arcMap, poly)
		}
		geom.MultiPolygon = polygons
	default:
		// GeometryCollection: recurse and drop members filtered out.
		geometries := make([]*Geometry, 0)
		for _, geometry := range g.Geometries {
			out := remapGeometry(arcMap, ids, geometry)
			if out != nil {
				geometries = append(geometries, out)
			}
		}
		geom.Geometries = geometries
	}
	return geom
}
// remapLineString rewrites a list of arc indices so they reference the
// compacted arc set recorded in arcMap, assigning new dense indices on
// first use. Negated (bitwise-complemented, i.e. reversed) arcs keep
// their orientation in the output.
func remapLineString(arcMap map[int]int, in []int) []int {
	out := make([]int, len(in))
	for i, arc := range in {
		reversed := arc < 0
		key := arc
		if reversed {
			key = ^key
		}
		idx, seen := arcMap[key]
		if !seen {
			idx = len(arcMap)
			arcMap[key] = idx
		}
		if reversed {
			idx = ^idx
		}
		out[i] = idx
	}
	return out
}
// remapMultiLineString applies remapLineString to every line/ring in in,
// sharing one arcMap so indices stay consistent across lines.
func remapMultiLineString(arcMap map[int]int, in [][]int) [][]int {
	lines := make([][]int, len(in))
	for i, line := range in {
		lines[i] = remapLineString(arcMap, line)
	}
	return lines
}
package primitives
import (
"bytes"
"context"
"github.com/atomix/go-client/pkg/client/map"
"github.com/atomix/go-client/pkg/client/session"
"github.com/onosproject/onos-test/pkg/onit/env"
"github.com/stretchr/testify/assert"
"testing"
"time"
)
// TestAtomixMap : integration test.
// Exercises the distributed map primitive end-to-end against a live
// Atomix partition group: empty-state checks, Put/Get round-trip,
// iteration, and watch events (with and without replay).
func (s *TestSuite) TestAtomixMap(t *testing.T) {
	group, err := env.Database().Partitions("protocol").Connect()
	assert.NoError(t, err)
	assert.NotNil(t, group)
	m, err := group.GetMap(context.Background(), "TestAtomixMap", session.WithTimeout(5*time.Second))
	assert.NoError(t, err)
	// A freshly created map must be empty.
	ch := make(chan *_map.Entry)
	err = m.Entries(context.Background(), ch)
	assert.NoError(t, err)
	for range ch {
		assert.Fail(t, "entries found in map")
	}
	size, err := m.Len(context.Background())
	assert.NoError(t, err)
	assert.Equal(t, 0, size)
	value, err := m.Get(context.Background(), "foo")
	assert.NoError(t, err)
	assert.Nil(t, value)
	// Put returns the stored entry with a server-assigned version.
	value, err = m.Put(context.Background(), "foo", []byte("Hello world!"))
	assert.NoError(t, err)
	assert.NotNil(t, value)
	assert.Equal(t, "foo", value.Key)
	assert.True(t, bytes.Equal([]byte("Hello world!"), value.Value))
	assert.NotEqual(t, int64(0), value.Version)
	version := value.Version
	// Get must observe the same value and version.
	value, err = m.Get(context.Background(), "foo")
	assert.NoError(t, err)
	assert.NotNil(t, value)
	assert.Equal(t, "foo", value.Key)
	assert.True(t, bytes.Equal([]byte("Hello world!"), value.Value))
	assert.Equal(t, version, value.Version)
	size, err = m.Len(context.Background())
	assert.NoError(t, err)
	assert.Equal(t, 1, size)
	// Iteration yields exactly the single entry.
	ch = make(chan *_map.Entry)
	err = m.Entries(context.Background(), ch)
	assert.NoError(t, err)
	i := 0
	for kv := range ch {
		assert.Equal(t, "foo", kv.Key)
		assert.Equal(t, "Hello world!", string(kv.Value))
		i++
	}
	assert.Equal(t, 1, i)
	// A watcher with replay first receives the existing entry.
	allEvents := make(chan *_map.Event)
	err = m.Watch(context.Background(), allEvents, _map.WithReplay())
	assert.NoError(t, err)
	event := <-allEvents
	assert.NotNil(t, event)
	assert.Equal(t, "foo", event.Entry.Key)
	assert.Equal(t, []byte("Hello world!"), event.Entry.Value)
	assert.Equal(t, value.Version, event.Entry.Version)
	// A watcher without replay only sees subsequent updates.
	futureEvents := make(chan *_map.Event)
	err = m.Watch(context.Background(), futureEvents)
	assert.NoError(t, err)
	value, err = m.Put(context.Background(), "bar", []byte("Hello world!"))
	assert.NoError(t, err)
	assert.NotNil(t, value)
	assert.Equal(t, "bar", value.Key)
	assert.Equal(t, []byte("Hello world!"), value.Value)
	assert.NotEqual(t, int64(0), value.Version)
	// Both watchers observe the new Put.
	event = <-allEvents
	assert.NotNil(t, event)
	assert.Equal(t, "bar", event.Entry.Key)
	assert.Equal(t, []byte("Hello world!"), event.Entry.Value)
	assert.Equal(t, value.Version, event.Entry.Version)
	event = <-futureEvents
	assert.NotNil(t, event)
	assert.Equal(t, "bar", event.Entry.Key)
	assert.Equal(t, []byte("Hello world!"), event.Entry.Value)
	assert.Equal(t, value.Version, event.Entry.Version)
}
package rotate
import (
"math"
"math/rand"
"github.com/paulwrubel/photolum/config/geometry"
"github.com/paulwrubel/photolum/config/geometry/primitive"
"github.com/paulwrubel/photolum/config/geometry/primitive/aabb"
"github.com/paulwrubel/photolum/config/shading/material"
)
// RotationY is a primitive with a rotation around the y axis attached.
// The cached sin/cos of the angle are filled in by Setup.
type RotationY struct {
	AngleDegrees float64 `json:"angle"`
	TypeName     string  `json:"type"`
	Data         interface{} `json:"data"`
	Primitive    primitive.Primitive
	theta    float64 // AngleDegrees converted to radians
	sinTheta float64
	cosTheta float64
}
// Setup sets up some internal fields of a rotation.
// It always returns a nil error.
func (ry *RotationY) Setup() (*RotationY, error) {
	// convert to radians and save
	ry.theta = (math.Pi / 180.0) * ry.AngleDegrees
	// find sin(theta)
	ry.sinTheta = math.Sin(ry.theta)
	// find cos(theta)
	ry.cosTheta = math.Cos(ry.theta)
	return ry, nil
}
// Intersection computes the intersection of this object and a given ray if it exists.
// The incoming ray is rotated into the primitive's local space, tested
// against the wrapped primitive, and a hit normal is rotated back into
// world space.
func (ry *RotationY) Intersection(ray geometry.Ray, tMin, tMax float64, rng *rand.Rand) (*material.RayHit, bool) {
	// Rotate the ray by -theta about the y axis (into object space).
	rotatedRay := ray
	rotatedRay.Origin.X = ry.cosTheta*ray.Origin.X - ry.sinTheta*ray.Origin.Z
	rotatedRay.Origin.Z = ry.sinTheta*ray.Origin.X + ry.cosTheta*ray.Origin.Z
	rotatedRay.Direction.X = ry.cosTheta*ray.Direction.X - ry.sinTheta*ray.Direction.Z
	rotatedRay.Direction.Z = ry.sinTheta*ray.Direction.X + ry.cosTheta*ray.Direction.Z
	rayHit, wasHit := ry.Primitive.Intersection(rotatedRay, tMin, tMax, rng)
	if wasHit {
		// Rotate the hit normal back by +theta (into world space).
		unrotatedNormal := rayHit.NormalAtHit
		unrotatedNormal.X = ry.cosTheta*rayHit.NormalAtHit.X + ry.sinTheta*rayHit.NormalAtHit.Z
		unrotatedNormal.Z = -ry.sinTheta*rayHit.NormalAtHit.X + ry.cosTheta*rayHit.NormalAtHit.Z
		// Return the hit in terms of the original (unrotated) ray.
		return &material.RayHit{
			Ray:         ray,
			NormalAtHit: unrotatedNormal,
			Time:        rayHit.Time,
			U:           rayHit.U,
			V:           rayHit.V,
			Material:    rayHit.Material,
		}, true
	}
	return nil, false
}
// BoundingBox returns an AABB for this object: the axis-aligned box that
// encloses all eight corners of the wrapped primitive's box after they
// are rotated about the y axis.
func (ry *RotationY) BoundingBox(t0, t1 float64) (*aabb.AABB, bool) {
	box, ok := ry.Primitive.BoundingBox(t0, t1)
	if !ok {
		return nil, false
	}
	minPoint := geometry.PointMax
	maxPoint := geometry.PointMax.Negate()
	// Enumerate all 8 corners of the box (i, j, k select min/max per axis).
	for i := 0.0; i < 2; i++ {
		for j := 0.0; j < 2; j++ {
			for k := 0.0; k < 2; k++ {
				x := i*box.B.X + (1-i)*box.A.X
				y := j*box.B.Y + (1-j)*box.A.Y
				z := k*box.B.Z + (1-k)*box.A.Z
				// Rotate the corner and grow the bounds to contain it.
				newX := ry.cosTheta*x + ry.sinTheta*z
				newZ := -ry.sinTheta*x + ry.cosTheta*z
				rotatedCorner := geometry.Point{
					X: newX,
					Y: y,
					Z: newZ,
				}
				maxPoint = geometry.MaxComponents(maxPoint, rotatedCorner)
				minPoint = geometry.MinComponents(minPoint, rotatedCorner)
			}
		}
	}
	return &aabb.AABB{
		A: minPoint,
		B: maxPoint,
	}, true
}
// SetMaterial sets the material of this object (delegates to the wrapped primitive).
func (ry *RotationY) SetMaterial(m material.Material) {
	ry.Primitive.SetMaterial(m)
}
// IsInfinite returns whether this object is infinite (delegates to the wrapped primitive).
func (ry *RotationY) IsInfinite() bool {
	return ry.Primitive.IsInfinite()
}
// IsClosed returns whether this object is closed (delegates to the wrapped primitive).
func (ry *RotationY) IsClosed() bool {
	return ry.Primitive.IsClosed()
}
// Copy returns a shallow copy of this object; the wrapped Primitive is shared.
func (ry *RotationY) Copy() primitive.Primitive {
	newRY := *ry
	return &newRY
}
package camera
import (
"math"
"github.com/go-gl/mathgl/mgl32"
)
// Camera movements accepted by ProcessKeyboard.
const (
	FORWARD = iota
	BACKWARD
	LEFT
	RIGHT
)
// Default camera parameters (angles in degrees).
const (
	cYaw         = -90
	cPitch       = 0
	cSpeed       = 2.5
	cSensitivity = 0.1
	cZoom        = 45
)
// Camera Contains information about the camera.
// The basis vectors (front/up/right) are derived from yaw/pitch by
// updateCameraVectors and must not be set directly.
type Camera struct {
	position mgl32.Vec3
	front    mgl32.Vec3
	up       mgl32.Vec3
	right    mgl32.Vec3
	worldUp  mgl32.Vec3
	// Euler angles (degrees)
	yaw   float64
	pitch float64
	// Options
	moveSpeed   float32
	sensitivity float32
	inverted    bool // invert the vertical mouse axis
}
// NewCamera Returns a new camera object at (0,0,3) looking down -Z.
// It always returns a nil error.
func NewCamera(inverted bool) (*Camera, error) {
	c := new(Camera)
	c.position = mgl32.Vec3{0, 0, 3}
	c.front = mgl32.Vec3{0, 0, -1}
	c.worldUp = mgl32.Vec3{0, 1, 0}
	c.moveSpeed = cSpeed
	c.yaw = cYaw
	c.pitch = cPitch
	c.sensitivity = cSensitivity
	c.inverted = inverted
	c.updateCameraVectors()
	return c, nil
}
// GetViewMatrix return the View Matrix (look-at from position toward position+front).
func (c *Camera) GetViewMatrix() mgl32.Mat4 {
	return mgl32.LookAtV(c.position, c.position.Add(c.front), c.up)
}
// ProcessKeyboard moves the camera along its basis vectors in the given
// direction (FORWARD, BACKWARD, LEFT or RIGHT), scaled by deltaTime.
func (c *Camera) ProcessKeyboard(direction int, deltaTime float32) {
	step := c.moveSpeed * deltaTime
	switch direction {
	case FORWARD:
		c.position = c.position.Add(c.front.Mul(step))
	case BACKWARD:
		c.position = c.position.Sub(c.front.Mul(step))
	case LEFT:
		c.position = c.position.Sub(c.right.Mul(step))
	case RIGHT:
		c.position = c.position.Add(c.right.Mul(step))
	}
}
// ProcessMouseMovement Handles input of mouse movement data.
// Offsets are scaled by the sensitivity; yoffset is subtracted when the
// camera is inverted. When constrainPitch is true the pitch is clamped
// to [-89, 89] degrees to avoid gimbal flip at the poles.
func (c *Camera) ProcessMouseMovement(xoffset, yoffset float64, constrainPitch bool) {
	xoffset *= float64(c.sensitivity)
	yoffset *= float64(c.sensitivity)
	c.yaw += xoffset
	if c.inverted {
		c.pitch -= yoffset
	} else {
		c.pitch += yoffset
	}
	if constrainPitch {
		if c.pitch > 89.0 {
			c.pitch = 89
		}
		if c.pitch < -89.0 {
			c.pitch = -89
		}
	}
	// Re-derive the basis vectors from the new Euler angles.
	c.updateCameraVectors()
}
// updateCameraVectors recomputes front/right/up from yaw and pitch
// (standard spherical-to-Cartesian conversion, angles in degrees).
func (c *Camera) updateCameraVectors() {
	var frontVec mgl32.Vec3
	frontVec[0] = float32(math.Cos(float64(mgl32.DegToRad(float32(c.yaw))))) * float32(math.Cos(float64(mgl32.DegToRad(float32(c.pitch)))))
	frontVec[1] = float32(math.Sin(float64(mgl32.DegToRad(float32(c.pitch)))))
	frontVec[2] = float32(math.Sin(float64(mgl32.DegToRad(float32(c.yaw))))) * float32(math.Cos(float64(mgl32.DegToRad(float32(c.pitch)))))
	c.front = frontVec.Normalize()
	c.right = c.front.Cross(c.worldUp).Normalize()
	c.up = c.right.Cross(c.front).Normalize()
}
// Package byteutil provides various operations on bytes and byte strings.
package byteutil
// 256-entry lookup tables indexed by byte value; populated once by init.
var (
	digit     [256]bool
	hexdigit  [256]bool
	letter    [256]bool
	uppercase [256]bool
	lowercase [256]bool
	alphanum  [256]bool
	tolower   [256]byte // maps ASCII uppercase to lowercase, identity otherwise
	toupper   [256]byte // maps ASCII lowercase to uppercase, identity otherwise
)
// init fills the classification and case-mapping tables for ASCII bytes.
func init() {
	for _, b := range "0123456789" {
		digit[b] = true
	}
	for _, b := range "0123456789abcdefABCDEF" {
		hexdigit[b] = true
	}
	for _, b := range "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ" {
		letter[b] = true
	}
	for _, b := range "abcdefghijklmnopqrstuvwxyz" {
		lowercase[b] = true
	}
	for _, b := range "ABCDEFGHIJKLMNOPQRSTUVWXYZ" {
		uppercase[b] = true
	}
	for _, b := range "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ" {
		alphanum[b] = true
	}
	// Case maps default to identity.
	for i := 0; i < 256; i++ {
		tolower[i] = byte(i)
		toupper[i] = byte(i)
	}
	for _, b := range "ABCDEFGHIJKLMNOPQRSTUVWXYZ" {
		tolower[b] = byte(b) - 'A' + 'a'
	}
	for _, b := range "abcdefghijklmnopqrstuvwxyz" {
		toupper[b] = byte(b) - 'a' + 'A'
	}
}
// IsDigit reports whether b is an ASCII decimal digit.
func IsDigit(b byte) bool {
	return digit[b]
}
// IsHexDigit reports whether b is an ASCII hexadecimal digit.
func IsHexDigit(b byte) bool {
	return hexdigit[b]
}
// IsLetter reports whether b is an ASCII letter.
func IsLetter(b byte) bool {
	return letter[b]
}
// IsLowercaseLetter reports whether b is an ASCII lowercase letter.
func IsLowercaseLetter(b byte) bool {
	return lowercase[b]
}
// IsUppercaseLetter reports whether b is an ASCII uppercase letter.
func IsUppercaseLetter(b byte) bool {
	return uppercase[b]
}
// IsAlphaNum reports whether b is an ASCII letter or digit.
func IsAlphaNum(b byte) bool {
	return alphanum[b]
}
// ToLower returns s with ASCII uppercase letters lowercased.
// It returns s unchanged (no allocation) when no uppercase byte exists.
func ToLower(s string) string {
	if s == "" {
		return ""
	}
	hasUpper := false
	for i := 0; i < len(s); i++ {
		if uppercase[s[i]] {
			hasUpper = true
			break
		}
	}
	if !hasUpper {
		return s
	}
	buf := make([]byte, len(s))
	for i := 0; i < len(s); i++ {
		buf[i] = tolower[s[i]]
	}
	return string(buf)
}
// ToUpper returns s with ASCII lowercase letters uppercased.
// It returns s unchanged (no allocation) when no lowercase byte exists.
func ToUpper(s string) string {
	if s == "" {
		return ""
	}
	hasLower := false
	for i := 0; i < len(s); i++ {
		if lowercase[s[i]] {
			hasLower = true
			break
		}
	}
	if !hasLower {
		return s
	}
	buf := make([]byte, len(s))
	for i := 0; i < len(s); i++ {
		buf[i] = toupper[s[i]]
	}
	return string(buf)
}
// ByteToLower returns the ASCII lowercase form of b (identity for non-letters).
func ByteToLower(b byte) byte {
	return tolower[b]
}
// ByteToUpper returns the ASCII uppercase form of b (identity for non-letters).
func ByteToUpper(b byte) byte {
	return toupper[b]
}
// IndexAny returns the index of the first byte of s that also occurs in
// chars, or -1 if none does. Both strings are treated as raw bytes.
func IndexAny(s, chars string) int {
	var member [256]bool
	for _, c := range []byte(chars) {
		member[c] = true
	}
	for i := 0; i < len(s); i++ {
		if member[s[i]] {
			return i
		}
	}
	return -1
}
// IndexAnyTable returns the index of the first byte of s whose entry in
// the 256-entry membership table t is true, or -1 if none is.
func IndexAnyTable(s string, t *[256]bool) int {
	for i := range []byte(s) {
		if t[s[i]] {
			return i
		}
	}
	return -1
}
// Unhex converts a single ASCII hex digit to its numeric value (0-15).
// It panics if d is not a hex digit.
func Unhex(d byte) byte {
	switch {
	case digit[d]:
		return d - '0'
	case uppercase[d]:
		// Note: any uppercase letter passes here, not just A-F; callers
		// are expected to have validated with IsHexDigit first.
		return d - 'A' + 10
	case lowercase[d]:
		return d - 'a' + 10
	}
	panic("unhex: not hex digit")
}
package main
import (
"fmt"
"math"
)
// PromptTriangle models a right triangle: edges a, b, c (c is the
// hypotenuse) and opposite angles A, B, C in radians. C is always π/2.
// A zero field means "unknown".
type PromptTriangle struct {
	a, b, c, A, B, C float64
}
// NewPromptTriangle copies the known (positive) fields of partialValues
// into a fresh triangle. The right angle C is always preset to π/2;
// partialValues.C is ignored.
func NewPromptTriangle(partialValues PromptTriangle) (result *PromptTriangle) {
	result = new(PromptTriangle)
	result.C = math.Pi / 2
	if 0 < partialValues.a {
		result.a = partialValues.a
	}
	if 0 < partialValues.b {
		result.b = partialValues.b
	}
	if 0 < partialValues.c {
		result.c = partialValues.c
	}
	if 0 < partialValues.A {
		result.A = partialValues.A
	}
	if 0 < partialValues.B {
		result.B = partialValues.B
	}
	return
}
// ComputeEdgeA derives edge a (the leg opposite angle A) from the parts
// already known, if a is still unknown. It tries, in order: Pythagoras
// from c and b, a = c·sin(A), a = c·cos(B), and the law of sines from
// A, b and B.
func (t *PromptTriangle) ComputeEdgeA() {
	if 0 < t.a {
		return
	}
	if 0 < t.c {
		if 0 < t.b {
			t.a = math.Sqrt(math.Pow(t.c, 2) - math.Pow(t.b, 2))
			return
		}
		if 0 < t.A {
			t.a = math.Sin(t.A) * t.c
			return
		}
		if 0 < t.B {
			t.a = math.Cos(t.B) * t.c
			return
		}
	}
	if 0 < t.A && 0 < t.b && 0 < t.B {
		t.a = math.Sin(t.A) * t.b / math.Sin(t.B)
		return
	}
}
// ComputeEdgeB derives edge b (the leg opposite angle B) from the parts
// already known, if b is still unknown. It tries, in order: Pythagoras
// from c and a, b = c·sin(B), b = c·cos(A), and the law of sines from
// B, a and A.
func (t *PromptTriangle) ComputeEdgeB() {
	if 0 < t.b {
		return
	}
	if 0 < t.c {
		if 0 < t.a {
			t.b = math.Sqrt(math.Pow(t.c, 2) - math.Pow(t.a, 2))
			return
		}
		if 0 < t.B {
			t.b = math.Sin(t.B) * t.c
			return
		}
		if 0 < t.A {
			t.b = math.Cos(t.A) * t.c
			return
		}
	}
	if 0 < t.B && 0 < t.a && 0 < t.A {
		// BUG FIX: this branch previously assigned the law-of-sines
		// result to t.a; the edge being computed here is t.b.
		t.b = math.Sin(t.B) * t.a / math.Sin(t.A)
		return
	}
}
// ComputeEdgeC derives the hypotenuse c from the parts already known, if
// c is still unknown: Pythagoras from a and b, or a leg divided by the
// sine/cosine of a known angle.
func (t *PromptTriangle) ComputeEdgeC() {
	if 0 < t.c {
		return
	}
	if 0 < t.a {
		if 0 < t.b {
			t.c = math.Sqrt(math.Pow(t.a, 2) + math.Pow(t.b, 2))
			return
		}
		if 0 < t.A {
			t.c = t.a / math.Sin(t.A)
			return
		}
		if 0 < t.B {
			t.c = t.a / math.Cos(t.B)
			return
		}
	} else if 0 < t.b {
		if 0 < t.A {
			t.c = t.b / math.Cos(t.A)
			return
		}
		if 0 < t.B {
			t.c = t.b / math.Sin(t.B)
			return
		}
	}
}
// ComputeAngleA derives angle A (opposite edge a) if it is still unknown:
// A = asin(a/c), A = acos(b/c), or A = π/2 − B.
func (t *PromptTriangle) ComputeAngleA() {
	if 0 < t.A {
		return
	}
	if 0 < t.c {
		if 0 < t.a {
			t.A = math.Asin(t.a / t.c)
			return
		}
		if 0 < t.b {
			t.A = math.Acos(t.b / t.c)
			return
		}
	} else if 0 < t.B {
		// The two acute angles of a right triangle are complementary.
		t.A = math.Pi/2 - t.B
		return
	}
}
// ComputeAngleB derives angle B (opposite edge b) if it is still unknown:
// B = acos(a/c), B = asin(b/c), or B = π/2 − A.
func (t *PromptTriangle) ComputeAngleB() {
	if 0 < t.B {
		return
	}
	if 0 < t.c {
		if 0 < t.a {
			t.B = math.Acos(t.a / t.c)
			return
		}
		if 0 < t.b {
			t.B = math.Asin(t.b / t.c)
			return
		}
	} else if 0 < t.A {
		// The two acute angles of a right triangle are complementary.
		t.B = math.Pi/2 - t.A
		return
	}
}
// DiscoverUnknowns repeatedly applies the Compute* rules until every edge
// and acute angle of the triangle is known. It panics after 100 rounds,
// which indicates the initially known fields do not determine the
// triangle.
func (t *PromptTriangle) DiscoverUnknowns() {
	count := 0
	for 0 == t.a || 0 == t.b || 0 == t.c || 0 == t.A || 0 == t.B {
		t.ComputeEdgeA()
		t.ComputeEdgeB()
		t.ComputeEdgeC()
		t.ComputeAngleA()
		t.ComputeAngleB()
		count++
		if 100 < count {
			panic("Not enough info to define the triangle")
		}
	}
}
// zPrint aliases fmt.Println so main can discard its results tersely.
var zPrint = fmt.Println
// main is a placeholder entry point; it does not exercise PromptTriangle.
func main() {
	_, _ = zPrint("hello world")
}
package dfl
import (
	"bytes"

	"github.com/pkg/errors"
	"github.com/spatialcurrent/go-adaptive-functions/pkg/af"
	"github.com/spatialcurrent/go-reader-writer/pkg/io"
)
// In is a BinaryOperator that evaluates to true if the left value is in the right value.
// The left value is cast as a string using "fmt.Sprint(lv)".
// If the right value is an array/slice, then evaluated to true if the left value is in the array/slice.
// Otherwise, evaluates to true if the right string is contained by the left string.
type In struct {
	*BinaryOperator
}
// Dfl returns the DFL expression for this node, rendered with the
// operator keyword "in".
func (i In) Dfl(quotes []string, pretty bool, tabs int) string {
	return i.BinaryOperator.Dfl("in", quotes, pretty, tabs)
}
// Sql renders the node as a SQL predicate. Only two shapes are
// translated; every other combination yields the empty string.
func (i In) Sql(pretty bool, tabs int) string {
	switch right := i.Right.(type) {
	case *Attribute:
		switch left := i.Left.(type) {
		case *Literal:
			switch lv := left.Value.(type) {
			case string:
				// string literal IN attribute  =>  attribute LIKE '%literal%'
				like := &Like{&BinaryOperator{
					Left:  i.Right,
					Right: &Literal{Value: "%" + lv + "%"},
				}}
				return like.Sql(pretty, tabs)
			}
		}
	case *Set:
		// value IN set  =>  value = ANY(set)
		eq := &Equal{&BinaryOperator{
			Left:  i.Left,
			Right: &Function{Name: "ANY", MultiOperator: &MultiOperator{Arguments: []Node{right}}},
		}}
		return eq.Sql(pretty, tabs)
	}
	return ""
}
// Map returns the map representation of this node keyed by operator "in".
func (i In) Map() map[string]interface{} {
	return i.BinaryOperator.Map("in", i.Left, i.Right)
}
// Compile returns a new In node with both operands compiled.
func (i In) Compile() Node {
	left := i.Left.Compile()
	right := i.Right.Compile()
	return &In{&BinaryOperator{Left: left, Right: right}}
}
func (i In) Evaluate(vars map[string]interface{}, ctx interface{}, funcs FunctionMap, quotes []string) (map[string]interface{}, interface{}, error) {
vars, lv, err := i.Left.Evaluate(vars, ctx, funcs, quotes)
if err != nil {
return vars, false, errors.Wrap(err, "Error evaluating left value for "+i.Dfl(quotes, false, 0))
}
vars, rv, err := i.Right.Evaluate(vars, ctx, funcs, quotes)
if err != nil {
return vars, false, errors.Wrap(err, "Error evaluating right value for "+i.Dfl(quotes, false, 0))
}
if rvr, ok := rv.(io.ByteReadCloser); ok {
if lvb, ok := lv.([]byte); ok {
rvb, err := rvr.ReadAll()
if err != nil {
return vars, false, errors.Wrap(err, "error reading all byte for right value in expression "+i.Dfl(quotes, false, 0))
}
if len(lvb) == len(rvb) && len(lvb) == 0 {
return vars, true, nil
}
for i, _ := range rvb {
if rvb[i] == lvb[0] && i+len(lvb) < len(rvb) {
match := true
for j, _ := range lvb {
if rvb[i+j] != lvb[j] {
match = false
break
}
}
if match {
return vars, true, nil
}
}
}
return vars, false, nil
}
if lvs, ok := lv.(string); ok {
lvb := []byte(lvs)
rvb, err := rvr.ReadAll()
if err != nil {
return vars, false, errors.Wrap(err, "error reading all byte for right value in expression "+i.Dfl(quotes, false, 0))
}
if len(lvb) == len(rvb) && len(lvb) == 0 {
return vars, true, nil
}
for i, _ := range rvb {
if rvb[i] == lvb[0] && i+len(lvb) < len(rvb) {
match := true
for j, _ := range lvb {
if rvb[i+j] != lvb[j] {
match = false
break
}
}
if match {
return vars, true, nil
}
}
}
return vars, false, nil
}
}
value, err := af.In.ValidateRun(lv, rv)
if err != nil {
return vars, false, errors.Wrap(err, ErrorEvaluate{Node: i, Quotes: quotes}.Error())
}
return vars, value, nil
} | pkg/dfl/In.go | 0.691185 | 0.460046 | In.go | starcoder |
package mgl
import (
"math"
"unsafe"
"github.com/go-gl/gl/v2.1/gl"
)
const (
	// NUM_SEG is the number of segments used to approximate circles.
	NUM_SEG = 16
)
var (
	// dir holds NUM_SEG precomputed (cos, sin) pairs around the unit circle.
	dir [NUM_SEG * 2]float32
	// NOTE(review): _init is declared but never referenced anywhere in
	// this file; it appears to be dead state.
	_init = false
)
// init precomputes the unit-circle directions used by DrawCylinder.
func init() {
	for i := 0; i < NUM_SEG; i++ {
		a := float64(i) / float64(NUM_SEG) * float64(math.Pi*2)
		dir[i*2] = float32(math.Cos(a))
		dir[i*2+1] = float32(math.Sin(a))
	}
}
// DrawCylinder renders a solid elliptic cylinder filling the axis-aligned
// box [min, max] with immediate-mode triangles. The bottom cap and sides
// use a darkened copy of col for simple shading.
func DrawCylinder(minx, miny, minz, maxx, maxy, maxz float32, col uint32) {
	Begin(gl.TRIANGLES, 1)
	// Darkened color for the bottom cap and lower side vertices.
	col2 := DuMultCol(col, 160)
	// Center and radii of the elliptical cross-section.
	cx := (maxx + minx) / 2
	cz := (maxz + minz) / 2
	rx := (maxx - minx) / 2
	rz := (maxz - minz) / 2
	// Bottom cap: triangle fan anchored at segment 0.
	for i := 2; i < NUM_SEG; i++ {
		a := 0
		b := i - 1
		c := i
		Vertex2(cx+dir[a*2+0]*rx, miny, cz+dir[a*2+1]*rz, col2)
		Vertex2(cx+dir[b*2+0]*rx, miny, cz+dir[b*2+1]*rz, col2)
		Vertex2(cx+dir[c*2+0]*rx, miny, cz+dir[c*2+1]*rz, col2)
	}
	// Top cap: same fan with reversed winding.
	for i := 2; i < NUM_SEG; i++ {
		a := 0
		b := i
		c := i - 1
		Vertex2(cx+dir[a*2+0]*rx, maxy, cz+dir[a*2+1]*rz, col)
		Vertex2(cx+dir[b*2+0]*rx, maxy, cz+dir[b*2+1]*rz, col)
		Vertex2(cx+dir[c*2+0]*rx, maxy, cz+dir[c*2+1]*rz, col)
	}
	// Sides: one quad (two triangles) per segment pair (j trails i).
	j := NUM_SEG - 1
	for i := 0; i < NUM_SEG; i++ {
		Vertex2(cx+dir[i*2+0]*rx, miny, cz+dir[i*2+1]*rz, col2)
		Vertex2(cx+dir[j*2+0]*rx, miny, cz+dir[j*2+1]*rz, col2)
		Vertex2(cx+dir[j*2+0]*rx, maxy, cz+dir[j*2+1]*rz, col)
		Vertex2(cx+dir[i*2+0]*rx, miny, cz+dir[i*2+1]*rz, col2)
		Vertex2(cx+dir[j*2+0]*rx, maxy, cz+dir[j*2+1]*rz, col)
		Vertex2(cx+dir[i*2+0]*rx, maxy, cz+dir[i*2+1]*rz, col)
		j = i
	}
	End()
}
// DuRGBA packs four 8-bit channels into one 0xAABBGGRR color word.
func DuRGBA(r, g, b, a uint32) uint32 {
	return a<<24 | b<<16 | g<<8 | r
}
// DuLerpCol linearly interpolates each 8-bit channel of colors ca and cb
// by u/255 (u=0 yields ca, u=255 yields cb).
func DuLerpCol(ca, cb, u uint32) uint32 {
	lerp := func(a, b uint32) uint32 {
		return (a*(255-u) + b*u) / 255
	}
	r := lerp(ca&0xff, cb&0xff)
	g := lerp((ca>>8)&0xff, (cb>>8)&0xff)
	b := lerp((ca>>16)&0xff, (cb>>16)&0xff)
	a := lerp((ca>>24)&0xff, (cb>>24)&0xff)
	return a<<24 | b<<16 | g<<8 | r
}
// Begin starts an immediate-mode primitive batch. For points and lines,
// size sets the point size / line width respectively.
func Begin(prim uint32, size float32) {
	if prim == gl.POINTS {
		gl.PointSize(size)
	} else if prim == gl.LINES {
		gl.LineWidth(size)
	}
	gl.Begin(prim)
}
// End finishes the current batch and restores line width and point size
// to 1.0.
func End() {
	gl.End()
	gl.LineWidth(1.0)
	gl.PointSize(1.0)
}
// Vertex emits a colored, textured vertex. color is passed as 4 packed
// bytes (RGBA), pos as 3 floats.
func Vertex(pos []float32, color uint32, uv [2]float32) {
	// fmt.Println("pos:", pos, " uv:", uv)
	gl.Color4ubv((*uint8)(unsafe.Pointer(&color)))
	gl.TexCoord2fv((*float32)(unsafe.Pointer(&uv)))
	gl.Vertex3fv((*float32)(unsafe.Pointer(&pos[0])))
}
// Vertex2 emits a colored vertex from explicit coordinates (no texture
// coordinate).
func Vertex2(x, y, z float32, color uint32) {
	gl.Color4ubv((*uint8)(unsafe.Pointer(&color)))
	gl.Vertex3f(x, y, z)
}
// Texture enables 2-D texturing and binds the global texture g_tex when
// state is true, or disables texturing when false.
func Texture(state bool) {
	if state {
		gl.Enable(gl.TEXTURE_2D)
		g_tex.Bind()
	} else {
		gl.Disable(gl.TEXTURE_2D)
	}
}
// isectSegAABB intersects the segment sp->sq with the axis-aligned box
// [amin, amax] using the slab method. It reports whether the segment
// overlaps the box, together with the entry and exit parameters
// tmin, tmax in [0, 1] along the segment.
func isectSegAABB(sp, sq *[3]float64, amin, amax *[3]float32) (bool, float64, float64) {
	const eps = 1e-6
	d := [3]float64{sq[0] - sp[0], sq[1] - sp[1], sq[2] - sp[2]}
	tmin, tmax := 0.0, 1.0
	for axis := 0; axis < 3; axis++ {
		lo, hi := float64(amin[axis]), float64(amax[axis])
		if math.Abs(d[axis]) < eps {
			// Segment is parallel to this slab: reject if it starts
			// outside the slab.
			if sp[axis] < lo || sp[axis] > hi {
				return false, 0, 0
			}
			continue
		}
		ood := 1.0 / d[axis]
		t1 := (lo - sp[axis]) * ood
		t2 := (hi - sp[axis]) * ood
		if t1 > t2 {
			t1, t2 = t2, t1
		}
		tmin = math.Max(tmin, t1)
		tmax = math.Min(tmax, t2)
		if tmin > tmax {
			return false, 0, 0
		}
	}
	return true, tmin, tmax
}
// intersectSegmentTriangle tests the segment sp->sq against triangle
// (a, b, c) using scalar triple products, returning whether it hits and
// the parametric position t in [0, 1] along the segment. Triangles facing
// away from the segment (d <= 0) are rejected.
func intersectSegmentTriangle(sp, sq *[3]float64, a, b, c []float32) (bool, float32) {
	var v, w float32
	var ab, ac, qp, ap, norm, e [3]float32
	vSub(ab[0:], b, a)
	vSub(ac[0:], c, a)
	// qp points from the segment end back toward its start.
	qp[0] = float32(sp[0] - sq[0])
	qp[1] = float32(sp[1] - sq[1])
	qp[2] = float32(sp[2] - sq[2])
	// Unnormalized triangle normal.
	vCross(norm[0:], ab[0:], ac[0:])
	// d < |qp|·|norm|: denominator of the parametric solution; d <= 0
	// means parallel or back-facing.
	d := vDot(qp[0:], norm[0:])
	if d <= 0 {
		return false, 0
	}
	ap[0] = float32(sp[0]) - a[0]
	ap[1] = float32(sp[1]) - a[1]
	ap[2] = float32(sp[2]) - a[2]
	// Reject if the intersection lies outside the segment's t range.
	t := vDot(ap[0:], norm[0:])
	if t < 0 {
		return false, 0
	}
	if t > d {
		return false, 0
	}
	// Barycentric coordinates (scaled by d) must lie inside the triangle.
	vCross(e[0:], qp[0:], ap[0:])
	v = vDot(ac[0:], e[0:])
	if v < 0 || v > d {
		return false, 0
	}
	w = -vDot(ab[0:], e[0:])
	if w < 0 || v+w > d {
		return false, 0
	}
	t /= d
	return true, t
}
// vSub stores the component-wise difference a-b in dest (3 components).
func vSub(dest, a, b []float32) {
	for i := 0; i < 3; i++ {
		dest[i] = a[i] - b[i]
	}
}
// vCross stores the cross product v1 x v2 in dest. dest must not alias
// v1 or v2: components are written as they are computed.
func vCross(dest, v1, v2 []float32) {
	dest[0] = v1[1]*v2[2] - v1[2]*v2[1]
	dest[1] = v1[2]*v2[0] - v1[0]*v2[2]
	dest[2] = v1[0]*v2[1] - v1[1]*v2[0]
}
// vDot returns the 3-D dot product of v1 and v2.
func vDot(v1, v2 []float32) float32 {
	var sum float32
	for i := 0; i < 3; i++ {
		sum += v1[i] * v2[i]
	}
	return sum
}
// DuMultCol scales the RGB channels of col by d/256, leaving alpha
// unchanged.
func DuMultCol(col, d uint32) uint32 {
	r := ((col & 0xff) * d) >> 8
	g := (((col >> 8) & 0xff) * d) >> 8
	b := (((col >> 16) & 0xff) * d) >> 8
	a := (col >> 24) & 0xff
	return a<<24 | b<<16 | g<<8 | r
}
package fullerene
import (
"time"
)
// Fullerene wraps time.Time with calendar/birthday helpers.
type Fullerene struct {
	t time.Time
}
// Now returns a Fullerene for the current local time.
func Now() Fullerene {
	return Fullerene{
		t: time.Now(),
	}
}
// Date returns the year, month and day of fr.
func (fr Fullerene) Date() (year int, month time.Month, day int) {
	return fr.t.Date()
}
// After reports whether fr is after u.
func (fr Fullerene) After(u Fullerene) bool {
	return fr.t.After(u.t)
}
// Before reports whether fr is before u.
func (fr Fullerene) Before(u Fullerene) bool {
	return fr.t.Before(u.t)
}
// Equal reports whether fr and u represent the same instant.
func (fr Fullerene) Equal(u Fullerene) bool {
	return fr.t.Equal(u.t)
}
// IsZero reports whether fr holds the zero time.
func (fr Fullerene) IsZero() bool {
	return fr.t.IsZero()
}
// Date constructs a Fullerene from calendar components, mirroring time.Date.
func Date(year int, month time.Month, day, hour, min, sec, nsec int, loc *time.Location) Fullerene {
	return Fullerene{t: time.Date(year, month, day, hour, min, sec, nsec, loc)}
}
// Year returns the year of fr.
func (fr Fullerene) Year() int {
	return fr.t.Year()
}
// Month returns the month of fr.
func (fr Fullerene) Month() time.Month {
	return fr.t.Month()
}
// Day returns the day of the month of fr.
func (fr Fullerene) Day() int {
	return fr.t.Day()
}
// AddDate returns fr shifted by the given years, months and days.
func (fr Fullerene) AddDate(years int, months int, days int) Fullerene {
	return Fullerene{t: fr.t.AddDate(years, months, days)}
}
// IsLeapYear reports whether fr's year is a Gregorian leap year.
func (fr Fullerene) IsLeapYear() bool {
	y := fr.Year()
	return (y%4 == 0 && (y%100 != 0 || y%400 == 0))
}
// IsLeapDay reports whether fr falls on February 29.
func (fr Fullerene) IsLeapDay() bool {
	_, m, d := fr.Date()
	return (m == 2 && d == 29)
}
// IsBirthday reports whether targetTime falls on fr's birthday.
// An exact month/day match always counts — including February 29th
// checked in a leap year. For a leap-day birthday checked in a non-leap
// year, isBirthdayEx applies the substitute-day convention selected by
// beforeDayIfLeap (Feb 28 when true, Mar 1 when false).
func (fr Fullerene) IsBirthday(targetTime Fullerene, beforeDayIfLeap bool) bool {
	_, m, d := fr.Date()           // birthday
	_, mm, dd := targetTime.Date() // date under test
	if m == mm && d == dd {
		// Exact match. This also covers a Feb 29 birthday checked on
		// Feb 29 of a leap year, which the previous `!fr.IsLeapDay()`
		// condition wrongly rejected.
		return true
	}
	// There are countries where a person gets older the day before leap
	// day, and others the day after, in non-leap years.
	return fr.isBirthdayEx(targetTime, beforeDayIfLeap)
}
// isBirthdayEx handles the leap-day special case: for a person born on
// Feb 29 and a non-leap target year, the birthday is observed on Feb 28
// (beforeDayIfLeap == true) or Mar 1 (beforeDayIfLeap == false).
// It returns false for birthdays that are not on a leap day.
func (fr Fullerene) isBirthdayEx(targetTime Fullerene, beforeDayIfLeap bool) bool {
	if !fr.IsLeapDay() {
		// The substitute-day rule only applies to leap-day birthdays.
		// Previously any birthday matched Feb 28 / Mar 1 of a non-leap
		// year, which was wrong.
		return false
	}
	if targetTime.IsLeapYear() {
		// Feb 29 exists in a leap year, so no substitute day applies.
		return false
	}
	_, m, d := targetTime.Date()
	if beforeDayIfLeap && m == 2 && d == 28 {
		return true
	}
	if !beforeDayIfLeap && m == 3 && d == 1 {
		return true
	}
	return false
}
// Age returns fr's age in full years at targetTime, treating fr as the
// date of birth. The age increments on the birthday itself.
func (fr Fullerene) Age(targetTime Fullerene) int {
	y, m, d := targetTime.Date()
	age := y - fr.Year()
	switch {
	case m < fr.Month():
		// Birthday month not reached yet this year.
		return age - 1
	case m > fr.Month():
		return age
	case d >= fr.Day():
		// Same month, on or after the birthday.
		return age
	default:
		// Same month, before the birthday. The previous code had this
		// comparison inverted (full age before the birthday, age-1
		// after it) plus an unreachable trailing return.
		return age - 1
	}
}
// CurrentAge returns fr's age as of the current time.
func (fr Fullerene) CurrentAge() int {
	return fr.Age(Now())
}
// IsHoliday reports whether fr falls on a weekend, or on a Japanese
// public holiday when fr's location equals the package-level `loc`
// (declared elsewhere in this package — presumably Asia/Tokyo; confirm).
func (fr Fullerene) IsHoliday() bool {
	if fr.t.Location() == loc && fr.IsJapanesePublicHoliday() {
		return true
	}
	switch fr.t.Weekday() {
	case time.Sunday, time.Saturday:
		return true
	default:
		return false
	}
}
// IsJapanesePublicHoliday reports whether fr's calendar date matches an
// entry in the package-level JapanesePublicHolidays list (declared
// elsewhere in this package).
func (fr Fullerene) IsJapanesePublicHoliday() bool {
	y1, m1, d1 := fr.Date()
	for _, d := range JapanesePublicHolidays {
		y2, m2, d2 := d.Date()
		if y1 == y2 && m1 == m2 && d1 == d2 {
			return true
		}
	}
	return false
}
// IsWeekday reports the negation of IsHoliday.
func (fr Fullerene) IsWeekday() bool {
	return !fr.IsHoliday()
}
// String formats the wrapped time using time.Time.String.
func (fr *Fullerene) String() string {
	return fr.t.String()
}
// Format formats the wrapped time with the given reference layout.
func (fr *Fullerene) Format(layout string) string {
	return fr.t.Format(layout)
} | fullerene.go | 0.675122 | 0.58818 | fullerene.go | starcoder |
package learnML
import (
"../matrix"
)
// LayerType identifies a concrete layer implementation.
type LayerType int
// Dims lists the sizes of each dimension of a layer's input or output.
type Dims []int
// Supported layer kinds. Note LayerComposite is declared here but has no
// corresponding case in NewLayer.
const (
	LayerLinear LayerType = iota
	LayerTanh
	LayerConv
	LayerLeakyRectifier
	LayerMaxPooling2D
	LayerComposite
	LayerSinusoidal
)
// Layer is the interface implemented by every network layer: forward
// activation, blame backpropagation, gradient accumulation, and access to
// the underlying activation/blame/weight vectors.
type Layer interface {
	Activate(x *matrix.Vector) *matrix.Vector
	BackProp(prevBlame *matrix.Vector)
	init(dim Dims, dims ...Dims)
	OutDim() Dims
	Wrap(activation, blame matrix.Vector, weight ...matrix.Vector) Layer
	Name() string
	UpdateGradient(in, gradient *matrix.Vector)
	Activation() *matrix.Vector
	Blame() *matrix.Vector
	Weight() *matrix.Vector
}
// NewLayer constructs and initializes a layer of the requested type.
// Requesting an unsupported type — including LayerComposite, which is
// declared in the enum but has no case here — panics.
func NewLayer(t LayerType, dim Dims, dims ...Dims) Layer {
	var l Layer
	switch t {
	case LayerTanh:
		l = &layerTanh{}
	case LayerLeakyRectifier:
		l = &layerLeakyRectifier{}
	case LayerLinear:
		l = &layerLinear{}
	case LayerConv:
		l = &layerConv{}
	case LayerMaxPooling2D:
		l = &layerMaxPooling2D{}
	case LayerSinusoidal:
		l = &layerSinusoidal{}
	default:
		panic("Unsupported layer type!!!")
	}
	l.init(dim, dims...)
	return l
}
// layer is the base (abstract) layer embedded by concrete layer types.
// It stores the activation, blame and weight vectors and provides default
// method implementations; methods that a concrete layer must supply panic.
type layer struct {
	activation matrix.Vector
	blame matrix.Vector
	weight matrix.Vector
}
// OutDim returns the output dimension, i.e. the activation length.
func (l *layer) OutDim() Dims {
	return Dims{len(l.activation)}
}
// dim = [out, inDim, innerDim]. Every layer must have an output
// dimension. layerLinear must have an input dimension.
// The base init allocates activation/blame of size dim[0] and an empty
// weight vector; the variadic dims are ignored here.
func (l *layer) init(dim Dims, dims ...Dims) {
	l.activation = make(matrix.Vector, dim[0])
	l.blame = make(matrix.Vector, dim[0])
	l.weight = make(matrix.Vector, 0)
}
// Activate must be overridden by concrete layers; the base panics.
func (l *layer) Activate(x *matrix.Vector) *matrix.Vector {
	panic("layer: Activate: not implemented!")
}
// BackProp must be overridden by concrete layers; the base panics.
func (l *layer) BackProp(prevBlame *matrix.Vector) {
	panic("layer: BackProp: not implemented!")
}
// Wrap wraps a Layer around an activation Vector.
// Must be overridden by concrete layers; the base panics.
func (l *layer) Wrap(activation, blame matrix.Vector, weight ...matrix.Vector) Layer {
	panic("layer: Wrap: not implemented!")
}
// Name must be overridden by concrete layers; the base panics.
func (l *layer) Name() string {
	panic("layer: Name: not implemented!")
}
// UpdateGradient is a no-op for layers without weights.
func (l *layer) UpdateGradient(in, gradient *matrix.Vector) {}
// Activation returns a pointer to the activation vector.
func (l *layer) Activation() *matrix.Vector {
	return &(l.activation)
}
// Blame returns a pointer to the blame vector.
func (l *layer) Blame() *matrix.Vector {
	return &(l.blame)
}
// Weight returns a pointer to the weight vector.
func (l *layer) Weight() *matrix.Vector {
	return &(l.weight)
} | goml/learnML/layer.go | 0.723212 | 0.553505 | layer.go | starcoder |
package main
import (
"github.com/ByteArena/box2d"
"github.com/wdevore/Ranger-Go-IGE/api"
"github.com/wdevore/Ranger-Go-IGE/extras/shapes"
)
// landPhysicsComponent wires the static land geometry into the Box2D
// physics world. categoryBits/maskBits configure collision filtering.
type landPhysicsComponent struct {
	physicsComponent
	categoryBits uint16 // I am a...
	maskBits uint16 // I can collide with a...
}
// newLandPhysicsComponent returns an empty component; call
// ConfigureFilter and then Build before use.
func newLandPhysicsComponent() *landPhysicsComponent {
	o := new(landPhysicsComponent)
	return o
}
// Build creates the visual land polygon node and a matching static Box2D
// body whose single fixture is a chain shape built from the polygon's
// scaled vertices. Collision filter bits must already have been set via
// ConfigureFilter. Panics if the polygon node cannot be built.
func (p *landPhysicsComponent) Build(phyWorld *box2d.B2World, parent api.INode, position api.IPoint) {
	p.position = position
	var err error
	err = p.buildPolygon(parent.World(), parent)
	if err != nil {
		panic(err)
	}
	// A body def used to create bodies
	bDef := box2d.MakeB2BodyDef()
	bDef.Type = box2d.B2BodyType.B2_staticBody
	// Place the physics body at the node's position.
	px := p.phyNode.Position().X()
	py := p.phyNode.Position().Y()
	bDef.Position.Set(
		float64(px),
		float64(py),
	)
	// An instance of a body to contain Fixtures
	p.b2Body = phyWorld.CreateBody(&bDef)
	// Every Fixture has a shape
	b2ChainShape := box2d.MakeB2ChainShape()
	vertices := []box2d.B2Vec2{}
	gla := p.phyNode.(*shapes.MonoPolygonNode)
	verts := gla.Vertices()
	scale := p.phyNode.Scale()
	// Convert the node's flat x,y,z vertex array into scaled 2D points.
	for i := 0; i < len(*verts); i += api.XYZComponentCount {
		vertices = append(vertices, box2d.B2Vec2{X: float64((*verts)[i] * scale), Y: float64((*verts)[i+1] * scale)})
	}
	b2ChainShape.CreateChain(vertices, len(vertices))
	fd := box2d.MakeB2FixtureDef()
	fd.Shape = &b2ChainShape
	// fd.UserData = b.land
	fd.Filter.CategoryBits = p.categoryBits
	fd.Filter.MaskBits = p.maskBits
	p.b2Body.CreateFixtureFromDef(&fd) // attach Fixture to body
}
// ConfigureFilter stores the Box2D collision filter bits that Build will
// assign to the fixture it creates.
func (p *landPhysicsComponent) ConfigureFilter(categoryBits, maskBits uint16) {
	p.categoryBits = categoryBits
	p.maskBits = maskBits
}
// buildPolygon creates the outlined land polygon node from the hard-coded
// terrain vertex strip, scales it by 2 and places it at the configured
// position. The node is stored in p.phyNode for Build to use.
func (p *landPhysicsComponent) buildPolygon(world api.IWorld, parent api.INode) error {
	var err error
	// Terrain outline as x,y,z triples, listed left to right.
	vertices := []float32{
		-30.0, 2.5, 0.0,
		-25.0, 2.5, 0.0,
		-20.0, 7.5, 0.0,
		-10.0, 7.5, 0.0,
		-10.0, 5.5, 0.0,
		-5.0, 2.5, 0.0,
		1.0, 2.5, 0.0,
		1.5, 1.0, 0.0,
		7.0, 1.0, 0.0,
		7.5, 0.0, 0.0,
		10.5, 0.0, 0.0,
		11.0, 2.0, 0.0,
		11.0, 5.0, 0.0,
		15.0, 10.0, 0.0,
		20.0, 10.0, 0.0,
	}
	// One index per vertex: the outline is drawn as an open strip.
	indices := []uint32{
		0, 1, 2, 3, 4,
		5, 6, 7, 8, 9,
		10, 11, 12, 13, 14,
	}
	// --------------------------------------------------------------
	p.phyNode, err = shapes.NewMonoPolygonNode("Land", &vertices, &indices, api.OPENOUTLINED, world, parent)
	if err != nil {
		return err
	}
	p.phyNode.SetScale(2.0)
	p.phyNode.SetPosition(p.position.X(), p.position.Y())
	return nil
} | examples/complex/physics/complex/c4_lava/land_physics_component.go | 0.568176 | 0.475544 | land_physics_component.go | starcoder |
package state
// batched_storage.go - stores arbitrary data for given key prefix, batching it in a way
// that no single value in db is larger than specified `batchSize` in bytes.
// data is sequence of records of similar size, batchedStorage also provides iterators
// to move through records, in range from most recent to oldest.
import (
"encoding/binary"
"math"
"sync"
"github.com/pkg/errors"
"github.com/wavesplatform/gowaves/pkg/keyvalue"
)
const (
	// firstBatchNum is the number assigned to a key's first batch.
	firstBatchNum = 0
	// blockNumLen is the size of the block-number prefix stored in
	// front of every record's payload.
	blockNumLen = 4 // 4 bytes for block number.
)
// record is a single stored entry: the number of the block it belongs to
// plus the fixed-size payload.
type record struct {
	blockNum uint32
	data []byte
}
// newRecordFromBytes parses a serialized record (big-endian block number
// followed by the payload). Returns errInvalidDataSize on short input.
func newRecordFromBytes(data []byte) (*record, error) {
	if len(data) < blockNumLen {
		return nil, errInvalidDataSize
	}
	blockNum := binary.BigEndian.Uint32(data[:blockNumLen])
	return &record{blockNum: blockNum, data: data[blockNumLen:]}, nil
}
// marshalBinary serializes the record as block number + payload.
func (r *record) marshalBinary() []byte {
	buf := make([]byte, blockNumLen+len(r.data))
	binary.BigEndian.PutUint32(buf, r.blockNum)
	copy(buf[blockNumLen:], r.data)
	return buf
}
// recordBytes returns only the payload, without the block number prefix.
func (r *record) recordBytes() []byte {
	return r.data
}
// recordIterator walks individual fixed-size records from the most recent
// to the oldest, consuming batches supplied by a batchIterator and
// trimming one record off the batch tail per step.
type recordIterator struct {
	iter *batchIterator
	batch []byte
	recordSize int
	err error
}
// newRecordIterator wraps a batchIterator with per-record iteration.
func newRecordIterator(iter *batchIterator, recordSize int) *recordIterator {
	return &recordIterator{iter: iter, recordSize: recordSize}
}
// loadNextBatch advances to the next non-empty batch, storing it in
// i.batch. Returns false when the batches are exhausted or on error.
func (i *recordIterator) loadNextBatch() bool {
	for {
		if !i.iter.next() {
			return false
		}
		batch, err := i.iter.currentBatch()
		if err != nil {
			i.err = err
			return false
		}
		if len(batch) == 0 {
			// We need to find first not empty batch.
			continue
		}
		i.batch = batch
		return true
	}
}
// next advances to the next (older) record. The first call loads the
// newest batch; subsequent calls trim one record from the tail until a
// new batch must be loaded.
func (i *recordIterator) next() bool {
	size := i.recordSize
	if len(i.batch) > size {
		i.batch = i.batch[:len(i.batch)-size]
		return true
	}
	return i.loadNextBatch()
}
// currentRecord returns the payload of the record at the current position
// (the last recordSize bytes of the current batch).
func (i *recordIterator) currentRecord() ([]byte, error) {
	size := int(i.recordSize)
	if len(i.batch) < size {
		return nil, errInvalidDataSize
	}
	recordBytes := i.batch[len(i.batch)-size:]
	r, err := newRecordFromBytes(recordBytes)
	if err != nil {
		i.err = err
		return nil, err
	}
	return r.recordBytes(), nil
}
// error reports the first error seen by this iterator or its underlying
// batch iterator.
func (i *recordIterator) error() error {
	if err := i.iter.error(); err != nil {
		return err
	}
	return i.err
}
// release frees the underlying database iterator.
func (i *recordIterator) release() {
	i.batch = nil
	i.iter.release()
}
// batchIterator walks the stored batches of a key from the last (newest)
// to the first, filtering each batch through the storage's normalize.
type batchIterator struct {
	stor *batchedStorage
	iter keyvalue.Iterator
	used bool
}
// newBatchIterator wraps a raw key iterator for backward batch iteration.
func newBatchIterator(stor *batchedStorage, iter keyvalue.Iterator) *batchIterator {
	return &batchIterator{stor, iter, false}
}
// next positions the iterator: Last() on first use, Prev() afterwards.
func (i *batchIterator) next() bool {
	if i.used {
		return i.iter.Prev()
	}
	i.used = true
	return i.iter.Last()
}
// currentBatch returns the current batch value with records belonging to
// invalid blocks filtered from its tail.
func (i *batchIterator) currentBatch() ([]byte, error) {
	val := keyvalue.SafeValue(i.iter)
	return i.stor.normalize(val)
}
// error reports the underlying iterator's error, if any.
func (i *batchIterator) error() error {
	return i.iter.Error()
}
// release frees the underlying iterator.
func (i *batchIterator) release() {
	i.iter.Release()
}
// batch is a fixed-capacity byte buffer holding consecutive fixed-size
// records. pos is the write offset; num is the batch's sequence number.
type batch struct {
	pos int
	data []byte
	num uint32
	recordSize int
}
// newBatchWithData builds a batch of capacity maxSize pre-filled with the
// given data. Fails with errInvalidDataSize when data exceeds maxSize.
func newBatchWithData(data []byte, maxSize, recordSize int, batchNum uint32) (*batch, error) {
	if len(data) > int(maxSize) {
		return nil, errInvalidDataSize
	}
	b := &batch{pos: len(data), num: batchNum, recordSize: recordSize}
	b.data = make([]byte, maxSize)
	copy(b.data, data)
	return b, nil
}
// newBatch builds an empty batch of capacity maxSize.
func newBatch(maxSize, recordSize int, batchNum uint32) *batch {
	return &batch{pos: 0, data: make([]byte, maxSize), num: batchNum, recordSize: recordSize}
}
// canAddRecord reports whether record still fits into this batch.
func (b *batch) canAddRecord(record []byte) bool {
	return b.pos+len(record) <= len(b.data)
}
// addRecord appends record at the write offset; caller must have checked
// canAddRecord first.
func (b *batch) addRecord(record []byte) {
	copy(b.data[b.pos:], record)
	b.pos += len(record)
}
// bytes returns the filled portion of the batch.
func (b *batch) bytes() []byte {
	return b.data[:b.pos]
}
// lastRecord parses and returns the most recently added record.
func (b *batch) lastRecord() (*record, error) {
	if b.pos < b.recordSize {
		return nil, errors.New("batch is too small")
	}
	recordBytes := b.data[b.pos-b.recordSize : b.pos]
	record, err := newRecordFromBytes(recordBytes)
	if err != nil {
		return nil, err
	}
	return record, nil
}
// batchesGroup is the in-memory chain of batches being accumulated for a
// single key before being flushed to the database.
type batchesGroup struct {
	maxBatchSize int
	recordSize int
	batches []*batch
}
// newBatchesGroup validates the sizes and returns an empty group.
func newBatchesGroup(maxBatchSize, recordSize int) (*batchesGroup, error) {
	if recordSize > maxBatchSize {
		return nil, errors.New("recordSize is greater than maxBatchSize")
	}
	return &batchesGroup{
		maxBatchSize: maxBatchSize,
		recordSize: recordSize,
	}, nil
}
// initFirstBatch seeds the group with an existing batch (usually the last
// batch read from the database).
func (bg *batchesGroup) initFirstBatch(first *batch) {
	bg.batches = make([]*batch, 1)
	bg.batches[0] = first
}
// initFirstBatchEmpty seeds the group with a fresh empty first batch.
func (bg *batchesGroup) initFirstBatchEmpty() {
	bg.batches = make([]*batch, 1)
	bg.batches[0] = newBatch(bg.maxBatchSize, bg.recordSize, firstBatchNum)
}
// appendNewRecord adds one record, starting a new batch (with the next
// sequence number) when the current one is full.
func (bg *batchesGroup) appendNewRecord(record []byte) error {
	if len(record) != bg.recordSize {
		// Sanity check.
		return errInvalidDataSize
	}
	if len(bg.batches) == 0 {
		bg.initFirstBatchEmpty()
	}
	lastBatch := bg.batches[len(bg.batches)-1]
	if lastBatch.canAddRecord(record) {
		lastBatch.addRecord(record)
		return nil
	}
	if lastBatch.num == math.MaxUint32 {
		// Sanity check to prevent overflow.
		return errors.New("too many batches, can't add new!")
	}
	nextBatchNum := lastBatch.num + 1
	newBatch := newBatch(bg.maxBatchSize, bg.recordSize, nextBatchNum)
	newBatch.addRecord(record)
	bg.batches = append(bg.batches, newBatch)
	return nil
}
// lastRecord returns the most recently appended record of the group.
func (bg *batchesGroup) lastRecord() (*record, error) {
	if len(bg.batches) == 0 {
		return nil, errors.New("no batches")
	}
	lastBatch := bg.batches[len(bg.batches)-1]
	return lastBatch.lastRecord()
}
// batchedStorParams configures a batchedStorage: maximum size of a single
// stored batch, the (payload) record size, and the db key prefix.
type batchedStorParams struct {
	maxBatchSize, recordSize int
	prefix byte
}
// batchedStorage stores sequences of fixed-size records per key, packed
// into db values no larger than maxBatchSize. Writes accumulate in
// localStor and are flushed once memSize or the key count grows too big.
type batchedStorage struct {
	db keyvalue.IterableKeyVal
	dbBatch keyvalue.Batch
	writeLock *sync.Mutex
	stateDB *stateDB
	params *batchedStorParams
	localStor map[string]*batchesGroup
	memSize int // Total size (in bytes) of what was added.
	memLimit int // When memSize >= memLimit, we should flush().
	maxKeys int
}
// newBatchedStorage wires up a batchedStorage over the given db.
// NOTE(review): it mutates the caller's params (recordSize is increased
// by blockNumLen to account for the block-number prefix).
func newBatchedStorage(
	db keyvalue.IterableKeyVal,
	stateDB *stateDB,
	params *batchedStorParams,
	memLimit int,
	maxKeys int,
) (*batchedStorage, error) {
	// Actual record size is greater by blockNumLen.
	params.recordSize += blockNumLen
	dbBatch, err := db.NewBatch()
	if err != nil {
		return nil, errors.Wrap(err, "failed to create db batch")
	}
	return &batchedStorage{
		db: db,
		dbBatch: dbBatch,
		writeLock: stateDB.retrieveWriteLock(),
		stateDB: stateDB,
		params: params,
		localStor: make(map[string]*batchesGroup),
		memSize: 0,
		memLimit: memLimit,
		maxKeys: maxKeys,
	}, nil
}
// lastRecordByKey returns the payload of the newest record stored in the
// database for key, optionally filtering out records of invalid blocks.
func (s *batchedStorage) lastRecordByKey(key []byte, filter bool) ([]byte, error) {
	last, err := s.readLastBatch(key, filter)
	if err != nil {
		return nil, err
	}
	record, err := last.lastRecord()
	if err != nil {
		return nil, err
	}
	return record.recordBytes(), nil
}
// newestLastRecordByKey is like lastRecordByKey but prefers records still
// pending in localStor over what is persisted in the database.
func (s *batchedStorage) newestLastRecordByKey(key []byte, filter bool) ([]byte, error) {
	bg, ok := s.localStor[string(key)]
	if !ok {
		return s.lastRecordByKey(key, filter)
	}
	record, err := bg.lastRecord()
	if err != nil {
		return nil, err
	}
	return record.recordBytes(), nil
}
// newBatchGroupForKey builds an in-memory group for key, seeded with the
// last persisted batch when one exists.
func (s *batchedStorage) newBatchGroupForKey(key []byte, filter bool) (*batchesGroup, error) {
	bg, err := newBatchesGroup(s.params.maxBatchSize, s.params.recordSize)
	if err != nil {
		return nil, err
	}
	last, err := s.readLastBatch(key, filter)
	if err == errNotFound {
		return bg, nil
	} else if err != nil {
		return nil, err
	}
	bg.initFirstBatch(last)
	return bg, nil
}
// addRecordBytes appends an already-serialized record to key's group,
// tracking memory usage and flushing when memLimit or maxKeys is hit.
func (s *batchedStorage) addRecordBytes(key, record []byte, filter bool) error {
	keyStr := string(key)
	bg, ok := s.localStor[keyStr]
	if ok {
		if err := bg.appendNewRecord(record); err != nil {
			return err
		}
		s.memSize += len(record)
	} else {
		newGroup, err := s.newBatchGroupForKey(key, filter)
		if err != nil {
			return err
		}
		if err := newGroup.appendNewRecord(record); err != nil {
			return err
		}
		s.localStor[keyStr] = newGroup
		s.memSize += len(key) + len(record)
	}
	if s.memSize >= s.memLimit || len(s.localStor) >= s.maxKeys {
		if err := s.flush(); err != nil {
			return err
		}
		s.reset()
	}
	return nil
}
// Appends one more record (at the end) for specified key.
func (s *batchedStorage) addRecord(key []byte, data []byte, blockNum uint32, filter bool) error {
	r := &record{data: data, blockNum: blockNum}
	recordBytes := r.marshalBinary()
	return s.addRecordBytes(key, recordBytes, filter)
}
// batchByNum reads the batch with the given sequence number for key.
func (s *batchedStorage) batchByNum(key []byte, num uint32) (*batch, error) {
	batchKey := batchedStorKey{prefix: s.params.prefix, internalKey: key, batchNum: num}
	batch, err := s.db.Get(batchKey.bytes())
	if err != nil {
		return nil, err
	}
	return newBatchWithData(batch, s.params.maxBatchSize, s.params.recordSize, num)
}
// moveLastBatchPointer rewinds the stored "last batch number" after batch
// lastNum was dropped: removes the pointer entirely when the first batch
// was dropped, otherwise points it at the previous batch.
func (s *batchedStorage) moveLastBatchPointer(key []byte, lastNum uint32) error {
	if lastNum == firstBatchNum {
		if err := s.removeLastBatchNum(key); err != nil {
			return errors.Wrap(err, "failed to remove last batch num")
		}
	} else {
		if err := s.saveLastBatchNumDirectly(key, lastNum-1); err != nil {
			return errors.Wrap(err, "failed to save batch num to db")
		}
	}
	return nil
}
// handleEmptyBatch deletes a batch that became empty after filtering and
// rewinds the last-batch pointer accordingly.
func (s *batchedStorage) handleEmptyBatch(key []byte, batchNum uint32) error {
	if err := s.moveLastBatchPointer(key, batchNum); err != nil {
		return errors.Wrap(err, "failed to update last batch num")
	}
	if err := s.removeBatchByNum(key, batchNum); err != nil {
		return errors.Wrap(err, "failed to remove batch by num")
	}
	return nil
}
// normalizeBatches filters records belonging to invalid blocks from the
// tail of key's batch chain, walking backwards from the last batch until
// a non-empty batch remains. Batches that become empty are removed and
// the last-batch pointer is rewound.
func (s *batchedStorage) normalizeBatches(key []byte) error {
	// Lock the write lock.
	// Normalized batches will be written back to database, so we need to make sure
	// we read and write them under the same lock.
	s.writeLock.Lock()
	defer s.writeLock.Unlock()
	lastBatchNum, err := s.readLastBatchNum(key)
	if err != nil {
		// Nothing to normalize for this key.
		return nil
	}
	batchNum := lastBatchNum
	for {
		// Iterate until we find first non-empty (after filtering) batch.
		batchKey := batchedStorKey{prefix: s.params.prefix, internalKey: key, batchNum: batchNum}
		batch, err := s.db.Get(batchKey.bytes())
		if err != nil {
			return errors.Wrap(err, "failed to get batch by key")
		}
		newBatch, err := s.newestNormalize(batch)
		if err != nil {
			return errors.Wrap(err, "failed to normalize batch")
		}
		batchChanged := len(newBatch) != len(batch)
		if batchChanged {
			// Write normalized version of batch back under the same full
			// batch key it was read with. Previously this wrote under the
			// bare internal key, leaving the real batch entry stale.
			if err := s.writeBatchDirectly(batchKey.bytes(), newBatch); err != nil {
				return errors.Wrap(err, "failed to write batch")
			}
		}
		if len(newBatch) == 0 {
			// Batch is empty after filtering: drop it, rewind the pointer
			// and continue with the previous batch (if any).
			if err := s.handleEmptyBatch(key, batchNum); err != nil {
				return errors.Wrap(err, "failed to handle empty batch")
			}
			if batchNum == firstBatchNum {
				return nil
			}
			batchNum--
			continue
		}
		return nil
	}
}
// readLastBatch loads the last persisted batch for key, first filtering
// invalid-block records (via normalizeBatches) when filter is true.
// Returns errNotFound when the key has no batches.
func (s *batchedStorage) readLastBatch(key []byte, filter bool) (*batch, error) {
	if filter {
		if err := s.normalizeBatches(key); err != nil {
			return nil, errors.Wrap(err, "failed to normalize")
		}
	}
	lastBatchNum, err := s.readLastBatchNum(key)
	if err != nil {
		return nil, errNotFound
	}
	return s.batchByNum(key, lastBatchNum)
}
// newBackwardRecordIterator() returns backward iterator for iterating single records.
func (s *batchedStorage) newBackwardRecordIterator(key []byte) (*recordIterator, error) {
	k := batchedStorKey{prefix: s.params.prefix, internalKey: key}
	rawIter, err := s.db.NewKeyIterator(k.prefixUntilBatch())
	if err != nil {
		return nil, err
	}
	batchIter := newBatchIterator(s, rawIter)
	return newRecordIterator(batchIter, s.params.recordSize), nil
}
// blockValidationFunc decides whether a block number is still valid.
type blockValidationFunc func(blockNum uint32) (bool, error)
// normalizeCommon strips invalid-block records from the tail of a batch,
// stopping at the first record whose block is still valid.
func (s *batchedStorage) normalizeCommon(batch []byte, isValidBlock blockValidationFunc) ([]byte, error) {
	size := s.params.recordSize
	if (len(batch) % size) != 0 {
		return nil, errInvalidDataSize
	}
	for i := len(batch); i >= size; i -= size {
		recordBytes := batch[i-size : i]
		record, err := newRecordFromBytes(recordBytes)
		if err != nil {
			return nil, err
		}
		isValid, err := isValidBlock(record.blockNum)
		if err != nil {
			return nil, err
		}
		if isValid {
			break
		}
		batch = batch[:i-size]
	}
	return batch, nil
}
// normalize filters a batch against committed block validity.
func (s *batchedStorage) normalize(batch []byte) ([]byte, error) {
	return s.normalizeCommon(batch, s.stateDB.isValidBlock)
}
// newestNormalize filters a batch against the newest (uncommitted) block
// validity view.
func (s *batchedStorage) newestNormalize(batch []byte) ([]byte, error) {
	return s.normalizeCommon(batch, s.stateDB.newestIsValidBlock)
}
// removeBatchByNum deletes the batch with the given number for key.
func (s *batchedStorage) removeBatchByNum(key []byte, num uint32) error {
	batchKey := batchedStorKey{prefix: s.params.prefix, internalKey: key, batchNum: num}
	if err := s.db.Delete(batchKey.bytes()); err != nil {
		return errors.Wrap(err, "failed to delete batch")
	}
	return nil
}
// removeLastBatchNum deletes the "last batch number" pointer for key.
func (s *batchedStorage) removeLastBatchNum(key []byte) error {
	numKey := lastBatchKey{prefix: s.params.prefix, internalKey: key}
	if err := s.db.Delete(numKey.bytes()); err != nil {
		return errors.Wrap(err, "failed to delete last batch num")
	}
	return nil
}
// writeBatchDirectly writes raw batch bytes under the given db key,
// bypassing the pending dbBatch.
func (s *batchedStorage) writeBatchDirectly(key, batch []byte) error {
	return s.db.Put(key, batch)
}
// saveLastBatchNumDirectly stores the last batch number for key straight
// to the db (counterpart of saveLastBatchNum, which goes via dbBatch).
func (s *batchedStorage) saveLastBatchNumDirectly(key []byte, num uint32) error {
	k := lastBatchKey{prefix: s.params.prefix, internalKey: key}
	numBytes := make([]byte, 4)
	binary.BigEndian.PutUint32(numBytes, num)
	return s.db.Put(k.bytes(), numBytes)
}
// saveLastBatchNum queues the last batch number for key into dbBatch.
func (s *batchedStorage) saveLastBatchNum(key []byte, num uint32) {
	k := lastBatchKey{prefix: s.params.prefix, internalKey: key}
	numBytes := make([]byte, 4)
	binary.BigEndian.PutUint32(numBytes, num)
	s.dbBatch.Put(k.bytes(), numBytes)
}
// readLastBatchNum reads the last batch number stored for key.
func (s *batchedStorage) readLastBatchNum(key []byte) (uint32, error) {
	k := lastBatchKey{prefix: s.params.prefix, internalKey: key}
	numBytes, err := s.db.Get(k.bytes())
	if err != nil {
		return 0, err
	}
	return binary.BigEndian.Uint32(numBytes), nil
}
// writeBatchGroup queues every batch of the group into dbBatch and
// records the number of the newest batch as the last batch pointer.
func (s *batchedStorage) writeBatchGroup(key []byte, bg *batchesGroup) {
	k := batchedStorKey{prefix: s.params.prefix, internalKey: key}
	lastBatchNum := uint32(0)
	for _, batch := range bg.batches {
		lastBatchNum = batch.num
		k.batchNum = batch.num
		s.dbBatch.Put(k.bytes(), batch.bytes())
	}
	s.saveLastBatchNum(key, lastBatchNum)
}
// reset clears the in-memory state and the pending db batch.
func (s *batchedStorage) reset() {
	s.localStor = make(map[string]*batchesGroup)
	s.memSize = 0
	s.dbBatch.Reset()
}
// flush queues all pending groups into dbBatch and writes it to the db
// under the write lock. Callers are expected to call reset() afterwards.
func (s *batchedStorage) flush() error {
	for key, bg := range s.localStor {
		s.writeBatchGroup([]byte(key), bg)
	}
	s.writeLock.Lock()
	defer s.writeLock.Unlock()
	if err := s.db.Flush(s.dbBatch); err != nil {
		return err
	}
	return nil
} | pkg/state/batched_storage.go | 0.729809 | 0.465145 | batched_storage.go | starcoder |
package parser
import (
"io/ioutil"
"log"
"net/http"
"strings"
)
// Question is one parsed quiz entry from the javascript-questions README:
// the statement, an optional code snippet, the answer options keyed by
// letter, the correct answer letter and the explanation text.
type Question struct {
	Number int `json:"number"`
	Statement string `json:"statement"`
	Code string `json:"code"`
	Options map[string]string `json:"options"`
	CorrectAnswer string `json:"correct_answer"`
	Explanation string `json:"explanation"`
}
// GetQuestions downloads the questions README and returns every parsed
// Question in document order.
func GetQuestions() []Question {
	data := getData()
	return parseGlob(data)
}
// getData fetches the questions README over HTTP and returns its body as
// a string. On a request or read error the process exits via log.Fatal
// (matching the previous handling of the request error).
func getData() string {
	response, err := http.Get("https://raw.githack.com/lydiahallie/javascript-questions/master/en-EN/README.md")
	if err != nil {
		log.Fatal(err)
	}
	// Close the body so the underlying connection is released; it was
	// previously leaked.
	defer response.Body.Close()
	readmeContent, err := ioutil.ReadAll(response.Body)
	if err != nil {
		// The read error was previously discarded.
		log.Fatal(err)
	}
	return string(readmeContent)
}
// parseGlob splits the raw README into per-question blocks (separated by
// "---", skipping the preamble before the first separator) and parses
// each block into a Question via the helper functions.
func parseGlob(data string) []Question {
	blocks := strings.Split(data, "---")[1:]
	var questions []Question
	for idx, block := range blocks {
		questions = append(questions, Question{
			Number:        idx + 1,
			Statement:     getQuestionStatement(block),
			Code:          getQuestionCode(block),
			Options:       getQuestionOptions(block),
			CorrectAnswer: getQuestionCorrectAnswer(block),
			Explanation:   getQuestionExplanation(block),
		})
	}
	return questions
}
// Helper Functions

// getQuestionStatement extracts the question text from the first heading
// line ("###### N. statement") of a question block: everything after the
// question number, with surrounding whitespace trimmed.
func getQuestionStatement(data string) string {
	n := 0
	for i, line := range strings.Split(data, "\n") {
		if strings.Contains(line, "######") {
			n = i
			break
		}
	}
	statement := strings.Split(data, "\n")[n]
	// SplitN keeps everything after the first "." so statements that
	// themselves contain a period are no longer truncated; the guard
	// avoids an index panic when the line has no period at all.
	parts := strings.SplitN(statement, ".", 2)
	if len(parts) < 2 {
		return strings.TrimSpace(statement)
	}
	return strings.TrimSpace(parts[1])
}
// getQuestionCode extracts the fenced ```javascript snippet from a
// question block, or returns the empty string when there is no fence.
func getQuestionCode(data string) string {
	const fence = "```javascript"
	if !strings.Contains(data, fence) {
		return ""
	}
	afterFence := strings.Split(data, fence)[1]
	return strings.Split(afterFence, "```")[0]
}
// getQuestionOptions collects the answer options of a question block
// (lines like "- A: text" before the "<details>" section) into a map
// keyed by option letter.
func getQuestionOptions(data string) map[string]string {
	options := make(map[string]string)
	optionsSection := strings.Split(data, "<details>")[0]
	for _, line := range strings.Split(optionsSection, "\n") {
		if len(line) == 0 || line[0] != '-' {
			continue
		}
		letter := strings.Split(strings.Split(line, ":")[0], " ")[1]
		options[letter] = strings.TrimSpace(line[4:])
	}
	return options
}
// getQuestionCorrectAnswer pulls the answer letter out of the
// "<p>... Answer: X ...</p>" details section of a question block.
func getQuestionCorrectAnswer(data string) string {
	details := strings.Split(data, "<p>")[1]
	details = strings.Split(details, "</p>")[0]
	answer := strings.Split(details, "Answer: ")[1]
	// Only the first character (the option letter) is kept.
	return string(answer[0])
}
// getQuestionExplanation returns the raw contents of the first
// "<p>...</p>" section of a question block (the answer explanation).
func getQuestionExplanation(data string) string {
	explanation := strings.Split(data, "<p>")[1]
	explanation = strings.Split(explanation, "</p>")[0]
	return explanation
} | backend/parser/parser.go | 0.665737 | 0.441914 | parser.go | starcoder |
package functions
import (
"fmt"
"github.com/aokoli/goutils"
"github.com/huandu/xstrings"
"strings"
)
// FuncAbbrev truncates a string to width characters, ending with "...".
// Widths below 4 return the input unchanged.
var FuncAbbrev = Function{
	Description: `Abbreviates a string using ellipses. This will turn the string "Now is the time for all good men" into "Now is the time for..."`,
	Parameters: Parameters{{
		Name: "width",
		Description: "Maximum length of result string, must be at least 4",
	}, {
		Name: "in",
	}},
}.MustWithFunc(func(width int, in string) (string, error) {
	if width < 4 {
		return in, nil
	}
	return goutils.Abbreviate(in, width)
})
// FuncAbbrevFull abbreviates with a left-edge offset; inputs too small
// for the ellipses (width < 4, or < 7 with an offset) pass through.
var FuncAbbrevFull = Function{
	Description: `AbbreviateFull abbreviates a string using ellipses. This will turn the string "Now is the time for all good men" into "...is the time for..."
This function works like Abbreviate(string, int), but allows you to specify a "left edge" offset. Note that this left edge is not
necessarily going to be the leftmost character in the result, or the first character following the ellipses, but it will appear
somewhere in the result.
In no case will it return a string of length greater than maxWidth.`,
	Parameters: Parameters{{
		Name: "offset",
		Description: "Left edge of source string",
	}, {
		Name: "width",
		Description: "Maximum length of result string, must be at least 4",
	}, {
		Name: "in",
	}},
}.MustWithFunc(func(offset, width int, in string) (string, error) {
	if width < 4 || offset > 0 && width < 7 {
		return in, nil
	}
	return goutils.AbbreviateFull(in, offset, width)
})
// FuncUpper upper-cases the input.
var FuncUpper = Function{
	Parameters: Parameters{{
		Name: "in",
	}},
	Returns: Return{
		Description: `A copy of the string s with all Unicode letters mapped to their upper case.`,
	},
}.MustWithFunc(strings.ToUpper)
// FuncLower lower-cases the input.
var FuncLower = Function{
	Parameters: Parameters{{
		Name: "in",
	}},
	Returns: Return{
		Description: `A copy of the string s with all Unicode letters mapped to their lower case.`,
	},
}.MustWithFunc(strings.ToLower)
// FuncTrim removes surrounding whitespace.
var FuncTrim = Function{
	Description: "Removes space from either side of a string",
	Parameters: Parameters{{
		Name: "in",
	}},
}.MustWithFunc(strings.TrimSpace)
// FuncTrimAll trims the given cutset characters from both ends.
var FuncTrimAll = Function{
	Description: "Remove given characters from the front or back of a string.",
	Parameters: Parameters{{
		Name: "toRemove",
	}, {
		Name: "in",
	}},
}.MustWithFunc(func(toRemove string, in string) string {
	return strings.Trim(in, toRemove)
})
// FuncTrimSuffix removes the given suffix string, if present.
var FuncTrimSuffix = Function{
	Description: "Remove given characters from the back of a string.",
	Parameters: Parameters{{
		Name: "toRemove",
	}, {
		Name: "in",
	}},
}.MustWithFunc(func(toRemove string, in string) string {
	return strings.TrimSuffix(in, toRemove)
})
// FuncCapitalize title-cases the first letter of each word.
var FuncCapitalize = Function{
	Description: `Capitalize capitalizes all the delimiter separated words in a string. Only the first letter of each word is changed.
To convert the rest of each word to lowercase at the same time, use CapitalizeFully(str string, delimiters ...rune).
The delimiters represent a set of characters understood to separate words. The first string character
and the first non-delimiter character after a delimiter will be capitalized. A "" input string returns "".
Capitalization uses the Unicode title case, normally equivalent to upper case.`,
	Parameters: Parameters{{
		Name: "in",
	}},
}.MustWithFunc(func(in string) string {
	return goutils.Capitalize(in)
})
// FuncUncapitalize lower-cases the first letter of each word.
var FuncUncapitalize = Function{
	Description: `Uncapitalize uncapitalizes all the whitespace separated words in a string. Only the first letter of each word is changed.
The delimiters represent a set of characters understood to separate words. The first string character and the first non-delimiter
character after a delimiter will be uncapitalized. Whitespace is defined by unicode.IsSpace(char).`,
	Parameters: Parameters{{
		Name: "in",
	}},
}.MustWithFunc(func(in string) string {
	return goutils.Uncapitalize(in)
})
// FuncReplace replaces every occurrence of old with new.
var FuncReplace = Function{
	Description: "Replaces the given <old> string with the <new> string.",
	Parameters: Parameters{{
		Name: "old",
	}, {
		Name: "new",
	}, {
		Name: "in",
	}},
}.MustWithFunc(func(old string, n string, in string) string {
	return strings.Replace(in, old, n, -1)
})
// FuncRepeat repeats the input count times.
var FuncRepeat = Function{
	Description: "Repeat a string multiple times.",
	Parameters: Parameters{{
		Name: "count",
	}, {
		Name: "in",
	}},
}.MustWithFunc(func(count int, in string) string {
	return strings.Repeat(in, count)
})
// FuncSubstr returns in[start:length]. A negative start means "from the
// beginning" and a negative length means "to the end"; out-of-range
// values are clamped so the function no longer panics with
// "slice bounds out of range".
var FuncSubstr = Function{
	Description: "Get a substring from a string.",
	Parameters: Parameters{{
		Name: "start",
	}, {
		Name: "length",
	}, {
		Name: "in",
	}},
}.MustWithFunc(func(start int, length int, in string) string {
	// Clamp all indices into [0, len(in)]; for valid arguments the
	// result is identical to the previous behavior.
	if start < 0 {
		start = 0
	}
	if length < 0 || length > len(in) {
		length = len(in)
	}
	if start > len(in) {
		start = len(in)
	}
	if start > length {
		return in[start:]
	}
	return in[start:length]
})
var FuncTrunc = Function{
Description: "Truncate a string (and add no suffix).",
Parameters: Parameters{{
Name: "length",
}, {
Name: "in",
}},
}.MustWithFunc(func(length int, in string) string {
if len(in) <= length {
return in
}
return in[0:length]
})
var FuncInitials = Function{
Description: "Given multiple words, take the first letter of each word and combine.",
Parameters: Parameters{{
Name: "in",
}},
}.MustWithFunc(func(in string) string {
return goutils.Initials(in)
})
var FuncRandAlphaNum = Function{
Description: "These four functions generate random strings, but with different base character sets of [0-9A-Za-z].",
Parameters: Parameters{{
Name: "count",
}},
}.MustWithFunc(func(count int) (string, error) {
return goutils.RandomAlphaNumeric(count)
})
var FuncRandAlpha = Function{
Description: "These four functions generate random strings, but with different base character sets of [A-Za-z].",
Parameters: Parameters{{
Name: "count",
}},
}.MustWithFunc(func(count int) (string, error) {
return goutils.RandomAlphabetic(count)
})
var FuncRandNum = Function{
Description: "These four functions generate random strings, but with different base character sets of [0-9].",
Parameters: Parameters{{
Name: "count",
}},
}.MustWithFunc(func(count int) (string, error) {
return goutils.RandomNumeric(count)
})
var FuncWarp = Function{
Description: `Wrap wraps a single line of text, identifying words by ' '.
New lines will be separated by '\n'. Very int64 words, such as URLs will not be wrapped.
Leading spaces on a new line are stripped. Trailing spaces are not stripped.`,
Parameters: Parameters{{
Name: "length",
}, {
Name: "in",
}},
}.MustWithFunc(func(length int, in string) string {
return goutils.Wrap(in, length)
})
var FuncWarpCustom = Function{
Description: `WrapCustom wraps a single line of text, identifying words by ' '.
Leading spaces on a new line are stripped. Trailing spaces are not stripped.`,
Parameters: Parameters{{
Name: "length",
}, {
Name: "newLine",
}, {
Name: "wrapInt64Words",
}, {
Name: "in",
}},
}.MustWithFunc(func(length int, newLine string, wrapInt64Words bool, in string) string {
return goutils.WrapCustom(in, length, newLine, wrapInt64Words)
})
var FuncHasPrefix = Function{
Description: `Test whether a string has a given prefix.`,
Parameters: Parameters{{
Name: "toSearchFor",
}, {
Name: "in",
}},
}.MustWithFunc(func(toSearchFor string, in string) bool {
return strings.HasPrefix(in, toSearchFor)
})
var FuncHasSuffix = Function{
Description: `Test whether a string has a given suffix.`,
Parameters: Parameters{{
Name: "toSearchFor",
}, {
Name: "in",
}},
}.MustWithFunc(func(toSearchFor string, in string) bool {
return strings.HasSuffix(in, toSearchFor)
})
var FuncQuote = Function{
Description: `Wrap a string in float64 quotes.`,
Parameters: Parameters{{
Name: "in",
}},
}.MustWithFunc(func(str ...interface{}) string {
out := make([]string, len(str))
for i, s := range str {
out[i] = fmt.Sprintf("%q", strval(s))
}
return strings.Join(out, " ")
})
// FuncSQuote wraps each argument in single quotes and joins the results
// with spaces. Unlike FuncQuote, values are rendered with %v, so embedded
// quotes are not escaped.
// NOTE(review): the closure is variadic while Parameters declares a single
// "in" — confirm how Function treats extra arguments.
var FuncSQuote = Function{
Description: `Wrap a string in single quotes.`,
Parameters: Parameters{{
Name: "in",
}},
}.MustWithFunc(func(str ...interface{}) string {
out := make([]string, len(str))
for i, s := range str {
out[i] = fmt.Sprintf("'%v'", s)
}
return strings.Join(out, " ")
})
// FuncCat concatenates all of its arguments into one space-separated
// string by building a "%v %v ..." format on the fly.
var FuncCat = Function{
Description: `Concatenates multiple strings together into one, separating them with spaces.`,
Parameters: Parameters{{
Name: "in",
}},
}.MustWithFunc(func(v ...interface{}) string {
r := strings.TrimSpace(strings.Repeat("%v ", len(v)))
return fmt.Sprintf(r, v...)
})
// FuncIndent prefixes every line of str — including the first — with
// `indent` spaces; useful when aligning multi-line strings in templates.
var FuncIndent = Function{
Description: `Indents every line in a given string to the specified indent width. This is useful when aligning multi-line strings.`,
Parameters: Parameters{{
Name: "indent",
}, {
Name: "str",
}},
}.MustWithFunc(func(indent int, str string) string {
// Pad the first line directly, then re-pad after every newline.
pad := strings.Repeat(" ", indent)
return pad + strings.Replace(str, "\n", "\n"+pad, -1)
})
// FuncSnakeCase converts a string to snake_case via xstrings.ToSnakeCase.
var FuncSnakeCase = Function{
Description: `convert all upper case characters in a string to snake case format.`,
Parameters: Parameters{{
Name: "in",
}},
}.MustWithFunc(xstrings.ToSnakeCase)
// FuncCamelCase converts a string to CamelCase via xstrings.ToCamelCase.
var FuncCamelCase = Function{
Description: `Convert all lower case characters behind underscores to upper case character.`,
Parameters: Parameters{{
Name: "in",
}},
}.MustWithFunc(xstrings.ToCamelCase)
// FuncKebabCase converts a string to kebab-case via xstrings.ToKebabCase.
var FuncKebabCase = Function{
Description: `Convert all upper case characters in a string to kebab case format.`,
Parameters: Parameters{{
Name: "in",
}},
}.MustWithFunc(xstrings.ToKebabCase)
// FuncShuffle randomizes the runes of a string via xstrings.Shuffle.
var FuncShuffle = Function{
Description: `Shuffle randomizes runes in a string and returns the result.`,
Parameters: Parameters{{
Name: "in",
}},
}.MustWithFunc(xstrings.Shuffle)
// FuncsStrings maps template function names to their implementations for
// the strings category.
// NOTE(review): the "warp"/"warpCustom" keys carry the Warp/Wrap typo;
// they are user-facing template names, so renaming them would break
// existing templates.
var FuncsStrings = Functions{
"abbrev": FuncAbbrev,
"abbrevFull": FuncAbbrevFull,
"upper": FuncUpper,
"lower": FuncLower,
"trim": FuncTrim,
"trimAll": FuncTrimAll,
"trimSuffix": FuncTrimSuffix,
"capitalize": FuncCapitalize,
"uncapitalize": FuncUncapitalize,
"replace": FuncReplace,
"repeat": FuncRepeat,
"substr": FuncSubstr,
"trunc": FuncTrunc,
"initials": FuncInitials,
"randAlphaNum": FuncRandAlphaNum,
"randAlpha": FuncRandAlpha,
"randNum": FuncRandNum,
"warp": FuncWarp,
"warpCustom": FuncWarpCustom,
"hasPrefix": FuncHasPrefix,
"hasSuffix": FuncHasSuffix,
"quote": FuncQuote,
"sQuote": FuncSQuote,
"cat": FuncCat,
"indent": FuncIndent,
"snakeCase": FuncSnakeCase,
"camelCase": FuncCamelCase,
"kebabCase": FuncKebabCase,
"shuffle": FuncShuffle,
}
// CategoryStrings groups all of the string functions into one category.
var CategoryStrings = Category{
Functions: FuncsStrings,
}
// strval renders an arbitrary value as a string, preferring the value's
// own textual form (string, []byte, error, fmt.Stringer) and falling back
// to fmt.Sprintf("%v", ...) for everything else.
func strval(v interface{}) string {
	switch val := v.(type) {
	case string:
		return val
	case []byte:
		return string(val)
	case error:
		return val.Error()
	case fmt.Stringer:
		return val.String()
	default:
		return fmt.Sprintf("%v", val)
	}
} | template/functions/strings.go | 0.731442 | 0.599427 | strings.go | starcoder |
package ordinarykriging
import "image/color"
// ModelType names the variogram model used for the kriging interpolation.
type ModelType string
// Supported variogram models.
const (
Gaussian ModelType = "gaussian"
Exponential ModelType = "exponential"
Spherical ModelType = "spherical"
)
var (
// DefaultLegendColor is the default 10-step cold-to-hot color ramp
// used for legends.
DefaultLegendColor = []color.Color{
NewRGBA(40, 146, 199, 255),
NewRGBA(96, 163, 181, 255),
NewRGBA(140, 184, 164, 255),
NewRGBA(177, 204, 145, 255),
NewRGBA(215, 227, 125, 255),
NewRGBA(250, 250, 100, 255),
NewRGBA(252, 207, 81, 255),
NewRGBA(252, 164, 63, 255),
NewRGBA(242, 77, 31, 255),
NewRGBA(232, 16, 20, 255),
}
// DefaultGridLevelColor maps value intervals (covering [-30, 40]) to
// the colors used when rendering grid cells.
// NOTE(review): this ramp has 11 steps versus DefaultLegendColor's 10
// and inserts an extra orange (247,122,45) — confirm the two are meant
// to diverge.
DefaultGridLevelColor = []GridLevelColor{
{Color: NewRGBA(40, 146, 199, 255), Value: [2]float64{-30, -15}},
{Color: NewRGBA(96, 163, 181, 255), Value: [2]float64{-15, -10}},
{Color: NewRGBA(140, 184, 164, 255), Value: [2]float64{-10, -5}},
{Color: NewRGBA(177, 204, 145, 255), Value: [2]float64{-5, 0}},
{Color: NewRGBA(215, 227, 125, 255), Value: [2]float64{0, 5}},
{Color: NewRGBA(250, 250, 100, 255), Value: [2]float64{5, 10}},
{Color: NewRGBA(252, 207, 81, 255), Value: [2]float64{10, 15}},
{Color: NewRGBA(252, 164, 63, 255), Value: [2]float64{15, 20}},
{Color: NewRGBA(247, 122, 45, 255), Value: [2]float64{20, 25}},
{Color: NewRGBA(242, 77, 31, 255), Value: [2]float64{25, 30}},
{Color: NewRGBA(232, 16, 20, 255), Value: [2]float64{30, 40}},
}
)
// DistanceList is a list of 2-element pairs ordered by their first
// component (the distance). It implements sort.Interface.
type DistanceList [][2]float64

// Len reports the number of pairs in the list.
func (d DistanceList) Len() int {
	return len(d)
}

// Less orders pairs by their first component.
func (d DistanceList) Less(i, j int) bool {
	return d[i][0] < d[j][0]
}

// Swap exchanges the pairs at positions i and j.
func (d DistanceList) Swap(i, j int) {
	d[i], d[j] = d[j], d[i]
}
// GridMatrices holds a regular grid of interpolated values together with
// its extents (X/Y limits), value range (Zlim), cell width and the value
// used to mark cells with no data.
type GridMatrices struct {
Data [][]float64 `json:"data"`
Width float64 `json:"width"`
Xlim [2]float64 `json:"xLim"`
Ylim [2]float64 `json:"yLim"`
Zlim [2]float64 `json:"zLim"`
NodataValue float64 `json:"nodataValue"`
}
// ContourRectangle holds a flattened (row-major, XWidth*YWidth) contour
// grid with its extents, value range and per-axis resolution.
type ContourRectangle struct {
Contour []float64 `json:"contour"`
XWidth int `json:"xWidth"`
YWidth int `json:"yWidth"`
Xlim [2]float64 `json:"xLim"`
Ylim [2]float64 `json:"yLim"`
Zlim [2]float64 `json:"zLim"`
XResolution float64 `json:"xResolution"`
YResolution float64 `json:"yResolution"`
}
// Point is a lon/lat coordinate pair.
type Point [2]float64 // example [103.614373, 27.00541]
// Ring is a closed sequence of points forming one polygon boundary.
type Ring []Point
// PolygonCoordinates is the ring list of a polygon (outer ring first, in
// GeoJSON convention).
type PolygonCoordinates []Ring
// PolygonGeometry is a minimal GeoJSON-style polygon geometry.
type PolygonGeometry struct {
Type string `json:"type" default:"Polygon"` // Polygon
Coordinates []Ring `json:"coordinates,omitempty"` // coordinates
}
// NewRGBA builds a color.RGBA value from the given red, green, blue and
// alpha channel values.
func NewRGBA(r, g, b, a uint8) color.RGBA {
	return color.RGBA{R: r, G: g, B: b, A: a}
}
// GridLevelColor binds a value interval to the color used for grid cells
// whose value falls inside it.
type GridLevelColor struct {
Value [2]float64 `json:"value"` // value interval, e.g. [0, 5]
Color color.RGBA `json:"color"` // RGBA color, e.g. {255, 255, 255, 255}
}
// PredictDate holds one interpolated grid sample: the (X, Y) cell index
// and its predicted value.
// NOTE(review): the name reads like a typo for "PredictData".
type PredictDate struct {
X int
Y int
Value float64
} | ordinarykriging/type.go | 0.670824 | 0.421195 | type.go | starcoder |
package vector3
import (
"github.com/louis030195/protometry/api/quaternion"
"math"
"math/rand"
)
// NewVector3 constructs a Vector3 with the given components.
func NewVector3(x, y, z float64) *Vector3 {
return &Vector3{X: x, Y: y, Z: z}
}
// Clone returns a copy of the vector.
func (v *Vector3) Clone() *Vector3 {
return NewVector3(v.X, v.Y, v.Z)
}
// NewVector3Zero constructs a Vector3 with all components 0.
func NewVector3Zero() *Vector3 {
return NewVector3(0, 0, 0)
}
// NewVector3One constructs a Vector3 with all components 1.
func NewVector3One() *Vector3 {
return NewVector3(1, 1, 1)
}
// NewVector3Max returns a Vector3 with every component set to
// math.MaxFloat64 (not +Inf).
func NewVector3Max() *Vector3 {
return NewVector3(math.MaxFloat64, math.MaxFloat64, math.MaxFloat64)
}
// NewVector3Min returns a Vector3 with every component set to
// -math.MaxFloat64 (not -Inf).
func NewVector3Min() *Vector3 {
return NewVector3(-math.MaxFloat64, -math.MaxFloat64, -math.MaxFloat64)
}
// Equal reports whether v and v2 are component-wise equal to within a
// fixed absolute epsilon of 1e-16 (note: absolute, not relative, so large
// values effectively require exact equality).
func (v Vector3) Equal(v2 Vector3) bool {
	const epsilon = 1e-16
	return math.Abs(v.X-v2.X) < epsilon &&
		math.Abs(v.Y-v2.Y) < epsilon &&
		math.Abs(v.Z-v2.Z) < epsilon
}
// Pow returns a new vector with every component squared (element-wise
// v*v). The value receiver means the caller's vector is not modified.
func (v Vector3) Pow() Vector3 {
v.X *= v.X
v.Y *= v.Y
v.Z *= v.Z
return v
}
// Sum returns the sum of the vector's components.
func (v Vector3) Sum() float64 {
return v.X + v.Y + v.Z
}
// Norm returns the squared Euclidean norm (sum of squared components).
// NOTE(review): despite the name this is the *squared* norm; Norm2 is the
// actual length. The naming is kept because other code in this package
// depends on it.
func (v Vector3) Norm() float64 { return v.Pow().Sum() }

// Norm2 returns the Euclidean length (square root of Norm).
func (v Vector3) Norm2() float64 { return math.Sqrt(v.Norm()) }

// Normalize returns a unit vector in the same direction as v, or the zero
// vector when v has zero length.
// Fixed: the previous implementation took Norm2() (already the length)
// and then divided by its square root again, returning a non-unit vector.
func (v Vector3) Normalize() Vector3 {
	n := v.Norm() // squared length
	if n == 0 {
		return *NewVector3(0, 0, 0)
	}
	return v.Times(1 / math.Sqrt(n))
}
// Abs returns a new vector whose components are the absolute values of
// v's components; v itself is not modified.
func (v *Vector3) Abs() Vector3 {
nv := NewVector3(math.Abs(v.X), math.Abs(v.Y), math.Abs(v.Z))
return *nv
}
// Plus returns the standard vector sum of v and v2 as a new vector.
// Not in-place (value receiver).
func (v Vector3) Plus(v2 Vector3) Vector3 {
v.X += v2.X
v.Y += v2.Y
v.Z += v2.Z
return v
}
// Add adds v2 to v element-wise, in place.
func (v *Vector3) Add(v2 *Vector3) {
v.X += v2.X
v.Y += v2.Y
v.Z += v2.Z
}
// Minus returns the standard vector difference v - v2 as a new vector.
// Not in-place (value receiver).
func (v Vector3) Minus(v2 Vector3) Vector3 {
v.X -= v2.X
v.Y -= v2.Y
v.Z -= v2.Z
return v
}
// Subtract subtracts v2 from v element-wise, in place.
func (v *Vector3) Subtract(v2 *Vector3) {
v.X -= v2.X
v.Y -= v2.Y
v.Z -= v2.Z
}
// Times returns v scaled by m as a new vector.
// Not in-place (value receiver).
func (v Vector3) Times(m float64) Vector3 {
v.X *= m
v.Y *= m
v.Z *= m
return v
}
// Scale rescales the vector by m, in place.
func (v *Vector3) Scale(m float64) {
v.X *= m
v.Y *= m
v.Z *= m
}
// Divide divides each component by m, in place.
// NOTE(review): contrary to the old comment, float64 division by zero
// does not panic in Go — it yields ±Inf (or NaN for 0/0).
func (v *Vector3) Divide(m float64) {
v.X /= m
v.Y /= m
v.Z /= m
}
// Dot returns the standard dot product of v and v2.
func (v Vector3) Dot(v2 Vector3) float64 {
return (v.X * v2.X) + (v.Y * v2.Y) + (v.Z * v2.Z)
}
// Cross returns the standard cross product of v and v2.
// Fixed: the Z component previously computed v.X*v2.Y - v.Y*v.X (mixing
// in v.X where v2.X belongs), which made every cross product wrong.
func (v Vector3) Cross(v2 Vector3) *Vector3 {
	return NewVector3(
		v.Y*v2.Z-v.Z*v2.Y,
		v.Z*v2.X-v.X*v2.Z,
		v.X*v2.Y-v.Y*v2.X,
	)
}
// Distance returns the Euclidean distance between v and v2.
func (v Vector3) Distance(v2 Vector3) float64 { return math.Sqrt(v.Minus(v2).Pow().Sum()) }

// Angle returns the angle between v and v2 in radians, via
// atan2(|v x v2|, v . v2).
// Fixed: the cross-product magnitude previously used Norm() (the squared
// magnitude in this package's naming); the formula requires the true
// magnitude, i.e. Norm2().
func (v Vector3) Angle(v2 Vector3) float64 {
	return math.Atan2(v.Cross(v2).Norm2(), v.Dot(v2))
}
// Min returns a new vector taking the component-wise minimum of v and v2.
func Min(v Vector3, v2 Vector3) Vector3 {
return *NewVector3(math.Min(v.X, v2.X), math.Min(v.Y, v2.Y), math.Min(v.Z, v2.Z))
}
// Max returns a new vector taking the component-wise maximum of v and v2.
func Max(v Vector3, v2 Vector3) Vector3 {
return *NewVector3(math.Max(v.X, v2.X), math.Max(v.Y, v2.Y), math.Max(v.Z, v2.Z))
}
// Lerp returns the linear interpolation between v (f=0) and v2 (f=1) as a
// new vector; f is not clamped.
func (v *Vector3) Lerp(v2 *Vector3, f float64) *Vector3 {
return NewVector3((v2.X-v.X)*f+v.X, (v2.Y-v.Y)*f+v.Y, (v2.Z-v.Z)*f+v.Z)
}
// expandBits spreads the low 10 bits of v so that each bit is followed by
// two zero bits (the standard magic-number bit dilation used to build
// Morton codes).
func expandBits(v uint) uint {
v = (v * 0x00010001) & 0xFF0000FF
v = (v * 0x00000101) & 0x0F00F00F
v = (v * 0x00000011) & 0xC30C30C3
v = (v * 0x00000005) & 0x49249249
return v
}
// Morton3D calculates a 30-bit Morton code for the given 3D point located
// within the unit cube [0,1]: each coordinate is quantized to 10 bits,
// dilated with expandBits, and the three results interleaved with x in
// the most significant position (xx*4 + yy*2 + zz).
func Morton3D(v Vector3) uint { // TODO: decoder
x := math.Min(math.Max(v.X*1024.0, 0.0), 1023.0)
y := math.Min(math.Max(v.Y*1024.0, 0.0), 1023.0)
z := math.Min(math.Max(v.Z*1024.0, 0.0), 1023.0)
xx := expandBits(uint(x))
yy := expandBits(uint(y))
zz := expandBits(uint(z))
return xx*4 + yy*2 + zz
}
// randFloat returns a pseudo-random float64 drawn uniformly from the
// half-open interval [min, max).
func randFloat(min, max float64) float64 {
	span := max - min
	return min + span*rand.Float64()
}
// RandomCirclePoint returns a random point in the XZ plane around (x, z).
// NOTE(review): this samples uniformly from the axis-aligned square of
// half-width radius, not from the disc the name suggests — confirm intent.
func RandomCirclePoint(x, z, radius float64) Vector3 {
return *NewVector3(randFloat(-radius+x, radius+x),
0,
randFloat(-radius+z, radius+z))
}
// RandomSpherePoint returns a random point around center.
// NOTE(review): this samples uniformly from the axis-aligned cube of
// half-width radius, not from the ball the name suggests — confirm intent.
func RandomSpherePoint(center Vector3, radius float64) Vector3 {
return *NewVector3(randFloat(-radius+center.X, radius+center.X),
randFloat(-radius+center.Y, radius+center.Y),
randFloat(-radius+center.Z, radius+center.Z))
}
// LookAt returns a quaternion intended to represent the rotation required
// to look from v towards b.
// NOTE(review): it stores the raw angle in both the Y and W components
// rather than building a normalized axis-angle quaternion — confirm this
// matches the consumer's expectations.
func (v Vector3) LookAt(b Vector3) quaternion.Quaternion {
angle := v.Angle(b)
return *quaternion.NewQuaternion(0, angle, 0, angle)
}
// Mutate returns a new vector with each coordinate multiplied by an
// independent random factor drawn from [-rate, rate).
func (v Vector3) Mutate(rate float64) Vector3 {
return *NewVector3(v.X*randFloat(-rate, rate), v.Y*randFloat(-rate, rate), v.Z*randFloat(-rate, rate))
} | api/vector3/vector3.go | 0.946312 | 0.744169 | vector3.go | starcoder |
package imagecolor
import (
"image"
"math"
"sort"
"github.com/lucasb-eyer/go-colorful"
"gonum.org/v1/gonum/stat"
)
// Box - x1, y1, x2, y2 int
type Box struct {
Rect image.Rectangle
Focused float64
Score float64
MeanL float64
StdL float64
SkewL float64
values []float64
}
// NewBox - Create a new Box
func NewBox(x1, y1, x2, y2 int) *Box {
return &Box{Rect: image.Rect(x1, y1, x2, y2)}
}
// ThirdsBoxes - Create boxes for thirds composition.
// 4 Boxes with centers at 1/3 and 2/3 horizontal and vertical.
func ThirdsBoxes(width, height int) (boxes []*Box) {
x1, y1 := int(width/6), int(height/6)
boxes = append(boxes, NewBox(x1, y1, x1*3, y1*3))
boxes = append(boxes, NewBox(x1*3, y1, (x1*3)+(x1*2), (y1*3)))
boxes = append(boxes, NewBox(x1, y1*3, (x1*3), (y1*3)+(y1*2)))
boxes = append(boxes, NewBox(x1*3, y1*3, (x1*3)+(x1*2), (y1*3)+(y1*2)))
return
}
func focusedPixels(val []float64) (float64, float64, float64, float64) {
var focused []float64
for _, v := range val {
if v > 0.02 {
focused = append(focused, v)
}
}
sort.Float64s(focused)
mean, std := stat.MeanStdDev(focused, nil)
skew := stat.Skew(focused, nil)
per := float64(len(focused)) / float64(len(val))
return mean, std, skew, per
}
// MeanStd - Get Mean and StandardVariation for values
func (b *Box) MeanStd() {
b.MeanL, b.StdL = stat.MeanStdDev(b.values, nil)
}
// Skew - Get the skew of the normal distribution curve for values
func (b *Box) Skew() {
b.SkewL = stat.Skew(b.values, nil)
}
// FocusedPixels - Create ImageColors array from an image
func (b *Box) FocusedPixels(m image.Image) {
bounds := m.Bounds()
minX, minY := bounds.Min.X, bounds.Min.Y
width, height := bounds.Max.X-minX, bounds.Max.Y-minY
var ic ImageColors
ic.defineSize(width, height)
for x := 0; x < width; x++ {
for y := 0; y < height; y++ {
cf, _ := colorful.MakeColor(m.At(x+minX, y+minY))
colorHSL := NewColorHSL(cf)
if colorHSL[lightValue] > 0.02 {
b.values = append(b.values, colorHSL[lightValue])
}
}
}
b.Focused = float64(len(b.values)) / float64(width*height)
}
// FocusScore -
func (b *Box) FocusScore() {
b.Score = (math.Sqrt(b.MeanL*b.MeanL*b.StdL) - b.SkewL*b.Focused/(b.MeanL*10000)) * 10
} | box.go | 0.794026 | 0.494263 | box.go | starcoder |
// Package ged implements a global-purpose encoding/decoding library.
package ged
import (
"math"
"strings"
"github.com/nkcr/ged/alphabet"
"golang.org/x/xerrors"
)
// EncodeHex encodes data to hexadecimal. Uses the lower case letter form.
func EncodeHex(data []byte) string {
return EncodeString(data, alphabet.Hex)
}
// DecodeHex decodes an encoded hex string. The input is lower-cased first
// so that upper-case hex produced by other encoders is also accepted.
func DecodeHex(encoded string) ([]byte, error) {
// lower 'encoded' to ensure compatibility with other encoding that use
// capital letters.
return DecodeString(strings.ToLower(encoded), alphabet.Hex)
}
// Encode58 encodes data to a base58 representation using the Bitcoin
// alphabet. (Comment renamed from "EncodeBase58" to match the function
// name, per godoc convention.)
func Encode58(data []byte) string {
return EncodeString(data, alphabet.Base58Bitcoin)
}
// Decode58 decodes a string from a base58 representation using the
// Bitcoin alphabet.
func Decode58(encoded string) ([]byte, error) {
return DecodeString(encoded, alphabet.Base58Bitcoin)
}
// EncodeString encodes data using the provided alphabet: the bytes are
// first rebased from base 256 to the alphabet's base, then each digit is
// mapped through the alphabet's character set.
func EncodeString(data []byte, alphabet alphabet.Alphabet) string {
encoded := Encode(data, alphabet.BaseTo)
out := new(strings.Builder)
for _, e := range encoded {
out.WriteByte(alphabet.Charset[e])
}
return out.String()
}
// DecodeString decodes an encoded string using the provided alphabet,
// mapping each character back to its digit value and then rebasing to
// base 256. Returns an error for characters outside the alphabet.
// NOTE(review): the loop ranges over runes but indexes buf by byte
// offset; this assumes alphabet characters are single-byte (multi-byte
// runes fail the Decoding lookup) — confirm.
func DecodeString(encoded string, alphabet alphabet.Alphabet) ([]byte, error) {
	buf := make([]byte, len(encoded))
	for i, c := range encoded {
		val, ok := alphabet.Decoding[c]
		if !ok {
			// Fixed: %s is not a valid verb for a rune and printed
			// "%!s(int32=...)"; %q shows the offending character quoted.
			return nil, xerrors.Errorf("invalid character: %q", c)
		}
		buf[i] = val
	}
	return Decode(buf, alphabet.BaseTo)
}
// Encode rebases data from base 256 to the given base. baseTo SHOULD NOT
// be greater than 256.
func Encode(data []byte, baseTo uint) []byte {
// there can't be an error because baseFrom is set to 256
encoded, _ := Transform(data, 256, baseTo)
return encoded
}
// Decode rebases data from the given base back to base 256. baseFrom
// SHOULD NOT be greater than 256. Errors when a digit is >= baseFrom.
func Decode(data []byte, baseFrom uint) ([]byte, error) {
return Transform(data, baseFrom, 256)
}
// Transform is a low-level generic function to transform digits from a
// given base to a target base, using repeated multiply-and-carry over an
// output digit array. Returns an error if an element in data is greater
// or equal to baseFrom. baseFrom and baseTo SHOULD NOT be greater than
// 256 and MUST NOT be lower than 2.
func Transform(data []byte, baseFrom, baseTo uint) ([]byte, error) {
	if baseFrom <= 1 {
		return nil, xerrors.Errorf("invalid baseFrom: %d <= 1", baseFrom)
	}
	if baseTo <= 1 {
		return nil, xerrors.Errorf("invalid baseTo: %d <= 1", baseTo)
	}
	// Leading zero digits carry over one-for-one between bases, so strip
	// them here and re-attach them at the end.
	prefixZeros := 0
	for _, e := range data {
		if e != 0 {
			break
		}
		prefixZeros++
	}
	// Upper bound on the number of output digits:
	// n * log(baseFrom)/log(baseTo), plus one for rounding.
	factor := math.Log(float64(baseFrom)) / math.Log(float64(baseTo))
	// Renamed from "cap", which shadowed the builtin of the same name.
	resultSize := int(float64(len(data)-prefixZeros)*factor + 1)
	result := make([]byte, resultSize)
	reverseEnd := len(result) - 1
	var reverseIndex int
	for _, e := range data[prefixZeros:] {
		carry := uint(e)
		if carry >= baseFrom {
			return nil, xerrors.Errorf("invalid data: %d >= %d", e, baseFrom)
		}
		reverseIndex = len(result) - 1
		for ; reverseIndex > reverseEnd || carry != 0; reverseIndex-- {
			// we populate in reverse order
			carry = carry + uint(result[reverseIndex])*baseFrom
			result[reverseIndex] = byte(carry % baseTo)
			carry = carry / baseTo
		}
		// keep track of the last encoded index
		reverseEnd = reverseIndex
	}
	// Re-attach the stripped leading zeros and drop the unused prefix of
	// the over-allocated result buffer.
	return append(make([]byte, prefixZeros), result...)[reverseEnd+1:], nil
} | mod.go | 0.878985 | 0.406921 | mod.go | starcoder |
package yurit
/*
import (
"fmt"
"io"
)
// id3v2Header is a type which represents an ID3v2 tag header.
type id3v2Header struct {
Version Format
Unsynchronisation bool
ExtendedHeader bool
Experimental bool
Footer bool
Size uint
}
// readID3v2Header reads the ID3v2 header from the given io.Reader.
// offset it number of bytes of header that was read
func readID3v2Header(r io.Reader) (h *id3v2Header, offset uint, err error) {
offset = 10
b, err := readBytes(r, offset)
if err != nil {
return nil, 0, fmt.Errorf("expected to read 10 bytes (id3v2Header): %v", err)
}
if string(b[0:3]) != "ID3" {
return nil, 0, fmt.Errorf("expected to read \"ID3\"")
}
b = b[3:]
var vers Format
switch uint(b[0]) {
case 2:
vers = ID3v2_2
case 3:
vers = ID3v2_3
case 4:
vers = ID3v2_4
case 0, 1:
fallthrough
default:
return nil, 0, fmt.Errorf("ID3 version: %v, expected: 2, 3 or 4", uint(b[0]))
}
// NB: We ignore b[1] (the revision) as we don't currently rely on it.
h = &id3v2Header{
Version: vers,
Unsynchronisation: getBit(b[2], 7),
ExtendedHeader: getBit(b[2], 6),
Experimental: getBit(b[2], 5),
Footer: getBit(b[2], 4),
Size: uint(get7BitChunkedInt(b[3:7])),
}
if h.ExtendedHeader {
switch vers {
case ID3v2_3:
b, err := readBytes(r, 4)
if err != nil {
return nil, 0, fmt.Errorf("expected to read 4 bytes (ID3v23 extended header len): %v", err)
}
// skip header, size is excluding len bytes
extendedHeaderSize := uint(getInt(b))
_, err = readBytes(r, extendedHeaderSize)
if err != nil {
return nil, 0, fmt.Errorf("expected to read %d bytes (ID3v23 skip extended header): %v", extendedHeaderSize, err)
}
offset += extendedHeaderSize
case ID3v2_4:
b, err := readBytes(r, 4)
if err != nil {
return nil, 0, fmt.Errorf("expected to read 4 bytes (ID3v24 extended header len): %v", err)
}
// skip header, size is synchsafe int including len bytes
extendedHeaderSize := uint(get7BitChunkedInt(b)) - 4
_, err = readBytes(r, extendedHeaderSize)
if err != nil {
return nil, 0, fmt.Errorf("expected to read %d bytes (ID3v24 skip extended header): %v", extendedHeaderSize, err)
}
offset += extendedHeaderSize
default:
// nop, only 2.3 and 2.4 should have extended header
}
}
return h, offset, nil
}
*/ | id3v2header.go | 0.583915 | 0.412885 | id3v2header.go | starcoder |
package require
import (
"reflect"
"regexp"
"runtime"
"testing"
)
// Matches checks that a string matches a regular-expression.
func Matches(tb testing.TB, expectedMatch string, actual string, msgAndArgs ...interface{}) {
r, err := regexp.Compile(expectedMatch)
if err != nil {
fatal(tb, msgAndArgs, "Match string provided (%v) is invalid", expectedMatch)
}
if !r.MatchString(actual) {
fatal(tb, msgAndArgs, "Actual string (%v) does not match pattern (%v)", actual, expectedMatch)
}
}
// Equal checks equality of two values.
func Equal(tb testing.TB, expected interface{}, actual interface{}, msgAndArgs ...interface{}) {
if !reflect.DeepEqual(expected, actual) {
fatal(
tb,
msgAndArgs,
"Not equal: %#v (expected)\n"+
" != %#v (actual)", expected, actual)
}
}
// NotEqual checks inequality of two values.
func NotEqual(tb testing.TB, expected interface{}, actual interface{}, msgAndArgs ...interface{}) {
if reflect.DeepEqual(expected, actual) {
fatal(
tb,
msgAndArgs,
"Equal: %#v (expected)\n"+
" == %#v (actual)", expected, actual)
}
}
// EqualOneOf checks if a value is equal to one of the elements of a slice.
func EqualOneOf(tb testing.TB, expecteds []interface{}, actual interface{}, msgAndArgs ...interface{}) {
equal := false
for _, expected := range expecteds {
if reflect.DeepEqual(expected, actual) {
equal = true
break
}
}
if !equal {
fatal(
tb,
msgAndArgs,
"Not equal 1 of: %#v (expecteds)\n"+
" != %#v (actual)", expecteds, actual)
}
}
// OneOfEquals checks one element of a slice equals a value.
func OneOfEquals(tb testing.TB, expected interface{}, actuals []interface{}, msgAndArgs ...interface{}) {
equal := false
for _, actual := range actuals {
if reflect.DeepEqual(expected, actual) {
equal = true
break
}
}
if !equal {
fatal(
tb,
msgAndArgs,
"Not equal : %#v (expected)\n"+
" one of != %#v (actuals)", expected, actuals)
}
}
// NoError checks for no error.
func NoError(tb testing.TB, err error, msgAndArgs ...interface{}) {
if err != nil {
fatal(tb, msgAndArgs, "No error is expected but got %v", err)
}
}
// YesError checks for an error.
func YesError(tb testing.TB, err error, msgAndArgs ...interface{}) {
if err == nil {
fatal(tb, msgAndArgs, "Error is expected but got %v", err)
}
}
// NotNil checks a value is non-nil.
func NotNil(tb testing.TB, object interface{}, msgAndArgs ...interface{}) {
success := true
if object == nil {
success = false
} else {
value := reflect.ValueOf(object)
kind := value.Kind()
if kind >= reflect.Chan && kind <= reflect.Slice && value.IsNil() {
success = false
}
}
if !success {
fatal(tb, msgAndArgs, "Expected value not to be nil.")
}
}
// Nil checks a value is nil.
func Nil(tb testing.TB, object interface{}, msgAndArgs ...interface{}) {
if object == nil {
return
}
value := reflect.ValueOf(object)
kind := value.Kind()
if kind >= reflect.Chan && kind <= reflect.Slice && value.IsNil() {
return
}
fatal(tb, msgAndArgs, "Expected value to be nil.")
}
// True checks a value is true.
func True(tb testing.TB, value bool, msgAndArgs ...interface{}) {
if !value {
fatal(tb, msgAndArgs, "Should be true.")
}
}
// False checks a value is false.
func False(tb testing.TB, value bool, msgAndArgs ...interface{}) {
if value {
fatal(tb, msgAndArgs, "Should be false.")
}
}
// logMessage logs the optional user-supplied message. A single element is
// treated as a plain string; additional elements are formatting operands
// for the first.
func logMessage(tb testing.TB, msgAndArgs []interface{}) {
	if len(msgAndArgs) == 1 {
		// Fixed: the message was previously passed as the format string
		// itself, so a literal "%" produced garbled output (and trips
		// go vet's printf check).
		tb.Logf("%s", msgAndArgs[0].(string))
	}
	if len(msgAndArgs) > 1 {
		tb.Logf(msgAndArgs[0].(string), msgAndArgs[1:]...)
	}
}
// fatal logs the optional user message, then the assertion call site
// (two stack frames up), and finally fails the test via Fatalf.
// NOTE: the runtime.Caller(2) depth assumes fatal is invoked directly
// from an exported assertion; do not insert intermediate helpers.
func fatal(tb testing.TB, userMsgAndArgs []interface{}, msgFmt string, msgArgs ...interface{}) {
logMessage(tb, userMsgAndArgs)
_, file, line, ok := runtime.Caller(2)
if ok {
tb.Logf("%s:%d", file, line)
}
tb.Fatalf(msgFmt, msgArgs...)
} | src/client/pkg/require/require.go | 0.563378 | 0.435902 | require.go | starcoder |
package asdf
import (
"bytes"
"compress/bzip2"
"compress/zlib"
"crypto/md5"
"encoding/binary"
"io"
"io/ioutil"
"github.com/pierrec/lz4"
"github.com/pkg/errors"
)
// blockMagic is the 4-byte signature (0xd3 'B' 'L' 'K') that precedes
// every ASDF block.
var blockMagic = [4]byte{0xd3, 0x42, 0x4c, 0x4b}
// CompressionKind indicates the block compression type: none, zlib, bzip2 or lz4.
type CompressionKind int
const (
// CompressionNone corresponds to no compression.
CompressionNone CompressionKind = iota
// CompressionZLIB corresponds to zlib compression: balanced compression/decompression performance, moderate compression ratio.
CompressionZLIB CompressionKind = iota
// CompressionBZIP2 corresponds to bzip2 compression: slow compression/decompression, good compression ratio.
CompressionBZIP2 CompressionKind = iota
// CompressionLZ4 corresponds to lz4 compression: very fast compression/decompression, poor compression ratio for complex data, moderate/good for ordered.
CompressionLZ4 CompressionKind = iota
// FlagStreamed denotes a streamed block. Not used anywhere yet.
FlagStreamed uint32 = 1
)
// Block corresponds to an ASDF block: a payload plus the header fields
// needed to interpret it.
type Block struct {
// Data is the block's payload (possibly still compressed; see Uncompress).
Data []byte
// Flags is the block's flags. The 1.x standard does not define any flags except `FlagStreamed`.
Flags uint32
// Compression is the block's compression type: none, zlib, bzip2 or lz4.
Compression CompressionKind
// checksum is MD5 of uncompressed `Data`; sixteen zero bytes means no
// checksum was recorded.
checksum []byte
}
// compressionMapping decodes the 4-byte compression field of a block
// header; four zero bytes mean "no compression".
var compressionMapping = map[string]CompressionKind{
"\x00\x00\x00\x00": CompressionNone,
"zlib": CompressionZLIB,
"bzp2": CompressionBZIP2,
"lz4\x00": CompressionLZ4,
}
// compressionNames provides human-readable names for error messages.
var compressionNames = map[CompressionKind]string{
CompressionNone: "none",
CompressionZLIB: "zlib",
CompressionBZIP2: "bzip2",
CompressionLZ4: "lz4",
}
// decompressors maps each compression kind to a constructor for a reader
// that yields the uncompressed payload.
var decompressors = map[CompressionKind]func(reader io.Reader) (io.Reader, error){
CompressionNone: newNoneReader,
CompressionZLIB: newZlibReader,
CompressionBZIP2: newBzip2Reader,
CompressionLZ4: newLZ4Reader,
}
// Uncompress switches the block's compression to "none", uncompressing `Data` in-place as needed
// and checking the checksum. A checksum of sixteen zero bytes means "not
// recorded", in which case verification is skipped.
func (block *Block) Uncompress() error {
// Wrap Data in the kind-specific decompressing reader and drain it.
reader, err := decompressors[block.Compression](bytes.NewBuffer(block.Data))
if err != nil {
return errors.Wrapf(err, "failed to decompress %d bytes with %s",
len(block.Data), compressionNames[block.Compression])
}
data, err := ioutil.ReadAll(reader)
if err != nil {
return errors.Wrapf(err, "failed to decompress %d bytes with %s",
len(block.Data), compressionNames[block.Compression])
}
block.Data = data
block.Compression = CompressionNone
if !bytes.Equal(block.checksum, bytes.Repeat([]byte{0}, 16)) {
// check the checksum (MD5 of the uncompressed payload)
hash := md5.New()
hash.Write(block.Data)
if !bytes.Equal(hash.Sum(nil), block.checksum) {
return errors.Errorf("block checksum mismatch: actual %v vs declared %v",
hash.Sum(nil), block.checksum)
}
}
return nil
}
// ReadBlock loads another block from the specified reader. That block may be compressed,
// call `Uncompress()` to obtain the original Data.
//
// Header layout after the 4-byte magic and the big-endian 2-byte header
// size (all integers big-endian):
//
//	flags (4) | compression (4) | allocated_size (8) | used_size (8) |
//	data_size (8, ignored) | checksum (16)
func ReadBlock(reader io.Reader) (*Block, error) {
block := &Block{}
buffer := make([]byte, 4)
_, err := io.ReadFull(reader, buffer)
if err != nil {
return nil, errors.Wrap(err, "failed to read the block's magic")
}
if !bytes.Equal(buffer, blockMagic[:]) {
return nil, errors.Errorf("block magic does not match: %v", buffer)
}
buffer = buffer[:2]
_, err = io.ReadFull(reader, buffer)
if err != nil {
return nil, errors.Wrap(err, "failed to read the block's header size")
}
headerSize := binary.BigEndian.Uint16(buffer)
buffer = make([]byte, headerSize)
_, err = io.ReadFull(reader, buffer)
if err != nil {
return nil, errors.Wrap(err, "failed to read the block's header")
}
offset := 0
block.Flags = binary.BigEndian.Uint32(buffer[:4])
offset += 4
compression := buffer[offset : offset+4]
offset += 4
var exists bool
block.Compression, exists = compressionMapping[string(compression)]
if !exists {
return nil, errors.Errorf("unsupported block compression: %s", string(compression))
}
allocatedSize := binary.BigEndian.Uint64(buffer[offset : offset+8])
offset += 8
usedSize := binary.BigEndian.Uint64(buffer[offset : offset+8])
// ignore data_size: skip past used_size (8) and data_size (8) at once
offset += 16
block.checksum = buffer[offset : offset+16]
block.Data = make([]byte, usedSize)
_, err = io.ReadFull(reader, block.Data)
if err != nil {
return nil, errors.Wrap(err, "failed to read the block's payload")
}
// Drain the unused (allocated - used) tail so the reader is positioned
// at the start of the next block.
sink := make([]byte, allocatedSize-usedSize)
_, err = io.ReadFull(reader, sink)
if err != nil {
return nil, errors.Wrap(err, "failed to read the block's remainder")
}
return block, nil
}
func newNoneReader(reader io.Reader) (io.Reader, error) {
return reader, nil
}
func newZlibReader(reader io.Reader) (io.Reader, error) {
return zlib.NewReader(reader)
}
func newBzip2Reader(reader io.Reader) (io.Reader, error) {
return bzip2.NewReader(reader), nil
}
// newLZ4Reader decompresses the whole LZ4-framed payload eagerly and
// returns a reader over the result.
// The underlying format is a sequence of LZ4 blocks:
// 4 bytes + 4 bytes + data
// block size uncompressed size
// where the big-endian block size includes the 4-byte uncompressed-size
// field (hence the size-4 below) and the uncompressed size is
// little-endian — presumably matching the writer's framing; confirm
// against the ASDF spec.
func newLZ4Reader(reader io.Reader) (io.Reader, error) {
writer := &bytes.Buffer{}
sizeBuffer := make([]byte, 4)
for {
// EOF here means we consumed the last block cleanly.
_, err := io.ReadFull(reader, sizeBuffer)
if err == io.EOF {
break
}
if err != nil {
return nil, err
}
size := binary.BigEndian.Uint32(sizeBuffer)
lz4data := make([]byte, size-4)
_, err = io.ReadFull(reader, sizeBuffer)
if err != nil {
return nil, err
}
size = binary.LittleEndian.Uint32(sizeBuffer)
dest := make([]byte, size)
_, err = io.ReadFull(reader, lz4data)
if err != nil {
return nil, err
}
n, err := lz4.UncompressBlock(lz4data, dest)
if err != nil {
return nil, errors.Wrap(err, "lz4 error")
}
// The block must decompress to exactly the declared size.
if n != len(dest) {
return nil, errors.Errorf("uncompressed LZ4 size mismatch: %d != %d", n, size)
}
writer.Write(dest)
}
return bytes.NewReader(writer.Bytes()), nil
} | block.go | 0.626581 | 0.483526 | block.go | starcoder |
package eurorack
import (
"github.com/jsleeio/go-eagle/pkg/panel"
)
// Physical constants for the Eurorack 3U mechanical format; all
// dimensions are in millimetres unless noted otherwise.
const (
// PanelHeight3U represents the total height of a Eurorack panel. Note in
// particular that this is NOT the same as the Eurocard standard, as the
// latter does not use lipped rails
PanelHeight3U = 128.5
// ExtraMountingHolesThreshold represents the panel width threshold (in
// HP units) beyond which additional mounting holes are required
ExtraMountingHolesThreshold = 8
// MountingHolesLeftOffset represents the distance of the first mounting
// hole from the left edge of the panel
MountingHolesLeftOffset = 7.5
// MountingHoleTopY3U represents the Y value for the top row of 3U mounting
// holes
MountingHoleTopY3U = PanelHeight3U - 3.00
// MountingHoleBottomY3U represents the Y value for the bottom row of 3U
// mounting holes
MountingHoleBottomY3U = 3.00
// MountingHoleDiameter represents the diameter of a Eurorack system
// mounting hole, in millimetres
MountingHoleDiameter = 3.2
// HP represents horizontal pitch in a Eurorack frame, in millimetres
HP = 5.08
// HorizontalFit indicates the panel tolerance adjustment for the format
HorizontalFit = 0.25
// CornerRadius indicates the corner radius for the format. Eurorack doesn't
// believe in such things.
CornerRadius = 0.0
// RailHeightFromMountingHole is used to determine how much space exists.
// See discussion in github.com/jsleeio/pkg/panel. 5mm is a good safe
// figure for all known-used Eurorack rail types
RailHeightFromMountingHole = 5.0
)
// Eurorack implements the panel.Panel interface and encapsulates the physical
// characteristics of a Eurorack panel
type Eurorack struct {
// HP is the panel width in horizontal-pitch units (1 HP = 5.08 mm).
HP int
}
// NewEurorack constructs a new Eurorack object for a panel of the given
// width in HP units.
func NewEurorack(hp int) *Eurorack {
return &Eurorack{HP: hp}
}
// Width returns the width of a Eurorack panel, in millimetres
// (HP units times the 5.08 mm pitch).
func (e Eurorack) Width() float64 {
return HP * float64(e.HP)
}
// Height returns the height of a Eurorack panel, in millimetres
// (fixed at the 3U panel height).
func (e Eurorack) Height() float64 {
return PanelHeight3U
}
// MountingHoleDiameter returns the Eurorack system mounting hole size, in
// millimetres
func (e Eurorack) MountingHoleDiameter() float64 {
return MountingHoleDiameter
}
// MountingHoles generates a set of Point objects representing the mounting
// hole locations of a Eurorack panel: two on the left edge, plus two on
// the right edge for panels wider than ExtraMountingHolesThreshold HP.
func (e Eurorack) MountingHoles() []panel.Point {
holes := []panel.Point{
{X: MountingHolesLeftOffset, Y: MountingHoleBottomY3U},
{X: MountingHolesLeftOffset, Y: MountingHoleTopY3U},
}
if e.HP > ExtraMountingHolesThreshold {
// Right-hand holes sit (HP-3) pitch units right of the left-hand
// ones — presumably the standard rail hole spacing; confirm against
// the mechanical spec.
rhsx := MountingHolesLeftOffset + HP*(float64(e.HP-3))
holes = append(holes, panel.Point{X: rhsx, Y: MountingHoleBottomY3U})
holes = append(holes, panel.Point{X: rhsx, Y: MountingHoleTopY3U})
}
return holes
}
// HorizontalFit indicates the panel tolerance adjustment for the format
func (e Eurorack) HorizontalFit() float64 {
return HorizontalFit
}
// CornerRadius indicates the corner radius for the format. Eurorack panels
// have square corners, so this is always zero.
func (e Eurorack) CornerRadius() float64 {
	return CornerRadius
}
// RailHeightFromMountingHole is used to calculate the usable space between
// the mounting rails, in millimetres.
func (e Eurorack) RailHeightFromMountingHole() float64 {
	return RailHeightFromMountingHole
}
// MountingHoleTopY returns the Y coordinate for the top row of mounting
// holes, in millimetres.
func (e Eurorack) MountingHoleTopY() float64 {
	return MountingHoleTopY3U
}
// MountingHoleBottomY returns the Y coordinate for the bottom row of
// mounting holes, in millimetres.
func (e Eurorack) MountingHoleBottomY() float64 {
	return MountingHoleBottomY3U
}
// HeaderLocation returns the location of the header text. Eurorack has
// mounting rails so this is typically aligned with the top mounting screw,
// centred horizontally.
func (e Eurorack) HeaderLocation() panel.Point {
	centre := e.Width() / 2
	return panel.Point{X: centre, Y: e.MountingHoleTopY()}
}
// FooterLocation returns the location of the footer text. Eurorack has
// mounting rails so this is typically aligned with the bottom mounting screw
func (e Eurorack) FooterLocation() panel.Point {
return panel.Point{X: e.Width() / 2, Y: e.MountingHoleBottomY()}
} | pkg/format/eurorack/eurorack.go | 0.848251 | 0.696236 | eurorack.go | starcoder |
package models
import (
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization"
)
// RelyingPartyDetailedSummary models a detailed summary of an Active Directory
// Federation Service relying party, including sign-in statistics and Azure AD
// migration readiness. Fields are unexported per Kiota convention; use the
// Get/Set accessors.
type RelyingPartyDetailedSummary struct {
	Entity
	// Number of failed sign in on Active Directory Federation Service in the period specified.
	failedSignInCount *int64
	// Indication of whether the application can be moved to Azure AD or require more investigation. Possible values are: ready, needsReview, additionalStepsRequired, unknownFutureValue.
	migrationStatus *MigrationStatus
	// Specifies all the validations check done on applications configuration details to evaluate if the application is ready to be moved to Azure AD.
	migrationValidationDetails []KeyValuePairable
	// This identifier is used to identify the relying party to this Federation Service. It is used when issuing claims to the relying party.
	relyingPartyId *string
	// Name of application or other entity on the internet that uses an identity provider to authenticate a user who wants to log in.
	relyingPartyName *string
	// Specifies where the relying party expects to receive the token.
	replyUrls []string
	// Uniquely identifies the Active Directory forest.
	serviceId *string
	// Number of successful / (number of successful + number of failed sign ins) on Active Directory Federation Service in the period specified.
	signInSuccessRate *float64
	// Number of successful sign ins on Active Directory Federation Service.
	successfulSignInCount *int64
	// Number of successful + failed sign ins failed sign ins on Active Directory Federation Service in the period specified.
	totalSignInCount *int64
	// Number of unique users that have signed into the application.
	uniqueUserCount *int64
}
// NewRelyingPartyDetailedSummary instantiates a new relyingPartyDetailedSummary and sets the default values.
func NewRelyingPartyDetailedSummary()(*RelyingPartyDetailedSummary) {
	res := RelyingPartyDetailedSummary{
		Entity: *NewEntity(),
	}
	return &res
}
// CreateRelyingPartyDetailedSummaryFromDiscriminatorValue creates a new instance of the appropriate class based on discriminator value.
// This type has no derived types, so the parse node is ignored and the base type is always returned.
func CreateRelyingPartyDetailedSummaryFromDiscriminatorValue(parseNode i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, error) {
    return NewRelyingPartyDetailedSummary(), nil
}
// GetFailedSignInCount gets the failedSignInCount property value. Number of failed sign in on Active Directory Federation Service in the period specified.
func (m *RelyingPartyDetailedSummary) GetFailedSignInCount()(*int64) {
	// Guard the nil receiver, then return early; avoids the redundant
	// else-after-return flagged by Go style tooling.
	if m == nil {
		return nil
	}
	return m.failedSignInCount
}
// GetFieldDeserializers returns the deserialization information for the current model:
// a map from JSON property name to a callback that reads that property from a
// parse node and stores it on the receiver. The base Entity deserializers are
// included and extended with this type's own properties.
func (m *RelyingPartyDetailedSummary) GetFieldDeserializers()(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error)) {
    res := m.Entity.GetFieldDeserializers()
    res["failedSignInCount"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetInt64Value()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetFailedSignInCount(val)
        }
        return nil
    }
    res["migrationStatus"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetEnumValue(ParseMigrationStatus)
        if err != nil {
            return err
        }
        if val != nil {
            m.SetMigrationStatus(val.(*MigrationStatus))
        }
        return nil
    }
    res["migrationValidationDetails"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetCollectionOfObjectValues(CreateKeyValuePairFromDiscriminatorValue)
        if err != nil {
            return err
        }
        if val != nil {
            // Narrow each element from Parsable to the concrete interface type.
            res := make([]KeyValuePairable, len(val))
            for i, v := range val {
                res[i] = v.(KeyValuePairable)
            }
            m.SetMigrationValidationDetails(res)
        }
        return nil
    }
    res["relyingPartyId"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetStringValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetRelyingPartyId(val)
        }
        return nil
    }
    res["relyingPartyName"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetStringValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetRelyingPartyName(val)
        }
        return nil
    }
    res["replyUrls"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetCollectionOfPrimitiveValues("string")
        if err != nil {
            return err
        }
        if val != nil {
            // Primitive collections arrive as []interface{} of *string; flatten to []string.
            res := make([]string, len(val))
            for i, v := range val {
                res[i] = *(v.(*string))
            }
            m.SetReplyUrls(res)
        }
        return nil
    }
    res["serviceId"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetStringValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetServiceId(val)
        }
        return nil
    }
    res["signInSuccessRate"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetFloat64Value()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetSignInSuccessRate(val)
        }
        return nil
    }
    res["successfulSignInCount"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetInt64Value()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetSuccessfulSignInCount(val)
        }
        return nil
    }
    res["totalSignInCount"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetInt64Value()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetTotalSignInCount(val)
        }
        return nil
    }
    res["uniqueUserCount"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetInt64Value()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetUniqueUserCount(val)
        }
        return nil
    }
    return res
}
// GetMigrationStatus gets the migrationStatus property value. Indication of whether the application can be moved to Azure AD or require more investigation. Possible values are: ready, needsReview, additionalStepsRequired, unknownFutureValue.
func (m *RelyingPartyDetailedSummary) GetMigrationStatus()(*MigrationStatus) {
	// Guard the nil receiver, then return early (no redundant else).
	if m == nil {
		return nil
	}
	return m.migrationStatus
}
// GetMigrationValidationDetails gets the migrationValidationDetails property value. Specifies all the validations check done on applications configuration details to evaluate if the application is ready to be moved to Azure AD.
func (m *RelyingPartyDetailedSummary) GetMigrationValidationDetails()([]KeyValuePairable) {
	// Guard the nil receiver, then return early (no redundant else).
	if m == nil {
		return nil
	}
	return m.migrationValidationDetails
}
// GetRelyingPartyId gets the relyingPartyId property value. This identifier is used to identify the relying party to this Federation Service. It is used when issuing claims to the relying party.
func (m *RelyingPartyDetailedSummary) GetRelyingPartyId()(*string) {
	// Guard the nil receiver, then return early (no redundant else).
	if m == nil {
		return nil
	}
	return m.relyingPartyId
}
// GetRelyingPartyName gets the relyingPartyName property value. Name of application or other entity on the internet that uses an identity provider to authenticate a user who wants to log in.
func (m *RelyingPartyDetailedSummary) GetRelyingPartyName()(*string) {
	// Guard the nil receiver, then return early (no redundant else).
	if m == nil {
		return nil
	}
	return m.relyingPartyName
}
// GetReplyUrls gets the replyUrls property value. Specifies where the relying party expects to receive the token.
func (m *RelyingPartyDetailedSummary) GetReplyUrls()([]string) {
	// Guard the nil receiver, then return early (no redundant else).
	if m == nil {
		return nil
	}
	return m.replyUrls
}
// GetServiceId gets the serviceId property value. Uniquely identifies the Active Directory forest.
func (m *RelyingPartyDetailedSummary) GetServiceId()(*string) {
	// Guard the nil receiver, then return early (no redundant else).
	if m == nil {
		return nil
	}
	return m.serviceId
}
// GetSignInSuccessRate gets the signInSuccessRate property value. Number of successful / (number of successful + number of failed sign ins) on Active Directory Federation Service in the period specified.
func (m *RelyingPartyDetailedSummary) GetSignInSuccessRate()(*float64) {
	// Guard the nil receiver, then return early (no redundant else).
	if m == nil {
		return nil
	}
	return m.signInSuccessRate
}
// GetSuccessfulSignInCount gets the successfulSignInCount property value. Number of successful sign ins on Active Directory Federation Service.
func (m *RelyingPartyDetailedSummary) GetSuccessfulSignInCount()(*int64) {
	// Guard the nil receiver, then return early (no redundant else).
	if m == nil {
		return nil
	}
	return m.successfulSignInCount
}
// GetTotalSignInCount gets the totalSignInCount property value. Number of successful + failed sign ins on Active Directory Federation Service in the period specified.
func (m *RelyingPartyDetailedSummary) GetTotalSignInCount()(*int64) {
	// Guard the nil receiver, then return early (no redundant else).
	if m == nil {
		return nil
	}
	return m.totalSignInCount
}
// GetUniqueUserCount gets the uniqueUserCount property value. Number of unique users that have signed into the application.
func (m *RelyingPartyDetailedSummary) GetUniqueUserCount()(*int64) {
	// Guard the nil receiver, then return early (no redundant else).
	if m == nil {
		return nil
	}
	return m.uniqueUserCount
}
// Serialize serializes the current object's properties to the given writer,
// after first serializing the embedded Entity. Scalar properties are always
// written; enum and collection properties are written only when non-nil.
// The first write error aborts serialization.
func (m *RelyingPartyDetailedSummary) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {
    err := m.Entity.Serialize(writer)
    if err != nil {
        return err
    }
    {
        err = writer.WriteInt64Value("failedSignInCount", m.GetFailedSignInCount())
        if err != nil {
            return err
        }
    }
    if m.GetMigrationStatus() != nil {
        // Enums are serialized by their string representation.
        cast := (*m.GetMigrationStatus()).String()
        err = writer.WriteStringValue("migrationStatus", &cast)
        if err != nil {
            return err
        }
    }
    if m.GetMigrationValidationDetails() != nil {
        // Widen each element to Parsable for the collection writer.
        cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetMigrationValidationDetails()))
        for i, v := range m.GetMigrationValidationDetails() {
            cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)
        }
        err = writer.WriteCollectionOfObjectValues("migrationValidationDetails", cast)
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteStringValue("relyingPartyId", m.GetRelyingPartyId())
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteStringValue("relyingPartyName", m.GetRelyingPartyName())
        if err != nil {
            return err
        }
    }
    if m.GetReplyUrls() != nil {
        err = writer.WriteCollectionOfStringValues("replyUrls", m.GetReplyUrls())
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteStringValue("serviceId", m.GetServiceId())
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteFloat64Value("signInSuccessRate", m.GetSignInSuccessRate())
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteInt64Value("successfulSignInCount", m.GetSuccessfulSignInCount())
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteInt64Value("totalSignInCount", m.GetTotalSignInCount())
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteInt64Value("uniqueUserCount", m.GetUniqueUserCount())
        if err != nil {
            return err
        }
    }
    return nil
}
// SetFailedSignInCount sets the failedSignInCount property value. Number of failed sign in on Active Directory Federation Service in the period specified.
func (m *RelyingPartyDetailedSummary) SetFailedSignInCount(value *int64)() {
	if m == nil {
		return
	}
	m.failedSignInCount = value
}
// SetMigrationStatus sets the migrationStatus property value. Indication of whether the application can be moved to Azure AD or require more investigation. Possible values are: ready, needsReview, additionalStepsRequired, unknownFutureValue.
func (m *RelyingPartyDetailedSummary) SetMigrationStatus(value *MigrationStatus)() {
	if m == nil {
		return
	}
	m.migrationStatus = value
}
// SetMigrationValidationDetails sets the migrationValidationDetails property value. Specifies all the validations check done on applications configuration details to evaluate if the application is ready to be moved to Azure AD.
func (m *RelyingPartyDetailedSummary) SetMigrationValidationDetails(value []KeyValuePairable)() {
	if m == nil {
		return
	}
	m.migrationValidationDetails = value
}
// SetRelyingPartyId sets the relyingPartyId property value. This identifier is used to identify the relying party to this Federation Service. It is used when issuing claims to the relying party.
func (m *RelyingPartyDetailedSummary) SetRelyingPartyId(value *string)() {
	if m == nil {
		return
	}
	m.relyingPartyId = value
}
// SetRelyingPartyName sets the relyingPartyName property value. Name of application or other entity on the internet that uses an identity provider to authenticate a user who wants to log in.
func (m *RelyingPartyDetailedSummary) SetRelyingPartyName(value *string)() {
	if m == nil {
		return
	}
	m.relyingPartyName = value
}
// SetReplyUrls sets the replyUrls property value. Specifies where the relying party expects to receive the token.
func (m *RelyingPartyDetailedSummary) SetReplyUrls(value []string)() {
	if m == nil {
		return
	}
	m.replyUrls = value
}
// SetServiceId sets the serviceId property value. Uniquely identifies the Active Directory forest.
func (m *RelyingPartyDetailedSummary) SetServiceId(value *string)() {
	if m == nil {
		return
	}
	m.serviceId = value
}
// SetSignInSuccessRate sets the signInSuccessRate property value. Number of successful / (number of successful + number of failed sign ins) on Active Directory Federation Service in the period specified.
func (m *RelyingPartyDetailedSummary) SetSignInSuccessRate(value *float64)() {
	if m == nil {
		return
	}
	m.signInSuccessRate = value
}
// SetSuccessfulSignInCount sets the successfulSignInCount property value. Number of successful sign ins on Active Directory Federation Service.
func (m *RelyingPartyDetailedSummary) SetSuccessfulSignInCount(value *int64)() {
	if m == nil {
		return
	}
	m.successfulSignInCount = value
}
// SetTotalSignInCount sets the totalSignInCount property value. Number of successful + failed sign ins on Active Directory Federation Service in the period specified.
func (m *RelyingPartyDetailedSummary) SetTotalSignInCount(value *int64)() {
	if m == nil {
		return
	}
	m.totalSignInCount = value
}
// SetUniqueUserCount sets the uniqueUserCount property value. Number of unique users that have signed into the application.
func (m *RelyingPartyDetailedSummary) SetUniqueUserCount(value *int64)() {
if m != nil {
m.uniqueUserCount = value
}
} | models/relying_party_detailed_summary.go | 0.630344 | 0.414899 | relying_party_detailed_summary.go | starcoder |
package ent
import (
"fmt"
"strings"
"time"
"entgo.io/ent/dialect/sql"
"github.com/open-privacy/opv/pkg/ent/apiaudit"
)
// APIAudit is the model entity for the APIAudit schema: one record per
// audited API request, capturing caller identity (hashed), route, and outcome.
type APIAudit struct {
	// config holds the ent client configuration; excluded from JSON output.
	config `json:"-"`
	// ID of the ent.
	ID string `json:"id,omitempty"`
	// CreatedAt holds the value of the "created_at" field.
	CreatedAt time.Time `json:"created_at,omitempty"`
	// UpdatedAt holds the value of the "updated_at" field.
	UpdatedAt time.Time `json:"updated_at,omitempty"`
	// DeletedAt holds the value of the "deleted_at" field; nil when the row is not soft-deleted.
	DeletedAt *time.Time `json:"deleted_at,omitempty"`
	// Plane holds the value of the "plane" field.
	Plane string `json:"plane,omitempty"`
	// HashedGrantToken holds the value of the "hashed_grant_token" field.
	// Sensitive: deliberately omitted from JSON and String output.
	HashedGrantToken string `json:"-"`
	// Domain holds the value of the "domain" field.
	Domain string `json:"domain,omitempty"`
	// HTTPPath holds the value of the "http_path" field.
	HTTPPath string `json:"http_path,omitempty"`
	// HTTPMethod holds the value of the "http_method" field.
	HTTPMethod string `json:"http_method,omitempty"`
	// SentHTTPStatus holds the value of the "sent_http_status" field.
	SentHTTPStatus int `json:"sent_http_status,omitempty"`
}
// scanValues returns the types for scanning values from sql.Rows: one
// nullable scan target per requested column, chosen by the column's Go type.
// Unknown columns are an error rather than being silently skipped.
func (*APIAudit) scanValues(columns []string) ([]interface{}, error) {
	values := make([]interface{}, len(columns))
	for i := range columns {
		switch columns[i] {
		case apiaudit.FieldSentHTTPStatus:
			values[i] = &sql.NullInt64{}
		case apiaudit.FieldID, apiaudit.FieldPlane, apiaudit.FieldHashedGrantToken, apiaudit.FieldDomain, apiaudit.FieldHTTPPath, apiaudit.FieldHTTPMethod:
			values[i] = &sql.NullString{}
		case apiaudit.FieldCreatedAt, apiaudit.FieldUpdatedAt, apiaudit.FieldDeletedAt:
			values[i] = &sql.NullTime{}
		default:
			return nil, fmt.Errorf("unexpected column %q for type APIAudit", columns[i])
		}
	}
	return values, nil
}
// assignValues assigns the values that were returned from sql.Rows (after scanning)
// to the APIAudit fields. Matching columns by name keeps assignment robust to
// column ordering. SQL NULLs leave the corresponding field at its zero value;
// DeletedAt stays nil when the column is NULL.
func (aa *APIAudit) assignValues(columns []string, values []interface{}) error {
	if m, n := len(values), len(columns); m < n {
		return fmt.Errorf("mismatch number of scan values: %d != %d", m, n)
	}
	for i := range columns {
		switch columns[i] {
		case apiaudit.FieldID:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field id", values[i])
			} else if value.Valid {
				aa.ID = value.String
			}
		case apiaudit.FieldCreatedAt:
			if value, ok := values[i].(*sql.NullTime); !ok {
				return fmt.Errorf("unexpected type %T for field created_at", values[i])
			} else if value.Valid {
				aa.CreatedAt = value.Time
			}
		case apiaudit.FieldUpdatedAt:
			if value, ok := values[i].(*sql.NullTime); !ok {
				return fmt.Errorf("unexpected type %T for field updated_at", values[i])
			} else if value.Valid {
				aa.UpdatedAt = value.Time
			}
		case apiaudit.FieldDeletedAt:
			if value, ok := values[i].(*sql.NullTime); !ok {
				return fmt.Errorf("unexpected type %T for field deleted_at", values[i])
			} else if value.Valid {
				// Pointer field: allocate only when the column is non-NULL.
				aa.DeletedAt = new(time.Time)
				*aa.DeletedAt = value.Time
			}
		case apiaudit.FieldPlane:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field plane", values[i])
			} else if value.Valid {
				aa.Plane = value.String
			}
		case apiaudit.FieldHashedGrantToken:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field hashed_grant_token", values[i])
			} else if value.Valid {
				aa.HashedGrantToken = value.String
			}
		case apiaudit.FieldDomain:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field domain", values[i])
			} else if value.Valid {
				aa.Domain = value.String
			}
		case apiaudit.FieldHTTPPath:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field http_path", values[i])
			} else if value.Valid {
				aa.HTTPPath = value.String
			}
		case apiaudit.FieldHTTPMethod:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field http_method", values[i])
			} else if value.Valid {
				aa.HTTPMethod = value.String
			}
		case apiaudit.FieldSentHTTPStatus:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field sent_http_status", values[i])
			} else if value.Valid {
				aa.SentHTTPStatus = int(value.Int64)
			}
		}
	}
	return nil
}
// Update returns a builder for updating this APIAudit.
// Note that you need to call APIAudit.Unwrap() before calling this method if this APIAudit
// was returned from a transaction, and the transaction was committed or rolled back.
func (aa *APIAudit) Update() *APIAuditUpdateOne {
	return (&APIAuditClient{config: aa.config}).UpdateOne(aa)
}
// Unwrap unwraps the APIAudit entity that was returned from a transaction after it was closed,
// so that all future queries will be executed through the driver which created the transaction.
// Panics if the entity was not loaded through a transaction (programmer error).
func (aa *APIAudit) Unwrap() *APIAudit {
	tx, ok := aa.config.driver.(*txDriver)
	if !ok {
		panic("ent: APIAudit is not a transactional entity")
	}
	aa.config.driver = tx.drv
	return aa
}
// String implements fmt.Stringer, producing a single-line summary of the
// entity. The hashed grant token is redacted; deleted_at appears only when set.
func (aa *APIAudit) String() string {
	var b strings.Builder
	fmt.Fprintf(&b, "APIAudit(id=%v", aa.ID)
	fmt.Fprintf(&b, ", created_at=%s", aa.CreatedAt.Format(time.ANSIC))
	fmt.Fprintf(&b, ", updated_at=%s", aa.UpdatedAt.Format(time.ANSIC))
	if aa.DeletedAt != nil {
		fmt.Fprintf(&b, ", deleted_at=%s", aa.DeletedAt.Format(time.ANSIC))
	}
	fmt.Fprintf(&b, ", plane=%s", aa.Plane)
	b.WriteString(", hashed_grant_token=<sensitive>")
	fmt.Fprintf(&b, ", domain=%s", aa.Domain)
	fmt.Fprintf(&b, ", http_path=%s", aa.HTTPPath)
	fmt.Fprintf(&b, ", http_method=%s", aa.HTTPMethod)
	fmt.Fprintf(&b, ", sent_http_status=%v", aa.SentHTTPStatus)
	b.WriteString(")")
	return b.String()
}
// APIAudits is a parsable slice of APIAudit, used for bulk configuration of
// query results.
type APIAudits []*APIAudit
func (aa APIAudits) config(cfg config) {
for _i := range aa {
aa[_i].config = cfg
}
} | pkg/ent/apiaudit.go | 0.649023 | 0.421433 | apiaudit.go | starcoder |
package main
// kartaQMLString is the complete QML scene for the karta GUI: a gradient
// background with a star-particle system, the generated map image (clicking
// it regenerates the map), and a bottom row holding the seed label and input.
const kartaQMLString = `
// Start of the QML string
import QtQuick 2.0
import QtQuick.Particles 2.0
import GoExtensions 1.0
Rectangle {
    id: root
    property alias seed: seedInput.text
    property int clickX: 0
    property int clickY: 0
    width: 800
    height: 600
    color: "#030f14"
    Rectangle {
        id: background
        width: parent.width
        height: parent.height-50
        gradient: Gradient {
            GradientStop { position: 0.0; color: "#061f29" }
            GradientStop { position: 1.0; color: "#030f14" }
        }
        MouseArea {
            id: area
            width: parent.width
            height: parent.height
            onClicked: {
                root.clickX = mouse.x
                root.clickY = mouse.y
            }
        }
        ParticleSystem {
            anchors.fill: parent
            ImageParticle {
                id: ip
                source: "image://star/FFFFFF"
                colorVariation: 0.2
                alpha: 0.6
                rotation: 15
                rotationVariation: 45
                rotationVelocity: 35
                rotationVelocityVariation: 25
            }
            Emitter {
                anchors.fill: parent
                emitRate: 160
                lifeSpan: 2000
                lifeSpanVariation: 500
                size: 1
                endSize: 22
            }
        }
        Image {
            id: karta
            cache: false
            source: "image://karta/map.png"
            property int clicks: 0
            x: (parent.width - width)/2
            y: (parent.height - height)/2
            width: parent.width/1.6
            fillMode: Image.PreserveAspectFit
            clip: false
            MouseArea {
                anchors.fill: parent
                onClicked: {
                    parent.clicks += 1
                    parent.source = "image://karta/map" + karta.clicks
                }
            }
        }
    }
    Rectangle {
        id: form
        width: parent.width
        height: 50
        y: parent.height-50
        color: "#061f29"
        Row {
            id: row
            anchors.fill: parent
            anchors.margins: 10
            spacing: 20
            Text {
                id: seedText
                text: ctrl.message
                color: "#FFFFFF"
                font.pointSize: 12
                font.bold: true
                MouseArea {
                    id: mouseArea
                    anchors.fill: parent
                    onReleased: ctrl.textReleased(seedInput)
                }
            }
            TextInput {
                id: seedInput
                color: "#1e8bb8"
                font.pointSize: 12
                font.bold: true
                width: 96;
                height: 20
                focus: true
                text: "1"
            }
        }
    }
}
// End of the QML string`
// vim: ft=qml.go
package raster
import (
"bytes"
"errors"
"image"
"image/jpeg"
"image/png"
"math"
"github.com/xeonx/geographic"
)
// n returns the number of tiles along one axis of the tile grid at the given
// zoom level, i.e. 2^level.
func n(level int) int {
	tiles := 1
	tiles <<= uint(level)
	return tiles
}
//X2Lon transforms x into a longitude in degree at a given level.
func X2Lon(level int, x int) float64 {
return float64(x)/float64(n(level))*360.0 - 180.0
}
//Lon2X transforms a longitude in degree into x at a given level.
func Lon2X(level int, longitudeDeg float64) int {
return int(float64(n(level)) * (longitudeDeg + 180.) / 360.)
}
//Y2Lat transforms y into a latitude in degree at a given level.
func Y2Lat(level int, y int) float64 {
var yosm = y
latitudeRad := math.Atan(math.Sinh(math.Pi * (1. - 2.*float64(yosm)/float64(n(level)))))
return -(latitudeRad * 180.0 / math.Pi)
}
// Lat2Y transforms a latitude in degrees into the tile y coordinate at a
// given level: the Web-Mercator row is computed first, then flipped so that
// y grows northward.
func Lat2Y(level int, latitudeDeg float64) int {
	tiles := 1 << uint(level)
	latitudeRad := latitudeDeg / 180 * math.Pi
	yosm := int(float64(tiles) * (1. - math.Log(math.Tan(latitudeRad)+1/math.Cos(latitudeRad))/math.Pi) / 2.)
	return tiles - yosm - 1
}
//Encode encodes an image in the given format. Only jpg and png are supported.
func Encode(img image.Image, format string) ([]byte, error) {
var b bytes.Buffer
if format == "jpg" {
err := jpeg.Encode(&b, img, nil)
if err != nil {
return nil, err
}
} else if format == "png" {
err := png.Encode(&b, img)
if err != nil {
return nil, err
}
} else {
return nil, errors.New("Unsupported image format: '" + format + "'. Only 'jpg' or 'png' allowed.")
}
return b.Bytes(), nil
}
//Decode decodes an image into the given format. Only jpg and png are supported.
func Decode(rawImg []byte, format string) (image.Image, error) {
b := bytes.NewBuffer(rawImg)
var img image.Image
var err error
if format == "jpg" {
img, err = jpeg.Decode(b)
if err != nil {
return nil, err
}
} else if format == "png" {
img, err = png.Decode(b)
if err != nil {
return nil, err
}
} else {
return nil, errors.New("Unsupported image format: '" + format + "'. Only 'jpg' or 'png' allowed.")
}
return img, nil
}
// Filter reports whether the given tile is excluded from copy. A non-nil
// error aborts the copy operation that invoked the filter.
type Filter func(level, x, y int) (bool, error)
// Any composes Filters into a single Filter that excludes a tile as soon as
// at least one of the composed Filters excludes it (logical OR). The first
// filter error encountered is returned immediately.
func Any(filters ...Filter) Filter {
	return func(level, x, y int) (bool, error) {
		for _, filter := range filters {
			excluded, err := filter(level, x, y)
			if err != nil {
				return false, err
			}
			if excluded {
				return true, nil
			}
		}
		return false, nil
	}
}
// All composes Filters into a single Filter that excludes a tile only when
// every composed Filter excludes it (logical AND). The first filter error
// encountered is returned immediately.
func All(filters ...Filter) Filter {
	return func(level, x, y int) (bool, error) {
		for _, filter := range filters {
			excluded, err := filter(level, x, y)
			if err != nil {
				return false, err
			}
			if !excluded {
				return false, nil
			}
		}
		return true, nil
	}
}
// transformFct is any transformation function usable inside the encoding or
// decoding of images: it maps raw image bytes in one format to raw bytes in
// another.
type transformFct func(src []byte) ([]byte, error)
// getTansformFct returns a function able to convert between the raw format of
// from and the one of to. A nil result means that no transformation is
// required (both sides use the same tile format).
// NOTE(review): the name carries a typo ("Tansform"); kept for compatibility
// with existing callers.
func getTansformFct(from TileReader, to TileReadWriter) transformFct {
	var transform transformFct
	if from.TileFormat() != to.TileFormat() {
		// Decode using the source format, re-encode using the destination format.
		transform = func(src []byte) ([]byte, error) {
			img, err := Decode(src, from.TileFormat())
			if err != nil {
				return nil, err
			}
			return Encode(img, to.TileFormat())
		}
	}
	return transform
}
// Copier copies tiles from a TileReader to a TileReadWriter.
// An optional Filter allows discarding tiles before copy.
type Copier struct {
	from TileReader
	to   TileReadWriter
	// transform converts raw tile bytes between formats; nil when the two
	// sides share the same format.
	transform transformFct
	// Filter, when non-nil, is consulted before each copy; excluded tiles
	// are skipped.
	Filter Filter
}
// NewCopier creates a Copier between from and to, pre-computing the format
// transformation (if any) between the two tile stores.
func NewCopier(from TileReader, to TileReadWriter) (*Copier, error) {
	c := &Copier{from: from, to: to}
	c.transform = getTansformFct(from, to)
	return c, nil
}
// TileBlock represents a rectangular set of tiles at a given level.
// All bounds are inclusive tile indices.
type TileBlock struct {
	Level int
	Xmin  int
	Xmax  int
	Ymin  int
	Ymax  int
}
// Count returns the number of tiles within the block (bounds are inclusive).
func (b TileBlock) Count() int {
	width := b.Xmax - b.Xmin + 1
	height := b.Ymax - b.Ymin + 1
	return width * height
}
// GetTileBlock computes the tile block enveloping the bounding box at the
// given zoom level.
func GetTileBlock(bbox geographic.BoundingBox, level int) (TileBlock, error) {
	b := TileBlock{
		Level: level,
		Xmin:  Lon2X(level, bbox.LongitudeMinDeg),
		Xmax:  Lon2X(level, bbox.LongitudeMaxDeg),
		Ymin:  Lat2Y(level, bbox.LatitudeMinDeg),
		Ymax:  Lat2Y(level, bbox.LatitudeMaxDeg),
	}
	// Lat2Y flips the axis relative to latitude, so the two Y values may
	// arrive swapped; normalise them here.
	if b.Ymin > b.Ymax {
		y := b.Ymin
		b.Ymin = b.Ymax
		b.Ymax = y
	}
	// If the eastern edge of the bbox lands exactly on a tile boundary, that
	// last column covers no bbox area, so drop it.
	// NOTE(review): there is no matching adjustment on the Y axis —
	// presumably intentional given Lat2Y's flipped edge handling, but worth
	// confirming.
	if X2Lon(level, b.Xmax) == bbox.LongitudeMaxDeg {
		b.Xmax--
	}
	return b, nil
}
// CopyBlock copies a block of tiles.
// If progressFct is not nil, it is called after each tile with whether that
// tile was actually copied (false means it was filtered out).
// It returns the count of tiles copied to the destination and the first
// error encountered, if any; the iteration stops at the first error.
func (c *Copier) CopyBlock(block TileBlock, progressFct func(level, x, y int, processed bool)) (int, error) {
	processedCount := 0
	for x := block.Xmin; x <= block.Xmax; x++ {
		for y := block.Ymin; y <= block.Ymax; y++ {
			processed, err := c.Copy(block.Level, x, y)
			if err != nil {
				return processedCount, err
			}
			if progressFct != nil {
				progressFct(block.Level, x, y, processed)
			}
			if processed {
				processedCount++
			}
		}
	}
	return processedCount, nil
}
// Copy copies a single tile: the optional Filter is consulted first, then the
// raw tile is read from the source, converted between formats if the stores
// differ, and written to the destination.
// It returns true if the tile was copied to the destination (false when
// filtered out) and the first error encountered, if any.
func (c *Copier) Copy(level, x, y int) (bool, error) {
	if c.Filter != nil {
		filtered, err := c.Filter(level, x, y)
		if err != nil {
			return false, err
		}
		if filtered {
			return false, nil
		}
	}
	rawImg, err := c.from.GetRaw(level, x, y)
	if err != nil {
		return false, err
	}
	// transform is nil when source and destination share a tile format.
	if c.transform != nil {
		rawImg, err = c.transform(rawImg)
		if err != nil {
			return false, err
		}
	}
	err = c.to.SetRaw(level, x, y, rawImg)
	if err != nil {
		return false, err
	}
	return true, nil
}
//Copy copies a single tile from a reader to a writer.
//It returns the true if the tile was copied in the destination and the first error encountered, if any.
func Copy(from TileReader, to TileReadWriter, level, x, y int) (bool, error) {
c, err := NewCopier(from, to)
if err != nil {
return false, err
}
return c.Copy(level, x, y)
} | raster.go | 0.853196 | 0.504333 | raster.go | starcoder |
package board
import (
"fmt"
"math"
)
// Tile is one square of the board. Visit is 0 while unvisited, otherwise the
// 1-based step number at which the square was reached; X and Y are the
// square's coordinates in the grid.
type Tile struct {
	Visit int
	X     int
	Y     int
}
// Board is a square grid of tiles plus the current position of the piece
// being moved across it.
type Board struct {
	Tiles     [][]Tile
	PositionX int
	PositionY int
}
// NewBoard creates a size x size board with the piece placed at
// (startX, startY), marking that square as visited on step 1.
func NewBoard(size, startX, startY int) Board {
	b := Board{PositionX: startX, PositionY: startY}
	b.Tiles = make([][]Tile, size)
	for i := range b.Tiles {
		row := make([]Tile, size)
		for j := range row {
			row[j] = Tile{X: i, Y: j}
		}
		b.Tiles[i] = row
	}
	b.Tiles[startX][startY].Visit = 1
	return b
}
// Solve runs a greedy Warnsdorff-style tour: on each step it moves to the
// reachable unvisited tile that itself has the fewest onward moves, stamping
// each tile with its step number. It stops either when every square has been
// visited or when the piece ends up back on the starting square (treated as
// failure).
func (b *Board) Solve(startingX, startingY, boardSize int) {
	visitCount := 1
	for {
		// math.MaxInt8 works as an "infinity" sentinel because a tile can
		// have at most 8 candidate moves.
		least := math.MaxInt8
		leastTile := Tile{}
		for _, candidate := range b.currentCandidates() {
			nextMoves := b.candidates(candidate.X, candidate.Y)
			if len(nextMoves) < least {
				least = len(nextMoves)
				leastTile = candidate
			}
		}
		// NOTE(review): if there are no candidates at all, leastTile stays
		// the zero Tile and the piece jumps to (0,0) — presumably relying on
		// the start-square check below to terminate; worth confirming.
		visitCount += 1
		b.Tiles[leastTile.X][leastTile.Y].Visit = visitCount
		b.moveToTile(leastTile)
		if b.PositionX == startingX && b.PositionY == startingY {
			fmt.Println("Failed to find a path; try again?")
			break
		}
		if visitCount == boardSize*boardSize {
			break
		}
	}
}
// currentCandidates returns the unvisited tiles reachable from the piece's
// current position.
func (b *Board) currentCandidates() []Tile {
	return b.candidates(b.PositionX, b.PositionY)
}
// HorizontalCandidates appends the tiles three squares to the right and left
// of (x, y) — when on the board — to potentialMoves, and returns the slice.
func (b *Board) HorizontalCandidates(x, y int, potentialMoves []Tile) []Tile {
	limit := len(b.Tiles) - 1
	if right := x + 3; right <= limit {
		potentialMoves = append(potentialMoves, b.Tiles[right][y])
	}
	if left := x - 3; left >= 0 {
		potentialMoves = append(potentialMoves, b.Tiles[left][y])
	}
	return potentialMoves
}
// VerticalCandidates appends the tiles three squares above and below (x, y)
// — when on the board — to potentialMoves, and returns the slice.
func (b *Board) VerticalCandidates(x, y int, potentialMoves []Tile) []Tile {
	limit := len(b.Tiles) - 1
	if up := y + 3; up <= limit {
		potentialMoves = append(potentialMoves, b.Tiles[x][up])
	}
	if down := y - 3; down >= 0 {
		potentialMoves = append(potentialMoves, b.Tiles[x][down])
	}
	return potentialMoves
}
// DiagonalCandidates appends the four tiles two squares diagonally away from
// (x, y) — when on the board — to potentialMoves, and returns the slice.
func (b *Board) DiagonalCandidates(x, y int, potentialMoves []Tile) []Tile {
	limit := len(b.Tiles) - 1
	// Offsets listed in the same order the original checks performed them.
	for _, d := range [][2]int{{2, 2}, {-2, -2}, {2, -2}, {-2, 2}} {
		nx, ny := x+d[0], y+d[1]
		if nx >= 0 && nx <= limit && ny >= 0 && ny <= limit {
			potentialMoves = append(potentialMoves, b.Tiles[nx][ny])
		}
	}
	return potentialMoves
}
// candidates returns the unvisited tiles reachable from (x, y): three
// squares horizontally or vertically, or two squares diagonally.
func (b *Board) candidates(x, y int) []Tile {
	if x >= len(b.Tiles) || y >= len(b.Tiles) {
		return []Tile{}
	}
	potential := []Tile{}
	potential = b.HorizontalCandidates(x, y, potential)
	potential = b.VerticalCandidates(x, y, potential)
	potential = b.DiagonalCandidates(x, y, potential)
	// Keep only tiles that have not been visited yet.
	moves := []Tile{}
	for _, move := range potential {
		if move.Visit == 0 {
			moves = append(moves, move)
		}
	}
	return moves
}
// Print writes the board's visit numbers to stdout, one row of the tile
// matrix per output line, each number right-aligned in a 4-character cell.
func (b *Board) Print() {
	for _, row := range b.Tiles {
		for _, tile := range row {
			fmt.Printf(" %3d", tile.Visit)
		}
		fmt.Println()
	}
}
// GetTiles returns the board's tile matrix. Note this is the live slice,
// not a copy: mutations by the caller affect the board. (The Get prefix is
// unidiomatic Go but is kept for caller compatibility.)
func (b *Board) GetTiles() [][]Tile {
	return b.Tiles
}
func (b *Board) moveToTile(tile Tile) {
tile.Visit = 1
b.PositionX = tile.X
b.PositionY = tile.Y
} | board/board.go | 0.578924 | 0.477981 | board.go | starcoder |
package strmatcher
// Type identifies the matching semantic implemented by a Matcher.
type Type byte

const (
	// Full matches when the input string is exactly equal to the pattern.
	Full Type = iota
	// Domain matches when the input string is the pattern itself or one of
	// its sub-domains.
	Domain
	// Substr matches when the input string contains the pattern as a
	// substring.
	Substr
	// Regex matches when the input string matches the regular-expression
	// pattern.
	Regex
)
// Matcher is the interface to determine whether a string matches a pattern.
// * This is a basic matcher representing one match semantic (full, substr,
// domain or regex).
type Matcher interface {
	// Type returns the matcher's type.
	Type() Type
	// Pattern returns the matcher's raw string representation.
	Pattern() string
	// String returns a string representation of the matcher containing its type and pattern.
	String() string
	// Match returns true if the given string matches a predefined pattern.
	// * This method is seldom used, for performance reasons,
	// and is generally taken over by the corresponding MatcherGroup.
	Match(input string) bool
}
// MatcherGroup is an advanced type of matcher that accepts a bunch of basic
// Matchers (of a certain type, not all matcher types).
// For example:
// * FullMatcherGroup accepts FullMatcher and uses a hash table to facilitate lookup.
// * DomainMatcherGroup accepts DomainMatcher and uses a trie to optimize both memory consumption and lookup speed.
type MatcherGroup interface {
	// Match returns the values of all matchers that match the input.
	Match(input string) []uint32
	// MatchAny returns true as soon as one matching matcher is found.
	MatchAny(input string) bool
}
// IndexMatcher is a general type of matcher that accepts all kinds of basic
// matchers.
// It should:
// * Accept all Matcher types with no exception.
// * Optimize string matching with a combination of MatcherGroups.
// * Obey certain priority order specification when returning matched Matchers.
type IndexMatcher interface {
	// Size returns number of matchers added to IndexMatcher.
	Size() uint32
	// Add adds a new Matcher to IndexMatcher, and returns its index. The index will never be 0.
	Add(matcher Matcher) uint32
	// Build builds the IndexMatcher to be ready for matching.
	Build() error
	// Match returns the indices of all matchers that matches the input.
	// * Empty array is returned if no such matcher exists.
	// * The order of returned matchers should follow priority specification.
	// Priority specification:
	// 1. Priority between matcher types: full > domain > substr > regex.
	// 2. Priority of same-priority matchers matching at same position: the early added takes precedence.
	// 3. Priority of domain matchers matching at different levels: the further matched domain takes precedence.
	// 4. Priority of substr matchers matching at different positions: the further matched substr takes precedence.
	Match(input string) []uint32
	// MatchAny returns true as soon as one matching matcher is found.
	MatchAny(input string) bool
}
package uitheme
import (
"image/color"
"fyne.io/fyne"
"fyne.io/fyne/theme"
)
// DarkBlue is a dark UI color theme; its method set matches fyne's theme
// interface so an instance can be supplied as a custom application theme.
type DarkBlue struct{}

// NewDarkBlue returns a new DarkBlue theme instance.
func NewDarkBlue() *DarkBlue {
	return &DarkBlue{}
}

// BackgroundColor is the near-black window background (#1E1E1E).
func (DarkBlue) BackgroundColor() color.Color {
	return color.RGBA{R: 0x1e, G: 0x1e, B: 0x1e, A: 0xff}
}

// Earlier button-color experiments, kept for reference:
//func (DarkBlue) ButtonColor() color.Color { return color.RGBA{R: 0x6f, G: 0x42, B: 0xc1, A: 0xff} } // purple
//func (DarkBlue) ButtonColor() color.Color { return color.RGBA{R: 0x2e, G: 0x22, B: 0x8b, A: 0xff} } // 2E228BFF
func (DarkBlue) ButtonColor() color.Color { return color.RGBA{R: 0x14, G: 0x14, B: 0x14, A: 0xff} } // 141414FF

func (DarkBlue) DisabledButtonColor() color.Color {
	return color.RGBA{R: 0xf, G: 0xf, B: 0x11, A: 0xff}
}

// Text and icons are white; their disabled variants are a light grey.
func (DarkBlue) TextColor() color.Color { return color.RGBA{R: 0xff, G: 0xff, B: 0xff, A: 0xff} }

func (DarkBlue) DisabledTextColor() color.Color {
	return color.RGBA{R: 0xc8, G: 0xc8, B: 0xc8, A: 0xff}
}

func (DarkBlue) IconColor() color.Color { return color.RGBA{R: 0xff, G: 0xff, B: 0xff, A: 0xff} }

func (DarkBlue) DisabledIconColor() color.Color {
	return color.RGBA{R: 0xc8, G: 0xc8, B: 0xc8, A: 0xff}
}

// Hyperlink, primary, and focus all share the same blue (#007BFF).
func (DarkBlue) HyperlinkColor() color.Color { return color.RGBA{R: 0x0, G: 0x7b, B: 0xff, A: 0xff} }

func (DarkBlue) PlaceHolderColor() color.Color {
	return color.RGBA{R: 0x6c, G: 0x75, B: 0x7d, A: 0xff}
}

func (DarkBlue) PrimaryColor() color.Color { return color.RGBA{R: 0x0, G: 0x7b, B: 0xff, A: 0xff} }

func (DarkBlue) HoverColor() color.Color { return color.RGBA{R: 0x66, G: 0x10, B: 0xf2, A: 0xff} }

func (DarkBlue) FocusColor() color.Color { return color.RGBA{R: 0x0, G: 0x7b, B: 0xff, A: 0xff} }

func (DarkBlue) ScrollBarColor() color.Color { return color.RGBA{R: 0x23, G: 0x23, B: 0x23, A: 0x8} }

func (DarkBlue) ShadowColor() color.Color { return color.RGBA{R: 0x0, G: 0x0, B: 0x0, A: 0x40} }

// Text size is fixed; all fonts delegate to fyne's built-in light theme.
func (DarkBlue) TextSize() int { return 14 }

func (DarkBlue) TextFont() fyne.Resource { return theme.LightTheme().TextFont() }

func (DarkBlue) TextBoldFont() fyne.Resource { return theme.LightTheme().TextBoldFont() }

func (DarkBlue) TextItalicFont() fyne.Resource { return theme.LightTheme().TextItalicFont() }

func (DarkBlue) TextBoldItalicFont() fyne.Resource { return theme.LightTheme().TextBoldItalicFont() }

func (DarkBlue) TextMonospaceFont() fyne.Resource { return theme.LightTheme().TextMonospaceFont() }

// Layout metrics in pixels.
func (DarkBlue) Padding() int { return 4 }

func (DarkBlue) IconInlineSize() int { return 12 }

func (DarkBlue) ScrollBarSize() int { return 12 }

func (DarkBlue) ScrollBarSmallSize() int { return 3 }
package tile
import (
"fmt"
"math"
"time"
)
//ToRegularProjectedGrid converts a regular geographic grid to a regular Mercator grid.
//
// The source grid is regular in longitude/latitude; projecting it to
// Mercator makes the row spacing irregular. This function therefore
// resamples: it builds a grid that is regular in projected coordinates
// spanning the full Mercator square, and fills each output cell by
// bilinear interpolation between the four surrounding source points.
func ToRegularProjectedGrid(grid Grid) Grid {
	h := grid.Header
	projector := MercatorProjector{}
	// Log elapsed wall time for the whole conversion on return.
	defer TimeTrack(time.Now(), fmt.Sprint("Project ", h.Nx, "*", h.Ny, "=", (h.Nx*h.Ny)/1000, "k"))
	//coordinates on y axis
	// Projected y coordinate of every source row. Longitude does not affect
	// Mercator y, so Lo1 is passed throughout.
	projYs := make([]float64, h.Ny, h.Ny)
	for y := 0; y < h.Ny; y++ {
		lat := h.La1 - float64(y)*h.Dy
		lon := h.Lo1
		projYs[y] = projector.ToProjection(lon, lat).Y
	}
	//coordinates in x axis
	// Projected x coordinate of every source column (computed at latitude La1).
	projXs := make([]float64, h.Nx, h.Nx)
	for x := 0; x < h.Nx; x++ {
		lat := h.La1
		lon := h.Lo1 + float64(x)*h.Dx
		projXs[x] = projector.ToProjection(lon, lat).X
	}
	//irregular projection grid
	// Source samples tagged with their (irregular) projected positions,
	// stored row-major like grid.Data.
	pointValues := make([]PointValue, len(grid.Data))
	for y := 0; y < h.Ny; y++ {
		for x := 0; x < h.Nx; x++ {
			idx := y*h.Nx + x
			value := grid.Data[idx]
			pointValues[idx] = PointValue{Point{projXs[x], projYs[y]}, value}
		}
	}
	b := bounds(grid)
	// Output grid: regular over the full Mercator square
	// [-MercatorPole, MercatorPole] in both axes, same Nx x Ny resolution.
	projBounds := Bounds{Point{MercatorPole * -1, MercatorPole * -1}, Point{MercatorPole, MercatorPole}}
	dx := (MercatorPole * 2) / float64(h.Nx-1)
	dy := (MercatorPole * 2) / float64(h.Ny-1)
	result := make([]float32, len(grid.Data))
	//iterate grid points in regular projected grid, calculate lat/lon to find surrounding grid points in the original grid
	idx := 0
	for y := projBounds.Max.Y; y >= projBounds.Min.Y; y -= dy {
		for x := projBounds.Min.X; x <= projBounds.Max.X; x += dx {
			latlon := projector.FromProjection(x, y)
			// Index of the source cell containing this lat/lon: (idxX, idxY)
			// is the upper-left corner of the 2x2 interpolation neighbourhood.
			idxX := int(((latlon.X - b.Min.X) / (b.Width())) * float64(h.Nx-1))
			idxY := int(((b.Max.Y - latlon.Y) / (b.height())) * float64(h.Ny-1))
			ul := pointValues[idxY*h.Nx+idxX]
			ll := pointValues[(idxY+1)*h.Nx+idxX]
			ur := pointValues[(idxY)*h.Nx+(idxX+1)]
			lr := pointValues[(idxY+1)*h.Nx+(idxX+1)]
			// Sanity check: the four corners must really bracket (x, y).
			// NOTE(review): a violation is only printed, not handled; the
			// interpolation below still runs on the suspect neighbourhood.
			invariantOk := ul.X < ur.X && ll.Y < ul.Y && ll.X <= x && lr.X >= x && ll.Y <= y && ul.Y >= y
			if !invariantOk {
				fmt.Println("Invariant failed")
			}
			p := Point{x, y}
			v := BilinearInterpolation(&ll, &ul, &lr, &ur, &p)
			result[idx] = float32(v)
			idx++
		}
	}
	// Header corners describe the projected extent; Dx/Dy are the regular
	// projected-coordinate step sizes.
	return Grid{Header: GridHeader{
		ScanMode: grid.Header.ScanMode,
		Nx:       grid.Header.Nx,
		Ny:       grid.Header.Ny,
		Dx:       dx,
		Dy:       dy,
		Lo1:      projBounds.Min.X,
		La1:      projBounds.Max.Y,
		Lo2:      projBounds.Max.X,
		La2:      projBounds.Min.Y}, Data: result}
}
// bounds returns the axis-aligned bounding box spanned by the grid
// header's two corner points (Lo1, La1) and (Lo2, La2), regardless of
// which corner is which.
func bounds(grid Grid) Bounds {
	h := grid.Header
	lower := Point{math.Min(h.Lo1, h.Lo2), math.Min(h.La1, h.La2)}
	upper := Point{math.Max(h.Lo1, h.Lo2), math.Max(h.La1, h.La2)}
	return Bounds{lower, upper}
}
// Scale returns a linear mapping that transforms a value from the interval
// [from1, from2] onto the interval [to1, to2]; inputs outside the source
// interval extrapolate linearly.
func Scale(from1 float64, from2 float64, to1 float64, to2 float64) func(x float64) float64 {
	srcSpan := from2 - from1
	dstSpan := to2 - to1
	return func(x float64) float64 {
		return to1 + ((x-from1)/srcSpan)*dstSpan
	}
}
package sitrep
import (
"github.com/gocql/gocql"
"time"
"github.com/relops/cqlc/cqlc"
"log"
)
const (
	// CQLC_VERSION is the version of the cqlc code generator that produced
	// these bindings. The ALL_CAPS name is generator output and is kept so
	// regenerated code stays identical.
	CQLC_VERSION = "0.10.5"
)
// Column bindings, row struct, and table metadata for the
// create_users_in_exercise table. This code is cqlc generator output (see
// CQLC_VERSION); avoid hand-editing — regenerate instead.

// CreateUsersInExerciseEmailColumn binds the "email" partition key column.
type CreateUsersInExerciseEmailColumn struct {
}

func (b *CreateUsersInExerciseEmailColumn) ColumnName() string {
	return "email"
}

func (b *CreateUsersInExerciseEmailColumn) To(value *string) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}

func (b *CreateUsersInExerciseEmailColumn) Eq(value string) cqlc.Condition {
	column := &CreateUsersInExerciseEmailColumn{}
	binding := cqlc.ColumnBinding{Column: column, Value: value}
	return cqlc.Condition{Binding: binding, Predicate: cqlc.EqPredicate}
}

func (b *CreateUsersInExerciseEmailColumn) PartitionBy() cqlc.Column {
	return b
}

func (b *CreateUsersInExerciseEmailColumn) In(value ...string) cqlc.Condition {
	column := &CreateUsersInExerciseEmailColumn{}
	binding := cqlc.ColumnBinding{Column: column, Value: value}
	return cqlc.Condition{Binding: binding, Predicate: cqlc.InPredicate}
}

// CreateUsersInExerciseExercisesColumn binds the "exercises" map column.
type CreateUsersInExerciseExercisesColumn struct {
}

func (b *CreateUsersInExerciseExercisesColumn) ColumnName() string {
	return "exercises"
}

func (b *CreateUsersInExerciseExercisesColumn) To(value *map[string]string) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}

// CreateUsersInExercise is one row of the create_users_in_exercise table.
type CreateUsersInExercise struct {
	Email     string
	Exercises map[string]string
}

func (s *CreateUsersInExercise) EmailValue() string {
	return s.Email
}

func (s *CreateUsersInExercise) ExercisesValue() map[string]string {
	return s.Exercises
}

// CreateUsersInExerciseDef describes the table's columns for query building.
type CreateUsersInExerciseDef struct {
	EMAIL     cqlc.LastPartitionedStringColumn
	EXERCISES cqlc.StringStringMapColumn
}

// BindCreateUsersInExercise drains the iterator into a slice of rows.
func BindCreateUsersInExercise(iter *gocql.Iter) ([]CreateUsersInExercise, error) {
	array := make([]CreateUsersInExercise, 0)
	err := MapCreateUsersInExercise(iter, func(t CreateUsersInExercise) (bool, error) {
		array = append(array, t)
		return true, nil
	})
	return array, err
}

// MapCreateUsersInExercise scans each row from the iterator and passes it to
// callback; the callback returns false to stop early or an error to abort.
// NOTE: an unrecognized column name calls log.Fatal, terminating the whole
// process (generator behavior on schema drift).
func MapCreateUsersInExercise(iter *gocql.Iter, callback func(t CreateUsersInExercise) (bool, error)) error {
	columns := iter.Columns()
	row := make([]interface{}, len(columns))
	for {
		t := CreateUsersInExercise{}
		for i := 0; i < len(columns); i++ {
			switch columns[i].Name {
			case "email":
				row[i] = &t.Email
			case "exercises":
				row[i] = &t.Exercises
			default:
				log.Fatal("unhandled column: ", columns[i].Name)
			}
		}
		if !iter.Scan(row...) {
			break
		}
		readNext, err := callback(t)
		if err != nil {
			return err
		}
		if !readNext {
			return nil
		}
	}
	return nil
}

func (s *CreateUsersInExerciseDef) SupportsUpsert() bool {
	return true
}

func (s *CreateUsersInExerciseDef) TableName() string {
	return "create_users_in_exercise"
}

func (s *CreateUsersInExerciseDef) Keyspace() string {
	return "sitrep"
}

// Bind binds a row's values (by copy) for writing.
func (s *CreateUsersInExerciseDef) Bind(v CreateUsersInExercise) cqlc.TableBinding {
	cols := []cqlc.ColumnBinding{
		cqlc.ColumnBinding{Column: &CreateUsersInExerciseEmailColumn{}, Value: v.Email},
		cqlc.ColumnBinding{Column: &CreateUsersInExerciseExercisesColumn{}, Value: v.Exercises},
	}
	return cqlc.TableBinding{Table: &CreateUsersInExerciseDef{}, Columns: cols}
}

// To binds pointers into a row struct for reading.
func (s *CreateUsersInExerciseDef) To(v *CreateUsersInExercise) cqlc.TableBinding {
	cols := []cqlc.ColumnBinding{
		cqlc.ColumnBinding{Column: &CreateUsersInExerciseEmailColumn{}, Value: &v.Email},
		cqlc.ColumnBinding{Column: &CreateUsersInExerciseExercisesColumn{}, Value: &v.Exercises},
	}
	return cqlc.TableBinding{Table: &CreateUsersInExerciseDef{}, Columns: cols}
}

func (s *CreateUsersInExerciseDef) ColumnDefinitions() []cqlc.Column {
	return []cqlc.Column{
		&CreateUsersInExerciseEmailColumn{},
		&CreateUsersInExerciseExercisesColumn{},
	}
}

// CreateUsersInExerciseTableDef returns a fully-populated table definition.
func CreateUsersInExerciseTableDef() *CreateUsersInExerciseDef {
	return &CreateUsersInExerciseDef{
		EMAIL:     &CreateUsersInExerciseEmailColumn{},
		EXERCISES: &CreateUsersInExerciseExercisesColumn{},
	}
}

func (s *CreateUsersInExerciseDef) EmailColumn() cqlc.LastPartitionedStringColumn {
	return &CreateUsersInExerciseEmailColumn{}
}

func (s *CreateUsersInExerciseDef) ExercisesColumn() cqlc.StringStringMapColumn {
	return &CreateUsersInExerciseExercisesColumn{}
}
// Column bindings, row struct, and table metadata for the
// exercise_by_identifier table (cqlc generator output; avoid hand-editing).

type ExerciseByIdentifierActiveUntilColumn struct {
}

func (b *ExerciseByIdentifierActiveUntilColumn) ColumnName() string {
	return "active_until"
}

func (b *ExerciseByIdentifierActiveUntilColumn) To(value *time.Time) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}

type ExerciseByIdentifierExerciseDescriptionColumn struct {
}

func (b *ExerciseByIdentifierExerciseDescriptionColumn) ColumnName() string {
	return "exercise_description"
}

func (b *ExerciseByIdentifierExerciseDescriptionColumn) To(value *string) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}

type ExerciseByIdentifierExerciseNameColumn struct {
}

func (b *ExerciseByIdentifierExerciseNameColumn) ColumnName() string {
	return "exercise_name"
}

func (b *ExerciseByIdentifierExerciseNameColumn) To(value *string) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}

type ExerciseByIdentifierHasActivationColumn struct {
}

func (b *ExerciseByIdentifierHasActivationColumn) ColumnName() string {
	return "has_activation"
}

func (b *ExerciseByIdentifierHasActivationColumn) To(value *bool) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}

// ExerciseByIdentifierIdColumn binds the "id" partition key column.
type ExerciseByIdentifierIdColumn struct {
}

func (b *ExerciseByIdentifierIdColumn) ColumnName() string {
	return "id"
}

func (b *ExerciseByIdentifierIdColumn) To(value *gocql.UUID) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}

func (b *ExerciseByIdentifierIdColumn) Eq(value gocql.UUID) cqlc.Condition {
	column := &ExerciseByIdentifierIdColumn{}
	binding := cqlc.ColumnBinding{Column: column, Value: value}
	return cqlc.Condition{Binding: binding, Predicate: cqlc.EqPredicate}
}

func (b *ExerciseByIdentifierIdColumn) PartitionBy() cqlc.Column {
	return b
}

func (b *ExerciseByIdentifierIdColumn) In(value ...gocql.UUID) cqlc.Condition {
	column := &ExerciseByIdentifierIdColumn{}
	binding := cqlc.ColumnBinding{Column: column, Value: value}
	return cqlc.Condition{Binding: binding, Predicate: cqlc.InPredicate}
}

type ExerciseByIdentifierIsActiveColumn struct {
}

func (b *ExerciseByIdentifierIsActiveColumn) ColumnName() string {
	return "is_active"
}

func (b *ExerciseByIdentifierIsActiveColumn) To(value *bool) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}

// ExerciseByIdentifier is one row of the exercise_by_identifier table.
type ExerciseByIdentifier struct {
	ActiveUntil         time.Time
	ExerciseDescription string
	ExerciseName        string
	HasActivation       bool
	Id                  gocql.UUID
	IsActive            bool
}

func (s *ExerciseByIdentifier) ActiveUntilValue() time.Time {
	return s.ActiveUntil
}

func (s *ExerciseByIdentifier) ExerciseDescriptionValue() string {
	return s.ExerciseDescription
}

func (s *ExerciseByIdentifier) ExerciseNameValue() string {
	return s.ExerciseName
}

func (s *ExerciseByIdentifier) HasActivationValue() bool {
	return s.HasActivation
}

func (s *ExerciseByIdentifier) IdValue() gocql.UUID {
	return s.Id
}

func (s *ExerciseByIdentifier) IsActiveValue() bool {
	return s.IsActive
}

// ExerciseByIdentifierDef describes the table's columns for query building.
type ExerciseByIdentifierDef struct {
	ACTIVE_UNTIL         cqlc.TimestampColumn
	EXERCISE_DESCRIPTION cqlc.StringColumn
	EXERCISE_NAME        cqlc.StringColumn
	HAS_ACTIVATION       cqlc.BooleanColumn
	ID                   cqlc.LastPartitionedUUIDColumn
	IS_ACTIVE            cqlc.BooleanColumn
}

// BindExerciseByIdentifier drains the iterator into a slice of rows.
func BindExerciseByIdentifier(iter *gocql.Iter) ([]ExerciseByIdentifier, error) {
	array := make([]ExerciseByIdentifier, 0)
	err := MapExerciseByIdentifier(iter, func(t ExerciseByIdentifier) (bool, error) {
		array = append(array, t)
		return true, nil
	})
	return array, err
}

// MapExerciseByIdentifier scans each row and passes it to callback; the
// callback returns false to stop early or an error to abort. NOTE: an
// unrecognized column name calls log.Fatal, terminating the process.
func MapExerciseByIdentifier(iter *gocql.Iter, callback func(t ExerciseByIdentifier) (bool, error)) error {
	columns := iter.Columns()
	row := make([]interface{}, len(columns))
	for {
		t := ExerciseByIdentifier{}
		for i := 0; i < len(columns); i++ {
			switch columns[i].Name {
			case "active_until":
				row[i] = &t.ActiveUntil
			case "exercise_description":
				row[i] = &t.ExerciseDescription
			case "exercise_name":
				row[i] = &t.ExerciseName
			case "has_activation":
				row[i] = &t.HasActivation
			case "id":
				row[i] = &t.Id
			case "is_active":
				row[i] = &t.IsActive
			default:
				log.Fatal("unhandled column: ", columns[i].Name)
			}
		}
		if !iter.Scan(row...) {
			break
		}
		readNext, err := callback(t)
		if err != nil {
			return err
		}
		if !readNext {
			return nil
		}
	}
	return nil
}

func (s *ExerciseByIdentifierDef) SupportsUpsert() bool {
	return true
}

func (s *ExerciseByIdentifierDef) TableName() string {
	return "exercise_by_identifier"
}

func (s *ExerciseByIdentifierDef) Keyspace() string {
	return "sitrep"
}

// Bind binds a row's values (by copy) for writing.
func (s *ExerciseByIdentifierDef) Bind(v ExerciseByIdentifier) cqlc.TableBinding {
	cols := []cqlc.ColumnBinding{
		cqlc.ColumnBinding{Column: &ExerciseByIdentifierActiveUntilColumn{}, Value: v.ActiveUntil},
		cqlc.ColumnBinding{Column: &ExerciseByIdentifierExerciseDescriptionColumn{}, Value: v.ExerciseDescription},
		cqlc.ColumnBinding{Column: &ExerciseByIdentifierExerciseNameColumn{}, Value: v.ExerciseName},
		cqlc.ColumnBinding{Column: &ExerciseByIdentifierHasActivationColumn{}, Value: v.HasActivation},
		cqlc.ColumnBinding{Column: &ExerciseByIdentifierIdColumn{}, Value: v.Id},
		cqlc.ColumnBinding{Column: &ExerciseByIdentifierIsActiveColumn{}, Value: v.IsActive},
	}
	return cqlc.TableBinding{Table: &ExerciseByIdentifierDef{}, Columns: cols}
}

// To binds pointers into a row struct for reading.
func (s *ExerciseByIdentifierDef) To(v *ExerciseByIdentifier) cqlc.TableBinding {
	cols := []cqlc.ColumnBinding{
		cqlc.ColumnBinding{Column: &ExerciseByIdentifierActiveUntilColumn{}, Value: &v.ActiveUntil},
		cqlc.ColumnBinding{Column: &ExerciseByIdentifierExerciseDescriptionColumn{}, Value: &v.ExerciseDescription},
		cqlc.ColumnBinding{Column: &ExerciseByIdentifierExerciseNameColumn{}, Value: &v.ExerciseName},
		cqlc.ColumnBinding{Column: &ExerciseByIdentifierHasActivationColumn{}, Value: &v.HasActivation},
		cqlc.ColumnBinding{Column: &ExerciseByIdentifierIdColumn{}, Value: &v.Id},
		cqlc.ColumnBinding{Column: &ExerciseByIdentifierIsActiveColumn{}, Value: &v.IsActive},
	}
	return cqlc.TableBinding{Table: &ExerciseByIdentifierDef{}, Columns: cols}
}

func (s *ExerciseByIdentifierDef) ColumnDefinitions() []cqlc.Column {
	return []cqlc.Column{
		&ExerciseByIdentifierActiveUntilColumn{},
		&ExerciseByIdentifierExerciseDescriptionColumn{},
		&ExerciseByIdentifierExerciseNameColumn{},
		&ExerciseByIdentifierHasActivationColumn{},
		&ExerciseByIdentifierIdColumn{},
		&ExerciseByIdentifierIsActiveColumn{},
	}
}

// ExerciseByIdentifierTableDef returns a fully-populated table definition.
func ExerciseByIdentifierTableDef() *ExerciseByIdentifierDef {
	return &ExerciseByIdentifierDef{
		ACTIVE_UNTIL:         &ExerciseByIdentifierActiveUntilColumn{},
		EXERCISE_DESCRIPTION: &ExerciseByIdentifierExerciseDescriptionColumn{},
		EXERCISE_NAME:        &ExerciseByIdentifierExerciseNameColumn{},
		HAS_ACTIVATION:       &ExerciseByIdentifierHasActivationColumn{},
		ID:                   &ExerciseByIdentifierIdColumn{},
		IS_ACTIVE:            &ExerciseByIdentifierIsActiveColumn{},
	}
}

func (s *ExerciseByIdentifierDef) ActiveUntilColumn() cqlc.TimestampColumn {
	return &ExerciseByIdentifierActiveUntilColumn{}
}

func (s *ExerciseByIdentifierDef) ExerciseDescriptionColumn() cqlc.StringColumn {
	return &ExerciseByIdentifierExerciseDescriptionColumn{}
}

func (s *ExerciseByIdentifierDef) ExerciseNameColumn() cqlc.StringColumn {
	return &ExerciseByIdentifierExerciseNameColumn{}
}

func (s *ExerciseByIdentifierDef) HasActivationColumn() cqlc.BooleanColumn {
	return &ExerciseByIdentifierHasActivationColumn{}
}

func (s *ExerciseByIdentifierDef) IdColumn() cqlc.LastPartitionedUUIDColumn {
	return &ExerciseByIdentifierIdColumn{}
}

func (s *ExerciseByIdentifierDef) IsActiveColumn() cqlc.BooleanColumn {
	return &ExerciseByIdentifierIsActiveColumn{}
}
// Column bindings, row struct, and table metadata for the
// exercise_by_identifier_and_email table (cqlc generator output; avoid
// hand-editing).

type ExerciseByIdentifierAndEmailEmailColumn struct {
}

func (b *ExerciseByIdentifierAndEmailEmailColumn) ColumnName() string {
	return "email"
}

func (b *ExerciseByIdentifierAndEmailEmailColumn) To(value *string) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}

type ExerciseByIdentifierAndEmailExerciseNameColumn struct {
}

func (b *ExerciseByIdentifierAndEmailExerciseNameColumn) ColumnName() string {
	return "exercise_name"
}

func (b *ExerciseByIdentifierAndEmailExerciseNameColumn) To(value *string) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}

// ExerciseByIdentifierAndEmailIdColumn binds the "id" partition key column.
type ExerciseByIdentifierAndEmailIdColumn struct {
}

func (b *ExerciseByIdentifierAndEmailIdColumn) ColumnName() string {
	return "id"
}

func (b *ExerciseByIdentifierAndEmailIdColumn) To(value *gocql.UUID) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}

func (b *ExerciseByIdentifierAndEmailIdColumn) Eq(value gocql.UUID) cqlc.Condition {
	column := &ExerciseByIdentifierAndEmailIdColumn{}
	binding := cqlc.ColumnBinding{Column: column, Value: value}
	return cqlc.Condition{Binding: binding, Predicate: cqlc.EqPredicate}
}

func (b *ExerciseByIdentifierAndEmailIdColumn) PartitionBy() cqlc.Column {
	return b
}

func (b *ExerciseByIdentifierAndEmailIdColumn) In(value ...gocql.UUID) cqlc.Condition {
	column := &ExerciseByIdentifierAndEmailIdColumn{}
	binding := cqlc.ColumnBinding{Column: column, Value: value}
	return cqlc.Condition{Binding: binding, Predicate: cqlc.InPredicate}
}

type ExerciseByIdentifierAndEmailIsActiveColumn struct {
}

func (b *ExerciseByIdentifierAndEmailIsActiveColumn) ColumnName() string {
	return "is_active"
}

func (b *ExerciseByIdentifierAndEmailIsActiveColumn) To(value *bool) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}

type ExerciseByIdentifierAndEmailIsAuthorizedColumn struct {
}

func (b *ExerciseByIdentifierAndEmailIsAuthorizedColumn) ColumnName() string {
	return "is_authorized"
}

func (b *ExerciseByIdentifierAndEmailIsAuthorizedColumn) To(value *bool) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}

type ExerciseByIdentifierAndEmailIsOperatorColumn struct {
}

func (b *ExerciseByIdentifierAndEmailIsOperatorColumn) ColumnName() string {
	return "is_operator"
}

func (b *ExerciseByIdentifierAndEmailIsOperatorColumn) To(value *bool) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}

type ExerciseByIdentifierAndEmailUserNameColumn struct {
}

func (b *ExerciseByIdentifierAndEmailUserNameColumn) ColumnName() string {
	return "user_name"
}

func (b *ExerciseByIdentifierAndEmailUserNameColumn) To(value *string) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}

// ExerciseByIdentifierAndEmail is one row of the
// exercise_by_identifier_and_email table.
type ExerciseByIdentifierAndEmail struct {
	Email        string
	ExerciseName string
	Id           gocql.UUID
	IsActive     bool
	IsAuthorized bool
	IsOperator   bool
	UserName     string
}

func (s *ExerciseByIdentifierAndEmail) EmailValue() string {
	return s.Email
}

func (s *ExerciseByIdentifierAndEmail) ExerciseNameValue() string {
	return s.ExerciseName
}

func (s *ExerciseByIdentifierAndEmail) IdValue() gocql.UUID {
	return s.Id
}

func (s *ExerciseByIdentifierAndEmail) IsActiveValue() bool {
	return s.IsActive
}

func (s *ExerciseByIdentifierAndEmail) IsAuthorizedValue() bool {
	return s.IsAuthorized
}

func (s *ExerciseByIdentifierAndEmail) IsOperatorValue() bool {
	return s.IsOperator
}

func (s *ExerciseByIdentifierAndEmail) UserNameValue() string {
	return s.UserName
}

// ExerciseByIdentifierAndEmailDef describes the table's columns for query
// building.
type ExerciseByIdentifierAndEmailDef struct {
	EMAIL         cqlc.StringColumn
	EXERCISE_NAME cqlc.StringColumn
	ID            cqlc.LastPartitionedUUIDColumn
	IS_ACTIVE     cqlc.BooleanColumn
	IS_AUTHORIZED cqlc.BooleanColumn
	IS_OPERATOR   cqlc.BooleanColumn
	USER_NAME     cqlc.StringColumn
}

// BindExerciseByIdentifierAndEmail drains the iterator into a slice of rows.
func BindExerciseByIdentifierAndEmail(iter *gocql.Iter) ([]ExerciseByIdentifierAndEmail, error) {
	array := make([]ExerciseByIdentifierAndEmail, 0)
	err := MapExerciseByIdentifierAndEmail(iter, func(t ExerciseByIdentifierAndEmail) (bool, error) {
		array = append(array, t)
		return true, nil
	})
	return array, err
}

// MapExerciseByIdentifierAndEmail scans each row and passes it to callback;
// the callback returns false to stop early or an error to abort. NOTE: an
// unrecognized column name calls log.Fatal, terminating the process.
func MapExerciseByIdentifierAndEmail(iter *gocql.Iter, callback func(t ExerciseByIdentifierAndEmail) (bool, error)) error {
	columns := iter.Columns()
	row := make([]interface{}, len(columns))
	for {
		t := ExerciseByIdentifierAndEmail{}
		for i := 0; i < len(columns); i++ {
			switch columns[i].Name {
			case "email":
				row[i] = &t.Email
			case "exercise_name":
				row[i] = &t.ExerciseName
			case "id":
				row[i] = &t.Id
			case "is_active":
				row[i] = &t.IsActive
			case "is_authorized":
				row[i] = &t.IsAuthorized
			case "is_operator":
				row[i] = &t.IsOperator
			case "user_name":
				row[i] = &t.UserName
			default:
				log.Fatal("unhandled column: ", columns[i].Name)
			}
		}
		if !iter.Scan(row...) {
			break
		}
		readNext, err := callback(t)
		if err != nil {
			return err
		}
		if !readNext {
			return nil
		}
	}
	return nil
}

func (s *ExerciseByIdentifierAndEmailDef) SupportsUpsert() bool {
	return true
}

func (s *ExerciseByIdentifierAndEmailDef) TableName() string {
	return "exercise_by_identifier_and_email"
}

func (s *ExerciseByIdentifierAndEmailDef) Keyspace() string {
	return "sitrep"
}

// Bind binds a row's values (by copy) for writing.
func (s *ExerciseByIdentifierAndEmailDef) Bind(v ExerciseByIdentifierAndEmail) cqlc.TableBinding {
	cols := []cqlc.ColumnBinding{
		cqlc.ColumnBinding{Column: &ExerciseByIdentifierAndEmailEmailColumn{}, Value: v.Email},
		cqlc.ColumnBinding{Column: &ExerciseByIdentifierAndEmailExerciseNameColumn{}, Value: v.ExerciseName},
		cqlc.ColumnBinding{Column: &ExerciseByIdentifierAndEmailIdColumn{}, Value: v.Id},
		cqlc.ColumnBinding{Column: &ExerciseByIdentifierAndEmailIsActiveColumn{}, Value: v.IsActive},
		cqlc.ColumnBinding{Column: &ExerciseByIdentifierAndEmailIsAuthorizedColumn{}, Value: v.IsAuthorized},
		cqlc.ColumnBinding{Column: &ExerciseByIdentifierAndEmailIsOperatorColumn{}, Value: v.IsOperator},
		cqlc.ColumnBinding{Column: &ExerciseByIdentifierAndEmailUserNameColumn{}, Value: v.UserName},
	}
	return cqlc.TableBinding{Table: &ExerciseByIdentifierAndEmailDef{}, Columns: cols}
}

// To binds pointers into a row struct for reading.
func (s *ExerciseByIdentifierAndEmailDef) To(v *ExerciseByIdentifierAndEmail) cqlc.TableBinding {
	cols := []cqlc.ColumnBinding{
		cqlc.ColumnBinding{Column: &ExerciseByIdentifierAndEmailEmailColumn{}, Value: &v.Email},
		cqlc.ColumnBinding{Column: &ExerciseByIdentifierAndEmailExerciseNameColumn{}, Value: &v.ExerciseName},
		cqlc.ColumnBinding{Column: &ExerciseByIdentifierAndEmailIdColumn{}, Value: &v.Id},
		cqlc.ColumnBinding{Column: &ExerciseByIdentifierAndEmailIsActiveColumn{}, Value: &v.IsActive},
		cqlc.ColumnBinding{Column: &ExerciseByIdentifierAndEmailIsAuthorizedColumn{}, Value: &v.IsAuthorized},
		cqlc.ColumnBinding{Column: &ExerciseByIdentifierAndEmailIsOperatorColumn{}, Value: &v.IsOperator},
		cqlc.ColumnBinding{Column: &ExerciseByIdentifierAndEmailUserNameColumn{}, Value: &v.UserName},
	}
	return cqlc.TableBinding{Table: &ExerciseByIdentifierAndEmailDef{}, Columns: cols}
}

func (s *ExerciseByIdentifierAndEmailDef) ColumnDefinitions() []cqlc.Column {
	return []cqlc.Column{
		&ExerciseByIdentifierAndEmailEmailColumn{},
		&ExerciseByIdentifierAndEmailExerciseNameColumn{},
		&ExerciseByIdentifierAndEmailIdColumn{},
		&ExerciseByIdentifierAndEmailIsActiveColumn{},
		&ExerciseByIdentifierAndEmailIsAuthorizedColumn{},
		&ExerciseByIdentifierAndEmailIsOperatorColumn{},
		&ExerciseByIdentifierAndEmailUserNameColumn{},
	}
}

// ExerciseByIdentifierAndEmailTableDef returns a fully-populated table
// definition.
func ExerciseByIdentifierAndEmailTableDef() *ExerciseByIdentifierAndEmailDef {
	return &ExerciseByIdentifierAndEmailDef{
		EMAIL:         &ExerciseByIdentifierAndEmailEmailColumn{},
		EXERCISE_NAME: &ExerciseByIdentifierAndEmailExerciseNameColumn{},
		ID:            &ExerciseByIdentifierAndEmailIdColumn{},
		IS_ACTIVE:     &ExerciseByIdentifierAndEmailIsActiveColumn{},
		IS_AUTHORIZED: &ExerciseByIdentifierAndEmailIsAuthorizedColumn{},
		IS_OPERATOR:   &ExerciseByIdentifierAndEmailIsOperatorColumn{},
		USER_NAME:     &ExerciseByIdentifierAndEmailUserNameColumn{},
	}
}

func (s *ExerciseByIdentifierAndEmailDef) EmailColumn() cqlc.StringColumn {
	return &ExerciseByIdentifierAndEmailEmailColumn{}
}

func (s *ExerciseByIdentifierAndEmailDef) ExerciseNameColumn() cqlc.StringColumn {
	return &ExerciseByIdentifierAndEmailExerciseNameColumn{}
}

func (s *ExerciseByIdentifierAndEmailDef) IdColumn() cqlc.LastPartitionedUUIDColumn {
	return &ExerciseByIdentifierAndEmailIdColumn{}
}

func (s *ExerciseByIdentifierAndEmailDef) IsActiveColumn() cqlc.BooleanColumn {
	return &ExerciseByIdentifierAndEmailIsActiveColumn{}
}

func (s *ExerciseByIdentifierAndEmailDef) IsAuthorizedColumn() cqlc.BooleanColumn {
	return &ExerciseByIdentifierAndEmailIsAuthorizedColumn{}
}

func (s *ExerciseByIdentifierAndEmailDef) IsOperatorColumn() cqlc.BooleanColumn {
	return &ExerciseByIdentifierAndEmailIsOperatorColumn{}
}

func (s *ExerciseByIdentifierAndEmailDef) UserNameColumn() cqlc.StringColumn {
	return &ExerciseByIdentifierAndEmailUserNameColumn{}
}
// Column bindings for the exercise_permissions_level table (cqlc generator
// output; avoid hand-editing).

// ExercisePermissionsLevelExerciseIdentifierColumn binds the
// "exercise_identifier" clustering column; hence the desc flag and the
// full set of range predicates (Gt/Ge/Lt/Le).
type ExercisePermissionsLevelExerciseIdentifierColumn struct {
	desc bool
}

func (b *ExercisePermissionsLevelExerciseIdentifierColumn) ColumnName() string {
	return "exercise_identifier"
}

func (b *ExercisePermissionsLevelExerciseIdentifierColumn) To(value *gocql.UUID) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}

func (b *ExercisePermissionsLevelExerciseIdentifierColumn) ClusterWith() string {
	return b.ColumnName()
}

// Desc returns a copy of the column marked for descending clustering order.
func (b *ExercisePermissionsLevelExerciseIdentifierColumn) Desc() cqlc.ClusteredColumn {
	return &ExercisePermissionsLevelExerciseIdentifierColumn{desc: true}
}

func (b *ExercisePermissionsLevelExerciseIdentifierColumn) IsDescending() bool {
	return b.desc
}

func (b *ExercisePermissionsLevelExerciseIdentifierColumn) Eq(value gocql.UUID) cqlc.Condition {
	column := &ExercisePermissionsLevelExerciseIdentifierColumn{}
	binding := cqlc.ColumnBinding{Column: column, Value: value}
	return cqlc.Condition{Binding: binding, Predicate: cqlc.EqPredicate}
}

func (b *ExercisePermissionsLevelExerciseIdentifierColumn) In(value ...gocql.UUID) cqlc.Condition {
	column := &ExercisePermissionsLevelExerciseIdentifierColumn{}
	binding := cqlc.ColumnBinding{Column: column, Value: value}
	return cqlc.Condition{Binding: binding, Predicate: cqlc.InPredicate}
}

func (b *ExercisePermissionsLevelExerciseIdentifierColumn) Gt(value gocql.UUID) cqlc.Condition {
	column := &ExercisePermissionsLevelExerciseIdentifierColumn{}
	binding := cqlc.ColumnBinding{Column: column, Value: value}
	return cqlc.Condition{Binding: binding, Predicate: cqlc.GtPredicate}
}

func (b *ExercisePermissionsLevelExerciseIdentifierColumn) Ge(value gocql.UUID) cqlc.Condition {
	column := &ExercisePermissionsLevelExerciseIdentifierColumn{}
	binding := cqlc.ColumnBinding{Column: column, Value: value}
	return cqlc.Condition{Binding: binding, Predicate: cqlc.GePredicate}
}

func (b *ExercisePermissionsLevelExerciseIdentifierColumn) Lt(value gocql.UUID) cqlc.Condition {
	column := &ExercisePermissionsLevelExerciseIdentifierColumn{}
	binding := cqlc.ColumnBinding{Column: column, Value: value}
	return cqlc.Condition{Binding: binding, Predicate: cqlc.LtPredicate}
}

func (b *ExercisePermissionsLevelExerciseIdentifierColumn) Le(value gocql.UUID) cqlc.Condition {
	column := &ExercisePermissionsLevelExerciseIdentifierColumn{}
	binding := cqlc.ColumnBinding{Column: column, Value: value}
	return cqlc.Condition{Binding: binding, Predicate: cqlc.LePredicate}
}

type ExercisePermissionsLevelIsAdminColumn struct {
}

func (b *ExercisePermissionsLevelIsAdminColumn) ColumnName() string {
	return "is_admin"
}

func (b *ExercisePermissionsLevelIsAdminColumn) To(value *bool) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}

type ExercisePermissionsLevelIsAuthorizedColumn struct {
}

func (b *ExercisePermissionsLevelIsAuthorizedColumn) ColumnName() string {
	return "is_authorized"
}

func (b *ExercisePermissionsLevelIsAuthorizedColumn) To(value *bool) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}

type ExercisePermissionsLevelIsInvisibleColumn struct {
}

func (b *ExercisePermissionsLevelIsInvisibleColumn) ColumnName() string {
	return "is_invisible"
}

func (b *ExercisePermissionsLevelIsInvisibleColumn) To(value *bool) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}

type ExercisePermissionsLevelIsOcColumn struct {
}

func (b *ExercisePermissionsLevelIsOcColumn) ColumnName() string {
	return "is_oc"
}

func (b *ExercisePermissionsLevelIsOcColumn) To(value *bool) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}

type ExercisePermissionsLevelIsTraineeColumn struct {
}

func (b *ExercisePermissionsLevelIsTraineeColumn) ColumnName() string {
	return "is_trainee"
}

func (b *ExercisePermissionsLevelIsTraineeColumn) To(value *bool) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}

type ExercisePermissionsLevelRoleDescriptionColumn struct {
}

func (b *ExercisePermissionsLevelRoleDescriptionColumn) ColumnName() string {
	return "role_description"
}

func (b *ExercisePermissionsLevelRoleDescriptionColumn) To(value *string) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}

type ExercisePermissionsLevelUserEmailColumn struct {
}

func (b *ExercisePermissionsLevelUserEmailColumn) ColumnName() string {
	return "user_email"
}

func (b *ExercisePermissionsLevelUserEmailColumn) To(value *string) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}
func (b *ExercisePermissionsLevelUserEmailColumn) Eq(value string) cqlc.Condition {
column := &ExercisePermissionsLevelUserEmailColumn{}
binding := cqlc.ColumnBinding{Column: column, Value: value}
return cqlc.Condition{Binding: binding, Predicate: cqlc.EqPredicate}
}
func (b *ExercisePermissionsLevelUserEmailColumn) PartitionBy() cqlc.Column {
return b
}
func (b *ExercisePermissionsLevelUserEmailColumn) In(value ...string) cqlc.Condition {
column := &ExercisePermissionsLevelUserEmailColumn{}
binding := cqlc.ColumnBinding{Column: column, Value: value}
return cqlc.Condition{Binding: binding, Predicate: cqlc.InPredicate}
}
// ExercisePermissionsLevel is one row of the exercise_permissions_level table.
type ExercisePermissionsLevel struct {
	ExerciseIdentifier gocql.UUID
	IsAdmin bool
	IsAuthorized bool
	IsInvisible bool
	IsOc bool
	IsTrainee bool
	RoleDescription string
	UserEmail string
}
// The *Value accessors below expose each field behind a method; presumably
// they satisfy per-column interfaces in the cqlc runtime — confirm against
// the generator's templates.
func (s *ExercisePermissionsLevel) ExerciseIdentifierValue() gocql.UUID {
	return s.ExerciseIdentifier
}
func (s *ExercisePermissionsLevel) IsAdminValue() bool {
	return s.IsAdmin
}
func (s *ExercisePermissionsLevel) IsAuthorizedValue() bool {
	return s.IsAuthorized
}
func (s *ExercisePermissionsLevel) IsInvisibleValue() bool {
	return s.IsInvisible
}
func (s *ExercisePermissionsLevel) IsOcValue() bool {
	return s.IsOc
}
func (s *ExercisePermissionsLevel) IsTraineeValue() bool {
	return s.IsTrainee
}
func (s *ExercisePermissionsLevel) RoleDescriptionValue() string {
	return s.RoleDescription
}
func (s *ExercisePermissionsLevel) UserEmailValue() string {
	return s.UserEmail
}
// ExercisePermissionsLevelDef groups typed column handles for building
// queries against exercise_permissions_level.
type ExercisePermissionsLevelDef struct {
	EXERCISE_IDENTIFIER cqlc.LastClusteredUUIDColumn
	IS_ADMIN cqlc.BooleanColumn
	IS_AUTHORIZED cqlc.BooleanColumn
	IS_INVISIBLE cqlc.BooleanColumn
	IS_OC cqlc.BooleanColumn
	IS_TRAINEE cqlc.BooleanColumn
	ROLE_DESCRIPTION cqlc.StringColumn
	USER_EMAIL cqlc.LastPartitionedStringColumn
}
// BindExercisePermissionsLevel drains iter and returns all rows as a slice,
// together with any error raised during iteration.
func BindExercisePermissionsLevel(iter *gocql.Iter) ([]ExercisePermissionsLevel, error) {
	rows := make([]ExercisePermissionsLevel, 0)
	collect := func(t ExercisePermissionsLevel) (bool, error) {
		rows = append(rows, t)
		return true, nil
	}
	err := MapExercisePermissionsLevel(iter, collect)
	return rows, err
}
// MapExercisePermissionsLevel scans each row of iter into an
// ExercisePermissionsLevel and hands it to callback. Iteration stops when the
// iterator is exhausted, when callback returns false, or when callback
// returns an error (which is propagated).
// NOTE(review): an unexpected column name hits log.Fatal and terminates the
// whole process; consider returning an error instead — confirm against the
// code generator before changing.
func MapExercisePermissionsLevel(iter *gocql.Iter, callback func(t ExercisePermissionsLevel) (bool, error)) error {
	columns := iter.Columns()
	// row holds scan destinations; each loop iteration re-points them at a
	// fresh struct so callback receives an independent value.
	row := make([]interface{}, len(columns))
	for {
		t := ExercisePermissionsLevel{}
		for i := 0; i < len(columns); i++ {
			switch columns[i].Name {
			case "exercise_identifier":
				row[i] = &t.ExerciseIdentifier
			case "is_admin":
				row[i] = &t.IsAdmin
			case "is_authorized":
				row[i] = &t.IsAuthorized
			case "is_invisible":
				row[i] = &t.IsInvisible
			case "is_oc":
				row[i] = &t.IsOc
			case "is_trainee":
				row[i] = &t.IsTrainee
			case "role_description":
				row[i] = &t.RoleDescription
			case "user_email":
				row[i] = &t.UserEmail
			default:
				log.Fatal("unhandled column: ", columns[i].Name)
			}
		}
		if !iter.Scan(row...) {
			break
		}
		readNext, err := callback(t)
		if err != nil {
			return err
		}
		if !readNext {
			return nil
		}
	}
	return nil
}
// SupportsUpsert reports that this table accepts INSERT-as-upsert writes.
func (s *ExercisePermissionsLevelDef) SupportsUpsert() bool {
	return true
}

// TableName returns the CQL table name.
func (s *ExercisePermissionsLevelDef) TableName() string {
	return "exercise_permissions_level"
}

// Keyspace returns the keyspace this table lives in.
func (s *ExercisePermissionsLevelDef) Keyspace() string {
	return "sitrep"
}

// Bind binds the field values of v to their columns for writing.
func (s *ExercisePermissionsLevelDef) Bind(v ExercisePermissionsLevel) cqlc.TableBinding {
	// Element type elided per gofmt -s; one binding per generated column.
	cols := []cqlc.ColumnBinding{
		{Column: &ExercisePermissionsLevelExerciseIdentifierColumn{}, Value: v.ExerciseIdentifier},
		{Column: &ExercisePermissionsLevelIsAdminColumn{}, Value: v.IsAdmin},
		{Column: &ExercisePermissionsLevelIsAuthorizedColumn{}, Value: v.IsAuthorized},
		{Column: &ExercisePermissionsLevelIsInvisibleColumn{}, Value: v.IsInvisible},
		{Column: &ExercisePermissionsLevelIsOcColumn{}, Value: v.IsOc},
		{Column: &ExercisePermissionsLevelIsTraineeColumn{}, Value: v.IsTrainee},
		{Column: &ExercisePermissionsLevelRoleDescriptionColumn{}, Value: v.RoleDescription},
		{Column: &ExercisePermissionsLevelUserEmailColumn{}, Value: v.UserEmail},
	}
	return cqlc.TableBinding{Table: &ExercisePermissionsLevelDef{}, Columns: cols}
}

// To binds pointers to v's fields so a row can be scanned into v.
func (s *ExercisePermissionsLevelDef) To(v *ExercisePermissionsLevel) cqlc.TableBinding {
	cols := []cqlc.ColumnBinding{
		{Column: &ExercisePermissionsLevelExerciseIdentifierColumn{}, Value: &v.ExerciseIdentifier},
		{Column: &ExercisePermissionsLevelIsAdminColumn{}, Value: &v.IsAdmin},
		{Column: &ExercisePermissionsLevelIsAuthorizedColumn{}, Value: &v.IsAuthorized},
		{Column: &ExercisePermissionsLevelIsInvisibleColumn{}, Value: &v.IsInvisible},
		{Column: &ExercisePermissionsLevelIsOcColumn{}, Value: &v.IsOc},
		{Column: &ExercisePermissionsLevelIsTraineeColumn{}, Value: &v.IsTrainee},
		{Column: &ExercisePermissionsLevelRoleDescriptionColumn{}, Value: &v.RoleDescription},
		{Column: &ExercisePermissionsLevelUserEmailColumn{}, Value: &v.UserEmail},
	}
	return cqlc.TableBinding{Table: &ExercisePermissionsLevelDef{}, Columns: cols}
}
// ColumnDefinitions lists every column of exercise_permissions_level.
func (s *ExercisePermissionsLevelDef) ColumnDefinitions() []cqlc.Column {
	return []cqlc.Column{
		&ExercisePermissionsLevelExerciseIdentifierColumn{},
		&ExercisePermissionsLevelIsAdminColumn{},
		&ExercisePermissionsLevelIsAuthorizedColumn{},
		&ExercisePermissionsLevelIsInvisibleColumn{},
		&ExercisePermissionsLevelIsOcColumn{},
		&ExercisePermissionsLevelIsTraineeColumn{},
		&ExercisePermissionsLevelRoleDescriptionColumn{},
		&ExercisePermissionsLevelUserEmailColumn{},
	}
}
// ExercisePermissionsLevelTableDef builds a table definition with all column
// handles populated, for use in query construction.
func ExercisePermissionsLevelTableDef() *ExercisePermissionsLevelDef {
	return &ExercisePermissionsLevelDef{
		EXERCISE_IDENTIFIER: &ExercisePermissionsLevelExerciseIdentifierColumn{},
		IS_ADMIN: &ExercisePermissionsLevelIsAdminColumn{},
		IS_AUTHORIZED: &ExercisePermissionsLevelIsAuthorizedColumn{},
		IS_INVISIBLE: &ExercisePermissionsLevelIsInvisibleColumn{},
		IS_OC: &ExercisePermissionsLevelIsOcColumn{},
		IS_TRAINEE: &ExercisePermissionsLevelIsTraineeColumn{},
		ROLE_DESCRIPTION: &ExercisePermissionsLevelRoleDescriptionColumn{},
		USER_EMAIL: &ExercisePermissionsLevelUserEmailColumn{},
	}
}
// The *Column accessors below each return a fresh handle for one column.
func (s *ExercisePermissionsLevelDef) ExerciseIdentifierColumn() cqlc.LastClusteredUUIDColumn {
	return &ExercisePermissionsLevelExerciseIdentifierColumn{}
}
func (s *ExercisePermissionsLevelDef) IsAdminColumn() cqlc.BooleanColumn {
	return &ExercisePermissionsLevelIsAdminColumn{}
}
func (s *ExercisePermissionsLevelDef) IsAuthorizedColumn() cqlc.BooleanColumn {
	return &ExercisePermissionsLevelIsAuthorizedColumn{}
}
func (s *ExercisePermissionsLevelDef) IsInvisibleColumn() cqlc.BooleanColumn {
	return &ExercisePermissionsLevelIsInvisibleColumn{}
}
func (s *ExercisePermissionsLevelDef) IsOcColumn() cqlc.BooleanColumn {
	return &ExercisePermissionsLevelIsOcColumn{}
}
func (s *ExercisePermissionsLevelDef) IsTraineeColumn() cqlc.BooleanColumn {
	return &ExercisePermissionsLevelIsTraineeColumn{}
}
func (s *ExercisePermissionsLevelDef) RoleDescriptionColumn() cqlc.StringColumn {
	return &ExercisePermissionsLevelRoleDescriptionColumn{}
}
func (s *ExercisePermissionsLevelDef) UserEmailColumn() cqlc.LastPartitionedStringColumn {
	return &ExercisePermissionsLevelUserEmailColumn{}
}
// SchemaMigrationsVersionColumn maps the "version" counter column of the
// schema_migrations table.
type SchemaMigrationsVersionColumn struct {
}
func (b *SchemaMigrationsVersionColumn) ColumnName() string {
	return "version"
}
func (b *SchemaMigrationsVersionColumn) To(value *int64) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}
// CanIncrement reports that this column supports counter increments.
func (b *SchemaMigrationsVersionColumn) CanIncrement() bool {
	return true
}
// SchemaMigrationsVersionrowColumn maps the "versionrow" column, the table's
// partition key.
type SchemaMigrationsVersionrowColumn struct {
}
func (b *SchemaMigrationsVersionrowColumn) ColumnName() string {
	return "versionrow"
}
func (b *SchemaMigrationsVersionrowColumn) To(value *int64) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}
// Eq builds a versionrow = ? condition.
func (b *SchemaMigrationsVersionrowColumn) Eq(value int64) cqlc.Condition {
	column := &SchemaMigrationsVersionrowColumn{}
	binding := cqlc.ColumnBinding{Column: column, Value: value}
	return cqlc.Condition{Binding: binding, Predicate: cqlc.EqPredicate}
}
// PartitionBy marks this column as a partition-key component.
func (b *SchemaMigrationsVersionrowColumn) PartitionBy() cqlc.Column {
	return b
}
// In builds a versionrow IN (?, ...) condition.
func (b *SchemaMigrationsVersionrowColumn) In(value ...int64) cqlc.Condition {
	column := &SchemaMigrationsVersionrowColumn{}
	binding := cqlc.ColumnBinding{Column: column, Value: value}
	return cqlc.Condition{Binding: binding, Predicate: cqlc.InPredicate}
}
// SchemaMigrations is one row of the schema_migrations counter table.
type SchemaMigrations struct {
	Version int64
	Versionrow int64
}
func (s *SchemaMigrations) VersionValue() int64 {
	return s.Version
}
func (s *SchemaMigrations) VersionrowValue() int64 {
	return s.Versionrow
}
// SchemaMigrationsDef groups typed column handles for schema_migrations.
type SchemaMigrationsDef struct {
	VERSION cqlc.CounterColumn
	VERSIONROW cqlc.LastPartitionedInt64Column
}
// BindSchemaMigrations drains iter and returns all rows as a slice, together
// with any error raised during iteration.
func BindSchemaMigrations(iter *gocql.Iter) ([]SchemaMigrations, error) {
	rows := make([]SchemaMigrations, 0)
	collect := func(t SchemaMigrations) (bool, error) {
		rows = append(rows, t)
		return true, nil
	}
	err := MapSchemaMigrations(iter, collect)
	return rows, err
}
// MapSchemaMigrations scans each row of iter into a SchemaMigrations value
// and hands it to callback; iteration stops on exhaustion, a false return,
// or an error (which is propagated).
// NOTE(review): an unexpected column name hits log.Fatal and terminates the
// process; consider returning an error instead — confirm against the generator.
func MapSchemaMigrations(iter *gocql.Iter, callback func(t SchemaMigrations) (bool, error)) error {
	columns := iter.Columns()
	// Scan destinations are re-pointed at a fresh struct every iteration.
	row := make([]interface{}, len(columns))
	for {
		t := SchemaMigrations{}
		for i := 0; i < len(columns); i++ {
			switch columns[i].Name {
			case "version":
				row[i] = &t.Version
			case "versionrow":
				row[i] = &t.Versionrow
			default:
				log.Fatal("unhandled column: ", columns[i].Name)
			}
		}
		if !iter.Scan(row...) {
			break
		}
		readNext, err := callback(t)
		if err != nil {
			return err
		}
		if !readNext {
			return nil
		}
	}
	return nil
}
// IsCounterTable reports that schema_migrations is a Cassandra counter table.
func (s *SchemaMigrationsDef) IsCounterTable() bool {
	return true
}

// TableName returns the CQL table name.
func (s *SchemaMigrationsDef) TableName() string {
	return "schema_migrations"
}

// Keyspace returns the keyspace this table lives in.
func (s *SchemaMigrationsDef) Keyspace() string {
	return "sitrep"
}

// Bind binds the field values of v to their columns for writing.
func (s *SchemaMigrationsDef) Bind(v SchemaMigrations) cqlc.TableBinding {
	// Element type elided per gofmt -s.
	cols := []cqlc.ColumnBinding{
		{Column: &SchemaMigrationsVersionColumn{}, Value: v.Version},
		{Column: &SchemaMigrationsVersionrowColumn{}, Value: v.Versionrow},
	}
	return cqlc.TableBinding{Table: &SchemaMigrationsDef{}, Columns: cols}
}

// To binds pointers to v's fields so a row can be scanned into v.
func (s *SchemaMigrationsDef) To(v *SchemaMigrations) cqlc.TableBinding {
	cols := []cqlc.ColumnBinding{
		{Column: &SchemaMigrationsVersionColumn{}, Value: &v.Version},
		{Column: &SchemaMigrationsVersionrowColumn{}, Value: &v.Versionrow},
	}
	return cqlc.TableBinding{Table: &SchemaMigrationsDef{}, Columns: cols}
}
// ColumnDefinitions lists every column of schema_migrations.
func (s *SchemaMigrationsDef) ColumnDefinitions() []cqlc.Column {
	return []cqlc.Column{
		&SchemaMigrationsVersionColumn{},
		&SchemaMigrationsVersionrowColumn{},
	}
}
// SchemaMigrationsTableDef builds a table definition with all column handles
// populated.
func SchemaMigrationsTableDef() *SchemaMigrationsDef {
	return &SchemaMigrationsDef{
		VERSION: &SchemaMigrationsVersionColumn{},
		VERSIONROW: &SchemaMigrationsVersionrowColumn{},
	}
}
func (s *SchemaMigrationsDef) VersionColumn() cqlc.CounterColumn {
	return &SchemaMigrationsVersionColumn{}
}
func (s *SchemaMigrationsDef) VersionrowColumn() cqlc.LastPartitionedInt64Column {
	return &SchemaMigrationsVersionrowColumn{}
}
// SettingsByExerciseIdentifierIdColumn maps the UUID "id" column, the
// partition key of settings_by_exercise_identifier.
type SettingsByExerciseIdentifierIdColumn struct {
}
func (b *SettingsByExerciseIdentifierIdColumn) ColumnName() string {
	return "id"
}
func (b *SettingsByExerciseIdentifierIdColumn) To(value *gocql.UUID) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}
// Eq builds an id = ? condition.
func (b *SettingsByExerciseIdentifierIdColumn) Eq(value gocql.UUID) cqlc.Condition {
	column := &SettingsByExerciseIdentifierIdColumn{}
	binding := cqlc.ColumnBinding{Column: column, Value: value}
	return cqlc.Condition{Binding: binding, Predicate: cqlc.EqPredicate}
}
// PartitionBy marks this column as a partition-key component.
func (b *SettingsByExerciseIdentifierIdColumn) PartitionBy() cqlc.Column {
	return b
}
// In builds an id IN (?, ...) condition.
func (b *SettingsByExerciseIdentifierIdColumn) In(value ...gocql.UUID) cqlc.Condition {
	column := &SettingsByExerciseIdentifierIdColumn{}
	binding := cqlc.ColumnBinding{Column: column, Value: value}
	return cqlc.Condition{Binding: binding, Predicate: cqlc.InPredicate}
}
// SettingsByExerciseIdentifierIsDefaultColumn maps the boolean "is_default" column.
type SettingsByExerciseIdentifierIsDefaultColumn struct {
}
func (b *SettingsByExerciseIdentifierIsDefaultColumn) ColumnName() string {
	return "is_default"
}
func (b *SettingsByExerciseIdentifierIsDefaultColumn) To(value *bool) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}
// SettingsByExerciseIdentifierSettingsColumn maps the map<text, text>
// "settings" column.
type SettingsByExerciseIdentifierSettingsColumn struct {
}
func (b *SettingsByExerciseIdentifierSettingsColumn) ColumnName() string {
	return "settings"
}
func (b *SettingsByExerciseIdentifierSettingsColumn) To(value *map[string]string) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}
// SettingsByExerciseIdentifierSettingsVersionColumn maps the text
// "settings_version" column.
type SettingsByExerciseIdentifierSettingsVersionColumn struct {
}
func (b *SettingsByExerciseIdentifierSettingsVersionColumn) ColumnName() string {
	return "settings_version"
}
func (b *SettingsByExerciseIdentifierSettingsVersionColumn) To(value *string) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}
// SettingsByExerciseIdentifier is one row of settings_by_exercise_identifier.
type SettingsByExerciseIdentifier struct {
	Id gocql.UUID
	IsDefault bool
	Settings map[string]string
	SettingsVersion string
}
func (s *SettingsByExerciseIdentifier) IdValue() gocql.UUID {
	return s.Id
}
func (s *SettingsByExerciseIdentifier) IsDefaultValue() bool {
	return s.IsDefault
}
// SettingsValue returns the settings map; note the map is shared with the
// struct, not copied.
func (s *SettingsByExerciseIdentifier) SettingsValue() map[string]string {
	return s.Settings
}
func (s *SettingsByExerciseIdentifier) SettingsVersionValue() string {
	return s.SettingsVersion
}
// SettingsByExerciseIdentifierDef groups typed column handles for
// settings_by_exercise_identifier.
type SettingsByExerciseIdentifierDef struct {
	ID cqlc.LastPartitionedUUIDColumn
	IS_DEFAULT cqlc.BooleanColumn
	SETTINGS cqlc.StringStringMapColumn
	SETTINGS_VERSION cqlc.StringColumn
}
// BindSettingsByExerciseIdentifier drains iter and returns all rows as a
// slice, together with any error raised during iteration.
func BindSettingsByExerciseIdentifier(iter *gocql.Iter) ([]SettingsByExerciseIdentifier, error) {
	rows := make([]SettingsByExerciseIdentifier, 0)
	collect := func(t SettingsByExerciseIdentifier) (bool, error) {
		rows = append(rows, t)
		return true, nil
	}
	err := MapSettingsByExerciseIdentifier(iter, collect)
	return rows, err
}
// MapSettingsByExerciseIdentifier scans each row of iter into a
// SettingsByExerciseIdentifier and hands it to callback; iteration stops on
// exhaustion, a false return, or an error (which is propagated).
// NOTE(review): an unexpected column name hits log.Fatal and terminates the
// process; consider returning an error instead — confirm against the generator.
func MapSettingsByExerciseIdentifier(iter *gocql.Iter, callback func(t SettingsByExerciseIdentifier) (bool, error)) error {
	columns := iter.Columns()
	// Scan destinations are re-pointed at a fresh struct every iteration.
	row := make([]interface{}, len(columns))
	for {
		t := SettingsByExerciseIdentifier{}
		for i := 0; i < len(columns); i++ {
			switch columns[i].Name {
			case "id":
				row[i] = &t.Id
			case "is_default":
				row[i] = &t.IsDefault
			case "settings":
				row[i] = &t.Settings
			case "settings_version":
				row[i] = &t.SettingsVersion
			default:
				log.Fatal("unhandled column: ", columns[i].Name)
			}
		}
		if !iter.Scan(row...) {
			break
		}
		readNext, err := callback(t)
		if err != nil {
			return err
		}
		if !readNext {
			return nil
		}
	}
	return nil
}
// SupportsUpsert reports that this table accepts INSERT-as-upsert writes.
func (s *SettingsByExerciseIdentifierDef) SupportsUpsert() bool {
	return true
}

// TableName returns the CQL table name.
func (s *SettingsByExerciseIdentifierDef) TableName() string {
	return "settings_by_exercise_identifier"
}

// Keyspace returns the keyspace this table lives in.
func (s *SettingsByExerciseIdentifierDef) Keyspace() string {
	return "sitrep"
}

// Bind binds the field values of v to their columns for writing.
func (s *SettingsByExerciseIdentifierDef) Bind(v SettingsByExerciseIdentifier) cqlc.TableBinding {
	// Element type elided per gofmt -s.
	cols := []cqlc.ColumnBinding{
		{Column: &SettingsByExerciseIdentifierIdColumn{}, Value: v.Id},
		{Column: &SettingsByExerciseIdentifierIsDefaultColumn{}, Value: v.IsDefault},
		{Column: &SettingsByExerciseIdentifierSettingsColumn{}, Value: v.Settings},
		{Column: &SettingsByExerciseIdentifierSettingsVersionColumn{}, Value: v.SettingsVersion},
	}
	return cqlc.TableBinding{Table: &SettingsByExerciseIdentifierDef{}, Columns: cols}
}

// To binds pointers to v's fields so a row can be scanned into v.
func (s *SettingsByExerciseIdentifierDef) To(v *SettingsByExerciseIdentifier) cqlc.TableBinding {
	cols := []cqlc.ColumnBinding{
		{Column: &SettingsByExerciseIdentifierIdColumn{}, Value: &v.Id},
		{Column: &SettingsByExerciseIdentifierIsDefaultColumn{}, Value: &v.IsDefault},
		{Column: &SettingsByExerciseIdentifierSettingsColumn{}, Value: &v.Settings},
		{Column: &SettingsByExerciseIdentifierSettingsVersionColumn{}, Value: &v.SettingsVersion},
	}
	return cqlc.TableBinding{Table: &SettingsByExerciseIdentifierDef{}, Columns: cols}
}
// ColumnDefinitions lists every column of settings_by_exercise_identifier.
func (s *SettingsByExerciseIdentifierDef) ColumnDefinitions() []cqlc.Column {
	return []cqlc.Column{
		&SettingsByExerciseIdentifierIdColumn{},
		&SettingsByExerciseIdentifierIsDefaultColumn{},
		&SettingsByExerciseIdentifierSettingsColumn{},
		&SettingsByExerciseIdentifierSettingsVersionColumn{},
	}
}
// SettingsByExerciseIdentifierTableDef builds a table definition with all
// column handles populated.
func SettingsByExerciseIdentifierTableDef() *SettingsByExerciseIdentifierDef {
	return &SettingsByExerciseIdentifierDef{
		ID: &SettingsByExerciseIdentifierIdColumn{},
		IS_DEFAULT: &SettingsByExerciseIdentifierIsDefaultColumn{},
		SETTINGS: &SettingsByExerciseIdentifierSettingsColumn{},
		SETTINGS_VERSION: &SettingsByExerciseIdentifierSettingsVersionColumn{},
	}
}
func (s *SettingsByExerciseIdentifierDef) IdColumn() cqlc.LastPartitionedUUIDColumn {
	return &SettingsByExerciseIdentifierIdColumn{}
}
func (s *SettingsByExerciseIdentifierDef) IsDefaultColumn() cqlc.BooleanColumn {
	return &SettingsByExerciseIdentifierIsDefaultColumn{}
}
func (s *SettingsByExerciseIdentifierDef) SettingsColumn() cqlc.StringStringMapColumn {
	return &SettingsByExerciseIdentifierSettingsColumn{}
}
func (s *SettingsByExerciseIdentifierDef) SettingsVersionColumn() cqlc.StringColumn {
	return &SettingsByExerciseIdentifierSettingsVersionColumn{}
}
// The following types each map one column of the users_by_email table; every
// type exposes ColumnName (CQL name) and To (scan-destination binding).
// UsersByEmailAccessValidTillColumn maps the timestamp "access_valid_till" column.
type UsersByEmailAccessValidTillColumn struct {
}
func (b *UsersByEmailAccessValidTillColumn) ColumnName() string {
	return "access_valid_till"
}
func (b *UsersByEmailAccessValidTillColumn) To(value *time.Time) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}
// UsersByEmailAnalyticsUserTrackingTokenColumn maps the text
// "analytics_user_tracking_token" column.
type UsersByEmailAnalyticsUserTrackingTokenColumn struct {
}
func (b *UsersByEmailAnalyticsUserTrackingTokenColumn) ColumnName() string {
	return "analytics_user_tracking_token"
}
func (b *UsersByEmailAnalyticsUserTrackingTokenColumn) To(value *string) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}
// UsersByEmailEmailColumn maps the text "email" column, the table's
// partition key.
type UsersByEmailEmailColumn struct {
}
func (b *UsersByEmailEmailColumn) ColumnName() string {
	return "email"
}
func (b *UsersByEmailEmailColumn) To(value *string) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}
// Eq builds an email = ? condition.
func (b *UsersByEmailEmailColumn) Eq(value string) cqlc.Condition {
	column := &UsersByEmailEmailColumn{}
	binding := cqlc.ColumnBinding{Column: column, Value: value}
	return cqlc.Condition{Binding: binding, Predicate: cqlc.EqPredicate}
}
// PartitionBy marks this column as a partition-key component.
func (b *UsersByEmailEmailColumn) PartitionBy() cqlc.Column {
	return b
}
// In builds an email IN (?, ...) condition.
func (b *UsersByEmailEmailColumn) In(value ...string) cqlc.Condition {
	column := &UsersByEmailEmailColumn{}
	binding := cqlc.ColumnBinding{Column: column, Value: value}
	return cqlc.Condition{Binding: binding, Predicate: cqlc.InPredicate}
}
// UsersByEmailEncryptedPasswordColumn maps the text "encrypted_password" column.
type UsersByEmailEncryptedPasswordColumn struct {
}
func (b *UsersByEmailEncryptedPasswordColumn) ColumnName() string {
	return "encrypted_password"
}
func (b *UsersByEmailEncryptedPasswordColumn) To(value *string) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}
// UsersByEmailIsAdminColumn maps the boolean "is_admin" column.
type UsersByEmailIsAdminColumn struct {
}
func (b *UsersByEmailIsAdminColumn) ColumnName() string {
	return "is_admin"
}
func (b *UsersByEmailIsAdminColumn) To(value *bool) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}
// UsersByEmailIsAnalyzedColumn maps the boolean "is_analyzed" column.
type UsersByEmailIsAnalyzedColumn struct {
}
func (b *UsersByEmailIsAnalyzedColumn) ColumnName() string {
	return "is_analyzed"
}
func (b *UsersByEmailIsAnalyzedColumn) To(value *bool) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}
// UsersByEmailIsBannedColumn maps the boolean "is_banned" column.
type UsersByEmailIsBannedColumn struct {
}
func (b *UsersByEmailIsBannedColumn) ColumnName() string {
	return "is_banned"
}
func (b *UsersByEmailIsBannedColumn) To(value *bool) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}
// UsersByEmailIsConfirmedColumn maps the boolean "is_confirmed" column.
type UsersByEmailIsConfirmedColumn struct {
}
func (b *UsersByEmailIsConfirmedColumn) ColumnName() string {
	return "is_confirmed"
}
func (b *UsersByEmailIsConfirmedColumn) To(value *bool) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}
// UsersByEmailIsEmployeeColumn maps the boolean "is_employee" column.
type UsersByEmailIsEmployeeColumn struct {
}
func (b *UsersByEmailIsEmployeeColumn) ColumnName() string {
	return "is_employee"
}
func (b *UsersByEmailIsEmployeeColumn) To(value *bool) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}
// UsersByEmailIsExpiringColumn maps the boolean "is_expiring" column.
type UsersByEmailIsExpiringColumn struct {
}
func (b *UsersByEmailIsExpiringColumn) ColumnName() string {
	return "is_expiring"
}
func (b *UsersByEmailIsExpiringColumn) To(value *bool) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}
// UsersByEmailJwtEncryptionKeyColumn maps the text "jwt_encryption_key" column.
type UsersByEmailJwtEncryptionKeyColumn struct {
}
func (b *UsersByEmailJwtEncryptionKeyColumn) ColumnName() string {
	return "jwt_encryption_key"
}
func (b *UsersByEmailJwtEncryptionKeyColumn) To(value *string) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}
// UsersByEmailLastLoggedInColumn maps the timestamp "last_logged_in" column.
type UsersByEmailLastLoggedInColumn struct {
}
func (b *UsersByEmailLastLoggedInColumn) ColumnName() string {
	return "last_logged_in"
}
func (b *UsersByEmailLastLoggedInColumn) To(value *time.Time) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}
// UsersByEmailRealNameColumn maps the text "real_name" column.
type UsersByEmailRealNameColumn struct {
}
func (b *UsersByEmailRealNameColumn) ColumnName() string {
	return "real_name"
}
func (b *UsersByEmailRealNameColumn) To(value *string) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}
// UsersByEmailTwitterNameColumn maps the text "twitter_name" column.
type UsersByEmailTwitterNameColumn struct {
}
func (b *UsersByEmailTwitterNameColumn) ColumnName() string {
	return "twitter_name"
}
func (b *UsersByEmailTwitterNameColumn) To(value *string) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}
// UsersByEmailUserRankColumn maps the text "user_rank" column.
type UsersByEmailUserRankColumn struct {
}
func (b *UsersByEmailUserRankColumn) ColumnName() string {
	return "user_rank"
}
func (b *UsersByEmailUserRankColumn) To(value *string) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}
// UsersByEmailUserSelfDescriptionColumn maps the text "user_self_description" column.
type UsersByEmailUserSelfDescriptionColumn struct {
}
func (b *UsersByEmailUserSelfDescriptionColumn) ColumnName() string {
	return "user_self_description"
}
func (b *UsersByEmailUserSelfDescriptionColumn) To(value *string) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}
// UsersByEmailUserTitleColumn maps the text "user_title" column.
type UsersByEmailUserTitleColumn struct {
}
func (b *UsersByEmailUserTitleColumn) ColumnName() string {
	return "user_title"
}
func (b *UsersByEmailUserTitleColumn) To(value *string) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}
// UsersByEmailUserUnitColumn maps the text "user_unit" column.
type UsersByEmailUserUnitColumn struct {
}
func (b *UsersByEmailUserUnitColumn) ColumnName() string {
	return "user_unit"
}
func (b *UsersByEmailUserUnitColumn) To(value *string) cqlc.ColumnBinding {
	return cqlc.ColumnBinding{Column: b, Value: value}
}
// UsersByEmail is one row of the users_by_email table.
type UsersByEmail struct {
	AccessValidTill time.Time
	AnalyticsUserTrackingToken string
	Email string
	EncryptedPassword string
	IsAdmin bool
	IsAnalyzed bool
	IsBanned bool
	IsConfirmed bool
	IsEmployee bool
	IsExpiring bool
	JwtEncryptionKey string
	LastLoggedIn time.Time
	RealName string
	TwitterName string
	UserRank string
	UserSelfDescription string
	UserTitle string
	UserUnit string
}
// The *Value accessors below expose each field behind a method for the cqlc
// runtime.
func (s *UsersByEmail) AccessValidTillValue() time.Time {
	return s.AccessValidTill
}
func (s *UsersByEmail) AnalyticsUserTrackingTokenValue() string {
	return s.AnalyticsUserTrackingToken
}
func (s *UsersByEmail) EmailValue() string {
	return s.Email
}
func (s *UsersByEmail) EncryptedPasswordValue() string {
	return s.EncryptedPassword
}
func (s *UsersByEmail) IsAdminValue() bool {
	return s.IsAdmin
}
func (s *UsersByEmail) IsAnalyzedValue() bool {
	return s.IsAnalyzed
}
func (s *UsersByEmail) IsBannedValue() bool {
	return s.IsBanned
}
func (s *UsersByEmail) IsConfirmedValue() bool {
	return s.IsConfirmed
}
func (s *UsersByEmail) IsEmployeeValue() bool {
	return s.IsEmployee
}
func (s *UsersByEmail) IsExpiringValue() bool {
	return s.IsExpiring
}
func (s *UsersByEmail) JwtEncryptionKeyValue() string {
	return s.JwtEncryptionKey
}
func (s *UsersByEmail) LastLoggedInValue() time.Time {
	return s.LastLoggedIn
}
func (s *UsersByEmail) RealNameValue() string {
	return s.RealName
}
func (s *UsersByEmail) TwitterNameValue() string {
	return s.TwitterName
}
func (s *UsersByEmail) UserRankValue() string {
	return s.UserRank
}
func (s *UsersByEmail) UserSelfDescriptionValue() string {
	return s.UserSelfDescription
}
func (s *UsersByEmail) UserTitleValue() string {
	return s.UserTitle
}
func (s *UsersByEmail) UserUnitValue() string {
	return s.UserUnit
}
// UsersByEmailDef groups typed column handles for users_by_email.
type UsersByEmailDef struct {
	ACCESS_VALID_TILL cqlc.TimestampColumn
	ANALYTICS_USER_TRACKING_TOKEN cqlc.StringColumn
	EMAIL cqlc.LastPartitionedStringColumn
	ENCRYPTED_PASSWORD cqlc.StringColumn
	IS_ADMIN cqlc.BooleanColumn
	IS_ANALYZED cqlc.BooleanColumn
	IS_BANNED cqlc.BooleanColumn
	IS_CONFIRMED cqlc.BooleanColumn
	IS_EMPLOYEE cqlc.BooleanColumn
	IS_EXPIRING cqlc.BooleanColumn
	JWT_ENCRYPTION_KEY cqlc.StringColumn
	LAST_LOGGED_IN cqlc.TimestampColumn
	REAL_NAME cqlc.StringColumn
	TWITTER_NAME cqlc.StringColumn
	USER_RANK cqlc.StringColumn
	USER_SELF_DESCRIPTION cqlc.StringColumn
	USER_TITLE cqlc.StringColumn
	USER_UNIT cqlc.StringColumn
}
// BindUsersByEmail drains iter and returns all rows as a slice, together
// with any error raised during iteration.
func BindUsersByEmail(iter *gocql.Iter) ([]UsersByEmail, error) {
	rows := make([]UsersByEmail, 0)
	collect := func(t UsersByEmail) (bool, error) {
		rows = append(rows, t)
		return true, nil
	}
	err := MapUsersByEmail(iter, collect)
	return rows, err
}
// MapUsersByEmail scans each row of iter into a UsersByEmail and hands it to
// callback; iteration stops on exhaustion, a false return, or an error
// (which is propagated).
// NOTE(review): an unexpected column name hits log.Fatal and terminates the
// process; consider returning an error instead — confirm against the generator.
func MapUsersByEmail(iter *gocql.Iter, callback func(t UsersByEmail) (bool, error)) error {
	columns := iter.Columns()
	// Scan destinations are re-pointed at a fresh struct every iteration.
	row := make([]interface{}, len(columns))
	for {
		t := UsersByEmail{}
		for i := 0; i < len(columns); i++ {
			switch columns[i].Name {
			case "access_valid_till":
				row[i] = &t.AccessValidTill
			case "analytics_user_tracking_token":
				row[i] = &t.AnalyticsUserTrackingToken
			case "email":
				row[i] = &t.Email
			case "encrypted_password":
				row[i] = &t.EncryptedPassword
			case "is_admin":
				row[i] = &t.IsAdmin
			case "is_analyzed":
				row[i] = &t.IsAnalyzed
			case "is_banned":
				row[i] = &t.IsBanned
			case "is_confirmed":
				row[i] = &t.IsConfirmed
			case "is_employee":
				row[i] = &t.IsEmployee
			case "is_expiring":
				row[i] = &t.IsExpiring
			case "jwt_encryption_key":
				row[i] = &t.JwtEncryptionKey
			case "last_logged_in":
				row[i] = &t.LastLoggedIn
			case "real_name":
				row[i] = &t.RealName
			case "twitter_name":
				row[i] = &t.TwitterName
			case "user_rank":
				row[i] = &t.UserRank
			case "user_self_description":
				row[i] = &t.UserSelfDescription
			case "user_title":
				row[i] = &t.UserTitle
			case "user_unit":
				row[i] = &t.UserUnit
			default:
				log.Fatal("unhandled column: ", columns[i].Name)
			}
		}
		if !iter.Scan(row...) {
			break
		}
		readNext, err := callback(t)
		if err != nil {
			return err
		}
		if !readNext {
			return nil
		}
	}
	return nil
}
// SupportsUpsert reports that this table accepts INSERT-as-upsert writes.
func (s *UsersByEmailDef) SupportsUpsert() bool {
	return true
}

// TableName returns the CQL table name.
func (s *UsersByEmailDef) TableName() string {
	return "users_by_email"
}

// Keyspace returns the keyspace this table lives in.
func (s *UsersByEmailDef) Keyspace() string {
	return "sitrep"
}

// Bind binds the field values of v to their columns for writing.
func (s *UsersByEmailDef) Bind(v UsersByEmail) cqlc.TableBinding {
	// Element type elided per gofmt -s; one binding per generated column.
	cols := []cqlc.ColumnBinding{
		{Column: &UsersByEmailAccessValidTillColumn{}, Value: v.AccessValidTill},
		{Column: &UsersByEmailAnalyticsUserTrackingTokenColumn{}, Value: v.AnalyticsUserTrackingToken},
		{Column: &UsersByEmailEmailColumn{}, Value: v.Email},
		{Column: &UsersByEmailEncryptedPasswordColumn{}, Value: v.EncryptedPassword},
		{Column: &UsersByEmailIsAdminColumn{}, Value: v.IsAdmin},
		{Column: &UsersByEmailIsAnalyzedColumn{}, Value: v.IsAnalyzed},
		{Column: &UsersByEmailIsBannedColumn{}, Value: v.IsBanned},
		{Column: &UsersByEmailIsConfirmedColumn{}, Value: v.IsConfirmed},
		{Column: &UsersByEmailIsEmployeeColumn{}, Value: v.IsEmployee},
		{Column: &UsersByEmailIsExpiringColumn{}, Value: v.IsExpiring},
		{Column: &UsersByEmailJwtEncryptionKeyColumn{}, Value: v.JwtEncryptionKey},
		{Column: &UsersByEmailLastLoggedInColumn{}, Value: v.LastLoggedIn},
		{Column: &UsersByEmailRealNameColumn{}, Value: v.RealName},
		{Column: &UsersByEmailTwitterNameColumn{}, Value: v.TwitterName},
		{Column: &UsersByEmailUserRankColumn{}, Value: v.UserRank},
		{Column: &UsersByEmailUserSelfDescriptionColumn{}, Value: v.UserSelfDescription},
		{Column: &UsersByEmailUserTitleColumn{}, Value: v.UserTitle},
		{Column: &UsersByEmailUserUnitColumn{}, Value: v.UserUnit},
	}
	return cqlc.TableBinding{Table: &UsersByEmailDef{}, Columns: cols}
}

// To binds pointers to v's fields so a row can be scanned into v.
func (s *UsersByEmailDef) To(v *UsersByEmail) cqlc.TableBinding {
	cols := []cqlc.ColumnBinding{
		{Column: &UsersByEmailAccessValidTillColumn{}, Value: &v.AccessValidTill},
		{Column: &UsersByEmailAnalyticsUserTrackingTokenColumn{}, Value: &v.AnalyticsUserTrackingToken},
		{Column: &UsersByEmailEmailColumn{}, Value: &v.Email},
		{Column: &UsersByEmailEncryptedPasswordColumn{}, Value: &v.EncryptedPassword},
		{Column: &UsersByEmailIsAdminColumn{}, Value: &v.IsAdmin},
		{Column: &UsersByEmailIsAnalyzedColumn{}, Value: &v.IsAnalyzed},
		{Column: &UsersByEmailIsBannedColumn{}, Value: &v.IsBanned},
		{Column: &UsersByEmailIsConfirmedColumn{}, Value: &v.IsConfirmed},
		{Column: &UsersByEmailIsEmployeeColumn{}, Value: &v.IsEmployee},
		{Column: &UsersByEmailIsExpiringColumn{}, Value: &v.IsExpiring},
		{Column: &UsersByEmailJwtEncryptionKeyColumn{}, Value: &v.JwtEncryptionKey},
		{Column: &UsersByEmailLastLoggedInColumn{}, Value: &v.LastLoggedIn},
		{Column: &UsersByEmailRealNameColumn{}, Value: &v.RealName},
		{Column: &UsersByEmailTwitterNameColumn{}, Value: &v.TwitterName},
		{Column: &UsersByEmailUserRankColumn{}, Value: &v.UserRank},
		{Column: &UsersByEmailUserSelfDescriptionColumn{}, Value: &v.UserSelfDescription},
		{Column: &UsersByEmailUserTitleColumn{}, Value: &v.UserTitle},
		{Column: &UsersByEmailUserUnitColumn{}, Value: &v.UserUnit},
	}
	return cqlc.TableBinding{Table: &UsersByEmailDef{}, Columns: cols}
}
func (s *UsersByEmailDef) ColumnDefinitions() []cqlc.Column {
return []cqlc.Column{
&UsersByEmailAccessValidTillColumn{},
&UsersByEmailAnalyticsUserTrackingTokenColumn{},
&UsersByEmailEmailColumn{},
&UsersByEmailEncryptedPasswordColumn{},
&UsersByEmailIsAdminColumn{},
&UsersByEmailIsAnalyzedColumn{},
&UsersByEmailIsBannedColumn{},
&UsersByEmailIsConfirmedColumn{},
&UsersByEmailIsEmployeeColumn{},
&UsersByEmailIsExpiringColumn{},
&UsersByEmailJwtEncryptionKeyColumn{},
&UsersByEmailLastLoggedInColumn{},
&UsersByEmailRealNameColumn{},
&UsersByEmailTwitterNameColumn{},
&UsersByEmailUserRankColumn{},
&UsersByEmailUserSelfDescriptionColumn{},
&UsersByEmailUserTitleColumn{},
&UsersByEmailUserUnitColumn{},
}
}
func UsersByEmailTableDef() *UsersByEmailDef {
return &UsersByEmailDef{
ACCESS_VALID_TILL: &UsersByEmailAccessValidTillColumn{},
ANALYTICS_USER_TRACKING_TOKEN: &UsersByEmailAnalyticsUserTrackingTokenColumn{},
EMAIL: &UsersByEmailEmailColumn{},
ENCRYPTED_PASSWORD: &UsersByEmailEncryptedPasswordColumn{},
IS_ADMIN: &UsersByEmailIsAdminColumn{},
IS_ANALYZED: &UsersByEmailIsAnalyzedColumn{},
IS_BANNED: &UsersByEmailIsBannedColumn{},
IS_CONFIRMED: &UsersByEmailIsConfirmedColumn{},
IS_EMPLOYEE: &UsersByEmailIsEmployeeColumn{},
IS_EXPIRING: &UsersByEmailIsExpiringColumn{},
JWT_ENCRYPTION_KEY: &UsersByEmailJwtEncryptionKeyColumn{},
LAST_LOGGED_IN: &UsersByEmailLastLoggedInColumn{},
REAL_NAME: &UsersByEmailRealNameColumn{},
TWITTER_NAME: &UsersByEmailTwitterNameColumn{},
USER_RANK: &UsersByEmailUserRankColumn{},
USER_SELF_DESCRIPTION: &UsersByEmailUserSelfDescriptionColumn{},
USER_TITLE: &UsersByEmailUserTitleColumn{},
USER_UNIT: &UsersByEmailUserUnitColumn{},
}
}
func (s *UsersByEmailDef) AccessValidTillColumn() cqlc.TimestampColumn {
return &UsersByEmailAccessValidTillColumn{}
}
func (s *UsersByEmailDef) AnalyticsUserTrackingTokenColumn() cqlc.StringColumn {
return &UsersByEmailAnalyticsUserTrackingTokenColumn{}
}
func (s *UsersByEmailDef) EmailColumn() cqlc.LastPartitionedStringColumn {
return &UsersByEmailEmailColumn{}
}
func (s *UsersByEmailDef) EncryptedPasswordColumn() cqlc.StringColumn {
return &UsersByEmailEncryptedPasswordColumn{}
}
func (s *UsersByEmailDef) IsAdminColumn() cqlc.BooleanColumn {
return &UsersByEmailIsAdminColumn{}
}
func (s *UsersByEmailDef) IsAnalyzedColumn() cqlc.BooleanColumn {
return &UsersByEmailIsAnalyzedColumn{}
}
func (s *UsersByEmailDef) IsBannedColumn() cqlc.BooleanColumn {
return &UsersByEmailIsBannedColumn{}
}
func (s *UsersByEmailDef) IsConfirmedColumn() cqlc.BooleanColumn {
return &UsersByEmailIsConfirmedColumn{}
}
func (s *UsersByEmailDef) IsEmployeeColumn() cqlc.BooleanColumn {
return &UsersByEmailIsEmployeeColumn{}
}
func (s *UsersByEmailDef) IsExpiringColumn() cqlc.BooleanColumn {
return &UsersByEmailIsExpiringColumn{}
}
func (s *UsersByEmailDef) JwtEncryptionKeyColumn() cqlc.StringColumn {
return &UsersByEmailJwtEncryptionKeyColumn{}
}
func (s *UsersByEmailDef) LastLoggedInColumn() cqlc.TimestampColumn {
return &UsersByEmailLastLoggedInColumn{}
}
func (s *UsersByEmailDef) RealNameColumn() cqlc.StringColumn {
return &UsersByEmailRealNameColumn{}
}
func (s *UsersByEmailDef) TwitterNameColumn() cqlc.StringColumn {
return &UsersByEmailTwitterNameColumn{}
}
func (s *UsersByEmailDef) UserRankColumn() cqlc.StringColumn {
return &UsersByEmailUserRankColumn{}
}
func (s *UsersByEmailDef) UserSelfDescriptionColumn() cqlc.StringColumn {
return &UsersByEmailUserSelfDescriptionColumn{}
}
func (s *UsersByEmailDef) UserTitleColumn() cqlc.StringColumn {
return &UsersByEmailUserTitleColumn{}
}
func (s *UsersByEmailDef) UserUnitColumn() cqlc.StringColumn {
return &UsersByEmailUserUnitColumn{}
}
type UsersByJwtEncryptionKeyColumn struct {
}
func (b *UsersByJwtEncryptionKeyColumn) ColumnName() string {
return "encryption_key"
}
func (b *UsersByJwtEncryptionKeyColumn) To(value *string) cqlc.ColumnBinding {
return cqlc.ColumnBinding{Column: b, Value: value}
}
type UsersByJwtJwtColumn struct {
}
func (b *UsersByJwtJwtColumn) ColumnName() string {
return "jwt"
}
func (b *UsersByJwtJwtColumn) To(value *string) cqlc.ColumnBinding {
return cqlc.ColumnBinding{Column: b, Value: value}
}
func (b *UsersByJwtJwtColumn) Eq(value string) cqlc.Condition {
column := &UsersByJwtJwtColumn{}
binding := cqlc.ColumnBinding{Column: column, Value: value}
return cqlc.Condition{Binding: binding, Predicate: cqlc.EqPredicate}
}
func (b *UsersByJwtJwtColumn) PartitionBy() cqlc.Column {
return b
}
func (b *UsersByJwtJwtColumn) In(value ...string) cqlc.Condition {
column := &UsersByJwtJwtColumn{}
binding := cqlc.ColumnBinding{Column: column, Value: value}
return cqlc.Condition{Binding: binding, Predicate: cqlc.InPredicate}
}
type UsersByJwtUserEmailColumn struct {
}
func (b *UsersByJwtUserEmailColumn) ColumnName() string {
return "user_email"
}
func (b *UsersByJwtUserEmailColumn) To(value *string) cqlc.ColumnBinding {
return cqlc.ColumnBinding{Column: b, Value: value}
}
type UsersByJwtUserNameColumn struct {
}
func (b *UsersByJwtUserNameColumn) ColumnName() string {
return "user_name"
}
func (b *UsersByJwtUserNameColumn) To(value *string) cqlc.ColumnBinding {
return cqlc.ColumnBinding{Column: b, Value: value}
}
type UsersByJwt struct {
EncryptionKey string
Jwt string
UserEmail string
UserName string
}
func (s *UsersByJwt) EncryptionKeyValue() string {
return s.EncryptionKey
}
func (s *UsersByJwt) JwtValue() string {
return s.Jwt
}
func (s *UsersByJwt) UserEmailValue() string {
return s.UserEmail
}
func (s *UsersByJwt) UserNameValue() string {
return s.UserName
}
type UsersByJwtDef struct {
ENCRYPTION_KEY cqlc.StringColumn
JWT cqlc.LastPartitionedStringColumn
USER_EMAIL cqlc.StringColumn
USER_NAME cqlc.StringColumn
}
func BindUsersByJwt(iter *gocql.Iter) ([]UsersByJwt, error) {
array := make([]UsersByJwt, 0)
err := MapUsersByJwt(iter, func(t UsersByJwt) (bool, error) {
array = append(array, t)
return true, nil
})
return array, err
}
// MapUsersByJwt streams rows from iter, binding each row's columns into
// a fresh UsersByJwt and handing it to callback. Iteration stops when the
// iterator is exhausted, when callback returns an error (propagated), or
// when callback returns false for its "read next" result.
//
// NOTE: generated cqlc binding code — prefer regenerating over hand edits.
func MapUsersByJwt(iter *gocql.Iter, callback func(t UsersByJwt) (bool, error)) error {
	columns := iter.Columns()
	row := make([]interface{}, len(columns))
	for {
		t := UsersByJwt{}
		// Point each scan slot at the matching field of the fresh struct.
		for i := 0; i < len(columns); i++ {
			switch columns[i].Name {
			case "encryption_key":
				row[i] = &t.EncryptionKey
			case "jwt":
				row[i] = &t.Jwt
			case "user_email":
				row[i] = &t.UserEmail
			case "user_name":
				row[i] = &t.UserName
			default:
				// Schema drift between the generated code and the live
				// table is unrecoverable here.
				log.Fatal("unhandled column: ", columns[i].Name)
			}
		}
		if !iter.Scan(row...) {
			break
		}
		readNext, err := callback(t)
		if err != nil {
			return err
		}
		if !readNext {
			return nil
		}
	}
	return nil
}
func (s *UsersByJwtDef) SupportsUpsert() bool {
return true
}
func (s *UsersByJwtDef) TableName() string {
return "users_by_jwt"
}
func (s *UsersByJwtDef) Keyspace() string {
return "sitrep"
}
func (s *UsersByJwtDef) Bind(v UsersByJwt) cqlc.TableBinding {
cols := []cqlc.ColumnBinding{
cqlc.ColumnBinding{Column: &UsersByJwtEncryptionKeyColumn{}, Value: v.EncryptionKey},
cqlc.ColumnBinding{Column: &UsersByJwtJwtColumn{}, Value: v.Jwt},
cqlc.ColumnBinding{Column: &UsersByJwtUserEmailColumn{}, Value: v.UserEmail},
cqlc.ColumnBinding{Column: &UsersByJwtUserNameColumn{}, Value: v.UserName},
}
return cqlc.TableBinding{Table: &UsersByJwtDef{}, Columns: cols}
}
func (s *UsersByJwtDef) To(v *UsersByJwt) cqlc.TableBinding {
cols := []cqlc.ColumnBinding{
cqlc.ColumnBinding{Column: &UsersByJwtEncryptionKeyColumn{}, Value: &v.EncryptionKey},
cqlc.ColumnBinding{Column: &UsersByJwtJwtColumn{}, Value: &v.Jwt},
cqlc.ColumnBinding{Column: &UsersByJwtUserEmailColumn{}, Value: &v.UserEmail},
cqlc.ColumnBinding{Column: &UsersByJwtUserNameColumn{}, Value: &v.UserName},
}
return cqlc.TableBinding{Table: &UsersByJwtDef{}, Columns: cols}
}
func (s *UsersByJwtDef) ColumnDefinitions() []cqlc.Column {
return []cqlc.Column{
&UsersByJwtEncryptionKeyColumn{},
&UsersByJwtJwtColumn{},
&UsersByJwtUserEmailColumn{},
&UsersByJwtUserNameColumn{},
}
}
func UsersByJwtTableDef() *UsersByJwtDef {
return &UsersByJwtDef{
ENCRYPTION_KEY: &UsersByJwtEncryptionKeyColumn{},
JWT: &UsersByJwtJwtColumn{},
USER_EMAIL: &UsersByJwtUserEmailColumn{},
USER_NAME: &UsersByJwtUserNameColumn{},
}
}
func (s *UsersByJwtDef) EncryptionKeyColumn() cqlc.StringColumn {
return &UsersByJwtEncryptionKeyColumn{}
}
func (s *UsersByJwtDef) JwtColumn() cqlc.LastPartitionedStringColumn {
return &UsersByJwtJwtColumn{}
}
func (s *UsersByJwtDef) UserEmailColumn() cqlc.StringColumn {
return &UsersByJwtUserEmailColumn{}
}
func (s *UsersByJwtDef) UserNameColumn() cqlc.StringColumn {
return &UsersByJwtUserNameColumn{}
} | schema/sitrep.go | 0.569972 | 0.451085 | sitrep.go | starcoder |
package boid
import (
"math"
v "github.com/BozeBro/boids/vector"
"github.com/hajimehoshi/ebiten/v2"
)
// Boid is the behaviour contract for a flock member: advance one tick,
// draw itself, expose its kinematic state, and accept a precomputed
// next-tick state.
type Boid interface {
	Update(float64, float64, []Boid, int, chan *Data)
	Draw(*ebiten.Image)
	Coords() v.Vector2D
	Velocity() v.Vector2D
	Apply(*v.Vector2D, *v.Vector2D)
}

// Data carries the next-tick state for the boid at Index. Update sends
// one of these per boid so new states can be applied after every boid
// has been processed against the same snapshot.
type Data struct {
	Index int // position of this boid in the population slice
	NewPos *v.Vector2D
	NewVel *v.Vector2D
}

// Arrow is the concrete Boid implementation rendered as an arrow sprite.
type Arrow struct {
	ImageWidth int // sprite width in pixels
	ImageHeight int // sprite height in pixels
	SightDis float64 // perception radius / wall look-ahead distance
	SightAngle float64 // field-of-view angle in radians (used via its cosine)
	Pos *v.Vector2D // current position
	Vel *v.Vector2D // current velocity
	Accel *v.Vector2D // per-tick force accumulator, reset after each Update
	Image *ebiten.Image
}
// Teleport wraps a single coordinate to the opposite side of the screen
// when it leaves the [0, edge] range, giving the playfield torus-like
// topology. Coordinates already in range are returned unchanged.
func Teleport(pos, edge float64) float64 {
	switch {
	case pos < 0:
		return edge
	case pos > edge:
		return 0.
	default:
		return pos
	}
}
// avoidWalls returns a steering force that pushes the boid away from any
// screen edge (of the sx-by-sy playfield) that its look-ahead ray would
// hit. For every wall the ray intersects, the velocity is rotated ±90°
// (direction chosen by the sign of the velocity component parallel to
// that wall), scaled by the distance to the intersection point, and
// accumulated into the result.
//
// NOTE(review): the four wall branches are near-duplicates differing
// only in which velocity component picks the turn direction — a good
// candidate for extraction into a helper.
func (a *Arrow) avoidWalls(sx, sy float64) *v.Vector2D {
	steering := &v.Vector2D{0, 0}
	theta := v.AngleReg(*a.Vel)
	// End point of the look-ahead ray, SightDis pixels along the heading.
	heading := v.Vector2D{a.Pos.X + a.SightDis*math.Cos(theta), a.Pos.Y + a.SightDis*math.Sin(theta)}
	topL := v.Vector2D{0, 0}
	topR := v.Vector2D{sx, 0}
	botL := v.Vector2D{0, sy}
	botR := v.Vector2D{sx, sy}
	// Intersection test of the look-ahead ray against each wall segment:
	// l=left, u=up(top), r=right, d=down(bottom).
	var (
		lt, _, lbool = v.IsIntersect(*a.Pos, heading, topL, v.Vector2D{0, sy - 1})
		ut, _, ubool = v.IsIntersect(*a.Pos, heading, v.Vector2D{1, 0}, topR)
		rt, _, rbool = v.IsIntersect(*a.Pos, heading, v.Vector2D{sx - 1, 0}, botR)
		dt, _, dbool = v.IsIntersect(*a.Pos, heading, botL, v.Vector2D{sx - 1, sy})
	)
	//var x, y float64
	if lbool {
		// Left wall: turn away, rotation sign chosen by vertical motion.
		steerAway := *a.Vel
		x, y := v.IntersectionPoint(*a.Pos, heading, lt)
		d := v.Distance(*a.Pos, v.Vector2D{x, y})
		//wall := v.Vector2D{x, y}
		if a.Vel.Y < 0 {
			v.RotatePoints(math.Pi/2, v.Vector2D{}, &steerAway)
		} else {
			v.RotatePoints(-math.Pi/2, v.Vector2D{}, &steerAway)
		}
		steerAway.Multiply(d)
		steering.Add(steerAway)
	}
	if ubool {
		// Top wall: rotation sign chosen by horizontal motion.
		steerAway := *a.Vel
		x, y := v.IntersectionPoint(*a.Pos, heading, ut)
		d := v.Distance(*a.Pos, v.Vector2D{x, y})
		//wall = v.Vector2D{x, y}
		if a.Vel.X < 0 {
			v.RotatePoints(-math.Pi/2, v.Vector2D{}, &steerAway)
		} else {
			v.RotatePoints(math.Pi/2, v.Vector2D{}, &steerAway)
		}
		steerAway.Multiply(d)
		steering.Add(steerAway)
	}
	if rbool {
		// Right wall: mirror image of the left-wall case.
		steerAway := *a.Vel
		x, y := v.IntersectionPoint(*a.Pos, heading, rt)
		d := v.Distance(*a.Pos, v.Vector2D{x, y})
		//wall = v.Vector2D{x, y}
		if a.Vel.Y < 0 {
			v.RotatePoints(-math.Pi/2, v.Vector2D{}, &steerAway)
		} else {
			v.RotatePoints(math.Pi/2, v.Vector2D{}, &steerAway)
		}
		steerAway.Multiply(d)
		steering.Add(steerAway)
	}
	if dbool {
		// Bottom wall: mirror image of the top-wall case.
		steerAway := *a.Vel
		x, y := v.IntersectionPoint(*a.Pos, heading, dt)
		d := v.Distance(*a.Pos, v.Vector2D{x, y})
		//wall = v.Vector2D{x, y}
		if a.Vel.X < 0 {
			v.RotatePoints(math.Pi/2, v.Vector2D{}, &steerAway)
		} else {
			v.RotatePoints(-math.Pi/2, v.Vector2D{}, &steerAway)
		}
		steerAway.Multiply(d)
		steering.Add(steerAway)
	}
	//steering.SetMagnitude(4)
	//steering.Limit(1.)
	return steering
}
// Update advances the boid one simulation tick: it wraps the position at
// the screen edges, accumulates the three flocking forces plus wall
// avoidance into the acceleration, and sends the candidate next state
// over info rather than applying it immediately — presumably so every
// boid is updated against the same snapshot (Apply installs it later).
func (a *Arrow) Update(sx, sy float64, population []Boid, index int, info chan *Data) {
	a.Pos.X = Teleport(a.Pos.X, sx)
	a.Pos.Y = Teleport(a.Pos.Y, sy)
	align, cohesion, separation := a.rules(population)
	a.Accel.Add(*align)
	a.Accel.Add(*cohesion)
	a.Accel.Add(*separation)
	//a.Accel.Limit(0.3)
	a.Accel.SetMagnitude(1.)
	avoid := a.avoidWalls(sx, sy)
	a.Accel = &v.Vector2D{
		X: a.Accel.X + avoid.X,
		Y: a.Accel.Y + avoid.Y,
	}
	// Candidate next state; the current Pos/Vel stay untouched until Apply.
	newPos := &v.Vector2D{a.Pos.X + a.Vel.X, a.Pos.Y + a.Vel.Y}
	newVel := &v.Vector2D{a.Vel.X + a.Accel.X, a.Vel.Y + a.Accel.Y}
	// Reset the force accumulator for the next tick and clamp top speed.
	a.Accel = &v.Vector2D{0, 0}
	newVel.Limit(4)
	data := &Data{
		Index: index,
		NewPos: newPos,
		NewVel: newVel,
	}
	info <- data
}
// Draw renders the arrow sprite onto screen, centred on the boid's
// position and rotated to face its direction of travel.
func (a *Arrow) Draw(screen *ebiten.Image) {
	option := &ebiten.DrawImageOptions{}
	// Do this translation so PosX, PosY is near the center of the arrow.
	option.GeoM.Translate(-1*float64(a.ImageWidth)/2, -1*float64(a.ImageHeight)/2)
	// Don't rotate if vectors are nil
	if a.Vel.X != 0 || a.Vel.Y != 0 {
		theta := v.Angle(*a.Vel)
		option.GeoM.Rotate(theta)
	}
	option.GeoM.Translate(a.Pos.X, a.Pos.Y)
	screen.DrawImage(a.Image, option)
}
// Apply installs the precomputed next-tick position and velocity
// produced by Update.
func (a *Arrow) Apply(newPos, newVel *v.Vector2D) {
	a.Pos = newPos
	a.Vel = newVel
}
// align steers the boid toward the average velocity of neighbours within
// SightDis.
//
// NOTE(review): appears superseded by the fused rules() method, which
// computes the same force in a single pass — confirm before removing.
func (a *Arrow) align(population []Boid) v.Vector2D {
	maxspeed := 5.
	maxforce := 0.1
	var counter int
	steering := v.Vector2D{}
	for _, boid := range population {
		pos := boid.Coords()
		d := v.Distance(*a.Pos, pos)
		if boid != a && d <= float64(a.SightDis) {
			counter++
			steering.Add(boid.Velocity())
		}
	}
	// Average, scale to the target speed, steer relative to the current
	// velocity, then clamp the force.
	if counter > 0 {
		steering.Divide(float64(counter))
		steering.SetMagnitude(maxspeed)
		steering.Subtract(*a.Vel)
		steering.Limit(maxforce)
	}
	return steering
}
// cohesion steers the boid toward the centre of mass of neighbours
// within SightDis.
//
// NOTE(review): appears superseded by the fused rules() method — confirm
// before removing.
func (a *Arrow) cohesion(population []Boid) v.Vector2D {
	maxspeed := 5.
	maxforce := 0.07
	var counter int
	steering := v.Vector2D{}
	for _, boid := range population {
		d := v.Distance(*a.Pos, boid.Coords())
		if boid != a && d <= float64(a.SightDis) {
			counter++
			steering.Add(boid.Coords())
		}
	}
	if counter > 0 {
		steering.Divide(float64(counter))
		steering.Subtract(*a.Pos)
		steering.SetMagnitude(maxspeed)
		steering.Subtract(*a.Vel)
		// NOTE(review): uses SetMagnitude here where align uses Limit —
		// confirm the asymmetry is intentional.
		steering.SetMagnitude(maxforce * .8)
	}
	return steering
}
// separation steers the boid away from neighbours closer than the local
// perception radius, weighting each repulsion by 1/distance so closer
// neighbours repel more strongly.
//
// NOTE(review): appears superseded by the fused rules() method — confirm
// before removing.
func (a *Arrow) separation(population []Boid) v.Vector2D {
	maxspeed := 6.
	maxforce := 0.03
	var counter int
	perception := 50.
	steering := v.Vector2D{}
	for _, boid := range population {
		d := v.Distance(*a.Pos, boid.Coords())
		if boid != a && d <= perception {
			counter++
			diff := *a.Pos
			diff.Subtract(boid.Coords())
			// Weight by inverse distance.
			diff.Divide(d)
			steering.Add(diff)
		}
	}
	if counter > 0 {
		steering.Divide(float64(counter))
		steering.SetMagnitude(maxspeed)
		steering.Subtract(*a.Vel)
		steering.SetMagnitude(maxforce * 1.4)
	}
	return steering
}
// rules computes the three classic boid steering forces — alignment,
// cohesion and separation — against every other boid in population,
// fused into a single pass so distances and visibility are evaluated
// once per neighbour.
//
// Only boids inside the field of view (cosine test against SightAngle)
// and within each rule's perception radius contribute. Each returned
// vector is averaged, speed-normalised against that rule's target speed
// and clamped to its maximum force.
func (a *Arrow) rules(population []Boid) (steeringA, steeringC, steeringS *v.Vector2D) {
	steeringA, steeringC, steeringS = &v.Vector2D{}, &v.Vector2D{}, &v.Vector2D{}
	maxspeedA, maxspeedC, maxspeedS := 4., 4., 4.
	maxforceA, maxforceC, maxforceS := 1., 0.9, 1.5
	perceptionA, perceptionC, perceptionS := 75., 70., 50.
	var counterA, counterC, counterS int
	cosView := math.Cos(a.SightAngle)
	// Projected position one tick ahead, used as the view direction.
	newPos := &v.Vector2D{a.Pos.X + a.Vel.X, a.Pos.Y + a.Vel.Y}
	for _, boid := range population {
		pos := boid.Coords()
		// NOTE(review): IsSeen appears to return the cosine of the angle
		// between the heading and the other boid — confirm in vector pkg.
		angle := v.IsSeen(a.Pos, newPos, &pos)
		if boid != a && angle > cosView {
			d := v.Distance(*a.Pos, pos)
			if d <= perceptionA {
				counterA++
				steeringA.Add(boid.Velocity())
			}
			if d <= perceptionC {
				counterC++
				// Reuse pos instead of a second Coords() interface call.
				steeringC.Add(pos)
			}
			if d <= perceptionS {
				counterS++
				diff := *a.Pos
				diff.Subtract(pos)
				steeringS.Add(diff)
			}
		}
	}
	// For each rule: average the accumulated vector, scale to the target
	// speed, steer relative to the current velocity, clamp the force.
	if counterA > 0 {
		steeringA.Divide(float64(counterA))
		steeringA.SetMagnitude(maxspeedA)
		steeringA.Subtract(*a.Vel)
		steeringA.Limit(maxforceA)
	}
	if counterC > 0 {
		steeringC.Divide(float64(counterC))
		steeringC.Subtract(*a.Pos)
		steeringC.SetMagnitude(maxspeedC)
		steeringC.Subtract(*a.Vel)
		steeringC.Limit(maxforceC)
	}
	if counterS > 0 {
		steeringS.Divide(float64(counterS))
		steeringS.SetMagnitude(maxspeedS)
		steeringS.Subtract(*a.Vel)
		steeringS.Limit(maxforceS)
	}
	return steeringA, steeringC, steeringS
}
/* func (a *Arrow) avoidWalls(sx, sy float64) {
//theta := math.Atan2(a.Pos.X, a.Pos.Y)
} */
// Coords returns a copy of the boid's current position.
func (a *Arrow) Coords() v.Vector2D {
	return *a.Pos
}

// Velocity returns a copy of the boid's current velocity.
func (a *Arrow) Velocity() v.Vector2D {
	return *a.Vel
}
package geom
// Rect is an axis-aligned rectangle in screen coordinates (Y grows
// downward): the embedded Vec is the top-left corner, Size the
// width/height.
type Rect struct {
	Vec // Position (contains X,Y)
	Size Vec // Size (X,Y)
}

// Pos returns the top-left corner of the rectangle.
func (rect *Rect) Pos() Vec {
	return rect.Vec
}

// Edge accessors in screen coordinates: Top is the smallest Y and
// Bottom the largest.
func (rect *Rect) Left() float64 { return rect.Vec.X }
func (rect *Rect) Right() float64 { return rect.Vec.X + rect.Size.X }
func (rect *Rect) Top() float64 { return rect.Vec.Y }
func (rect *Rect) Bottom() float64 { return rect.Vec.Y + rect.Size.Y }
// DistancePoint returns the shortest distance from point to the
// rectangle; a point inside or on the border of the rectangle has
// distance 0.
func (rect *Rect) DistancePoint(point Vec) float64 {
	// Per-axis overshoot beyond the rectangle's extent (0 when the
	// coordinate already lies within it), combined as a Euclidean norm.
	yDist := 0.0
	if point.Y < rect.Top() {
		yDist = rect.Top() - point.Y
	} else if point.Y > rect.Bottom() {
		yDist = point.Y - rect.Bottom()
	}
	xDist := 0.0
	if point.X < rect.Left() {
		xDist = rect.Left() - point.X
	} else if point.X > rect.Right() {
		xDist = point.X - rect.Right()
	}
	return Vec{xDist, yDist}.Norm()
}
// DistanceRect returns the shortest distance in pixels between two
// rectangles; overlapping rectangles have distance 0. Useful for seeing
// how far an object is from another object.
//
// Coordinates follow screen convention: Y grows downward, so Top() is
// the smaller Y and Bottom() the larger.
func (rect *Rect) DistanceRect(otherRect Rect) float64 {
	// source: https://stackoverflow.com/questions/4978323/how-to-calculate-distance-between-two-rectangles-context-a-game-in-lua
	left := otherRect.Right() < rect.Left()   // otherRect entirely to the left
	right := rect.Right() < otherRect.Left()  // otherRect entirely to the right
	bottom := otherRect.Bottom() < rect.Top() // otherRect entirely above (screen coords)
	top := rect.Bottom() < otherRect.Top()    // otherRect entirely below (screen coords)
	switch {
	case top && left:
		// dist((x1, y1b), (x2b, y2))
		return Vec{rect.Left(), rect.Bottom()}.DistancePoint(Vec{otherRect.Right(), otherRect.Top()})
	case left && bottom:
		// dist((x1, y1), (x2b, y2b))
		return Vec{rect.Left(), rect.Top()}.DistancePoint(Vec{otherRect.Right(), otherRect.Bottom()})
	case bottom && right:
		// dist((x1b, y1), (x2, y2b))
		return Vec{rect.Right(), rect.Top()}.DistancePoint(Vec{otherRect.Left(), otherRect.Bottom()})
	case right && top:
		// dist((x1b, y1b), (x2, y2))
		// BUG FIX: previously measured to otherRect's bottom-left corner;
		// the nearest corner in this configuration is its top-left.
		return Vec{rect.Right(), rect.Bottom()}.DistancePoint(Vec{otherRect.Left(), otherRect.Top()})
	case left:
		// x1 - x2b
		return rect.Left() - otherRect.Right()
	case right:
		// x2 - x1b
		return otherRect.Left() - rect.Right()
	case bottom:
		// y1 - y2b
		return rect.Top() - otherRect.Bottom()
	case top:
		// y2 - y1b
		return otherRect.Top() - rect.Bottom()
	}
	// Rectangles overlap on both axes.
	return 0
}
// CollisionPoint reports whether pos lies strictly inside the
// rectangle; points exactly on the border do not collide.
func (rect *Rect) CollisionPoint(pos Vec) bool {
	insideX := rect.Left() < pos.X && pos.X < rect.Right()
	insideY := rect.Top() < pos.Y && pos.Y < rect.Bottom()
	return insideX && insideY
}
func (r1 Rect) CollisionRectangle(r2 Rect) bool {
return r1.Right() > r2.Left() && r1.Bottom() > r2.Top() &&
r1.Left() < r2.Right() && r1.Top() < r2.Bottom()
} | gml/internal/geom/rect.go | 0.867738 | 0.568416 | rect.go | starcoder |
package interpreter
import (
"fmt"
"log"
"strconv"
"strings"
)
type Token interface {
Eval(c Context) (Token, bool)
String() string
Galaxy() string
}
type Value interface {
Token
Value() int64
}
type Var interface {
Token
Get(c Context) Token
}
type Func interface {
Token
Apply(v Token) Token
}
type Ap struct{}
func (t Ap) Eval(c Context) (Token, bool) {
return t, false
}
func (t Ap) String() string {
return "ap"
}
func (t Ap) Galaxy() string {
return "ap"
}
type Ap2 struct {
F Token
A Token
}
func (t Ap2) Apply(v Token) Token {
return Ap2{F: t, A: v}
}
func (t Ap2) Eval(c Context) (Token, bool) {
f := c.Eval(t.F).(Func)
r := f.Apply(t.A)
// log.Printf("%s => %s", t, r)
return r, true
}
func (t Ap2) Galaxy() string {
return fmt.Sprintf("ap %s %s", t.F.Galaxy(), t.A.Galaxy())
}
func (t Ap2) String() string {
return fmt.Sprintf("(ap %s %s)", t.F, t.A)
}
type VarN struct {
N int
}
func (t VarN) Eval(c Context) (Token, bool) {
return c.GetVar(t.N), true
}
func (t VarN) String() string {
return fmt.Sprintf(":%d", t.N)
}
func (t VarN) Galaxy() string {
return fmt.Sprintf(":%d", t.N)
}
type Int struct {
V int64
}
func (t Int) Value() int64 {
return t.V
}
func (t Int) Eval(c Context) (Token, bool) {
return t, false
}
func (t Int) String() string {
return strconv.FormatInt(t.V, 10)
}
func (t Int) Galaxy() string {
return strconv.FormatInt(t.V, 10)
}
type Inc struct{}
type Inc1 struct {
X0 Token
}
func (t Inc) Apply(v Token) Token {
return Inc1{X0: v}
}
func (t Inc1) Eval(c Context) (Token, bool) {
v := c.Eval(t.X0).(Int)
r := Int{V: v.V + 1}
// log.Printf("%s => %s", t, r)
return r, false
}
func (t Inc) Eval(c Context) (Token, bool) {
return t, false
}
func (t Inc) String() string {
return "inc"
}
func (t Inc) Galaxy() string {
return "inc"
}
func (t Inc1) String() string {
return fmt.Sprintf("(inc %s)", t.X0)
}
func (t Inc1) Galaxy() string {
return fmt.Sprintf("ap inc %s", t.X0.Galaxy())
}
type Dec struct{}
type Dec1 struct {
X0 Token
}
func (t Dec) Apply(v Token) Token {
return Dec1{X0: v}
}
func (t Dec1) Eval(c Context) (Token, bool) {
v := c.Eval(t.X0).(Int)
r := Int{V: v.V - 1}
// log.Printf("%s => %s", t, r)
return r, false
}
func (t Dec) Eval(c Context) (Token, bool) {
return t, false
}
func (t Dec) String() string {
return "dec"
}
func (t Dec1) String() string {
return fmt.Sprintf("(dec %s)", t.X0)
}
func (t Dec) Galaxy() string {
return "dec"
}
func (t Dec1) Galaxy() string {
return fmt.Sprintf("ap dec %s", t.X0.Galaxy())
}
type Add struct{}
type Add1 struct {
X0 Token
}
type Add2 struct {
X0 Token
X1 Token
}
func (t Add) Apply(v Token) Token {
return Add1{X0: v}
}
func (t Add1) Apply(v Token) Token {
return Add2{X0: t.X0, X1: v}
}
func (t Add2) Eval(c Context) (Token, bool) {
x0 := c.Eval(t.X0).(Int)
x1 := c.Eval(t.X1).(Int)
r := Int{V: x0.V + x1.V}
// log.Printf("%s => %s", t, r)
return r, false
}
func (t Add) Eval(c Context) (Token, bool) {
return t, false
}
func (t Add1) Eval(c Context) (Token, bool) {
return t, false
}
func (t Add) String() string {
return "add"
}
func (t Add1) String() string {
return fmt.Sprintf("(add1 %s)", t.X0)
}
func (t Add2) String() string {
return fmt.Sprintf("(add2 %s %s)", t.X0, t.X1)
}
func (t Add) Galaxy() string {
return "add"
}
func (t Add1) Galaxy() string {
return fmt.Sprintf("ap add %s", t.X0.Galaxy())
}
func (t Add2) Galaxy() string {
return fmt.Sprintf("ap ap add %s %s", t.X0.Galaxy(), t.X1.Galaxy())
}
type Mul struct{}
type Mul1 struct {
X0 Token
}
type Mul2 struct {
X0 Token
X1 Token
}
func (t Mul) Apply(v Token) Token {
return Mul1{X0: v}
}
func (t Mul1) Apply(v Token) Token {
return Mul2{X0: t.X0, X1: v}
}
func (t Mul2) Eval(c Context) (Token, bool) {
x0 := c.Eval(t.X0).(Int)
x1 := c.Eval(t.X1).(Int)
r := Int{V: x0.V * x1.V}
// log.Printf("%s => %s", t, r)
return r, false
}
func (t Mul) Eval(c Context) (Token, bool) {
return t, false
}
func (t Mul1) Eval(c Context) (Token, bool) {
return t, false
}
func (t Mul) String() string {
return "mul"
}
func (t Mul1) String() string {
return fmt.Sprintf("(mul1 %s)", t.X0)
}
func (t Mul2) String() string {
return fmt.Sprintf("(mul2 %s %s)", t.X0, t.X1)
}
func (t Mul) Galaxy() string {
return "mul"
}
func (t Mul1) Galaxy() string {
return fmt.Sprintf("ap mul %s", t.X0.Galaxy())
}
func (t Mul2) Galaxy() string {
return fmt.Sprintf("ap ap mul %s %s", t.X0.Galaxy(), t.X1.Galaxy())
}
type Div struct{}
type Div1 struct {
X0 Token
}
type Div2 struct {
X0 Token
X1 Token
}
func (t Div) Apply(v Token) Token {
return Div1{X0: v}
}
func (t Div1) Apply(v Token) Token {
return Div2{X0: t.X0, X1: v}
}
func (t Div2) Eval(c Context) (Token, bool) {
x0 := c.Eval(t.X0).(Int)
x1 := c.Eval(t.X1).(Int)
r := Int{V: x0.V / x1.V}
// log.Printf("%s => %s", t, r)
return r, false
}
func (t Div) Eval(c Context) (Token, bool) {
return t, false
}
func (t Div1) Eval(c Context) (Token, bool) {
return t, false
}
func (t Div) String() string {
return "div"
}
func (t Div1) String() string {
return fmt.Sprintf("(div1 %s)", t.X0)
}
func (t Div2) String() string {
return fmt.Sprintf("(div2 %s %s)", t.X0, t.X1)
}
func (t Div) Galaxy() string {
return "div"
}
func (t Div1) Galaxy() string {
return fmt.Sprintf("ap div %s", t.X0.Galaxy())
}
func (t Div2) Galaxy() string {
return fmt.Sprintf("ap ap div %s %s", t.X0.Galaxy(), t.X1.Galaxy())
}
type Eq struct{}
type Eq1 struct {
X0 Token
}
type Eq2 struct {
X0 Token
X1 Token
}
func (t Eq) Apply(v Token) Token {
return Eq1{X0: v}
}
func (t Eq1) Apply(v Token) Token {
return Eq2{X0: t.X0, X1: v}
}
func (t Eq2) Eval(c Context) (Token, bool) {
x0 := c.Eval(t.X0).(Int)
x1 := c.Eval(t.X1).(Int)
var r Token = False{}
if x0.V == x1.V {
r = True{}
}
// log.Printf("%s => %s", t, r)
return r, false
}
func (t Eq) Eval(c Context) (Token, bool) {
return t, false
}
func (t Eq1) Eval(c Context) (Token, bool) {
return t, false
}
func (t Eq) String() string {
return "eq"
}
func (t Eq1) String() string {
return fmt.Sprintf("(eq1 %s)", t.X0)
}
func (t Eq2) String() string {
return fmt.Sprintf("(eq2 %s %s)", t.X0, t.X1)
}
func (t Eq) Galaxy() string {
return "eq"
}
func (t Eq1) Galaxy() string {
return fmt.Sprintf("ap eq %s", t.X0.Galaxy())
}
func (t Eq2) Galaxy() string {
return fmt.Sprintf("ap ap eq %s %s", t.X0.Galaxy(), t.X1.Galaxy())
}
type Lt struct{}
type Lt1 struct {
X0 Token
}
type Lt2 struct {
X0 Token
X1 Token
}
func (t Lt) Apply(v Token) Token {
return Lt1{X0: v}
}
func (t Lt1) Apply(v Token) Token {
return Lt2{X0: t.X0, X1: v}
}
func (t Lt2) Eval(c Context) (Token, bool) {
x0 := c.Eval(t.X0).(Int)
x1 := c.Eval(t.X1).(Int)
var r Token = False{}
if x0.V < x1.V {
r = True{}
}
// log.Printf("%s => %s", t, r)
return r, false
}
func (t Lt) Eval(c Context) (Token, bool) {
return t, false
}
func (t Lt1) Eval(c Context) (Token, bool) {
return t, false
}
func (t Lt) String() string {
return "lt"
}
func (t Lt1) String() string {
return fmt.Sprintf("(lt1 %s)", t.X0)
}
func (t Lt2) String() string {
return fmt.Sprintf("(lt2 %s %s)", t.X0, t.X1)
}
func (t Lt) Galaxy() string {
return "lt"
}
func (t Lt1) Galaxy() string {
return fmt.Sprintf("ap lt %s", t.X0.Galaxy())
}
func (t Lt2) Galaxy() string {
return fmt.Sprintf("ap ap lt %s %s", t.X0.Galaxy(), t.X1.Galaxy())
}
// ModulateToken serialises an evaluated token into the galaxy-language
// "modulated" bit-string: integers via modInt, cons cells and nil via
// modCons. Any other token type is a programming error and panics.
func ModulateToken(v Token) string {
	switch tt := v.(type) { // TailEval?
	case Int:
		return modInt(tt.V)
	case ICons:
		return modCons(tt)
	default:
		log.Panicf("Invalid `modulate` argument: %s", v)
	}
	// Unreachable: Panicf above never returns.
	return "ERROR"
}
func modInt(v int64) string {
if v == 0 {
return "010"
}
prefix := "01"
if v < 0 {
prefix = "10"
v = -v
}
rs := []string{prefix}
s := strconv.FormatInt(v, 2)
n := (len(s) + 3) / 4
rs = append(rs, strings.Repeat("1", n), "0")
m := (4 * n) - len(s)
if m > 0 {
rs = append(rs, strings.Repeat("0", m))
}
rs = append(rs, s)
return strings.Join(rs, "")
}
// modCons encodes a list cell: nil is "00", a pair is "11" followed by
// the modulated car and cdr (recursing via ModulateToken).
func modCons(v ICons) string {
	if v.IsNil() {
		return "00"
	}
	return "11" + ModulateToken(v.Car()) + ModulateToken(v.Cdr())
}
// DemodulateToken parses a complete modulated bit-string back into a
// token tree, panicking if any bits remain after the first well-formed
// value.
func DemodulateToken(v string) Token {
	r, s := demodToken(v)
	if len(s) > 0 {
		log.Panicf("Extra tail on demod %s\n=> %s\n++ %s", v, r, s)
	}
	return r
}
// demodToken consumes one value from the front of a modulated bit-string
// and returns it together with the unconsumed remainder.
//
// Prefixes: "00" = nil, "11" = cons cell (car then cdr), anything else
// is a signed integer. An empty input yields a nil Token — callers are
// presumably trusted to pass well-formed strings; TODO confirm.
func demodToken(v string) (Token, string) {
	if len(v) == 0 {
		return nil, v
	}
	prefix, w := v[0:2], v[2:]
	if prefix == "00" {
		return Nil{}, w
	}
	if prefix == "11" {
		car, w := demodToken(w)
		cdr, w := demodToken(w)
		return Cons2{X0: car, X1: cdr}, w
	}
	// Not a list marker: must be an integer (demodInt re-reads the prefix).
	return demodInt(v)
}
// demodInt decodes one integer from the front of a modulated bit-string
// (the inverse of modInt) and returns it with the remaining bits.
//
// NOTE(review): assumes well-formed input — a truncated string panics on
// the w[0] index in the marker loop below.
func demodInt(v string) (Token, string) {
	var negative bool
	prefix, w := v[0:2], v[2:]
	switch prefix {
	case "01":
		negative = false
	case "10":
		negative = true
	default:
		log.Panicf("Invalid modulated int prefix: %#v", v)
	}
	// Unary length marker: each leading '1' adds one 4-bit nibble.
	nlen := 0
	for ; w[0] == '1'; w = w[1:] {
		nlen += 4
	}
	// Skip the terminating '0' of the marker.
	w = w[1:]
	if nlen == 0 {
		return Int{V: 0}, w
	}
	num, w := w[:nlen], w[nlen:]
	n, err := strconv.ParseInt(num, 2, 64)
	if err != nil {
		log.Panic(err)
	}
	if negative {
		n = -n
	}
	return Int{V: n}, w
}
type Signal struct {
S string
}
func (t Signal) Eval(c Context) (Token, bool) {
return t, false
}
func (t Signal) String() string {
return fmt.Sprintf("%#v", t.S)
}
func (t Signal) Galaxy() string {
return fmt.Sprintf("%#v", t.S)
}
type Modulate struct{}
type Modulate1 struct {
X0 Token
}
func (t Modulate) Apply(v Token) Token {
return Modulate1{X0: v}
}
func (t Modulate1) Eval(c Context) (Token, bool) {
x0 := c.Eval(t.X0)
r := Signal{S: ModulateToken(x0)}
// log.Printf("%s => %s", t, r)
return r, false
}
func (t Modulate) Eval(c Context) (Token, bool) {
return t, false
}
func (t Modulate) String() string {
return "mod"
}
func (t Modulate1) String() string {
return fmt.Sprintf("(mod %s)", t.X0)
}
func (t Modulate) Galaxy() string {
return "mod"
}
func (t Modulate1) Galaxy() string {
return fmt.Sprintf("ap mod %s", t.X0.Galaxy())
}
type Demodulate struct{}
type Demodulate1 struct {
X0 Token
}
func (t Demodulate) Apply(v Token) Token {
return Demodulate1{X0: v}
}
func (t Demodulate1) Eval(c Context) (Token, bool) {
x0 := c.Eval(t.X0).(Signal).S
r := DemodulateToken(x0)
// log.Printf("%s => %s", t, r)
return r, false
}
func (t Demodulate) Eval(c Context) (Token, bool) {
return t, false
}
func (t Demodulate) String() string {
return "dem"
}
func (t Demodulate1) String() string {
return fmt.Sprintf("(dem %s)", t.X0)
}
func (t Demodulate) Galaxy() string {
return "dem"
}
func (t Demodulate1) Galaxy() string {
return fmt.Sprintf("ap dem %s", t.X0.Galaxy())
}
type Send struct{}
type Send1 struct {
X0 Token
}
func (t Send) Apply(v Token) Token {
return Send1{X0: v}
}
func (t Send1) Eval(c Context) (Token, bool) {
x0 := c.Eval(t.X0)
log.Printf("Sending: %s", x0)
r := DemodulateToken(c.Send(ModulateToken(x0)))
log.Printf("Receivd: %s", r.Galaxy())
// log.Printf("%s => %s", t, r)
return r, false
}
func (t Send) Eval(c Context) (Token, bool) {
return t, false
}
func (t Send) String() string {
return "send"
}
func (t Send1) String() string {
return fmt.Sprintf("(send %s)", t.X0)
}
func (t Send) Galaxy() string {
return "send"
}
// Galaxy renders the node in galaxy-language textual form, "ap send <arg>",
// matching every other Galaxy method. The previous format string carried a
// stray leading space, which produced a double space when this node was
// embedded in a parent expression's Galaxy output.
func (t Send1) Galaxy() string {
	return fmt.Sprintf("ap send %s", t.X0.Galaxy())
}
type Neg struct{}
type Neg1 struct {
X0 Token
}
func (t Neg) Apply(v Token) Token {
return Neg1{X0: v}
}
func (t Neg) Eval(c Context) (Token, bool) {
return t, false
}
func (t Neg1) Eval(c Context) (Token, bool) {
x0 := c.Eval(t.X0).(Int)
r := Int{V: -x0.V}
// log.Printf("%s => %s", t, r)
return r, false
}
func (t Neg) String() string {
return "neg"
}
func (t Neg1) String() string {
return fmt.Sprintf("(neg %s)", t.X0)
}
func (t Neg) Galaxy() string {
return "neg"
}
func (t Neg1) Galaxy() string {
return fmt.Sprintf("ap neg %s", t.X0.Galaxy())
}
type S struct{}
type S1 struct {
X0 Token
}
type S2 struct {
X0 Token
X1 Token
}
type S3 struct {
X0 Token
X1 Token
X2 Token
}
func (t S) Apply(v Token) Token {
return S1{X0: v}
}
func (t S1) Apply(v Token) Token {
return S2{X0: t.X0, X1: v}
}
func (t S2) Apply(v Token) Token {
return S3{X0: t.X0, X1: t.X1, X2: v}
}
// Eval reduces the fully-applied S combinator: S x0 x1 x2 = (x0 x2)(x1 x2).
// x2 is evaluated once and shared by both applications; the inner
// application (x1 x2) is left lazy as an Ap2 node. The returned true
// tells the caller to keep reducing the result.
func (t S3) Eval(c Context) (Token, bool) {
	f0 := c.Eval(t.X0).(Func)
	x2 := c.Eval(t.X2)
	f1 := c.Eval(f0.Apply(x2)).(Func)
	r := f1.Apply(
		Ap2{
			F: t.X1,
			A: x2,
		},
	)
	// log.Printf("%s => %s", t, r)
	return r, true
}
func (t S) Eval(c Context) (Token, bool) {
return t, false
}
func (t S1) Eval(c Context) (Token, bool) {
return t, false
}
func (t S2) Eval(c Context) (Token, bool) {
return t, false
}
func (t S) String() string {
return "s"
}
func (t S1) String() string {
return fmt.Sprintf("(s1 %s)", t.X0)
}
func (t S2) String() string {
return fmt.Sprintf("(s2 %s %s)", t.X0, t.X1)
}
func (t S3) String() string {
return fmt.Sprintf("(s3 %s %s %s)", t.X0, t.X1, t.X2)
}
func (t S) Galaxy() string {
return "s"
}
func (t S1) Galaxy() string {
return fmt.Sprintf("ap s %s", t.X0.Galaxy())
}
func (t S2) Galaxy() string {
return fmt.Sprintf("ap ap s %s %s", t.X0.Galaxy(), t.X1.Galaxy())
}
func (t S3) Galaxy() string {
return fmt.Sprintf("ap ap ap s %s %s %s", t.X0.Galaxy(), t.X1.Galaxy(), t.X2.Galaxy())
}
type C struct{}
type C1 struct {
X0 Token
}
type C2 struct {
X0 Token
X1 Token
}
type C3 struct {
X0 Token
X1 Token
X2 Token
}
func (t C) Apply(v Token) Token {
return C1{X0: v}
}
func (t C1) Apply(v Token) Token {
return C2{X0: t.X0, X1: v}
}
func (t C2) Apply(v Token) Token {
return C3{X0: t.X0, X1: t.X1, X2: v}
}
func (t C3) Eval(c Context) (Token, bool) {
f0 := c.Eval(t.X0).(Func)
f1 := c.Eval(f0.Apply(t.X2)).(Func)
r := f1.Apply(t.X1)
// log.Printf("%s => %s", t, r)
return r, true
}
func (t C) Eval(c Context) (Token, bool) {
return t, false
}
func (t C1) Eval(c Context) (Token, bool) {
return t, false
}
func (t C2) Eval(c Context) (Token, bool) {
return t, false
}
func (t C) String() string {
return "c"
}
func (t C1) String() string {
return fmt.Sprintf("(c1 %s)", t.X0)
}
func (t C2) String() string {
return fmt.Sprintf("(c2 %s %s)", t.X0, t.X1)
}
func (t C3) String() string {
return fmt.Sprintf("(c3 %s %s %s)", t.X0, t.X1, t.X2)
}
func (t C) Galaxy() string {
return "c"
}
func (t C1) Galaxy() string {
return fmt.Sprintf("ap c %s", t.X0.Galaxy())
}
func (t C2) Galaxy() string {
return fmt.Sprintf("ap ap c %s %s", t.X0.Galaxy(), t.X1.Galaxy())
}
func (t C3) Galaxy() string {
return fmt.Sprintf("ap ap ap c %s %s %s", t.X0.Galaxy(), t.X1.Galaxy(), t.X2.Galaxy())
}
type B struct{}
type B1 struct {
X0 Token
}
type B2 struct {
X0 Token
X1 Token
}
type B3 struct {
X0 Token
X1 Token
X2 Token
}
func (t B) Apply(v Token) Token {
return B1{X0: v}
}
func (t B1) Apply(v Token) Token {
return B2{X0: t.X0, X1: v}
}
func (t B2) Apply(v Token) Token {
return B3{X0: t.X0, X1: t.X1, X2: v}
}
func (t B3) Eval(c Context) (Token, bool) {
r := c.Eval(t.X0).(Func).Apply(
Ap2{
F: t.X1,
A: t.X2,
},
)
return r, true
}
func (t B) Eval(c Context) (Token, bool) {
return t, false
}
func (t B1) Eval(c Context) (Token, bool) {
return t, false
}
func (t B2) Eval(c Context) (Token, bool) {
return t, false
}
func (t B) String() string {
return "b"
}
func (t B1) String() string {
return fmt.Sprintf("(b1 %s)", t.X0)
}
func (t B2) String() string {
return fmt.Sprintf("(b2 %s %s)", t.X0, t.X1)
}
func (t B3) String() string {
return fmt.Sprintf("(b3 %s %s %s)", t.X0, t.X1, t.X2)
}
func (t B) Galaxy() string {
return "b"
}
func (t B1) Galaxy() string {
return fmt.Sprintf("ap b %s", t.X0.Galaxy())
}
func (t B2) Galaxy() string {
return fmt.Sprintf("ap ap b %s %s", t.X0.Galaxy(), t.X1.Galaxy())
}
func (t B3) Galaxy() string {
return fmt.Sprintf("ap ap ap b %s %s %s", t.X0.Galaxy(), t.X1.Galaxy(), t.X2.Galaxy())
}
type Pwr2 struct{}
type Pwr21 struct {
X0 Token
}
func (t Pwr2) Apply(v Token) Token {
return Pwr21{X0: v}
}
func (t Pwr21) Eval(c Context) (Token, bool) {
x0 := c.Eval(t.X0).(Int).V
r := Int{V: 1 << x0}
// log.Printf("%s => %s", t, r)
return r, false
}
func (t Pwr2) Eval(c Context) (Token, bool) {
return t, false
}
func (t Pwr2) String() string {
return "pwr2"
}
func (t Pwr21) String() string {
return fmt.Sprintf("(pwr2 %s)", t.X0)
}
func (t Pwr2) Galaxy() string {
return "pwr2"
}
func (t Pwr21) Galaxy() string {
return fmt.Sprintf("ap pwr2 %s", t.X0.Galaxy())
}
type I struct{}
type I1 struct {
X0 Token
}
func (t I) Apply(v Token) Token {
return I1{X0: v}
}
func (t I1) Eval(c Context) (Token, bool) {
r := t.X0
// log.Printf("%s => %s", t, r)
return r, true
}
func (t I) Eval(c Context) (Token, bool) {
return t, false
}
func (t I) String() string {
return "i"
}
func (t I1) String() string {
return fmt.Sprintf("(i %s)", t.X0)
}
func (t I) Galaxy() string {
return "i"
}
func (t I1) Galaxy() string {
return fmt.Sprintf("ap i %s", t.X0.Galaxy())
}
type True struct{}
type True1 struct {
X0 Token
}
type True2 struct {
X0 Token
}
func (t True) Apply(v Token) Token {
return True1{X0: v}
}
func (t True1) Apply(v Token) Token {
return True2{X0: t.X0}
}
func (t True2) Eval(c Context) (Token, bool) {
r := t.X0
// log.Printf("%s => %s", t, r)
return r, true
}
func (t True) Eval(c Context) (Token, bool) {
return t, false
}
func (t True1) Eval(c Context) (Token, bool) {
return t, false
}
func (t True) String() string {
return "t"
}
func (t True1) String() string {
return fmt.Sprintf("(t %s)", t.X0)
}
func (t True2) String() string {
return fmt.Sprintf("(t %s ?)", t.X0)
}
func (t True) Galaxy() string {
return "t"
}
func (t True1) Galaxy() string {
return fmt.Sprintf("ap t %s", t.X0.Galaxy())
}
func (t True2) Galaxy() string {
return fmt.Sprintf("ap ap t %s 42", t.X0.Galaxy())
}
type False struct{}
type False1 struct{}
type False2 struct {
X1 Token
}
func (t False) Apply(v Token) Token {
return False1{}
}
func (t False1) Apply(v Token) Token {
return False2{X1: v}
}
func (t False2) Eval(c Context) (Token, bool) {
r := t.X1
// log.Printf("%s => %s", t, r)
return r, true
}
func (t False) Eval(c Context) (Token, bool) {
return t, false
}
func (t False1) Eval(c Context) (Token, bool) {
return t, false
}
func (t False) String() string {
return "f"
}
func (t False1) String() string {
return "(f ?)"
}
func (t False2) String() string {
return fmt.Sprintf("(f ? %s)", t.X1)
}
func (t False) Galaxy() string {
return "f"
}
func (t False1) Galaxy() string {
return "ap f 42"
}
func (t False2) Galaxy() string {
return fmt.Sprintf("ap ap f 42 %s", t.X1.Galaxy())
}
// Cons is the pair constructor, curried. A fully-built pair is Cons2; a
// Cons3 is a pair applied to a selector function (Church-encoded access).
type Cons struct{}

// Cons1 is cons applied to the head only.
type Cons1 struct {
	X0 Token
}

// Cons2 is a complete pair (head X0, tail X1).
type Cons2 struct {
	X0 Token
	X1 Token
}

// Cons3 is a pair applied to a selector: (cons x0 x1 x2) => ((x2 x0) x1).
type Cons3 struct {
	X0 Token
	X1 Token
	X2 Token
}

func (t Cons) Apply(v Token) Token {
	return Cons1{X0: v}
}
func (t Cons1) Apply(v Token) Token {
	return Cons2{X0: t.X0, X1: v}
}
func (t Cons2) Apply(v Token) Token {
	return Cons3{X0: t.X0, X1: t.X1, X2: v}
}

// Eval reduces a selector application: the selector X2 is forced to a Func,
// applied to the head, forced again, then applied to the tail. The second
// result (true) asks the caller to keep reducing the returned term.
func (t Cons3) Eval(c Context) (Token, bool) {
	y1 := c.Eval(t.X2).(Func)
	y2 := c.Eval(y1.Apply(t.X0)).(Func)
	r := y2.Apply(t.X1)
	// log.Printf("%s => %s", t, r)
	return r, true
}

// Eval on the partially-applied forms is a no-op: they are already values.
func (t Cons) Eval(c Context) (Token, bool) {
	return t, false
}
func (t Cons1) Eval(c Context) (Token, bool) {
	return t, false
}

// Eval on a complete pair forces both components. The receiver is a value,
// so the evaluated copy is returned without mutating the original pair.
func (t Cons2) Eval(c Context) (Token, bool) {
	t.X0 = c.Eval(t.X0)
	t.X1 = c.Eval(t.X1)
	return t, false
}

// Car returns the head of the pair (part of the ICons interface).
func (t Cons2) Car() Token {
	return t.X0
}

// Cdr returns the tail of the pair (part of the ICons interface).
func (t Cons2) Cdr() Token {
	return t.X1
}

// IsNil reports false: a pair is never the empty list.
func (t Cons2) IsNil() bool {
	return false
}
func (t Cons) String() string {
return "cons"
}
func (t Cons1) String() string {
return fmt.Sprintf("(cons1 %s)", t.X0)
}
func (t Cons2) String() string {
return fmt.Sprintf("(cons2 %s %s)", t.X0, t.X1)
}
func (t Cons3) String() string {
return fmt.Sprintf("(cons3 %s %s %s)", t.X0, t.X1, t.X2)
}
func (t Cons) Galaxy() string {
return "cons"
}
func (t Cons1) Galaxy() string {
return fmt.Sprintf("ap cons %s", t.X0.Galaxy())
}
func (t Cons2) Galaxy() string {
return fmt.Sprintf("ap ap cons %s %s", t.X0.Galaxy(), t.X1.Galaxy())
}
func (t Cons3) Galaxy() string {
return fmt.Sprintf("ap ap ap cons %s %s %s", t.X0.Galaxy(), t.X1.Galaxy(), t.X2.Galaxy())
}
type Vec struct {
Cons
}
func (t Vec) String() string {
return "vec"
}
func (t Vec) Galaxy() string {
return "vec"
}
type Car struct{}
type Car1 struct {
X0 Token
}
func (t Car) Apply(v Token) Token {
return Car1{X0: v}
}
func (t Car1) Eval(c Context) (Token, bool) {
y1 := c.Eval(t.X0).(Func)
r := y1.Apply(True{})
// log.Printf("%s => %s", t, r)
return r, true
}
func (t Car) Eval(c Context) (Token, bool) {
return t, false
}
func (t Car) String() string {
return "car"
}
func (t Car1) String() string {
return fmt.Sprintf("(car %s)", t.X0)
}
func (t Car) Galaxy() string {
return "car"
}
func (t Car1) Galaxy() string {
return fmt.Sprintf("ap car %s", t.X0.Galaxy())
}
type Cdr struct{}
type Cdr1 struct {
X0 Token
}
func (t Cdr) Apply(v Token) Token {
return Cdr1{X0: v}
}
func (t Cdr1) Eval(c Context) (Token, bool) {
y1 := c.Eval(t.X0).(Func)
r := y1.Apply(False{})
// log.Printf("%s => %s", t, r)
return r, true
}
func (t Cdr) Eval(c Context) (Token, bool) {
return t, false
}
func (t Cdr) String() string {
return "cdr"
}
func (t Cdr1) String() string {
return fmt.Sprintf("(cdr %s)", t.X0)
}
func (t Cdr) Galaxy() string {
return "cdr"
}
func (t Cdr1) Galaxy() string {
return fmt.Sprintf("ap cdr %s", t.X0.Galaxy())
}
type Nil struct{}
type Nil1 struct{}
func (t Nil) Apply(v Token) Token {
return Nil1{}
}
func (t Nil1) Eval(c Context) (Token, bool) {
return True{}, false
}
func (t Nil) Eval(c Context) (Token, bool) {
return t, false
}
func (t Nil) Car() Token {
return Nil{}
}
func (t Nil) Cdr() Token {
return Nil{}
}
func (t Nil) IsNil() bool {
return true
}
func (t Nil) String() string {
return "nil"
}
func (t Nil1) String() string {
return "(nil ?)"
}
func (t Nil) Galaxy() string {
return "nil"
}
func (t Nil1) Galaxy() string {
return "ap nil 42"
}
type isNil struct{}
type isNil1 struct{}
type isNil2 struct{}
func (t isNil) Apply(v Token) Token {
return isNil1{}
}
func (t isNil1) Apply(v Token) Token {
return isNil2{}
}
func (t isNil2) Eval(c Context) (Token, bool) {
return False{}, false
}
func (t isNil) Eval(c Context) (Token, bool) {
return t, false
}
func (t isNil1) Eval(c Context) (Token, bool) {
return t, false
}
func (t isNil) String() string {
return "*isnil*"
}
func (t isNil1) String() string {
return "(*isnil* ?)"
}
func (t isNil2) String() string {
return "(*isnil* ? ?)"
}
func (t isNil) Galaxy() string {
return "*isnil*"
}
func (t isNil1) Galaxy() string {
return "ap *isnil* 42"
}
func (t isNil2) Galaxy() string {
return "ap ap *isnil* 42 42"
}
type IsNil struct{}
type IsNil1 struct {
X0 Token
}
func (t IsNil) Apply(v Token) Token {
return IsNil1{X0: v}
}
func (t IsNil1) Eval(c Context) (Token, bool) {
x0 := c.Eval(t.X0).(Func)
r := x0.Apply(isNil{})
// log.Printf("%s => %s", t, r)
return r, true
}
func (t IsNil) Eval(c Context) (Token, bool) {
return t, false
}
func (t IsNil) String() string {
return "isnil"
}
func (t IsNil1) String() string {
return fmt.Sprintf("(isnil %s)", t.X0)
}
func (t IsNil) Galaxy() string {
return "isnil"
}
func (t IsNil1) Galaxy() string {
return fmt.Sprintf("ap isnil %s", t.X0.Galaxy())
}
// ICons is the interface shared by evaluated list values: proper pairs
// (Cons2) and the empty list (Nil).
type ICons interface {
	Token
	Car() Token
	Cdr() Token
	IsNil() bool
}

// DrawPoints evaluates v as a cons list of (x . y) integer pairs and plots
// every point both onto the Context's shared picture and onto a fresh
// Picture, which is returned.
func DrawPoints(c Context, v Token) *Picture {
	var pts []Point
	r := NewPicture()
	// Walk the list; each element is itself a pair of Ints.
	// NOTE(review): i.Cdr() is cast without re-evaluating (unlike
	// Multipledraw1.Eval, which wraps it in c.Eval) — assumes Cons2.Eval has
	// already forced both fields. Confirm if lazy tails can reach here.
	for i := c.Eval(v).(ICons); !i.IsNil(); i = i.Cdr().(ICons) {
		p := i.Car().(ICons)
		x := int(p.Car().(Int).V)
		y := int(p.Cdr().(Int).V)
		pts = append(pts, Pt(x, y))
	}
	c.Picture().DrawPts(pts...)
	r.DrawPts(pts...)
	// log.Printf("Draw %s", r)
	return r
}
type Draw struct{}
type Draw1 struct {
X0 Token
}
func (t Draw) Apply(v Token) Token {
return Draw1{X0: v}
}
func (t Draw1) Eval(c Context) (Token, bool) {
return DrawPoints(c, t.X0), false
}
func (t Draw) Eval(c Context) (Token, bool) {
return t, false
}
func (t Draw) String() string {
return "draw"
}
func (t Draw1) String() string {
return fmt.Sprintf("(draw %s)", t.X0)
}
func (t Draw) Galaxy() string {
return "draw"
}
func (t Draw1) Galaxy() string {
return fmt.Sprintf("ap draw %s", t.X0.Galaxy())
}
type Checkerboard struct{}
func (t Checkerboard) Apply(v Token) Token {
log.Panicf("%s not implemented", t)
return nil
}
func (t Checkerboard) Eval(c Context) (Token, bool) {
return t, false
}
func (t Checkerboard) String() string {
return "checkerboard"
}
func (t Checkerboard) Galaxy() string {
return "checkerboard"
}
type Multipledraw struct{}
type Multipledraw1 struct {
X0 Token
}
func (t Multipledraw) Apply(v Token) Token {
return Multipledraw1{X0: v}
}
func (t Multipledraw1) Eval(c Context) (Token, bool) {
r := NewPicture()
v := c.Eval(t.X0).(ICons)
for i := v; !i.IsNil(); i = c.Eval(i.Cdr()).(ICons) {
r.DrawPicture(DrawPoints(c, i.Car()))
}
return r, false
}
func (t Multipledraw) Eval(c Context) (Token, bool) {
return t, false
}
func (t Multipledraw) String() string {
return "multipledraw"
}
func (t Multipledraw1) String() string {
return fmt.Sprintf("(multipledraw %s)", t.X0)
}
func (t Multipledraw) Galaxy() string {
return "multipledraw"
}
func (t Multipledraw1) Galaxy() string {
return fmt.Sprintf("ap multipledraw %s", t.X0.Galaxy())
}
// If0 is the galaxy conditional combinator, curried over three arguments:
// (if0 cond then else) — yields then when cond evaluates to 0, else otherwise.
type If0 struct{}

// If01 is if0 applied to the condition only.
type If01 struct {
	X0 Token
}

// If02 is if0 applied to the condition and the then-branch.
type If02 struct {
	X0 Token
	X1 Token
}

// If03 is the fully-applied conditional, ready to evaluate.
type If03 struct {
	X0 Token
	X1 Token
	X2 Token
}

func (t If0) Apply(v Token) Token {
	return If01{X0: v}
}
func (t If01) Apply(v Token) Token {
	return If02{X0: t.X0, X1: v}
}
func (t If02) Apply(v Token) Token {
	return If03{X0: t.X0, X1: t.X1, X2: v}
}

// Eval forces the condition to an Int and selects the then-branch when it is
// zero, the else-branch otherwise. The chosen branch is returned unevaluated;
// the second result (true) asks the caller to keep reducing it.
func (t If03) Eval(c Context) (Token, bool) {
	x0 := c.Eval(t.X0).(Int).V
	r := t.X2
	if x0 == 0 {
		r = t.X1
	}
	// log.Printf("%s => %s", t, r)
	return r, true
}

// Eval on the partially-applied forms is a no-op: they are already values.
func (t If0) Eval(c Context) (Token, bool) {
	return t, false
}
func (t If01) Eval(c Context) (Token, bool) {
	return t, false
}
func (t If02) Eval(c Context) (Token, bool) {
	return t, false
}

// String renders the debug form.
func (t If0) String() string {
	return "if0"
}
func (t If01) String() string {
	return fmt.Sprintf("(if0_1 %s)", t.X0)
}
func (t If02) String() string {
	return fmt.Sprintf("(if0_2 %s %s)", t.X0, t.X1)
}
func (t If03) String() string {
	return fmt.Sprintf("(if0_3 %s %s %s)", t.X0, t.X1, t.X2)
}

// Galaxy renders the original galaxy-language application form.
func (t If0) Galaxy() string {
	return "if0"
}
func (t If01) Galaxy() string {
	return fmt.Sprintf("ap if0 %s", t.X0.Galaxy())
}
func (t If02) Galaxy() string {
	return fmt.Sprintf("ap ap if0 %s %s", t.X0.Galaxy(), t.X1.Galaxy())
}
func (t If03) Galaxy() string {
	return fmt.Sprintf("ap ap ap if0 %s %s %s", t.X0.Galaxy(), t.X1.Galaxy(), t.X2.Galaxy())
}
// interactHelper is the internal "f38" combinator from the galaxy protocol,
// curried over two arguments: the protocol and its (flag, state, data) result.
type interactHelper struct{}

// interactHelper1 is f38 applied to the protocol.
type interactHelper1 struct {
	X0 Token
}

// interactHelper2 is the fully-applied f38 form, ready to evaluate.
type interactHelper2 struct {
	X0 Token
	X1 Token
}

func (t interactHelper) Apply(v Token) Token {
	return interactHelper1{X0: v}
}
func (t interactHelper1) Apply(v Token) Token {
	return interactHelper2{X0: t.X0, X1: v}
}

// Eval dispatches on the protocol result's flag: flag == 0 finishes the
// interaction with (newState, multipledraw(data)); otherwise it loops by
// re-entering interact with the new state and the server response.
func (t interactHelper2) Eval(c Context) (Token, bool) {
	// (f38 x0 x1) =
	// (if0 (car x1)
	// -- then
	// (cons (modem (car (cdr x1))) (cons (multipledraw (car (cdr (cdr x1)))) nil))
	// -- else
	// (interact x0 (modem (car (cdr x1))) (send (car (cdr (cdr x1))))))
	// f38(protocol, (flag, newState, data)) = if flag == 0
	// then (modem(newState), multipledraw(data))
	// else interact(protocol, modem(newState), send(data))
	//
	// NOTE(review): the spec above applies modem() to newState, but the code
	// below passes newState through unmodified — confirm modem is a no-op
	// in this interpreter before relying on this.
	x1 := c.Eval(t.X1).(ICons)
	flag := c.Eval(x1.Car()).(Int)
	x11 := c.Eval(x1.Cdr()).(ICons)
	newState := x11.Car()
	x12 := c.Eval(x11.Cdr()).(ICons)
	data := x12.Car()
	if flag.V == 0 {
		// Terminal case: (newState . ((multipledraw data) . nil)).
		r := Cons2{
			X0: newState,
			X1: Cons2{
				X0: Multipledraw1{X0: data},
				X1: Nil{},
			},
		}
		// log.Printf("%s => %s", t, r)
		return r, true
	} else {
		// Continue: interact(protocol, newState, send(data)).
		r := Interact3{
			X0: t.X0,
			X1: newState,
			X2: Send1{
				X0: data,
			},
		}
		// log.Printf("%s => %s", t, r)
		return r, true
	}
}

// Eval on the partially-applied forms is a no-op: they are already values.
func (t interactHelper) Eval(c Context) (Token, bool) {
	return t, false
}
func (t interactHelper1) Eval(c Context) (Token, bool) {
	return t, false
}

// String renders the debug form.
func (t interactHelper) String() string {
	return "interact-helper"
}
func (t interactHelper1) String() string {
	return fmt.Sprintf("(interact-helper1 %s)", t.X0)
}
func (t interactHelper2) String() string {
	return fmt.Sprintf("(interact-helper2 %s %s)", t.X0, t.X1)
}

// Galaxy renders the original galaxy-language application form.
func (t interactHelper) Galaxy() string {
	return "*interact-helper*"
}
func (t interactHelper1) Galaxy() string {
	return fmt.Sprintf("ap *interact-helper* %s", t.X0.Galaxy())
}
func (t interactHelper2) Galaxy() string {
	return fmt.Sprintf("ap ap *interact-helper* %s %s", t.X0.Galaxy(), t.X1.Galaxy())
}
type Interact struct{}
type Interact1 struct {
X0 Token
}
type Interact2 struct {
X0 Token
X1 Token
}
type Interact3 struct {
X0 Token
X1 Token
X2 Token
}
func (t Interact) Apply(v Token) Token {
return Interact1{X0: v}
}
func (t Interact1) Apply(v Token) Token {
return Interact2{X0: t.X0, X1: v}
}
func (t Interact2) Apply(v Token) Token {
return Interact3{X0: t.X0, X1: t.X1, X2: v}
}
func (t Interact3) Eval(c Context) (Token, bool) {
// (interact x0 x1 x2) = (f38 x0 (x0 x1 x2))
r := interactHelper2{
X0: t.X0,
X1: Ap2{
F: Ap2{
F: t.X0,
A: t.X1,
},
A: t.X2,
},
}
// log.Printf("%s => %s", t, r)
return r, true
}
func (t Interact) Eval(c Context) (Token, bool) {
return t, false
}
func (t Interact1) Eval(c Context) (Token, bool) {
return t, false
}
func (t Interact2) Eval(c Context) (Token, bool) {
return t, false
}
func (t Interact) String() string {
return "interact"
}
func (t Interact1) String() string {
return fmt.Sprintf("(interact1 %s)", t.X0)
}
func (t Interact2) String() string {
return fmt.Sprintf("(interact2 %s %s)", t.X0, t.X1)
}
func (t Interact3) String() string {
return fmt.Sprintf("(interact3 %s %s %s)", t.X0, t.X1, t.X2)
}
func (t Interact) Galaxy() string {
return "interact"
}
func (t Interact1) Galaxy() string {
return fmt.Sprintf("ap interact %s", t.X0.Galaxy())
}
func (t Interact2) Galaxy() string {
return fmt.Sprintf("ap ap interact %s %s", t.X0.Galaxy(), t.X1.Galaxy())
}
func (t Interact3) Galaxy() string {
return fmt.Sprintf("ap ap ap interact %s %s %s", t.X0.Galaxy(), t.X1.Galaxy(), t.X2.Galaxy())
} | diseaz/interpreter/tokens.go | 0.682891 | 0.419707 | tokens.go | starcoder |
package prettyformat
import (
"bytes"
"errors"
"fmt"
"reflect"
"sort"
"strings"
)
var (
// ErrInvalidType is returned when the type of the passed value cannot be determined
ErrInvalidType = errors.New("invalid type")
// ErrArbitraryPointerType is returned when an arbitrary pointer is passed
ErrArbitraryPointerType = errors.New("arbitrary pointer types cannot be serialized reliably")
// ErrFunctionType is returned when a function is passed
ErrFunctionType = errors.New("functions cannot be serialized")
// ErrInterfaceType is returned when an interface instance is passed
ErrInterfaceType = errors.New("interfaces cannot be serialized")
// ErrChanType is returned when a channel is passed
ErrChanType = errors.New("channels cannot be serialized")
)
const (
paddingIncrement = 2
)
// Format a value into a pretty-printed string
func Format(value interface{}) (string, error) {
return formatPadded(value, paddingIncrement)
}
func formatPadded(value interface{}, padding int) (string, error) {
switch reflect.TypeOf(value).Kind() {
case reflect.Invalid:
return formatInvalid(value, padding)
case reflect.Bool:
return formatBool(value, padding)
case reflect.Int:
return formatInt(value, padding)
case reflect.Int8:
return formatInt8(value, padding)
case reflect.Int16:
return formatInt16(value, padding)
case reflect.Int32:
return formatInt32(value, padding)
case reflect.Int64:
return formatInt64(value, padding)
case reflect.Uint:
return formatUint(value, padding)
case reflect.Uint8:
return formatUint8(value, padding)
case reflect.Uint16:
return formatUint16(value, padding)
case reflect.Uint32:
return formatUint32(value, padding)
case reflect.Uint64:
return formatUint64(value, padding)
case reflect.Uintptr:
return formatUintptr(value, padding)
case reflect.Float32:
return formatFloat32(value, padding)
case reflect.Float64:
return formatFloat64(value, padding)
case reflect.Complex64:
return formatComplex64(value, padding)
case reflect.Complex128:
return formatComplex128(value, padding)
case reflect.Array:
return formatArray(value, padding)
case reflect.Chan:
return formatChan(value, padding)
case reflect.Func:
return formatFunc(value, padding)
case reflect.Interface:
return formatInterface(value, padding)
case reflect.Map:
return formatMap(value, padding)
case reflect.Ptr:
return formatPtr(value, padding)
case reflect.Slice:
return formatSlice(value, padding)
case reflect.String:
return formatString(value, padding)
case reflect.Struct:
return formatStruct(value, padding)
case reflect.UnsafePointer:
return formatUnsafePointer(value, padding)
default:
return formatInvalid(value, padding)
}
}
func formatInvalid(value interface{}, padding int) (string, error) {
return "", ErrInvalidType
}
func formatBool(value interface{}, padding int) (string, error) {
return fmt.Sprintf("%v", value.(bool)), nil
}
func formatInt(value interface{}, padding int) (string, error) {
return fmt.Sprintf("%v", value.(int)), nil
}
func formatInt8(value interface{}, padding int) (string, error) {
return fmt.Sprintf("%v", value.(int8)), nil
}
func formatInt16(value interface{}, padding int) (string, error) {
return fmt.Sprintf("%v", value.(int16)), nil
}
func formatInt32(value interface{}, padding int) (string, error) {
return fmt.Sprintf("%v", value.(int32)), nil
}
func formatInt64(value interface{}, padding int) (string, error) {
return fmt.Sprintf("%v", value.(int64)), nil
}
func formatUint(value interface{}, padding int) (string, error) {
return fmt.Sprintf("%v", value.(uint)), nil
}
func formatUint8(value interface{}, padding int) (string, error) {
return fmt.Sprintf("%v", value.(uint8)), nil
}
func formatUint16(value interface{}, padding int) (string, error) {
return fmt.Sprintf("%v", value.(uint16)), nil
}
func formatUint32(value interface{}, padding int) (string, error) {
return fmt.Sprintf("%v", value.(uint32)), nil
}
func formatUint64(value interface{}, padding int) (string, error) {
return fmt.Sprintf("%v", value.(uint64)), nil
}
func formatUintptr(value interface{}, padding int) (string, error) {
return "", ErrArbitraryPointerType
}
func formatFloat32(value interface{}, padding int) (string, error) {
return fmt.Sprintf("%v", value.(float32)), nil
}
func formatFloat64(value interface{}, padding int) (string, error) {
return fmt.Sprintf("%v", value.(float64)), nil
}
func formatComplex64(value interface{}, padding int) (string, error) {
return fmt.Sprintf("%v", value.(complex64)), nil
}
func formatComplex128(value interface{}, padding int) (string, error) {
return fmt.Sprintf("%v", value.(complex128)), nil
}
// formatArray renders a fixed-size array as "[N]Elem{...}" with one element
// per line. Elements behind an interface-typed element kind are prefixed with
// their dynamic type as "(type)value" unless the dynamic type already prints
// its own name (see explicitType).
func formatArray(value interface{}, padding int) (string, error) {
	t := reflect.TypeOf(value)
	v := reflect.ValueOf(value)
	elemType := t.Elem()
	var buffer bytes.Buffer
	buffer.WriteString(fmt.Sprintf("[%d]%s{", v.Len(), typeName(elemType)))
	// Close empty arrays on the same line, matching formatSlice/formatMap
	// (previously an empty array still emitted a newline plus closing padding).
	if v.Len() == 0 {
		buffer.WriteString("}")
		return buffer.String(), nil
	}
	buffer.WriteString("\n")
	for i := 0; i < v.Len(); i++ {
		elem := v.Index(i)
		elemInterface := elem.Interface()
		formattedValue, err := formatPadded(elemInterface, padding+paddingIncrement)
		if err != nil {
			return "", err
		}
		buffer.WriteString(strings.Repeat(" ", padding))
		if elemType.Kind() == reflect.Interface && !explicitType(reflect.TypeOf(elemInterface)) {
			buffer.WriteString(fmt.Sprintf("(%s)%s,\n", typeName(reflect.TypeOf(elemInterface)), formattedValue))
		} else {
			buffer.WriteString(fmt.Sprintf("%s,\n", formattedValue))
		}
	}
	buffer.WriteString(strings.Repeat(" ", padding-paddingIncrement))
	buffer.WriteString("}")
	return buffer.String(), nil
}
func formatChan(value interface{}, padding int) (string, error) {
return "", ErrChanType
}
func formatFunc(value interface{}, padding int) (string, error) {
return "", ErrFunctionType
}
func formatInterface(value interface{}, padding int) (string, error) {
return "", ErrInterfaceType
}
// mapEntry holds the formatted key and element of one map entry, plus
// optional "(type)" annotations used when the map's static key/element type
// is an interface.
type mapEntry struct {
	key      string
	keyType  string
	elem     string
	elemType string
}

// mapEntries implements sort.Interface, ordering entries by formatted key so
// map output is deterministic.
type mapEntries []mapEntry

func (e mapEntries) Len() int           { return len(e) }
func (e mapEntries) Less(i, j int) bool { return e[i].key < e[j].key }
func (e mapEntries) Swap(i, j int)      { e[i], e[j] = e[j], e[i] }
func formatMap(value interface{}, padding int) (string, error) {
t := reflect.TypeOf(value)
keyType := t.Key()
elemType := t.Elem()
var buffer bytes.Buffer
buffer.WriteString(fmt.Sprintf("%s{", typeName(t)))
v := reflect.ValueOf(value)
entries := make(mapEntries, v.Len())
if v.Len() == 0 {
goto finish
}
buffer.WriteString("\n")
for i, key := range v.MapKeys() {
keyInterface := key.Interface()
formattedKey, err := formatPadded(keyInterface, padding+paddingIncrement)
if err != nil {
return "", err
}
entries[i].key = formattedKey
if keyType.Kind() == reflect.Interface && !explicitType(reflect.TypeOf(keyInterface)) {
entries[i].keyType = "(" + typeName(reflect.TypeOf(keyInterface)) + ")"
}
elem := v.MapIndex(key)
elemInterface := elem.Interface()
formattedElem, err := formatPadded(elemInterface, padding+paddingIncrement)
if err != nil {
return "", err
}
entries[i].elem = formattedElem
if elemType.Kind() == reflect.Interface && !explicitType(reflect.TypeOf(elemInterface)) {
entries[i].elemType = "(" + typeName(reflect.TypeOf(elemInterface)) + ")"
}
}
sort.Sort(entries)
for _, entry := range entries {
buffer.WriteString(strings.Repeat(" ", padding) + entry.keyType + entry.key + ": " + entry.elemType + entry.elem + ",\n")
}
buffer.WriteString(strings.Repeat(" ", padding-paddingIncrement))
finish:
buffer.WriteString("}")
return buffer.String(), nil
}
// formatPtr renders a pointer value as "&<elem>". Nil pointers render as
// "nil". Errors from formatting the pointed-to value are propagated.
func formatPtr(value interface{}, padding int) (string, error) {
	v := reflect.ValueOf(value)
	if v.IsNil() {
		return "nil", nil
	}
	formattedElem, err := formatPadded(reflect.Indirect(v).Interface(), padding)
	if err != nil {
		// Bug fix: previously `return "", nil`, which silently swallowed the
		// element's formatting error (e.g. a pointer to a channel reported
		// success with an empty string).
		return "", err
	}
	return fmt.Sprintf("&%s", formattedElem), nil
}
func formatSlice(value interface{}, padding int) (string, error) {
t := reflect.TypeOf(value)
elemType := t.Elem()
var buffer bytes.Buffer
buffer.WriteString(fmt.Sprintf("[]%s{", typeName(elemType)))
v := reflect.ValueOf(value)
if v.IsNil() || v.Len() == 0 {
goto finish
}
buffer.WriteString("\n")
for i := 0; i < v.Len(); i++ {
elem := v.Index(i).Interface()
formattedValue, err := formatPadded(elem, padding+paddingIncrement)
if err != nil {
return "", err
}
buffer.WriteString(strings.Repeat(" ", padding))
if elemType.Kind() == reflect.Interface && !explicitType(reflect.TypeOf(elem)) {
elemType := reflect.TypeOf(elem)
buffer.WriteString(fmt.Sprintf("(%s)%s,\n", typeName(elemType), formattedValue))
} else {
buffer.WriteString(fmt.Sprintf("%s,\n", formattedValue))
}
}
buffer.WriteString(strings.Repeat(" ", padding-paddingIncrement))
finish:
buffer.WriteString("}")
return buffer.String(), nil
}
func formatString(value interface{}, padding int) (string, error) {
return fmt.Sprintf("%q", value.(string)), nil
}
func formatStruct(value interface{}, padding int) (string, error) {
t := reflect.TypeOf(value)
v := reflect.ValueOf(value)
var buffer bytes.Buffer
buffer.WriteString(fmt.Sprintf("%s{", typeName(t)))
hasExportedFields := false
for i := 0; i < t.NumField(); i++ {
field := t.Field(i)
hidden := field.PkgPath != ""
if hidden {
continue
}
if !hasExportedFields {
hasExportedFields = true
buffer.WriteString("\n")
}
buffer.WriteString(strings.Repeat(" ", padding))
fieldValue := v.Field(i)
fieldValueInterface := fieldValue.Interface()
formattedValue, err := formatPadded(fieldValueInterface, padding+paddingIncrement)
if err != nil {
return "", err
}
if field.Type.Kind() == reflect.Interface && !explicitType(reflect.TypeOf(fieldValueInterface)) {
buffer.WriteString(field.Name + ": (" + typeName(reflect.TypeOf(fieldValueInterface)) + ")" + formattedValue + ",\n")
} else {
buffer.WriteString(field.Name + ": " + formattedValue + ",\n")
}
}
if hasExportedFields {
buffer.WriteString(strings.Repeat(" ", padding-paddingIncrement))
}
buffer.WriteString("}")
return buffer.String(), nil
}
func formatUnsafePointer(value interface{}, padding int) (string, error) {
return "", ErrArbitraryPointerType
}
func explicitType(t reflect.Type) bool {
switch t.Kind() {
case reflect.Array:
return true
case reflect.Map:
return true
case reflect.Slice:
return true
case reflect.Struct:
return true
case reflect.Interface:
return true
default:
return false
}
}
func typeName(t reflect.Type) string {
switch t.Kind() {
case reflect.Array:
return fmt.Sprintf("[%d]%s", t.Len(), typeName(t.Elem()))
case reflect.Map:
return fmt.Sprintf("map[%s]%s", typeName(t.Key()), typeName(t.Elem()))
case reflect.Ptr:
return fmt.Sprintf("*%s", typeName(t.Elem()))
case reflect.Slice:
return fmt.Sprintf("[]%s", typeName(t.Elem()))
case reflect.Struct:
if t.Name() == "" {
return "(anonymous struct)"
}
return t.Name()
case reflect.Interface:
return "interface{}"
default:
return fmt.Sprintf("%s", t.Kind())
}
} | pretty.go | 0.646237 | 0.421314 | pretty.go | starcoder |
package must
import (
"fmt"
"reflect"
"github.com/kylelemons/godebug/diff"
"github.com/kylelemons/godebug/pretty"
)
var _ MustTester = Tester{}
/*
Tester implements MustTester and provides a TestingT to be used for all check functions.
*/
type Tester struct {
T TestingT // *testing.T or equivalent
InterfaceComparison func(expected, got interface{}) bool // Optional custom interface comparison function
InterfaceDiff func(expected, got interface{}) string // Optional custom interace diff function
}
/*
BeEqual compares the expected and got interfaces, triggering an error on the Tester's T if they are not equal.
This corresponds to the function BeEqual
*/
func (tester Tester) BeEqual(expected, got interface{}, a ...interface{}) bool {
tester.T.Helper()
if !tester.equal(expected, got) {
tester.formattedError("diff\n%s", a, tester.diff(expected, got))
return false
}
return true
}
/*
BeEqualErrors compares the expected and got errors, triggering an error on the Tester's T if they are not equal.
This corresponds to the function BeEqualErrors
*/
func (tester Tester) BeEqualErrors(expected, got error, a ...interface{}) bool {
tester.T.Helper()
if expected == nil && got == nil {
return true
}
if (expected == nil || got == nil) || expected.Error() != got.Error() {
tester.formattedError("Expected '%v', got '%v'", a, getErrMessage(expected), getErrMessage(got))
return false
}
return true
}
/*
BeNoError checks whether got is set, triggering an error on the Tester's T if it is non-nil.
This corresponds to the function BeNoError
*/
func (tester Tester) BeNoError(got error, a ...interface{}) bool {
tester.T.Helper()
if got == nil {
return true
}
tester.formattedError("error: %s", a, got.Error())
return false
}
/*
BeSameLength checks whether the two inputs have the same length according to the len function.
This corresponds to the function BeSameLength
*/
func (tester Tester) BeSameLength(expected, got interface{}, a ...interface{}) bool {
tester.T.Helper()
lenExpected, err := lenterface(expected)
if err != nil {
tester.formattedError("could not test lengths - %v", a, err)
return false
}
lenGot, err := lenterface(got)
if err != nil {
tester.formattedError("could not test lengths - %v", a, err)
return false
}
if lenExpected == lenGot {
return true
}
tester.formattedError("expected length %d, got length %d", a, lenExpected, lenGot)
return false
}
// BeError checks that the received error is not nil
func (tester Tester) BeError(got error, a ...interface{}) bool {
tester.T.Helper()
if got != nil {
return true
}
tester.formattedError("expected an error, but got nil", a)
return false
}
// BeErrorIf checks that the received error corresponds to the errorExpected flag
func (tester Tester) BeErrorIf(errorExpected bool, got error, a ...interface{}) bool {
tester.T.Helper()
if errorExpected {
return tester.BeError(got, a...)
}
return tester.BeNoError(got, a...)
}
// lenterface returns the length of val for any type the built-in len accepts
// (slice, map, string, chan, array), following one level of pointer
// indirection. All other types yield an error.
func lenterface(val interface{}) (int, error) {
	kind := reflect.TypeOf(val).Kind()
	if kind == reflect.Ptr {
		return lenterfacePtr(reflect.ValueOf(val))
	}
	switch kind {
	case reflect.Slice, reflect.Map, reflect.String, reflect.Chan, reflect.Array:
		return reflect.ValueOf(val).Len(), nil
	}
	return 0, fmt.Errorf("cannot get the length of type: %v", kind)
}

// lenterfacePtr dereferences val once and returns the length of the pointed-to
// value when it has a length; otherwise it returns an error.
func lenterfacePtr(val reflect.Value) (int, error) {
	elem := reflect.Indirect(val)
	switch elem.Kind() {
	case reflect.Slice, reflect.Map, reflect.String, reflect.Chan, reflect.Array:
		return elem.Len(), nil
	}
	return 0, fmt.Errorf("cannot get the length of a pointer to type: %v", elem.Kind())
}
func (tester Tester) equal(expected, got interface{}) bool {
if tester.InterfaceComparison != nil {
return tester.InterfaceComparison(expected, got)
}
return pretty.Compare(expected, got) == ""
}
func (tester Tester) diff(expected, got interface{}) string {
if tester.InterfaceDiff != nil {
return tester.InterfaceDiff(expected, got)
}
// Do string diff if strings. Compare does not handle multiline strings well
e, eok := expected.(string)
g, gok := got.(string)
if eok && gok {
return fmt.Sprintf("(- expected, + got)\n%v", diff.Diff(e, g))
}
return fmt.Sprintf("(- expected, + got)\n%v", pretty.Compare(expected, got))
}
func (tester Tester) formattedError(format string, a []interface{}, following ...interface{}) {
tester.T.Helper()
if len(a) > 0 {
var args []interface{}
args = append(args, fmt.Sprint(a...))
args = append(args, following...)
tester.T.Errorf("%v: "+format, args...)
} else {
tester.T.Errorf(format, following...)
}
}
// getErrMessage returns err's message, or "<nil>" when err is nil, so error
// comparisons can always be reported as plain text.
func getErrMessage(err error) string {
	if err == nil {
		return "<nil>"
	}
	return err.Error()
}
package geo
import (
	"fmt"
	"math"
	"strconv"
	"strings"
)
// GGALat2DD converts a GGA latitude to decimal degrees.
// Latitude and north/south designation are passed separately; ns must be one
// of "NnSs" (a leading +/- on lat is rejected). The result is formatted with
// precision to 4 decimal places (11.132 m).
// e.g. "2116.6922" -> "21.2782"
func GGALat2DD(lat string, ns string) (string, error) {
	if len(lat) > 0 && (lat[0] == '-' || lat[0] == '+') {
		return "", fmt.Errorf("+/- should be passed as N/S")
	}
	if len(lat) < 4 {
		return "", fmt.Errorf("bad GGA latitude, len < 4: %v", lat)
	}
	// GGA latitude is DDMM.MMMM: two degree digits, then decimal minutes.
	deg, degErr := strconv.ParseFloat(lat[:2], 64)
	if degErr != nil {
		return "", fmt.Errorf("bad GGA latitude, deg not numeric: %v", lat)
	}
	min, minErr := strconv.ParseFloat(lat[2:], 64)
	if minErr != nil {
		return "", fmt.Errorf("bad GGA latitude, min not numeric: %v", lat)
	}
	switch {
	case deg > 90:
		return "", fmt.Errorf("bad GGA latitude, gga=%v,%v deg=%v", lat, ns, deg)
	case min > 60:
		return "", fmt.Errorf("bad GGA latitude, gga=%v,%v min=%v", lat, ns, min)
	}
	dd := deg + min/60.0
	switch strings.ToUpper(ns) {
	case "N":
		return fmt.Sprintf("%.4f", dd), nil
	case "S":
		return fmt.Sprintf("%.4f", -dd), nil
	}
	return "", fmt.Errorf("bad GGA latitude, bad north/south char: %v", lat)
}
// GGALon2DD converts a GGA longitude to decimal degrees.
// Longitude and east/west designation are passed separately; ew must be one
// of "EeWw" (a leading +/- on lon is rejected). The result is formatted with
// precision to 4 decimal places (11.132 m).
// e.g. "15752.6526" -> "157.8775"
func GGALon2DD(lon string, ew string) (string, error) {
	if len(lon) > 0 && (lon[0] == '-' || lon[0] == '+') {
		return "", fmt.Errorf("+/- should be passed as E/W")
	}
	if len(lon) < 5 {
		return "", fmt.Errorf("bad GGA longitude, len < 5: %v", lon)
	}
	// GGA longitude is DDDMM.MMMM: three degree digits, then decimal minutes.
	deg, degErr := strconv.ParseFloat(lon[:3], 64)
	if degErr != nil {
		return "", fmt.Errorf("bad GGA longitude, deg not numeric: %v", lon)
	}
	min, minErr := strconv.ParseFloat(lon[3:], 64)
	if minErr != nil {
		return "", fmt.Errorf("bad GGA longitude, min not numeric: %v", lon)
	}
	switch {
	case deg > 180:
		return "", fmt.Errorf("bad GGA longitude, deg > 180: gga=%v,%v deg=%v", lon, ew, deg)
	case min > 60:
		return "", fmt.Errorf("bad GGA longitude, min > 60: gga=%v,%v min=%v", lon, ew, min)
	}
	dd := deg + min/60.0
	switch strings.ToUpper(ew) {
	case "E":
		return fmt.Sprintf("%.4f", dd), nil
	case "W":
		return fmt.Sprintf("%.4f", -dd), nil
	}
	return "", fmt.Errorf("bad GGA longitude, bad east/west char: %v", lon)
}
// CheckLat checks if the decimal degree latitude string is valid.
// It returns nil when lat parses as a float in [-90, 90].
// (Doc fix: the comment previously said "longitude".)
func CheckLat(lat string) error {
	val, err := strconv.ParseFloat(lat, 64)
	if err != nil {
		return fmt.Errorf("bad latitude: %v", lat)
	}
	if val < -90 || val > 90 {
		return fmt.Errorf("latitude out of range: %v", lat)
	}
	return nil
}
// CheckLon checks if the decimal degree longitude string is valid.
// It returns nil when lon parses as a float in [-180, 180].
// (Doc fix: the comment previously said "latitude".)
func CheckLon(lon string) error {
	val, err := strconv.ParseFloat(lon, 64)
	if err != nil {
		return fmt.Errorf("bad longitude: %v", lon)
	}
	if val < -180 || val > 180 {
		return fmt.Errorf("longitude out of range: %v", lon)
	}
	return nil
}
package scanfix
import (
"fmt"
"strconv"
f0 "github.com/protofix/protofix/codecfix"
)
// Field is a Protoscan splitter which splits fields of the FIX message.
type Field struct {
	Format   f0.Format // Format supplies the codec (and size bounds) for each FIX tag.
	Tag      int       // Tag is a unique number of the last successfully tokenized FIX field.
	Gaps     []byte    // Gaps holds the bytes skipped by the last Split call — the "tag=" prefix and the SOH terminator — so buffered, unprocessed bytes survive e.g. an EOF.
	splitter int       // splitter is the index of the "=" tag/value separator in the current field; 0 means the separator has not been located yet.
}
// Split is a Protoscan splitter which splits the fields of a FIX message.
// It returns hint (the number of additional bytes it needs to read),
// advance (the number of bytes the carriage should shift by),
// the token (the field's value) if one is complete, and an error if one occurs.
// Split is stateful: scan.splitter and scan.Tag carry progress between calls
// on the same field.
func (scan *Field) Split(data []byte, atEOF bool) (int, int, []byte, error) {
	scan.Gaps = scan.Gaps[:0]
	// Read at least 2 bytes, for example: "8=".
	if hint := 2 - len(data); hint > 0 {
		return hint, 0, nil, nil
	}
	if scan.splitter == 0 {
		// The splitter character (=) is not at the tail yet, so ask for one more byte.
		if data[len(data)-1] != '=' {
			return 1, 0, nil, nil
		}
		// Everything before "=" is the numeric FIX tag.
		scan.splitter = len(data) - 1
		s := string(data[:scan.splitter])
		i, err := strconv.Atoi(s)
		if err != nil {
			err = fmt.Errorf("parsing FIX tag: %q, substring: %q, error: %w", data[:len(data)-1], data, err)
			return 0, 0, nil, err
		}
		scan.Tag = i
	}
	// Select the codec for this tag; header (9) and trailer (10) tags are
	// only honored when their codec declares a size.
	var codec f0.Codec
	switch scan.Tag {
	// Head 9.
	case f0.BodyLength9:
		if scan.Format.BodyLength9.Size != nil {
			codec = scan.Format.BodyLength9
		}
	// Head+Body.
	default:
		codec = scan.Format.Fields[scan.Tag]
	// Trail 10.
	case f0.CheckSum10:
		if scan.Format.CheckSum10.Size != nil {
			codec = scan.Format.CheckSum10
		}
	}
	if codec == nil {
		codec = scan.Format.Unknown0
	}
	// l is the value length seen so far: total minus "tag=" prefix and the
	// expected trailing SOH byte.
	l := len(data) - scan.splitter - 2
	if l > codec.Sizer().Max() {
		err := fmt.Errorf(
			"unexpected value length %d of the tag %d %q, maximum value length %d, field length %d, field: %q",
			l, scan.Tag, f0.TagText[scan.Tag], codec.Sizer().Max(), len(data), data,
		)
		return 0, 0, nil, err
	}
	// Ask for at least the codec's minimum value length in one go.
	if hint := codec.Sizer().Min() + scan.splitter + 2 - len(data); hint > 0 {
		return hint, 0, nil, nil
	}
	// The field ends with the SOH delimiter (0x01); otherwise keep reading.
	if data[len(data)-1] != 0x01 {
		return 1, 0, nil, nil
	}
	// Record the skipped framing bytes ("tag=" and the SOH) in Gaps.
	scan.Gaps = append(scan.Gaps, data[:scan.splitter+1]...)
	scan.Gaps = append(scan.Gaps, data[len(data)-1:]...)
	// Reset per-field state and emit the value between "=" and SOH.
	splitter := scan.splitter
	scan.splitter = 0
	return 0, len(data), data[splitter+1 : len(data)-1], nil
}
package hexagolang
// Hexagons implementation interpreted from
// https://www.redblobgames.com/grids/hexagons/implementation.html
// and
// https://www.redblobgames.com/grids/hexagons/
import (
"image"
"math"
)
// H is a single hexagon in the grid, stored in axial (Q, R) coordinates.
type H struct {
	Q, R int
}

// Delta converts the hex to a cube-coordinate delta; the third coordinate
// is derived as S = -Q - R.
func (h H) Delta() D {
	return D{h.Q, h.R, -h.Q - h.R}
}

// Neighbor returns the hex one step away in the given direction.
func (h H) Neighbor(d DirectionEnum) H {
	return Add(h, NeighborDelta(d))
}

// Float returns the cube coordinates as float values, ordered (q, -q-r, r).
func (h H) Float() (float64, float64, float64) {
	return float64(h.Q), float64(-h.Q - h.R), float64(h.R)
}
// D is the amount of change between two hexagons, in cube coordinates
// (Q + R + S = 0 for any well-formed delta).
type D struct {
	Q, R, S int
}

// Hex converts the delta to an axial hex, dropping the derived S coordinate.
func (d D) Hex() H {
	return H{d.Q, d.R}
}

// Abs returns the delta with each component replaced by its absolute value.
func (d D) Abs() D {
	return D{
		int(math.Abs(float64(d.Q))),
		int(math.Abs(float64(d.R))),
		int(math.Abs(float64(d.S))),
	}
}
// Add returns the hex reached by applying delta b to hex a (a + b).
func Add(a H, b D) H {
	return H{Q: a.Q + b.Q, R: a.R + b.R}
}

// Subtract returns the delta that moves from hex b to hex a (a - b).
func Subtract(a, b H) D {
	dq, dr := a.Q-b.Q, a.R-b.R
	return D{Q: dq, R: dr, S: -dq - dr}
}

// Multiply scales every component of delta d by the factor k.
func Multiply(d D, k int) D {
	return D{k * d.Q, k * d.R, k * d.S}
}
// RotateClockwise rotates moving one sextant (60°) clockwise around origin.
func RotateClockwise(origin, moving H) H {
	d := Subtract(moving, origin)
	return Add(origin, D{-d.R, -d.S, -d.Q})
}

// RotateCounterClockwise rotates moving one sextant (60°) counter-clockwise
// around origin.
func RotateCounterClockwise(origin, moving H) H {
	d := Subtract(moving, origin)
	return Add(origin, D{-d.S, -d.Q, -d.R})
}
// Length returns the manhattan (hex-grid) distance represented by delta d:
// half the sum of the absolute cube components.
func Length(d D) int {
	a := d.Abs()
	return (a.Q + a.R + a.S) / 2
}
// Direction returns the Direction one point is in comparison to another
// point, chosen by the dominant (largest-magnitude) cube axis of d and the
// sign of that component. Ties prefer Q, then R, then S.
func Direction(d D) DirectionEnum {
	abs := d.Abs()
	if abs.Q >= abs.R && abs.Q >= abs.S {
		if d.Q < 0 {
			return DirectionNegQ
		}
		return DirectionPosQ
	}
	if abs.R >= abs.S {
		if d.R < 0 {
			return DirectionNegR
		}
		return DirectionPosR
	}
	if d.S < 0 {
		return DirectionNegS
	}
	return DirectionPosS
}
// DirectionEnum represents the directions of each of the sides of a hex.
type DirectionEnum int

// String returns the string name of the direction; unknown values render as
// "DirectionUndefined".
func (d DirectionEnum) String() string {
	ret := "DirectionUndefined"
	switch d {
	case DirectionPosQ:
		ret = "DirectionPosQ"
	case DirectionPosR:
		ret = "DirectionPosR"
	case DirectionPosS:
		ret = "DirectionPosS"
	case DirectionNegQ:
		ret = "DirectionNegQ"
	case DirectionNegR:
		ret = "DirectionNegR"
	case DirectionNegS:
		ret = "DirectionNegS"
	}
	return ret
}

// Constants for the directions from a Hex. The iota order must match the
// neighbors table below.
const (
	DirectionPosQ DirectionEnum = iota
	DirectionNegR
	DirectionPosS
	DirectionNegQ
	DirectionPosR
	DirectionNegS
	DirectionUndefined
)

// neighbors holds one unit delta per DirectionEnum value, indexed by iota.
var neighbors = []D{
	{1, 0, -1}, {1, -1, 0}, {0, -1, 1}, // positive
	{-1, 0, 1}, {-1, 1, 0}, {0, 1, -1}, // negative
	{}, // undefined
}

// NeighborDelta returns the delta required to move a single hex in a direction.
func NeighborDelta(d DirectionEnum) D {
	return neighbors[d]
}
// Diagonal represents the direction of each point (corner) of a hex.
type Diagonal int

// String returns the string name of the diagonal; unknown values render as
// "DiagonalUndefined".
func (d Diagonal) String() string {
	ret := "DiagonalUndefined"
	switch d {
	case DiagonalPosQ:
		ret = "DiagonalPosQ"
	case DiagonalPosR:
		ret = "DiagonalPosR"
	case DiagonalPosS:
		ret = "DiagonalPosS"
	case DiagonalNegQ:
		ret = "DiagonalNegQ"
	case DiagonalNegR:
		ret = "DiagonalNegR"
	case DiagonalNegS:
		ret = "DiagonalNegS"
	}
	return ret
}

// Constants for the diagonal from a Hex. The iota order must match the
// diagonals table below.
const (
	DiagonalPosQ Diagonal = iota
	DiagonalNegR
	DiagonalPosS
	DiagonalNegQ
	DiagonalPosR
	DiagonalNegS
	DiagonalUndefined
)

// diagonals holds one two-step delta per Diagonal value, indexed by iota.
var diagonals = []D{
	{2, -1, -1}, {1, -2, 1}, {-1, -1, 2}, // positive
	{-2, 1, 1}, {-1, 2, -1}, {1, 1, -2}, // negative
	{}, // undefined
}

// DiagonalDelta returns the delta required to move a single hex in a direction.
// NOTE(review): the parameter type is DirectionEnum; a Diagonal argument
// appears intended. Changing the signature would break callers, so it is
// left as-is — confirm before relying on it.
func DiagonalDelta(d DirectionEnum) D {
	return diagonals[d]
}
// Line gets the hexagons on a line between two hexes, inclusive of both
// endpoints. It linearly interpolates in cube space in n = distance steps,
// rounding each sample to the nearest hex; samples that land on an already
// visited hex are nudged one step along the line's dominant direction so the
// result contains no duplicates.
func Line(a, b H) []H {
	delta := Subtract(a, b)
	n := Length(delta)
	dir := Direction(delta)
	results := make([]H, 0, n)
	visited := make(map[H]bool, n)
	ax, ay, az := a.Float()
	bx, by, bz := b.Float()
	x, y, z := bx-ax, by-ay, bz-az
	step := 1. / float64(n)
	for h := 0; h <= n; h++ {
		t := step * float64(h)
		pnt := unfloat(ax+x*t, ay+y*t, az+z*t)
		// Skip over hexes already emitted so the line never doubles back.
		for visited[pnt] {
			pnt = pnt.Neighbor(dir)
		}
		results = append(results, pnt)
		visited[pnt] = true
	}
	// Guarantee the destination is part of the line.
	if !visited[b] {
		results = append(results, b)
	}
	return results
}
// Range returns the set of all hexes within manhattan distance rad of h
// (inclusive). For rad < 1 an empty set is returned — note the center itself
// is excluded in that case; presumably intentional, confirm against callers.
func Range(h H, rad int) map[H]bool {
	results := make(map[H]bool, rad*rad)
	if rad < 1 {
		return results
	}
	// Enumerate cube coordinates with x+y+z = 0 inside the radius.
	for x := -rad; x <= rad; x++ {
		for y := intMax(-rad, -x-rad); y <= intMin(rad, -x+rad); y++ {
			z := -x - y
			delta := D{
				Q: int(x),
				R: int(z),
				S: int(y),
			}
			results[Add(h, delta)] = true
		}
	}
	return results
}
// Ring returns the set of hexes at exactly manhattan distance rad from h.
// A ring of radius rad contains 6*rad hexes; rad < 1 yields an empty set.
//
// Bug fix: the previous "if rad > 1" guard skipped the edge walk entirely for
// rad == 1, returning a single hex instead of the six-hex ring.
func Ring(h H, rad int) map[H]bool {
	results := make(map[H]bool)
	if rad < 1 {
		return results
	}
	// Walk to a starting corner rad steps away, then trace the six edges,
	// rad steps each. The map deduplicates the revisited starting hex.
	h = Add(h, Multiply(NeighborDelta(DirectionPosS), rad))
	results[h] = true
	for i := 0; i < 6; i++ {
		for j := 0; j < rad; j++ {
			h = Add(h, NeighborDelta(DirectionEnum(i)))
			results[h] = true
		}
	}
	return results
}
// unfloat rounds fractional cube coordinates (x, y, z) to the nearest valid
// hex. Each component is rounded independently, then the component with the
// largest rounding error is recomputed from the other two so the cube
// invariant x + y + z = 0 holds.
func unfloat(x, y, z float64) H {
	rx, ry, rz := math.Round(x), math.Round(y), math.Round(z)
	dx, dy, dz := math.Abs(rx-x), math.Abs(ry-y), math.Abs(rz-z)
	if dx > dz && dx > dy {
		rx = -rz - ry
	} else if dz > dy {
		rz = -rx - ry
	} else {
		ry = -rx - rz
	}
	// Only the x (Q) and z (R) components are kept in the axial result.
	return H{
		Q: int(math.Round(rx)),
		R: int(math.Round(rz)),
	}
}
// intMax returns the larger of a and b.
func intMax(a, b int) int {
	if a >= b {
		return a
	}
	return b
}

// intMin returns the smaller of a and b.
func intMin(a, b int) int {
	if b < a {
		return b
	}
	return a
}
// Orientation is the orientation of the hexagon map.
type Orientation struct {
	f, b [4]float64 // f is the 2x2 forward matrix (hex -> pixel); b is its inverse (pixel -> hex).
	a    float64    // a is the starting corner angle in sixths of a turn (0.5 for pointy-top, 0 for flat-top).
	c    [6]float64 // c holds the precomputed cosines of the six corner angles.
	s    [6]float64 // s holds the precomputed sines of the six corner angles.
}

// Define the default set of orientations: pointy-top (corners offset by half
// a sextant) and flat-top (corners on whole sextants).
var (
	OrientationPointy Orientation = Orientation{
		f: [4]float64{math.Sqrt(3.), math.Sqrt(3.) / 2., 0.0, 3. / 2.},
		b: [4]float64{math.Sqrt(3.) / 3., -1. / 3., 0.0, 2. / 3.},
		a: 0.5,
		c: [6]float64{
			math.Cos(2. * math.Pi * 0.5 / 6),
			math.Cos(2. * math.Pi * 1.5 / 6),
			math.Cos(2. * math.Pi * 2.5 / 6),
			math.Cos(2. * math.Pi * 3.5 / 6),
			math.Cos(2. * math.Pi * 4.5 / 6),
			math.Cos(2. * math.Pi * 5.5 / 6),
		},
		s: [6]float64{
			math.Sin(2. * math.Pi * 0.5 / 6),
			math.Sin(2. * math.Pi * 1.5 / 6),
			math.Sin(2. * math.Pi * 2.5 / 6),
			math.Sin(2. * math.Pi * 3.5 / 6),
			math.Sin(2. * math.Pi * 4.5 / 6),
			math.Sin(2. * math.Pi * 5.5 / 6),
		},
	}
	OrientationFlat Orientation = Orientation{
		f: [4]float64{3. / 2., 0.0, math.Sqrt(3.) / 2., math.Sqrt(3.)},
		b: [4]float64{2. / 3., 0.0, -1. / 3., math.Sqrt(3.) / 3.},
		a: 0.0,
		c: [6]float64{
			math.Cos(2. * math.Pi * 0. / 6),
			math.Cos(2. * math.Pi * 1. / 6),
			math.Cos(2. * math.Pi * 2. / 6),
			math.Cos(2. * math.Pi * 3. / 6),
			math.Cos(2. * math.Pi * 4. / 6),
			math.Cos(2. * math.Pi * 5. / 6),
		},
		s: [6]float64{
			math.Sin(2. * math.Pi * 0. / 6),
			math.Sin(2. * math.Pi * 1. / 6),
			math.Sin(2. * math.Pi * 2. / 6),
			math.Sin(2. * math.Pi * 3. / 6),
			math.Sin(2. * math.Pi * 4. / 6),
			math.Sin(2. * math.Pi * 5. / 6),
		},
	}
)
// F represents a floating point point, used for polygon drawing functions.
type F struct {
	X, Y float64
}

// Add returns the componentwise sum a + b.
func (a F) Add(b F) F {
	return F{
		X: a.X + b.X,
		Y: a.Y + b.Y,
	}
}

// Subtract returns the componentwise difference a - b.
func (a F) Subtract(b F) F {
	return F{
		X: a.X - b.X,
		Y: a.Y - b.Y,
	}
}

// Multiply returns the componentwise product a * b.
func (a F) Multiply(b F) F {
	return F{
		X: a.X * b.X,
		Y: a.Y * b.Y,
	}
}

// Divide returns the componentwise quotient a / b.
func (a F) Divide(b F) F {
	return F{
		X: a.X / b.X,
		Y: a.Y / b.Y,
	}
}

// AsPoint converts f to an integer pixel point, rounding each component to
// the nearest integer (halves away from zero).
//
// Bug fix: the previous int(f.X + 0.5) truncates toward zero, which
// mis-rounds negative coordinates (e.g. -1.4 became 0 instead of -1).
// math.Round preserves the old behavior for non-negative values.
func AsPoint(f F) image.Point {
	return image.Point{
		X: int(math.Round(f.X)),
		Y: int(math.Round(f.Y)),
	}
}

// FromPoint makes an F from an integer point value.
func FromPoint(p image.Point) F {
	return F{
		X: float64(p.X),
		Y: float64(p.Y),
	}
}
// Layout is the layout of the hex grid.
type Layout struct {
	Radius F // Radius is the radius of the hexagon; supports stretching on X or Y.
	Origin F // Origin is where the center of H{0, 0} will be displayed.
	m      Orientation // m supplies the forward/backward matrices and corner angles.
}

// MakeLayout builds a Layout for rendering on the screen from a hex size,
// the screen position of hex {0,0}, and an orientation (pointy or flat).
func MakeLayout(hexSize F, originCenter F, orientation Orientation) Layout {
	return Layout{
		Radius: hexSize,
		Origin: originCenter,
		m:      orientation,
	}
}
// CenterFor returns the point at the center (as a float) of the hex based on
// the layout: the axial coordinates are transformed by the orientation's
// forward matrix, scaled by Radius, and offset by Origin.
func (l Layout) CenterFor(h H) F {
	q, r :=
		float64(h.Q),
		float64(h.R)
	x := (l.m.f[0]*q + l.m.f[1]*r) * l.Radius.X
	y := (l.m.f[2]*q + l.m.f[3]*r) * l.Radius.Y
	return F{x + l.Origin.X, y + l.Origin.Y}
}
// HexFor returns the hex containing screen point f: the inverse of CenterFor.
// The origin offset is removed, the backward matrix applied, and the
// fractional result rounded to the nearest hex.
func (l Layout) HexFor(f F) H {
	x, y :=
		f.X-l.Origin.X,
		f.Y-l.Origin.Y
	q := (l.m.b[0]*x + l.m.b[1]*y) / l.Radius.X
	r := (l.m.b[2]*x + l.m.b[3]*y) / l.Radius.Y
	return unfloat(q, -q-r, r)
}
// RingFor returns the set of hexes touched by a circle of rad pixels around
// center. It walks the circle with a midpoint-circle style traversal,
// mapping each of the eight symmetric boundary pixels back to a hex. When
// the radius is smaller than a single hex, only center is returned.
func (l Layout) RingFor(center H, rad float64) map[H]bool {
	result := make(map[H]bool, 1)
	if rad < l.Radius.X && rad < l.Radius.Y {
		result[center] = true
		return result
	}
	cp := l.CenterFor(center)
	// P is the midpoint-circle decision variable; pxl walks one octant.
	P := 1 - rad
	pxl := F{rad, 0}
	for ; pxl.X > pxl.Y; pxl.Y++ {
		if P <= 0 {
			P = P + 2*pxl.Y + 1
		} else {
			pxl.X--
			P = P + 2*pxl.Y - 2*pxl.X + 1
		}
		if pxl.X < pxl.Y {
			break
		}
		// Reflect the octant point into all eight symmetric positions.
		points := []F{
			{pxl.X + cp.X, pxl.Y + cp.Y},
			{-pxl.X + cp.X, pxl.Y + cp.Y},
			{pxl.X + cp.X, -pxl.Y + cp.Y},
			{-pxl.X + cp.X, -pxl.Y + cp.Y},
			{pxl.Y + cp.X, pxl.X + cp.Y},
			{-pxl.Y + cp.X, pxl.X + cp.Y},
			{pxl.Y + cp.X, -pxl.X + cp.Y},
			{-pxl.Y + cp.X, -pxl.X + cp.Y},
		}
		for _, v := range points {
			result[l.HexFor(v)] = true
		}
	}
	return result
}
// AreaFor returns every hex covered by a screen-space circle of radius rad
// around center: the boundary ring plus all hexes on lines back to center.
func (l Layout) AreaFor(center H, rad float64) map[H]bool {
	result := make(map[H]bool)
	for edge, ok := range l.RingFor(center, rad) {
		if !ok {
			continue
		}
		result[edge] = true
		for _, inner := range Line(edge, center) {
			result[inner] = true
		}
	}
	return result
}
// Vertices returns the location of all verticies for a given hexagon.
func (l Layout) Vertices(h H) []F {
result := make([]F, 6, 7)
center := l.CenterFor(h)
for k := range result {
result[k] = F{
X: center.X + float64(l.Radius.X)*l.m.c[k],
Y: center.Y + float64(l.Radius.Y)*l.m.s[k],
}
}
result = append(result, center)
return result
} | hex.go | 0.935685 | 0.692304 | hex.go | starcoder |
package main
import "fmt"
/*
Here we'll look at different ways of writing a process.
The implementation will depend highly on how components and ports have been defined.
Overall we need to make decisions:
1. who owns the ports: process or component or connection
2. who owns the data: process or component
3. is it concurrent
4. how do we stop/start the process
5. do we combine the process and component (with/without embedding)
*/
/*
Who owns the ports, is mainly a syntactic issue.
*/
/*
Who owns the data is a question also on how to write components.
By having component own the data, it becomes obvious what data
it is working on, however the component definition becomes longer.
Having process own the data makes it slightly easier to inspect
and the components end up being shorter to write (in some cases).
*/
/*
Is it concurrent is a question about the performance of the system.
It might seem counter-intuitive, but a concurrent system is not necessarily
faster, but it can be slower due to the communication overhead.
If the components are large-grained, such that they don't have to
process that many information packets, then it probably doesn't make
a significant difference.
However, let's say you need to process 1e6 information packets per second
then a communication cost (assuming 50ns per packet) would end up
as 0.05 seconds, which would be a significant portion of that second.
This isn't accounting the thread/goroutine scheduling and cache trashing
that may happen with large graphs.
Similarly, since much of the servers handle requests from many
users. It might make sense to run a single network in a separate
thread/goroutine rather than each component. It would still get the
benefit of parallelism, without the communication overhead.
*/
/*
Stopping and starting the process is a question on whether it should
handle things with context.Context or some other mechanism.
Similarly, how do you introduce exit points for the components.
One approach would be to handle exiting with in and out ports.
This makes the implementation easy, however, it creates a question on
what do you do with inflight messages that's being currently processed.
Alternatively a component could have a "hard-stop" and "graceful-stop"
distinction, where the graceful-stop is specially handled.
Using context.Context would allow nicer integration with Go, for example
the components could use it to make http requests and hence have cancellable
requests as well.
This context.Context could be an explicit parameter or could be
integrated into Process itself -- i.e. Process itself is a context.Context.
However, trying to fit context.Context into the system can introduce additional
complexity. If the system is short-lived then there might not be any significant
benefit to it.
*/
/*
We could flip the dependency and make component embed a process.
This would mean that the network has to deal with the generic structure
and interface. Similarly, it could make handling the process level control
from the network more difficult.
The difficulty and complexity arises, because the network needs to control processes
not components. By pushing the process a level deeper, means there needs to be a way
to access the internal process. Or it would need to expose all the behavior and control
parts.
Although this approach could allow for interesting variations where some processes
are concurrent and some are reactive.
*/
// Process owns the named input ports of a component.
type Process struct {
	In map[string]chan string
}

// Printer is a component that prints every value received on its "in" port.
type Printer struct {
	Process
}

// Execute drains the "in" port, printing each value until the channel closes.
func (printer *Printer) Execute() {
	in := printer.In["in"]
	for {
		value, ok := <-in
		if !ok {
			return
		}
		fmt.Println(value)
	}
}
package cryptolib
import (
"crypto/rand"
"crypto/sha256"
"errors"
"math/big"
"strings"
"golang.org/x/crypto/bn256"
)
// GeneratePairingKey generates a private key and two public keys. Two public
// keys are needed because the pairing maps G1 x G2 -> GT, so the same secret
// scalar is published as a point on both source groups.
func GeneratePairingKey() (priv []byte, g1Pub []byte, g2Pub []byte, err error) {
	// The private key is a uniform scalar modulo the bn256 group order.
	privInt, err := rand.Int(rand.Reader, bn256.Order)
	if err != nil {
		return nil, nil, nil, err
	}
	// Each public key is priv * G on the respective group's base point.
	g1PubInt := new(bn256.G1).ScalarBaseMult(privInt)
	g2PubInt := new(bn256.G2).ScalarBaseMult(privInt)
	priv = privInt.Bytes()
	g1Pub = g1PubInt.Marshal()
	g2Pub = g2PubInt.Marshal()
	return
}
// GenerateCertificate generates a certificate for the commitment.
// priv is the private key of the certificate provider and pubG2Byte the
// public key of the owner of the commitment. The certificate is
// (H(C) + priv)^{-1} * pubG2 on G2, where H is SHA-256.
func GenerateCertificate(commitment []byte, priv []byte, pubG2Byte []byte) ([]byte, error) {
	h := sha256.New()
	h.Write(commitment)
	hash := h.Sum(nil)
	// NOTE(review): this bn256.Unmarshal variant appears to return
	// (*G2, bool); the boolean ok-flag is (misleadingly) named err here.
	pubG2, err := new(bn256.G2).Unmarshal(pubG2Byte)
	if err != true {
		return nil, errors.New("Cannot Unmarshale pubG2Byte")
	}
	//Compute (H(C)+priv)^{-1}
	hashInt := new(big.Int).SetBytes(hash)
	privInt := new(big.Int).SetBytes(priv)
	certInt := new(big.Int).Add(hashInt, privInt)
	certInt = certInt.ModInverse(certInt, bn256.Order)
	//(H(C)+priv)^{-1}*pubG2
	cert := new(bn256.G2).ScalarMult(pubG2, certInt)
	certByte := cert.Marshal()
	return certByte, nil
}
// VerifyCertificate verifies that the certificate is well formed.
// commitment is the commitment used to construct the certificate,
// pubG1Byte is the public key of the certificate provider, and
// pubG2Byte is the public key of the owner of the certificate.
// It checks the pairing equation
//   e(H(C)*G + pubG1CP, certificate) == e(G, pubG2User).
func VerifyCertificate(commitment []byte, certificate []byte, pubG1Byte []byte, pubG2Byte []byte) (bool, error) {
	h := sha256.New()
	h.Write(commitment)
	hash := h.Sum(nil)
	// NOTE(review): the second Unmarshal result is a bool ok-flag named err.
	pubG2, err := new(bn256.G2).Unmarshal(pubG2Byte)
	if err != true {
		return false, errors.New("Cannot Unmarshal pubG2Byte")
	}
	pubG1, err := new(bn256.G1).Unmarshal(pubG1Byte)
	if err != true {
		return false, errors.New("Cannot Unmarshal pubG1Byte")
	}
	certG2, err := new(bn256.G2).Unmarshal(certificate)
	if err != true {
		return false, errors.New("Cannot Unmarshal certificate")
	}
	// We want to verify that e(H(C)*G + pubG1CP, certificate) == e(G, pubG2User)
	//We compute the G1 term on the left equality ie H(C)*G + pubG1CP
	leftG1 := new(bn256.G1).ScalarBaseMult(new(big.Int).SetBytes(hash))
	leftG1 = leftG1.Add(leftG1, pubG1)
	//left term ie e(H(C)*G + pubG1CP, certificate)
	left := bn256.Pair(leftG1, certG2)
	//We compute the right term
	//We get back the generator G by doing a scalarBaseMult of 1
	rightG1 := new(bn256.G1).ScalarBaseMult(new(big.Int).SetInt64(1))
	right := bn256.Pair(rightG1, pubG2)
	// GT elements are compared via their string form.
	if strings.Compare(left.String(), right.String()) != 0 {
		return false, nil
	}
	return true, nil
}
//BlindCertificate generates a random and return the input blinded + the generator blinded and the random factor
/* commitment is the commitment used to generate the certificate
 * certificate is the certificate
 * pubG1Byte is the public key of the certificate provider
 * pubG2Byte is the public key of the user
 * privUserByte is the private key of the user
 *
 * The function output (in order of output):
 * The blinded commitment
 * The blinded certificate
 * The blinded public key of the certificate provider
 * The blinded public key of the user
 * The blinded private key of the user
 * The blinded public generator used in the elliptic curve
 * The random used to blind
 */
func BlindCertificate(commitment []byte, certificate []byte, pubG1Byte []byte, pubG2Byte []byte, privUserByte []byte) ([]byte, []byte, []byte, []byte, []byte, []byte, []byte) {
	// NOTE(review): the rand.Int error and all Unmarshal ok-flags are
	// silently discarded below; malformed inputs cannot be reported because
	// the signature has no error result.
	random, _ := rand.Int(rand.Reader, bn256.Order)
	h := sha256.New()
	h.Write(commitment)
	hashCommitment := h.Sum(nil)
	// Blind the scalar values (hash of commitment, user private key) by
	// multiplying with the random factor.
	commitmentInt := new(big.Int).SetBytes(hashCommitment)
	privUserInt := new(big.Int).SetBytes(privUserByte)
	commitmentInt = commitmentInt.Mul(commitmentInt, random)
	// Blind each group element by scalar-multiplying with the same random.
	certificatePoint, _ := new(bn256.G2).Unmarshal(certificate)
	certificatePoint = certificatePoint.ScalarMult(certificatePoint, random)
	certificateByte := certificatePoint.Marshal()
	pubG1Point, _ := new(bn256.G1).Unmarshal(pubG1Byte)
	pubG1Point = pubG1Point.ScalarMult(pubG1Point, random)
	g1Byte := pubG1Point.Marshal()
	pubG2Point, _ := new(bn256.G2).Unmarshal(pubG2Byte)
	pubG2Point = pubG2Point.ScalarMult(pubG2Point, random)
	g2Byte := pubG2Point.Marshal()
	privUserInt = privUserInt.Mul(privUserInt, random)
	// The blinded generator is random * G on G1.
	generator := new(bn256.G1).ScalarBaseMult(random)
	generatorByte := generator.Marshal()
	return commitmentInt.Bytes(), certificateByte, g1Byte, g2Byte, privUserInt.Bytes(), generatorByte, random.Bytes()
}
// VerifyBlindCertificate verifies that the blinded certificate is correct,
// i.e. e(b*H(C)*G + b*pubG1CP, b*certificate) == e(b*G, b*pubG2User),
// where b is the blinding factor applied by BlindCertificate.
func VerifyBlindCertificate(blindCommitment []byte, blindCertificate []byte, blindPubG1Byte []byte, blindPubG2Byte []byte, blindGenerator []byte) (bool, error) {
	/****** Convert []byte to G1 and G2 bn256 point *****/
	// Here the Unmarshal ok-flag is named b and checked explicitly.
	blindPubG1, b := new(bn256.G1).Unmarshal(blindPubG1Byte)
	if b != true {
		return false, errors.New("Error during unmarshal pubG1")
	}
	blindGeneratorPoint, b := new(bn256.G1).Unmarshal(blindGenerator)
	if b != true {
		return false, errors.New("Error during unmarshal generator")
	}
	blindPubG2, b := new(bn256.G2).Unmarshal(blindPubG2Byte)
	if b != true {
		return false, errors.New("Error during unmarshal pubG2")
	}
	blindCertificatePoint, b := new(bn256.G2).Unmarshal(blindCertificate)
	if b != true {
		return false, errors.New("Error during unmarshal certificate")
	}
	//Compute b*H(C)*G1
	blindCommitmentInt := new(big.Int).SetBytes(blindCommitment)
	leftG1 := new(bn256.G1).ScalarBaseMult(blindCommitmentInt)
	//Compute b*H(C) + b*pubG1CP)
	leftG1 = leftG1.Add(leftG1, blindPubG1)
	// Compare the two pairings via their GT string forms.
	left := bn256.Pair(leftG1, blindCertificatePoint)
	right := bn256.Pair(blindGeneratorPoint, blindPubG2)
	if strings.Compare(left.String(), right.String()) != 0 {
		return false, nil
	}
	return true, nil
}
package nanocms_compiler
import (
"fmt"
"reflect"
"github.com/go-yaml/yaml"
)
// OTree is an object tree (map) that preserves key insertion order by
// keeping the keys in a parallel index slice.
type OTree struct {
	_data map[interface{}]interface{}
	_kidx []interface{}
}

// NewOTree returns a freshly initialized, empty ordered tree.
func NewOTree() *OTree {
	tree := &OTree{}
	return tree.Flush()
}

// Flush discards all content of the tree and returns the receiver.
func (tree *OTree) Flush() *OTree {
	tree._data = map[interface{}]interface{}{}
	tree._kidx = []interface{}{}
	return tree
}
// LoadMapSlice loads a yaml.MapSlice object (which preserves YAML key order)
// into the tree. Nested slices are converted recursively via getMapSlice;
// only slice and string values are accepted at the top level — any other
// kind panics.
func (tree *OTree) LoadMapSlice(data yaml.MapSlice) *OTree {
	for _, item := range data {
		kind := reflect.TypeOf(item.Value).Kind()
		switch kind {
		case reflect.Slice:
			tree.Set(item.Key, tree.getMapSlice(item.Value.(yaml.MapSlice), nil))
		case reflect.String:
			tree.Set(item.Key, item.Value)
		default:
			panic(fmt.Errorf("Unknown type '%s' while loading state", kind))
		}
	}
	return tree
}
// getArray converts a YAML array into a []interface{}, recursively turning
// nested yaml.MapSlice elements into *OTree subtrees. Elements that are
// neither strings nor slices cause a panic.
func (tree *OTree) getArray(data interface{}) []interface{} {
	cnt := make([]interface{}, 0)
	for _, element := range data.([]interface{}) {
		switch reflect.TypeOf(element).Kind() {
		case reflect.String:
			cnt = append(cnt, element.(string))
		case reflect.Slice:
			cnt = append(cnt, tree.getMapSlice(element.(yaml.MapSlice), nil))
		default:
			panic(fmt.Sprintf("Value %s has an unsupported type %s", element, reflect.TypeOf(element)))
		}
	}
	return cnt
}
// getMapSlice recursively converts a yaml.MapSlice into an ordered *OTree.
// cnt is the target subtree; pass nil to allocate a fresh one. Plain arrays
// (slices of interface{}) are handled by getArray, nested mappings recurse,
// strings/bools are stored verbatim, nil values are kept as nil keys, and
// any other kind panics.
func (tree *OTree) getMapSlice(data yaml.MapSlice, cnt *OTree) *OTree {
	if cnt == nil {
		cnt = NewOTree()
	}
	for _, item := range data {
		if item.Value != nil {
			kind := reflect.TypeOf(item.Value).Kind()
			switch kind {
			case reflect.Slice:
				// Distinguish a generic []interface{} array from a nested
				// yaml.MapSlice mapping by the slice's element type.
				i_val_t := reflect.TypeOf(item.Value)
				if i_val_t.Kind() == reflect.Slice && i_val_t.Elem().Kind() == reflect.Interface {
					cnt.Set(item.Key, tree.getArray(item.Value))
				} else {
					cnt.Set(item.Key, tree.getMapSlice(item.Value.(yaml.MapSlice), nil))
				}
			case reflect.String:
				cnt.Set(item.Key, item.Value)
			case reflect.Bool:
				cnt.Set(item.Key, item.Value)
			default:
				panic(fmt.Errorf("Unknown type '%s' while loading state", kind))
			}
		} else {
			cnt.Set(item.Key, nil)
		}
	}
	return cnt
}
// Set stores value under key, recording the key's insertion order the first
// time it is seen. It returns the receiver for chaining.
func (tree *OTree) Set(key interface{}, value interface{}) *OTree {
	if !tree.Exists(key) {
		tree._kidx = append(tree._kidx, key)
	}
	tree._data[key] = value
	return tree
}
// Get returns the value stored under key, or bydefault when the key is
// absent. (Uses a single comma-ok map lookup instead of Exists + lookup.)
func (tree *OTree) Get(key interface{}, bydefault interface{}) interface{} {
	if value, ok := tree._data[key]; ok {
		return value
	}
	return bydefault
}
// GetBranch returns the subtree stored at key. If the key is missing or the
// stored value is not an *OTree, nil is returned.
//
// Bug fix: the previous reflect-based check called TypeOf(obj).Elem(), which
// panics when the stored value is a non-pointer kind (string, slice, ...),
// and the blind obj.(*OTree) assertion panicked for other pointer types.
// A comma-ok type assertion handles every case safely.
func (tree *OTree) GetBranch(key interface{}) *OTree {
	if branch, ok := tree.Get(key, nil).(*OTree); ok {
		return branch
	}
	return nil
}
// GetList returns the value at key as a []interface{}. If the key is missing
// or the value has any other type, nil is returned.
//
// Bug fix: the previous code called reflect.TypeOf(obj).Kind() on a possibly
// nil obj (panic on missing keys) and blindly asserted obj.([]interface{})
// for any slice kind (panic on e.g. []string). The comma-ok assertion is
// safe for all stored values.
func (tree *OTree) GetList(key interface{}) []interface{} {
	if list, ok := tree.Get(key, nil).([]interface{}); ok {
		return list
	}
	return nil
}
// GetString returns the string stored at key, or "" when the key is missing
// or holds a non-string value.
//
// Bug fix: the previous implementation blindly asserted .(string) and
// panicked on a missing key or non-string value (a shortcoming its own
// comment acknowledged).
func (tree *OTree) GetString(key interface{}) string {
	if s, ok := tree.Get(key, nil).(string); ok {
		return s
	}
	return ""
}
// Exists reports whether key is present in the tree.
func (tree *OTree) Exists(key interface{}) bool {
	_, found := tree._data[key]
	return found
}
// Delete removes key from both the data map and the order index; it is a
// no-op when the key is absent. The receiver is returned for chaining.
func (tree *OTree) Delete(key interface{}) *OTree {
	if !tree.Exists(key) {
		return tree
	}
	for i, indexed := range tree._kidx {
		if indexed != key {
			continue
		}
		delete(tree._data, key)
		tree._kidx = append(tree._kidx[:i], tree._kidx[i+1:]...)
		break
	}
	return tree
}
// Keys returns the tree's keys in insertion order. The underlying index
// slice is returned directly, so callers must not mutate it.
func (tree *OTree) Keys() []interface{} {
	return tree._kidx
}

// Items is intended to return the tree's key/value pairs; it is currently
// unimplemented and always returns nil.
func (tree *OTree) Items() [][]interface{} {
	return nil
}
// _to_structure recursively converts tree content into plain Go structures
// (map[string]interface{}, []interface{}, string, bool) suitable for YAML
// marshalling. cnt is the target map; pass nil to allocate one. Keys are
// assumed to be strings (the assertions panic otherwise — TODO confirm all
// keys are strings). Unsupported kinds are reported to stdout and yield the
// (possibly empty) map. Note: ranging over the plain map branch loses key
// order.
func (tree *OTree) _to_structure(cnt map[string]interface{}, obj interface{}) interface{} {
	if obj == nil {
		return nil
	}
	if cnt == nil {
		cnt = make(map[string]interface{})
	}
	objType := reflect.TypeOf(obj).Kind()
	if objType == reflect.Ptr {
		// Pointer values are assumed to be *OTree subtrees.
		for _, obj_k := range obj.(*OTree).Keys() {
			cnt[obj_k.(string)] = tree._to_structure(nil, obj.(*OTree).Get(obj_k, nil))
		}
	} else if objType == reflect.Map {
		for obj_k := range obj.(map[interface{}]interface{}) {
			cnt[obj_k.(string)] = tree._to_structure(nil, obj.(map[interface{}]interface{})[obj_k])
		}
	} else if objType == reflect.Slice {
		arr := make([]interface{}, 0)
		for _, element := range obj.([]interface{}) {
			arr = append(arr, tree._to_structure(nil, element))
		}
		return arr
	} else if objType == reflect.String {
		return obj.(string)
	} else if objType == reflect.Bool {
		return obj.(bool)
	} else {
		fmt.Println("unsupported DSL type:", objType)
	}
	return cnt
}
// ToYAML exports the ordered tree to YAML. Note the key ordering is NOT
// preserved in the output: _to_structure flattens into plain maps and the
// Marshal error is discarded (an error yields an empty string).
func (tree *OTree) ToYAML() string {
	obj := tree._to_structure(nil, tree._data)
	data, _ := yaml.Marshal(&obj)
	return string(data)
}

// Serialise converts the tree into a plain (unordered) map of string keys to
// plain Go values, returning a shallow copy of the converted top level.
func (tree *OTree) Serialise() map[string]interface{} {
	obj := tree._to_structure(nil, tree._data)
	shallowObj := make(map[string]interface{})
	for k, v := range obj.(map[string]interface{}) {
		shallowObj[k] = v
	}
	return shallowObj
}
package data
import (
"bufio"
"bytes"
"regexp"
"strconv"
"strings"
)
type ParseSeq func (seqType SeqType, data []byte) []string
// ParseSequences parses emoji sequences of the specified type from the specified data.
// Note that prior to version 3.0, type information is not included in the sequence data
// files, and ParseSequencesLegacy should be used (with the appropriate file) instead.
func ParseSequences(seqType SeqType, data []byte) []string {
	re := seqTypeRegexp(seqType)
	return ParseSequencesMatching(re, data)
}
// ParseSequencesLegacy parses emoji sequences for Emoji versions 1.0 and 2.0.
// Note that in Emoji 1.0, all sequences are in the main data file (filetype
// Data); for Emoji 2.0, all sequences are in the main sequences file
// (filetype Sequences), with no subfiles for variation sequences, ZWJ
// sequences, etc. Unknown sequence types yield nil.
func ParseSequencesLegacy(seqType SeqType, data []byte) []string {
	re, ok := legacySeqTypeRegexp(seqType)
	if !ok {
		return nil
	}
	return ParseSequencesMatching(re, data)
}
// ParseSequencesMatching parses emoji sequences from the data lines matching
// the specified regexp; comment lines and unparsable lines are skipped by
// toSeq.
func ParseSequencesMatching(re *regexp.Regexp, data []byte) []string {
	var result []string
	scanner := bufio.NewScanner(bytes.NewReader(data))
	for scanner.Scan() {
		line := scanner.Text()
		if !re.MatchString(line) {
			continue
		}
		if seq, ok := toSeq(line); ok {
			result = append(result, seq)
		}
	}
	return result
}
// ------------------------------------------------------------
// Unexported symbols
// toSeq extracts the code point sequence from a single data-file line.
// It returns false for comment lines ("#"-prefixed), blank lines, lines that
// do not match seqRegexp, and lines whose code points fail to parse.
func toSeq(line string) (string, bool) {
	if strings.HasPrefix(line, "#") || strings.TrimSpace(line) == "" {
		return "", false
	}
	seqMatch := seqRegexp.FindStringSubmatch(line)
	// NOTE(review): a submatch length of 1 implies seqRegexp has no capture
	// groups, so the whole match is the space-separated code point list —
	// confirm against seqRegexp's definition.
	if len(seqMatch) == 1 {
		seq, err := parseSeq(strings.Split(seqMatch[0], " "))
		if err != nil {
			return "", false
		}
		return seq, true
	}
	return "", false
}
// parseSeq converts a slice of hexadecimal code point strings into the
// corresponding string of runes, returning the first parse error encountered.
func parseSeq(seq []string) (string, error) {
	runes := make([]rune, 0, len(seq))
	for _, s := range seq {
		val, err := strconv.ParseInt(s, 16, 64)
		if err != nil {
			return "", err
		}
		runes = append(runes, rune(val))
	}
	return string(runes), nil
}
package tmuxfmt
import (
"fmt"
"strconv"
"strings"
)
// Value receives a value from the tmux output as a string and parses it.
type Value interface {
	// Set parses the given string and stores the result in the receiver,
	// returning an error if the string cannot be parsed.
	Set(string) error
}
// captureExpr pairs a tmuxfmt expression with the Value that will receive
// the expression's rendered output.
type captureExpr struct {
	Expr  Expr  // expression rendered into the tmux message
	Value Value // destination that parses the corresponding output column
}
// Capturer captures the output of tmuxfmt expressions into Go values.
type Capturer struct {
	exprs []captureExpr // registered expression/value pairs, in output order
}
// Prepare prepares the specified expressions into a tmuxfmt message. The
// returned capture function will parse the resultant text and fill the
// previously recorded pointers.
//
// The message joins the rendered expressions with tabs, so the capture
// function splits the tmux output on tabs, one column per expression.
// (Fixed the misspelled named return "capure" -> "capture".)
func (c *Capturer) Prepare() (msg string, capture func([]byte) error) {
	exprs := c.exprs
	rendered := make([]string, len(exprs))
	for i, e := range c.exprs {
		rendered[i] = Render(e.Expr)
	}
	return strings.Join(rendered, "\t"), func(bs []byte) error {
		for i, s := range strings.Split(string(bs), "\t") {
			if i >= len(exprs) {
				// More columns than registered expressions: ignore extras.
				break
			}
			s = strings.TrimSpace(s)
			if err := exprs[i].Value.Set(s); err != nil {
				return fmt.Errorf("capture %q: %w", rendered[i], err)
			}
		}
		return nil
	}
}
// Var records that the output of the given tmuxfmt expression should be loaded
// into the specified value.
func (c *Capturer) Var(v Value, e Expr) {
	ce := captureExpr{Expr: e, Value: v}
	c.exprs = append(c.exprs, ce)
}
// StringVar specifies that the output of the provided expression should fill
// this string pointer.
func (c *Capturer) StringVar(ptr *string, e Expr) {
	v := (*stringValue)(ptr)
	c.Var(v, e)
}
// stringValue adapts a *string so it satisfies the Value interface.
type stringValue string

// Set stores the raw string without any conversion; it never fails.
func (v *stringValue) Set(s string) error {
	*v = stringValue(s)
	return nil
}
// IntVar specifies that the output of the provided expression should be parsed
// as an integer and fill this integer pointer.
func (c *Capturer) IntVar(ptr *int, e Expr) {
	v := (*intValue)(ptr)
	c.Var(v, e)
}
// intValue adapts an *int so it satisfies the Value interface.
type intValue int

// Set parses s as a base-10 integer. On parse failure the previous value is
// left untouched and the parse error is returned.
func (v *intValue) Set(s string) error {
	n, err := strconv.Atoi(s)
	if err != nil {
		return err
	}
	*v = intValue(n)
	return nil
}
// BoolVar specifies that the output of the provided expression should be
// parsed as a boolean and fill this boolean pointer.
func (c *Capturer) BoolVar(ptr *bool, e Expr) {
	v := (*boolValue)(ptr)
	c.Var(v, e)
}
// boolValue adapts a *bool so it satisfies the Value interface.
type boolValue bool

// Set interprets tmux boolean output: the empty string and "0" are false,
// everything else is true. It never fails.
func (v *boolValue) Set(s string) error {
	*(*bool)(v) = len(s) > 0 && s != "0"
	return nil
} | internal/tmux/tmuxfmt/capture.go | 0.665628 | 0.502991 | capture.go | starcoder
package router
import (
"math/rand"
"time"
"github.com/streamingfast/dmesh"
"go.uber.org/zap"
"google.golang.org/grpc"
)
// dmeshPlanner is the engine that dispatches queries to the different
// backend nodes, based on the state of the available services
// (through the `dmesh` discovery package), and the range of an
// incoming query.
// getSeed returns the seed used for random peer selection. It is a package
// variable so tests can inject a deterministic seed.
var getSeed func() int64

func init() {
	// Default seed: current Unix time in seconds.
	// NOTE(review): NextPeer seeds a fresh RNG per call, so calls within the
	// same second select the same peer — confirm this is intended.
	getSeed = func() int64 {
		return time.Now().Unix()
	}
}
// Planner selects the next backend peer able to serve a block range query.
type Planner interface {
	// NextPeer returns the chosen peer together with the effective block
	// range it will serve, or nil when no ready peer can serve the query.
	NextPeer(lowBlockNum uint64, highBlockNum uint64, descending bool, withReversible bool) *PeerRange
}
// dmeshPlanner implements Planner on top of the dmesh discovery service.
type dmeshPlanner struct {
	peers              func() []*dmesh.SearchPeer // fetches the current set of known peers
	headDelayTolerance uint64                     // how many blocks a reversible peer may lag behind the query
}
// NewDmeshPlanner returns a planner that picks peers from peerFetcher and
// tolerates live peers lagging by up to liveDriftThreshold blocks.
func NewDmeshPlanner(peerFetcher func() []*dmesh.SearchPeer, liveDriftThreshold uint64) *dmeshPlanner {
	p := &dmeshPlanner{}
	p.peers = peerFetcher
	p.headDelayTolerance = liveDriftThreshold
	return p
}
// NextPeer picks a ready peer able to serve the requested range, preferring
// the highest tier level and choosing randomly among peers of that tier.
// It returns nil when no candidate exists.
func (s *dmeshPlanner) NextPeer(lowBlockNum uint64, highBlockNum uint64, descending bool, withReversible bool) *PeerRange {
	zlog.Debug("finding peers for range",
		zap.Uint64("low_block_num", lowBlockNum),
		zap.Uint64("high_block_num", highBlockNum),
		zap.Bool("descending", descending),
		zap.Bool("with_reversible", withReversible))

	ready := getReadyPeers(s.peers())

	// Collect every ready peer able to serve the range, tracking the best
	// (highest) tier level seen along the way.
	var candidates []*dmesh.SearchPeer
	bestTier := uint32(0)
	for _, p := range ready {
		if !peerCanServeRange(p, withReversible, descending, highBlockNum, lowBlockNum, s.headDelayTolerance) {
			continue
		}
		candidates = append(candidates, p)
		if p.TierLevel > bestTier {
			bestTier = p.TierLevel
		}
	}
	if len(candidates) == 0 {
		return nil
	}

	// Restrict the choice to peers of the best tier, then pick one at random.
	var best []*dmesh.SearchPeer
	for _, p := range candidates {
		if p.TierLevel == bestTier {
			best = append(best, p)
		}
	}
	idx := rand.New(rand.NewSource(getSeed())).Int() % len(best)
	chosen := best[idx]
	zlog.Debug("dmesh planner peer selected", zap.Reflect("search_peer", chosen), zap.Any("highest_tier_peers", best), zap.Any("all_candidate_peers", candidates))
	return getPeerRange(lowBlockNum, highBlockNum, chosen, descending, withReversible)
}
// PeerRange describes a selected peer together with the block range it will
// effectively serve for a given query.
type PeerRange struct {
	Conn             *grpc.ClientConn // live gRPC connection to the peer
	Addr             string           // peer address, for logging/diagnostics
	LowBlockNum      uint64           // inclusive low bound the peer will serve
	HighBlockNum     uint64           // inclusive high bound the peer will serve
	ServesReversible bool             // whether the peer serves reversible blocks
}
// NewPeerRange builds a PeerRange for the given peer covering
// [lowBlockNum, highBlockNum].
func NewPeerRange(peer *dmesh.SearchPeer, lowBlockNum, highBlockNum uint64) *PeerRange {
	pr := &PeerRange{}
	pr.Conn = peer.Conn()
	pr.Addr = peer.Addr()
	pr.LowBlockNum = lowBlockNum
	pr.HighBlockNum = highBlockNum
	pr.ServesReversible = peer.ServesReversible
	return pr
}
// getPeerLastBlockNum returns the peer's virtual head: the head block when the
// peer serves reversible blocks and the query wants them, otherwise the last
// irreversible block.
func getPeerLastBlockNum(peer *dmesh.SearchPeer, withReversible bool) uint64 {
	if withReversible && peer.ServesReversible {
		return peer.HeadBlock
	}
	return peer.IrrBlock
}
// getPeerRange computes the effective block range the peer will serve for the
// requested query range, clamping to what the peer actually holds.
func getPeerRange(lowBlockNum uint64, highBlockNum uint64, peer *dmesh.SearchPeer, descending bool, withReversible bool) *PeerRange {
	if descending {
		// Descending: keep the requested high bound; raise the low bound to
		// the peer's tail when the peer does not hold blocks that far back.
		low := peer.TailBlock
		if low < lowBlockNum {
			low = lowBlockNum
		}
		return NewPeerRange(peer, low, highBlockNum)
	}

	// Ascending.
	high := highBlockNum
	if !peer.ServesReversible {
		// An irreversible-only peer can only serve up to its virtual head,
		// so the query's high bound is capped there.
		if vh := getPeerLastBlockNum(peer, withReversible); vh < highBlockNum {
			high = vh
		}
	}
	// A reversible-serving peer continues until the desired high block num.
	return NewPeerRange(peer, lowBlockNum, high)
}
// peerCanServeRange reports whether the peer covers the query range, allowing
// reversible-serving peers to lag behind by up to headDelayTolerance blocks.
func peerCanServeRange(peer *dmesh.SearchPeer, withReversible bool, descending bool, highBlockNum uint64, lowBlockNum uint64, headDelayTolerance uint64) bool {
	peerLowBlockNum := peer.TailBlock
	peerHighBlockNum := getPeerLastBlockNum(peer, withReversible)
	// It is possible that when you query a moving head backend (like a live) that has
	// drifted, the virtual head of said backend may be below the
	// query's low block num, successfully serviced by non-lagging previous peers. We would want to let
	// that query go through when it is within a certain block threshold, with the assumption that the
	// backend will 'catch up' with the request's low block num.
	// TODO: when evaluating search archive backend... do we want to do this check? rather then take one backend where you would not wait
	if descending {
		if peer.ServesReversible {
			return (peerHighBlockNum+headDelayTolerance) >= highBlockNum && peerLowBlockNum <= highBlockNum
		}
		return peerHighBlockNum >= highBlockNum && peerLowBlockNum <= highBlockNum
	}
	if peer.ServesReversible {
		return peerLowBlockNum <= lowBlockNum && (peerHighBlockNum+headDelayTolerance) >= lowBlockNum
	}
	return peerLowBlockNum <= lowBlockNum && peerHighBlockNum >= lowBlockNum
} | router/planner.go | 0.608361 | 0.462109 | planner.go | starcoder
package secp256k1
import (
"crypto/ecdsa"
"errors"
"fmt"
"math/big"
)
// These constants define the lengths of serialized public keys.
const (
	PubKeyBytesLenCompressed   = 33 // format byte + 32-byte X coordinate
	PubKeyBytesLenUncompressed = 65 // format byte + 32-byte X + 32-byte Y
	PubKeyBytesLenHybrid       = 65 // format byte (carries Y parity) + 32-byte X + 32-byte Y
)
func isOdd(a *big.Int) bool {
return a.Bit(0) == 1
}
// decompressPoint recovers the Y coordinate for the given X coordinate on the
// curve, choosing the square root whose parity matches ybit.
func decompressPoint(curve *KoblitzCurve, x *big.Int, ybit bool) (*big.Int, error) {
	// TODO: This will probably only work for secp256k1 due to optimizations.

	// rhs = x^3 + B mod P. Y = +-sqrt(rhs).
	rhs := new(big.Int).Mul(x, x)
	rhs.Mul(rhs, x)
	rhs.Add(rhs, curve.Params().B)
	rhs.Mod(rhs, curve.Params().P)

	// Square root mod p via exponentiation by (P+1)/4. This replaced a full
	// tonelli/shanks sqrt; see
	// https://bitcointalk.org/index.php?topic=162805.msg1712294#msg1712294
	y := new(big.Int).Exp(rhs, curve.QPlus1Div4(), curve.Params().P)
	if ybit != isOdd(y) {
		y.Sub(curve.Params().P, y)
	}

	// Confirm the candidate really squares back to rhs; otherwise no curve
	// point with this X coordinate exists.
	ySquared := new(big.Int).Mul(y, y)
	ySquared.Mod(ySquared, curve.Params().P)
	if ySquared.Cmp(rhs) != 0 {
		return nil, fmt.Errorf("invalid square root")
	}

	// Verify that y-coord has expected parity.
	if ybit != isOdd(y) {
		return nil, fmt.Errorf("ybit doesn't match oddness")
	}
	return y, nil
}
// Serialization format bytes; the low bit carries the Y parity for the
// compressed and hybrid forms.
const (
	pubkeyCompressed   byte = 0x2 // y_bit + x coord
	pubkeyUncompressed byte = 0x4 // x coord + y coord
	pubkeyHybrid       byte = 0x6 // y_bit + x coord + y coord
)
// IsCompressedPubKey returns true if the passed serialized public key has
// been encoded in compressed format, and false otherwise.
func IsCompressedPubKey(pubKey []byte) bool {
	if len(pubKey) != PubKeyBytesLenCompressed {
		return false
	}
	// Mask off the parity bit; the remaining format byte must be 0x02.
	return pubKey[0]&^byte(0x1) == pubkeyCompressed
}
// ParsePubKey parses a public key for a koblitz curve from a bytestring into a
// ecdsa.Publickey, verifying that it is valid. It supports compressed,
// uncompressed and hybrid signature formats.
func ParsePubKey(pubKeyStr []byte, curve *KoblitzCurve) (key *PublicKey, err error) {
	if len(pubKeyStr) == 0 {
		return nil, errors.New("pubkey string is empty")
	}

	pubkey := PublicKey{Curve: curve}

	format := pubKeyStr[0]
	ybit := (format & 0x1) == 0x1
	format &= ^byte(0x1)

	switch len(pubKeyStr) {
	case PubKeyBytesLenUncompressed:
		if format != pubkeyUncompressed && format != pubkeyHybrid {
			return nil, fmt.Errorf("invalid magic in pubkey str: "+
				"%d", pubKeyStr[0])
		}
		pubkey.X = new(big.Int).SetBytes(pubKeyStr[1:33])
		pubkey.Y = new(big.Int).SetBytes(pubKeyStr[33:])
		// Hybrid keys carry the Y parity explicitly; verify it agrees.
		if format == pubkeyHybrid && ybit != isOdd(pubkey.Y) {
			return nil, fmt.Errorf("ybit doesn't match oddness")
		}
	case PubKeyBytesLenCompressed:
		// Compressed: (0x2 | parity) followed by the X coordinate; Y is
		// recovered from y^2 = x^3 + Curve.B using the parity bit.
		if format != pubkeyCompressed {
			return nil, fmt.Errorf("invalid magic in compressed "+
				"pubkey string: %d", pubKeyStr[0])
		}
		pubkey.X = new(big.Int).SetBytes(pubKeyStr[1:33])
		pubkey.Y, err = decompressPoint(curve, pubkey.X, ybit)
		if err != nil {
			return nil, err
		}
	default:
		return nil, fmt.Errorf("invalid pub key length %d",
			len(pubKeyStr))
	}

	// Both coordinates must be field elements and the point must lie on the
	// curve for the key to be valid.
	if pubkey.X.Cmp(pubkey.Curve.Params().P) >= 0 {
		return nil, fmt.Errorf("pubkey X parameter is >= to P")
	}
	if pubkey.Y.Cmp(pubkey.Curve.Params().P) >= 0 {
		return nil, fmt.Errorf("pubkey Y parameter is >= to P")
	}
	if !pubkey.Curve.IsOnCurve(pubkey.X, pubkey.Y) {
		return nil, fmt.Errorf("pubkey isn't on secp256k1 curve")
	}
	return &pubkey, nil
}
// PublicKey is an ecdsa.PublicKey with additional functions to
// serialize in uncompressed, compressed, and hybrid formats.
type PublicKey ecdsa.PublicKey
// ToECDSA returns the public key as a *ecdsa.PublicKey.
func (p *PublicKey) ToECDSA() *ecdsa.PublicKey {
	// PublicKey has the identical memory layout, so this is a direct
	// pointer conversion with no copy.
	return (*ecdsa.PublicKey)(p)
}
// SerializeUncompressed serializes a public key in a 65-byte uncompressed
// format: 0x04 || X (32 bytes) || Y (32 bytes).
func (p *PublicKey) SerializeUncompressed() []byte {
	out := make([]byte, 0, PubKeyBytesLenUncompressed)
	out = append(out, pubkeyUncompressed)
	out = paddedAppend(32, out, p.X.Bytes())
	out = paddedAppend(32, out, p.Y.Bytes())
	return out
}
// SerializeCompressed serializes a public key in a 33-byte compressed format:
// (0x02 | Y parity) || X (32 bytes).
func (p *PublicKey) SerializeCompressed() []byte {
	prefix := pubkeyCompressed
	if isOdd(p.Y) {
		prefix |= 0x1
	}
	out := make([]byte, 0, PubKeyBytesLenCompressed)
	out = append(out, prefix)
	return paddedAppend(32, out, p.X.Bytes())
}
// SerializeHybrid serializes a public key in a 65-byte hybrid format:
// (0x06 | Y parity) || X (32 bytes) || Y (32 bytes).
func (p *PublicKey) SerializeHybrid() []byte {
	prefix := pubkeyHybrid
	if isOdd(p.Y) {
		prefix |= 0x1
	}
	out := make([]byte, 0, PubKeyBytesLenHybrid)
	out = append(out, prefix)
	out = paddedAppend(32, out, p.X.Bytes())
	return paddedAppend(32, out, p.Y.Bytes())
}
// IsEqual reports whether both public keys are equivalent, i.e. have the
// same X and Y coordinates.
func (p *PublicKey) IsEqual(otherPubKey *PublicKey) bool {
	if p.X.Cmp(otherPubKey.X) != 0 {
		return false
	}
	return p.Y.Cmp(otherPubKey.Y) == 0
}
// paddedAppend appends the src byte slice to dst, returning the new slice.
// If the length of the source is smaller than the passed size, leading zero
// bytes are appended to the dst slice before appending src.
// If src is longer than size it is appended as-is, unpadded and untruncated.
func paddedAppend(size uint, dst, src []byte) []byte {
	for i := 0; i < int(size)-len(src); i++ {
		dst = append(dst, 0)
	}
	return append(dst, src...)
} | pubkey.go | 0.658527 | 0.531635 | pubkey.go | starcoder
package rds
import (
"database/sql/driver"
"fmt"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/service/rdsdataservice"
"reflect"
"time"
)
// FieldConverter is a function that converts the passed result row field into
// the expected Go type, returning an error when the field cannot be converted.
type FieldConverter func(field *rdsdataservice.Field) (interface{}, error)
// Dialect is an interface that encapsulates a particular languages' eccentricities.
type Dialect interface {
	// MigrateQuery translates the query and its named arguments from the
	// dialect to an RDS Data API ExecuteStatementInput.
	MigrateQuery(string, []driver.NamedValue) (*rdsdataservice.ExecuteStatementInput, error)
	// GetFieldConverter returns the converter for a given
	// ColumnMetadata.TypeName field.
	GetFieldConverter(columnType string) FieldConverter
	// IsIsolationLevelSupported reports whether this dialect supports the
	// given transaction isolation level.
	IsIsolationLevelSupported(level driver.IsolationLevel) bool
}
// ConvertNamedValues converts passed driver.NamedValue instances into RDS
// SQLParameters, failing on the first argument that cannot be converted.
func ConvertNamedValues(args []driver.NamedValue) ([]*rdsdataservice.SqlParameter, error) {
	params := make([]*rdsdataservice.SqlParameter, 0, len(args))
	for _, arg := range args {
		p, err := ConvertNamedValue(arg)
		if err != nil {
			return nil, err
		}
		params = append(params, p)
	}
	return params, nil
}
// ConvertNamedValue converts a single driver.NamedValue into an RDS
// SqlParameter, mapping Go types onto the Data API field kinds: strings to
// StringValue, []byte to BlobValue, bools to BooleanValue, floats to
// DoubleValue, integers to LongValue, time.Time to a formatted StringValue,
// and nil (or a typed nil, per isNil) to an IsNull field. Unsupported types
// produce an error.
func ConvertNamedValue(arg driver.NamedValue) (value *rdsdataservice.SqlParameter, err error) {
	name := arg.Name
	if isNil(arg.Value) {
		value = &rdsdataservice.SqlParameter{
			Name:  &name,
			Value: &rdsdataservice.Field{IsNull: aws.Bool(true)},
		}
		return
	}
	switch t := arg.Value.(type) {
	case string:
		value = &rdsdataservice.SqlParameter{
			Name:  &name,
			Value: &rdsdataservice.Field{StringValue: aws.String(t)},
		}
	case []byte:
		value = &rdsdataservice.SqlParameter{
			Name:  &name,
			Value: &rdsdataservice.Field{BlobValue: t},
		}
	case bool:
		value = &rdsdataservice.SqlParameter{
			Name:  &name,
			Value: &rdsdataservice.Field{BooleanValue: &t},
		}
	case float32:
		value = &rdsdataservice.SqlParameter{
			Name:  &name,
			Value: &rdsdataservice.Field{DoubleValue: aws.Float64(float64(t))},
		}
	case float64:
		value = &rdsdataservice.SqlParameter{
			Name:  &name,
			Value: &rdsdataservice.Field{DoubleValue: &t},
		}
	case int:
		value = &rdsdataservice.SqlParameter{
			Name:  &name,
			Value: &rdsdataservice.Field{LongValue: aws.Int64(int64(t))},
		}
	case int8:
		value = &rdsdataservice.SqlParameter{
			Name:  &name,
			Value: &rdsdataservice.Field{LongValue: aws.Int64(int64(t))},
		}
	case int16:
		value = &rdsdataservice.SqlParameter{
			Name:  &name,
			Value: &rdsdataservice.Field{LongValue: aws.Int64(int64(t))},
		}
	case int32:
		value = &rdsdataservice.SqlParameter{
			Name:  &name,
			Value: &rdsdataservice.Field{LongValue: aws.Int64(int64(t))},
		}
	case int64:
		value = &rdsdataservice.SqlParameter{
			Name:  &name,
			Value: &rdsdataservice.Field{LongValue: aws.Int64(t)},
		}
	case uint:
		value = &rdsdataservice.SqlParameter{
			Name:  &name,
			Value: &rdsdataservice.Field{LongValue: aws.Int64(int64(t))},
		}
	case uint8:
		// NOTE(review): uint8 is sent as a one-byte blob, unlike every other
		// unsigned type which becomes a LongValue — confirm this asymmetry
		// is intentional (uint8 == byte in Go).
		value = &rdsdataservice.SqlParameter{
			Name:  &name,
			Value: &rdsdataservice.Field{BlobValue: []byte{t}},
		}
	case uint16:
		value = &rdsdataservice.SqlParameter{
			Name:  &name,
			Value: &rdsdataservice.Field{LongValue: aws.Int64(int64(t))},
		}
	case uint32:
		value = &rdsdataservice.SqlParameter{
			Name:  &name,
			Value: &rdsdataservice.Field{LongValue: aws.Int64(int64(t))},
		}
	case uint64:
		// NOTE(review): values above math.MaxInt64 wrap negative in this
		// int64 conversion — confirm callers never pass such values.
		value = &rdsdataservice.SqlParameter{
			Name:  &name,
			Value: &rdsdataservice.Field{LongValue: aws.Int64(int64(t))},
		}
	case time.Time:
		// Millisecond-precision SQL timestamp format.
		value = &rdsdataservice.SqlParameter{
			Name: &name,
			Value: &rdsdataservice.Field{
				StringValue: aws.String(t.Format("2006-01-02 15:04:05.999")),
			},
		}
	case nil:
		value = &rdsdataservice.SqlParameter{
			Name:  &name,
			Value: &rdsdataservice.Field{IsNull: aws.Bool(true)},
		}
	default:
		err = fmt.Errorf("%s is unsupported type: %#v", name, arg.Value)
		return
	}
	return
}
// isNil reports whether i is nil, including the case of a non-nil interface
// wrapping a typed nil pointer, map, channel, or slice.
//
// reflect.Value.IsNil is only valid for chan, func, interface, map, pointer,
// and slice kinds; calling it on an array panics, so reflect.Array was
// removed from the case list (an array can never be nil anyway, and now
// correctly falls through to `return false`).
func isNil(i interface{}) bool {
	if i == nil {
		return true
	}
	switch reflect.TypeOf(i).Kind() {
	case reflect.Ptr, reflect.Map, reflect.Chan, reflect.Slice:
		return reflect.ValueOf(i).IsNil()
	}
	return false
} | dialect.go | 0.554953 | 0.400105 | dialect.go | starcoder
package sortsearch
import (
"sort"
)
/*
# Top K Frequent Elements
# https://leetcode.com/explore/interview/card/top-interview-questions-medium/110/sorting-and-searching/799/
Given a non-empty array of integers, return the k most frequent elements.
Example 1:
Input: nums = [1,1,1,2,2,3], k = 2
Output: [1,2]
Example 2:
Input: nums = [1], k = 1
Output: [1]
Note:
You may assume k is always valid, 1 ≤ k ≤ number of unique elements.
Your algorithm's time complexity must be better than O(n log n), where n is the array's size.
It's guaranteed that the answer is unique, in other words the set of the top k frequent elements is unique.
You can return the answer in any order.
*/
// TopKFrequent returns the k most frequent elements of nums. It is the
// exported entry point delegating to topKFrequent.
func TopKFrequent(nums []int, k int) []int {
	return topKFrequent(nums, k)
}
// topKFrequent returns the k most frequent values of nums, most frequent
// first. It returns nil when k < 1 or k exceeds the number of distinct
// values (matching the previous implementation's behavior for invalid k).
//
// Runs in O(n) using bucket sort on frequencies — the previous version used
// a comparison sort (O(n log n)), which did not meet the problem's stated
// complexity requirement despite its "bucket sort" comment.
func topKFrequent(nums []int, k int) []int {
	if k <= 0 {
		return nil
	}

	// Count occurrences of each value. A map increment works whether or not
	// the key already exists, so no existence check is needed.
	freq := make(map[int]int, len(nums))
	for _, v := range nums {
		freq[v]++
	}
	if len(freq) < k {
		return nil
	}

	// buckets[f] holds every value occurring exactly f times; a value can
	// occur at most len(nums) times.
	buckets := make([][]int, len(nums)+1)
	for v, f := range freq {
		buckets[f] = append(buckets[f], v)
	}

	// Collect the k values with the highest frequencies.
	result := make([]int, 0, k)
	for f := len(nums); f >= 1 && len(result) < k; f-- {
		for _, v := range buckets[f] {
			result = append(result, v)
			if len(result) == k {
				break
			}
		}
	}
	return result
}
// kfreq pairs a value with its occurrence count.
type kfreq struct {
	num int // occurrence count
	val int // the value itself
}

// kfreqs implements sort.Interface, ordering entries by descending count.
type kfreqs []kfreq

func (k kfreqs) Len() int           { return len(k) }
func (k kfreqs) Swap(i, j int)      { k[i], k[j] = k[j], k[i] }
func (k kfreqs) Less(i, j int) bool { return k[i].num > k[j].num }

// GetKFreq returns the values of the first i entries, or nil when the
// collection holds fewer than i entries.
func (k kfreqs) GetKFreq(i int) []int {
	if len(k) < i {
		return nil
	}
	var vals []int
	for idx := 0; idx < i; idx++ {
		vals = append(vals, k[idx].val)
	}
	return vals
}
/*
# Kth Largest Element in an Array
# https://leetcode.com/explore/interview/card/top-interview-questions-medium/110/sorting-and-searching/800/
Find the kth largest element in an unsorted array. Note that it is the kth largest element in the sorted order, not the kth distinct element.
Example 1:
Input: [3,2,1,5,6,4] and k = 2
Output: 5
Example 2:
Input: [3,2,3,1,2,4,5,5,6] and k = 4
Output: 4
Note:
You may assume k is always valid, 1 ≤ k ≤ array's length.
*/
// FindKthLargest returns the kth largest element of nums. It is the exported
// entry point delegating to findKthLargest.
func FindKthLargest(nums []int, k int) int {
	return findKthLargest(nums, k)
}
// findKthLargest returns the kth largest element of nums, or -1 when k is out
// of range (k < 1 or k > len(nums)).
//
// Fixes two defects of the previous version: it sorted the caller's slice in
// place (now sorts a private copy), and it panicked with an out-of-range
// index for k < 1 (now returns -1).
func findKthLargest(nums []int, k int) int {
	if k < 1 || len(nums) < k {
		return -1
	}
	sorted := make([]int, len(nums))
	copy(sorted, nums)
	sort.Ints(sorted)
	return sorted[len(sorted)-k]
}
/*
# Find Peak Element
# https://leetcode.com/explore/interview/card/top-interview-questions-medium/110/sorting-and-searching/801/
A peak element is an element that is greater than its neighbors.
Given an input array nums, where nums[i] ≠ nums[i+1], find a peak element and return its index.
The array may contain multiple peaks, in that case return the index to any one of the peaks is fine.
You may imagine that nums[-1] = nums[n] = -∞.
Example 1:
Input: nums = [1,2,3,1]
Output: 2
Explanation: 3 is a peak element and your function should return the index number 2.
Example 2:
Input: nums = [1,2,1,3,5,6,4]
Output: 1 or 5
Explanation: Your function can return either index number 1 where the peak element is 2,
or index number 5 where the peak element is 6.
Note:
Your solution should be in logarithmic complexity.
*/
// FindPeakElement returns the index of a peak element of nums. It is the
// exported entry point delegating to findPeakElement.
func FindPeakElement(nums []int) int {
	return findPeakElement(nums)
}
// findPeakElement returns the index of some peak element — one strictly
// greater than its neighbors, with nums[-1] and nums[len] treated as -inf —
// or -1 for a nil/empty slice. Adjacent elements are assumed unequal, per
// the problem statement. When several peaks exist, any one may be returned.
//
// Uses binary search for the required O(log n) time; the previous linear
// scan was O(n) and incorrectly returned index 0 for an empty slice.
func findPeakElement(nums []int) int {
	if len(nums) == 0 {
		return -1
	}
	lo, hi := 0, len(nums)-1
	for lo < hi {
		mid := lo + (hi-lo)/2
		if nums[mid] < nums[mid+1] {
			// Ascending slope: a peak must exist to the right of mid.
			lo = mid + 1
		} else {
			// Descending slope: a peak exists at mid or to its left.
			hi = mid
		}
	}
	return lo
}
/*
# Search for a Range
# https://leetcode.com/explore/interview/card/top-interview-questions-medium/110/sorting-and-searching/802/
Given an array of integers nums sorted in ascending order, find the starting and ending position of a given target value.
Your algorithm's runtime complexity must be in the order of O(log n).
If the target is not found in the array, return [-1, -1].
Example 1:
Input: nums = [5,7,7,8,8,10], target = 8
Output: [3,4]
Example 2:
Input: nums = [5,7,7,8,8,10], target = 6
Output: [-1,-1]
*/
// SearchRange returns the first and last positions of target in the sorted
// slice nums. It is the exported entry point delegating to searchRange.
func SearchRange(nums []int, target int) []int {
	return searchRange(nums, target)
}
// searchRange returns the first and last indices of target in the ascending
// sorted slice nums, or [-1, -1] when target is absent.
//
// Runs in O(log n) using two boundary binary searches. The previous version
// located one occurrence and expanded linearly, which degraded to O(n) when
// many elements equal target, violating the stated complexity requirement.
func searchRange(nums []int, target int) []int {
	// First index with nums[i] >= target.
	first := sort.SearchInts(nums, target)
	if first == len(nums) || nums[first] != target {
		return []int{-1, -1}
	}
	// First index with nums[i] > target; the element before it is the last
	// occurrence. (sort.Search avoids the target+1 overflow edge case.)
	last := sort.Search(len(nums), func(i int) bool { return nums[i] > target }) - 1
	return []int{first, last}
}
// searchForRange expands left and right from index povit over the run of
// elements equal to nums[povit] and returns the inclusive [first, last]
// bounds of that run. A povit of -1 (target not found) yields [-1, -1].
func searchForRange(nums []int, povit int) []int {
	if povit == -1 {
		return []int{-1, -1}
	}
	target := nums[povit]
	left := povit
	for left-1 >= 0 && nums[left-1] == target {
		left--
	}
	right := povit
	for right+1 <= len(nums)-1 && nums[right+1] == target {
		right++
	}
	return []int{left, right}
}
/*
# Merge Intervals
# https://leetcode.com/explore/interview/card/top-interview-questions-medium/110/sorting-and-searching/803/
Given a collection of intervals, merge all overlapping intervals.
Example 1:
Input: [[1,3],[2,6],[8,10],[15,18]]
Output: [[1,6],[8,10],[15,18]]
Explanation: Since intervals [1,3] and [2,6] overlaps, merge them into [1,6].
Example 2:
Input: [[1,4],[4,5]]
Output: [[1,5]]
Explanation: Intervals [1,4] and [4,5] are considered overlapping.
NOTE: input types have been changed on April 15, 2019. Please reset to default code definition to get new method signature.
*/
// Merge merges all overlapping intervals. It is the exported entry point
// delegating to merge.
func Merge(intervals [][]int) [][]int {
	return merge(intervals)
}
// merge merges all overlapping intervals, returning the result sorted by
// start. Each interval is a two-element slice [start, end]; intervals that
// merely touch (e.g. [1,4] and [4,5]) are merged. The input slice itself is
// not reordered (sorting happens on a copy).
//
// Runs in O(n log n) via sort + single sweep, replacing the previous
// repeated-pass approach that rescanned the whole list until no merge
// occurred (worst case O(n^2) passes).
func merge(intervals [][]int) [][]int {
	if len(intervals) <= 1 {
		return intervals
	}

	// Sort a copy by interval start so overlapping intervals become adjacent.
	sorted := make([][]int, len(intervals))
	copy(sorted, intervals)
	sort.Slice(sorted, func(i, j int) bool { return sorted[i][0] < sorted[j][0] })

	result := [][]int{{sorted[0][0], sorted[0][1]}}
	for _, iv := range sorted[1:] {
		last := result[len(result)-1]
		if iv[0] <= last[1] {
			// Overlapping or touching: extend the current interval's end.
			if iv[1] > last[1] {
				last[1] = iv[1]
			}
		} else {
			result = append(result, []int{iv[0], iv[1]})
		}
	}
	return result
}
// mergeIntervals performs one merge pass: each interval is either folded into
// an overlapping entry already in result (mutating that entry in place) or
// appended as new. It returns the pass's output and whether any merge
// happened, so the caller can loop until a fixpoint is reached.
// In the diagrams below, {} is the interval already in result and || is the
// interval currently being considered.
func mergeIntervals(intervals [][]int) ([][]int, bool) {
	if len(intervals) <= 1 {
		return intervals, false
	}
	var result [][]int
	var hasMerged bool
	for i, val := range intervals {
		if i == 0 {
			result = append(result, val)
			continue
		}
		// Scan result for an entry overlapping val; isChanged records whether
		// val was absorbed (merged or fully contained).
		var isChanged bool
		for j := range result {
			// {}
			resLeft, resRight := result[j][0], result[j][1]
			// ||
			nowLeft, nowRight := val[0], val[1]
			if nowLeft <= resRight && nowLeft >= resLeft && nowRight >= resRight {
				// {|}| — val extends the entry to the right.
				result[j][1] = nowRight
				hasMerged = true
				isChanged = true
				break
			} else if nowLeft <= resLeft && nowRight >= resRight {
				// |{}| — val fully covers the entry.
				result[j][1] = nowRight
				result[j][0] = nowLeft
				hasMerged = true
				isChanged = true
				break
			} else if nowLeft >= resLeft && nowRight <= resRight {
				// {||} — val is fully contained; nothing to update.
				isChanged = true
				break
			} else if nowRight >= resLeft && nowRight <= resRight && nowLeft <= resLeft {
				// |{|} — val extends the entry to the left.
				result[j][0] = nowLeft
				hasMerged = true
				isChanged = true
				break
			}
		}
		// No overlap with any existing entry: keep val as its own interval.
		if !isChanged {
			result = append(result, val)
		}
	}
	return result, hasMerged
}
package utils
import (
"errors"
"fmt"
"reflect"
"time"
)
// Comparator imposes a total ordering on some collection of objects, and it allows precise control over the sort order.
type Comparator interface {
	// Compare compares its two arguments for order.
	// It returns a negative integer, zero, or a positive integer as the first argument is less than, equal to, or greater than the second.
	Compare(v1 interface{}, v2 interface{}) (int, error)
}
// Compare compares two arguments using the given Comparator. If the Comparator isn't provided, then the two values are compared according to their natural ordering.
// They must be the same type, otherwise returns an error in the second return value.
// It returns a negative integer, zero, or a positive integer as the first argument is less than, equal to, or greater than the second.
//
// NOTE(review): types are matched by reflect.Kind, so two distinct named
// types with the same underlying kind (e.g. `type MyInt int`) pass the check
// but the type assertions below (v1.(int) etc.) would panic for them —
// confirm callers only pass predeclared types.
func Compare(v1 interface{}, v2 interface{}, cmp Comparator) (int, error) {
	// Two nils compare equal; a single nil cannot be ordered.
	if nil == v1 && nil == v2 {
		return 0, nil
	}
	if nil == v1 || nil == v2 {
		return 0, errors.New("a nil value can't be compared to a non-nil value")
	}
	k1, k2 := reflect.TypeOf(v1).Kind(), reflect.TypeOf(v2).Kind()
	if k1 != k2 {
		return 0, fmt.Errorf("two values of different type can't be compared, %s: %s", k1, k2)
	}
	// Compare the two values using the given customized comparator
	if cmp != nil {
		return cmp.Compare(v1, v2)
	}
	cmpRet := 0
	switch k1 {
	case reflect.Int:
		cv1, cv2 := v1.(int), v2.(int)
		if cv1 < cv2 {
			cmpRet = -1
		} else if cv1 > cv2 {
			cmpRet = 1
		}
	case reflect.Uint:
		cv1, cv2 := v1.(uint), v2.(uint)
		if cv1 < cv2 {
			cmpRet = -1
		} else if cv1 > cv2 {
			cmpRet = 1
		}
	case reflect.String:
		cv1, cv2 := v1.(string), v2.(string)
		if cv1 < cv2 {
			cmpRet = -1
		} else if cv1 > cv2 {
			cmpRet = 1
		}
	case reflect.Float32:
		cv1, cv2 := v1.(float32), v2.(float32)
		if cv1 < cv2 {
			cmpRet = -1
		} else if cv1 > cv2 {
			cmpRet = 1
		}
	case reflect.Float64:
		cv1, cv2 := v1.(float64), v2.(float64)
		if cv1 < cv2 {
			cmpRet = -1
		} else if cv1 > cv2 {
			cmpRet = 1
		}
	case reflect.Bool:
		// false < true
		b1, b2 := v1.(bool), v2.(bool)
		if !b1 && b2 { // b1 == false && b2 == true
			cmpRet = -1
		} else if b1 && !b2 { // b1 == true && b2 == false
			cmpRet = 1
		}
	case reflect.Int8:
		cv1, cv2 := v1.(int8), v2.(int8)
		if cv1 < cv2 {
			cmpRet = -1
		} else if cv1 > cv2 {
			cmpRet = 1
		}
	case reflect.Int16:
		cv1, cv2 := v1.(int16), v2.(int16)
		if cv1 < cv2 {
			cmpRet = -1
		} else if cv1 > cv2 {
			cmpRet = 1
		}
	case reflect.Int32: // valid for both int32 and rune
		cv1, cv2 := v1.(int32), v2.(int32)
		if cv1 < cv2 {
			cmpRet = -1
		} else if cv1 > cv2 {
			cmpRet = 1
		}
	case reflect.Int64:
		cv1, cv2 := v1.(int64), v2.(int64)
		if cv1 < cv2 {
			cmpRet = -1
		} else if cv1 > cv2 {
			cmpRet = 1
		}
	case reflect.Uint8: // valid for both uint8 and byte
		cv1, cv2 := v1.(uint8), v2.(uint8)
		if cv1 < cv2 {
			cmpRet = -1
		} else if cv1 > cv2 {
			cmpRet = 1
		}
	case reflect.Uint16:
		cv1, cv2 := v1.(uint16), v2.(uint16)
		if cv1 < cv2 {
			cmpRet = -1
		} else if cv1 > cv2 {
			cmpRet = 1
		}
	case reflect.Uint32:
		cv1, cv2 := v1.(uint32), v2.(uint32)
		if cv1 < cv2 {
			cmpRet = -1
		} else if cv1 > cv2 {
			cmpRet = 1
		}
	case reflect.Uint64:
		cv1, cv2 := v1.(uint64), v2.(uint64)
		if cv1 < cv2 {
			cmpRet = -1
		} else if cv1 > cv2 {
			cmpRet = 1
		}
	case reflect.Struct:
		// compare time
		isBothTime, timeCmpRet := CompareTime(v1, v2)
		if isBothTime {
			return timeCmpRet, nil
		}
		return 0, errors.New("please define a customized sort.Comparator for your struct")
	default:
		return 0, fmt.Errorf("type '%s' can't be compared", k1)
	}
	return cmpRet, nil
}
// CompareTime compares its two arguments if both of them are time.Time, and returns true
// and the comparison result; otherwise return false in the first return argument.
func CompareTime(v1 interface{}, v2 interface{}) (bool, int) {
	time1, ok1 := v1.(time.Time)
	time2, ok2 := v2.(time.Time)
	if ok1 && ok2 {
		if time1.Before(time2) {
			return true, -1
		}
		if time1.After(time2) {
			return true, 1
		}
		// Neither before nor after: the instants are equal.
		return true, 0
	}
	return false, 0
}
package villa
import (
"fmt"
"github.com/golangplus/bytes"
)
/*
IntMatrix is 2D array of integers. All elements live in one contiguous int
slice, with one sub-slice per row pointing into it.
NOTE the matrix can be sized of 0x0, but never 0x10 or 10x0.
*/
type IntMatrix [][]int

// NewIntMatrix creates a new IntMatrix instance with the specified number of
// rows and columns, backed by a single contiguous allocation.
func NewIntMatrix(nRow, nCol int) IntMatrix {
	backing := make([]int, nRow*nCol)
	mat := make(IntMatrix, nRow)
	for r := range mat {
		mat[r] = backing[r*nCol : (r+1)*nCol]
	}
	return mat
}
// Clone returns a deep copy of the matrix.
// NOTE(review): relies on the contiguous layout produced by NewIntMatrix
// (m[0] extends over all elements) — confirm rows are never reassigned to
// foreign slices elsewhere.
func (m IntMatrix) Clone() IntMatrix {
	out := NewIntMatrix(m.Rows(), m.Cols())
	if n := m.Rows() * m.Cols(); n > 0 {
		copy(out[0][:n], m[0][:n])
	}
	return out
}
// Cols returns the number of columns (the length of the first row, or 0 for
// an empty matrix).
func (m IntMatrix) Cols() int {
	if len(m) > 0 {
		return len(m[0])
	}
	return 0
}
// Rows returns the number of rows.
func (m IntMatrix) Rows() int {
	return len(m)
}
// PrettyString returns a pretty text form of the matrix, with columns padded
// to equal width and the dimensions appended after the final row, e.g.
// "[[1 2]\n [3 4]](2x2)\n".
// This function is mainly for debugging.
func (m IntMatrix) PrettyString() string {
	// Render every cell to text first.
	sa := make([][]string, 0, m.Rows())
	for _, row := range m {
		sr := make([]string, 0, len(row))
		for _, cell := range row {
			sr = append(sr, fmt.Sprint(cell))
		}
		sa = append(sa, sr)
	}
	// wds[j] is the widest rendered cell in column j, used for padding.
	wds := make([]int, m.Cols())
	for i := 0; i < m.Rows(); i++ {
		for j := 0; j < m.Cols(); j++ {
			if len(sa[i][j]) > wds[j] {
				wds[j] = len(sa[i][j])
			}
		}
	}
	var res bytesp.Slice
	for i, row := range sa {
		// The outer '[' opens on the first row; later rows are indented to
		// align beneath it.
		if i == 0 {
			res.WriteString("[")
		} else {
			res.WriteString(" ")
		}
		res.WriteString("[")
		for j, cell := range row {
			if j > 0 {
				res.WriteString(" ")
			}
			fmt.Fprintf(&res, "%*s", wds[j], cell)
		}
		res.WriteString("]")
		// Close the outer bracket and append "(RxC)" after the last row.
		if i == len(sa)-1 {
			fmt.Fprintf(&res, "](%dx%d)", m.Rows(), m.Cols())
		}
		res.WriteString("\n")
	}
	return string(res)
}
// Fill sets all elements of the matrix to a specified value.
// NOTE(review): fills via m[0][:Rows*Cols], so it relies on the contiguous
// backing produced by NewIntMatrix — confirm rows are never reassigned.
func (m IntMatrix) Fill(vl int) {
	if len(m) == 0 {
		return
	}
	n := m.Rows() * m.Cols()
	IntSlice(m[0][:n]).Fill(0, n, vl)
}
package radolan
// spec describes the grid geometry of a product file.
type spec struct {
	px int // plain data dimensions
	py int
	dx int // data (layer) dimensions
	dy int
	rx float64 // resolution
	ry float64
}
// dimensionCatalog maps local picture product IDs to their grid geometry,
// because these products do not provide dimensions in the header.
// Field order per entry: px, py, dx, dy, rx, ry (see spec).
var dimensionCatalog = map[string]spec{
	"OL": {200, 224, 200, 200, 2, 2}, // reflectivity (no clutter detection)
	"OX": {200, 224, 200, 200, 1, 1}, // reflectivity (no clutter detection)
	"PD": {200, 224, 200, 200, 1, 1}, // radial velocity
	"PE": {200, 224, 200, 200, 2, 2}, // echotop
	"PF": {200, 224, 200, 200, 1, 1}, // reflectivity (15 classes)
	"PH": {200, 224, 200, 200, 1, 1}, // accumulated rainfall
	"PL": {200, 224, 200, 200, 2, 2}, // reflectivity
	"PM": {200, 224, 200, 200, 2, 2}, // max. reflectivity
	"PR": {200, 224, 200, 200, 1, 1}, // radial velocity
	"PU": {200, 2400, 200, 200, 1, 1}, // 3D radial velocity
	"PV": {200, 224, 200, 200, 1, 1}, // radial velocity
	"PX": {200, 224, 200, 200, 1, 1}, // reflectivity (6 classes)
	"PY": {200, 224, 200, 200, 1, 1}, // accumulated rainfall
	"PZ": {200, 2400, 200, 200, 2, 2}, // 3D reflectivity CAPPI
}
// Unit identifies the physical unit of a product's data values.
type Unit int

// Unit values. The underscore names are kept as-is for backward
// compatibility with existing callers.
const (
	Unit_unknown = iota // unit could not be determined
	Unit_mm             // mm/interval
	Unit_dBZ            // dBZ
	Unit_km             // km
	Unit_mps            // m/s
)

// String returns a human-readable label for the unit. Unknown or
// out-of-range values yield "unknown unit" — the previous slice-indexing
// implementation panicked for any value outside [0, 4].
func (u Unit) String() string {
	switch u {
	case Unit_mm:
		return "mm"
	case Unit_dBZ:
		return "dBZ"
	case Unit_km:
		return "km"
	case Unit_mps:
		return "m/s"
	default:
		return "unknown unit"
	}
}
// unitCatalog maps every product ID to the physical unit of its values.
var unitCatalog = map[string]Unit{
	"CH": Unit_mm,
	"CX": Unit_dBZ,
	"D2": Unit_mm,
	"D3": Unit_mm,
	"EA": Unit_dBZ,
	"EB": Unit_mm,
	"EC": Unit_mm,
	"EH": Unit_mm,
	"EM": Unit_mm,
	"EW": Unit_mm,
	"EX": Unit_dBZ,
	"EY": Unit_mm,
	"EZ": Unit_mm,
	"FX": Unit_dBZ,
	"FZ": Unit_dBZ,
	"HX": Unit_dBZ,
	"OL": Unit_dBZ,
	"OX": Unit_dBZ,
	"PA": Unit_dBZ,
	"PC": Unit_dBZ,
	"PD": Unit_mps,
	"PE": Unit_km,
	"PF": Unit_dBZ,
	"PG": Unit_dBZ,
	"PH": Unit_mm,
	"PI": Unit_dBZ,
	"PK": Unit_dBZ,
	"PL": Unit_dBZ,
	"PM": Unit_dBZ,
	"PN": Unit_dBZ,
	"PR": Unit_mps,
	"PU": Unit_mps,
	"PV": Unit_mps,
	"PX": Unit_dBZ,
	"PY": Unit_mm,
	"PZ": Unit_dBZ,
	"RA": Unit_mm,
	"RB": Unit_mm,
	"RE": Unit_mm,
	"RH": Unit_mm,
	"RK": Unit_mm,
	"RL": Unit_mm,
	"RM": Unit_mm,
	"RN": Unit_mm,
	"RQ": Unit_mm,
	"RR": Unit_mm,
	"RU": Unit_mm,
	"RW": Unit_mm,
	"RX": Unit_dBZ,
	"RY": Unit_mm,
	"RZ": Unit_mm,
	"S2": Unit_mm,
	"S3": Unit_mm,
	"SF": Unit_mm,
	"SH": Unit_mm,
	"SQ": Unit_mm,
	"TB": Unit_mm,
	"TH": Unit_mm,
	"TW": Unit_mm,
	"TX": Unit_dBZ,
	"TZ": Unit_mm,
	"W1": Unit_mm,
	"W2": Unit_mm,
	"W3": Unit_mm,
	"W4": Unit_mm,
	"WX": Unit_dBZ,
} | catalog.go | 0.557966 | 0.586168 | catalog.go | starcoder
package primitive
// edgeMap indexes edges first by type, then by edge ID. edgeMaps are not
// concurrency safe.
type edgeMap map[string]map[string]*Edge
// Types returns the edge types present in the map. The order is unspecified
// because it follows map iteration order; the result is nil for an empty map.
// (Replaced the non-idiomatic `for t, _ := range` with `for t := range`.)
func (e edgeMap) Types() []string {
	var typs []string
	for t := range e {
		typs = append(typs, t)
	}
	return typs
}
// RangeType executes the function over a list of edges with the given type.
// If the function returns false, the iteration stops. With AnyType, edges of
// every type are visited.
func (e edgeMap) RangeType(typ Type, fn func(e *Edge) bool) {
	if typ.Type() == AnyType {
		for _, edges := range e {
			for _, edge := range edges {
				if !fn(edge) {
					// A bare `break` here (as before) only left the inner
					// loop, so iteration kept going with the next type;
					// `return` stops the whole walk as documented.
					return
				}
			}
		}
		return
	}
	// Ranging over a nil inner map is a safe no-op, so no existence check is
	// needed for an absent type.
	for _, edge := range e[typ.Type()] {
		if !fn(edge) {
			return
		}
	}
}
// Range executes the function over every edge. If the function returns false, the
// iteration stops.
func (e edgeMap) Range(fn func(e *Edge) bool) {
	for _, bucket := range e {
		for _, edge := range bucket {
			if !fn(edge) {
				// break would only leave the inner loop and keep iterating the
				// remaining type buckets; return stops the iteration entirely.
				return
			}
		}
	}
}
// Filter executes the function over every edge. Every edge for which the function
// returns true is collected into the returned slice.
func (e edgeMap) Filter(fn func(e *Edge) bool) []*Edge {
	var matched []*Edge
	for _, bucket := range e {
		for _, edge := range bucket {
			if fn(edge) {
				matched = append(matched, edge)
			}
		}
	}
	return matched
}
// FilterType executes the function over every edge of the given type. Every edge for
// which the function returns true is collected into the returned slice.
func (e edgeMap) FilterType(typ Type, fn func(e *Edge) bool) []*Edge {
	var matched []*Edge
	// Always return true from the callback so RangeType visits every edge.
	collect := func(edge *Edge) bool {
		if fn(edge) {
			matched = append(matched, edge)
		}
		return true
	}
	e.RangeType(typ, collect)
	return matched
}
// DelEdge removes the edge with the given typed ID, if present.
func (e edgeMap) DelEdge(id TypedID) {
	// delete on a missing (nil) inner map is a no-op, so the type bucket
	// does not need to be checked for existence first.
	delete(e[id.Type()], id.ID())
}
// AddEdge stores the edge in the map, creating its type bucket on first use.
func (e edgeMap) AddEdge(edge *Edge) {
	bucket, ok := e[edge.Type()]
	if !ok {
		bucket = make(map[string]*Edge)
		e[edge.Type()] = bucket
	}
	bucket[edge.ID()] = edge
}
// HasEdge reports whether an edge with the given typed ID exists.
func (e edgeMap) HasEdge(id TypedID) bool {
	// Indexing a missing type bucket yields a nil map, and indexing a nil
	// map safely yields ok == false.
	_, ok := e[id.Type()][id.ID()]
	return ok
}
// GetEdge looks up an edge by its typed ID, reporting whether it was found.
func (e edgeMap) GetEdge(id TypedID) (*Edge, bool) {
	// Double indexing is safe: a missing type bucket is a nil map, and
	// indexing a nil map yields the zero value with ok == false.
	edge, ok := e[id.Type()][id.ID()]
	if !ok {
		return nil, false
	}
	return edge, true
}
// Len returns the number of edges of the given type
func (e edgeMap) Len(typ Type) int {
if rels, ok := e[typ.Type()]; ok {
return len(rels)
}
return 0
} | primitive/edges.go | 0.776369 | 0.638074 | edges.go | starcoder |
package tensor
import (
"reflect"
"unsafe"
"github.com/pkg/errors"
)
// Set sets the value of the underlying array at the index i.
// The concrete type of x must match the array's element type: the type
// assertions below (and reflect.Value.Set in the fallback branch) panic on
// a mismatch.
func (a *array) Set(i int, x interface{}) {
	switch a.t.Kind() {
	case reflect.Bool:
		xv := x.(bool)
		a.SetB(i, xv)
	case reflect.Int:
		xv := x.(int)
		a.SetI(i, xv)
	case reflect.Int8:
		xv := x.(int8)
		a.SetI8(i, xv)
	case reflect.Int16:
		xv := x.(int16)
		a.SetI16(i, xv)
	case reflect.Int32:
		xv := x.(int32)
		a.SetI32(i, xv)
	case reflect.Int64:
		xv := x.(int64)
		a.SetI64(i, xv)
	case reflect.Uint:
		xv := x.(uint)
		a.SetU(i, xv)
	case reflect.Uint8:
		xv := x.(uint8)
		a.SetU8(i, xv)
	case reflect.Uint16:
		xv := x.(uint16)
		a.SetU16(i, xv)
	case reflect.Uint32:
		xv := x.(uint32)
		a.SetU32(i, xv)
	case reflect.Uint64:
		xv := x.(uint64)
		a.SetU64(i, xv)
	case reflect.Uintptr:
		xv := x.(uintptr)
		a.SetUintptr(i, xv)
	case reflect.Float32:
		xv := x.(float32)
		a.SetF32(i, xv)
	case reflect.Float64:
		xv := x.(float64)
		a.SetF64(i, xv)
	case reflect.Complex64:
		xv := x.(complex64)
		a.SetC64(i, xv)
	case reflect.Complex128:
		xv := x.(complex128)
		a.SetC128(i, xv)
	case reflect.String:
		xv := x.(string)
		a.SetStr(i, xv)
	case reflect.UnsafePointer:
		xv := x.(unsafe.Pointer)
		a.SetUnsafePointer(i, xv)
	default:
		// Fallback for element kinds without a dedicated setter: compute the
		// address of element i from the base pointer and the element size,
		// then write x through reflection.
		xv := reflect.ValueOf(x)
		ptr := uintptr(a.Ptr)
		want := ptr + uintptr(i)*a.t.Size()
		val := reflect.NewAt(a.t, unsafe.Pointer(want))
		val = reflect.Indirect(val)
		val.Set(xv)
	}
}
// Get returns the ith element of the underlying array of the *Dense tensor.
// The value is boxed into an interface{}; known kinds use their dedicated
// typed getters, all other kinds go through the reflection fallback.
func (a *array) Get(i int) interface{} {
	switch a.t.Kind() {
	case reflect.Bool:
		return a.GetB(i)
	case reflect.Int:
		return a.GetI(i)
	case reflect.Int8:
		return a.GetI8(i)
	case reflect.Int16:
		return a.GetI16(i)
	case reflect.Int32:
		return a.GetI32(i)
	case reflect.Int64:
		return a.GetI64(i)
	case reflect.Uint:
		return a.GetU(i)
	case reflect.Uint8:
		return a.GetU8(i)
	case reflect.Uint16:
		return a.GetU16(i)
	case reflect.Uint32:
		return a.GetU32(i)
	case reflect.Uint64:
		return a.GetU64(i)
	case reflect.Uintptr:
		return a.GetUintptr(i)
	case reflect.Float32:
		return a.GetF32(i)
	case reflect.Float64:
		return a.GetF64(i)
	case reflect.Complex64:
		return a.GetC64(i)
	case reflect.Complex128:
		return a.GetC128(i)
	case reflect.String:
		return a.GetStr(i)
	case reflect.UnsafePointer:
		return a.GetUnsafePointer(i)
	default:
		// Fallback: address element i manually and read it via reflection.
		at := uintptr(a.Ptr) + uintptr(i)*a.t.Size()
		val := reflect.NewAt(a.t, unsafe.Pointer(at))
		val = reflect.Indirect(val)
		return val.Interface()
	}
}
// Memset sets all values in the array to x.
// For each known Dtype a typed fast path is taken when x has the matching Go
// type; those paths return nil on success. If x's type does not match the
// Dtype (or the Dtype is not listed), control falls through to the
// reflection loop at the bottom, where reflect.Value.Set panics if x's type
// is not assignable to the element type.
func (a *array) Memset(x interface{}) error {
	switch a.t {
	case Bool:
		if xv, ok := x.(bool); ok {
			data := a.Bools()
			for i := range data {
				data[i] = xv
			}
			return nil
		}
	case Int:
		if xv, ok := x.(int); ok {
			data := a.Ints()
			for i := range data {
				data[i] = xv
			}
			return nil
		}
	case Int8:
		if xv, ok := x.(int8); ok {
			data := a.Int8s()
			for i := range data {
				data[i] = xv
			}
			return nil
		}
	case Int16:
		if xv, ok := x.(int16); ok {
			data := a.Int16s()
			for i := range data {
				data[i] = xv
			}
			return nil
		}
	case Int32:
		if xv, ok := x.(int32); ok {
			data := a.Int32s()
			for i := range data {
				data[i] = xv
			}
			return nil
		}
	case Int64:
		if xv, ok := x.(int64); ok {
			data := a.Int64s()
			for i := range data {
				data[i] = xv
			}
			return nil
		}
	case Uint:
		if xv, ok := x.(uint); ok {
			data := a.Uints()
			for i := range data {
				data[i] = xv
			}
			return nil
		}
	case Uint8:
		if xv, ok := x.(uint8); ok {
			data := a.Uint8s()
			for i := range data {
				data[i] = xv
			}
			return nil
		}
	case Uint16:
		if xv, ok := x.(uint16); ok {
			data := a.Uint16s()
			for i := range data {
				data[i] = xv
			}
			return nil
		}
	case Uint32:
		if xv, ok := x.(uint32); ok {
			data := a.Uint32s()
			for i := range data {
				data[i] = xv
			}
			return nil
		}
	case Uint64:
		if xv, ok := x.(uint64); ok {
			data := a.Uint64s()
			for i := range data {
				data[i] = xv
			}
			return nil
		}
	case Uintptr:
		if xv, ok := x.(uintptr); ok {
			data := a.Uintptrs()
			for i := range data {
				data[i] = xv
			}
			return nil
		}
	case Float32:
		if xv, ok := x.(float32); ok {
			data := a.Float32s()
			for i := range data {
				data[i] = xv
			}
			return nil
		}
	case Float64:
		if xv, ok := x.(float64); ok {
			data := a.Float64s()
			for i := range data {
				data[i] = xv
			}
			return nil
		}
	case Complex64:
		if xv, ok := x.(complex64); ok {
			data := a.Complex64s()
			for i := range data {
				data[i] = xv
			}
			return nil
		}
	case Complex128:
		if xv, ok := x.(complex128); ok {
			data := a.Complex128s()
			for i := range data {
				data[i] = xv
			}
			return nil
		}
	case String:
		if xv, ok := x.(string); ok {
			data := a.Strings()
			for i := range data {
				data[i] = xv
			}
			return nil
		}
	case UnsafePointer:
		if xv, ok := x.(unsafe.Pointer); ok {
			data := a.UnsafePointers()
			for i := range data {
				data[i] = xv
			}
			return nil
		}
	}

	// Generic fallback: write x into each of the a.L elements via reflection.
	xv := reflect.ValueOf(x)
	ptr := uintptr(a.Ptr)
	for i := 0; i < a.L; i++ {
		want := ptr + uintptr(i)*a.t.Size()
		val := reflect.NewAt(a.t, unsafe.Pointer(want))
		val = reflect.Indirect(val)
		val.Set(xv)
	}
	return nil
}
// memsetIter sets every element selected by the iterator to x.
// Unlike Memset, a type mismatch on a known Dtype is reported as an error
// rather than handled by the reflection fallback. After each loop the
// iterator's end-of-iteration error is normalised by handleNoOp.
func (t *array) memsetIter(x interface{}, it Iterator) (err error) {
	var i int
	switch t.t {
	case Bool:
		xv, ok := x.(bool)
		if !ok {
			return errors.Errorf(dtypeMismatch, t.t, x)
		}
		data := t.Bools()
		for i, err = it.Next(); err == nil; i, err = it.Next() {
			data[i] = xv
		}
		err = handleNoOp(err)
	case Int:
		xv, ok := x.(int)
		if !ok {
			return errors.Errorf(dtypeMismatch, t.t, x)
		}
		data := t.Ints()
		for i, err = it.Next(); err == nil; i, err = it.Next() {
			data[i] = xv
		}
		err = handleNoOp(err)
	case Int8:
		xv, ok := x.(int8)
		if !ok {
			return errors.Errorf(dtypeMismatch, t.t, x)
		}
		data := t.Int8s()
		for i, err = it.Next(); err == nil; i, err = it.Next() {
			data[i] = xv
		}
		err = handleNoOp(err)
	case Int16:
		xv, ok := x.(int16)
		if !ok {
			return errors.Errorf(dtypeMismatch, t.t, x)
		}
		data := t.Int16s()
		for i, err = it.Next(); err == nil; i, err = it.Next() {
			data[i] = xv
		}
		err = handleNoOp(err)
	case Int32:
		xv, ok := x.(int32)
		if !ok {
			return errors.Errorf(dtypeMismatch, t.t, x)
		}
		data := t.Int32s()
		for i, err = it.Next(); err == nil; i, err = it.Next() {
			data[i] = xv
		}
		err = handleNoOp(err)
	case Int64:
		xv, ok := x.(int64)
		if !ok {
			return errors.Errorf(dtypeMismatch, t.t, x)
		}
		data := t.Int64s()
		for i, err = it.Next(); err == nil; i, err = it.Next() {
			data[i] = xv
		}
		err = handleNoOp(err)
	case Uint:
		xv, ok := x.(uint)
		if !ok {
			return errors.Errorf(dtypeMismatch, t.t, x)
		}
		data := t.Uints()
		for i, err = it.Next(); err == nil; i, err = it.Next() {
			data[i] = xv
		}
		err = handleNoOp(err)
	case Uint8:
		xv, ok := x.(uint8)
		if !ok {
			return errors.Errorf(dtypeMismatch, t.t, x)
		}
		data := t.Uint8s()
		for i, err = it.Next(); err == nil; i, err = it.Next() {
			data[i] = xv
		}
		err = handleNoOp(err)
	case Uint16:
		xv, ok := x.(uint16)
		if !ok {
			return errors.Errorf(dtypeMismatch, t.t, x)
		}
		data := t.Uint16s()
		for i, err = it.Next(); err == nil; i, err = it.Next() {
			data[i] = xv
		}
		err = handleNoOp(err)
	case Uint32:
		xv, ok := x.(uint32)
		if !ok {
			return errors.Errorf(dtypeMismatch, t.t, x)
		}
		data := t.Uint32s()
		for i, err = it.Next(); err == nil; i, err = it.Next() {
			data[i] = xv
		}
		err = handleNoOp(err)
	case Uint64:
		xv, ok := x.(uint64)
		if !ok {
			return errors.Errorf(dtypeMismatch, t.t, x)
		}
		data := t.Uint64s()
		for i, err = it.Next(); err == nil; i, err = it.Next() {
			data[i] = xv
		}
		err = handleNoOp(err)
	case Uintptr:
		xv, ok := x.(uintptr)
		if !ok {
			return errors.Errorf(dtypeMismatch, t.t, x)
		}
		data := t.Uintptrs()
		for i, err = it.Next(); err == nil; i, err = it.Next() {
			data[i] = xv
		}
		err = handleNoOp(err)
	case Float32:
		xv, ok := x.(float32)
		if !ok {
			return errors.Errorf(dtypeMismatch, t.t, x)
		}
		data := t.Float32s()
		for i, err = it.Next(); err == nil; i, err = it.Next() {
			data[i] = xv
		}
		err = handleNoOp(err)
	case Float64:
		xv, ok := x.(float64)
		if !ok {
			return errors.Errorf(dtypeMismatch, t.t, x)
		}
		data := t.Float64s()
		for i, err = it.Next(); err == nil; i, err = it.Next() {
			data[i] = xv
		}
		err = handleNoOp(err)
	case Complex64:
		xv, ok := x.(complex64)
		if !ok {
			return errors.Errorf(dtypeMismatch, t.t, x)
		}
		data := t.Complex64s()
		for i, err = it.Next(); err == nil; i, err = it.Next() {
			data[i] = xv
		}
		err = handleNoOp(err)
	case Complex128:
		xv, ok := x.(complex128)
		if !ok {
			return errors.Errorf(dtypeMismatch, t.t, x)
		}
		data := t.Complex128s()
		for i, err = it.Next(); err == nil; i, err = it.Next() {
			data[i] = xv
		}
		err = handleNoOp(err)
	case String:
		xv, ok := x.(string)
		if !ok {
			return errors.Errorf(dtypeMismatch, t.t, x)
		}
		data := t.Strings()
		for i, err = it.Next(); err == nil; i, err = it.Next() {
			data[i] = xv
		}
		err = handleNoOp(err)
	case UnsafePointer:
		xv, ok := x.(unsafe.Pointer)
		if !ok {
			return errors.Errorf(dtypeMismatch, t.t, x)
		}
		data := t.UnsafePointers()
		for i, err = it.Next(); err == nil; i, err = it.Next() {
			data[i] = xv
		}
		err = handleNoOp(err)
	default:
		// Unknown Dtype: write x into each iterated element via reflection.
		xv := reflect.ValueOf(x)
		ptr := uintptr(t.Ptr)
		for i, err = it.Next(); err == nil; i, err = it.Next() {
			want := ptr + uintptr(i)*t.t.Size()
			val := reflect.NewAt(t.t, unsafe.Pointer(want))
			val = reflect.Indirect(val)
			val.Set(xv)
		}
		err = handleNoOp(err)
	}
	return
}
// Eq checks that any two arrays are equal.
// Arrays are equal when they share Dtype, length and capacity and compare
// equal elementwise; a shared data pointer short-circuits to true. Any
// argument that is not a *array reports false.
func (a array) Eq(other interface{}) bool {
	if oa, ok := other.(*array); ok {
		if oa.t != a.t {
			return false
		}

		if oa.L != a.L {
			return false
		}

		if oa.C != a.C {
			return false
		}

		// same exact thing
		if uintptr(oa.Ptr) == uintptr(a.Ptr) {
			return true
		}

		switch a.t.Kind() {
		case reflect.Bool:
			for i, v := range a.Bools() {
				if oa.GetB(i) != v {
					return false
				}
			}
		case reflect.Int:
			for i, v := range a.Ints() {
				if oa.GetI(i) != v {
					return false
				}
			}
		case reflect.Int8:
			for i, v := range a.Int8s() {
				if oa.GetI8(i) != v {
					return false
				}
			}
		case reflect.Int16:
			for i, v := range a.Int16s() {
				if oa.GetI16(i) != v {
					return false
				}
			}
		case reflect.Int32:
			for i, v := range a.Int32s() {
				if oa.GetI32(i) != v {
					return false
				}
			}
		case reflect.Int64:
			for i, v := range a.Int64s() {
				if oa.GetI64(i) != v {
					return false
				}
			}
		case reflect.Uint:
			for i, v := range a.Uints() {
				if oa.GetU(i) != v {
					return false
				}
			}
		case reflect.Uint8:
			for i, v := range a.Uint8s() {
				if oa.GetU8(i) != v {
					return false
				}
			}
		case reflect.Uint16:
			for i, v := range a.Uint16s() {
				if oa.GetU16(i) != v {
					return false
				}
			}
		case reflect.Uint32:
			for i, v := range a.Uint32s() {
				if oa.GetU32(i) != v {
					return false
				}
			}
		case reflect.Uint64:
			for i, v := range a.Uint64s() {
				if oa.GetU64(i) != v {
					return false
				}
			}
		case reflect.Uintptr:
			for i, v := range a.Uintptrs() {
				if oa.GetUintptr(i) != v {
					return false
				}
			}
		case reflect.Float32:
			for i, v := range a.Float32s() {
				if oa.GetF32(i) != v {
					return false
				}
			}
		case reflect.Float64:
			for i, v := range a.Float64s() {
				if oa.GetF64(i) != v {
					return false
				}
			}
		case reflect.Complex64:
			for i, v := range a.Complex64s() {
				if oa.GetC64(i) != v {
					return false
				}
			}
		case reflect.Complex128:
			for i, v := range a.Complex128s() {
				if oa.GetC128(i) != v {
					return false
				}
			}
		case reflect.String:
			for i, v := range a.Strings() {
				if oa.GetStr(i) != v {
					return false
				}
			}
		case reflect.UnsafePointer:
			for i, v := range a.UnsafePointers() {
				if oa.GetUnsafePointer(i) != v {
					return false
				}
			}
		default:
			// Unknown kinds compare elementwise via reflect.DeepEqual.
			for i := 0; i < a.L; i++ {
				if !reflect.DeepEqual(a.Get(i), oa.Get(i)) {
					return false
				}
			}
		}
		return true
	}
	return false
}
func (t *array) zeroIter(it Iterator) (err error) {
var i int
switch t.t {
case Bool:
data := t.Bools()
for i, err = it.Next(); err == nil; i, err = it.Next() {
data[i] = false
}
err = handleNoOp(err)
case Int:
data := t.Ints()
for i, err = it.Next(); err == nil; i, err = it.Next() {
data[i] = 0
}
err = handleNoOp(err)
case Int8:
data := t.Int8s()
for i, err = it.Next(); err == nil; i, err = it.Next() {
data[i] = 0
}
err = handleNoOp(err)
case Int16:
data := t.Int16s()
for i, err = it.Next(); err == nil; i, err = it.Next() {
data[i] = 0
}
err = handleNoOp(err)
case Int32:
data := t.Int32s()
for i, err = it.Next(); err == nil; i, err = it.Next() {
data[i] = 0
}
err = handleNoOp(err)
case Int64:
data := t.Int64s()
for i, err = it.Next(); err == nil; i, err = it.Next() {
data[i] = 0
}
err = handleNoOp(err)
case Uint:
data := t.Uints()
for i, err = it.Next(); err == nil; i, err = it.Next() {
data[i] = 0
}
err = handleNoOp(err)
case Uint8:
data := t.Uint8s()
for i, err = it.Next(); err == nil; i, err = it.Next() {
data[i] = 0
}
err = handleNoOp(err)
case Uint16:
data := t.Uint16s()
for i, err = it.Next(); err == nil; i, err = it.Next() {
data[i] = 0
}
err = handleNoOp(err)
case Uint32:
data := t.Uint32s()
for i, err = it.Next(); err == nil; i, err = it.Next() {
data[i] = 0
}
err = handleNoOp(err)
case Uint64:
data := t.Uint64s()
for i, err = it.Next(); err == nil; i, err = it.Next() {
data[i] = 0
}
err = handleNoOp(err)
case Uintptr:
data := t.Uintptrs()
for i, err = it.Next(); err == nil; i, err = it.Next() {
data[i] = 0
}
err = handleNoOp(err)
case Float32:
data := t.Float32s()
for i, err = it.Next(); err == nil; i, err = it.Next() {
data[i] = 0
}
err = handleNoOp(err)
case Float64:
data := t.Float64s()
for i, err = it.Next(); err == nil; i, err = it.Next() {
data[i] = 0
}
err = handleNoOp(err)
case Complex64:
data := t.Complex64s()
for i, err = it.Next(); err == nil; i, err = it.Next() {
data[i] = 0
}
err = handleNoOp(err)
case Complex128:
data := t.Complex128s()
for i, err = it.Next(); err == nil; i, err = it.Next() {
data[i] = 0
}
err = handleNoOp(err)
case String:
data := t.Strings()
for i, err = it.Next(); err == nil; i, err = it.Next() {
data[i] = ""
}
err = handleNoOp(err)
case UnsafePointer:
data := t.UnsafePointers()
for i, err = it.Next(); err == nil; i, err = it.Next() {
data[i] = nil
}
err = handleNoOp(err)
default:
ptr := uintptr(t.Ptr)
for i, err = it.Next(); err == nil; i, err = it.Next() {
want := ptr + uintptr(i)*t.t.Size()
val := reflect.NewAt(t.t, unsafe.Pointer(want))
val = reflect.Indirect(val)
val.Set(reflect.Zero(t.t))
}
err = handleNoOp(err)
}
return
} | vendor/gorgonia.org/tensor/array_getset.go | 0.533884 | 0.564038 | array_getset.go | starcoder |
package block
import (
"github.com/df-mc/dragonfly/server/block/cube"
"github.com/df-mc/dragonfly/server/entity/effect"
"github.com/df-mc/dragonfly/server/entity/physics"
"github.com/df-mc/dragonfly/server/internal/nbtconv"
"github.com/df-mc/dragonfly/server/item"
"github.com/df-mc/dragonfly/server/world"
"github.com/go-gl/mathgl/mgl64"
"math"
"time"
_ "unsafe" // For compiler directives.
)
// Beacon is a block that projects a light beam skyward, and can provide status effects such as Speed, Jump
// Boost, Haste, Regeneration, Resistance, or Strength to nearby players.
type Beacon struct {
	solid
	transparent
	clicksAndSticks

	// Primary and Secondary are the primary and secondary effects broadcast to nearby entities by the
	// beacon.
	Primary, Secondary effect.LastingType
	// level is the amount of the pyramid's levels, it is defined by the mineral blocks which build up the
	// pyramid, and can be 0-4. It is recalculated periodically by Tick.
	level int
}
// BeaconSource represents a block which is capable of contributing to powering a beacon pyramid.
type BeaconSource interface {
	// PowersBeacon returns a bool which indicates whether this block can contribute to powering up a
	// beacon pyramid.
	PowersBeacon() bool
}
// BreakInfo returns the break information of the beacon: a hardness of 3,
// harvestable with any tool and with no tool class being more effective.
func (b Beacon) BreakInfo() BreakInfo {
	return newBreakInfo(3, alwaysHarvestable, nothingEffective, oneOf(b))
}
// Activate manages the opening of a beacon by activating it.
func (b Beacon) Activate(pos cube.Pos, _ cube.Face, _ *world.World, u item.User) bool {
	// Activation always succeeds; only the container opening is conditional
	// on the user being able to open block containers.
	if opener, ok := u.(ContainerOpener); ok {
		opener.OpenBlockContainer(pos)
	}
	return true
}
// DecodeNBT restores the beacon's pyramid level and its primary/secondary
// effects from the given NBT data map.
func (b Beacon) DecodeNBT(data map[string]interface{}) interface{} {
	b.level = int(nbtconv.MapInt32(data, "Levels"))
	// Effect IDs that are not registered leave the corresponding field nil.
	if primary, ok := effect.ByID(int(nbtconv.MapInt32(data, "Primary"))); ok {
		b.Primary = primary.(effect.LastingType)
	}
	if secondary, ok := effect.ByID(int(nbtconv.MapInt32(data, "Secondary"))); ok {
		b.Secondary = secondary.(effect.LastingType)
	}
	return b
}
// EncodeNBT serialises the beacon's pyramid level and its primary/secondary
// effects into an NBT data map. Effect entries are omitted when the effect
// has no registered ID.
func (b Beacon) EncodeNBT() map[string]interface{} {
	m := map[string]interface{}{
		"Levels": int32(b.level),
	}
	if primary, ok := effect.ID(b.Primary); ok {
		m["Primary"] = int32(primary)
	}
	if secondary, ok := effect.ID(b.Secondary); ok {
		m["Secondary"] = int32(secondary)
	}
	return m
}
// CanDisplace returns whether the given liquid may occupy the same space as
// the beacon; only water qualifies.
func (b Beacon) CanDisplace(l world.Liquid) bool {
	_, water := l.(Water)
	return water
}
// SideClosed always reports false: the beacon does not close off liquids on
// any of its sides.
func (b Beacon) SideClosed(cube.Pos, cube.Pos, *world.World) bool {
	return false
}
// LightEmissionLevel returns 15, the light level emitted by the beacon.
func (Beacon) LightEmissionLevel() uint8 {
	return 15
}
// Level returns an integer 0-4 which defines the current pyramid level of the beacon.
func (b Beacon) Level() int {
	return b.level
}
// Tick recalculates level, recalculates the active state of the beacon, and powers players,
// once every 80 ticks (4 seconds).
func (b Beacon) Tick(currentTick int64, pos cube.Pos, w *world.World) {
	if currentTick%80 == 0 {
		before := b.level
		// Recalculating pyramid level and powering up players in range once every 4 seconds.
		b.level = b.recalculateLevel(pos, w)
		if before != b.level {
			// Only write the block back when the level actually changed.
			w.SetBlock(pos, b)
		}
		if b.level == 0 {
			// An incomplete pyramid powers nothing.
			return
		}
		if !b.obstructed(pos, w) {
			b.broadcastBeaconEffects(pos, w)
		}
	}
}
// recalculateLevel recalculates the level of the beacon's pyramid and returns it.
// The level can be 0-4: each layer below the beacon must be a full square of
// beacon-powering blocks, one block wider per layer.
func (b Beacon) recalculateLevel(pos cube.Pos, w *world.World) int {
	for lvl := 0; lvl < 4; lvl++ {
		radius := lvl + 1
		y := pos.Y() - radius
		for x := pos.X() - radius; x <= pos.X()+radius; x++ {
			for z := pos.Z() - radius; z <= pos.Z()+radius; z++ {
				if src, ok := w.Block(cube.Pos{x, y, z}).(BeaconSource); !ok || !src.PowersBeacon() {
					// The layer is incomplete: the pyramid ends at lvl layers.
					return lvl
				}
			}
		}
	}
	return 4
}
// obstructed determines whether the beacon is currently obstructed.
func (b Beacon) obstructed(pos cube.Pos, w *world.World) bool {
	// Fast obstructed light calculation.
	if w.SkyLight(pos.Add(cube.Pos{0, 1})) == 15 {
		return false
	}
	// Slow obstructed light calculation, if the fast way out didn't suffice.
	// pos[1] is the Y component of the position.
	return w.HighestLightBlocker(pos.X(), pos.Z()) > pos[1]
}
// broadcastBeaconEffects determines the entities in range which could receive the beacon's powers, and
// determines the powers (effects) that these entities could get. Afterwards, the entities in range that are
// beaconAffected get their according effect(s).
func (b Beacon) broadcastBeaconEffects(pos cube.Pos, w *world.World) {
	// Effect duration scales with the pyramid level: 9 + 2*level seconds,
	// except level 4 which uses one second less.
	seconds := 9 + b.level*2
	if b.level == 4 {
		seconds--
	}
	dur := time.Duration(seconds) * time.Second

	// Establishing what effects are active with the current amount of beacon levels.
	primary, secondary := b.Primary, effect.LastingType(nil)
	switch b.level {
	case 0:
		primary = nil
	case 1:
		// Level 1 does not yet unlock Resistance, Jump Boost or Strength.
		switch primary.(type) {
		case effect.Resistance, effect.JumpBoost, effect.Strength:
			primary = nil
		}
	case 2:
		// Strength is only unlocked from level 3 onwards.
		if _, ok := primary.(effect.Strength); ok {
			primary = nil
		}
	case 3:
		// Accept all effects for primary, but leave secondary as nil.
	default:
		secondary = b.Secondary
	}
	var primaryEff, secondaryEff effect.Effect
	// Determining whether the primary power is set.
	if primary != nil {
		primaryEff = effect.NewAmbient(primary, 1, dur)
		// Secondary power can only be set if the primary power is set.
		if secondary != nil {
			// It is possible to select 2 primary powers if the beacon's level is 4. This then means that the effect
			// should get a level of 2.
			if primary == secondary {
				primaryEff = effect.NewAmbient(primary, 2, dur)
			} else {
				secondaryEff = effect.NewAmbient(secondary, 1, dur)
			}
		}
	}

	// Finding entities in range: a square column extending the full height of
	// the world, 10 blocks wider per pyramid level.
	r := 10 + (b.level * 10)
	entitiesInRange := w.EntitiesWithin(physics.NewAABB(
		mgl64.Vec3{float64(pos.X() - r), -math.MaxFloat64, float64(pos.Z() - r)},
		mgl64.Vec3{float64(pos.X() + r), math.MaxFloat64, float64(pos.Z() + r)},
	), nil)
	for _, e := range entitiesInRange {
		if p, ok := e.(beaconAffected); ok {
			if primaryEff.Type() != nil {
				p.AddEffect(primaryEff)
			}
			if secondaryEff.Type() != nil {
				p.AddEffect(secondaryEff)
			}
		}
	}
}
// beaconAffected represents an entity that can be powered by a beacon. Only players will implement this.
type beaconAffected interface {
	// AddEffect adds a specific effect to the entity that implements this interface.
	AddEffect(e effect.Effect)
	// BeaconAffected returns whether this entity can be powered by a beacon.
	BeaconAffected() bool
}
// EncodeItem returns the item ID and metadata value of the beacon.
func (Beacon) EncodeItem() (name string, meta int16) {
	return "minecraft:beacon", 0
}
// EncodeBlock ...
func (Beacon) EncodeBlock() (string, map[string]interface{}) {
return "minecraft:beacon", nil
} | server/block/beacon.go | 0.709019 | 0.432483 | beacon.go | starcoder |
package storage
import (
"math"
"github.com/tony2001/prometheus/v2/tsdb/chunkenc"
)
// MemoizedSeriesIterator wraps an iterator with a buffer to look back the previous element.
type MemoizedSeriesIterator struct {
	it    chunkenc.Iterator
	delta int64 // maximum look-back window used by Seek

	lastTime int64 // timestamp of the underlying iterator's current element
	ok       bool  // whether the underlying iterator still has elements

	// Keep track of the previously returned value.
	prevTime  int64 // math.MinInt64 means no previous element is buffered
	prevValue float64
}
// NewMemoizedEmptyIterator is like NewMemoizedIterator but it's initialised with an empty iterator.
func NewMemoizedEmptyIterator(delta int64) *MemoizedSeriesIterator {
	return NewMemoizedIterator(chunkenc.NewNopIterator(), delta)
}
// NewMemoizedIterator returns a new iterator that buffers the values within the
// time range of the current element and the duration of delta before.
func NewMemoizedIterator(it chunkenc.Iterator, delta int64) *MemoizedSeriesIterator {
	m := &MemoizedSeriesIterator{
		delta:    delta,
		prevTime: math.MinInt64, // nothing buffered yet
	}
	m.Reset(it)
	return m
}
// Reset the internal state to reuse the wrapper with the provided iterator.
func (b *MemoizedSeriesIterator) Reset(it chunkenc.Iterator) {
	b.it = it
	b.lastTime = math.MinInt64
	b.ok = true
	b.prevTime = math.MinInt64 // no previous element buffered yet
	// Advance the underlying iterator to its first element; its state is
	// read later by Next and Seek through At/Next.
	it.Next()
}
// PeekPrev returns the previous element of the iterator. If there is none buffered,
// ok is false.
func (b *MemoizedSeriesIterator) PeekPrev() (t int64, v float64, ok bool) {
	// prevTime == math.MinInt64 is the sentinel for "nothing buffered".
	if b.prevTime != math.MinInt64 {
		return b.prevTime, b.prevValue, true
	}
	return 0, 0, false
}
// Seek advances the iterator to the element at time t or greater. It returns
// false when the iterator is exhausted before reaching t.
func (b *MemoizedSeriesIterator) Seek(t int64) bool {
	t0 := t - b.delta

	if t0 > b.lastTime {
		// Reset the previously stored element because the seek advanced
		// more than the delta.
		b.prevTime = math.MinInt64

		b.ok = b.it.Seek(t0)
		if !b.ok {
			return false
		}
		b.lastTime, _ = b.it.At()
	}

	if b.lastTime >= t {
		return true
	}
	// Step one element at a time so that the previous element stays buffered
	// for PeekPrev.
	for b.Next() {
		if b.lastTime >= t {
			return true
		}
	}

	return false
}
// Next advances the iterator to the next element. It returns false once the
// underlying iterator is exhausted.
func (b *MemoizedSeriesIterator) Next() bool {
	if !b.ok {
		return false
	}

	// Keep track of the previous element.
	b.prevTime, b.prevValue = b.it.At()

	b.ok = b.it.Next()
	if b.ok {
		b.lastTime, _ = b.it.At()
	}

	return b.ok
}
// Values returns the current element (timestamp and value) of the iterator.
func (b *MemoizedSeriesIterator) Values() (int64, float64) {
	return b.it.At()
}
// Err returns the last encountered error.
func (b *MemoizedSeriesIterator) Err() error {
return b.it.Err()
} | v2/storage/memoized_iterator.go | 0.842863 | 0.436682 | memoized_iterator.go | starcoder |
package interpolate
import "math"
type Interp func(float64, float64, float64) float64
// Interpolate funcs
// https://play.golang.org/p/OKSM_h0zn-
func Linear(t, start, end float64) float64 {
return t*(end-start) + start
}
// For gradient Color correction
// http://youtu.be/LKnqECcg6Gw
func LinearSqr(t, start, end float64) float64 {
return math.Sqrt(Linear(t, math.Pow(start, 2), math.Pow(end, 2)))
}
func Cosine(t, start, end float64) float64 {
t = 0.5 - (math.Cos(math.Pi*t) / 2)
return Linear(t, start, end)
}
func Sine(t, start, end float64) float64 {
t = math.Sin((math.Pi * t) / 2)
return Linear(t, start, end)
}
func SmoothStep(t, start, end float64) float64 {
t = math.Pow(t, 2) * (3 - (2 * t))
return Linear(t, start, end)
}
func SmoothStepDouble(t, start, end float64) float64 {
t = SmoothStep(t, start, end)
return SmoothStep(t, start, end)
}
func Acceleration(t, start, end float64) float64 {
t = math.Pow(t, 2)
return Linear(t, start, end)
}
func CubicAcceleration(t, start, end float64) float64 {
t = math.Pow(t, 3)
return Linear(t, start, end)
}
func Deccelaration(t, start, end float64) float64 {
t = 1 - math.Pow(1-t, 2)
return Linear(t, start, end)
}
func CubicDeccelaration(t, start, end float64) float64 {
t = 1 - math.Pow(1-t, 3)
return Linear(t, start, end)
}
func Sigmoid(t, start, end float64) float64 {
t = 1 / (1 + math.Exp(-t))
return Linear(t, start, end)
}
//http://cubic-bezier.com
// NyuFx
func fact(x int) int {
if x == 0 {
return 1
}
return x * fact(x-1)
}
// Binomial coefficient
func binomial(i, n int) float64 {
return float64(fact(n) / (fact(i) * fact(n-i)))
}
// Bernstein polynom
func bernstein(t float64, i, n int) float64 {
return binomial(i, n) * math.Pow(t, float64(i)) * math.Pow((1-t), float64(n-i))
}
// Bezier Curve
func BezierCurve(t float64, p []float64) float64 {
// Calculate coordinate
n := len(p) - 1
num := 0.0
for i, position := range p {
num += position * bernstein(t, i, n)
}
return num
}
func CustomCurve(t float64, curve []float64, start, end float64) float64 {
t = BezierCurve(t, curve)
return Linear(t, start, end)
}
// http://matthewlein.com/ceaser/
func Ease(t, start, end float64) float64 {
return CustomCurve(t, []float64{0.25, 0.1, 0.25, 1}, start, end)
}
func EaseIn(t, start, end float64) float64 {
return CustomCurve(t, []float64{0.42, 0, 1, 1}, start, end)
}
func EaseOut(t, start, end float64) float64 {
return CustomCurve(t, []float64{0, 0, 0.58, 1}, start, end)
}
func EaseInOut(t, start, end float64) float64 {
return CustomCurve(t, []float64{0.420, 0.000, 0.580, 1.000}, start, end)
}
// Penner Equation (aproximated)
func EaseInQuad(t, start, end float64) float64 {
return CustomCurve(t, []float64{0.550, 0.085, 0.680, 0.530}, start, end)
}
func EaseInCubic(t, start, end float64) float64 {
return CustomCurve(t, []float64{0.550, 0.055, 0.675, 0.190}, start, end)
}
func EaseInQuart(t, start, end float64) float64 {
return CustomCurve(t, []float64{0.895, 0.030, 0.685, 0.220}, start, end)
}
func EaseInQuint(t, start, end float64) float64 {
return CustomCurve(t, []float64{0.755, 0.050, 0.855, 0.060}, start, end)
}
func EaseInSine(t, start, end float64) float64 {
return CustomCurve(t, []float64{0.470, 0.000, 0.745, 0.715}, start, end)
}
func EaseInExpo(t, start, end float64) float64 {
return CustomCurve(t, []float64{0.950, 0.050, 0.795, 0.035}, start, end)
}
func EaseInCirc(t, start, end float64) float64 {
return CustomCurve(t, []float64{0.600, 0.040, 0.980, 0.335}, start, end)
}
func EaseOutQuad(t, start, end float64) float64 {
return CustomCurve(t, []float64{0.250, 0.460, 0.450, 0.940}, start, end)
}
func EaseOutCubic(t, start, end float64) float64 {
return CustomCurve(t, []float64{0.215, 0.610, 0.355, 1.000}, start, end)
}
func EaseOutQuart(t, start, end float64) float64 {
return CustomCurve(t, []float64{0.165, 0.840, 0.440, 1.000}, start, end)
}
func EaseOutQuint(t, start, end float64) float64 {
return CustomCurve(t, []float64{0.230, 1.000, 0.320, 1.000}, start, end)
}
func EaseOutSine(t, start, end float64) float64 {
return CustomCurve(t, []float64{0.390, 0.575, 0.565, 1.000}, start, end)
}
func EaseOutExpo(t, start, end float64) float64 {
return CustomCurve(t, []float64{0.190, 1.000, 0.220, 1.000}, start, end)
}
func EaseOutCirc(t, start, end float64) float64 {
return CustomCurve(t, []float64{0.075, 0.820, 0.165, 1.000}, start, end)
}
func EaseInOutQuad(t, start, end float64) float64 {
return CustomCurve(t, []float64{0.455, 0.030, 0.515, 0.955}, start, end)
}
func EaseInOutCubic(t, start, end float64) float64 {
return CustomCurve(t, []float64{0.645, 0.045, 0.355, 1.000}, start, end)
}
func EaseInOutQuart(t, start, end float64) float64 {
return CustomCurve(t, []float64{0.770, 0.000, 0.175, 1.000}, start, end)
}
func EaseInOutQuint(t, start, end float64) float64 {
return CustomCurve(t, []float64{0.860, 0.000, 0.070, 1.000}, start, end)
}
func EaseInOutSine(t, start, end float64) float64 {
return CustomCurve(t, []float64{0.445, 0.050, 0.550, 0.950}, start, end)
}
func EaseInOutExpo(t, start, end float64) float64 {
return CustomCurve(t, []float64{1.000, 0.000, 0.000, 1.000}, start, end)
}
func EaseInOutCirc(t, start, end float64) float64 {
return CustomCurve(t, []float64{0.785, 0.135, 0.150, 0.860}, start, end)
}
// KAFX Equations
func Backstart(t, start, end float64) float64 {
return CustomCurve(
t, []float64{0, 0, 0.2, -0.3, 0.6, 0.26, 1, 1}, start, end)
}
func Boing(t, start, end float64) float64 {
return CustomCurve(
t, []float64{0, 0, 0.42, 0.0, 0.58, 1.5, 1, 1}, start, end)
}
// IRange
func IRange(n int, start, end float64, f Interp) (rng []float64) {
for i := 0; i < n; i++ {
t := float64(i) / float64(n-1)
rng = append(rng, f(t, start, end))
}
return rng
}
func ICircleRange(n int, f Interp) []float64 {
return IRange(n+1, 0.0, 360.0, f)[:n]
}
func BezierCurveRange(n int, points []float64) (rng []float64) {
for i := 0; i < n; i++ {
t := float64(i) / float64(n-1)
rng = append(rng, BezierCurve(t, points))
}
return rng
} | interpolate/interpolate.go | 0.893988 | 0.731346 | interpolate.go | starcoder |
package driver
type Read struct {
Key string
Raw []byte
}
type ResultsIterator interface {
// Next returns the next item in the result set. The `QueryResult` is expected to be nil when
// the iterator gets exhausted
Next() (*Read, error)
// Close releases resources occupied by the iterator
Close()
}
type VersionedRead struct {
Key string
Raw []byte
Block uint64
IndexInBlock int
}
type VersionedResultsIterator interface {
// Next returns the next item in the result set. The `QueryResult` is expected to be nil when
// the iterator gets exhausted
Next() (*VersionedRead, error)
// Close releases resources occupied by the iterator
Close()
}
// VersionedPersistence models a versioned key-value storage place
type VersionedPersistence interface {
	// SetState sets the given value for the given namespace, key, and version
	SetState(namespace, key string, value []byte, block, txnum uint64) error
	// GetState gets the value and version (block, txnum) for given namespace and key
	GetState(namespace, key string) ([]byte, uint64, uint64, error)
	// DeleteState deletes the given namespace and key
	DeleteState(namespace, key string) error
	// GetStateMetadata gets the metadata and version (block, txnum) for given namespace and key
	GetStateMetadata(namespace, key string) (map[string][]byte, uint64, uint64, error)
	// SetStateMetadata sets the given metadata for the given namespace, key, and version
	SetStateMetadata(namespace, key string, metadata map[string][]byte, block, txnum uint64) error
	// GetStateRangeScanIterator returns an iterator that contains all the key-values between given key ranges.
	// startKey is included in the results and endKey is excluded. An empty startKey refers to the first available key
	// and an empty endKey refers to the last available key. For scanning all the keys, both the startKey and the endKey
	// can be supplied as empty strings. However, a full scan should be used judiciously for performance reasons.
	// The returned VersionedResultsIterator contains results of type *VersionedRead.
	GetStateRangeScanIterator(namespace string, startKey string, endKey string) (VersionedResultsIterator, error)
	// Close closes this persistence instance
	Close() error
	// BeginUpdate starts the session
	BeginUpdate() error
	// Commit commits the changes since BeginUpdate
	Commit() error
	// Discard discards the changes since BeginUpdate
	Discard() error
}
// Persistence models a key-value storage place
type Persistence interface {
	// SetState sets the given value for the given namespace and key
	SetState(namespace, key string, value []byte) error
	// GetState gets the value for given namespace and key
	GetState(namespace, key string) ([]byte, error)
	// DeleteState deletes the given namespace and key
	DeleteState(namespace, key string) error
	// GetStateRangeScanIterator returns an iterator that contains all the key-values between given key ranges.
	// startKey is included in the results and endKey is excluded. An empty startKey refers to the first available key
	// and an empty endKey refers to the last available key. For scanning all the keys, both the startKey and the endKey
	// can be supplied as empty strings. However, a full scan should be used judiciously for performance reasons.
	// The returned ResultsIterator contains results of type *Read.
	GetStateRangeScanIterator(namespace string, startKey string, endKey string) (ResultsIterator, error)
	// Close closes this persistence instance
	Close() error
	// BeginUpdate starts the session
	BeginUpdate() error
	// Commit commits the changes since BeginUpdate
	Commit() error
	// Discard discards the changes since BeginUpdate
	Discard() error
}
// Driver is the entry point of a storage backend: it opens versioned
// and non-versioned persistence instances for a given data source.
type Driver interface {
	// NewVersioned returns a new VersionedPersistence for the passed data source
	NewVersioned(dataSourceName string) (VersionedPersistence, error)
	// New returns a new Persistence for the passed data source
	New(dataSourceName string) (Persistence, error)
}
package format
import (
"math"
"strconv"
"time"
)
// https://en.wikipedia.org/wiki/Measuring_network_throughput
// https://en.wikipedia.org/wiki/Data_rate_units
const (
	// Decimal (SI) prefixes: powers of 1000.
	kilo = float64(1000)
	mega = float64(1000) * kilo
	giga = float64(1000) * mega
	tera = float64(1000) * giga
	// Binary (IEC) prefixes: powers of 1024.
	kibi = float64(1024)
	mebi = float64(1024) * kibi
	gibi = float64(1024) * mebi
	tebi = float64(1024) * gibi
	// none marks the last entry of a rate table: there is no larger
	// unit, so format always selects it as the fallback.
	none = -1
)
// rate describes one unit in a rate table.
type rate struct {
	multiple float64 // divisor applied to a per-second value to express it in this unit
	next     float64 // threshold of the next larger unit, or none for the last entry
	symbol   string  // unit suffix appended to the formatted value
}
// The four tables below are ordered smallest to largest; the final
// entry uses next == none so format always finds a match.
// The redundant `rate` element type in each literal has been dropped
// (gofmt -s composite-literal simplification).

// bitPrefixesDecimal: bit rates with decimal (SI) prefixes.
var bitPrefixesDecimal = []rate{
	{multiple: 1, next: kilo, symbol: "bit/s"},
	{multiple: kilo, next: mega, symbol: "kbit/s"},
	{multiple: mega, next: giga, symbol: "Mbit/s"},
	{multiple: giga, next: tera, symbol: "Gbit/s"},
	{multiple: tera, next: none, symbol: "Tbit/s"},
}

// bitPrefixesBinary: bit rates with binary (IEC) prefixes.
var bitPrefixesBinary = []rate{
	{multiple: 1, next: kibi, symbol: "bit/s"},
	{multiple: kibi, next: mebi, symbol: "Kibit/s"},
	{multiple: mebi, next: gibi, symbol: "Mibit/s"},
	{multiple: gibi, next: tebi, symbol: "Gibit/s"},
	{multiple: tebi, next: none, symbol: "Tibit/s"},
}

// bytePrefixesDecimal: byte rates with decimal (SI) prefixes.
var bytePrefixesDecimal = []rate{
	{multiple: 1, next: kilo, symbol: "B/s"},
	{multiple: kilo, next: mega, symbol: "kB/s"},
	{multiple: mega, next: giga, symbol: "MB/s"},
	{multiple: giga, next: tera, symbol: "GB/s"},
	{multiple: tera, next: none, symbol: "TB/s"},
}

// bytePrefixesBinary: byte rates with binary (IEC) prefixes.
var bytePrefixesBinary = []rate{
	{multiple: 1, next: kibi, symbol: "B/s"},
	{multiple: kibi, next: mebi, symbol: "KiB/s"},
	{multiple: mebi, next: gibi, symbol: "MiB/s"},
	{multiple: gibi, next: tebi, symbol: "GiB/s"},
	{multiple: tebi, next: none, symbol: "TiB/s"},
}
// FormatBitsDecimal outputs n measured in bit per second (bit/s), kilobits per
// second (kbit/s), megabits per second (Mbit/s) or gigabits per second (Gbit/s)
// whichever is best represented. 1 kbit is defined as 1000 bits.
func FormatBitsDecimal(n float64, period time.Duration, decimals int) string {
	return format(n, period, decimals, bitPrefixesDecimal)
}

// FormatBitsBinary outputs n measured in bit per second (bit/s), kibibit per
// second (Kibit/s), mebibit per second (Mibit/s) or gibibit per second (Gibit/s)
// whichever is best represented. 1 kibibit is defined as 1024 bits.
func FormatBitsBinary(n float64, period time.Duration, decimals int) string {
	return format(n, period, decimals, bitPrefixesBinary)
}

// FormatBytesDecimal outputs n measured in byte per second (B/s), kilobyte per
// second (kB/s), megabyte per second (MB/s) or gigabyte per second (GB/s)
// whichever is best represented. 1 kilobyte is defined as 1000 bytes.
func FormatBytesDecimal(n float64, period time.Duration, decimals int) string {
	return format(n, period, decimals, bytePrefixesDecimal)
}

// FormatBytesBinary outputs n measured in byte per second (B/s), kibibyte per
// second (KiB/s), mebibyte per second (MiB/s) or gibibyte per second (GiB/s)
// whichever is best represented. 1 kibibyte is defined as 1024 bytes.
func FormatBytesBinary(n float64, period time.Duration, decimals int) string {
	return format(n, period, decimals, bytePrefixesBinary)
}
func format(n float64, period time.Duration, decimals int, rates []rate) string {
var r rate
bps := (n / float64(period)) * float64(time.Second)
for _, cr := range rates {
if bps < cr.next || cr.next == none {
r = cr
break
}
}
measure := (bps / r.multiple)
measure *= math.Pow10(decimals)
measure = math.Trunc(measure)
measure /= math.Pow10(decimals)
return strconv.FormatFloat(measure, 'f', decimals, 64) + " " + r.symbol
} | pkg/helper/format/throughput.go | 0.754825 | 0.457137 | throughput.go | starcoder |
Package iState is used to easily manage perform CRUD operations
on states/assets in Hyperledger Fabric chaincode.
It also can be used to easily enable encryption when storing
states and auto decryption when reading from state db.
The main purpose of this package is to enable high performance
Rich Queries when using levelDB as state db.
Note: To enable high performance queries, it has an indexing mechanism
that may take extra storage space.
Requirement:
- primary key tag ("primary") must be present in structure
- primary key should be universally unique and it is not handled by this package.
It should be handled by the application that imports this package.
- "istate" tag must be present for the fields in struct that needs to be available for query support.
- Original marshalled structures will be stored with primary key as the key in db.
External application may fetch the structure based on key directly using GetState() API. // This will not be true once original key optimization is done.
Restrictions:
- Cannot use type "interface{}" for fields
- Cannot use the following ascii characters in the struct names or field values:
- "\000"
- "\001"
- "\002"
- "~" (or) "\176"
- "\177"
- Cannot use these in struct field names:
- "."
- "*"
- ".docType"
- ".keyref"
- ".value"
- ".fieldName"
- (For future) It is good to avoid having field names starting with "." in the structs
To be noted:
- CreateState, ReadState, UpdateState and DeleteState functions does not validate if state exists or not.
Validation must be handled by the external program.
- Query:
- If an array/slice/map of elements needs to be queried, the following applies:
- eq -> at least one element in array/slice/map is equal to the value given.
- neq -> at least one element in array/slice/map is not equal to the value given.
- *eq -> all the elements in array/slice/map must be equal to the value given.
- *neq -> all the elements in array/slice/map must be not equal to the value given.
Note: Here, "map" implies that the value part of the map needs to be queried without knowing the key part of the map.
Eg: "aMap.*":"eq somevalue" as opposed to "aMap.key1": "eq somevalue"
- Useful ENV for peer container:
- CORE_LEDGER_STATE_TOTALQUERYLIMIT=1000000 // Query limit
- CORE_CHAINCODE_EXECUTETIMEOUT=300s // To avoid timeout during compaction
- CORE_VM_DOCKER_HOSTCONFIG_MEMORY=5368709120 // To raise RAM limit of container
Known Limitations and Issues:
Fixed:
- Indexing: A map with integer / number as key type will still be
considered as string when indexing.
*/
package istate
import (
// "github.com/hyperledger/fabric/core/mocks/txvalidator"
)
// Debts:
// Cleanup *stub - remove stub from all internal functions
// Clean up errors when there is time (error.go)
// fetchCmplx and evalCmplx function is different from others, and needs extra info, try clean it up for symmetry
// Encryption support?
// Enable Load Cache !! Important !!
// Adding prefix to orig key - saves 500ms for 6000 record fetch
// Enable Compaction support
// 1. Include data in index as optional
// 2. Options to activate / deactivate / load cache
// 3. Protobuf
// 4. Trying out GetMultipleStates()
// 5. Fix fieldJSONIndexMap and other meta data
// 6. Load Docs Counter from db
package transformation
import (
"math"
"time"
)
var (
	// emptyDatapoint is the sentinel "no value" datapoint; emptiness is
	// encoded as a NaN value (see Datapoint.IsEmpty).
	emptyDatapoint = Datapoint{Value: math.NaN()}
)

// Datapoint is a metric data point containing a timestamp in
// Unix nanoseconds since epoch and a value.
type Datapoint struct {
	TimeNanos int64
	Value     float64
}

// IsEmpty returns whether this is an empty datapoint.
// A datapoint is empty iff its value is NaN.
func (dp Datapoint) IsEmpty() bool { return math.IsNaN(dp.Value) }
// UnaryTransform is a unary transformation that takes a single
// datapoint as input and transforms it into a datapoint as output.
// It can keep state if it requires.
type UnaryTransform interface {
	Evaluate(dp Datapoint) Datapoint
}

// UnaryTransformFn implements UnaryTransform as a function.
type UnaryTransformFn func(dp Datapoint) Datapoint

// Evaluate invokes fn on dp, satisfying UnaryTransform.
func (fn UnaryTransformFn) Evaluate(dp Datapoint) Datapoint {
	return fn(dp)
}
// FeatureFlags holds options passed into transformations from
// the aggregator configuration file. It is currently empty.
// nolint:gofumpt
type FeatureFlags struct {
}
// BinaryTransform is a binary transformation that takes the
// previous and the current datapoint as input and produces
// a single datapoint as the transformation result.
// It can keep state if it requires.
type BinaryTransform interface {
	Evaluate(prev, curr Datapoint, flags FeatureFlags) Datapoint
}

// BinaryTransformFn implements BinaryTransform as a function.
type BinaryTransformFn func(prev, curr Datapoint, flags FeatureFlags) Datapoint

// Evaluate invokes fn on (prev, curr, flags), satisfying BinaryTransform.
func (fn BinaryTransformFn) Evaluate(prev, curr Datapoint, flags FeatureFlags) Datapoint {
	return fn(prev, curr, flags)
}
// UnaryMultiOutputTransform is like UnaryTransform, but can output an additional datapoint.
// The additional datapoint is not passed to subsequent transforms.
type UnaryMultiOutputTransform interface {
	// Evaluate applies the transform on the provided datapoint.
	Evaluate(dp Datapoint, resolution time.Duration) (Datapoint, Datapoint)
}

// UnaryMultiOutputTransformFn implements UnaryMultiOutputTransform as a function.
type UnaryMultiOutputTransformFn func(dp Datapoint, resolution time.Duration) (Datapoint, Datapoint)

// Evaluate invokes fn on (dp, resolution), satisfying UnaryMultiOutputTransform.
func (fn UnaryMultiOutputTransformFn) Evaluate(dp Datapoint, resolution time.Duration) (Datapoint, Datapoint) {
	return fn(dp, resolution)
}
package radius
import (
"math"
"github.com/dayaftereh/stargen/mathf"
"github.com/dayaftereh/stargen/stargen/constants"
"github.com/dayaftereh/stargen/types"
)
// VolumeRadius calculates the radius from the volume. The mass is in
// units of solar masses, and the density is in units of grams/cc.
// The radius returned is in units of km.
func VolumeRadius(mass float64, density float64) float64 {
	grams := mass * constants.SolarMassInGrams
	volumeCC := grams / density
	// Invert the sphere volume formula V = 4/3 * pi * r^3, then
	// convert centimeters to kilometers.
	radiusCM := math.Pow((3.0*volumeCC)/(4.0*math.Pi), (1.0 / 3.0))
	return radiusCM / constants.CMPerKM
}
// FractionRadius derives a planet radius (in Earth radii) from the
// ice, rock, and carbon mass fractions by computing a
// composition-weighted bulk density and converting it to a radius.
func FractionRadius(mass, imf, rmf, cmf float64) float64 {
	mass /= constants.SunMassInEarthMasses
	ice := imf * constants.IceDensity
	carbon := (cmf * rmf) * constants.CarbonDensity
	silicate := (rmf - (cmf * rmf)) * constants.RockDensity
	iron := (1.0 - (rmf + imf)) * constants.IronDensity
	density := ice + silicate + carbon + iron
	return VolumeRadius(mass, density) / constants.EarthRadiusInKM
}
// RangeAdjust linearly blends y1 and y2 according to where x falls in
// [lower, upper]: x == lower yields y1 and x == upper yields y2.
// Values of x outside the range extrapolate.
func RangeAdjust(x, y1, y2, lower, upper float64) float64 {
	span := upper - lower
	wUpper := (x - lower) / span
	wLower := 1.0 - wUpper
	return (wLower * y1) + (wUpper * y2)
}
// FudgedRadius estimates a planet radius (km) by blending the radii of
// idealized compositions (iron, rock, and water-ice mixes) according to
// the planet's ice, rock, and carbon mass fractions.
//
// BUG FIX(review): the two non-ice blends previously used
// "(lowerFraction + radius)" instead of "(lowerFraction * radius)",
// which added the full endpoint radius plus a stray fraction rather
// than computing a weighted average. Every other blend in this
// function multiplies; both occurrences now multiply as well.
func FudgedRadius(planet *types.Planet) float64 {
	mass := planet.Mass * constants.SunMassInEarthMasses
	imf := planet.IceMassFraction
	rmf := planet.RockMassFraction
	cmf := planet.CarbonMassFraction
	var iceRockRadius, nonIceRockRadius float64
	if rmf <= 0.5 {
		r := 0.5 - 0.0
		upperFraction := rmf / r
		lowerFraction := 1.0 - upperFraction
		nonIceRockRadius = (upperFraction * halfRockHalfIronRadius(mass, cmf)) + (lowerFraction * ironRadius(mass))
	} else {
		r := 1.0 - 0.5
		// NOTE(review): rmf is nudged by a quadratic fudge factor
		// before weighting — presumably intentional given the name.
		rmf += mathf.QuadTrend(-3.0, 4.5, -1.5, rmf)
		upperFraction := rmf / r
		lowerFraction := 1.0 - upperFraction
		nonIceRockRadius = (upperFraction * rockRadius(mass, cmf)) + (lowerFraction * halfRockHalfIronRadius(mass, cmf))
	}
	if imf <= 0.5 {
		r := 0.5 - 0.0
		upperFraction := imf / r
		lowerFraction := 1.0 - upperFraction
		iceRockRadius = (upperFraction * halfRockHalfWaterRadius(mass, cmf)) + (lowerFraction * rockRadius(mass, cmf))
	} else if imf <= 0.75 {
		r := 0.75 - 0.5
		upperFraction := (imf - 0.5) / r
		lowerFraction := 1.0 - upperFraction
		iceRockRadius = (upperFraction * oneQuaterRockThreeFourthsWaterRadius(mass, cmf)) + (lowerFraction * halfRockHalfWaterRadius(mass, cmf))
	} else {
		r := 1.0 - 0.75
		upperFraction := (imf - 0.75) / r
		lowerFraction := 1.0 - upperFraction
		iceRockRadius = (upperFraction * waterRadius(mass)) + (lowerFraction * oneQuaterRockThreeFourthsWaterRadius(mass, cmf))
	}
	// Final blend: ice-bearing vs ice-free radius, weighted by the ice
	// mass fraction, converted from Earth radii to km.
	radius := (iceRockRadius * imf) + (nonIceRockRadius * (1.0 - imf))
	radius *= constants.EarthRadiusInKM
	return radius
}
// RadiusImproved estimates a planet radius (km) for the given mass (in
// solar masses) by interpolating between model radii of idealized
// compositions (iron, rock, water ice, and their mixtures) using the
// planet's ice, rock, and carbon mass fractions.
func RadiusImproved(mass float64, planet *types.Planet) float64 {
	mass = mass * constants.SunMassInEarthMasses
	imf := planet.IceMassFraction
	rmf := planet.RockMassFraction
	cmf := planet.CarbonMassFraction
	// Fraction of the non-rock remainder that is iron (only meaningful
	// when the planet is not pure ice).
	ironRatio := 0.0
	if imf < 1.0 {
		ironRatio = (1.0 - imf - rmf) / (1.0 - rmf)
	}
	// Guard against a zero mass; fall back to the proto-planet mass.
	if mathf.CloseZero(mass) {
		mass = constants.ProtoPlanetMass
	}
	// Reference radii for ice-free compositions at 0%, 50%, and 100% rock.
	nonIceRockRadiiZero := ironRadius(mass)
	nonIceRockRadiiHalf := halfRockHalfIronRadius(mass, cmf)
	nonIceRockRadiiFull := rockRadius(mass, cmf)
	// Ice-free planet: interpolate on the rock mass fraction only.
	if mathf.CloseZero(imf) {
		var radius float64
		if rmf < 0.5 {
			radius = PlanetRadiusHelper(rmf, 0.0, nonIceRockRadiiZero, 0.5, nonIceRockRadiiHalf, 1.0, nonIceRockRadiiFull)
		} else {
			radius1 := PlanetRadiusHelper(rmf, 0.0, nonIceRockRadiiZero, 0.5, nonIceRockRadiiHalf, 1.0, nonIceRockRadiiFull)
			radius2 := PlanetRadiusHelper2(rmf, 0.5, nonIceRockRadiiHalf, 1.0, nonIceRockRadiiFull)
			radius = RangeAdjust(rmf, radius1, radius2, 0.5, 1.0)
		}
		return radius * constants.EarthRadiusInKM
	}
	// Reference radii for rock/water-ice mixtures.
	iceRockRadiiZero := rockRadius(mass, cmf)
	iceRockRadiiHalf := halfRockHalfWaterRadius(mass, cmf)
	iceRockRadiiQuater := oneQuaterRockThreeFourthsWaterRadius(mass, cmf)
	iceRockRadiiFull := waterRadius(mass)
	var iceRockRadius float64
	if imf < 0.5 {
		iceRockRadius = PlanetRadiusHelper(imf, 0.0, iceRockRadiiZero, 0.5, iceRockRadiiHalf, 0.75, iceRockRadiiQuater)
	} else if imf < 0.75 {
		radius1 := PlanetRadiusHelper(imf, 0.0, iceRockRadiiZero, 0.5, iceRockRadiiHalf, 0.75, iceRockRadiiQuater)
		radius2 := PlanetRadiusHelper(imf, 0.5, iceRockRadiiHalf, 0.75, iceRockRadiiQuater, 1.0, iceRockRadiiFull)
		iceRockRadius = RangeAdjust(imf, radius1, radius2, 0.5, 0.75)
	} else {
		// NOTE(review): this branch handles imf >= 0.75 yet blends over
		// [0.5, 0.75] (extrapolating); by analogy with the iron/ice
		// branches below it looks like it should be (0.75, 1.0) —
		// confirm against the upstream stargen source before changing.
		radius1 := PlanetRadiusHelper(imf, 0.5, iceRockRadiiHalf, 0.75, iceRockRadiiQuater, 1.0, iceRockRadiiFull)
		radius2 := PlanetRadiusHelper2(imf, 0.75, iceRockRadiiQuater, 1.0, iceRockRadiiFull)
		iceRockRadius = RangeAdjust(imf, radius1, radius2, 0.5, 0.75)
	}
	// Reference radii for iron/water-ice mixtures.
	iceIronRadiiZero := ironRadius(mass)
	iceIronRadii047 := solid0point953Iron0point047WaterRadius(mass)
	iceIronRadii49 := solid0point51Iron0point49WaterRadius(mass)
	iceIronRadii736 := solid0point264Iron0point736WaterRadius(mass)
	iceIronRadiiFull := iceRockRadiiFull
	var iceIronRadius float64
	if imf < 0.047 {
		iceIronRadius = PlanetRadiusHelper(imf, 0.0, iceIronRadiiZero, 0.047, iceIronRadii047, 0.49, iceIronRadii49)
	} else if imf < 0.49 {
		radius1 := PlanetRadiusHelper(imf, 0.0, iceIronRadiiZero, 0.047, iceIronRadii047, 0.49, iceIronRadii49)
		radius2 := PlanetRadiusHelper(imf, 0.047, iceIronRadii047, 0.49, iceIronRadii49, 0.736, iceIronRadii736)
		iceIronRadius = RangeAdjust(imf, radius1, radius2, 0.047, 0.49)
	} else if imf < 0.736 {
		radius1 := PlanetRadiusHelper(imf, 0.047, iceIronRadii047, 0.49, iceIronRadii49, 0.736, iceIronRadii736)
		radius2 := PlanetRadiusHelper(imf, 0.49, iceIronRadii49, 0.736, iceIronRadii736, 1.0, iceIronRadiiFull)
		iceIronRadius = RangeAdjust(imf, radius1, radius2, 0.49, 0.736)
	} else {
		radius1 := PlanetRadiusHelper(imf, 0.49, iceIronRadii49, 0.736, iceIronRadii736, 1.0, iceIronRadiiFull)
		radius2 := PlanetRadiusHelper2(imf, 0.736, iceIronRadii736, 1.0, iceIronRadiiFull)
		iceIronRadius = RangeAdjust(imf, radius1, radius2, 0.736, 1.0)
	}
	// Correction factor: how much the half-rock/half-iron radius
	// deviates from the straight average of the two endpoints.
	halfMassFactor := 1.0
	if mass > 0.0 {
		average := (nonIceRockRadiiZero + nonIceRockRadiiFull) / 2.0
		halfMassFactor = nonIceRockRadiiHalf / average
	}
	// Final blend of the rock/ice and iron/ice estimates by iron ratio.
	averageIceRockRadius := ((iceRockRadius + iceIronRadius) / 2.0) * halfMassFactor
	radius := PlanetRadiusHelper(ironRatio, 0.0, iceRockRadius, 0.5, averageIceRockRadius, 1.0, iceIronRadius)
	return radius * constants.EarthRadiusInKM
}
// PlanetRadiusHelper2 interpolates a radius for planetMass from two
// known (mass, radius) samples using a logarithmic trend fit.
func PlanetRadiusHelper2(planetMass, mass1, radius1, mass2, radius2 float64) float64 {
	coeffA, coeffB := mathf.LogFix(mass1, radius1, mass2, radius2)
	return mathf.LnTrend(coeffA, coeffB, planetMass)
}
// PlanetRadiusHelper interpolates a radius for planetMass from three
// known (mass, radius) samples using a quadratic trend fit.
func PlanetRadiusHelper(planetMass, mass1, radius1, mass2, radius2, mass3, radius3 float64) float64 {
	coeffA, coeffB, coeffC := mathf.QuadFix(mass1, radius1, mass2, radius2, mass3, radius3)
	return mathf.QuadTrend(coeffA, coeffB, coeffC, planetMass)
}
// PlanetRadiusHelper3 interpolates a radius for the given temperature
// from two known (temperature, radius) samples using an exponential
// trend fit. Temperatures are rescaled to thousands before the fit.
func PlanetRadiusHelper3(temperature, temperature1, radius1, temperature2, radius2 float64) float64 {
	scaled := temperature / 1000.0
	scaled1 := temperature1 / 1000.0
	scaled2 := temperature2 / 1000.0
	coeffA, coeffB := mathf.EFix(scaled1, radius1, scaled2, radius2)
	return mathf.ETrend(coeffA, coeffB, scaled)
}
func CalculateLuminosity(planet *types.Planet, sun *types.Sun) float64 {
starLuminosity := sun.Luminosity
return math.Pow(1.0/planet.SemiMajorAxis, 2.0) * starLuminosity
} | stargen/radius/radius.go | 0.820685 | 0.506836 | radius.go | starcoder |
// Package types contains most of the data structures available to/from Noms.
package types
import (
"context"
"github.com/liquidata-inc/dolt/go/store/d"
"github.com/liquidata-inc/dolt/go/store/hash"
)
// Type defines and describes Noms types, both built-in and user-defined.
// Desc provides the composition of the type. It may contain only a types.NomsKind, in the case of
// primitives, or it may contain additional information -- e.g. element Types for compound type
// specializations, field descriptions for structs, etc. Either way, checking Kind() allows code
// to understand how to interpret the rest of the data.
// If Kind() refers to a primitive, then Desc has no more info.
// If Kind() refers to List, Map, Ref, Set, or Union, then Desc is a list of Types describing the element type(s).
// If Kind() refers to Struct, then Desc contains a []field.
type Type struct {
	Desc TypeDesc
}
// newType wraps the given descriptor in a fresh *Type.
func newType(desc TypeDesc) *Type {
	return &Type{Desc: desc}
}
// Describe generates text that should parse back into the type being described.
func (t *Type) Describe(ctx context.Context) (string, error) {
	return EncodedValue(ctx, t)
}

// TargetKind returns the kind of the type this *Type describes (as
// opposed to Kind, which is always TypeKind).
func (t *Type) TargetKind() NomsKind {
	return t.Desc.Kind()
}
// Value interface

// Value returns t itself, or ErrUnknownType if the described kind is
// unknown.
func (t *Type) Value(ctx context.Context) (Value, error) {
	if t.Kind() == UnknownKind {
		return nil, ErrUnknownType
	}
	return t, nil
}
// Equals reports whether other is a *Type with the same target kind and
// the same hash as t. The pointer-identity fast path avoids encoding
// the type at all when the two values are literally the same object.
func (t *Type) Equals(other Value) bool {
	if t == other {
		return true
	}
	otherType, ok := other.(*Type)
	if !ok {
		return false
	}
	h, err := t.Hash(Format_7_18)
	// TODO - fix panics
	d.PanicIfError(err)
	oh, err := other.Hash(Format_7_18)
	// TODO - fix panics
	d.PanicIfError(err)
	return t.TargetKind() == otherType.TargetKind() && h == oh
}
// Less orders t relative to another value using the generic value
// ordering.
func (t *Type) Less(nbf *NomsBinFormat, other LesserValuable) (bool, error) {
	return valueLess(nbf, t, other.(Value))
}

// Hash returns the content hash of the encoded type.
func (t *Type) Hash(nbf *NomsBinFormat) (hash.Hash, error) {
	return getHash(t, nbf)
}

// isPrimitive reports that *Type itself is encoded as a primitive value.
func (t *Type) isPrimitive() bool {
	return true
}

// writeTo serializes the TypeKind tag followed by the type description.
func (t *Type) writeTo(w nomsWriter, nbf *NomsBinFormat) error {
	err := TypeKind.writeTo(w, nbf)
	if err != nil {
		return err
	}
	return t.writeToAsType(w, map[string]*Type{}, nbf)
}

// writeToAsType serializes only the type description, tracking structs
// already seen to handle recursive types.
func (t *Type) writeToAsType(w nomsWriter, seensStructs map[string]*Type, nbf *NomsBinFormat) error {
	return t.Desc.writeTo(w, nbf, t, seensStructs)
}

// WalkValues visits values reachable from the type description.
func (t *Type) WalkValues(ctx context.Context, cb ValueCallback) error {
	return t.Desc.walkValues(cb)
}

// WalkRefs is a no-op: a *Type contains no refs.
func (t *Type) WalkRefs(nbf *NomsBinFormat, cb RefCallback) error {
	return nil
}

// typeOf returns the primitive type describing *Type itself.
func (t *Type) typeOf() (*Type, error) {
	return PrimitiveTypeMap[TypeKind], nil
}

// Kind always returns TypeKind; use TargetKind for the described kind.
func (t *Type) Kind() NomsKind {
	return TypeKind
}

// valueReadWriter is nil: a *Type is not attached to any store.
func (t *Type) valueReadWriter() ValueReadWriter {
	return nil
}
// TypeOf returns the type describing the value. This is not an exact type but
// often a simplification of the concrete type.
func TypeOf(v Value) (*Type, error) {
	t, err := v.typeOf()
	if err != nil {
		return nil, err
	}
	return simplifyType(t, false)
}
// HasStructCycles determines if the type contains any struct cycles.
func HasStructCycles(t *Type) bool {
	return hasStructCycles(t, nil)
}
// hasStructCycles reports whether t, or any type reachable from it,
// refers back to a struct already on the visited path. Only structs
// are pushed onto the path, mirroring how cycles can only form through
// struct fields.
func hasStructCycles(t *Type, visited []*Type) bool {
	if _, seen := indexOfType(t, visited); seen {
		return true
	}
	switch desc := t.Desc.(type) {
	case CompoundDesc:
		for _, elem := range desc.ElemTypes {
			if hasStructCycles(elem, visited) {
				return true
			}
		}
	case StructDesc:
		path := append(visited, t)
		for _, f := range desc.fields {
			if hasStructCycles(f.Type, path) {
				return true
			}
		}
	case CycleDesc:
		panic("unexpected unresolved cycle")
	}
	return false
}
// indexOfType returns the position of t within tl (compared by pointer
// identity) and whether it was found.
func indexOfType(t *Type, tl []*Type) (uint32, bool) {
	for idx := range tl {
		if tl[idx] == t {
			return uint32(idx), true
		}
	}
	return 0, false
}
// The three methods below exist only to satisfy interfaces; they are
// never invoked on *Type and panic if reached.

func (t *Type) readFrom(nbf *NomsBinFormat, b *binaryNomsReader) (Value, error) {
	panic("unreachable")
}

func (t *Type) skip(nbf *NomsBinFormat, b *binaryNomsReader) {
	panic("unreachable")
}

func (t *Type) String() string {
	panic("unreachable")
}
package timer
import (
"math"
"github.com/shasderias/ilysa/ease"
"github.com/shasderias/ilysa/scale"
)
// Range is an iterator over a span of beats, exposing both the current
// position and metadata about the span as a whole.
type Range interface {
	B() float64 // current beat
	// T is the current time in the current range on a 0-1 scale. As a special case,
	// T returns 1 when the range only has 1 step.
	T() float64
	Ordinal() int // ordinal number of the current iteration, starting from 0

	StartB() float64   // first beat of the current sequence
	EndB() float64     // last beat of the current sequence
	Duration() float64 // duration of the current sequence, in beats

	First() bool // true if this is the first iteration
	Last() bool  // true if this is the last iteration

	Next() bool

	ToRange() Range
	ToSequence() Sequence
}
// Ranger describes an eased span of beats: steps samples between
// startBeat and endBeat, positioned by easeFn.
type Ranger struct {
	startBeat float64              // first beat of the span
	endBeat   float64              // last beat of the span
	steps     int                  // number of samples, endpoints included
	easeFn    func(float64) float64 // maps normalized position before scaling
	tToBeat   func(float64) float64 // scales a 0-1 value into [startBeat, endBeat]
}
// Rng returns a Ranger producing steps beats from startBeat to endBeat,
// spacing them by applying the easing function fn to the normalized
// position before scaling into beat units.
func Rng(startBeat, endBeat float64, steps int, fn ease.Func) Ranger {
	return Ranger{
		startBeat: startBeat,
		endBeat:   endBeat,
		steps:     steps,
		easeFn:    fn,
		tToBeat:   scale.FromUnitClamp(startBeat, endBeat),
	}
}
// RngInterval returns a Ranger spanning startBeat to endBeat with steps
// spaced at `interval` per beat (plus one so both endpoints are
// included). The step count is clamped to at least 1.
//
// BUG FIX(review): the step count previously rounded only the beat
// duration — int(math.RoundToEven(endBeat-startBeat)*interval) —
// which discarded fractional beats before applying the interval. The
// product is now rounded instead, so e.g. a 1.5-beat span at interval
// 2 yields 4 steps rather than 5.
func RngInterval(startBeat, endBeat, interval float64, fn ease.Func) Ranger {
	steps := int(math.RoundToEven((endBeat-startBeat)*interval)) + 1
	if steps < 1 {
		steps = 1
	}
	return Rng(startBeat, endBeat, steps, fn)
}
// Iterate returns a Range iterator over the Ranger's span, positioned
// before the first step (call Next to advance).
func (r Ranger) Iterate() Range {
	return &RangeIterator{
		Ranger:  r,
		ordinal: -1, // pre-start sentinel; first Next moves to 0
		beatScaler: func(m float64) float64 {
			m = r.easeFn(m)
			// NOTE(review): a fresh scaler is built on every call even
			// though r.tToBeat holds an equivalent one when r was made
			// via Rng — presumably to tolerate hand-built Rangers.
			return scale.FromUnitClamp(r.startBeat, r.endBeat)(m)
		},
	}
}
// Idx returns the eased beat for step i.
// NOTE(review): with steps == 1 the divisor steps-1 is zero, producing
// NaN/Inf before clamping — callers appear to assume steps >= 2; confirm.
func (r Ranger) Idx(i int) float64 {
	t := float64(i) / float64(r.steps-1)
	return r.tToBeat(r.easeFn(t))
}

// Len returns the number of steps in the span.
func (r Ranger) Len() int {
	return r.steps
}
// RangeIterator walks a Ranger step by step, implementing both the
// Range and Sequence views of the span.
type RangeIterator struct {
	Ranger
	ordinal    int      // current step; -1 before the first Next
	beatScaler scale.Fn // maps a 0-1 position to an eased beat
}
// Next advances the iterator to the following step and reports whether
// that step exists.
func (i *RangeIterator) Next() bool {
	i.ordinal++
	return i.ordinal != i.steps
}
// B returns the eased beat at the current step.
func (i *RangeIterator) B() float64 { return i.beatScaler(i.T()) }

// T returns the current step's normalized (0-1) position.
func (i *RangeIterator) T() float64 { return float64(i.ordinal) / float64(i.steps-1) }

// Ordinal returns the zero-based index of the current step.
func (i *RangeIterator) Ordinal() int { return i.ordinal }

// StartB returns the first beat of the span.
func (i *RangeIterator) StartB() float64 { return i.startBeat }

// EndB returns the last beat of the span.
func (i *RangeIterator) EndB() float64 { return i.endBeat }

// Duration returns the span length in beats.
func (i *RangeIterator) Duration() float64 { return i.endBeat - i.startBeat }

// First reports whether the current beat is the span's first beat.
func (i *RangeIterator) First() bool { return i.B() == i.StartB() }

// Last reports whether the current beat is the span's last beat.
func (i *RangeIterator) Last() bool { return i.B() == i.EndB() }
// Sequence Methods

// SeqT returns the current step's normalized (0-1) sequence position.
func (i *RangeIterator) SeqT() float64 { return float64(i.ordinal) / float64(i.Len()-1) }

// SeqOrdinal returns the zero-based index of the current sequence step.
func (i *RangeIterator) SeqOrdinal() int { return i.ordinal }

// SeqLen returns the number of steps in the sequence.
func (i *RangeIterator) SeqLen() int { return i.Len() }

// SeqNextB returns the beat of the following step.
func (i *RangeIterator) SeqNextB() float64 { return i.Idx(i.ordinal + 1) }

// SeqNextBOffset returns the beat distance to the following step. On
// the last step, the distance to the previous step is used as an
// approximation since there is no following step.
func (i *RangeIterator) SeqNextBOffset() float64 {
	if i.SeqLast() {
		// approximation
		return i.Idx(i.ordinal) - i.Idx(i.ordinal-1)
	}
	return i.Idx(i.ordinal+1) - i.Idx(i.ordinal)
}

// SeqPrevB returns the beat of the preceding step.
func (i *RangeIterator) SeqPrevB() float64 { return i.Idx(i.ordinal - 1) }

// SeqPrevBOffset returns the beat distance from the preceding step.
func (i *RangeIterator) SeqPrevBOffset() float64 { return i.Idx(i.ordinal) - i.Idx(i.ordinal-1) }

// SeqFirst reports whether this is the first sequence step.
func (i *RangeIterator) SeqFirst() bool { return i.ordinal == 0 }

// SeqLast reports whether this is the last sequence step.
func (i *RangeIterator) SeqLast() bool { return i.ordinal == i.Len()-1 }

// ToRange returns the iterator viewed as a Range.
func (i *RangeIterator) ToRange() Range { return i }

// ToSequence returns the iterator viewed as a Sequence.
func (i *RangeIterator) ToSequence() Sequence { return i }
package generator
// singlesChains removes candidates by two methods. Prior to removing any candidates, chains are created between cells that contain the only two occurrences of a digit in a unit (box, row, or column). The chains connect the units together through the doubly occurring digits. Starting at an arbitrary location in the chain, the cells are alternately colored with two different colors. "Twice in a unit": if the same color occurs twice in a single unit, all cells marked with that color anywhere in the puzzle can be removed. "Two colors elsewhere": if a non-chain cell containing the digit can "see" two cells colored with opposite colors, the digit can be removed from the non-chain cell.
//
// Returns true if any candidate was removed.
func (g *Grid) singlesChains(verbose uint) (res bool) {
	// Create a pairs set containing cells where the cells contain the only two occurrences of a digit in the unit. We use a set so that the pairs are unique.
	// pairMaps[d] holds the pairs for digit d (index 0 is unused).
	var pairMaps [10]map[pair]bool
	g.unitPairs(&pairMaps)
	// Color the points in the chains.
	for d := 1; d <= 9; d++ {
		pairMap := pairMaps[d]
		// Each pass of the outer loop colors one connected chain; pairs
		// are deleted from pairMap as they are consumed.
		for len(pairMap) != 0 {
			colors := make(map[point]color)
			// setBoth permits seeding a brand-new chain; it is cleared
			// after the first coloring so later passes only extend it.
			setBoth := true
			// Propagate colors until a full sweep makes no progress.
			for {
				changed := false
				for p := range pairMap {
					set, del := setColors(p, &colors, setBoth)
					if set {
						changed = true
						setBoth = false
					}
					if del {
						delete(pairMap, p)
					}
				}
				if !changed {
					break
				}
			}
			// Separate the colors into two slices.
			var blues, reds []point
			for p, c := range colors {
				switch c {
				case blue:
					blues = append(blues, p)
				case red:
					reds = append(reds, p)
				}
			}
			// Search for "Twice in a unit": a color repeated in one unit
			// is contradictory, so that color can be erased everywhere.
			if g.twiceInAUnit(blues) {
				for _, p := range blues {
					if g.pt(p).andNot(1 << d) {
						g.cellChange(&res, verbose, "singlesChain: in %s, removing %d for twice in a unit\n", p, d)
					}
				}
			} else if g.twiceInAUnit(reds) {
				for _, p := range reds {
					if g.pt(p).andNot(1 << d) {
						g.cellChange(&res, verbose, "singlesChain: in %s, removing %d for twice in a unit\n", p, d)
					}
				}
			}
			// Search for "Two colors elsewhere": a non-chain cell that
			// sees both colors cannot hold d, since one color must be true.
			for r := zero; r < rows; r++ {
				for c := zero; c < cols; c++ {
					p := point{r, c}
					if *g.pt(p)&(1<<d) == 0 {
						continue
					}
					if _, ok := colors[p]; ok { // Skip if part of chain.
						continue
					}
					b := boxOf(r, c)
					var seesBlue *point
					for _, blue := range blues {
						if b == boxOfPoint(blue) || c == blue.c || r == blue.r {
							pt := blue
							seesBlue = &pt
						}
					}
					var seesRed *point
					for _, red := range reds {
						if b == boxOfPoint(red) || c == red.c || r == red.r {
							pt := red
							seesRed = &pt
						}
					}
					if seesBlue != nil && seesRed != nil {
						if g.pt(p).andNot(1 << d) {
							g.cellChange(&res, verbose, "singlesChain: in %s, removing %d for two colors elsewhere (%s, %s)\n", p, d, *seesBlue, *seesRed)
						}
					}
				}
			}
		}
	}
	return
}
// twiceInAUnit reports whether any two distinct points in colors share
// a unit (box, column, or row).
//
// Improvement: the original compared every ordered pair, checking each
// combination twice; sharing a unit is symmetric, so only unordered
// pairs (j > i) are examined now. The value-equality skip is kept so
// duplicate points in the slice are still ignored, matching the
// original's behavior.
func (g *Grid) twiceInAUnit(colors []point) bool {
	for i, p1 := range colors {
		for _, p2 := range colors[i+1:] {
			if p1 == p2 {
				continue
			}
			if boxOfPoint(p1) == boxOfPoint(p2) || p1.c == p2.c || p1.r == p2.r {
				return true
			}
		}
	}
	return false
}
func setColors(p pair, colors *map[point]color, colorBoth bool) (bool, bool) {
colorLeft := (*colors)[p.left]
colorRight := (*colors)[p.right]
if colorLeft == black && colorRight == black {
if colorBoth {
(*colors)[p.left] = red
(*colors)[p.right] = blue
return true, true
}
return false, false
}
if colorLeft == red && colorRight == black {
(*colors)[p.right] = blue
return true, true
}
if colorLeft == blue && colorRight == black {
(*colors)[p.right] = red
return true, true
}
if colorRight == red && colorLeft == black {
(*colors)[p.left] = blue
return true, true
}
if colorRight == blue && colorLeft == black {
(*colors)[p.left] = red
return true, true
}
return false, true
} | generator/singlesChains.go | 0.836921 | 0.733762 | singlesChains.go | starcoder |
package ecc
import (
"fmt"
"math/big"
)
// Point is a point on the elliptic curve y^2 = x^3 + A*x + B over the
// field represented by FieldInterface. A nil X and Y represent the
// point at infinity. Err records a deferred error from chained
// operations (see Calc).
type Point struct {
	X   FieldInterface
	Y   FieldInterface
	A   FieldInterface
	B   FieldInterface
	Err error
}
// NewPoint constructs a point on the curve defined by a and b,
// verifying that (x, y) satisfies y^2 == x^3 + a*x + b. A nil x or y
// yields the point at infinity without verification.
func NewPoint(x FieldInterface, y FieldInterface, a FieldInterface, b FieldInterface) (*Point, error) {
	if x == nil || y == nil {
		return &Point{X: nil, Y: nil, A: a, B: b}, nil
	}
	// left = y^2; right = x^3 + a*x + b. Copies keep x and y unmodified.
	left, right := y.Copy(), x.Copy()
	_, err := left.Pow(left, big.NewInt(2)).Calc()
	if err != nil {
		return nil, err
	}
	_, err = right.Pow(right, big.NewInt(3)).Add(right, x.Copy().Mul(x, a)).Add(right, b).Calc()
	if err != nil {
		return nil, err
	}
	if left.Ne(right) {
		return nil, fmt.Errorf("(%#v, %#v) is not on the curve", x, y)
	}
	return &Point{X: x, Y: y, A: a, B: b}, nil
}
// String renders the point as "Point(x, y)_a_b", or "Point(infinity)"
// for the point at infinity.
func (p *Point) String() string {
	if p.X == nil {
		return "Point(infinity)"
	}
	return fmt.Sprintf("Point(%s, %s)_%s_%s", p.X, p.Y, p.A, p.B)
}
// Eq reports whether p and other are the same point on the same curve.
// Two points at infinity are equal regardless of curve parameters.
func (p *Point) Eq(other *Point) bool {
	if p.X == nil {
		return other.X == nil
	}
	return p.X.Eq(other.X) && p.Y.Eq(other.Y) && p.A.Eq(other.A) && p.B.Eq(other.B)
}

// Ne reports whether p and other differ.
func (p *Point) Ne(other *Point) bool {
	return !p.Eq(other)
}

// Calc returns p and any deferred error accumulated by chained
// operations.
func (p *Point) Calc() (*Point, error) {
	return p, p.Err
}
// Add sets p to the elliptic-curve sum p1 + p2 and returns p. If the
// two points lie on different curves, p.Err is set instead. The point
// at infinity acts as the identity.
func (p *Point) Add(p1, p2 *Point) *Point {
	if p1.A.Ne(p2.A) || p1.B.Ne(p2.B) {
		*p = Point{
			X:   p.X,
			Y:   p.Y,
			A:   p.A,
			B:   p.B,
			Err: fmt.Errorf("points %s, %s are not on the same curve", p1, p2),
		}
		return p
	}
	a, b := p1.A, p1.B
	// Identity cases: infinity + q == q.
	if p1.X == nil {
		*p = Point{X: p2.X, Y: p2.Y, A: a, B: b, Err: p.Err}
		return p
	}
	if p2.X == nil {
		*p = Point{X: p1.X, Y: p1.Y, A: a, B: b, Err: p.Err}
		return p
	}
	// p1.x == p2.x, p1.y != p2.y: vertical line, sum is infinity.
	if p1.X.Eq(p2.X) && p1.Y.Ne(p2.Y) {
		*p = Point{X: nil, Y: nil, A: a, B: b, Err: p.Err}
		return p
	}
	x1, y1 := p1.X, p1.Y
	x2, y2 := p2.X, p2.Y
	// x1 != x2: chord case. Slope s = (y2-y1)/(x2-x1),
	// x3 = s^2 - x1 - x2, y3 = s*(x1-x3) - y1.
	if x1.Ne(x2) {
		s := y2.Copy()
		s.Sub(y2, y1)
		tmp := x2.Copy()
		tmp.Sub(x2, x1)
		s.Div(s, tmp)
		x3 := s.Copy()
		x3.Pow(x3, big.NewInt(2)).Sub(x3, x1).Sub(x3, x2)
		y3 := s.Copy()
		y3.Mul(y3, tmp.Sub(x1, x3)).Sub(y3, y1)
		*p = Point{X: x3, Y: y3, A: a, B: b, Err: p.Err}
		return p
	}
	// p1 == p2, y == 0: the tangent is vertical, sum is infinity.
	zero := x1.Copy()
	zero.RMul(zero, big.NewInt(0))
	if p1.Eq(p2) && p1.Y.Eq(zero) {
		*p = Point{X: nil, Y: nil, A: a, B: b, Err: p.Err}
		return p
	}
	// p1 == p2: doubling. Tangent slope s = (3*x1^2 + a)/(2*y1),
	// x3 = s^2 - 2*x1, y3 = s*(x1-x3) - y1.
	s := x1.Copy()
	s.Pow(x1, big.NewInt(2)).RMul(s, big.NewInt(3)).Add(s, a)
	tmp := y1.Copy()
	tmp.RMul(y1, big.NewInt(2))
	s.Div(s, tmp)
	x3 := s.Copy()
	x3.Pow(s, big.NewInt(2)).Sub(x3, tmp.RMul(x1, big.NewInt(2)))
	y3 := s.Copy()
	y3.Mul(s, tmp.Sub(x1, x3)).Sub(y3, y1)
	*p = Point{X: x3, Y: y3, A: a, B: b, Err: p.Err}
	return p
}
func (p *Point) RMul(r *Point, n *big.Int) *Point {
coef := new(big.Int).Set(n)
current := &Point{X: r.X, Y: r.Y, A: r.A, B: r.B, Err: r.Err}
result := &Point{X: nil, Y: nil, A: r.A, B: r.B, Err: r.Err}
for coef.Sign() > 0 {
if coef.Bit(0) == 1 {
result.Add(result, current)
}
current.Add(current, current)
coef.Rsh(coef, 1)
}
*p = *result
return p
} | ecc/point.go | 0.689828 | 0.469642 | point.go | starcoder |
package simpleregtest
import (
"fmt"
"reflect"
"strconv"
"testing"
"time"
"github.com/decred/dcrd/dcrjson"
"github.com/decred/dcrd/integration"
"github.com/decred/dcrd/integration/harness"
"github.com/decred/dcrd/rpcclient"
)
// JoinType is an enum representing a particular type of "node join". A node
// join is a synchronization tool used to wait until a subset of nodes have a
// consistent state with respect to an attribute.
type JoinType uint8
const (
	// Blocks is a JoinType which waits until all nodes share the same
	// block height.
	Blocks JoinType = iota
	// Mempools is a JoinType which blocks until all nodes have identical
	// mempools.
	Mempools
)
// JoinNodes is a synchronization tool used to block until all passed nodes
// are fully synced with respect to an attribute. It blocks until every node
// agrees according to the passed JoinType, and can be used to ensure all
// active test harnesses are in a consistent state before proceeding to an
// assertion or check within rpc tests.
func JoinNodes(nodes []*harness.Harness, joinType JoinType) error {
	if joinType == Mempools {
		return syncMempools(nodes)
	}
	if joinType == Blocks {
		return syncBlocks(nodes)
	}
	return nil
}
// syncMempools blocks until all nodes have mempools identical to the first
// node's, polling every 100ms until they converge.
func syncMempools(nodes []*harness.Harness) error {
	for {
		reference, err := nodes[0].NodeRPCClient().GetRawMempool(dcrjson.GRMAll)
		if err != nil {
			return err
		}
		synced := true
		for _, node := range nodes[1:] {
			pool, err := node.NodeRPCClient().GetRawMempool(dcrjson.GRMAll)
			if err != nil {
				return err
			}
			if !reflect.DeepEqual(reference, pool) {
				synced = false
				break
			}
		}
		if synced {
			return nil
		}
		// Mismatch found; wait briefly before polling again.
		time.Sleep(time.Millisecond * 100)
	}
}
// syncBlocks blocks until all nodes report the same block height, polling
// every 100ms until they converge.
func syncBlocks(nodes []*harness.Harness) error {
	for {
		heights := make(map[int64]struct{})
		mismatch := false
		for _, node := range nodes {
			height, err := node.NodeRPCClient().GetBlockCount()
			if err != nil {
				return err
			}
			heights[height] = struct{}{}
			// More than one distinct height means the nodes disagree.
			if len(heights) > 1 {
				mismatch = true
				break
			}
		}
		if !mismatch {
			return nil
		}
		time.Sleep(time.Millisecond * 100)
	}
}
// ConnectNode establishes a new peer-to-peer connection between the "from"
// harness and the "to" harness. The connection made is flagged as persistent,
// therefore in the case of disconnects, "from" will attempt to reestablish a
// connection to the "to" harness.
//
// It returns an error if the RPC calls fail or if no new peer connection is
// observed within five one-second polling attempts.
func ConnectNode(from *harness.Harness, to *harness.Harness) error {
	peerInfo, err := from.NodeRPCClient().GetPeerInfo()
	if err != nil {
		return err
	}
	numPeers := len(peerInfo)
	targetAddr := to.P2PAddress()
	if err := from.NodeRPCClient().AddNode(targetAddr, rpcclient.ANAdd); err != nil {
		return err
	}
	// Block until a new connection has been established.
	for attempts := 5; attempts > 0; attempts-- {
		peerInfo, err = from.NodeRPCClient().GetPeerInfo()
		if err != nil {
			return err
		}
		if len(peerInfo) > numPeers {
			return nil
		}
		integration.Sleep(1000)
	}
	// Fixed typo ("connet") and added the target address for diagnosability.
	return fmt.Errorf("failed to connect node to %s", targetAddr)
}
// generateTestChain creates a test chain with the desired number of mature
// coinbase outputs by mining numToGenerate blocks on the given node, logging
// progress to stdout.
func generateTestChain(numToGenerate uint32, node *rpcclient.Client) error {
	fmt.Printf("Generating %v blocks...\n", numToGenerate)
	if _, err := node.Generate(numToGenerate); err != nil {
		return err
	}
	fmt.Println("Block generation complete.")
	return nil
}
// assertConnectedTo fails the test unless nodeA reports an active peer
// connection to nodeB's P2P address.
func assertConnectedTo(t *testing.T, nodeA *harness.Harness, nodeB *harness.Harness) {
	peers, err := nodeA.NodeRPCClient().GetPeerInfo()
	if err != nil {
		t.Fatalf("unable to get nodeA's peer info")
	}
	target := nodeB.P2PAddress()
	for _, peer := range peers {
		if peer.Addr == target {
			return
		}
	}
	t.Fatal("nodeA not connected to nodeB")
}
// Waits for wallet to sync to the target height
func syncWalletTo(rpcClient *rpcclient.Client, desiredHeight int64) (int64, error) {
var count int64
var err error
for count != desiredHeight {
//rpctest.Sleep(100)
count, err = rpcClient.GetBlockCount()
if err != nil {
return -1, err
}
fmt.Println(" sync to: " + strconv.FormatInt(count, 10))
}
return count, nil
} | integration/harness/simpleregtest/helpers.go | 0.591841 | 0.431045 | helpers.go | starcoder |
package rfc6979
import (
"bytes"
"crypto/ecdsa"
"crypto/elliptic"
"crypto/hmac"
"crypto/sha256"
"errors"
"hash"
"math/big"
)
var (
	// one is the lower bound used when testing a candidate nonce for
	// correctness: RFC 6979 requires the generated k to satisfy 1 <= k < q.
	one = big.NewInt(1)
	// oneInitializer is used to fill a byte slice with byte 0x01. It is provided
	// here to avoid the need to create it multiple times.
	oneInitializer = []byte{0x01}
)
// SignWithNonce produces a deterministic ECDSA signature (r, s) over the
// given message hash, deriving the nonce k per RFC 6979. A nonce counter
// greater than zero perturbs the hash fed into the nonce derivation so a
// different — but still deterministic — signature is produced for the same
// message (retry mechanism). Returns an error if either component is zero.
func SignWithNonce(privateKey *ecdsa.PrivateKey, hash []byte, nonce int) (*big.Int, *big.Int, error) {
	N := privateKey.Curve.Params().N
	// halfOrder = N/2; s values above it are folded down ("low-S" form).
	halfOrder := new(big.Int).Rsh(N, 1)
	k := nonceRFC6979(privateKey.Curve, privateKey.D, hash, nonce)
	inv := new(big.Int).ModInverse(k, N) // k^-1 mod N
	// r is the x-coordinate of k*G; if it exceeds N it is reduced by one
	// subtraction.
	r, _ := privateKey.Curve.ScalarBaseMult(k.Bytes())
	if r.Cmp(N) == 1 {
		r.Sub(r, N)
	}
	if r.Sign() == 0 {
		return nil, nil, errors.New("calculated R is zero")
	}
	// s = k^-1 * (e + d*r) mod N, where e is the hash as an integer.
	e := hashToInt(hash, privateKey.Curve)
	s := new(big.Int).Mul(privateKey.D, r)
	s.Add(s, e)
	s.Mul(s, inv)
	s.Mod(s, N)
	if s.Cmp(halfOrder) == 1 {
		s.Sub(N, s)
	}
	if s.Sign() == 0 {
		return nil, nil, errors.New("calculated S is zero")
	}
	return r, s, nil
}
// Sign produces a deterministic (RFC 6979) ECDSA signature (r, s) over the
// given message hash, using a zero retry counter.
func Sign(privateKey *ecdsa.PrivateKey, hash []byte) (*big.Int, *big.Int, error) {
	r, s, err := SignWithNonce(privateKey, hash, 0)
	return r, s, err
}
// nonceRFC6979 generates an ECDSA nonce (`k`) deterministically according to RFC 6979.
// It takes a 32-byte hash as an input and returns 32-byte nonce to be used in ECDSA algorithm.
// The lettered step comments below follow the HMAC_DRBG-style procedure of
// RFC 6979 section 3.2.
func nonceRFC6979(curve elliptic.Curve, privkey *big.Int, hash []byte, nonce int) *big.Int {
	// A non-zero retry counter mixes extra zero bytes into the hash so
	// each retry derives a fresh candidate stream.
	if nonce > 0 {
		moreHash := sha256.New()
		moreHash.Write(hash)
		moreHash.Write(bytes.Repeat([]byte{0x00}, nonce))
		hash = moreHash.Sum(nil)
	}
	q := curve.Params().N
	x := privkey
	alg := sha256.New
	qlen := q.BitLen()
	holen := alg().Size() // hash output length in bytes
	rolen := (qlen + 7) >> 3 // byte length of the group order
	// Seed material: int2octets(x) || bits2octets(h1).
	bx := append(int2octets(x, rolen), bits2octets(hash, curve, rolen)...)
	// Step B
	v := bytes.Repeat(oneInitializer, holen)
	// Step C (Go zeroes the all allocated memory)
	k := make([]byte, holen)
	// Step D
	k = mac(alg, k, append(append(v, 0x00), bx...))
	// Step E
	v = mac(alg, k, v)
	// Step F
	k = mac(alg, k, append(append(v, 0x01), bx...))
	// Step G
	v = mac(alg, k, v)
	// Step H: generate candidates until one falls in [1, q-1].
	for {
		// Step H1
		var t []byte
		// Step H2
		for len(t)*8 < qlen {
			v = mac(alg, k, v)
			t = append(t, v...)
		}
		// Step H3
		secret := hashToInt(t, curve)
		if secret.Cmp(one) >= 0 && secret.Cmp(q) < 0 {
			return secret
		}
		// Candidate out of range: update K and V and try again.
		k = mac(alg, k, append(v, 0x00))
		v = mac(alg, k, v)
	}
}
// mac returns an HMAC of the given key and message.
func mac(alg func() hash.Hash, k, m []byte) []byte {
h := hmac.New(alg, k)
h.Write(m)
return h.Sum(nil)
}
// https://tools.ietf.org/html/rfc6979#section-2.3.3
func int2octets(v *big.Int, rolen int) []byte {
out := v.Bytes()
// left pad with zeros if it's too short
if len(out) < rolen {
out2 := make([]byte, rolen)
copy(out2[rolen-len(out):], out)
return out2
}
// drop most significant bytes if it's too long
if len(out) > rolen {
out2 := make([]byte, rolen)
copy(out2, out[len(out)-rolen:])
return out2
}
return out
}
// bits2octets converts a hash to rolen octets, reducing the value modulo the
// curve order when it is at least N.
// https://tools.ietf.org/html/rfc6979#section-2.3.4
func bits2octets(in []byte, curve elliptic.Curve, rolen int) []byte {
	z := hashToInt(in, curve)
	reduced := new(big.Int).Sub(z, curve.Params().N)
	if reduced.Sign() >= 0 {
		return int2octets(reduced, rolen)
	}
	return int2octets(z, rolen)
}
// hashToInt converts a hash value to an integer. There is some disagreement
// about how this is done. [NSA] suggests that this is done in the obvious
// manner, but [SECG] truncates the hash to the bit-length of the curve order
// first. We follow [SECG] because that's what OpenSSL does. Additionally,
// OpenSSL right shifts excess bits from the number if the hash is too large
// and we mirror that too.
// This is borrowed from crypto/ecdsa.
func hashToInt(hash []byte, c elliptic.Curve) *big.Int {
orderBits := c.Params().N.BitLen()
orderBytes := (orderBits + 7) / 8
if len(hash) > orderBytes {
hash = hash[:orderBytes]
}
ret := new(big.Int).SetBytes(hash)
excess := len(hash)*8 - orderBits
if excess > 0 {
ret.Rsh(ret, uint(excess))
}
return ret
} | ecdsa/rfc6979/rfc6979.go | 0.779154 | 0.41834 | rfc6979.go | starcoder |
package aggregation
// https://docs.mongodb.com/manual/reference/operator/aggregation/#trigonometry-expression-operators
// Sin returns the sine of a value that is measured in radians.
// The number argument is embedded as-is as the "$sin" operand.
// New in version 4.2.
// https://docs.mongodb.com/manual/reference/operator/aggregation/sin/
func Sin(number interface{}) M {
	return M{"$sin": number}
}
// Cos returns the cosine of a value that is measured in radians.
// The number argument is embedded as-is as the "$cos" operand.
// New in version 4.2.
// https://docs.mongodb.com/manual/reference/operator/aggregation/cos/
func Cos(number interface{}) M {
	return M{"$cos": number}
}
// Tan returns the tangent of a value that is measured in radians.
// The number argument is embedded as-is as the "$tan" operand.
// New in version 4.2.
// https://docs.mongodb.com/manual/reference/operator/aggregation/tan/
func Tan(number interface{}) M {
	return M{"$tan": number}
}
// ASin returns the inverse sine (arc sine) of a value, in radians.
// New in version 4.2.
// https://docs.mongodb.com/manual/reference/operator/aggregation/asin/
func ASin(number interface{}) M {
	return M{"$asin": number}
}
// ACos returns the inverse cosine (arc cosine) of a value, in radians.
// New in version 4.2.
// https://docs.mongodb.com/manual/reference/operator/aggregation/acos/
func ACos(number interface{}) M {
	return M{"$acos": number}
}
// ATan returns the inverse tangent (arc tangent) of a value, in radians.
// New in version 4.2.
// https://docs.mongodb.com/manual/reference/operator/aggregation/atan/
func ATan(number interface{}) M {
	return M{"$atan": number}
}
// ATan2 returns the inverse tangent (arc tangent) of y / x in radians; the
// operands are emitted in the order [y, x].
// New in version 4.2.
// https://docs.mongodb.com/manual/reference/operator/aggregation/atan2/
func ATan2(y, x interface{}) M {
	return M{"$atan2": A{y, x}}
}
// ASinH returns the inverse hyperbolic sine (hyperbolic arc sine) of a value in radians.
// New in version 4.2.
// https://docs.mongodb.com/manual/reference/operator/aggregation/asinh/
func ASinH(number interface{}) M {
	return M{"$asinh": number}
}
// ACosH returns the inverse hyperbolic cosine (hyperbolic arc cosine) of a value in radians.
// New in version 4.2.
// https://docs.mongodb.com/manual/reference/operator/aggregation/acosh/
func ACosH(number interface{}) M {
	return M{"$acosh": number}
}
// ATanH returns the inverse hyperbolic tangent (hyperbolic arc tangent) of a value in radians.
// New in version 4.2.
// https://docs.mongodb.com/manual/reference/operator/aggregation/atanh/
func ATanH(number interface{}) M {
	return M{"$atanh": number}
}
// DegreesToRadians converts a value from degrees to radians.
// The number argument is embedded as-is as the "$degreesToRadians" operand.
// New in version 4.2.
// https://docs.mongodb.com/manual/reference/operator/aggregation/degreesToRadians/
func DegreesToRadians(number interface{}) M {
	return M{"$degreesToRadians": number}
}
// RadiansToDegrees converts a value from radians to degrees.
// The number argument is embedded as-is as the "$radiansToDegrees" operand.
// New in version 4.2.
// https://docs.mongodb.com/manual/reference/operator/aggregation/radiansToDegrees/
func RadiansToDegrees(number interface{}) M {
	return M{"$radiansToDegrees": number}
}
package anomaly
import (
"github.com/luuphu25/data-sidecar/stat"
)
func anomalyLabels(labels map[string]string, model string) map[string]string {
anomalyLabels := make(map[string]string)
for xx, yy := range labels {
if xx == "ft_target" {
continue
}
anomalyLabels[xx] = yy
}
anomalyLabels["__name__"] = "anomaly"
anomalyLabels["ft_model"] = model
anomalyLabels["ft_metric"] = labels["__name__"]
return anomalyLabels
}
// anomalyHelper appends an anomaly label set for model aName to record when
// fire is true, and is a no-op otherwise.
func anomalyHelper(aName string, fire bool, name map[string]string, record *[]map[string]string) {
	// this once did more, and could again...
	if !fire {
		return
	}
	*record = append(*record, anomalyLabels(name, aName))
}
// Nelson computes the nelson rules on a slice of data, returning one label
// set per rule that fired.
func Nelson(data []float64, name map[string]string) []map[string]string {
	// Derive the control bands from the sample mean and standard deviation.
	mean, std := stat.MeanStdDev(data)
	record := make([]map[string]string, 0)
	// Of the nelson rules, we found that only these three really hold up in
	// general as useful indicators of anything.
	anomalyHelper("nelson_large_ooc", NelsonLargeOoC(data, mean-3*std, mean+3*std), name, &record)
	anomalyHelper("nelson_medium_ooc", NelsonMediumOoC(data, mean-2*std, mean+2*std), name, &record)
	anomalyHelper("nelson_small_ooc", NelsonSmallOoC(data, mean-1*std, mean+1*std), name, &record)
	return record
}
// NelsonLargeOoC reports whether the most recent point falls outside the
// (low, high) band — the equivalent of 3 standard deviations from the mean.
// It returns false for an empty series or a degenerate (zero-width) band.
func NelsonLargeOoC(data []float64, low, high float64) bool {
	// Guard against an empty series; the sibling rules (Medium/Small)
	// perform the analogous length check, but this one previously panicked.
	if len(data) == 0 {
		return false
	}
	if low == high {
		return false
	}
	latest := data[len(data)-1]
	return latest < low || latest > high
}
// NelsonMediumOoC reports whether 2 of the most recent 3 points fall outside
// the (low, high) band — the equivalent of 2 standard deviations of the mean.
// Series shorter than 3 points and degenerate bands never fire.
func NelsonMediumOoC(data []float64, low, high float64) bool {
	if len(data) < 3 {
		return false
	}
	if low == high {
		return false
	}
	var below, above int
	for _, v := range data[len(data)-3:] {
		if v < low {
			below++
		}
		if v > high {
			above++
		}
	}
	return below > 1 || above > 1
}
// NelsonSmallOoC reports whether 4 of the 5 most recent points fall outside
// the (low, high) band — the equivalent of 1 standard deviation of the mean.
// Series shorter than 5 points and degenerate bands never fire.
func NelsonSmallOoC(data []float64, low, high float64) bool {
	if len(data) < 5 {
		return false
	}
	if low == high {
		return false
	}
	var below, above int
	for _, v := range data[len(data)-5:] {
		if v < low {
			below++
		}
		if v > high {
			above++
		}
	}
	return below > 3 || above > 3
}
package uatype
func init() {
nodeInfoMap[1] = nodeInfo{
displayName: "Boolean",
class: NodeClassDataType,
description: "Describes a value that is either TRUE or FALSE.",
}
nodeInfoMap[2] = nodeInfo{
displayName: "SByte",
class: NodeClassDataType,
description: "Describes a value that is an integer between -128 and 127.",
}
nodeInfoMap[3] = nodeInfo{
displayName: "Byte",
class: NodeClassDataType,
description: "Describes a value that is an integer between 0 and 255.",
}
nodeInfoMap[4] = nodeInfo{
displayName: "Int16",
class: NodeClassDataType,
description: "Describes a value that is an integer between −32,768 and 32,767.",
}
nodeInfoMap[5] = nodeInfo{
displayName: "UInt16",
class: NodeClassDataType,
description: "Describes a value that is an integer between 0 and 65535.",
}
nodeInfoMap[6] = nodeInfo{
displayName: "Int32",
class: NodeClassDataType,
description: "Describes a value that is an integer between −2,147,483,648 and 2,147,483,647.",
}
nodeInfoMap[7] = nodeInfo{
displayName: "UInt32",
class: NodeClassDataType,
description: "Describes a value that is an integer between 0 and 4,294,967,295.",
}
nodeInfoMap[8] = nodeInfo{
displayName: "Int64",
class: NodeClassDataType,
description: "Describes a value that is an integer between −9,223,372,036,854,775,808 and 9,223,372,036,854,775,807.",
}
nodeInfoMap[9] = nodeInfo{
displayName: "UInt64",
class: NodeClassDataType,
description: "Describes a value that is an integer between 0 and 18,446,744,073,709,551,615.",
}
nodeInfoMap[10] = nodeInfo{
displayName: "Float",
class: NodeClassDataType,
description: "Describes a value that is an IEEE 754-1985 single precision floating point number.",
}
nodeInfoMap[11] = nodeInfo{
displayName: "Double",
class: NodeClassDataType,
description: "Describes a value that is an IEEE 754-1985 double precision floating point number.",
}
nodeInfoMap[12] = nodeInfo{
displayName: "String",
class: NodeClassDataType,
description: "Describes a value that is a sequence of printable Unicode characters.",
}
nodeInfoMap[13] = nodeInfo{
displayName: "DateTime",
class: NodeClassDataType,
description: "Describes a value that is a Gregorian calender date and time.",
}
nodeInfoMap[14] = nodeInfo{
displayName: "Guid",
class: NodeClassDataType,
description: "Describes a value that is a 128-bit globally unique identifier.",
}
nodeInfoMap[15] = nodeInfo{
displayName: "ByteString",
class: NodeClassDataType,
description: "Describes a value that is a sequence of bytes.",
}
nodeInfoMap[16] = nodeInfo{
displayName: "XmlElement",
class: NodeClassDataType,
description: "Describes a value that is an XML element.",
}
nodeInfoMap[17] = nodeInfo{
displayName: "NodeId",
class: NodeClassDataType,
description: "Describes a value that is an identifier for a node within a Server address space.",
}
nodeInfoMap[18] = nodeInfo{
displayName: "ExpandedNodeId",
class: NodeClassDataType,
description: "Describes a value that is an absolute identifier for a node.",
}
nodeInfoMap[19] = nodeInfo{
displayName: "StatusCode",
class: NodeClassDataType,
description: "Describes a value that is a code representing the outcome of an operation by a Server.",
}
nodeInfoMap[20] = nodeInfo{
displayName: "QualifiedName",
class: NodeClassDataType,
description: "Describes a value that is a name qualified by a namespace.",
}
nodeInfoMap[21] = nodeInfo{
displayName: "LocalizedText",
class: NodeClassDataType,
description: "Describes a value that is human readable Unicode text with a locale identifier.",
}
nodeInfoMap[22] = nodeInfo{
displayName: "Structure",
class: NodeClassDataType,
description: "Describes a value that is any type of structure that can be described with a data encoding.",
}
nodeInfoMap[23] = nodeInfo{
displayName: "DataValue",
class: NodeClassDataType,
description: "Describes a value that is a structure containing a value, a status code and timestamps.",
}
nodeInfoMap[24] = nodeInfo{
displayName: "BaseDataType",
class: NodeClassDataType,
description: "Describes a value that can have any valid DataType.",
}
nodeInfoMap[25] = nodeInfo{
displayName: "DiagnosticInfo",
class: NodeClassDataType,
description: "Describes a value that is a structure containing diagnostics associated with a StatusCode.",
}
nodeInfoMap[26] = nodeInfo{
displayName: "Number",
class: NodeClassDataType,
description: "Describes a value that can have any numeric DataType.",
}
nodeInfoMap[27] = nodeInfo{
displayName: "Integer",
class: NodeClassDataType,
description: "Describes a value that can have any integer DataType.",
}
nodeInfoMap[28] = nodeInfo{
displayName: "UInteger",
class: NodeClassDataType,
description: "Describes a value that can have any unsigned integer DataType.",
}
nodeInfoMap[29] = nodeInfo{
displayName: "Enumeration",
class: NodeClassDataType,
description: "Describes a value that is an enumerated DataType.",
}
nodeInfoMap[30] = nodeInfo{
displayName: "Image",
class: NodeClassDataType,
description: "Describes a value that is an image encoded as a string of bytes.",
}
nodeInfoMap[31] = nodeInfo{
displayName: "References",
class: NodeClassReferenceType,
description: "The abstract base type for all references.",
}
nodeInfoMap[32] = nodeInfo{
displayName: "NonHierarchicalReferences",
class: NodeClassReferenceType,
description: "The abstract base type for all non-hierarchical references.",
}
nodeInfoMap[33] = nodeInfo{
displayName: "HierarchicalReferences",
class: NodeClassReferenceType,
description: "The abstract base type for all hierarchical references.",
}
nodeInfoMap[34] = nodeInfo{
displayName: "HasChild",
class: NodeClassReferenceType,
description: "The abstract base type for all non-looping hierarchical references.",
}
nodeInfoMap[35] = nodeInfo{
displayName: "Organizes",
class: NodeClassReferenceType,
description: "The type for hierarchical references that are used to organize nodes.",
}
nodeInfoMap[36] = nodeInfo{
displayName: "HasEventSource",
class: NodeClassReferenceType,
description: "The type for non-looping hierarchical references that are used to organize event sources.",
}
nodeInfoMap[37] = nodeInfo{
displayName: "HasModellingRule",
class: NodeClassReferenceType,
description: "The type for references from instance declarations to modelling rule nodes.",
}
nodeInfoMap[38] = nodeInfo{
displayName: "HasEncoding",
class: NodeClassReferenceType,
description: "The type for references from data type nodes to to data type encoding nodes.",
}
nodeInfoMap[39] = nodeInfo{
displayName: "HasDescription",
class: NodeClassReferenceType,
description: "The type for references from data type encoding nodes to data type description nodes.",
}
nodeInfoMap[40] = nodeInfo{
displayName: "HasTypeDefinition",
class: NodeClassReferenceType,
description: "The type for references from a instance node its type defintion node.",
}
nodeInfoMap[41] = nodeInfo{
displayName: "GeneratesEvent",
class: NodeClassReferenceType,
description: "The type for references from a node to an event type that is raised by node.",
}
nodeInfoMap[44] = nodeInfo{
displayName: "Aggregates",
class: NodeClassReferenceType,
description: "The type for non-looping hierarchical references that are used to aggregate nodes into complex types.",
}
nodeInfoMap[45] = nodeInfo{
displayName: "HasSubtype",
class: NodeClassReferenceType,
description: "The type for non-looping hierarchical references that are used to define sub types.",
}
nodeInfoMap[46] = nodeInfo{
displayName: "HasProperty",
class: NodeClassReferenceType,
description: "The type for non-looping hierarchical reference from a node to its property.",
}
nodeInfoMap[47] = nodeInfo{
displayName: "HasComponent",
class: NodeClassReferenceType,
description: "The type for non-looping hierarchical reference from a node to its component.",
}
nodeInfoMap[48] = nodeInfo{
displayName: "HasNotifier",
class: NodeClassReferenceType,
description: "The type for non-looping hierarchical references that are used to indicate how events propagate from node to node.",
}
nodeInfoMap[49] = nodeInfo{
displayName: "HasOrderedComponent",
class: NodeClassReferenceType,
description: "The type for non-looping hierarchical reference from a node to its component when the order of references matters.",
}
nodeInfoMap[51] = nodeInfo{
displayName: "FromState",
class: NodeClassReferenceType,
description: "The type for a reference to the state before a transition.",
}
nodeInfoMap[52] = nodeInfo{
displayName: "ToState",
class: NodeClassReferenceType,
description: "The type for a reference to the state after a transition.",
}
nodeInfoMap[53] = nodeInfo{
displayName: "HasCause",
class: NodeClassReferenceType,
description: "The type for a reference to a method that can cause a transition to occur.",
}
nodeInfoMap[54] = nodeInfo{
displayName: "HasEffect",
class: NodeClassReferenceType,
description: "The type for a reference to an event that may be raised when a transition occurs.",
}
nodeInfoMap[56] = nodeInfo{
displayName: "HasHistoricalConfiguration",
class: NodeClassReferenceType,
description: "The type for a reference to the historical configuration for a data variable.",
}
nodeInfoMap[58] = nodeInfo{
displayName: "BaseObjectType",
class: NodeClassObjectType,
description: "The base type for all object nodes.",
}
nodeInfoMap[61] = nodeInfo{
displayName: "FolderType",
class: NodeClassObjectType,
description: "The type for objects that organize other nodes.",
}
nodeInfoMap[62] = nodeInfo{
displayName: "BaseVariableType",
class: NodeClassVariableType,
description: "The abstract base type for all variable nodes.",
}
nodeInfoMap[63] = nodeInfo{
displayName: "BaseDataVariableType",
class: NodeClassVariableType,
description: "The type for variable that represents a process value.",
}
nodeInfoMap[68] = nodeInfo{
displayName: "PropertyType",
class: NodeClassVariableType,
description: "The type for variable that represents a property of another node.",
}
nodeInfoMap[69] = nodeInfo{
displayName: "DataTypeDescriptionType",
class: NodeClassVariableType,
description: "The type for variable that represents the description of a data type encoding.",
}
nodeInfoMap[72] = nodeInfo{
displayName: "DataTypeDictionaryType",
class: NodeClassVariableType,
description: "The type for variable that represents the collection of data type decriptions.",
}
nodeInfoMap[75] = nodeInfo{
displayName: "DataTypeSystemType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[76] = nodeInfo{
displayName: "DataTypeEncodingType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[77] = nodeInfo{
displayName: "ModellingRuleType",
class: NodeClassObjectType,
description: "The type for an object that describes how an instance declaration is used when a type is instantiated.",
}
nodeInfoMap[78] = nodeInfo{
displayName: "Mandatory",
class: NodeClassObject,
description: "Specifies that an instance with the attributes and references of the instance declaration must appear when a type is instantiated.",
}
nodeInfoMap[79] = nodeInfo{
displayName: "MandatoryShared",
class: NodeClassObject,
description: "Specifies that a reference to a shared instance must appear in when a type is instantiated.",
}
nodeInfoMap[80] = nodeInfo{
displayName: "Optional",
class: NodeClassObject,
description: "Specifies that an instance with the attributes and references of the instance declaration may appear when a type is instantiated.",
}
nodeInfoMap[83] = nodeInfo{
displayName: "ExposesItsArray",
class: NodeClassObject,
description: "Specifies that an instance appears for each element of the containing array variable.",
}
nodeInfoMap[84] = nodeInfo{
displayName: "Root",
class: NodeClassObject,
description: "The root of the server address space.",
}
nodeInfoMap[85] = nodeInfo{
displayName: "Objects",
class: NodeClassObject,
description: "The browse entry point when looking for objects in the server address space.",
}
nodeInfoMap[86] = nodeInfo{
displayName: "Types",
class: NodeClassObject,
description: "The browse entry point when looking for types in the server address space.",
}
nodeInfoMap[87] = nodeInfo{
displayName: "Views",
class: NodeClassObject,
description: "The browse entry point when looking for views in the server address space.",
}
nodeInfoMap[88] = nodeInfo{
displayName: "ObjectTypes",
class: NodeClassObject,
description: "The browse entry point when looking for object types in the server address space.",
}
nodeInfoMap[89] = nodeInfo{
displayName: "VariableTypes",
class: NodeClassObject,
description: "The browse entry point when looking for variable types in the server address space.",
}
nodeInfoMap[90] = nodeInfo{
displayName: "DataTypes",
class: NodeClassObject,
description: "The browse entry point when looking for data types in the server address space.",
}
nodeInfoMap[91] = nodeInfo{
displayName: "ReferenceTypes",
class: NodeClassObject,
description: "The browse entry point when looking for reference types in the server address space.",
}
nodeInfoMap[92] = nodeInfo{
displayName: "XML Schema",
class: NodeClassObject,
description: "A type system which uses XML schema to describe the encoding of data types.",
}
nodeInfoMap[93] = nodeInfo{
displayName: "OPC Binary",
class: NodeClassObject,
description: "A type system which uses OPC binary schema to describe the encoding of data types.",
}
nodeInfoMap[104] = nodeInfo{
displayName: "DataTypeVersion",
class: NodeClassVariable,
description: "The version number for the data type description.",
}
nodeInfoMap[105] = nodeInfo{
displayName: "DictionaryFragment",
class: NodeClassVariable,
description: "A fragment of a data type dictionary that defines the data type.",
}
nodeInfoMap[106] = nodeInfo{
displayName: "DataTypeVersion",
class: NodeClassVariable,
description: "The version number for the data type dictionary.",
}
nodeInfoMap[107] = nodeInfo{
displayName: "NamespaceUri",
class: NodeClassVariable,
description: "A URI that uniquely identifies the dictionary.",
}
nodeInfoMap[111] = nodeInfo{
displayName: "NamingRule",
class: NodeClassVariable,
description: "Specified the significances of the BrowseName when a type is instantiated.",
}
nodeInfoMap[112] = nodeInfo{
displayName: "NamingRule",
class: NodeClassVariable,
description: "Specified the significances of the BrowseName when a type is instantiated.",
}
nodeInfoMap[113] = nodeInfo{
displayName: "NamingRule",
class: NodeClassVariable,
description: "Specified the significances of the BrowseName when a type is instantiated.",
}
nodeInfoMap[114] = nodeInfo{
displayName: "NamingRule",
class: NodeClassVariable,
description: "Specified the significances of the BrowseName when a type is instantiated.",
}
nodeInfoMap[116] = nodeInfo{
displayName: "NamingRule",
class: NodeClassVariable,
description: "Specified the significances of the BrowseName when a type is instantiated.",
}
nodeInfoMap[117] = nodeInfo{
displayName: "HasSubStateMachine",
class: NodeClassReferenceType,
description: "The type for a reference to a substate for a state.",
}
nodeInfoMap[120] = nodeInfo{
displayName: "NamingRuleType",
class: NodeClassDataType,
description: "Describes a value that specifies the significance of the BrowseName for an instance declaration.",
}
nodeInfoMap[121] = nodeInfo{
displayName: "Decimal128",
class: NodeClassDataType,
description: "Describes a 128-bit decimal value.",
}
nodeInfoMap[256] = nodeInfo{
displayName: "IdType",
class: NodeClassDataType,
description: "The type of identifier used in a node id.",
}
nodeInfoMap[257] = nodeInfo{
displayName: "NodeClass",
class: NodeClassDataType,
description: "A mask specifying the class of the node.",
}
nodeInfoMap[288] = nodeInfo{
displayName: "IntegerId",
class: NodeClassDataType,
description: "A numeric identifier for an object.",
}
nodeInfoMap[289] = nodeInfo{
displayName: "Counter",
class: NodeClassDataType,
description: "A monotonically increasing value.",
}
nodeInfoMap[290] = nodeInfo{
displayName: "Duration",
class: NodeClassDataType,
description: "A period of time measured in milliseconds.",
}
nodeInfoMap[291] = nodeInfo{
displayName: "NumericRange",
class: NodeClassDataType,
description: "Specifies a range of array indexes.",
}
nodeInfoMap[292] = nodeInfo{
displayName: "Time",
class: NodeClassDataType,
description: "A time value specified as HH:MM:SS.SSS.",
}
nodeInfoMap[293] = nodeInfo{
displayName: "Date",
class: NodeClassDataType,
description: "A date value.",
}
nodeInfoMap[294] = nodeInfo{
displayName: "UtcTime",
class: NodeClassDataType,
description: "A date/time value specified in Universal Coordinated Time (UTC).",
}
nodeInfoMap[295] = nodeInfo{
displayName: "LocaleId",
class: NodeClassDataType,
description: "An identifier for a user locale.",
}
nodeInfoMap[296] = nodeInfo{
displayName: "Argument",
class: NodeClassDataType,
description: "An argument for a method.",
}
nodeInfoMap[297] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[298] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[299] = nodeInfo{
displayName: "StatusResult",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[300] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[301] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[302] = nodeInfo{
displayName: "MessageSecurityMode",
class: NodeClassDataType,
description: "The type of security to use on a message.",
}
nodeInfoMap[303] = nodeInfo{
displayName: "UserTokenType",
class: NodeClassDataType,
description: "The possible user token types.",
}
nodeInfoMap[304] = nodeInfo{
displayName: "UserTokenPolicy",
class: NodeClassDataType,
description: "Describes a user token that can be used with a server.",
}
nodeInfoMap[305] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[306] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[307] = nodeInfo{
displayName: "ApplicationType",
class: NodeClassDataType,
description: "The types of applications.",
}
nodeInfoMap[308] = nodeInfo{
displayName: "ApplicationDescription",
class: NodeClassDataType,
description: "Describes an application and how to find it.",
}
nodeInfoMap[309] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[310] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[311] = nodeInfo{
displayName: "ApplicationInstanceCertificate",
class: NodeClassDataType,
description: "A certificate for an instance of an application.",
}
nodeInfoMap[312] = nodeInfo{
displayName: "EndpointDescription",
class: NodeClassDataType,
description: "The description of a endpoint that can be used to access a server.",
}
nodeInfoMap[313] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[314] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[315] = nodeInfo{
displayName: "SecurityTokenRequestType",
class: NodeClassDataType,
description: "Indicates whether a token if being created or renewed.",
}
nodeInfoMap[316] = nodeInfo{
displayName: "UserIdentityToken",
class: NodeClassDataType,
description: "A base type for a user identity token.",
}
nodeInfoMap[317] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[318] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[319] = nodeInfo{
displayName: "AnonymousIdentityToken",
class: NodeClassDataType,
description: "A token representing an anonymous user.",
}
nodeInfoMap[320] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[321] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[322] = nodeInfo{
displayName: "UserNameIdentityToken",
class: NodeClassDataType,
description: "A token representing a user identified by a user name and password.",
}
nodeInfoMap[323] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[324] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[325] = nodeInfo{
displayName: "X509IdentityToken",
class: NodeClassDataType,
description: "A token representing a user identified by an X509 certificate.",
}
nodeInfoMap[326] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[327] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[331] = nodeInfo{
displayName: "EndpointConfiguration",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[332] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[333] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[338] = nodeInfo{
displayName: "BuildInfo",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[339] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[340] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[344] = nodeInfo{
displayName: "SignedSoftwareCertificate",
class: NodeClassDataType,
description: "A software certificate with a digital signature.",
}
nodeInfoMap[345] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[346] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[347] = nodeInfo{
displayName: "AttributeWriteMask",
class: NodeClassDataType,
description: "Define bits used to indicate which attributes are writable.",
}
nodeInfoMap[348] = nodeInfo{
displayName: "NodeAttributesMask",
class: NodeClassDataType,
description: "The bits used to specify default attributes for a new node.",
}
nodeInfoMap[376] = nodeInfo{
displayName: "AddNodesItem",
class: NodeClassDataType,
description: "A request to add a node to the server address space.",
}
nodeInfoMap[377] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[378] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[379] = nodeInfo{
displayName: "AddReferencesItem",
class: NodeClassDataType,
description: "A request to add a reference to the server address space.",
}
nodeInfoMap[380] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[381] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[382] = nodeInfo{
displayName: "DeleteNodesItem",
class: NodeClassDataType,
description: "A request to delete a node to the server address space.",
}
nodeInfoMap[383] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[384] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[385] = nodeInfo{
displayName: "DeleteReferencesItem",
class: NodeClassDataType,
description: "A request to delete a node from the server address space.",
}
nodeInfoMap[386] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[387] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[388] = nodeInfo{
displayName: "SessionAuthenticationToken",
class: NodeClassDataType,
description: "A unique identifier for a session used to authenticate requests.",
}
nodeInfoMap[432] = nodeInfo{
displayName: "RegisteredServer",
class: NodeClassDataType,
description: "The information required to register a server with a discovery server.",
}
nodeInfoMap[433] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[434] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[521] = nodeInfo{
displayName: "ContinuationPoint",
class: NodeClassDataType,
description: "An identifier for a suspended query or browse operation.",
}
nodeInfoMap[537] = nodeInfo{
displayName: "RelativePathElement",
class: NodeClassDataType,
description: "An element in a relative path.",
}
nodeInfoMap[538] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[539] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[540] = nodeInfo{
displayName: "RelativePath",
class: NodeClassDataType,
description: "A relative path constructed from reference types and browse names.",
}
nodeInfoMap[541] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[542] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[576] = nodeInfo{
displayName: "FilterOperator",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[583] = nodeInfo{
displayName: "ContentFilterElement",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[584] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[585] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[586] = nodeInfo{
displayName: "ContentFilter",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[587] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[588] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[589] = nodeInfo{
displayName: "FilterOperand",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[590] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[591] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[592] = nodeInfo{
displayName: "ElementOperand",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[593] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[594] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[595] = nodeInfo{
displayName: "LiteralOperand",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[596] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[597] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[598] = nodeInfo{
displayName: "AttributeOperand",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[599] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[600] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[601] = nodeInfo{
displayName: "SimpleAttributeOperand",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[602] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[603] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[659] = nodeInfo{
displayName: "HistoryEvent",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[660] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[661] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[719] = nodeInfo{
displayName: "MonitoringFilter",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[720] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[721] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[725] = nodeInfo{
displayName: "EventFilter",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[726] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[727] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[851] = nodeInfo{
displayName: "RedundancySupport",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[852] = nodeInfo{
displayName: "ServerState",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[853] = nodeInfo{
displayName: "RedundantServerDataType",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[854] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[855] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[856] = nodeInfo{
displayName: "SamplingIntervalDiagnosticsDataType",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[857] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[858] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[859] = nodeInfo{
displayName: "ServerDiagnosticsSummaryDataType",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[860] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[861] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[862] = nodeInfo{
displayName: "ServerStatusDataType",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[863] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[864] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[865] = nodeInfo{
displayName: "SessionDiagnosticsDataType",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[866] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[867] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[868] = nodeInfo{
displayName: "SessionSecurityDiagnosticsDataType",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[869] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[870] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[871] = nodeInfo{
displayName: "ServiceCounterDataType",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[872] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[873] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[874] = nodeInfo{
displayName: "SubscriptionDiagnosticsDataType",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[875] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[876] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[877] = nodeInfo{
displayName: "ModelChangeStructureDataType",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[878] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[879] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[884] = nodeInfo{
displayName: "Range",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[885] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[886] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[887] = nodeInfo{
displayName: "EUInformation",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[888] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[889] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[890] = nodeInfo{
displayName: "ExceptionDeviationFormat",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[891] = nodeInfo{
displayName: "Annotation",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[892] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[893] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[894] = nodeInfo{
displayName: "ProgramDiagnosticDataType",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[895] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[896] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[897] = nodeInfo{
displayName: "SemanticChangeStructureDataType",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[898] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[899] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[920] = nodeInfo{
displayName: "HistoryEventFieldList",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[921] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[922] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[938] = nodeInfo{
displayName: "IssuedIdentityToken",
class: NodeClassDataType,
description: "A token representing a user identified by a WS-Security XML token.",
}
nodeInfoMap[939] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[940] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[948] = nodeInfo{
displayName: "AggregateConfiguration",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[949] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[950] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[2000] = nodeInfo{
displayName: "ImageBMP",
class: NodeClassDataType,
description: "An image encoded in BMP format.",
}
nodeInfoMap[2001] = nodeInfo{
displayName: "ImageGIF",
class: NodeClassDataType,
description: "An image encoded in GIF format.",
}
nodeInfoMap[2002] = nodeInfo{
displayName: "ImageJPG",
class: NodeClassDataType,
description: "An image encoded in JPEG format.",
}
nodeInfoMap[2003] = nodeInfo{
displayName: "ImagePNG",
class: NodeClassDataType,
description: "An image encoded in PNG format.",
}
nodeInfoMap[2004] = nodeInfo{
displayName: "ServerType",
class: NodeClassObjectType,
description: "Specifies the current status and capabilities of the server.",
}
nodeInfoMap[2005] = nodeInfo{
displayName: "ServerArray",
class: NodeClassVariable,
description: "The list of server URIs used by the server.",
}
nodeInfoMap[2006] = nodeInfo{
displayName: "NamespaceArray",
class: NodeClassVariable,
description: "The list of namespace URIs used by the server.",
}
nodeInfoMap[2007] = nodeInfo{
displayName: "ServerStatus",
class: NodeClassVariable,
description: "The current status of the server.",
}
nodeInfoMap[2008] = nodeInfo{
displayName: "ServiceLevel",
class: NodeClassVariable,
description: "A value indicating the level of service the server can provide. 255 indicates the best.",
}
nodeInfoMap[2009] = nodeInfo{
displayName: "ServerCapabilities",
class: NodeClassObject,
description: "Describes capabilities supported by the server.",
}
nodeInfoMap[2010] = nodeInfo{
displayName: "ServerDiagnostics",
class: NodeClassObject,
description: "Reports diagnostics about the server.",
}
nodeInfoMap[2011] = nodeInfo{
displayName: "VendorServerInfo",
class: NodeClassObject,
description: "Server information provided by the vendor.",
}
nodeInfoMap[2012] = nodeInfo{
displayName: "ServerRedundancy",
class: NodeClassObject,
description: "Describes the redundancy capabilities of the server.",
}
nodeInfoMap[2013] = nodeInfo{
displayName: "ServerCapabilitiesType",
class: NodeClassObjectType,
description: "Describes the capabilities supported by the server.",
}
nodeInfoMap[2014] = nodeInfo{
displayName: "ServerProfileArray",
class: NodeClassVariable,
description: "A list of profiles supported by the server.",
}
nodeInfoMap[2016] = nodeInfo{
displayName: "LocaleIdArray",
class: NodeClassVariable,
description: "A list of locales supported by the server.",
}
nodeInfoMap[2017] = nodeInfo{
displayName: "MinSupportedSampleRate",
class: NodeClassVariable,
description: "The minimum sampling interval supported by the server.",
}
nodeInfoMap[2019] = nodeInfo{
displayName: "ModellingRules",
class: NodeClassObject,
description: "A folder for the modelling rules supported by the server.",
}
nodeInfoMap[2020] = nodeInfo{
displayName: "ServerDiagnosticsType",
class: NodeClassObjectType,
description: "The diagnostics information for a server.",
}
nodeInfoMap[2021] = nodeInfo{
displayName: "ServerDiagnosticsSummary",
class: NodeClassVariable,
description: "A summary of server level diagnostics.",
}
nodeInfoMap[2022] = nodeInfo{
displayName: "SamplingIntervalDiagnosticsArray",
class: NodeClassVariable,
description: "A list of diagnostics for each sampling interval supported by the server.",
}
nodeInfoMap[2023] = nodeInfo{
displayName: "SubscriptionDiagnosticsArray",
class: NodeClassVariable,
description: "A list of diagnostics for each active subscription.",
}
nodeInfoMap[2025] = nodeInfo{
displayName: "EnabledFlag",
class: NodeClassVariable,
description: "If TRUE the diagnostics collection is enabled.",
}
nodeInfoMap[2026] = nodeInfo{
displayName: "SessionsDiagnosticsSummaryType",
class: NodeClassObjectType,
description: "Provides a summary of session level diagnostics.",
}
nodeInfoMap[2027] = nodeInfo{
displayName: "SessionDiagnosticsArray",
class: NodeClassVariable,
description: "A list of diagnostics for each active session.",
}
nodeInfoMap[2028] = nodeInfo{
displayName: "SessionSecurityDiagnosticsArray",
class: NodeClassVariable,
description: "A list of security related diagnostics for each active session.",
}
nodeInfoMap[2029] = nodeInfo{
displayName: "SessionDiagnosticsObjectType",
class: NodeClassObjectType,
description: "A container for session level diagnostics information.",
}
nodeInfoMap[2030] = nodeInfo{
displayName: "SessionDiagnostics",
class: NodeClassVariable,
description: "Diagnostics information for an active session.",
}
nodeInfoMap[2031] = nodeInfo{
displayName: "SessionSecurityDiagnostics",
class: NodeClassVariable,
description: "Security related diagnostics information for an active session.",
}
nodeInfoMap[2032] = nodeInfo{
displayName: "SubscriptionDiagnosticsArray",
class: NodeClassVariable,
description: "A list of diagnostics for each subscription owned by the session.",
}
nodeInfoMap[2033] = nodeInfo{
displayName: "VendorServerInfoType",
class: NodeClassObjectType,
description: "A base type for vendor specific server information.",
}
nodeInfoMap[2034] = nodeInfo{
displayName: "ServerRedundancyType",
class: NodeClassObjectType,
description: "A base type for an object that describe how a server supports redundancy.",
}
nodeInfoMap[2035] = nodeInfo{
displayName: "RedundancySupport",
class: NodeClassVariable,
description: "Indicates what style of redundancy is supported by the server.",
}
nodeInfoMap[2036] = nodeInfo{
displayName: "TransparentRedundancyType",
class: NodeClassObjectType,
description: "Identifies the capabilties of server that supports transparent redundancy.",
}
nodeInfoMap[2037] = nodeInfo{
displayName: "CurrentServerId",
class: NodeClassVariable,
description: "The ID of the server that is currently in use.",
}
nodeInfoMap[2038] = nodeInfo{
displayName: "RedundantServerArray",
class: NodeClassVariable,
description: "A list of servers in the same redundant set.",
}
nodeInfoMap[2039] = nodeInfo{
displayName: "NonTransparentRedundancyType",
class: NodeClassObjectType,
description: "Identifies the capabilties of server that supports non-transparent redundancy.",
}
nodeInfoMap[2040] = nodeInfo{
displayName: "ServerUriArray",
class: NodeClassVariable,
description: "A list of servers in the same redundant set.",
}
nodeInfoMap[2041] = nodeInfo{
displayName: "BaseEventType",
class: NodeClassObjectType,
description: "The base type for all events.",
}
nodeInfoMap[2042] = nodeInfo{
displayName: "EventId",
class: NodeClassVariable,
description: "A globally unique identifier for the event.",
}
nodeInfoMap[2043] = nodeInfo{
displayName: "EventType",
class: NodeClassVariable,
description: "The identifier for the event type.",
}
nodeInfoMap[2044] = nodeInfo{
displayName: "SourceNode",
class: NodeClassVariable,
description: "The source of the event.",
}
nodeInfoMap[2045] = nodeInfo{
displayName: "SourceName",
class: NodeClassVariable,
description: "A description of the source of the event.",
}
nodeInfoMap[2046] = nodeInfo{
displayName: "Time",
class: NodeClassVariable,
description: "When the event occurred.",
}
nodeInfoMap[2047] = nodeInfo{
displayName: "ReceiveTime",
class: NodeClassVariable,
description: "When the server received the event from the underlying system.",
}
nodeInfoMap[2050] = nodeInfo{
displayName: "Message",
class: NodeClassVariable,
description: "A localized description of the event.",
}
nodeInfoMap[2051] = nodeInfo{
displayName: "Severity",
class: NodeClassVariable,
description: "Indicates how urgent an event is.",
}
nodeInfoMap[2052] = nodeInfo{
displayName: "AuditEventType",
class: NodeClassObjectType,
description: "A base type for events used to track client initiated changes to the server state.",
}
nodeInfoMap[2053] = nodeInfo{
displayName: "ActionTimeStamp",
class: NodeClassVariable,
description: "When the action triggering the event occurred.",
}
nodeInfoMap[2054] = nodeInfo{
displayName: "Status",
class: NodeClassVariable,
description: "If TRUE the action was performed. If FALSE the action failed and the server state did not change.",
}
nodeInfoMap[2055] = nodeInfo{
displayName: "ServerId",
class: NodeClassVariable,
description: "The unique identifier for the server generating the event.",
}
nodeInfoMap[2056] = nodeInfo{
displayName: "ClientAuditEntryId",
class: NodeClassVariable,
description: "The log entry id provided in the request that initiated the action.",
}
nodeInfoMap[2057] = nodeInfo{
displayName: "ClientUserId",
class: NodeClassVariable,
description: "The user identity associated with the session that initiated the action.",
}
nodeInfoMap[2058] = nodeInfo{
displayName: "AuditSecurityEventType",
class: NodeClassObjectType,
description: "A base type for events used to track security related changes.",
}
nodeInfoMap[2059] = nodeInfo{
displayName: "AuditChannelEventType",
class: NodeClassObjectType,
description: "A base type for events used to track related changes to a secure channel.",
}
nodeInfoMap[2060] = nodeInfo{
displayName: "AuditOpenSecureChannelEventType",
class: NodeClassObjectType,
description: "An event that is raised when a secure channel is opened.",
}
nodeInfoMap[2061] = nodeInfo{
displayName: "ClientCertificate",
class: NodeClassVariable,
description: "The certificate provided by the client.",
}
nodeInfoMap[2062] = nodeInfo{
displayName: "RequestType",
class: NodeClassVariable,
description: "The type of request (NEW or RENEW).",
}
nodeInfoMap[2063] = nodeInfo{
displayName: "SecurityPolicyUri",
class: NodeClassVariable,
description: "The security policy used by the channel.",
}
nodeInfoMap[2065] = nodeInfo{
displayName: "SecurityMode",
class: NodeClassVariable,
description: "The security mode used by the channel.",
}
nodeInfoMap[2066] = nodeInfo{
displayName: "RequestedLifetime",
class: NodeClassVariable,
description: "The lifetime of the channel requested by the client.",
}
nodeInfoMap[2069] = nodeInfo{
displayName: "AuditSessionEventType",
class: NodeClassObjectType,
description: "A base type for events used to track related changes to a session.",
}
nodeInfoMap[2070] = nodeInfo{
displayName: "SessionId",
class: NodeClassVariable,
description: "The unique identifier for the session,.",
}
nodeInfoMap[2071] = nodeInfo{
displayName: "AuditCreateSessionEventType",
class: NodeClassObjectType,
description: "An event that is raised when a session is created.",
}
nodeInfoMap[2072] = nodeInfo{
displayName: "SecureChannelId",
class: NodeClassVariable,
description: "The secure channel associated with the session.",
}
nodeInfoMap[2073] = nodeInfo{
displayName: "ClientCertificate",
class: NodeClassVariable,
description: "The certificate provided by the client.",
}
nodeInfoMap[2074] = nodeInfo{
displayName: "RevisedSessionTimeout",
class: NodeClassVariable,
description: "The timeout for the session.",
}
nodeInfoMap[2075] = nodeInfo{
displayName: "AuditActivateSessionEventType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[2076] = nodeInfo{
displayName: "ClientSoftwareCertificates",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[2077] = nodeInfo{
displayName: "UserIdentityToken",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[2078] = nodeInfo{
displayName: "AuditCancelEventType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[2079] = nodeInfo{
displayName: "RequestHandle",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[2080] = nodeInfo{
displayName: "AuditCertificateEventType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[2081] = nodeInfo{
displayName: "Certificate",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[2082] = nodeInfo{
displayName: "AuditCertificateDataMismatchEventType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[2083] = nodeInfo{
displayName: "InvalidHostname",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[2084] = nodeInfo{
displayName: "InvalidUri",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[2085] = nodeInfo{
displayName: "AuditCertificateExpiredEventType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[2086] = nodeInfo{
displayName: "AuditCertificateInvalidEventType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[2087] = nodeInfo{
displayName: "AuditCertificateUntrustedEventType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[2088] = nodeInfo{
displayName: "AuditCertificateRevokedEventType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[2089] = nodeInfo{
displayName: "AuditCertificateMismatchEventType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[2090] = nodeInfo{
displayName: "AuditNodeManagementEventType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[2091] = nodeInfo{
displayName: "AuditAddNodesEventType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[2092] = nodeInfo{
displayName: "NodesToAdd",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[2093] = nodeInfo{
displayName: "AuditDeleteNodesEventType",
class: NodeClassObjectType,
description: "",
}
// Static metadata for the standard OPC UA node IDs in this range.
// Each entry maps a numeric NodeId to its display name and node class.
// Entries whose description is empty omit the field and rely on the
// zero value of nodeInfo.description, which is the empty string.
nodeInfoMap[2094] = nodeInfo{displayName: "NodesToDelete", class: NodeClassVariable}
nodeInfoMap[2095] = nodeInfo{displayName: "AuditAddReferencesEventType", class: NodeClassObjectType}
nodeInfoMap[2096] = nodeInfo{displayName: "ReferencesToAdd", class: NodeClassVariable}
nodeInfoMap[2097] = nodeInfo{displayName: "AuditDeleteReferencesEventType", class: NodeClassObjectType}
nodeInfoMap[2098] = nodeInfo{displayName: "ReferencesToDelete", class: NodeClassVariable}
nodeInfoMap[2099] = nodeInfo{displayName: "AuditUpdateEventType", class: NodeClassObjectType}
nodeInfoMap[2100] = nodeInfo{displayName: "AuditWriteUpdateEventType", class: NodeClassObjectType}
nodeInfoMap[2101] = nodeInfo{displayName: "IndexRange", class: NodeClassVariable}
nodeInfoMap[2102] = nodeInfo{displayName: "OldValue", class: NodeClassVariable}
nodeInfoMap[2103] = nodeInfo{displayName: "NewValue", class: NodeClassVariable}
nodeInfoMap[2104] = nodeInfo{displayName: "AuditHistoryUpdateEventType", class: NodeClassObjectType}
nodeInfoMap[2127] = nodeInfo{displayName: "AuditUpdateMethodEventType", class: NodeClassObjectType}
nodeInfoMap[2128] = nodeInfo{displayName: "MethodId", class: NodeClassVariable}
nodeInfoMap[2129] = nodeInfo{displayName: "InputArguments", class: NodeClassVariable}
nodeInfoMap[2130] = nodeInfo{displayName: "SystemEventType", class: NodeClassObjectType}
nodeInfoMap[2131] = nodeInfo{displayName: "DeviceFailureEventType", class: NodeClassObjectType}
nodeInfoMap[2132] = nodeInfo{displayName: "BaseModelChangeEventType", class: NodeClassObjectType}
nodeInfoMap[2133] = nodeInfo{displayName: "GeneralModelChangeEventType", class: NodeClassObjectType}
nodeInfoMap[2134] = nodeInfo{displayName: "Changes", class: NodeClassVariable}
nodeInfoMap[2137] = nodeInfo{displayName: "ServerVendorCapabilityType", class: NodeClassVariableType}
nodeInfoMap[2138] = nodeInfo{displayName: "ServerStatusType", class: NodeClassVariableType}
nodeInfoMap[2139] = nodeInfo{displayName: "StartTime", class: NodeClassVariable}
nodeInfoMap[2140] = nodeInfo{displayName: "CurrentTime", class: NodeClassVariable}
nodeInfoMap[2141] = nodeInfo{displayName: "State", class: NodeClassVariable}
nodeInfoMap[2142] = nodeInfo{displayName: "BuildInfo", class: NodeClassVariable}
nodeInfoMap[2150] = nodeInfo{displayName: "ServerDiagnosticsSummaryType", class: NodeClassVariableType}
nodeInfoMap[2151] = nodeInfo{displayName: "ServerViewCount", class: NodeClassVariable}
nodeInfoMap[2152] = nodeInfo{displayName: "CurrentSessionCount", class: NodeClassVariable}
nodeInfoMap[2153] = nodeInfo{displayName: "CumulatedSessionCount", class: NodeClassVariable}
nodeInfoMap[2154] = nodeInfo{displayName: "SecurityRejectedSessionCount", class: NodeClassVariable}
nodeInfoMap[2155] = nodeInfo{displayName: "RejectedSessionCount", class: NodeClassVariable}
nodeInfoMap[2156] = nodeInfo{displayName: "SessionTimeoutCount", class: NodeClassVariable}
nodeInfoMap[2157] = nodeInfo{displayName: "SessionAbortCount", class: NodeClassVariable}
nodeInfoMap[2159] = nodeInfo{displayName: "PublishingIntervalCount", class: NodeClassVariable}
nodeInfoMap[2160] = nodeInfo{displayName: "CurrentSubscriptionCount", class: NodeClassVariable}
nodeInfoMap[2161] = nodeInfo{displayName: "CumulatedSubscriptionCount", class: NodeClassVariable}
nodeInfoMap[2162] = nodeInfo{displayName: "SecurityRejectedRequestsCount", class: NodeClassVariable}
nodeInfoMap[2163] = nodeInfo{displayName: "RejectedRequestsCount", class: NodeClassVariable}
nodeInfoMap[2164] = nodeInfo{displayName: "SamplingIntervalDiagnosticsArrayType", class: NodeClassVariableType}
nodeInfoMap[2165] = nodeInfo{displayName: "SamplingIntervalDiagnosticsType", class: NodeClassVariableType}
nodeInfoMap[2166] = nodeInfo{displayName: "SamplingInterval", class: NodeClassVariable}
nodeInfoMap[2171] = nodeInfo{displayName: "SubscriptionDiagnosticsArrayType", class: NodeClassVariableType}
nodeInfoMap[2172] = nodeInfo{displayName: "SubscriptionDiagnosticsType", class: NodeClassVariableType}
nodeInfoMap[2173] = nodeInfo{displayName: "SessionId", class: NodeClassVariable}
nodeInfoMap[2174] = nodeInfo{displayName: "SubscriptionId", class: NodeClassVariable}
nodeInfoMap[2175] = nodeInfo{displayName: "Priority", class: NodeClassVariable}
nodeInfoMap[2176] = nodeInfo{displayName: "PublishingInterval", class: NodeClassVariable}
nodeInfoMap[2177] = nodeInfo{displayName: "MaxKeepAliveCount", class: NodeClassVariable}
nodeInfoMap[2179] = nodeInfo{displayName: "MaxNotificationsPerPublish", class: NodeClassVariable}
nodeInfoMap[2180] = nodeInfo{displayName: "PublishingEnabled", class: NodeClassVariable}
nodeInfoMap[2181] = nodeInfo{displayName: "ModifyCount", class: NodeClassVariable}
nodeInfoMap[2182] = nodeInfo{displayName: "EnableCount", class: NodeClassVariable}
nodeInfoMap[2183] = nodeInfo{displayName: "DisableCount", class: NodeClassVariable}
nodeInfoMap[2184] = nodeInfo{displayName: "RepublishRequestCount", class: NodeClassVariable}
nodeInfoMap[2185] = nodeInfo{displayName: "RepublishMessageRequestCount", class: NodeClassVariable}
nodeInfoMap[2186] = nodeInfo{displayName: "RepublishMessageCount", class: NodeClassVariable}
nodeInfoMap[2187] = nodeInfo{displayName: "TransferRequestCount", class: NodeClassVariable}
nodeInfoMap[2188] = nodeInfo{displayName: "TransferredToAltClientCount", class: NodeClassVariable}
nodeInfoMap[2189] = nodeInfo{displayName: "TransferredToSameClientCount", class: NodeClassVariable}
nodeInfoMap[2190] = nodeInfo{displayName: "PublishRequestCount", class: NodeClassVariable}
nodeInfoMap[2191] = nodeInfo{displayName: "DataChangeNotificationsCount", class: NodeClassVariable}
nodeInfoMap[2193] = nodeInfo{displayName: "NotificationsCount", class: NodeClassVariable}
nodeInfoMap[2196] = nodeInfo{displayName: "SessionDiagnosticsArrayType", class: NodeClassVariableType}
nodeInfoMap[2197] = nodeInfo{displayName: "SessionDiagnosticsVariableType", class: NodeClassVariableType}
nodeInfoMap[2198] = nodeInfo{displayName: "SessionId", class: NodeClassVariable}
nodeInfoMap[2199] = nodeInfo{displayName: "SessionName", class: NodeClassVariable}
nodeInfoMap[2200] = nodeInfo{displayName: "ClientDescription", class: NodeClassVariable}
nodeInfoMap[2201] = nodeInfo{displayName: "ServerUri", class: NodeClassVariable}
nodeInfoMap[2202] = nodeInfo{displayName: "EndpointUrl", class: NodeClassVariable}
nodeInfoMap[2203] = nodeInfo{displayName: "LocaleIds", class: NodeClassVariable}
nodeInfoMap[2204] = nodeInfo{displayName: "ActualSessionTimeout", class: NodeClassVariable}
nodeInfoMap[2205] = nodeInfo{displayName: "ClientConnectionTime", class: NodeClassVariable}
nodeInfoMap[2206] = nodeInfo{displayName: "ClientLastContactTime", class: NodeClassVariable}
nodeInfoMap[2207] = nodeInfo{displayName: "CurrentSubscriptionsCount", class: NodeClassVariable}
nodeInfoMap[2208] = nodeInfo{displayName: "CurrentMonitoredItemsCount", class: NodeClassVariable}
nodeInfoMap[2209] = nodeInfo{displayName: "CurrentPublishRequestsInQueue", class: NodeClassVariable}
nodeInfoMap[2217] = nodeInfo{displayName: "ReadCount", class: NodeClassVariable}
nodeInfoMap[2218] = nodeInfo{displayName: "HistoryReadCount", class: NodeClassVariable}
nodeInfoMap[2219] = nodeInfo{displayName: "WriteCount", class: NodeClassVariable}
nodeInfoMap[2220] = nodeInfo{displayName: "HistoryUpdateCount", class: NodeClassVariable}
nodeInfoMap[2221] = nodeInfo{displayName: "CallCount", class: NodeClassVariable}
nodeInfoMap[2222] = nodeInfo{displayName: "CreateMonitoredItemsCount", class: NodeClassVariable}
nodeInfoMap[2223] = nodeInfo{displayName: "ModifyMonitoredItemsCount", class: NodeClassVariable}
nodeInfoMap[2224] = nodeInfo{displayName: "SetMonitoringModeCount", class: NodeClassVariable}
nodeInfoMap[2225] = nodeInfo{displayName: "SetTriggeringCount", class: NodeClassVariable}
nodeInfoMap[2226] = nodeInfo{displayName: "DeleteMonitoredItemsCount", class: NodeClassVariable}
nodeInfoMap[2227] = nodeInfo{displayName: "CreateSubscriptionCount", class: NodeClassVariable}
nodeInfoMap[2228] = nodeInfo{displayName: "ModifySubscriptionCount", class: NodeClassVariable}
nodeInfoMap[2229] = nodeInfo{displayName: "SetPublishingModeCount", class: NodeClassVariable}
nodeInfoMap[2230] = nodeInfo{displayName: "PublishCount", class: NodeClassVariable}
nodeInfoMap[2231] = nodeInfo{displayName: "RepublishCount", class: NodeClassVariable}
nodeInfoMap[2232] = nodeInfo{displayName: "TransferSubscriptionsCount", class: NodeClassVariable}
nodeInfoMap[2233] = nodeInfo{displayName: "DeleteSubscriptionsCount", class: NodeClassVariable}
nodeInfoMap[2234] = nodeInfo{displayName: "AddNodesCount", class: NodeClassVariable}
nodeInfoMap[2235] = nodeInfo{displayName: "AddReferencesCount", class: NodeClassVariable}
nodeInfoMap[2236] = nodeInfo{displayName: "DeleteNodesCount", class: NodeClassVariable}
nodeInfoMap[2237] = nodeInfo{displayName: "DeleteReferencesCount", class: NodeClassVariable}
nodeInfoMap[2238] = nodeInfo{displayName: "BrowseCount", class: NodeClassVariable}
nodeInfoMap[2239] = nodeInfo{displayName: "BrowseNextCount", class: NodeClassVariable}
nodeInfoMap[2240] = nodeInfo{displayName: "TranslateBrowsePathsToNodeIdsCount", class: NodeClassVariable}
nodeInfoMap[2241] = nodeInfo{displayName: "QueryFirstCount", class: NodeClassVariable}
nodeInfoMap[2242] = nodeInfo{displayName: "QueryNextCount", class: NodeClassVariable}
nodeInfoMap[2243] = nodeInfo{displayName: "SessionSecurityDiagnosticsArrayType", class: NodeClassVariableType}
nodeInfoMap[2244] = nodeInfo{displayName: "SessionSecurityDiagnosticsType", class: NodeClassVariableType}
nodeInfoMap[2245] = nodeInfo{displayName: "SessionId", class: NodeClassVariable}
nodeInfoMap[2246] = nodeInfo{displayName: "ClientUserIdOfSession", class: NodeClassVariable}
nodeInfoMap[2247] = nodeInfo{displayName: "ClientUserIdHistory", class: NodeClassVariable}
nodeInfoMap[2248] = nodeInfo{displayName: "AuthenticationMechanism", class: NodeClassVariable}
nodeInfoMap[2249] = nodeInfo{displayName: "Encoding", class: NodeClassVariable}
nodeInfoMap[2250] = nodeInfo{displayName: "TransportProtocol", class: NodeClassVariable}
nodeInfoMap[2251] = nodeInfo{displayName: "SecurityMode", class: NodeClassVariable}
nodeInfoMap[2252] = nodeInfo{displayName: "SecurityPolicyUri", class: NodeClassVariable}
nodeInfoMap[2253] = nodeInfo{displayName: "Server", class: NodeClassObject}
nodeInfoMap[2254] = nodeInfo{displayName: "ServerArray", class: NodeClassVariable, description: "The list of server URIs used by the server."}
nodeInfoMap[2255] = nodeInfo{displayName: "NamespaceArray", class: NodeClassVariable, description: "The list of namespace URIs used by the server."}
nodeInfoMap[2256] = nodeInfo{displayName: "ServerStatus", class: NodeClassVariable, description: "The current status of the server."}
nodeInfoMap[2257] = nodeInfo{displayName: "StartTime", class: NodeClassVariable}
nodeInfoMap[2258] = nodeInfo{displayName: "CurrentTime", class: NodeClassVariable}
nodeInfoMap[2259] = nodeInfo{displayName: "State", class: NodeClassVariable}
nodeInfoMap[2260] = nodeInfo{displayName: "BuildInfo", class: NodeClassVariable}
nodeInfoMap[2261] = nodeInfo{displayName: "ProductName", class: NodeClassVariable}
nodeInfoMap[2262] = nodeInfo{displayName: "ProductUri", class: NodeClassVariable}
nodeInfoMap[2263] = nodeInfo{displayName: "ManufacturerName", class: NodeClassVariable}
nodeInfoMap[2264] = nodeInfo{displayName: "SoftwareVersion", class: NodeClassVariable}
nodeInfoMap[2265] = nodeInfo{displayName: "BuildNumber", class: NodeClassVariable}
nodeInfoMap[2266] = nodeInfo{displayName: "BuildDate", class: NodeClassVariable}
nodeInfoMap[2267] = nodeInfo{displayName: "ServiceLevel", class: NodeClassVariable, description: "A value indicating the level of service the server can provide. 255 indicates the best."}
nodeInfoMap[2268] = nodeInfo{displayName: "ServerCapabilities", class: NodeClassObject, description: "Describes capabilities supported by the server."}
nodeInfoMap[2269] = nodeInfo{displayName: "ServerProfileArray", class: NodeClassVariable, description: "A list of profiles supported by the server."}
nodeInfoMap[2271] = nodeInfo{displayName: "LocaleIdArray", class: NodeClassVariable, description: "A list of locales supported by the server."}
nodeInfoMap[2272] = nodeInfo{displayName: "MinSupportedSampleRate", class: NodeClassVariable, description: "The minimum sampling interval supported by the server."}
nodeInfoMap[2274] = nodeInfo{displayName: "ServerDiagnostics", class: NodeClassObject, description: "Reports diagnostics about the server."}
nodeInfoMap[2275] = nodeInfo{displayName: "ServerDiagnosticsSummary", class: NodeClassVariable, description: "A summary of server level diagnostics."}
nodeInfoMap[2276] = nodeInfo{displayName: "ServerViewCount", class: NodeClassVariable}
nodeInfoMap[2277] = nodeInfo{displayName: "CurrentSessionCount", class: NodeClassVariable}
nodeInfoMap[2278] = nodeInfo{displayName: "CumulatedSessionCount", class: NodeClassVariable}
nodeInfoMap[2279] = nodeInfo{displayName: "SecurityRejectedSessionCount", class: NodeClassVariable}
nodeInfoMap[2281] = nodeInfo{displayName: "SessionTimeoutCount", class: NodeClassVariable}
nodeInfoMap[2282] = nodeInfo{displayName: "SessionAbortCount", class: NodeClassVariable}
nodeInfoMap[2284] = nodeInfo{displayName: "PublishingIntervalCount", class: NodeClassVariable}
nodeInfoMap[2285] = nodeInfo{displayName: "CurrentSubscriptionCount", class: NodeClassVariable}
nodeInfoMap[2286] = nodeInfo{displayName: "CumulatedSubscriptionCount", class: NodeClassVariable}
nodeInfoMap[2287] = nodeInfo{displayName: "SecurityRejectedRequestsCount", class: NodeClassVariable}
nodeInfoMap[2288] = nodeInfo{displayName: "RejectedRequestsCount", class: NodeClassVariable}
nodeInfoMap[2289] = nodeInfo{displayName: "SamplingIntervalDiagnosticsArray", class: NodeClassVariable, description: "A list of diagnostics for each sampling interval supported by the server."}
nodeInfoMap[2290] = nodeInfo{displayName: "SubscriptionDiagnosticsArray", class: NodeClassVariable, description: "A list of diagnostics for each active subscription."}
nodeInfoMap[2294] = nodeInfo{displayName: "EnabledFlag", class: NodeClassVariable, description: "If TRUE the diagnostics collection is enabled."}
nodeInfoMap[2295] = nodeInfo{displayName: "VendorServerInfo", class: NodeClassObject, description: "Server information provided by the vendor."}
nodeInfoMap[2296] = nodeInfo{displayName: "ServerRedundancy", class: NodeClassObject, description: "Describes the redundancy capabilities of the server."}
nodeInfoMap[2299] = nodeInfo{displayName: "StateMachineType", class: NodeClassObjectType}
nodeInfoMap[2307] = nodeInfo{displayName: "StateType", class: NodeClassObjectType}
nodeInfoMap[2308] = nodeInfo{displayName: "StateNumber", class: NodeClassVariable}
nodeInfoMap[2309] = nodeInfo{displayName: "InitialStateType", class: NodeClassObjectType}
nodeInfoMap[2310] = nodeInfo{displayName: "TransitionType", class: NodeClassObjectType}
nodeInfoMap[2311] = nodeInfo{displayName: "TransitionEventType", class: NodeClassObjectType}
nodeInfoMap[2312] = nodeInfo{displayName: "TransitionNumber", class: NodeClassVariable}
nodeInfoMap[2315] = nodeInfo{displayName: "AuditUpdateStateEventType", class: NodeClassObjectType}
nodeInfoMap[2318] = nodeInfo{displayName: "HistoricalDataConfigurationType", class: NodeClassObjectType}
nodeInfoMap[2323] = nodeInfo{displayName: "Stepped", class: NodeClassVariable}
nodeInfoMap[2324] = nodeInfo{displayName: "Definition", class: NodeClassVariable}
nodeInfoMap[2325] = nodeInfo{displayName: "MaxTimeInterval", class: NodeClassVariable}
nodeInfoMap[2326] = nodeInfo{displayName: "MinTimeInterval", class: NodeClassVariable}
nodeInfoMap[2327] = nodeInfo{displayName: "ExceptionDeviation", class: NodeClassVariable}
nodeInfoMap[2328] = nodeInfo{displayName: "ExceptionDeviationFormat", class: NodeClassVariable}
nodeInfoMap[2330] = nodeInfo{displayName: "HistoryServerCapabilitiesType", class: NodeClassObjectType}
nodeInfoMap[2331] = nodeInfo{displayName: "AccessHistoryDataCapability", class: NodeClassVariable}
nodeInfoMap[2332] = nodeInfo{displayName: "AccessHistoryEventsCapability", class: NodeClassVariable}
nodeInfoMap[2334] = nodeInfo{displayName: "InsertDataCapability", class: NodeClassVariable}
nodeInfoMap[2335] = nodeInfo{displayName: "ReplaceDataCapability", class: NodeClassVariable}
nodeInfoMap[2336] = nodeInfo{displayName: "UpdateDataCapability", class: NodeClassVariable}
nodeInfoMap[2337] = nodeInfo{displayName: "DeleteRawCapability", class: NodeClassVariable}
nodeInfoMap[2338] = nodeInfo{displayName: "DeleteAtTimeCapability", class: NodeClassVariable}
nodeInfoMap[2340] = nodeInfo{displayName: "AggregateFunctionType", class: NodeClassObjectType}
nodeInfoMap[2341] = nodeInfo{displayName: "Interpolative", class: NodeClassObject, description: "At the beginning of each interval, retrieve the calculated value from the data points on either side of the requested timestamp."}
nodeInfoMap[2342] = nodeInfo{displayName: "Average", class: NodeClassObject, description: "Retrieve the average value of the data over the interval."}
nodeInfoMap[2343] = nodeInfo{displayName: "TimeAverage", class: NodeClassObject, description: "Retrieve the time weighted average data over the interval using Interpolated Bounding Values."}
nodeInfoMap[2344] = nodeInfo{displayName: "Total", class: NodeClassObject, description: "Retrieve the total (time integral) of the data over the interval using Interpolated Bounding Values."}
nodeInfoMap[2346] = nodeInfo{displayName: "Minimum", class: NodeClassObject, description: "Retrieve the minimum raw value in the interval with the timestamp of the start of the interval."}
nodeInfoMap[2347] = nodeInfo{displayName: "Maximum", class: NodeClassObject, description: "Retrieve the maximum raw value in the interval with the timestamp of the start of the interval."}
nodeInfoMap[2348] = nodeInfo{displayName: "MinimumActualTime", class: NodeClassObject, description: "Retrieve the minimum value in the interval and the Timestamp of the minimum value."}
nodeInfoMap[2349] = nodeInfo{displayName: "MaximumActualTime", class: NodeClassObject, description: "Retrieve the maximum value in the interval and the Timestamp of the maximum value."}
nodeInfoMap[2350] = nodeInfo{displayName: "Range", class: NodeClassObject, description: "Retrieve the difference between the minimum and maximum Value over the interval."}
nodeInfoMap[2351] = nodeInfo{displayName: "AnnotationCount", class: NodeClassObject, description: "Retrieve the number of Annotations in the interval."}
nodeInfoMap[2352] = nodeInfo{displayName: "Count", class: NodeClassObject, description: "Retrieve the number of raw values over the interval."}
nodeInfoMap[2355] = nodeInfo{displayName: "NumberOfTransitions", class: NodeClassObject, description: "Retrieve the number of changes between zero and non-zero that a Boolean or Numeric value experienced in the interval."}
nodeInfoMap[2357] = nodeInfo{displayName: "Start", class: NodeClassObject, description: "Retrieve the value at the beginning of the interval using Interpolated Bounding Values."}
nodeInfoMap[2358] = nodeInfo{displayName: "End", class: NodeClassObject, description: "Retrieve the value at the end of the interval using Interpolated Bounding Values."}
nodeInfoMap[2359] = nodeInfo{displayName: "Delta", class: NodeClassObject, description: "Retrieve the difference between the Start and End value in the interval."}
nodeInfoMap[2360] = nodeInfo{displayName: "DurationGood", class: NodeClassObject, description: "Retrieve the total duration of time in the interval during which the data is good."}
nodeInfoMap[2361] = nodeInfo{displayName: "DurationBad", class: NodeClassObject, description: "Retrieve the total duration of time in the interval during which the data is bad."}
nodeInfoMap[2362] = nodeInfo{displayName: "PercentGood", class: NodeClassObject, description: "Retrieve the percent of data (0 to 100) in the interval which has a good StatusCode."}
nodeInfoMap[2363] = nodeInfo{displayName: "PercentBad", class: NodeClassObject, description: "Retrieve the percent of data (0 to 100) in the interval which has a bad StatusCode."}
nodeInfoMap[2364] = nodeInfo{displayName: "WorstQuality", class: NodeClassObject, description: "Retrieve the worst StatusCode of data in the interval."}
nodeInfoMap[2365] = nodeInfo{displayName: "DataItemType", class: NodeClassVariableType, description: "A variable that contains live automation data."}
nodeInfoMap[2366] = nodeInfo{displayName: "Definition", class: NodeClassVariable, description: "A vendor-specific, human readable string that specifies how the value of this DataItem is calculated."}
nodeInfoMap[2367] = nodeInfo{displayName: "ValuePrecision", class: NodeClassVariable, description: "The maximum precision that the server can maintain for the item based on restrictions in the target environment."}
nodeInfoMap[2368] = nodeInfo{displayName: "AnalogItemType", class: NodeClassVariableType}
nodeInfoMap[2369] = nodeInfo{displayName: "EURange", class: NodeClassVariable}
nodeInfoMap[2370] = nodeInfo{displayName: "InstrumentRange", class: NodeClassVariable}
nodeInfoMap[2371] = nodeInfo{displayName: "EngineeringUnits", class: NodeClassVariable}
nodeInfoMap[2372] = nodeInfo{displayName: "DiscreteItemType", class: NodeClassVariableType}
nodeInfoMap[2373] = nodeInfo{displayName: "TwoStateDiscreteType", class: NodeClassVariableType}
nodeInfoMap[2374] = nodeInfo{displayName: "FalseState", class: NodeClassVariable}
nodeInfoMap[2375] = nodeInfo{displayName: "TrueState", class: NodeClassVariable}
nodeInfoMap[2376] = nodeInfo{displayName: "MultiStateDiscreteType", class: NodeClassVariableType}
nodeInfoMap[2377] = nodeInfo{displayName: "EnumStrings", class: NodeClassVariable}
nodeInfoMap[2378] = nodeInfo{displayName: "ProgramTransitionEventType", class: NodeClassObjectType}
nodeInfoMap[2379] = nodeInfo{displayName: "IntermediateResult", class: NodeClassVariable}
nodeInfoMap[2380] = nodeInfo{displayName: "ProgramDiagnosticType", class: NodeClassVariableType}
nodeInfoMap[2381] = nodeInfo{displayName: "CreateSessionId", class: NodeClassVariable}
nodeInfoMap[2382] = nodeInfo{displayName: "CreateClientName", class: NodeClassVariable}
nodeInfoMap[2383] = nodeInfo{displayName: "InvocationCreationTime", class: NodeClassVariable}
nodeInfoMap[2384] = nodeInfo{displayName: "LastTransitionTime", class: NodeClassVariable}
nodeInfoMap[2385] = nodeInfo{displayName: "LastMethodCall", class: NodeClassVariable}
nodeInfoMap[2386] = nodeInfo{displayName: "LastMethodSessionId", class: NodeClassVariable}
nodeInfoMap[2387] = nodeInfo{displayName: "LastMethodInputArguments", class: NodeClassVariable}
nodeInfoMap[2388] = nodeInfo{displayName: "LastMethodOutputArguments", class: NodeClassVariable}
nodeInfoMap[2389] = nodeInfo{displayName: "LastMethodCallTime", class: NodeClassVariable}
nodeInfoMap[2390] = nodeInfo{displayName: "LastMethodReturnStatus", class: NodeClassVariable}
nodeInfoMap[2391] = nodeInfo{displayName: "ProgramStateMachineType", class: NodeClassObjectType, description: "A state machine for a program."}
nodeInfoMap[2392] = nodeInfo{displayName: "Creatable", class: NodeClassVariable}
nodeInfoMap[2393] = nodeInfo{displayName: "Deletable", class: NodeClassVariable}
nodeInfoMap[2394] = nodeInfo{displayName: "AutoDelete", class: NodeClassVariable}
nodeInfoMap[2395] = nodeInfo{displayName: "RecycleCount", class: NodeClassVariable}
nodeInfoMap[2396] = nodeInfo{displayName: "InstanceCount", class: NodeClassVariable}
nodeInfoMap[2397] = nodeInfo{displayName: "MaxInstanceCount", class: NodeClassVariable}
nodeInfoMap[2398] = nodeInfo{displayName: "MaxRecycleCount", class: NodeClassVariable}
nodeInfoMap[2399] = nodeInfo{displayName: "ProgramDiagnostics", class: NodeClassVariable}
nodeInfoMap[2400] = nodeInfo{displayName: "Ready", class: NodeClassObject, description: "The Program is properly initialized and may be started."}
nodeInfoMap[2401] = nodeInfo{displayName: "StateNumber", class: NodeClassVariable}
nodeInfoMap[2402] = nodeInfo{displayName: "Running", class: NodeClassObject, description: "The Program is executing making progress towards completion."}
nodeInfoMap[2403] = nodeInfo{displayName: "StateNumber", class: NodeClassVariable}
nodeInfoMap[2404] = nodeInfo{displayName: "Suspended", class: NodeClassObject, description: "The Program has been stopped prior to reaching a terminal state but may be resumed."}
nodeInfoMap[2405] = nodeInfo{displayName: "StateNumber", class: NodeClassVariable}
nodeInfoMap[2406] = nodeInfo{displayName: "Halted", class: NodeClassObject, description: "The Program is in a terminal or failed state, and it cannot be started or resumed without being reset."}
nodeInfoMap[2407] = nodeInfo{displayName: "StateNumber", class: NodeClassVariable}
nodeInfoMap[2408] = nodeInfo{displayName: "HaltedToReady", class: NodeClassObject}
nodeInfoMap[2409] = nodeInfo{displayName: "TransitionNumber", class: NodeClassVariable}
nodeInfoMap[2410] = nodeInfo{displayName: "ReadyToRunning", class: NodeClassObject}
nodeInfoMap[2411] = nodeInfo{displayName: "TransitionNumber", class: NodeClassVariable}
nodeInfoMap[2412] = nodeInfo{displayName: "RunningToHalted", class: NodeClassObject}
nodeInfoMap[2413] = nodeInfo{displayName: "TransitionNumber", class: NodeClassVariable}
nodeInfoMap[2414] = nodeInfo{displayName: "RunningToReady", class: NodeClassObject}
nodeInfoMap[2415] = nodeInfo{displayName: "TransitionNumber", class: NodeClassVariable}
nodeInfoMap[2416] = nodeInfo{displayName: "RunningToSuspended", class: NodeClassObject}
nodeInfoMap[2417] = nodeInfo{displayName: "TransitionNumber", class: NodeClassVariable}
nodeInfoMap[2418] = nodeInfo{displayName: "SuspendedToRunning", class: NodeClassObject}
nodeInfoMap[2419] = nodeInfo{displayName: "TransitionNumber", class: NodeClassVariable}
nodeInfoMap[2420] = nodeInfo{displayName: "SuspendedToHalted", class: NodeClassObject}
nodeInfoMap[2421] = nodeInfo{displayName: "TransitionNumber", class: NodeClassVariable}
nodeInfoMap[2422] = nodeInfo{displayName: "SuspendedToReady", class: NodeClassObject}
nodeInfoMap[2423] = nodeInfo{displayName: "TransitionNumber", class: NodeClassVariable}
nodeInfoMap[2424] = nodeInfo{displayName: "ReadyToHalted", class: NodeClassObject}
nodeInfoMap[2425] = nodeInfo{displayName: "TransitionNumber", class: NodeClassVariable}
nodeInfoMap[2426] = nodeInfo{displayName: "Start", class: NodeClassMethod, description: "Causes the Program to transition from the Ready state to the Running state."}
nodeInfoMap[2427] = nodeInfo{displayName: "Suspend", class: NodeClassMethod, description: "Causes the Program to transition from the Running state to the Suspended state."}
nodeInfoMap[2428] = nodeInfo{displayName: "Resume", class: NodeClassMethod, description: "Causes the Program to transition from the Suspended state to the Running state."}
nodeInfoMap[2429] = nodeInfo{displayName: "Halt", class: NodeClassMethod, description: "Causes the Program to transition from the Ready, Running or Suspended state to the Halted state."}
nodeInfoMap[2430] = nodeInfo{displayName: "Reset", class: NodeClassMethod, description: "Causes the Program to transition from the Halted state to the Ready state."}
nodeInfoMap[2730] = nodeInfo{displayName: "RegisterNodesCount", class: NodeClassVariable}
nodeInfoMap[2731] = nodeInfo{displayName: "UnregisterNodesCount", class: NodeClassVariable}
nodeInfoMap[2732] = nodeInfo{displayName: "MaxBrowseContinuationPoints", class: NodeClassVariable, description: "The maximum number of continuation points for Browse operations per session."}
nodeInfoMap[2733] = nodeInfo{displayName: "MaxQueryContinuationPoints", class: NodeClassVariable, description: "The maximum number of continuation points for Query operations per session."}
nodeInfoMap[2734] = nodeInfo{displayName: "MaxHistoryContinuationPoints", class: NodeClassVariable, description: "The maximum number of continuation points for ReadHistory operations per session."}
nodeInfoMap[2735] = nodeInfo{displayName: "MaxBrowseContinuationPoints", class: NodeClassVariable, description: "The maximum number of continuation points for Browse operations per session."}
nodeInfoMap[2736] = nodeInfo{displayName: "MaxQueryContinuationPoints", class: NodeClassVariable, description: "The maximum number of continuation points for Query operations per session."}
nodeInfoMap[2737] = nodeInfo{displayName: "MaxHistoryContinuationPoints", class: NodeClassVariable, description: "The maximum number of continuation points for ReadHistory operations per session."}
nodeInfoMap[2738] = nodeInfo{displayName: "SemanticChangeEventType", class: NodeClassObjectType}
nodeInfoMap[2739] = nodeInfo{displayName: "Changes", class: NodeClassVariable}
nodeInfoMap[2742] = nodeInfo{displayName: "Auditing", class: NodeClassVariable, description: "A flag indicating whether the server is currently generating audit events."}
nodeInfoMap[2744] = nodeInfo{displayName: "SessionsDiagnosticsSummary", class: NodeClassObject, description: "A summary of session level diagnostics."}
nodeInfoMap[2745] = nodeInfo{displayName: "SecureChannelId", class: NodeClassVariable, description: "The identifier for the secure channel that was changed."}
nodeInfoMap[2746] = nodeInfo{displayName: "ClientCertificateThumbprint", class: NodeClassVariable, description: "The thumbprint for certificate provided by the client."}
nodeInfoMap[2747] = nodeInfo{displayName: "ClientCertificateThumbprint", class: NodeClassVariable, description: "The thumbprint of the certificate provided by the client."}
nodeInfoMap[2748] = nodeInfo{displayName: "AuditUrlMismatchEventType", class: NodeClassObjectType}
nodeInfoMap[2749] = nodeInfo{displayName: "EndpointUrl", class: NodeClassVariable}
nodeInfoMap[2750] = nodeInfo{displayName: "AttributeId", class: NodeClassVariable}
nodeInfoMap[2751] = nodeInfo{displayName: "ParameterDataTypeId", class: NodeClassVariable}
nodeInfoMap[2752] = nodeInfo{displayName: "SecondsTillShutdown", class: NodeClassVariable}
nodeInfoMap[2753] = nodeInfo{displayName: "ShutdownReason", class: NodeClassVariable}
nodeInfoMap[2754] = nodeInfo{displayName: "AggregateFunctions", class: NodeClassObject, description: "A folder for the real time aggregates supported by the server."}
nodeInfoMap[2755] = nodeInfo{displayName: "StateVariableType", class: NodeClassVariableType}
nodeInfoMap[2756] = nodeInfo{displayName: "Id", class: NodeClassVariable}
nodeInfoMap[2757] = nodeInfo{displayName: "Name", class: NodeClassVariable}
nodeInfoMap[2758] = nodeInfo{displayName: "Number", class: NodeClassVariable}
nodeInfoMap[2759] = nodeInfo{displayName: "EffectiveDisplayName", class: NodeClassVariable}
nodeInfoMap[2760] = nodeInfo{displayName: "FiniteStateVariableType", class: NodeClassVariableType}
nodeInfoMap[2761] = nodeInfo{displayName: "Id", class: NodeClassVariable}
nodeInfoMap[2762] = nodeInfo{displayName: "TransitionVariableType", class: NodeClassVariableType}
nodeInfoMap[2763] = nodeInfo{displayName: "Id", class: NodeClassVariable}
nodeInfoMap[2764] = nodeInfo{displayName: "Name", class: NodeClassVariable}
nodeInfoMap[2765] = nodeInfo{displayName: "Number", class: NodeClassVariable}
nodeInfoMap[2766] = nodeInfo{
displayName: "TransitionTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[2767] = nodeInfo{
displayName: "FiniteTransitionVariableType",
class: NodeClassVariableType,
description: "",
}
nodeInfoMap[2768] = nodeInfo{
displayName: "Id",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[2769] = nodeInfo{
displayName: "CurrentState",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[2770] = nodeInfo{
displayName: "LastTransition",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[2771] = nodeInfo{
displayName: "FiniteStateMachineType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[2772] = nodeInfo{
displayName: "CurrentState",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[2773] = nodeInfo{
displayName: "LastTransition",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[2774] = nodeInfo{
displayName: "Transition",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[2775] = nodeInfo{
displayName: "FromState",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[2776] = nodeInfo{
displayName: "ToState",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[2777] = nodeInfo{
displayName: "OldStateId",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[2778] = nodeInfo{
displayName: "NewStateId",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[2782] = nodeInfo{
displayName: "ConditionType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[2787] = nodeInfo{
displayName: "RefreshStartEventType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[2788] = nodeInfo{
displayName: "RefreshEndEventType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[2789] = nodeInfo{
displayName: "RefreshRequiredEventType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[2790] = nodeInfo{
displayName: "AuditConditionEventType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[2803] = nodeInfo{
displayName: "AuditConditionEnableEventType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[2829] = nodeInfo{
displayName: "AuditConditionCommentEventType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[2830] = nodeInfo{
displayName: "DialogConditionType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[2831] = nodeInfo{
displayName: "Prompt",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[2881] = nodeInfo{
displayName: "AcknowledgeableConditionType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[2915] = nodeInfo{
displayName: "AlarmConditionType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[2929] = nodeInfo{
displayName: "ShelvedStateMachineType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[2930] = nodeInfo{
displayName: "Unshelved",
class: NodeClassObject,
description: "",
}
nodeInfoMap[2932] = nodeInfo{
displayName: "TimedShelved",
class: NodeClassObject,
description: "",
}
nodeInfoMap[2933] = nodeInfo{
displayName: "OneShotShelved",
class: NodeClassObject,
description: "",
}
nodeInfoMap[2935] = nodeInfo{
displayName: "UnshelvedToTimedShelved",
class: NodeClassObject,
description: "",
}
nodeInfoMap[2936] = nodeInfo{
displayName: "UnshelvedToOneShotShelved",
class: NodeClassObject,
description: "",
}
nodeInfoMap[2940] = nodeInfo{
displayName: "TimedShelvedToUnshelved",
class: NodeClassObject,
description: "",
}
nodeInfoMap[2942] = nodeInfo{
displayName: "TimedShelvedToOneShotShelved",
class: NodeClassObject,
description: "",
}
nodeInfoMap[2943] = nodeInfo{
displayName: "OneShotShelvedToUnshelved",
class: NodeClassObject,
description: "",
}
nodeInfoMap[2945] = nodeInfo{
displayName: "OneShotShelvedToTimedShelved",
class: NodeClassObject,
description: "",
}
nodeInfoMap[2947] = nodeInfo{
displayName: "Unshelve",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[2948] = nodeInfo{
displayName: "OneShotShelve",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[2949] = nodeInfo{
displayName: "TimedShelve",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[2955] = nodeInfo{
displayName: "LimitAlarmType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[2991] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[2992] = nodeInfo{
displayName: "SecondsTillShutdown",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[2993] = nodeInfo{
displayName: "ShutdownReason",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[2994] = nodeInfo{
displayName: "Auditing",
class: NodeClassVariable,
description: "A flag indicating whether the server is currently generating audit events.",
}
nodeInfoMap[2996] = nodeInfo{
displayName: "ModellingRules",
class: NodeClassObject,
description: "A folder for the modelling rules supported by the server.",
}
nodeInfoMap[2997] = nodeInfo{
displayName: "AggregateFunctions",
class: NodeClassObject,
description: "A folder for the real time aggregates supported by the server.",
}
nodeInfoMap[2998] = nodeInfo{
displayName: "EventNotificationsCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[2999] = nodeInfo{
displayName: "AuditHistoryEventUpdateEventType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[3003] = nodeInfo{
displayName: "Filter",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3006] = nodeInfo{
displayName: "AuditHistoryValueUpdateEventType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[3012] = nodeInfo{
displayName: "AuditHistoryDeleteEventType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[3014] = nodeInfo{
displayName: "AuditHistoryRawModifyDeleteEventType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[3015] = nodeInfo{
displayName: "IsDeleteModified",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3016] = nodeInfo{
displayName: "StartTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3017] = nodeInfo{
displayName: "EndTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3019] = nodeInfo{
displayName: "AuditHistoryAtTimeDeleteEventType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[3020] = nodeInfo{
displayName: "ReqTimes",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3021] = nodeInfo{
displayName: "OldValues",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3022] = nodeInfo{
displayName: "AuditHistoryEventDeleteEventType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[3023] = nodeInfo{
displayName: "EventIds",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3024] = nodeInfo{
displayName: "OldValues",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3025] = nodeInfo{
displayName: "UpdatedNode",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3026] = nodeInfo{
displayName: "UpdatedNode",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3027] = nodeInfo{
displayName: "UpdatedNode",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3028] = nodeInfo{
displayName: "PerformInsertReplace",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3029] = nodeInfo{
displayName: "NewValues",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3030] = nodeInfo{
displayName: "OldValues",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3031] = nodeInfo{
displayName: "PerformInsertReplace",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3032] = nodeInfo{
displayName: "NewValues",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3033] = nodeInfo{
displayName: "OldValues",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3034] = nodeInfo{
displayName: "OldValues",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3035] = nodeInfo{
displayName: "EventQueueOverflowEventType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[3048] = nodeInfo{
displayName: "EventTypes",
class: NodeClassObject,
description: "",
}
nodeInfoMap[3049] = nodeInfo{
displayName: "SoftwareCertificates",
class: NodeClassVariable,
description: "The software certificates owned by the server.",
}
nodeInfoMap[3050] = nodeInfo{
displayName: "MaxResponseMessageSize",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3051] = nodeInfo{
displayName: "BuildInfoType",
class: NodeClassVariableType,
description: "",
}
nodeInfoMap[3052] = nodeInfo{
displayName: "ProductUri",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3053] = nodeInfo{
displayName: "ManufacturerName",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3054] = nodeInfo{
displayName: "ProductName",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3055] = nodeInfo{
displayName: "SoftwareVersion",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3056] = nodeInfo{
displayName: "BuildNumber",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3057] = nodeInfo{
displayName: "BuildDate",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3058] = nodeInfo{
displayName: "ClientCertificate",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3059] = nodeInfo{
displayName: "AggregateConfiguration",
class: NodeClassObject,
description: "",
}
nodeInfoMap[3062] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "The default binary encoding for a data type.",
}
nodeInfoMap[3063] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "The default XML encoding for a data type.",
}
nodeInfoMap[3065] = nodeInfo{
displayName: "AlwaysGeneratesEvent",
class: NodeClassReferenceType,
description: "The type for references from a node to an event type that is always raised by node.",
}
nodeInfoMap[3067] = nodeInfo{
displayName: "Icon",
class: NodeClassVariable,
description: "A small image representing the object.",
}
nodeInfoMap[3068] = nodeInfo{
displayName: "NodeVersion",
class: NodeClassVariable,
description: "The version number of the node (used to indicate changes to references of the owning node).",
}
nodeInfoMap[3069] = nodeInfo{
displayName: "LocalTime",
class: NodeClassVariable,
description: "The local time where the owning variable value was collected.",
}
nodeInfoMap[3070] = nodeInfo{
displayName: "AllowNulls",
class: NodeClassVariable,
description: "Whether the value of the owning variable is allowed to be null.",
}
nodeInfoMap[3071] = nodeInfo{
displayName: "EnumValues",
class: NodeClassVariable,
description: "The human readable strings associated with the values of an enumerated value (when values have no sequence).",
}
nodeInfoMap[3072] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "The input arguments for a method.",
}
nodeInfoMap[3073] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "The output arguments for a method.",
}
nodeInfoMap[3074] = nodeInfo{
displayName: "StartTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3075] = nodeInfo{
displayName: "CurrentTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3076] = nodeInfo{
displayName: "State",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3077] = nodeInfo{
displayName: "BuildInfo",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3078] = nodeInfo{
displayName: "ProductUri",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3079] = nodeInfo{
displayName: "ManufacturerName",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3080] = nodeInfo{
displayName: "ProductName",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3081] = nodeInfo{
displayName: "SoftwareVersion",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3082] = nodeInfo{
displayName: "BuildNumber",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3083] = nodeInfo{
displayName: "BuildDate",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3084] = nodeInfo{
displayName: "SecondsTillShutdown",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3085] = nodeInfo{
displayName: "ShutdownReason",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3086] = nodeInfo{
displayName: "ServerProfileArray",
class: NodeClassVariable,
description: "A list of profiles supported by the server.",
}
nodeInfoMap[3087] = nodeInfo{
displayName: "LocaleIdArray",
class: NodeClassVariable,
description: "A list of locales supported by the server.",
}
nodeInfoMap[3088] = nodeInfo{
displayName: "MinSupportedSampleRate",
class: NodeClassVariable,
description: "The minimum sampling interval supported by the server.",
}
nodeInfoMap[3089] = nodeInfo{
displayName: "MaxBrowseContinuationPoints",
class: NodeClassVariable,
description: "The maximum number of continuation points for Browse operations per session.",
}
nodeInfoMap[3090] = nodeInfo{
displayName: "MaxQueryContinuationPoints",
class: NodeClassVariable,
description: "The maximum number of continuation points for Query operations per session.",
}
nodeInfoMap[3091] = nodeInfo{
displayName: "MaxHistoryContinuationPoints",
class: NodeClassVariable,
description: "The maximum number of continuation points for ReadHistory operations per session.",
}
nodeInfoMap[3092] = nodeInfo{
displayName: "SoftwareCertificates",
class: NodeClassVariable,
description: "The software certificates owned by the server.",
}
nodeInfoMap[3093] = nodeInfo{
displayName: "ModellingRules",
class: NodeClassObject,
description: "A folder for the modelling rules supported by the server.",
}
nodeInfoMap[3094] = nodeInfo{
displayName: "AggregateFunctions",
class: NodeClassObject,
description: "A folder for the real time aggregates supported by the server.",
}
nodeInfoMap[3095] = nodeInfo{
displayName: "ServerDiagnosticsSummary",
class: NodeClassVariable,
description: "A summary of server level diagnostics.",
}
nodeInfoMap[3096] = nodeInfo{
displayName: "ServerViewCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3097] = nodeInfo{
displayName: "CurrentSessionCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3098] = nodeInfo{
displayName: "CumulatedSessionCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3099] = nodeInfo{
displayName: "SecurityRejectedSessionCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3100] = nodeInfo{
displayName: "RejectedSessionCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3101] = nodeInfo{
displayName: "SessionTimeoutCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3102] = nodeInfo{
displayName: "SessionAbortCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3104] = nodeInfo{
displayName: "PublishingIntervalCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3105] = nodeInfo{
displayName: "CurrentSubscriptionCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3106] = nodeInfo{
displayName: "CumulatedSubscriptionCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3107] = nodeInfo{
displayName: "SecurityRejectedRequestsCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3108] = nodeInfo{
displayName: "RejectedRequestsCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3110] = nodeInfo{
displayName: "SubscriptionDiagnosticsArray",
class: NodeClassVariable,
description: "A list of diagnostics for each active subscription.",
}
nodeInfoMap[3111] = nodeInfo{
displayName: "SessionsDiagnosticsSummary",
class: NodeClassObject,
description: "A summary of session level diagnostics.",
}
nodeInfoMap[3112] = nodeInfo{
displayName: "SessionDiagnosticsArray",
class: NodeClassVariable,
description: "A list of diagnostics for each active session.",
}
nodeInfoMap[3113] = nodeInfo{
displayName: "SessionSecurityDiagnosticsArray",
class: NodeClassVariable,
description: "A list of security related diagnostics for each active session.",
}
nodeInfoMap[3114] = nodeInfo{
displayName: "EnabledFlag",
class: NodeClassVariable,
description: "If TRUE the diagnostics collection is enabled.",
}
nodeInfoMap[3115] = nodeInfo{
displayName: "RedundancySupport",
class: NodeClassVariable,
description: "Indicates what style of redundancy is supported by the server.",
}
nodeInfoMap[3116] = nodeInfo{
displayName: "ServerViewCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3117] = nodeInfo{
displayName: "CurrentSessionCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3118] = nodeInfo{
displayName: "CumulatedSessionCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3119] = nodeInfo{
displayName: "SecurityRejectedSessionCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3120] = nodeInfo{
displayName: "RejectedSessionCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3121] = nodeInfo{
displayName: "SessionTimeoutCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3122] = nodeInfo{
displayName: "SessionAbortCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3124] = nodeInfo{
displayName: "PublishingIntervalCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3125] = nodeInfo{
displayName: "CurrentSubscriptionCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3126] = nodeInfo{
displayName: "CumulatedSubscriptionCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3127] = nodeInfo{
displayName: "SecurityRejectedRequestsCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3128] = nodeInfo{
displayName: "RejectedRequestsCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3129] = nodeInfo{
displayName: "SessionDiagnosticsArray",
class: NodeClassVariable,
description: "A list of diagnostics for each active session.",
}
nodeInfoMap[3130] = nodeInfo{
displayName: "SessionSecurityDiagnosticsArray",
class: NodeClassVariable,
description: "A list of security related diagnostics for each active session.",
}
nodeInfoMap[3131] = nodeInfo{
displayName: "SessionId",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3132] = nodeInfo{
displayName: "SessionName",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3133] = nodeInfo{
displayName: "ClientDescription",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3134] = nodeInfo{
displayName: "ServerUri",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3135] = nodeInfo{
displayName: "EndpointUrl",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3136] = nodeInfo{
displayName: "LocaleIds",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3137] = nodeInfo{
displayName: "ActualSessionTimeout",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3138] = nodeInfo{
displayName: "MaxResponseMessageSize",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3139] = nodeInfo{
displayName: "ClientConnectionTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3140] = nodeInfo{
displayName: "ClientLastContactTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3141] = nodeInfo{
displayName: "CurrentSubscriptionsCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3142] = nodeInfo{
displayName: "CurrentMonitoredItemsCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3143] = nodeInfo{
displayName: "CurrentPublishRequestsInQueue",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3151] = nodeInfo{
displayName: "ReadCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3152] = nodeInfo{
displayName: "HistoryReadCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3153] = nodeInfo{
displayName: "WriteCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3154] = nodeInfo{
displayName: "HistoryUpdateCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3155] = nodeInfo{
displayName: "CallCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3156] = nodeInfo{
displayName: "CreateMonitoredItemsCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3157] = nodeInfo{
displayName: "ModifyMonitoredItemsCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3158] = nodeInfo{
displayName: "SetMonitoringModeCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3159] = nodeInfo{
displayName: "SetTriggeringCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3160] = nodeInfo{
displayName: "DeleteMonitoredItemsCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3161] = nodeInfo{
displayName: "CreateSubscriptionCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3162] = nodeInfo{
displayName: "ModifySubscriptionCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3163] = nodeInfo{
displayName: "SetPublishingModeCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3164] = nodeInfo{
displayName: "PublishCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3165] = nodeInfo{
displayName: "RepublishCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3166] = nodeInfo{
displayName: "TransferSubscriptionsCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3167] = nodeInfo{
displayName: "DeleteSubscriptionsCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3168] = nodeInfo{
displayName: "AddNodesCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3169] = nodeInfo{
displayName: "AddReferencesCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3170] = nodeInfo{
displayName: "DeleteNodesCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3171] = nodeInfo{
displayName: "DeleteReferencesCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3172] = nodeInfo{
displayName: "BrowseCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3173] = nodeInfo{
displayName: "BrowseNextCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3174] = nodeInfo{
displayName: "TranslateBrowsePathsToNodeIdsCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3175] = nodeInfo{
displayName: "QueryFirstCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3176] = nodeInfo{
displayName: "QueryNextCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3177] = nodeInfo{
displayName: "RegisterNodesCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3178] = nodeInfo{
displayName: "UnregisterNodesCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3179] = nodeInfo{
displayName: "SessionId",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3180] = nodeInfo{
displayName: "ClientUserIdOfSession",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3181] = nodeInfo{
displayName: "ClientUserIdHistory",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3182] = nodeInfo{
displayName: "AuthenticationMechanism",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3183] = nodeInfo{
displayName: "Encoding",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3184] = nodeInfo{
displayName: "TransportProtocol",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3185] = nodeInfo{
displayName: "SecurityMode",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3186] = nodeInfo{
displayName: "SecurityPolicyUri",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3187] = nodeInfo{
displayName: "ClientCertificate",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3190] = nodeInfo{
displayName: "LocalTime",
class: NodeClassVariable,
description: "Information about the local time where the event originated.",
}
nodeInfoMap[3698] = nodeInfo{
displayName: "ProductUri",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3699] = nodeInfo{
displayName: "ManufacturerName",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3700] = nodeInfo{
displayName: "ProductName",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3701] = nodeInfo{
displayName: "SoftwareVersion",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3702] = nodeInfo{
displayName: "BuildNumber",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3703] = nodeInfo{
displayName: "BuildDate",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3704] = nodeInfo{
displayName: "SoftwareCertificates",
class: NodeClassVariable,
description: "The software certificates owned by the server.",
}
nodeInfoMap[3705] = nodeInfo{
displayName: "RejectedSessionCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3706] = nodeInfo{
displayName: "SessionsDiagnosticsSummary",
class: NodeClassObject,
description: "A summary of session level diagnostics.",
}
nodeInfoMap[3707] = nodeInfo{
displayName: "SessionDiagnosticsArray",
class: NodeClassVariable,
description: "A list of diagnostics for each active session.",
}
nodeInfoMap[3708] = nodeInfo{
displayName: "SessionSecurityDiagnosticsArray",
class: NodeClassVariable,
description: "A list of security related diagnostics for each active session.",
}
nodeInfoMap[3709] = nodeInfo{
displayName: "RedundancySupport",
class: NodeClassVariable,
description: "Indicates what style of redundancy is supported by the server.",
}
nodeInfoMap[3720] = nodeInfo{
displayName: "Id",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3724] = nodeInfo{
displayName: "Id",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3728] = nodeInfo{
displayName: "Id",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3732] = nodeInfo{
displayName: "Id",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3746] = nodeInfo{
displayName: "Id",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3750] = nodeInfo{
displayName: "Id",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3754] = nodeInfo{
displayName: "Id",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3806] = nodeInfo{
displayName: "ProgramTransitionAuditEventType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[3825] = nodeInfo{
displayName: "Transition",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3826] = nodeInfo{
displayName: "Id",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3830] = nodeInfo{
displayName: "CurrentState",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3831] = nodeInfo{
displayName: "Id",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3833] = nodeInfo{
displayName: "Number",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3835] = nodeInfo{
displayName: "LastTransition",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3836] = nodeInfo{
displayName: "Id",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3838] = nodeInfo{
displayName: "Number",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3839] = nodeInfo{
displayName: "TransitionTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3840] = nodeInfo{
displayName: "CreateSessionId",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3841] = nodeInfo{
displayName: "CreateClientName",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3842] = nodeInfo{
displayName: "InvocationCreationTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3843] = nodeInfo{
displayName: "LastTransitionTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3844] = nodeInfo{
displayName: "LastMethodCall",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3845] = nodeInfo{
displayName: "LastMethodSessionId",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3846] = nodeInfo{
displayName: "LastMethodInputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3847] = nodeInfo{
displayName: "LastMethodOutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3848] = nodeInfo{
displayName: "LastMethodCallTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3849] = nodeInfo{
displayName: "LastMethodReturnStatus",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3850] = nodeInfo{
displayName: "FinalResultData",
class: NodeClassObject,
description: "",
}
nodeInfoMap[3874] = nodeInfo{
displayName: "Retain",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[3875] = nodeInfo{
displayName: "ConditionRefresh",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[3876] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[4170] = nodeInfo{
displayName: "EventId",
class: NodeClassVariable,
description: "A globally unique identifier for the event.",
}
nodeInfoMap[6098] = nodeInfo{
displayName: "StateNumber",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[6100] = nodeInfo{
displayName: "StateNumber",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[6101] = nodeInfo{
displayName: "StateNumber",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[7591] = nodeInfo{
displayName: "EnumStrings",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[7594] = nodeInfo{
displayName: "EnumValueType",
class: NodeClassDataType,
description: "A mapping between a value of an enumerated type and a name and description.",
}
nodeInfoMap[7595] = nodeInfo{
displayName: "EnumStrings",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[7596] = nodeInfo{
displayName: "EnumStrings",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[7597] = nodeInfo{
displayName: "EnumStrings",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[7598] = nodeInfo{
displayName: "EnumStrings",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[7605] = nodeInfo{
displayName: "EnumStrings",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[7611] = nodeInfo{
displayName: "EnumStrings",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[7612] = nodeInfo{
displayName: "EnumStrings",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[7614] = nodeInfo{
displayName: "EnumStrings",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[7616] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[7617] = nodeInfo{
displayName: "Opc.Ua",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[7619] = nodeInfo{
displayName: "NamespaceUri",
class: NodeClassVariable,
description: "A URI that uniquely identifies the dictionary.",
}
nodeInfoMap[7650] = nodeInfo{
displayName: "Argument",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[7656] = nodeInfo{
displayName: "EnumValueType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[7659] = nodeInfo{
displayName: "StatusResult",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[7662] = nodeInfo{
displayName: "UserTokenPolicy",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[7665] = nodeInfo{
displayName: "ApplicationDescription",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[7668] = nodeInfo{
displayName: "EndpointDescription",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[7671] = nodeInfo{
displayName: "UserIdentityToken",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[7674] = nodeInfo{
displayName: "AnonymousIdentityToken",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[7677] = nodeInfo{
displayName: "UserNameIdentityToken",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[7680] = nodeInfo{
displayName: "X509IdentityToken",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[7683] = nodeInfo{
displayName: "IssuedIdentityToken",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[7686] = nodeInfo{
displayName: "EndpointConfiguration",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[7692] = nodeInfo{
displayName: "BuildInfo",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[7698] = nodeInfo{
displayName: "SignedSoftwareCertificate",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[7728] = nodeInfo{
displayName: "AddNodesItem",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[7731] = nodeInfo{
displayName: "AddReferencesItem",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[7734] = nodeInfo{
displayName: "DeleteNodesItem",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[7737] = nodeInfo{
displayName: "DeleteReferencesItem",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[7782] = nodeInfo{
displayName: "RegisteredServer",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[7929] = nodeInfo{
displayName: "ContentFilterElement",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[7932] = nodeInfo{
displayName: "ContentFilter",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[7935] = nodeInfo{
displayName: "FilterOperand",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[7938] = nodeInfo{
displayName: "ElementOperand",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[7941] = nodeInfo{
displayName: "LiteralOperand",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[7944] = nodeInfo{
displayName: "AttributeOperand",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[7947] = nodeInfo{
displayName: "SimpleAttributeOperand",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8004] = nodeInfo{
displayName: "HistoryEvent",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8067] = nodeInfo{
displayName: "MonitoringFilter",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8073] = nodeInfo{
displayName: "EventFilter",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8076] = nodeInfo{
displayName: "AggregateConfiguration",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8172] = nodeInfo{
displayName: "HistoryEventFieldList",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8208] = nodeInfo{
displayName: "RedundantServerDataType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8211] = nodeInfo{
displayName: "SamplingIntervalDiagnosticsDataType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8214] = nodeInfo{
displayName: "ServerDiagnosticsSummaryDataType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8217] = nodeInfo{
displayName: "ServerStatusDataType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8220] = nodeInfo{
displayName: "SessionDiagnosticsDataType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8223] = nodeInfo{
displayName: "SessionSecurityDiagnosticsDataType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8226] = nodeInfo{
displayName: "ServiceCounterDataType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8229] = nodeInfo{
displayName: "SubscriptionDiagnosticsDataType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8232] = nodeInfo{
displayName: "ModelChangeStructureDataType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8235] = nodeInfo{
displayName: "SemanticChangeStructureDataType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8238] = nodeInfo{
displayName: "Range",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8241] = nodeInfo{
displayName: "EUInformation",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8244] = nodeInfo{
displayName: "Annotation",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8247] = nodeInfo{
displayName: "ProgramDiagnosticDataType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8251] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[8252] = nodeInfo{
displayName: "Opc.Ua",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8254] = nodeInfo{
displayName: "NamespaceUri",
class: NodeClassVariable,
description: "A URI that uniquely identifies the dictionary.",
}
nodeInfoMap[8285] = nodeInfo{
displayName: "Argument",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8291] = nodeInfo{
displayName: "EnumValueType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8294] = nodeInfo{
displayName: "StatusResult",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8297] = nodeInfo{
displayName: "UserTokenPolicy",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8300] = nodeInfo{
displayName: "ApplicationDescription",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8303] = nodeInfo{
displayName: "EndpointDescription",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8306] = nodeInfo{
displayName: "UserIdentityToken",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8309] = nodeInfo{
displayName: "AnonymousIdentityToken",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8312] = nodeInfo{
displayName: "UserNameIdentityToken",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8315] = nodeInfo{
displayName: "X509IdentityToken",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8318] = nodeInfo{
displayName: "IssuedIdentityToken",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8321] = nodeInfo{
displayName: "EndpointConfiguration",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8327] = nodeInfo{
displayName: "BuildInfo",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8333] = nodeInfo{
displayName: "SignedSoftwareCertificate",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8363] = nodeInfo{
displayName: "AddNodesItem",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8366] = nodeInfo{
displayName: "AddReferencesItem",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8369] = nodeInfo{
displayName: "DeleteNodesItem",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8372] = nodeInfo{
displayName: "DeleteReferencesItem",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8417] = nodeInfo{
displayName: "RegisteredServer",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8564] = nodeInfo{
displayName: "ContentFilterElement",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8567] = nodeInfo{
displayName: "ContentFilter",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8570] = nodeInfo{
displayName: "FilterOperand",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8573] = nodeInfo{
displayName: "ElementOperand",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8576] = nodeInfo{
displayName: "LiteralOperand",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8579] = nodeInfo{
displayName: "AttributeOperand",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8582] = nodeInfo{
displayName: "SimpleAttributeOperand",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8639] = nodeInfo{
displayName: "HistoryEvent",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8702] = nodeInfo{
displayName: "MonitoringFilter",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8708] = nodeInfo{
displayName: "EventFilter",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8711] = nodeInfo{
displayName: "AggregateConfiguration",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8807] = nodeInfo{
displayName: "HistoryEventFieldList",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8843] = nodeInfo{
displayName: "RedundantServerDataType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8846] = nodeInfo{
displayName: "SamplingIntervalDiagnosticsDataType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8849] = nodeInfo{
displayName: "ServerDiagnosticsSummaryDataType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8852] = nodeInfo{
displayName: "ServerStatusDataType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8855] = nodeInfo{
displayName: "SessionDiagnosticsDataType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8858] = nodeInfo{
displayName: "SessionSecurityDiagnosticsDataType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8861] = nodeInfo{
displayName: "ServiceCounterDataType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8864] = nodeInfo{
displayName: "SubscriptionDiagnosticsDataType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8867] = nodeInfo{
displayName: "ModelChangeStructureDataType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8870] = nodeInfo{
displayName: "SemanticChangeStructureDataType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8873] = nodeInfo{
displayName: "Range",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8876] = nodeInfo{
displayName: "EUInformation",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8879] = nodeInfo{
displayName: "Annotation",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8882] = nodeInfo{
displayName: "ProgramDiagnosticDataType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8888] = nodeInfo{
displayName: "MaxLifetimeCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8889] = nodeInfo{
displayName: "LatePublishRequestCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8890] = nodeInfo{
displayName: "CurrentKeepAliveCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8891] = nodeInfo{
displayName: "CurrentLifetimeCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8892] = nodeInfo{
displayName: "UnacknowledgedMessageCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8893] = nodeInfo{
displayName: "DiscardedMessageCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8894] = nodeInfo{
displayName: "MonitoredItemCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8895] = nodeInfo{
displayName: "DisabledMonitoredItemCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8896] = nodeInfo{
displayName: "MonitoringQueueOverflowCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8897] = nodeInfo{
displayName: "NextSequenceNumber",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8898] = nodeInfo{
displayName: "TotalRequestCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8900] = nodeInfo{
displayName: "TotalRequestCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8902] = nodeInfo{
displayName: "EventQueueOverFlowCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8912] = nodeInfo{
displayName: "TimeZoneDataType",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[8913] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[8914] = nodeInfo{
displayName: "TimeZoneDataType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8917] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[8918] = nodeInfo{
displayName: "TimeZoneDataType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[8927] = nodeInfo{
displayName: "AuditConditionRespondEventType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[8944] = nodeInfo{
displayName: "AuditConditionAcknowledgeEventType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[8945] = nodeInfo{
displayName: "EventId",
class: NodeClassVariable,
description: "A globally unique identifier for the event.",
}
nodeInfoMap[8961] = nodeInfo{
displayName: "AuditConditionConfirmEventType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[8962] = nodeInfo{
displayName: "EventId",
class: NodeClassVariable,
description: "A globally unique identifier for the event.",
}
nodeInfoMap[8995] = nodeInfo{
displayName: "TwoStateVariableType",
class: NodeClassVariableType,
description: "",
}
nodeInfoMap[8996] = nodeInfo{
displayName: "Id",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9000] = nodeInfo{
displayName: "TransitionTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9001] = nodeInfo{
displayName: "EffectiveTransitionTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9002] = nodeInfo{
displayName: "ConditionVariableType",
class: NodeClassVariableType,
description: "",
}
nodeInfoMap[9003] = nodeInfo{
displayName: "SourceTimestamp",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9004] = nodeInfo{
displayName: "HasTrueSubState",
class: NodeClassReferenceType,
description: "",
}
nodeInfoMap[9005] = nodeInfo{
displayName: "HasFalseSubState",
class: NodeClassReferenceType,
description: "",
}
nodeInfoMap[9006] = nodeInfo{
displayName: "HasCondition",
class: NodeClassReferenceType,
description: "",
}
nodeInfoMap[9009] = nodeInfo{
displayName: "ConditionName",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9010] = nodeInfo{
displayName: "BranchId",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9011] = nodeInfo{
displayName: "EnabledState",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9012] = nodeInfo{
displayName: "Id",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9015] = nodeInfo{
displayName: "EffectiveDisplayName",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9016] = nodeInfo{
displayName: "TransitionTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9017] = nodeInfo{
displayName: "EffectiveTransitionTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9020] = nodeInfo{
displayName: "Quality",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9021] = nodeInfo{
displayName: "SourceTimestamp",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9022] = nodeInfo{
displayName: "LastSeverity",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9023] = nodeInfo{
displayName: "SourceTimestamp",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9024] = nodeInfo{
displayName: "Comment",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9025] = nodeInfo{
displayName: "SourceTimestamp",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9026] = nodeInfo{
displayName: "ClientUserId",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9027] = nodeInfo{
displayName: "Enable",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[9028] = nodeInfo{
displayName: "Disable",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[9029] = nodeInfo{
displayName: "AddComment",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[9030] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9035] = nodeInfo{
displayName: "EnabledState",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9036] = nodeInfo{
displayName: "Id",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9055] = nodeInfo{
displayName: "DialogState",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9056] = nodeInfo{
displayName: "Id",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9060] = nodeInfo{
displayName: "TransitionTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9064] = nodeInfo{
displayName: "ResponseOptionSet",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9065] = nodeInfo{
displayName: "DefaultResponse",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9066] = nodeInfo{
displayName: "OkResponse",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9067] = nodeInfo{
displayName: "CancelResponse",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9068] = nodeInfo{
displayName: "LastResponse",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9069] = nodeInfo{
displayName: "Respond",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[9070] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9073] = nodeInfo{
displayName: "EnabledState",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9074] = nodeInfo{
displayName: "Id",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9093] = nodeInfo{
displayName: "AckedState",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9094] = nodeInfo{
displayName: "Id",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9098] = nodeInfo{
displayName: "TransitionTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9102] = nodeInfo{
displayName: "ConfirmedState",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9103] = nodeInfo{
displayName: "Id",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9107] = nodeInfo{
displayName: "TransitionTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9111] = nodeInfo{
displayName: "Acknowledge",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[9112] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9113] = nodeInfo{
displayName: "Confirm",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[9114] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9115] = nodeInfo{
displayName: "UnshelveTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9118] = nodeInfo{
displayName: "EnabledState",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9119] = nodeInfo{
displayName: "Id",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9160] = nodeInfo{
displayName: "ActiveState",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9161] = nodeInfo{
displayName: "Id",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9164] = nodeInfo{
displayName: "EffectiveDisplayName",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9165] = nodeInfo{
displayName: "TransitionTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9166] = nodeInfo{
displayName: "EffectiveTransitionTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9169] = nodeInfo{
displayName: "SuppressedState",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9170] = nodeInfo{
displayName: "Id",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9174] = nodeInfo{
displayName: "TransitionTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9178] = nodeInfo{
displayName: "ShelvingState",
class: NodeClassObject,
description: "",
}
nodeInfoMap[9179] = nodeInfo{
displayName: "CurrentState",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9180] = nodeInfo{
displayName: "Id",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9184] = nodeInfo{
displayName: "LastTransition",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9185] = nodeInfo{
displayName: "Id",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9188] = nodeInfo{
displayName: "TransitionTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9189] = nodeInfo{
displayName: "UnshelveTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9211] = nodeInfo{
displayName: "Unshelve",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[9212] = nodeInfo{
displayName: "OneShotShelve",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[9213] = nodeInfo{
displayName: "TimedShelve",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[9214] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9215] = nodeInfo{
displayName: "SuppressedOrShelved",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9216] = nodeInfo{
displayName: "MaxTimeShelved",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9318] = nodeInfo{
displayName: "ExclusiveLimitStateMachineType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[9329] = nodeInfo{
displayName: "HighHigh",
class: NodeClassObject,
description: "",
}
nodeInfoMap[9330] = nodeInfo{
displayName: "StateNumber",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9331] = nodeInfo{
displayName: "High",
class: NodeClassObject,
description: "",
}
nodeInfoMap[9332] = nodeInfo{
displayName: "StateNumber",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9333] = nodeInfo{
displayName: "Low",
class: NodeClassObject,
description: "",
}
nodeInfoMap[9334] = nodeInfo{
displayName: "StateNumber",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9335] = nodeInfo{
displayName: "LowLow",
class: NodeClassObject,
description: "",
}
nodeInfoMap[9336] = nodeInfo{
displayName: "StateNumber",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9337] = nodeInfo{
displayName: "LowLowToLow",
class: NodeClassObject,
description: "",
}
nodeInfoMap[9338] = nodeInfo{
displayName: "LowToLowLow",
class: NodeClassObject,
description: "",
}
nodeInfoMap[9339] = nodeInfo{
displayName: "HighHighToHigh",
class: NodeClassObject,
description: "",
}
nodeInfoMap[9340] = nodeInfo{
displayName: "HighToHighHigh",
class: NodeClassObject,
description: "",
}
nodeInfoMap[9341] = nodeInfo{
displayName: "ExclusiveLimitAlarmType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[9398] = nodeInfo{
displayName: "ActiveState",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9399] = nodeInfo{
displayName: "Id",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9455] = nodeInfo{
displayName: "LimitState",
class: NodeClassObject,
description: "",
}
nodeInfoMap[9456] = nodeInfo{
displayName: "CurrentState",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9457] = nodeInfo{
displayName: "Id",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9461] = nodeInfo{
displayName: "LastTransition",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9462] = nodeInfo{
displayName: "Id",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9465] = nodeInfo{
displayName: "TransitionTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9482] = nodeInfo{
displayName: "ExclusiveLevelAlarmType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[9623] = nodeInfo{
displayName: "ExclusiveRateOfChangeAlarmType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[9764] = nodeInfo{
displayName: "ExclusiveDeviationAlarmType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[9905] = nodeInfo{
displayName: "SetpointNode",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9906] = nodeInfo{
displayName: "NonExclusiveLimitAlarmType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[9963] = nodeInfo{
displayName: "ActiveState",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[9964] = nodeInfo{
displayName: "Id",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[10020] = nodeInfo{
displayName: "HighHighState",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[10021] = nodeInfo{
displayName: "Id",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[10025] = nodeInfo{
displayName: "TransitionTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[10029] = nodeInfo{
displayName: "HighState",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[10030] = nodeInfo{
displayName: "Id",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[10034] = nodeInfo{
displayName: "TransitionTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[10038] = nodeInfo{
displayName: "LowState",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[10039] = nodeInfo{
displayName: "Id",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[10043] = nodeInfo{
displayName: "TransitionTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[10047] = nodeInfo{
displayName: "LowLowState",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[10048] = nodeInfo{
displayName: "Id",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[10052] = nodeInfo{
displayName: "TransitionTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[10060] = nodeInfo{
displayName: "NonExclusiveLevelAlarmType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[10214] = nodeInfo{
displayName: "NonExclusiveRateOfChangeAlarmType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[10368] = nodeInfo{
displayName: "NonExclusiveDeviationAlarmType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[10522] = nodeInfo{
displayName: "SetpointNode",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[10523] = nodeInfo{
displayName: "DiscreteAlarmType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[10637] = nodeInfo{
displayName: "OffNormalAlarmType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[10751] = nodeInfo{
displayName: "TripAlarmType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[11093] = nodeInfo{
displayName: "AuditConditionShelvingEventType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[11110] = nodeInfo{
displayName: "TrueState",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11111] = nodeInfo{
displayName: "FalseState",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11112] = nodeInfo{
displayName: "ConditionClassId",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11113] = nodeInfo{
displayName: "ConditionClassName",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11120] = nodeInfo{
displayName: "InputNode",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11124] = nodeInfo{
displayName: "HighHighLimit",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11125] = nodeInfo{
displayName: "HighLimit",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11126] = nodeInfo{
displayName: "LowLimit",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11127] = nodeInfo{
displayName: "LowLowLimit",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11158] = nodeInfo{
displayName: "NormalState",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11163] = nodeInfo{
displayName: "BaseConditionClassType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[11164] = nodeInfo{
displayName: "ProcessConditionClassType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[11165] = nodeInfo{
displayName: "MaintenanceConditionClassType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[11166] = nodeInfo{
displayName: "SystemConditionClassType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[11168] = nodeInfo{
displayName: "TreatUncertainAsBad",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11169] = nodeInfo{
displayName: "PercentDataBad",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11170] = nodeInfo{
displayName: "PercentDataGood",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11171] = nodeInfo{
displayName: "UseSlopedExtrapolation",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11172] = nodeInfo{
displayName: "AggregateFunctions",
class: NodeClassObject,
description: "",
}
nodeInfoMap[11187] = nodeInfo{
displayName: "AggregateConfigurationType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[11188] = nodeInfo{
displayName: "TreatUncertainAsBad",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11189] = nodeInfo{
displayName: "PercentDataBad",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11190] = nodeInfo{
displayName: "PercentDataGood",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11191] = nodeInfo{
displayName: "UseSlopedExtrapolation",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11192] = nodeInfo{
displayName: "HistoryServerCapabilities",
class: NodeClassObject,
description: "",
}
nodeInfoMap[11193] = nodeInfo{
displayName: "AccessHistoryDataCapability",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11196] = nodeInfo{
displayName: "InsertDataCapability",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11197] = nodeInfo{
displayName: "ReplaceDataCapability",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11198] = nodeInfo{
displayName: "UpdateDataCapability",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11199] = nodeInfo{
displayName: "DeleteRawCapability",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11200] = nodeInfo{
displayName: "DeleteAtTimeCapability",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11201] = nodeInfo{
displayName: "AggregateFunctions",
class: NodeClassObject,
description: "",
}
nodeInfoMap[11202] = nodeInfo{
displayName: "HA Configuration",
class: NodeClassObject,
description: "",
}
nodeInfoMap[11203] = nodeInfo{
displayName: "AggregateConfiguration",
class: NodeClassObject,
description: "",
}
nodeInfoMap[11204] = nodeInfo{
displayName: "TreatUncertainAsBad",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11205] = nodeInfo{
displayName: "PercentDataBad",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11206] = nodeInfo{
displayName: "PercentDataGood",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11207] = nodeInfo{
displayName: "UseSlopedExtrapolation",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11208] = nodeInfo{
displayName: "Stepped",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11214] = nodeInfo{
displayName: "Annotations",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11215] = nodeInfo{
displayName: "HistoricalEventFilter",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11234] = nodeInfo{
displayName: "HistoryUpdateType",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[11238] = nodeInfo{
displayName: "MultiStateValueDiscreteType",
class: NodeClassVariableType,
description: "",
}
nodeInfoMap[11241] = nodeInfo{
displayName: "EnumValues",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11242] = nodeInfo{
displayName: "AccessHistoryEventsCapability",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11268] = nodeInfo{
displayName: "MaxReturnDataValues",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11269] = nodeInfo{
displayName: "MaxReturnEventValues",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11270] = nodeInfo{
displayName: "InsertAnnotationCapability",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11273] = nodeInfo{
displayName: "MaxReturnDataValues",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11274] = nodeInfo{
displayName: "MaxReturnEventValues",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11275] = nodeInfo{
displayName: "InsertAnnotationCapability",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11278] = nodeInfo{
displayName: "InsertEventCapability",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11279] = nodeInfo{
displayName: "ReplaceEventCapability",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11280] = nodeInfo{
displayName: "UpdateEventCapability",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11281] = nodeInfo{
displayName: "InsertEventCapability",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11282] = nodeInfo{
displayName: "ReplaceEventCapability",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11283] = nodeInfo{
displayName: "UpdateEventCapability",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11285] = nodeInfo{
displayName: "TimeAverage2",
class: NodeClassObject,
description: "Retrieve the time weighted average data over the interval using Simple Bounding Values.",
}
nodeInfoMap[11286] = nodeInfo{
displayName: "Minimum2",
class: NodeClassObject,
description: "Retrieve the minimum value in the interval including the Simple Bounding Values.",
}
nodeInfoMap[11287] = nodeInfo{
displayName: "Maximum2",
class: NodeClassObject,
description: "Retrieve the maximum value in the interval including the Simple Bounding Values.",
}
nodeInfoMap[11288] = nodeInfo{
displayName: "Range2",
class: NodeClassObject,
description: "Retrieve the difference between the Minimum2 and Maximum2 value over the interval.",
}
nodeInfoMap[11292] = nodeInfo{
displayName: "WorstQuality2",
class: NodeClassObject,
description: "Retrieve the worst StatusCode of data in the interval including the Simple Bounding Values.",
}
nodeInfoMap[11293] = nodeInfo{
displayName: "PerformUpdateType",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[11304] = nodeInfo{
displayName: "Total2",
class: NodeClassObject,
description: "Retrieve the total (time integral) of the data over the interval using Simple Bounding Values.",
}
nodeInfoMap[11305] = nodeInfo{
displayName: "MinimumActualTime2",
class: NodeClassObject,
description: "Retrieve the minimum value with the actual timestamp including the Simple Bounding Values.",
}
nodeInfoMap[11306] = nodeInfo{
displayName: "MaximumActualTime2",
class: NodeClassObject,
description: "Retrieve the maximum value with the actual timestamp including the Simple Bounding Values.",
}
nodeInfoMap[11307] = nodeInfo{
displayName: "DurationInStateZero",
class: NodeClassObject,
description: "Retrieve the time a Boolean or numeric was in a zero state using Simple Bounding Values.",
}
nodeInfoMap[11308] = nodeInfo{
displayName: "DurationInStateNonZero",
class: NodeClassObject,
description: "Retrieve the time a Boolean or numeric was in a non-zero state using Simple Bounding Values.",
}
nodeInfoMap[11312] = nodeInfo{
displayName: "CurrentServerId",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11313] = nodeInfo{
displayName: "RedundantServerArray",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11314] = nodeInfo{
displayName: "ServerUriArray",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11322] = nodeInfo{
displayName: "TransitionNumber",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11323] = nodeInfo{
displayName: "TransitionNumber",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11324] = nodeInfo{
displayName: "TransitionNumber",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11325] = nodeInfo{
displayName: "TransitionNumber",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11326] = nodeInfo{
displayName: "TransitionNumber",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11327] = nodeInfo{
displayName: "TransitionNumber",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11340] = nodeInfo{
displayName: "TransitionNumber",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11341] = nodeInfo{
displayName: "TransitionNumber",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11342] = nodeInfo{
displayName: "TransitionNumber",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11343] = nodeInfo{
displayName: "TransitionNumber",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11426] = nodeInfo{
displayName: "StandardDeviationSample",
class: NodeClassObject,
description: "Retrieve the standard deviation for the interval for a sample of the population (n-1).",
}
nodeInfoMap[11427] = nodeInfo{
displayName: "StandardDeviationPopulation",
class: NodeClassObject,
description: "Retrieve the standard deviation for the interval for a complete population (n) which includes Simple Bounding Values.",
}
nodeInfoMap[11428] = nodeInfo{
displayName: "VarianceSample",
class: NodeClassObject,
description: "Retrieve the variance for the interval as calculated by the StandardDeviationSample.",
}
nodeInfoMap[11429] = nodeInfo{
displayName: "VariancePopulation",
class: NodeClassObject,
description: "Retrieve the variance for the interval as calculated by the StandardDeviationPopulation which includes Simple Bounding Values.",
}
nodeInfoMap[11432] = nodeInfo{
displayName: "EnumStrings",
class: NodeClassVariable,
description: "The human readable strings associated with the values of an enumerated value (when values are sequential).",
}
nodeInfoMap[11433] = nodeInfo{
displayName: "ValueAsText",
class: NodeClassVariable,
description: "The string representation of the current value for a variable with an enumerated data type.",
}
nodeInfoMap[11436] = nodeInfo{
displayName: "ProgressEventType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[11446] = nodeInfo{
displayName: "SystemStatusChangeEventType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[11456] = nodeInfo{
displayName: "EffectiveTransitionTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11461] = nodeInfo{
displayName: "ValueAsText",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11485] = nodeInfo{
displayName: "SecureChannelId",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11487] = nodeInfo{
displayName: "OptionSetType",
class: NodeClassVariableType,
description: "",
}
nodeInfoMap[11488] = nodeInfo{
displayName: "OptionSetValues",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11489] = nodeInfo{
displayName: "GetMonitoredItems",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[11490] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11491] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11492] = nodeInfo{
displayName: "GetMonitoredItems",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[11493] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11494] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11498] = nodeInfo{
displayName: "MaxStringLength",
class: NodeClassVariable,
description: "The maximum length for a string that can be stored in the owning variable.",
}
nodeInfoMap[11499] = nodeInfo{
displayName: "StartOfArchive",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11500] = nodeInfo{
displayName: "StartOfOnlineArchive",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11501] = nodeInfo{
displayName: "DeleteEventCapability",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11502] = nodeInfo{
displayName: "DeleteEventCapability",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11505] = nodeInfo{
displayName: "StartBound",
class: NodeClassObject,
description: "Retrieve the value at the beginning of the interval using Simple Bounding Values.",
}
nodeInfoMap[11506] = nodeInfo{
displayName: "EndBound",
class: NodeClassObject,
description: "Retrieve the value at the end of the interval using Simple Bounding Values.",
}
nodeInfoMap[11507] = nodeInfo{
displayName: "DeltaBounds",
class: NodeClassObject,
description: "Retrieve the difference between the StartBound and EndBound value in the interval.",
}
nodeInfoMap[11508] = nodeInfo{
displayName: "OptionalPlaceholder",
class: NodeClassObject,
description: "Specifies that zero or more instances with the attributes and references of the instance declaration may appear when a type is instantiated.",
}
nodeInfoMap[11509] = nodeInfo{
displayName: "NamingRule",
class: NodeClassVariable,
description: "Specified the significances of the BrowseName when a type is instantiated.",
}
nodeInfoMap[11510] = nodeInfo{
displayName: "MandatoryPlaceholder",
class: NodeClassObject,
description: "Specifies that one or more instances with the attributes and references of the instance declaration must appear when a type is instantiated.",
}
nodeInfoMap[11511] = nodeInfo{
displayName: "NamingRule",
class: NodeClassVariable,
description: "Specified the significances of the BrowseName when a type is instantiated.",
}
nodeInfoMap[11512] = nodeInfo{
displayName: "MaxArrayLength",
class: NodeClassVariable,
description: "The maximum length for an array that can be stored in the owning variable.",
}
nodeInfoMap[11513] = nodeInfo{
displayName: "EngineeringUnits",
class: NodeClassVariable,
description: "The engineering units for the value of the owning variable.",
}
nodeInfoMap[11527] = nodeInfo{
displayName: "Namespaces",
class: NodeClassObject,
description: "Describes the namespaces supported by the server.",
}
nodeInfoMap[11549] = nodeInfo{
displayName: "MaxArrayLength",
class: NodeClassVariable,
description: "The maximum length for an array value supported by the server.",
}
nodeInfoMap[11550] = nodeInfo{
displayName: "MaxStringLength",
class: NodeClassVariable,
description: "The maximum length for a string value supported by the server.",
}
nodeInfoMap[11551] = nodeInfo{
displayName: "OperationLimits",
class: NodeClassObject,
description: "Defines the limits supported by the server for different operations.",
}
nodeInfoMap[11562] = nodeInfo{
displayName: "<VendorCapability>",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11564] = nodeInfo{
displayName: "OperationLimitsType",
class: NodeClassObjectType,
description: "Identifies the operation limits imposed by the server.",
}
nodeInfoMap[11565] = nodeInfo{
displayName: "MaxNodesPerRead",
class: NodeClassVariable,
description: "The maximum number of operations in a single Read request.",
}
nodeInfoMap[11567] = nodeInfo{
displayName: "MaxNodesPerWrite",
class: NodeClassVariable,
description: "The maximum number of operations in a single Write request.",
}
nodeInfoMap[11569] = nodeInfo{
displayName: "MaxNodesPerMethodCall",
class: NodeClassVariable,
description: "The maximum number of operations in a single Call request.",
}
nodeInfoMap[11570] = nodeInfo{
displayName: "MaxNodesPerBrowse",
class: NodeClassVariable,
description: "The maximum number of operations in a single Browse request.",
}
nodeInfoMap[11571] = nodeInfo{
displayName: "MaxNodesPerRegisterNodes",
class: NodeClassVariable,
description: "The maximum number of operations in a single RegisterNodes request.",
}
nodeInfoMap[11572] = nodeInfo{
displayName: "MaxNodesPerTranslateBrowsePathsToNodeIds",
class: NodeClassVariable,
description: "The maximum number of operations in a single TranslateBrowsePathsToNodeIds request.",
}
nodeInfoMap[11573] = nodeInfo{
displayName: "MaxNodesPerNodeManagement",
class: NodeClassVariable,
description: "The maximum number of operations in a single AddNodes, AddReferences, DeleteNodes or DeleteReferences request.",
}
nodeInfoMap[11574] = nodeInfo{
displayName: "MaxMonitoredItemsPerCall",
class: NodeClassVariable,
description: "The maximum number of operations in a single MonitoredItem related request.",
}
nodeInfoMap[11575] = nodeInfo{
displayName: "FileType",
class: NodeClassObjectType,
description: "An object that represents a file that can be accessed via the server.",
}
nodeInfoMap[11576] = nodeInfo{
displayName: "Size",
class: NodeClassVariable,
description: "The size of the file in bytes.",
}
nodeInfoMap[11579] = nodeInfo{
displayName: "OpenCount",
class: NodeClassVariable,
description: "The current number of open file handles.",
}
nodeInfoMap[11580] = nodeInfo{
displayName: "Open",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[11581] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11582] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11583] = nodeInfo{
displayName: "Close",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[11584] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11585] = nodeInfo{
displayName: "Read",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[11586] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11587] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11588] = nodeInfo{
displayName: "Write",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[11589] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11590] = nodeInfo{
displayName: "GetPosition",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[11591] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11592] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11593] = nodeInfo{
displayName: "SetPosition",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[11594] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11595] = nodeInfo{
displayName: "AddressSpaceFileType",
class: NodeClassObjectType,
description: "A file used to store a namespace exported from the server.",
}
nodeInfoMap[11615] = nodeInfo{
displayName: "ExportNamespace",
class: NodeClassMethod,
description: "Updates the file by exporting the server namespace.",
}
nodeInfoMap[11616] = nodeInfo{
displayName: "NamespaceMetadataType",
class: NodeClassObjectType,
description: "Provides the metadata for a namespace used by the server.",
}
nodeInfoMap[11617] = nodeInfo{
displayName: "NamespaceUri",
class: NodeClassVariable,
description: "The URI of the namespace.",
}
nodeInfoMap[11618] = nodeInfo{
displayName: "NamespaceVersion",
class: NodeClassVariable,
description: "The human readable string representing version of the namespace.",
}
nodeInfoMap[11619] = nodeInfo{
displayName: "NamespacePublicationDate",
class: NodeClassVariable,
description: "The publication date for the namespace.",
}
nodeInfoMap[11620] = nodeInfo{
displayName: "IsNamespaceSubset",
class: NodeClassVariable,
description: "If TRUE then the server only supports a subset of the namespace.",
}
nodeInfoMap[11621] = nodeInfo{
displayName: "StaticNodeIdTypes",
class: NodeClassVariable,
description: "A list of IdTypes for nodes which are the same in every server that exposes them.",
}
nodeInfoMap[11622] = nodeInfo{
displayName: "StaticNumericNodeIdRange",
class: NodeClassVariable,
description: "A list of ranges for numeric node ids which are the same in every server that exposes them.",
}
nodeInfoMap[11623] = nodeInfo{
displayName: "StaticStringNodeIdPattern",
class: NodeClassVariable,
description: "A regular expression which matches string node ids are the same in every server that exposes them.",
}
nodeInfoMap[11624] = nodeInfo{
displayName: "NamespaceFile",
class: NodeClassObject,
description: "A file containing the nodes of the namespace.",
}
nodeInfoMap[11625] = nodeInfo{
displayName: "Size",
class: NodeClassVariable,
description: "The size of the file in bytes.",
}
nodeInfoMap[11628] = nodeInfo{
displayName: "OpenCount",
class: NodeClassVariable,
description: "The current number of open file handles.",
}
nodeInfoMap[11629] = nodeInfo{
displayName: "Open",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[11630] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11631] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11632] = nodeInfo{
displayName: "Close",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[11633] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11634] = nodeInfo{
displayName: "Read",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[11635] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11636] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11637] = nodeInfo{
displayName: "Write",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[11638] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11639] = nodeInfo{
displayName: "GetPosition",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[11640] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11641] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11642] = nodeInfo{
displayName: "SetPosition",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[11643] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11645] = nodeInfo{
displayName: "NamespacesType",
class: NodeClassObjectType,
description: "A container for the namespace metadata provided by the server.",
}
nodeInfoMap[11646] = nodeInfo{
displayName: "<NamespaceIdentifier>",
class: NodeClassObject,
description: "",
}
nodeInfoMap[11647] = nodeInfo{
displayName: "NamespaceUri",
class: NodeClassVariable,
description: "The URI of the namespace.",
}
nodeInfoMap[11648] = nodeInfo{
displayName: "NamespaceVersion",
class: NodeClassVariable,
description: "The human readable string representing version of the namespace.",
}
nodeInfoMap[11649] = nodeInfo{
displayName: "NamespacePublicationDate",
class: NodeClassVariable,
description: "The publication date for the namespace.",
}
nodeInfoMap[11650] = nodeInfo{
displayName: "IsNamespaceSubset",
class: NodeClassVariable,
description: "If TRUE then the server only supports a subset of the namespace.",
}
nodeInfoMap[11651] = nodeInfo{
displayName: "StaticNodeIdTypes",
class: NodeClassVariable,
description: "A list of IdTypes for nodes which are the same in every server that exposes them.",
}
nodeInfoMap[11652] = nodeInfo{
displayName: "StaticNumericNodeIdRange",
class: NodeClassVariable,
description: "A list of ranges for numeric node ids which are the same in every server that exposes them.",
}
nodeInfoMap[11653] = nodeInfo{
displayName: "StaticStringNodeIdPattern",
class: NodeClassVariable,
description: "A regular expression which matches string node ids are the same in every server that exposes them.",
}
nodeInfoMap[11675] = nodeInfo{
displayName: "AddressSpaceFile",
class: NodeClassObject,
description: "A file containing the nodes of the namespace.",
}
nodeInfoMap[11676] = nodeInfo{
displayName: "Size",
class: NodeClassVariable,
description: "The size of the file in bytes.",
}
nodeInfoMap[11679] = nodeInfo{
displayName: "OpenCount",
class: NodeClassVariable,
description: "The current number of open file handles.",
}
nodeInfoMap[11680] = nodeInfo{
displayName: "Open",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[11681] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11682] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11683] = nodeInfo{
displayName: "Close",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[11684] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11685] = nodeInfo{
displayName: "Read",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[11686] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11687] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11688] = nodeInfo{
displayName: "Write",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[11689] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11690] = nodeInfo{
displayName: "GetPosition",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[11691] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11692] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11693] = nodeInfo{
displayName: "SetPosition",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[11694] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11696] = nodeInfo{
displayName: "SystemState",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11697] = nodeInfo{
displayName: "SampledMonitoredItemsCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11698] = nodeInfo{
displayName: "MaxSampledMonitoredItemsCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11699] = nodeInfo{
displayName: "DisabledMonitoredItemsSamplingCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11701] = nodeInfo{
displayName: "BitMask",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11702] = nodeInfo{
displayName: "MaxArrayLength",
class: NodeClassVariable,
description: "The maximum length for an array value supported by the server.",
}
nodeInfoMap[11703] = nodeInfo{
displayName: "MaxStringLength",
class: NodeClassVariable,
description: "The maximum length for a string value supported by the server.",
}
nodeInfoMap[11704] = nodeInfo{
displayName: "OperationLimits",
class: NodeClassObject,
description: "Defines the limits supported by the server for different operations.",
}
nodeInfoMap[11705] = nodeInfo{
displayName: "MaxNodesPerRead",
class: NodeClassVariable,
description: "The maximum number of operations in a single Read request.",
}
nodeInfoMap[11707] = nodeInfo{
displayName: "MaxNodesPerWrite",
class: NodeClassVariable,
description: "The maximum number of operations in a single Write request.",
}
nodeInfoMap[11709] = nodeInfo{
displayName: "MaxNodesPerMethodCall",
class: NodeClassVariable,
description: "The maximum number of operations in a single Call request.",
}
nodeInfoMap[11710] = nodeInfo{
displayName: "MaxNodesPerBrowse",
class: NodeClassVariable,
description: "The maximum number of operations in a single Browse request.",
}
nodeInfoMap[11711] = nodeInfo{
displayName: "MaxNodesPerRegisterNodes",
class: NodeClassVariable,
description: "The maximum number of operations in a single RegisterNodes request.",
}
nodeInfoMap[11712] = nodeInfo{
displayName: "MaxNodesPerTranslateBrowsePathsToNodeIds",
class: NodeClassVariable,
description: "The maximum number of operations in a single TranslateBrowsePathsToNodeIds request.",
}
nodeInfoMap[11713] = nodeInfo{
displayName: "MaxNodesPerNodeManagement",
class: NodeClassVariable,
description: "The maximum number of operations in a single AddNodes, AddReferences, DeleteNodes or DeleteReferences request.",
}
nodeInfoMap[11714] = nodeInfo{
displayName: "MaxMonitoredItemsPerCall",
class: NodeClassVariable,
description: "The maximum number of operations in a single MonitoredItem related request.",
}
nodeInfoMap[11715] = nodeInfo{
displayName: "Namespaces",
class: NodeClassObject,
description: "Describes the namespaces supported by the server.",
}
nodeInfoMap[11737] = nodeInfo{
displayName: "BitFieldMaskDataType",
class: NodeClassDataType,
description: "A mask of 32 bits that can be updated individually by using the top 32 bits as a mask.",
}
nodeInfoMap[11753] = nodeInfo{
displayName: "SystemOffNormalAlarmType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[11851] = nodeInfo{
displayName: "Comment",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11852] = nodeInfo{
displayName: "SelectedResponse",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11853] = nodeInfo{
displayName: "Comment",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11854] = nodeInfo{
displayName: "Comment",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11855] = nodeInfo{
displayName: "ShelvingTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11856] = nodeInfo{
displayName: "AuditProgramTransitionEventType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[11875] = nodeInfo{
displayName: "TransitionNumber",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11876] = nodeInfo{
displayName: "AggregateFunctions",
class: NodeClassObject,
description: "",
}
nodeInfoMap[11878] = nodeInfo{
displayName: "EnumValues",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11881] = nodeInfo{
displayName: "EnumValues",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11882] = nodeInfo{
displayName: "EnumValues",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11884] = nodeInfo{
displayName: "EnumValues",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11885] = nodeInfo{
displayName: "EnumValues",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11891] = nodeInfo{
displayName: "UnauthorizedRequestCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11892] = nodeInfo{
displayName: "UnauthorizedRequestCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11939] = nodeInfo{
displayName: "OpenFileMode",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[11940] = nodeInfo{
displayName: "EnumValues",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11943] = nodeInfo{
displayName: "EndpointUrlListDataType",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[11944] = nodeInfo{
displayName: "NetworkGroupDataType",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[11945] = nodeInfo{
displayName: "NonTransparentNetworkRedundancyType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[11948] = nodeInfo{
displayName: "ServerNetworkGroups",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11949] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[11950] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[11951] = nodeInfo{
displayName: "EndpointUrlListDataType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11954] = nodeInfo{
displayName: "NetworkGroupDataType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11957] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[11958] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[11959] = nodeInfo{
displayName: "EndpointUrlListDataType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[11962] = nodeInfo{
displayName: "NetworkGroupDataType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12021] = nodeInfo{
displayName: "ArrayItemType",
class: NodeClassVariableType,
description: "",
}
nodeInfoMap[12024] = nodeInfo{
displayName: "InstrumentRange",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12025] = nodeInfo{
displayName: "EURange",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12026] = nodeInfo{
displayName: "EngineeringUnits",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12027] = nodeInfo{
displayName: "Title",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12028] = nodeInfo{
displayName: "AxisScaleType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12029] = nodeInfo{
displayName: "YArrayItemType",
class: NodeClassVariableType,
description: "",
}
nodeInfoMap[12037] = nodeInfo{
displayName: "XAxisDefinition",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12038] = nodeInfo{
displayName: "XYArrayItemType",
class: NodeClassVariableType,
description: "",
}
nodeInfoMap[12046] = nodeInfo{
displayName: "XAxisDefinition",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12047] = nodeInfo{
displayName: "ImageItemType",
class: NodeClassVariableType,
description: "",
}
nodeInfoMap[12055] = nodeInfo{
displayName: "XAxisDefinition",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12056] = nodeInfo{
displayName: "YAxisDefinition",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12057] = nodeInfo{
displayName: "CubeItemType",
class: NodeClassVariableType,
description: "",
}
nodeInfoMap[12065] = nodeInfo{
displayName: "XAxisDefinition",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12066] = nodeInfo{
displayName: "YAxisDefinition",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12067] = nodeInfo{
displayName: "ZAxisDefinition",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12068] = nodeInfo{
displayName: "NDimensionArrayItemType",
class: NodeClassVariableType,
description: "",
}
nodeInfoMap[12076] = nodeInfo{
displayName: "AxisDefinition",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12077] = nodeInfo{
displayName: "AxisScaleEnumeration",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[12078] = nodeInfo{
displayName: "EnumStrings",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12079] = nodeInfo{
displayName: "AxisInformation",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[12080] = nodeInfo{
displayName: "XVType",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[12081] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[12082] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[12083] = nodeInfo{
displayName: "AxisInformation",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12086] = nodeInfo{
displayName: "XVType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12089] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[12090] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[12091] = nodeInfo{
displayName: "AxisInformation",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12094] = nodeInfo{
displayName: "XVType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12097] = nodeInfo{
displayName: "<ClientName>",
class: NodeClassObject,
description: "",
}
nodeInfoMap[12098] = nodeInfo{
displayName: "SessionDiagnostics",
class: NodeClassVariable,
description: "Diagnostics information for an active session.",
}
nodeInfoMap[12099] = nodeInfo{
displayName: "SessionId",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12100] = nodeInfo{
displayName: "SessionName",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12101] = nodeInfo{
displayName: "ClientDescription",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12102] = nodeInfo{
displayName: "ServerUri",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12103] = nodeInfo{
displayName: "EndpointUrl",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12104] = nodeInfo{
displayName: "LocaleIds",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12105] = nodeInfo{
displayName: "ActualSessionTimeout",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12106] = nodeInfo{
displayName: "MaxResponseMessageSize",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12107] = nodeInfo{
displayName: "ClientConnectionTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12108] = nodeInfo{
displayName: "ClientLastContactTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12109] = nodeInfo{
displayName: "CurrentSubscriptionsCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12110] = nodeInfo{
displayName: "CurrentMonitoredItemsCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12111] = nodeInfo{
displayName: "CurrentPublishRequestsInQueue",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12112] = nodeInfo{
displayName: "TotalRequestCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12113] = nodeInfo{
displayName: "UnauthorizedRequestCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12114] = nodeInfo{
displayName: "ReadCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12115] = nodeInfo{
displayName: "HistoryReadCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12116] = nodeInfo{
displayName: "WriteCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12117] = nodeInfo{
displayName: "HistoryUpdateCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12118] = nodeInfo{
displayName: "CallCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12119] = nodeInfo{
displayName: "CreateMonitoredItemsCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12120] = nodeInfo{
displayName: "ModifyMonitoredItemsCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12121] = nodeInfo{
displayName: "SetMonitoringModeCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12122] = nodeInfo{
displayName: "SetTriggeringCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12123] = nodeInfo{
displayName: "DeleteMonitoredItemsCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12124] = nodeInfo{
displayName: "CreateSubscriptionCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12125] = nodeInfo{
displayName: "ModifySubscriptionCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12126] = nodeInfo{
displayName: "SetPublishingModeCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12127] = nodeInfo{
displayName: "PublishCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12128] = nodeInfo{
displayName: "RepublishCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12129] = nodeInfo{
displayName: "TransferSubscriptionsCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12130] = nodeInfo{
displayName: "DeleteSubscriptionsCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12131] = nodeInfo{
displayName: "AddNodesCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12132] = nodeInfo{
displayName: "AddReferencesCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12133] = nodeInfo{
displayName: "DeleteNodesCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12134] = nodeInfo{
displayName: "DeleteReferencesCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12135] = nodeInfo{
displayName: "BrowseCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12136] = nodeInfo{
displayName: "BrowseNextCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12137] = nodeInfo{
displayName: "TranslateBrowsePathsToNodeIdsCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12138] = nodeInfo{
displayName: "QueryFirstCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12139] = nodeInfo{
displayName: "QueryNextCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12140] = nodeInfo{
displayName: "RegisterNodesCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12141] = nodeInfo{
displayName: "UnregisterNodesCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12142] = nodeInfo{
displayName: "SessionSecurityDiagnostics",
class: NodeClassVariable,
description: "Security related diagnostics information for an active session.",
}
nodeInfoMap[12143] = nodeInfo{
displayName: "SessionId",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12144] = nodeInfo{
displayName: "ClientUserIdOfSession",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12145] = nodeInfo{
displayName: "ClientUserIdHistory",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12146] = nodeInfo{
displayName: "AuthenticationMechanism",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12147] = nodeInfo{
displayName: "Encoding",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12148] = nodeInfo{
displayName: "TransportProtocol",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12149] = nodeInfo{
displayName: "SecurityMode",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12150] = nodeInfo{
displayName: "SecurityPolicyUri",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12151] = nodeInfo{
displayName: "ClientCertificate",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12152] = nodeInfo{
displayName: "SubscriptionDiagnosticsArray",
class: NodeClassVariable,
description: "A list of diagnostics for each subscription owned by the session.",
}
nodeInfoMap[12161] = nodeInfo{
displayName: "MaxNodesPerHistoryReadData",
class: NodeClassVariable,
description: "The maximum number of operations in a single data HistoryRead request.",
}
nodeInfoMap[12162] = nodeInfo{
displayName: "MaxNodesPerHistoryReadEvents",
class: NodeClassVariable,
description: "The maximum number of operations in a single event HistoryRead request.",
}
nodeInfoMap[12163] = nodeInfo{
displayName: "MaxNodesPerHistoryUpdateData",
class: NodeClassVariable,
description: "The maximum number of operations in a single data HistoryUpdate request.",
}
nodeInfoMap[12164] = nodeInfo{
displayName: "MaxNodesPerHistoryUpdateEvents",
class: NodeClassVariable,
description: "The maximum number of operations in a single event HistoryUpdate request.",
}
nodeInfoMap[12165] = nodeInfo{
displayName: "MaxNodesPerHistoryReadData",
class: NodeClassVariable,
description: "The maximum number of operations in a single data HistoryRead request.",
}
nodeInfoMap[12166] = nodeInfo{
displayName: "MaxNodesPerHistoryReadEvents",
class: NodeClassVariable,
description: "The maximum number of operations in a single event HistoryRead request.",
}
nodeInfoMap[12167] = nodeInfo{
displayName: "MaxNodesPerHistoryUpdateData",
class: NodeClassVariable,
description: "The maximum number of operations in a single data HistoryUpdate request.",
}
nodeInfoMap[12168] = nodeInfo{
displayName: "MaxNodesPerHistoryUpdateEvents",
class: NodeClassVariable,
description: "The maximum number of operations in a single event HistoryUpdate request.",
}
nodeInfoMap[12169] = nodeInfo{
displayName: "EnumValues",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12170] = nodeInfo{
displayName: "ViewVersion",
class: NodeClassVariable,
description: "The version number of the view.",
}
nodeInfoMap[12171] = nodeInfo{
displayName: "ComplexNumberType",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[12172] = nodeInfo{
displayName: "DoubleComplexNumberType",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[12173] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[12174] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[12175] = nodeInfo{
displayName: "ComplexNumberType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12178] = nodeInfo{
displayName: "DoubleComplexNumberType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12181] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[12182] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[12183] = nodeInfo{
displayName: "ComplexNumberType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12186] = nodeInfo{
displayName: "DoubleComplexNumberType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12189] = nodeInfo{
displayName: "ServerOnNetwork",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[12195] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[12201] = nodeInfo{
displayName: "ServerOnNetwork",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12207] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[12213] = nodeInfo{
displayName: "ServerOnNetwork",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12502] = nodeInfo{
displayName: "Context",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12503] = nodeInfo{
displayName: "Progress",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12522] = nodeInfo{
displayName: "TrustListType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[12542] = nodeInfo{
displayName: "LastUpdateTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12543] = nodeInfo{
displayName: "OpenWithMasks",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[12544] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12545] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12546] = nodeInfo{
displayName: "CloseAndUpdate",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[12547] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12548] = nodeInfo{
displayName: "AddCertificate",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[12549] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12550] = nodeInfo{
displayName: "RemoveCertificate",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[12551] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12552] = nodeInfo{
displayName: "TrustListMasks",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[12553] = nodeInfo{
displayName: "EnumValues",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12554] = nodeInfo{
displayName: "TrustListDataType",
class: NodeClassDataType,
description: "",
}
nodeInfoMap[12555] = nodeInfo{
displayName: "CertificateGroupType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[12556] = nodeInfo{
displayName: "CertificateType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[12557] = nodeInfo{
displayName: "ApplicationCertificateType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[12558] = nodeInfo{
displayName: "HttpsCertificateType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[12559] = nodeInfo{
displayName: "RsaMinApplicationCertificateType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[12560] = nodeInfo{
displayName: "RsaSha256ApplicationCertificateType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[12561] = nodeInfo{
displayName: "TrustListUpdatedAuditEventType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[12581] = nodeInfo{
displayName: "ServerConfigurationType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[12583] = nodeInfo{
displayName: "SupportedPrivateKeyFormats",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12584] = nodeInfo{
displayName: "MaxTrustListSize",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12585] = nodeInfo{
displayName: "MulticastDnsEnabled",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12616] = nodeInfo{
displayName: "UpdateCertificate",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[12617] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12618] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12620] = nodeInfo{
displayName: "CertificateUpdatedAuditEventType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[12637] = nodeInfo{
displayName: "ServerConfiguration",
class: NodeClassObject,
description: "",
}
nodeInfoMap[12639] = nodeInfo{
displayName: "SupportedPrivateKeyFormats",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12640] = nodeInfo{
displayName: "MaxTrustListSize",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12641] = nodeInfo{
displayName: "MulticastDnsEnabled",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12642] = nodeInfo{
displayName: "TrustList",
class: NodeClassObject,
description: "",
}
nodeInfoMap[12643] = nodeInfo{
displayName: "Size",
class: NodeClassVariable,
description: "The size of the file in bytes.",
}
nodeInfoMap[12646] = nodeInfo{
displayName: "OpenCount",
class: NodeClassVariable,
description: "The current number of open file handles.",
}
nodeInfoMap[12647] = nodeInfo{
displayName: "Open",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[12648] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12649] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12650] = nodeInfo{
displayName: "Close",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[12651] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12652] = nodeInfo{
displayName: "Read",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[12653] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12654] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12655] = nodeInfo{
displayName: "Write",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[12656] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12657] = nodeInfo{
displayName: "GetPosition",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[12658] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12659] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12660] = nodeInfo{
displayName: "SetPosition",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[12661] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12662] = nodeInfo{
displayName: "LastUpdateTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12663] = nodeInfo{
displayName: "OpenWithMasks",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[12664] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12665] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12666] = nodeInfo{
displayName: "CloseAndUpdate",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[12667] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12668] = nodeInfo{
displayName: "AddCertificate",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[12669] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12670] = nodeInfo{
displayName: "RemoveCertificate",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[12671] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12676] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[12677] = nodeInfo{
displayName: "TrustListDataType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12680] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[12681] = nodeInfo{
displayName: "TrustListDataType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12686] = nodeInfo{
displayName: "Writable",
class: NodeClassVariable,
description: "Whether the file is writable.",
}
nodeInfoMap[12687] = nodeInfo{
displayName: "UserWritable",
class: NodeClassVariable,
description: "Whether the file is writable by the current user.",
}
nodeInfoMap[12690] = nodeInfo{
displayName: "Writable",
class: NodeClassVariable,
description: "Whether the file is writable.",
}
nodeInfoMap[12691] = nodeInfo{
displayName: "UserWritable",
class: NodeClassVariable,
description: "Whether the file is writable by the current user.",
}
nodeInfoMap[12694] = nodeInfo{
displayName: "Writable",
class: NodeClassVariable,
description: "Whether the file is writable.",
}
nodeInfoMap[12695] = nodeInfo{
displayName: "UserWritable",
class: NodeClassVariable,
description: "Whether the file is writable by the current user.",
}
nodeInfoMap[12705] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12708] = nodeInfo{
displayName: "ServerCapabilities",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12710] = nodeInfo{
displayName: "ServerCapabilities",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12712] = nodeInfo{
displayName: "RelativePathElement",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12715] = nodeInfo{
displayName: "RelativePath",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12718] = nodeInfo{
displayName: "RelativePathElement",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12721] = nodeInfo{
displayName: "RelativePath",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12731] = nodeInfo{
displayName: "CreateSigningRequest",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[12732] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12733] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12734] = nodeInfo{
displayName: "ApplyChanges",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[12737] = nodeInfo{
displayName: "CreateSigningRequest",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[12738] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12739] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12740] = nodeInfo{
displayName: "ApplyChanges",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[12745] = nodeInfo{
displayName: "OptionSetValues",
class: NodeClassVariable,
description: "Contains the human-readable representation for each bit of the bit mask.",
}
nodeInfoMap[12746] = nodeInfo{
displayName: "SetSubscriptionDurable",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[12747] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12748] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12749] = nodeInfo{
displayName: "SetSubscriptionDurable",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[12750] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12751] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12755] = nodeInfo{
displayName: "OptionSet",
class: NodeClassDataType,
description: "This abstract Structured DataType is the base DataType for all DataTypes representing a bit mask.",
}
nodeInfoMap[12756] = nodeInfo{
displayName: "Union",
class: NodeClassDataType,
description: "This abstract DataType is the base DataType for all union DataTypes.",
}
nodeInfoMap[12757] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[12758] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[12759] = nodeInfo{
displayName: "OptionSet",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12762] = nodeInfo{
displayName: "Union",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12765] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[12766] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[12767] = nodeInfo{
displayName: "OptionSet",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12770] = nodeInfo{
displayName: "Union",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12775] = nodeInfo{
displayName: "GetRejectedList",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[12776] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12777] = nodeInfo{
displayName: "GetRejectedList",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[12778] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12779] = nodeInfo{
displayName: "SamplingIntervalDiagnostics",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12780] = nodeInfo{
displayName: "SamplingInterval",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12781] = nodeInfo{
displayName: "SampledMonitoredItemsCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12782] = nodeInfo{
displayName: "MaxSampledMonitoredItemsCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12783] = nodeInfo{
displayName: "DisabledMonitoredItemsSamplingCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12784] = nodeInfo{
displayName: "SubscriptionDiagnostics",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12785] = nodeInfo{
displayName: "SessionId",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12786] = nodeInfo{
displayName: "SubscriptionId",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12787] = nodeInfo{
displayName: "Priority",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12788] = nodeInfo{
displayName: "PublishingInterval",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12789] = nodeInfo{
displayName: "MaxKeepAliveCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12790] = nodeInfo{
displayName: "MaxLifetimeCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12791] = nodeInfo{
displayName: "MaxNotificationsPerPublish",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12792] = nodeInfo{
displayName: "PublishingEnabled",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12793] = nodeInfo{
displayName: "ModifyCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12794] = nodeInfo{
displayName: "EnableCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12795] = nodeInfo{
displayName: "DisableCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12796] = nodeInfo{
displayName: "RepublishRequestCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12797] = nodeInfo{
displayName: "RepublishMessageRequestCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12798] = nodeInfo{
displayName: "RepublishMessageCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12799] = nodeInfo{
displayName: "TransferRequestCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12800] = nodeInfo{
displayName: "TransferredToAltClientCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12801] = nodeInfo{
displayName: "TransferredToSameClientCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12802] = nodeInfo{
displayName: "PublishRequestCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12803] = nodeInfo{
displayName: "DataChangeNotificationsCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12804] = nodeInfo{
displayName: "EventNotificationsCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12805] = nodeInfo{
displayName: "NotificationsCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12806] = nodeInfo{
displayName: "LatePublishRequestCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12807] = nodeInfo{
displayName: "CurrentKeepAliveCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12808] = nodeInfo{
displayName: "CurrentLifetimeCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12809] = nodeInfo{
displayName: "UnacknowledgedMessageCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12810] = nodeInfo{
displayName: "DiscardedMessageCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12811] = nodeInfo{
displayName: "MonitoredItemCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12812] = nodeInfo{
displayName: "DisabledMonitoredItemCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12813] = nodeInfo{
displayName: "MonitoringQueueOverflowCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12814] = nodeInfo{
displayName: "NextSequenceNumber",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12815] = nodeInfo{
displayName: "EventQueueOverFlowCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12816] = nodeInfo{
displayName: "SessionDiagnostics",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12817] = nodeInfo{
displayName: "SessionId",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12818] = nodeInfo{
displayName: "SessionName",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12819] = nodeInfo{
displayName: "ClientDescription",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12820] = nodeInfo{
displayName: "ServerUri",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12821] = nodeInfo{
displayName: "EndpointUrl",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12822] = nodeInfo{
displayName: "LocaleIds",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12823] = nodeInfo{
displayName: "ActualSessionTimeout",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12824] = nodeInfo{
displayName: "MaxResponseMessageSize",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12825] = nodeInfo{
displayName: "ClientConnectionTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12826] = nodeInfo{
displayName: "ClientLastContactTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12827] = nodeInfo{
displayName: "CurrentSubscriptionsCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12828] = nodeInfo{
displayName: "CurrentMonitoredItemsCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12829] = nodeInfo{
displayName: "CurrentPublishRequestsInQueue",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12830] = nodeInfo{
displayName: "TotalRequestCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12831] = nodeInfo{
displayName: "UnauthorizedRequestCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12832] = nodeInfo{
displayName: "ReadCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12833] = nodeInfo{
displayName: "HistoryReadCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12834] = nodeInfo{
displayName: "WriteCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12835] = nodeInfo{
displayName: "HistoryUpdateCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12836] = nodeInfo{
displayName: "CallCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12837] = nodeInfo{
displayName: "CreateMonitoredItemsCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12838] = nodeInfo{
displayName: "ModifyMonitoredItemsCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12839] = nodeInfo{
displayName: "SetMonitoringModeCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12840] = nodeInfo{
displayName: "SetTriggeringCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12841] = nodeInfo{
displayName: "DeleteMonitoredItemsCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12842] = nodeInfo{
displayName: "CreateSubscriptionCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12843] = nodeInfo{
displayName: "ModifySubscriptionCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12844] = nodeInfo{
displayName: "SetPublishingModeCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12845] = nodeInfo{
displayName: "PublishCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12846] = nodeInfo{
displayName: "RepublishCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12847] = nodeInfo{
displayName: "TransferSubscriptionsCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12848] = nodeInfo{
displayName: "DeleteSubscriptionsCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12849] = nodeInfo{
displayName: "AddNodesCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12850] = nodeInfo{
displayName: "AddReferencesCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12851] = nodeInfo{
displayName: "DeleteNodesCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12852] = nodeInfo{
displayName: "DeleteReferencesCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12853] = nodeInfo{
displayName: "BrowseCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12854] = nodeInfo{
displayName: "BrowseNextCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12855] = nodeInfo{
displayName: "TranslateBrowsePathsToNodeIdsCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12856] = nodeInfo{
displayName: "QueryFirstCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12857] = nodeInfo{
displayName: "QueryNextCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12858] = nodeInfo{
displayName: "RegisterNodesCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12859] = nodeInfo{
displayName: "UnregisterNodesCount",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12860] = nodeInfo{
displayName: "SessionSecurityDiagnostics",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12861] = nodeInfo{
displayName: "SessionId",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12862] = nodeInfo{
displayName: "ClientUserIdOfSession",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12863] = nodeInfo{
displayName: "ClientUserIdHistory",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12864] = nodeInfo{
displayName: "AuthenticationMechanism",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12865] = nodeInfo{
displayName: "Encoding",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12866] = nodeInfo{
displayName: "TransportProtocol",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12867] = nodeInfo{
displayName: "SecurityMode",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12868] = nodeInfo{
displayName: "SecurityPolicyUri",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12869] = nodeInfo{
displayName: "ClientCertificate",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12871] = nodeInfo{
displayName: "ResendData",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[12872] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12873] = nodeInfo{
displayName: "ResendData",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[12874] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12877] = nodeInfo{
displayName: "NormalizedString",
class: NodeClassDataType,
description: "A string normalized based on the rules in the unicode specification.",
}
nodeInfoMap[12878] = nodeInfo{
displayName: "DecimalString",
class: NodeClassDataType,
description: "An arbitraty numeric value.",
}
nodeInfoMap[12879] = nodeInfo{
displayName: "DurationString",
class: NodeClassDataType,
description: "A period of time formatted as defined in ISO 8601-2000.",
}
nodeInfoMap[12880] = nodeInfo{
displayName: "TimeString",
class: NodeClassDataType,
description: "A time formatted as defined in ISO 8601-2000.",
}
nodeInfoMap[12881] = nodeInfo{
displayName: "DateString",
class: NodeClassDataType,
description: "A date formatted as defined in ISO 8601-2000.",
}
nodeInfoMap[12882] = nodeInfo{
displayName: "EstimatedReturnTime",
class: NodeClassVariable,
description: "Indicates the time at which the Server is expected to be available in the state RUNNING.",
}
nodeInfoMap[12883] = nodeInfo{
displayName: "RequestServerStateChange",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[12884] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12885] = nodeInfo{
displayName: "EstimatedReturnTime",
class: NodeClassVariable,
description: "Indicates the time at which the Server is expected to be available in the state RUNNING.",
}
nodeInfoMap[12886] = nodeInfo{
displayName: "RequestServerStateChange",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[12887] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12890] = nodeInfo{
displayName: "DiscoveryConfiguration",
class: NodeClassDataType,
description: "A base type for discovery configuration information.",
}
nodeInfoMap[12891] = nodeInfo{
displayName: "MdnsDiscoveryConfiguration",
class: NodeClassDataType,
description: "The discovery information needed for mDNS registration.",
}
nodeInfoMap[12892] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[12893] = nodeInfo{
displayName: "Default XML",
class: NodeClassObject,
description: "",
}
nodeInfoMap[12894] = nodeInfo{
displayName: "DiscoveryConfiguration",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12897] = nodeInfo{
displayName: "MdnsDiscoveryConfiguration",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12900] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[12901] = nodeInfo{
displayName: "Default Binary",
class: NodeClassObject,
description: "",
}
nodeInfoMap[12902] = nodeInfo{
displayName: "DiscoveryConfiguration",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12905] = nodeInfo{
displayName: "MdnsDiscoveryConfiguration",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[12908] = nodeInfo{
displayName: "MaxByteStringLength",
class: NodeClassVariable,
description: "The maximum length for a byte string that can be stored in the owning variable.",
}
nodeInfoMap[12910] = nodeInfo{
displayName: "MaxByteStringLength",
class: NodeClassVariable,
description: "The maximum length for a byte string value supported by the server.",
}
nodeInfoMap[12911] = nodeInfo{
displayName: "MaxByteStringLength",
class: NodeClassVariable,
description: "The maximum length for a byte string value supported by the server.",
}
nodeInfoMap[12912] = nodeInfo{
displayName: "ConditionRefresh2",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[12913] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13225] = nodeInfo{
displayName: "CertificateExpirationAlarmType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[13325] = nodeInfo{
displayName: "ExpirationDate",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13326] = nodeInfo{
displayName: "CertificateType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13327] = nodeInfo{
displayName: "Certificate",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13341] = nodeInfo{
displayName: "MimeType",
class: NodeClassVariable,
description: "The content of the file.",
}
nodeInfoMap[13353] = nodeInfo{
displayName: "FileDirectoryType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[13354] = nodeInfo{
displayName: "<FileDirectoryName>",
class: NodeClassObject,
description: "",
}
nodeInfoMap[13355] = nodeInfo{
displayName: "CreateDirectory",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13356] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13357] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13358] = nodeInfo{
displayName: "CreateFile",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13359] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13360] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13361] = nodeInfo{
displayName: "Delete",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13362] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13363] = nodeInfo{
displayName: "MoveOrCopy",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13364] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13365] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13366] = nodeInfo{
displayName: "<FileName>",
class: NodeClassObject,
description: "",
}
nodeInfoMap[13367] = nodeInfo{
displayName: "Size",
class: NodeClassVariable,
description: "The size of the file in bytes.",
}
nodeInfoMap[13368] = nodeInfo{
displayName: "Writable",
class: NodeClassVariable,
description: "Whether the file is writable.",
}
nodeInfoMap[13369] = nodeInfo{
displayName: "UserWritable",
class: NodeClassVariable,
description: "Whether the file is writable by the current user.",
}
nodeInfoMap[13370] = nodeInfo{
displayName: "OpenCount",
class: NodeClassVariable,
description: "The current number of open file handles.",
}
nodeInfoMap[13372] = nodeInfo{
displayName: "Open",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13373] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13374] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13375] = nodeInfo{
displayName: "Close",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13376] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13377] = nodeInfo{
displayName: "Read",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13378] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13379] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13380] = nodeInfo{
displayName: "Write",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13381] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13382] = nodeInfo{
displayName: "GetPosition",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13383] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13384] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13385] = nodeInfo{
displayName: "SetPosition",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13386] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13387] = nodeInfo{
displayName: "CreateDirectory",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13388] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13389] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13390] = nodeInfo{
displayName: "CreateFile",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13391] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13392] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13393] = nodeInfo{
displayName: "Delete",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13394] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13395] = nodeInfo{
displayName: "MoveOrCopy",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13396] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13397] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13599] = nodeInfo{
displayName: "TrustList",
class: NodeClassObject,
description: "",
}
nodeInfoMap[13600] = nodeInfo{
displayName: "Size",
class: NodeClassVariable,
description: "The size of the file in bytes.",
}
nodeInfoMap[13601] = nodeInfo{
displayName: "Writable",
class: NodeClassVariable,
description: "Whether the file is writable.",
}
nodeInfoMap[13602] = nodeInfo{
displayName: "UserWritable",
class: NodeClassVariable,
description: "Whether the file is writable by the current user.",
}
nodeInfoMap[13603] = nodeInfo{
displayName: "OpenCount",
class: NodeClassVariable,
description: "The current number of open file handles.",
}
nodeInfoMap[13605] = nodeInfo{
displayName: "Open",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13606] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13607] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13608] = nodeInfo{
displayName: "Close",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13609] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13610] = nodeInfo{
displayName: "Read",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13611] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13612] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13613] = nodeInfo{
displayName: "Write",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13614] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13615] = nodeInfo{
displayName: "GetPosition",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13616] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13617] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13618] = nodeInfo{
displayName: "SetPosition",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13619] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13620] = nodeInfo{
displayName: "LastUpdateTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13621] = nodeInfo{
displayName: "OpenWithMasks",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13622] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13623] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13631] = nodeInfo{
displayName: "CertificateTypes",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13735] = nodeInfo{
displayName: "CertificateGroup",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13736] = nodeInfo{
displayName: "CertificateType",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13737] = nodeInfo{
displayName: "UpdateCertificate",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13738] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13739] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13813] = nodeInfo{
displayName: "CertificateGroupFolderType",
class: NodeClassObjectType,
description: "",
}
nodeInfoMap[13814] = nodeInfo{
displayName: "DefaultApplicationGroup",
class: NodeClassObject,
description: "",
}
nodeInfoMap[13815] = nodeInfo{
displayName: "TrustList",
class: NodeClassObject,
description: "",
}
nodeInfoMap[13816] = nodeInfo{
displayName: "Size",
class: NodeClassVariable,
description: "The size of the file in bytes.",
}
nodeInfoMap[13817] = nodeInfo{
displayName: "Writable",
class: NodeClassVariable,
description: "Whether the file is writable.",
}
nodeInfoMap[13818] = nodeInfo{
displayName: "UserWritable",
class: NodeClassVariable,
description: "Whether the file is writable by the current user.",
}
nodeInfoMap[13819] = nodeInfo{
displayName: "OpenCount",
class: NodeClassVariable,
description: "The current number of open file handles.",
}
nodeInfoMap[13821] = nodeInfo{
displayName: "Open",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13822] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13823] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13824] = nodeInfo{
displayName: "Close",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13825] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13826] = nodeInfo{
displayName: "Read",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13827] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13828] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13829] = nodeInfo{
displayName: "Write",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13830] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13831] = nodeInfo{
displayName: "GetPosition",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13832] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13833] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13834] = nodeInfo{
displayName: "SetPosition",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13835] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13836] = nodeInfo{
displayName: "LastUpdateTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13837] = nodeInfo{
displayName: "OpenWithMasks",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13838] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13839] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13847] = nodeInfo{
displayName: "CertificateTypes",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13848] = nodeInfo{
displayName: "DefaultHttpsGroup",
class: NodeClassObject,
description: "",
}
nodeInfoMap[13849] = nodeInfo{
displayName: "TrustList",
class: NodeClassObject,
description: "",
}
nodeInfoMap[13850] = nodeInfo{
displayName: "Size",
class: NodeClassVariable,
description: "The size of the file in bytes.",
}
nodeInfoMap[13851] = nodeInfo{
displayName: "Writable",
class: NodeClassVariable,
description: "Whether the file is writable.",
}
nodeInfoMap[13852] = nodeInfo{
displayName: "UserWritable",
class: NodeClassVariable,
description: "Whether the file is writable by the current user.",
}
nodeInfoMap[13853] = nodeInfo{
displayName: "OpenCount",
class: NodeClassVariable,
description: "The current number of open file handles.",
}
nodeInfoMap[13855] = nodeInfo{
displayName: "Open",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13856] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13857] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13858] = nodeInfo{
displayName: "Close",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13859] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13860] = nodeInfo{
displayName: "Read",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13861] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13862] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13863] = nodeInfo{
displayName: "Write",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13864] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13865] = nodeInfo{
displayName: "GetPosition",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13866] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13867] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13868] = nodeInfo{
displayName: "SetPosition",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13869] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13870] = nodeInfo{
displayName: "LastUpdateTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13871] = nodeInfo{
displayName: "OpenWithMasks",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13872] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13873] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13881] = nodeInfo{
displayName: "CertificateTypes",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13882] = nodeInfo{
displayName: "DefaultUserTokenGroup",
class: NodeClassObject,
description: "",
}
nodeInfoMap[13883] = nodeInfo{
displayName: "TrustList",
class: NodeClassObject,
description: "",
}
nodeInfoMap[13884] = nodeInfo{
displayName: "Size",
class: NodeClassVariable,
description: "The size of the file in bytes.",
}
nodeInfoMap[13885] = nodeInfo{
displayName: "Writable",
class: NodeClassVariable,
description: "Whether the file is writable.",
}
nodeInfoMap[13886] = nodeInfo{
displayName: "UserWritable",
class: NodeClassVariable,
description: "Whether the file is writable by the current user.",
}
nodeInfoMap[13887] = nodeInfo{
displayName: "OpenCount",
class: NodeClassVariable,
description: "The current number of open file handles.",
}
nodeInfoMap[13889] = nodeInfo{
displayName: "Open",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13890] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13891] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13892] = nodeInfo{
displayName: "Close",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13893] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13894] = nodeInfo{
displayName: "Read",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13895] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13896] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13897] = nodeInfo{
displayName: "Write",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13898] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13899] = nodeInfo{
displayName: "GetPosition",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13900] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13901] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13902] = nodeInfo{
displayName: "SetPosition",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13903] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13904] = nodeInfo{
displayName: "LastUpdateTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13905] = nodeInfo{
displayName: "OpenWithMasks",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13906] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13907] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13915] = nodeInfo{
displayName: "CertificateTypes",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13916] = nodeInfo{
displayName: "<AdditionalGroup>",
class: NodeClassObject,
description: "",
}
nodeInfoMap[13917] = nodeInfo{
displayName: "TrustList",
class: NodeClassObject,
description: "",
}
nodeInfoMap[13918] = nodeInfo{
displayName: "Size",
class: NodeClassVariable,
description: "The size of the file in bytes.",
}
nodeInfoMap[13919] = nodeInfo{
displayName: "Writable",
class: NodeClassVariable,
description: "Whether the file is writable.",
}
nodeInfoMap[13920] = nodeInfo{
displayName: "UserWritable",
class: NodeClassVariable,
description: "Whether the file is writable by the current user.",
}
nodeInfoMap[13921] = nodeInfo{
displayName: "OpenCount",
class: NodeClassVariable,
description: "The current number of open file handles.",
}
nodeInfoMap[13923] = nodeInfo{
displayName: "Open",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13924] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13925] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13926] = nodeInfo{
displayName: "Close",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13927] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13928] = nodeInfo{
displayName: "Read",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13929] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13930] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13931] = nodeInfo{
displayName: "Write",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13932] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13933] = nodeInfo{
displayName: "GetPosition",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13934] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13935] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13936] = nodeInfo{
displayName: "SetPosition",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13937] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13938] = nodeInfo{
displayName: "LastUpdateTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13939] = nodeInfo{
displayName: "OpenWithMasks",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13940] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13941] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13949] = nodeInfo{
displayName: "CertificateTypes",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13950] = nodeInfo{
displayName: "CertificateGroups",
class: NodeClassObject,
description: "",
}
nodeInfoMap[13951] = nodeInfo{
displayName: "DefaultApplicationGroup",
class: NodeClassObject,
description: "",
}
nodeInfoMap[13952] = nodeInfo{
displayName: "TrustList",
class: NodeClassObject,
description: "",
}
nodeInfoMap[13953] = nodeInfo{
displayName: "Size",
class: NodeClassVariable,
description: "The size of the file in bytes.",
}
nodeInfoMap[13954] = nodeInfo{
displayName: "Writable",
class: NodeClassVariable,
description: "Whether the file is writable.",
}
nodeInfoMap[13955] = nodeInfo{
displayName: "UserWritable",
class: NodeClassVariable,
description: "Whether the file is writable by the current user.",
}
nodeInfoMap[13956] = nodeInfo{
displayName: "OpenCount",
class: NodeClassVariable,
description: "The current number of open file handles.",
}
nodeInfoMap[13958] = nodeInfo{
displayName: "Open",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13959] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13960] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13961] = nodeInfo{
displayName: "Close",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13962] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13963] = nodeInfo{
displayName: "Read",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13964] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13965] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13966] = nodeInfo{
displayName: "Write",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13967] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13968] = nodeInfo{
displayName: "GetPosition",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13969] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13970] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13971] = nodeInfo{
displayName: "SetPosition",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13972] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13973] = nodeInfo{
displayName: "LastUpdateTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13974] = nodeInfo{
displayName: "OpenWithMasks",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[13975] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13976] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[13984] = nodeInfo{
displayName: "CertificateTypes",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[14053] = nodeInfo{
displayName: "CertificateGroups",
class: NodeClassObject,
description: "",
}
nodeInfoMap[14088] = nodeInfo{
displayName: "DefaultHttpsGroup",
class: NodeClassObject,
description: "",
}
nodeInfoMap[14089] = nodeInfo{
displayName: "TrustList",
class: NodeClassObject,
description: "",
}
nodeInfoMap[14090] = nodeInfo{
displayName: "Size",
class: NodeClassVariable,
description: "The size of the file in bytes.",
}
nodeInfoMap[14091] = nodeInfo{
displayName: "Writable",
class: NodeClassVariable,
description: "Whether the file is writable.",
}
nodeInfoMap[14092] = nodeInfo{
displayName: "UserWritable",
class: NodeClassVariable,
description: "Whether the file is writable by the current user.",
}
nodeInfoMap[14093] = nodeInfo{
displayName: "OpenCount",
class: NodeClassVariable,
description: "The current number of open file handles.",
}
nodeInfoMap[14095] = nodeInfo{
displayName: "Open",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[14096] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[14097] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[14098] = nodeInfo{
displayName: "Close",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[14099] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[14100] = nodeInfo{
displayName: "Read",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[14101] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[14102] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[14103] = nodeInfo{
displayName: "Write",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[14104] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[14105] = nodeInfo{
displayName: "GetPosition",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[14106] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[14107] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[14108] = nodeInfo{
displayName: "SetPosition",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[14109] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[14110] = nodeInfo{
displayName: "LastUpdateTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[14111] = nodeInfo{
displayName: "OpenWithMasks",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[14112] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[14113] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[14114] = nodeInfo{
displayName: "CloseAndUpdate",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[14115] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[14116] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[14117] = nodeInfo{
displayName: "AddCertificate",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[14118] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[14119] = nodeInfo{
displayName: "RemoveCertificate",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[14120] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[14121] = nodeInfo{
displayName: "CertificateTypes",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[14122] = nodeInfo{
displayName: "DefaultUserTokenGroup",
class: NodeClassObject,
description: "",
}
nodeInfoMap[14123] = nodeInfo{
displayName: "TrustList",
class: NodeClassObject,
description: "",
}
nodeInfoMap[14124] = nodeInfo{
displayName: "Size",
class: NodeClassVariable,
description: "The size of the file in bytes.",
}
nodeInfoMap[14125] = nodeInfo{
displayName: "Writable",
class: NodeClassVariable,
description: "Whether the file is writable.",
}
nodeInfoMap[14126] = nodeInfo{
displayName: "UserWritable",
class: NodeClassVariable,
description: "Whether the file is writable by the current user.",
}
nodeInfoMap[14127] = nodeInfo{
displayName: "OpenCount",
class: NodeClassVariable,
description: "The current number of open file handles.",
}
nodeInfoMap[14129] = nodeInfo{
displayName: "Open",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[14130] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[14131] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[14132] = nodeInfo{
displayName: "Close",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[14133] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[14134] = nodeInfo{
displayName: "Read",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[14135] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[14136] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[14137] = nodeInfo{
displayName: "Write",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[14138] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[14139] = nodeInfo{
displayName: "GetPosition",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[14140] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[14141] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[14142] = nodeInfo{
displayName: "SetPosition",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[14143] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[14144] = nodeInfo{
displayName: "LastUpdateTime",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[14145] = nodeInfo{
displayName: "OpenWithMasks",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[14146] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[14147] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[14148] = nodeInfo{
displayName: "CloseAndUpdate",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[14149] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[14150] = nodeInfo{
displayName: "OutputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[14151] = nodeInfo{
displayName: "AddCertificate",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[14152] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[14153] = nodeInfo{
displayName: "RemoveCertificate",
class: NodeClassMethod,
description: "",
}
nodeInfoMap[14154] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[14155] = nodeInfo{
displayName: "CertificateTypes",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[14156] = nodeInfo{
displayName: "DefaultApplicationGroup",
class: NodeClassObject,
description: "",
}
nodeInfoMap[14157] = nodeInfo{
displayName: "Writable",
class: NodeClassVariable,
description: "Whether the file is writable.",
}
nodeInfoMap[14158] = nodeInfo{
displayName: "UserWritable",
class: NodeClassVariable,
description: "Whether the file is writable by the current user.",
}
nodeInfoMap[14160] = nodeInfo{
displayName: "InputArguments",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[14161] = nodeInfo{
displayName: "CertificateTypes",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[14415] = nodeInfo{
displayName: "ServerNetworkGroups",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[14900] = nodeInfo{
displayName: "ExpirationLimit",
class: NodeClassVariable,
description: "",
}
nodeInfoMap[15182] = nodeInfo{
displayName: "http://opcfoundation.org/UA/",
class: NodeClassObject,
description: "",
}
nodeInfoMap[15183] = nodeInfo{
displayName: "NamespaceUri",
class: NodeClassVariable,
description: "The URI of the namespace.",
}
nodeInfoMap[15184] = nodeInfo{
displayName: "NamespaceVersion",
class: NodeClassVariable,
description: "The human readable string representing version of the namespace.",
}
nodeInfoMap[15185] = nodeInfo{
displayName: "NamespacePublicationDate",
class: NodeClassVariable,
description: "The publication date for the namespace.",
}
nodeInfoMap[15186] = nodeInfo{
displayName: "IsNamespaceSubset",
class: NodeClassVariable,
description: "If TRUE then the server only supports a subset of the namespace.",
}
nodeInfoMap[15187] = nodeInfo{
displayName: "StaticNodeIdTypes",
class: NodeClassVariable,
description: "A list of IdTypes for nodes which are the same in every server that exposes them.",
}
nodeInfoMap[15188] = nodeInfo{
displayName: "StaticNumericNodeIdRange",
class: NodeClassVariable,
description: "A list of ranges for numeric node ids which are the same in every server that exposes them.",
}
nodeInfoMap[15189] = nodeInfo{
displayName: "StaticStringNodeIdPattern",
class: NodeClassVariable,
description: "A regular expression which matches string node ids are the same in every server that exposes them.",
}
} | stack/uatype/node_id_info_auto.go | 0.639624 | 0.667256 | node_id_info_auto.go | starcoder |
package gobacktest
import (
"math"
"time"
// "github.com/shopspring/decimal"
)
// Position represents the holdings position in a single symbol. It tracks the
// net quantity, the bought (BOT) and sold (SLD) legs separately, average
// prices with and without trading cost, valuations, accumulated costs and the
// realised/unrealised profit and loss.
type Position struct {
	timestamp        time.Time // time of the last update from a fill or data event
	symbol           string    // instrument this position is held in
	qty              int64     // current qty of the position, positive on BOT position, negative on SLD position
	qtyBOT           int64     // how many BOT
	qtySLD           int64     // how many SLD
	avgPrice         float64   // average price without cost
	avgPriceNet      float64   // average price including cost
	avgPriceBOT      float64   // average price BOT, without cost
	avgPriceSLD      float64   // average price SLD, without cost
	value            float64   // qty * price
	valueBOT         float64   // qty BOT * price
	valueSLD         float64   // qty SLD * price
	netValue         float64   // current value - cost
	netValueBOT      float64   // current BOT value + cost
	netValueSLD      float64   // current SLD value - cost
	marketPrice      float64   // last known market price
	marketValue      float64   // qty * price
	commission       float64   // accumulated commissions over all fills
	exchangeFee      float64   // accumulated exchange fees over all fills
	cost             float64   // commission + fees
	costBasis        float64   // absolute qty * avgPriceNet
	realProfitLoss   float64   // P/L realised by (partially) closing the position
	unrealProfitLoss float64   // P/L of the open quantity at the last market price
	totalProfitLoss  float64   // realProfitLoss + unrealProfitLoss
}
// Qty returns the current net quantity of the position: positive for a net
// long (BOT) position, negative for a net short (SLD) position.
func (p *Position) Qty() int64 {
	return p.qty
}
// MarketValue returns the last computed market value of the position
// (net quantity times the last known market price).
func (p *Position) MarketValue() float64 {
	return p.marketValue
}
// Create initializes the position from the first fill event for a symbol,
// setting the timestamp and symbol and applying the fill to the zero-valued
// position figures.
func (p *Position) Create(fill FillEvent) {
	p.timestamp = fill.Time()
	p.symbol = fill.Symbol()
	p.update(fill)
}
// Update applies a subsequent fill event to an existing position, refreshing
// quantities, average prices, costs and profit/loss figures.
func (p *Position) Update(fill FillEvent) {
	p.timestamp = fill.Time()
	p.update(fill)
}
// UpdateValue refreshes the current market valuation of the position from
// the price carried by a data event.
func (p *Position) UpdateValue(data DataEvent) {
	p.timestamp = data.Time()
	p.updateValue(data.Price())
}
// update applies a single fill to the position's internal figures: it adjusts
// quantities and average prices (gross and net of cost), accumulates trading
// costs, maintains the cost basis and realises profit/loss when a fill closes
// part of an opposing position. Monetary results are rounded to DP decimal
// places and the market valuation is refreshed from the fill price at the end.
func (p *Position) update(fill FillEvent) {
	// convert fill to internally used decimal numbers
	fillQty := float64(fill.Qty())
	fillPrice := fill.Price()
	fillCommission := fill.Commission()
	fillExchangeFee := fill.ExchangeFee()
	fillCost := fill.Cost()
	fillNetValue := fill.NetValue()
	// convert position to internally used decimal numbers
	qty := float64(p.qty)
	qtyBot := float64(p.qtyBOT)
	qtySld := float64(p.qtySLD)
	avgPrice := p.avgPrice
	avgPriceNet := p.avgPriceNet
	avgPriceBot := p.avgPriceBOT
	avgPriceSld := p.avgPriceSLD
	value := p.value // NOTE(review): recomputed from scratch below; this read is effectively unused
	valueBot := p.valueBOT
	valueSld := p.valueSLD
	netValue := p.netValue // NOTE(review): likewise recomputed unconditionally below
	netValueBot := p.netValueBOT
	netValueSld := p.netValueSLD
	commission := p.commission
	exchangeFee := p.exchangeFee
	cost := p.cost
	costBasis := p.costBasis
	realProfitLoss := p.realProfitLoss
	switch fill.Direction() {
	case BOT:
		if p.qty >= 0 { // position is long, adding to position
			costBasis += fillNetValue
		} else { // position is short, closing partially out
			// costBasis + abs(fillQty) / qty * costBasis
			// NOTE(review): qty is negative in this branch, so this shrinks
			// the cost basis in proportion to the quantity being closed
			costBasis += math.Abs(fillQty) / qty * costBasis
			// realProfitLoss + fillQty * (avgPriceNet - fillPrice) - fillCost
			realProfitLoss += fillQty*(avgPriceNet-fillPrice) - fillCost
		}
		// update average price for bought stock without cost
		// ( (abs(qty) * avgPrice) + (fillQty * fillPrice) ) / (abs(qty) + fillQty)
		avgPrice = ((math.Abs(qty) * avgPrice) + (fillQty * fillPrice)) / (math.Abs(qty) + fillQty)
		// (abs(qty) * avgPriceNet + fillNetValue) / (abs(qty) * fillQty)
		avgPriceNet = (math.Abs(qty)*avgPriceNet + fillNetValue) / (math.Abs(qty) + fillQty)
		// ( (qty + avgPriceBot) + (fillQty * fillPrice) ) / fillQty
		avgPriceBot = ((qtyBot * avgPriceBot) + (fillQty * fillPrice)) / (qtyBot + fillQty)
		// update position qty
		qty += fillQty
		qtyBot += fillQty
		// update bought value
		valueBot = qtyBot * avgPriceBot
		netValueBot += fillNetValue
	case SLD:
		if p.qty > 0 { // position is long, closing partially out
			costBasis -= math.Abs(fillQty) / qty * costBasis
			// realProfitLoss + fillQty * (fillPrice - avgPriceNet) - fillCost
			// NOTE(review): uses math.Abs(fillQty) while the mirrored BOT
			// branch uses fillQty directly; both agree as long as fills
			// always report non-negative quantities — confirm FillEvent.Qty
			realProfitLoss += math.Abs(fillQty)*(fillPrice-avgPriceNet) - fillCost
		} else { // position is short, adding to position
			costBasis -= fillNetValue
		}
		// update average price for bought stock without cost
		// ( (abs(qty) * avgPrice) + (fillQty * fillPrice) ) / (abs(qty) + fillQty)
		avgPrice = (math.Abs(qty)*avgPrice + fillQty*fillPrice) / (math.Abs(qty) + fillQty)
		// (abs(qty) * avgPriceNet + fillNetValue) / (abs(qty) * fillQty)
		avgPriceNet = (math.Abs(qty)*avgPriceNet + fillNetValue) / (math.Abs(qty) + fillQty)
		// avgPriceSld + (fillQty * fillPrice) / fillQty
		avgPriceSld = (qtySld*avgPriceSld + fillQty*fillPrice) / (qtySld + fillQty)
		// update position qty
		qty -= fillQty
		qtySld += fillQty
		// update sold value
		valueSld = qtySld * avgPriceSld
		netValueSld += fillNetValue
	}
	// accumulate trading costs regardless of direction
	commission += fillCommission
	exchangeFee += fillExchangeFee
	cost += fillCost
	value = valueSld - valueBot
	netValue = value - cost
	// convert from internal decimal to float, rounding monetary figures to DP places
	p.qty = int64(qty)
	p.qtyBOT = int64(qtyBot)
	p.qtySLD = int64(qtySld)
	p.avgPrice = math.Round(avgPrice*math.Pow10(DP)) / math.Pow10(DP)
	p.avgPriceBOT = math.Round(avgPriceBot*math.Pow10(DP)) / math.Pow10(DP)
	p.avgPriceSLD = math.Round(avgPriceSld*math.Pow10(DP)) / math.Pow10(DP)
	p.avgPriceNet = math.Round(avgPriceNet*math.Pow10(DP)) / math.Pow10(DP)
	p.value = math.Round(value*math.Pow10(DP)) / math.Pow10(DP)
	p.valueBOT = math.Round(valueBot*math.Pow10(DP)) / math.Pow10(DP)
	p.valueSLD = math.Round(valueSld*math.Pow10(DP)) / math.Pow10(DP)
	p.netValue = math.Round(netValue*math.Pow10(DP)) / math.Pow10(DP)
	p.netValueBOT = math.Round(netValueBot*math.Pow10(DP)) / math.Pow10(DP)
	p.netValueSLD = math.Round(netValueSld*math.Pow10(DP)) / math.Pow10(DP)
	p.commission = commission
	p.exchangeFee = exchangeFee
	p.cost = cost
	p.costBasis = math.Round(costBasis*math.Pow10(DP)) / math.Pow10(DP)
	p.realProfitLoss = math.Round(realProfitLoss*math.Pow10(DP)) / math.Pow10(DP)
	// refresh market valuation and unrealised/total P/L from the fill price
	p.updateValue(fill.Price())
}
// internal function to updates the current market value and profit/loss of a position
func (p *Position) updateValue(l float64) {
// convert to internally used decimal numbers
latest := l
qty := float64(p.qty)
costBasis := p.costBasis
// update market value
marketPrice := latest
p.marketPrice = marketPrice
// abs(qty) * current
marketValue := qty * latest
p.marketValue = marketValue
// qty * current - costBasis
unrealProfitLoss := qty*latest - costBasis
p.unrealProfitLoss = math.Round(unrealProfitLoss*math.Pow10(DP)) / math.Pow10(DP)
realProfitLoss := p.realProfitLoss
totalProfitLoss := realProfitLoss + unrealProfitLoss
p.totalProfitLoss = math.Round(totalProfitLoss*math.Pow10(DP)) / math.Pow10(DP)
} | position.go | 0.643777 | 0.51562 | position.go | starcoder |
package models
import (
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization"
)
// AccessPackageApprovalStage models a single stage of an approval process:
// its primary approvers, optional escalation and fallback approvers, and the
// timeout durations driving automatic denial and escalation.
type AccessPackageApprovalStage struct {
    // Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
    additionalData map[string]interface{}
    // The number of days that a request can be pending a response before it is automatically denied.
    durationBeforeAutomaticDenial *i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ISODuration
    // If escalation is required, the time a request can be pending a response from a primary approver.
    durationBeforeEscalation *i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ISODuration
    // If escalation is enabled and the primary approvers do not respond before the escalation time, the escalationApprovers are the users who will be asked to approve requests.
    escalationApprovers []SubjectSetable
    // The subjects, typically users, who are the fallback escalation approvers.
    fallbackEscalationApprovers []SubjectSetable
    // The subjects, typically users, who are the fallback primary approvers.
    fallbackPrimaryApprovers []SubjectSetable
    // Indicates whether the approver is required to provide a justification for approving a request.
    isApproverJustificationRequired *bool
    // If true, then one or more escalationApprovers are configured in this approval stage.
    isEscalationEnabled *bool
    // The subjects, typically users, who will be asked to approve requests. A collection of singleUser, groupMembers, requestorManager, internalSponsors or externalSponsors.
    primaryApprovers []SubjectSetable
}
// NewAccessPackageApprovalStage instantiates a new accessPackageApprovalStage
// with an empty additional-data map as its only default value.
func NewAccessPackageApprovalStage()(*AccessPackageApprovalStage) {
    m := &AccessPackageApprovalStage{}
    m.SetAdditionalData(map[string]interface{}{})
    return m
}
// CreateAccessPackageApprovalStageFromDiscriminatorValue creates a new instance of the appropriate class based on discriminator value.
// The parse node is not inspected here and the returned error is always nil.
func CreateAccessPackageApprovalStageFromDiscriminatorValue(parseNode i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, error) {
    return NewAccessPackageApprovalStage(), nil
}
// GetAdditionalData gets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
func (m *AccessPackageApprovalStage) GetAdditionalData()(map[string]interface{}) {
    if m == nil {
        return nil
    }
    return m.additionalData
}
// GetDurationBeforeAutomaticDenial gets the durationBeforeAutomaticDenial property value. The number of days that a request can be pending a response before it is automatically denied.
func (m *AccessPackageApprovalStage) GetDurationBeforeAutomaticDenial()(*i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ISODuration) {
    if m == nil {
        return nil
    }
    return m.durationBeforeAutomaticDenial
}
// GetDurationBeforeEscalation gets the durationBeforeEscalation property value. If escalation is required, the time a request can be pending a response from a primary approver.
func (m *AccessPackageApprovalStage) GetDurationBeforeEscalation()(*i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ISODuration) {
    if m == nil {
        return nil
    }
    return m.durationBeforeEscalation
}
// GetEscalationApprovers gets the escalationApprovers property value. If escalation is enabled and the primary approvers do not respond before the escalation time, the escalationApprovers are the users who will be asked to approve requests.
func (m *AccessPackageApprovalStage) GetEscalationApprovers()([]SubjectSetable) {
    if m == nil {
        return nil
    }
    return m.escalationApprovers
}
// GetFallbackEscalationApprovers gets the fallbackEscalationApprovers property value. The subjects, typically users, who are the fallback escalation approvers.
func (m *AccessPackageApprovalStage) GetFallbackEscalationApprovers()([]SubjectSetable) {
    if m == nil {
        return nil
    }
    return m.fallbackEscalationApprovers
}
// GetFallbackPrimaryApprovers gets the fallbackPrimaryApprovers property value. The subjects, typically users, who are the fallback primary approvers.
func (m *AccessPackageApprovalStage) GetFallbackPrimaryApprovers()([]SubjectSetable) {
    if m == nil {
        return nil
    }
    return m.fallbackPrimaryApprovers
}
// GetFieldDeserializers returns the deserialization information for the
// current model: a map from JSON field name to the function that parses that
// field from a parse node into the receiver.
func (m *AccessPackageApprovalStage) GetFieldDeserializers()(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error)) {
    res := make(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error))
    res["durationBeforeAutomaticDenial"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetISODurationValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetDurationBeforeAutomaticDenial(val)
        }
        return nil
    }
    res["durationBeforeEscalation"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetISODurationValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetDurationBeforeEscalation(val)
        }
        return nil
    }
    // collection-valued fields are read as generic Parsables and asserted
    // element-wise into the typed SubjectSetable slice
    res["escalationApprovers"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetCollectionOfObjectValues(CreateSubjectSetFromDiscriminatorValue)
        if err != nil {
            return err
        }
        if val != nil {
            res := make([]SubjectSetable, len(val))
            for i, v := range val {
                res[i] = v.(SubjectSetable)
            }
            m.SetEscalationApprovers(res)
        }
        return nil
    }
    res["fallbackEscalationApprovers"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetCollectionOfObjectValues(CreateSubjectSetFromDiscriminatorValue)
        if err != nil {
            return err
        }
        if val != nil {
            res := make([]SubjectSetable, len(val))
            for i, v := range val {
                res[i] = v.(SubjectSetable)
            }
            m.SetFallbackEscalationApprovers(res)
        }
        return nil
    }
    res["fallbackPrimaryApprovers"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetCollectionOfObjectValues(CreateSubjectSetFromDiscriminatorValue)
        if err != nil {
            return err
        }
        if val != nil {
            res := make([]SubjectSetable, len(val))
            for i, v := range val {
                res[i] = v.(SubjectSetable)
            }
            m.SetFallbackPrimaryApprovers(res)
        }
        return nil
    }
    res["isApproverJustificationRequired"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetBoolValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetIsApproverJustificationRequired(val)
        }
        return nil
    }
    res["isEscalationEnabled"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetBoolValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetIsEscalationEnabled(val)
        }
        return nil
    }
    res["primaryApprovers"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetCollectionOfObjectValues(CreateSubjectSetFromDiscriminatorValue)
        if err != nil {
            return err
        }
        if val != nil {
            res := make([]SubjectSetable, len(val))
            for i, v := range val {
                res[i] = v.(SubjectSetable)
            }
            m.SetPrimaryApprovers(res)
        }
        return nil
    }
    return res
}
// GetIsApproverJustificationRequired gets the isApproverJustificationRequired property value. Indicates whether the approver is required to provide a justification for approving a request.
func (m *AccessPackageApprovalStage) GetIsApproverJustificationRequired()(*bool) {
    if m == nil {
        return nil
    }
    return m.isApproverJustificationRequired
}
// GetIsEscalationEnabled gets the isEscalationEnabled property value. If true, then one or more escalationApprovers are configured in this approval stage.
func (m *AccessPackageApprovalStage) GetIsEscalationEnabled()(*bool) {
    if m == nil {
        return nil
    }
    return m.isEscalationEnabled
}
// GetPrimaryApprovers gets the primaryApprovers property value. The subjects, typically users, who will be asked to approve requests. A collection of singleUser, groupMembers, requestorManager, internalSponsors or externalSponsors.
func (m *AccessPackageApprovalStage) GetPrimaryApprovers()([]SubjectSetable) {
    if m == nil {
        return nil
    }
    return m.primaryApprovers
}
// Serialize writes the current object's fields to the provided serialization
// writer, returning the first error encountered. Collection-valued fields are
// only written when non-nil; additional data is written last.
func (m *AccessPackageApprovalStage) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {
    {
        err := writer.WriteISODurationValue("durationBeforeAutomaticDenial", m.GetDurationBeforeAutomaticDenial())
        if err != nil {
            return err
        }
    }
    {
        err := writer.WriteISODurationValue("durationBeforeEscalation", m.GetDurationBeforeEscalation())
        if err != nil {
            return err
        }
    }
    // collections are up-cast to []Parsable before writing
    if m.GetEscalationApprovers() != nil {
        cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetEscalationApprovers()))
        for i, v := range m.GetEscalationApprovers() {
            cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)
        }
        err := writer.WriteCollectionOfObjectValues("escalationApprovers", cast)
        if err != nil {
            return err
        }
    }
    if m.GetFallbackEscalationApprovers() != nil {
        cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetFallbackEscalationApprovers()))
        for i, v := range m.GetFallbackEscalationApprovers() {
            cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)
        }
        err := writer.WriteCollectionOfObjectValues("fallbackEscalationApprovers", cast)
        if err != nil {
            return err
        }
    }
    if m.GetFallbackPrimaryApprovers() != nil {
        cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetFallbackPrimaryApprovers()))
        for i, v := range m.GetFallbackPrimaryApprovers() {
            cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)
        }
        err := writer.WriteCollectionOfObjectValues("fallbackPrimaryApprovers", cast)
        if err != nil {
            return err
        }
    }
    {
        err := writer.WriteBoolValue("isApproverJustificationRequired", m.GetIsApproverJustificationRequired())
        if err != nil {
            return err
        }
    }
    {
        err := writer.WriteBoolValue("isEscalationEnabled", m.GetIsEscalationEnabled())
        if err != nil {
            return err
        }
    }
    if m.GetPrimaryApprovers() != nil {
        cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetPrimaryApprovers()))
        for i, v := range m.GetPrimaryApprovers() {
            cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)
        }
        err := writer.WriteCollectionOfObjectValues("primaryApprovers", cast)
        if err != nil {
            return err
        }
    }
    {
        err := writer.WriteAdditionalData(m.GetAdditionalData())
        if err != nil {
            return err
        }
    }
    return nil
}
// SetAdditionalData sets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
func (m *AccessPackageApprovalStage) SetAdditionalData(value map[string]interface{})() {
if m != nil {
m.additionalData = value
}
}
// SetDurationBeforeAutomaticDenial sets the durationBeforeAutomaticDenial property value. The number of days that a request can be pending a response before it is automatically denied.
func (m *AccessPackageApprovalStage) SetDurationBeforeAutomaticDenial(value *i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ISODuration)() {
if m != nil {
m.durationBeforeAutomaticDenial = value
}
}
// SetDurationBeforeEscalation sets the durationBeforeEscalation property value. If escalation is required, the time a request can be pending a response from a primary approver.
func (m *AccessPackageApprovalStage) SetDurationBeforeEscalation(value *i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ISODuration)() {
if m != nil {
m.durationBeforeEscalation = value
}
}
// SetEscalationApprovers sets the escalationApprovers property value. If escalation is enabled and the primary approvers do not respond before the escalation time, the escalationApprovers are the users who will be asked to approve requests.
func (m *AccessPackageApprovalStage) SetEscalationApprovers(value []SubjectSetable)() {
if m != nil {
m.escalationApprovers = value
}
}
// SetFallbackEscalationApprovers sets the fallbackEscalationApprovers property value. The subjects, typically users, who are the fallback escalation approvers.
func (m *AccessPackageApprovalStage) SetFallbackEscalationApprovers(value []SubjectSetable)() {
if m != nil {
m.fallbackEscalationApprovers = value
}
}
// SetFallbackPrimaryApprovers sets the fallbackPrimaryApprovers property value. The subjects, typically users, who are the fallback primary approvers.
func (m *AccessPackageApprovalStage) SetFallbackPrimaryApprovers(value []SubjectSetable)() {
if m != nil {
m.fallbackPrimaryApprovers = value
}
}
// SetIsApproverJustificationRequired sets the isApproverJustificationRequired property value. Indicates whether the approver is required to provide a justification for approving a request.
func (m *AccessPackageApprovalStage) SetIsApproverJustificationRequired(value *bool)() {
if m != nil {
m.isApproverJustificationRequired = value
}
}
// SetIsEscalationEnabled sets the isEscalationEnabled property value. If true, then one or more escalationApprovers are configured in this approval stage.
func (m *AccessPackageApprovalStage) SetIsEscalationEnabled(value *bool)() {
if m != nil {
m.isEscalationEnabled = value
}
}
// SetPrimaryApprovers sets the primaryApprovers property value. The subjects, typically users, who will be asked to approve requests. A collection of singleUser, groupMembers, requestorManager, internalSponsors or externalSponsors.
func (m *AccessPackageApprovalStage) SetPrimaryApprovers(value []SubjectSetable)() {
if m != nil {
m.primaryApprovers = value
}
} | models/access_package_approval_stage.go | 0.574514 | 0.404743 | access_package_approval_stage.go | starcoder |
package strings
import (
"bytes"
"strings"
"unicode"
)
// ToLower returns a copy of the string s with all Unicode letters mapped to
// their lower case. It delegates directly to strings.ToLower.
func ToLower(s string) string { return strings.ToLower(s) }
// ToLowerFirst returns a copy of the string s with only its first Unicode
// letter mapped to its lower case; the rest of the string is unchanged.
// The empty string is returned as-is.
func ToLowerFirst(s string) string {
	if s == "" {
		return s
	}
	// Decode once, lower the first rune and re-encode. This replaces the
	// previous rune-by-rune buffer copy (with a per-rune string conversion)
	// and the redundant single-character special case.
	r := []rune(s)
	r[0] = unicode.ToLower(r[0])
	return string(r)
}
// ToLowerSpecial returns a copy of the string s with all Unicode letters mapped to their
// lower case, giving priority to the special casing rules.
// It delegates directly to strings.ToLowerSpecial.
func ToLowerSpecial(c unicode.SpecialCase, s string) string {
	return strings.ToLowerSpecial(c, s)
}
// ToUpper returns a copy of the string s with all Unicode letters mapped to
// their upper case. It delegates directly to strings.ToUpper.
func ToUpper(s string) string { return strings.ToUpper(s) }
// ToUpperFirst returns a copy of the string s with only its first Unicode
// letter mapped to its upper case; the rest of the string is unchanged.
// The empty string is returned as-is.
func ToUpperFirst(s string) string {
	if s == "" {
		return s
	}
	// Decode once, upper the first rune and re-encode. This replaces the
	// previous rune-by-rune buffer copy (with a per-rune string conversion)
	// and the redundant single-character special case.
	r := []rune(s)
	r[0] = unicode.ToUpper(r[0])
	return string(r)
}
// ToUpperSpecial returns a copy of the string s with all Unicode letters mapped to their
// upper case, giving priority to the special casing rules.
func ToUpperSpecial(c unicode.SpecialCase, s string) string {
	return strings.ToUpperSpecial(c, s)
}
// Title returns a copy of the string s with all Unicode letters that begin words
// mapped to their title case.
// NOTE(review): strings.Title is deprecated since Go 1.18 because it cannot
// handle Unicode punctuation properly; golang.org/x/text/cases is the
// suggested replacement.
func Title(s string) string { return strings.Title(s) }
// ToTitle returns a copy of the string s with all Unicode letters mapped to their title case.
func ToTitle(s string) string { return strings.ToTitle(s) }
// ToTitleSpecial returns a copy of the string s with all Unicode letters mapped to their
// title case, giving priority to the special casing rules.
func ToTitleSpecial(c unicode.SpecialCase, s string) string {
	return strings.ToTitleSpecial(c, s)
}
// Camel returns a copy of the string s in camel case: the letter that starts
// each word is title-cased, all other letters are lower-cased, and separator
// characters (as defined by isSeparator) are removed.
func Camel(s string) string {
	wordStart := true
	// Map separators to a placeholder and case the remaining runes, then
	// strip the placeholders in a second pass.
	mapped := Map(
		func(r rune) rune {
			switch {
			case isSeparator(r):
				wordStart = true
				return '_'
			case wordStart:
				wordStart = false
				return unicode.ToTitle(r)
			default:
				return unicode.ToLower(r)
			}
		},
		s)
	return Remove(mapped, "_")
}
// UnCamel returns a copy of the string s with every upper-case letter mapped
// to its lower case and, except at the start, preceded by sep — e.g.
// UnCamel("CamelCase", "_") == "camel_case". Leading and trailing white
// space is trimmed first.
func UnCamel(s, sep string) string {
	var b strings.Builder
	for i, r := range strings.TrimSpace(s) {
		if unicode.IsUpper(r) {
			if i > 0 {
				b.WriteString(sep)
			}
			b.WriteRune(unicode.ToLower(r))
		} else {
			b.WriteRune(r)
		}
		// NOTE: the original contained a dead `i++` here; mutating the range
		// variable has no effect on iteration in Go and has been removed.
	}
	return b.String()
}
// isSeparator reports whether the rune could mark a word boundary.
// ASCII letters and digits are not separators; every other ASCII character
// (including '_' and '-') is. Outside ASCII, letters and digits are not
// separators and only white space is treated as one.
// TODO: update when package unicode captures more of the properties.
func isSeparator(r rune) bool {
	if r <= 0x7F {
		// ASCII fast path: anything that is not alphanumeric separates.
		switch {
		case r >= '0' && r <= '9':
			return false
		case r >= 'a' && r <= 'z':
			return false
		case r >= 'A' && r <= 'Z':
			return false
		}
		return true
	}
	// Non-ASCII letters and digits never separate.
	if unicode.IsLetter(r) || unicode.IsDigit(r) {
		return false
	}
	// Otherwise, all we can do for now is treat spaces as separators.
	return unicode.IsSpace(r)
}
package primitive
import (
"math"
"math/rand"
)
// Vector is a 3-component vector of float64 used for points, directions and
// offsets.
type Vector struct {
	X, Y, Z float64
}

// UnitVector is the vector (1, 1, 1), used to recenter samples from [0,1)^3.
var UnitVector = Vector{1, 1, 1}

// VectorInUnitSphere returns a random point strictly inside the unit sphere
// via rejection sampling: candidates are drawn uniformly from the cube
// [-1,1)^3 and resampled until one falls inside the sphere.
func VectorInUnitSphere(rnd *rand.Rand) Vector {
	for {
		r := Vector{rnd.Float64(), rnd.Float64(), rnd.Float64()}
		p := r.MultiplyScalar(2.0).Subtract(UnitVector)
		// BUG FIX: accept points INSIDE the sphere (< 1). The previous test
		// was inverted (>= 1) and returned the corner points outside it.
		if p.SquaredLength() < 1.0 {
			return p
		}
	}
}

// Length returns the Euclidean length of v.
func (v Vector) Length() float64 {
	return math.Sqrt(v.X*v.X + v.Y*v.Y + v.Z*v.Z)
}

// SquaredLength returns the squared Euclidean length of v (cheaper than
// Length when only comparisons are needed).
func (v Vector) SquaredLength() float64 {
	return v.X*v.X + v.Y*v.Y + v.Z*v.Z
}

// Normalize returns v scaled to unit length.
func (v Vector) Normalize() Vector {
	return v.DivideScalar(v.Length())
}

// Dot returns the dot product of v and ov.
func (v Vector) Dot(ov Vector) float64 {
	return v.X*ov.X + v.Y*ov.Y + v.Z*ov.Z
}

// Cross returns the cross product of v and ov.
func (v Vector) Cross(ov Vector) Vector {
	return Vector{
		v.Y*ov.Z - v.Z*ov.Y,
		v.Z*ov.X - v.X*ov.Z,
		v.X*ov.Y - v.Y*ov.X,
	}
}

// Add returns the component-wise sum v + ov.
func (v Vector) Add(ov Vector) Vector {
	return Vector{v.X + ov.X, v.Y + ov.Y, v.Z + ov.Z}
}

// Subtract returns the component-wise difference v - ov.
func (v Vector) Subtract(ov Vector) Vector {
	return Vector{v.X - ov.X, v.Y - ov.Y, v.Z - ov.Z}
}

// Multiply returns the component-wise product of v and ov.
func (v Vector) Multiply(ov Vector) Vector {
	return Vector{v.X * ov.X, v.Y * ov.Y, v.Z * ov.Z}
}

// Divide returns the component-wise quotient of v by ov.
func (v Vector) Divide(ov Vector) Vector {
	return Vector{v.X / ov.X, v.Y / ov.Y, v.Z / ov.Z}
}

// AddScalar returns v with t added to every component.
func (v Vector) AddScalar(t float64) Vector {
	return Vector{v.X + t, v.Y + t, v.Z + t}
}

// SubtractScalar returns v with t subtracted from every component.
func (v Vector) SubtractScalar(t float64) Vector {
	return Vector{v.X - t, v.Y - t, v.Z - t}
}

// MultiplyScalar returns v scaled by t.
func (v Vector) MultiplyScalar(t float64) Vector {
	return Vector{v.X * t, v.Y * t, v.Z * t}
}

// DivideScalar returns v scaled by 1/t.
func (v Vector) DivideScalar(t float64) Vector {
	return Vector{v.X / t, v.Y / t, v.Z / t}
}

// Reflect returns v mirrored about the normal ov: v - 2*(v·ov)*ov.
// The result is only a true reflection when ov is a unit vector.
func (v Vector) Reflect(ov Vector) Vector {
	b := 2 * v.Dot(ov)
	return v.Subtract(ov.MultiplyScalar(b))
}

// Refract computes the refraction of v through a surface with normal ov and
// refraction ratio n (n1/n2) following Snell's law. It reports false when
// total internal reflection occurs and no refracted ray exists.
func (v Vector) Refract(ov Vector, n float64) (bool, Vector) {
	uv := v.Normalize()
	uo := ov.Normalize()
	dt := uv.Dot(uo)
	discriminant := 1.0 - (n * n * (1 - dt*dt))
	if discriminant <= 0 {
		return false, Vector{}
	}
	// BUG FIX: use the normalized normal uo throughout. The previous code
	// computed uo but then used the raw ov in the refraction formula, which
	// silently assumed the caller always passes a unit normal.
	a := uv.Subtract(uo.MultiplyScalar(dt)).MultiplyScalar(n)
	b := uo.MultiplyScalar(math.Sqrt(discriminant))
	return true, a.Subtract(b)
}
package search
// JSONHeatmapFacetMap is a JSON facet map specialised for Solr heatmap
// facets computed over a spatial field.
type JSONHeatmapFacetMap JSONFacetMap
// CreateJSONHeatmapFacetMap builds a heatmap facet map for the given
// spatial field name.
func CreateJSONHeatmapFacetMap(fieldname string) *JSONHeatmapFacetMap {
	jfm := CreateJSONFacetMap(fieldname)
	return (*JSONHeatmapFacetMap)(jfm)
}
// withSubFacet always panics: Solr heatmap facets do not support subfacets,
// so this method exists only to reject the operation explicitly.
func (jfm *JSONHeatmapFacetMap) withSubFacet(string, *JSONFacetMap) *JSONHeatmapFacetMap {
	panic("subfacets not supported in Heatmap")
}
// SetRegionQuery indicates the region to compute the heatmap facet on,
// stored under the Solr "geom" key.
//
// Solr defaults to the world ("[-180,-90 TO 180,90]") when not set.
func (jfm *JSONHeatmapFacetMap) SetRegionQuery(queryString string) *JSONHeatmapFacetMap {
	(*jfm)["geom"] = queryString
	return jfm
}
// SetGridLevel forces the size of each cell in the computed heatmap grid
// (Solr "gridLevel"). Values below 1 are silently clamped to 1.
//
// If not set, the cell size is computed from distErrPct or distErr instead;
// see SetDistErr and SetDistErrPct.
func (jfm *JSONHeatmapFacetMap) SetGridLevel(individualCellSize int64) *JSONHeatmapFacetMap {
	if individualCellSize <= 0 {
		individualCellSize = 1
	}
	(*jfm)["gridLevel"] = individualCellSize
	return jfm
}
// SetDistErrPct sets the fraction of the heatmap region that is used to
// compute the cell size (Solr "distErrPct"). Out-of-range input is silently
// clamped to [0, 1].
//
// Solr defaults to 0.15 when not specified; see also SetGridLevel and
// SetDistErr.
func (jfm *JSONHeatmapFacetMap) SetDistErrPct(distErrPct float64) *JSONHeatmapFacetMap {
	if distErrPct < 0 {
		distErrPct = 0
	}
	if distErrPct > 1 {
		distErrPct = 1
	}
	(*jfm)["distErrPct"] = distErrPct
	return jfm
}
// SetDistErr sets the maximum acceptable cell error distance (Solr
// "distErr"), used to derive the grid cell size instead of an explicit
// SetGridLevel. Negative values are silently clamped to 0.
//
// See also SetGridLevel and SetDistErrPct.
func (jfm *JSONHeatmapFacetMap) SetDistErr(distErr float64) *JSONHeatmapFacetMap {
	if distErr < 0 {
		distErr = 0
	}
	(*jfm)["distErr"] = distErr
	return jfm
}
/*
public enum HeatmapFormat {
INTS2D("ints2D"), PNG("png");
private final String value;
HeatmapFormat(String value) {
this.value = value;
}
@Override
public String toString() { return value; }
}
*/
/**
* Sets the format that the computed heatmap should be returned in.
*
* Defaults to 'ints2D' if not specified.
*/
func (jfm *JSONHeatmapFacetMap) SetHeatmapFormat(format string) *JSONHeatmapFacetMap {
(*jfm)["format"] = format
return jfm
} | pkg/search/solrJSONHeatmapFacetMap.go | 0.926412 | 0.447883 | solrJSONHeatmapFacetMap.go | starcoder |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.