code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1 value |
|---|---|---|---|---|---|
package path
import (
"math"
"math/rand"
"time"
"github.com/Windowsfreak/go-mc/bot/world"
"github.com/Windowsfreak/go-mc/data/block"
"github.com/beefsack/go-astar"
)
// Point represents a point in 3D space.
type Point struct {
	X, Y, Z float64
}

// V3 is an integer (block-aligned) coordinate in 3D space.
type V3 struct {
	X, Y, Z int
}

// Cost returns a heuristic distance from v to other: the Euclidean distance
// in the horizontal X/Z plane plus a vertical term weighted by sqrt(1.2).
// NOTE(review): the 1.2 factor presumably penalizes height changes — confirm
// it was tuned intentionally rather than being a leftover constant.
func (v V3) Cost(other V3) float64 {
	x, y, z := v.X-other.X, v.Y-other.Y, v.Z-other.Z
	return math.Sqrt(float64(x*x+z*z)) + math.Sqrt(1.2*float64(y*y))
}

// Nav represents a navigation to a position.
type Nav struct {
	World       *world.World // world queried for block state while pathing
	Start, Dest V3           // start and destination coordinates
}
// Path runs an A* search from n.Start to n.Dest. It returns the resulting
// tiles, the total path cost, and whether a complete path was found.
func (n *Nav) Path() (path []astar.Pather, distance float64, found bool) {
	return astar.Path(
		Tile{ // Start point
			Nav:      n,
			Movement: Waypoint,
			Pos:      n.Start,
		},
		Tile{ // Destination point
			Nav:      n,
			Movement: Waypoint,
			Pos:      n.Dest,
		})
}
// Tile represents a point in a path. All tiles in a path are adjacent to
// their preceding tiles.
type Tile struct {
	Nav         *Nav
	HalfBlock   bool              // true when Pos is the bottom half of a slab
	Movement    Movement          // the movement used to reach this tile
	Pos         V3                // the tile's block coordinate
	BlockStatus world.BlockStatus // block state at Pos
	ExtraCost   int
}

// PathNeighborCost returns the cost of stepping from t to the neighboring
// tile to: a base cost of 1 plus the movement-specific cost.
func (t Tile) PathNeighborCost(to astar.Pather) float64 {
	other := to.(Tile)
	return 1 + other.Movement.BaseCost()
}

// PathEstimatedCost returns the A* heuristic from t to the tile to.
func (t Tile) PathEstimatedCost(to astar.Pather) float64 {
	other := to.(Tile)
	cost := t.Pos.Cost(other.Pos)
	return cost + other.Movement.BaseCost()
}
// PathNeighbors returns the tiles reachable from t by any known movement.
// Branches estimated to be very far (>8000) from the start are abandoned,
// and a tile already at the destination yields a single terminal Waypoint
// tile so the search can finish.
func (t Tile) PathNeighbors() []astar.Pather {
	possibles := make([]astar.Pather, 0, 8)
	// Give up on branches that have wandered too far from the start.
	if c := t.PathEstimatedCost(Tile{Pos: t.Nav.Start}); c > 8000 {
		return nil
	}
	// At the destination: emit a terminal Waypoint tile exactly once.
	if t.Pos == t.Nav.Dest && t.Movement != Waypoint {
		dupe := t
		dupe.Movement = Waypoint
		dupe.BlockStatus = 0
		return []astar.Pather{dupe}
	}
	for _, m := range allMovements {
		x, y, z := m.Offset()
		pos := V3{X: t.Pos.X + x, Y: t.Pos.Y + y, Z: t.Pos.Z + z}
		possible := m.Possible(t.Nav, pos.X, pos.Y, pos.Z, t.Pos, t.Movement)
		// fmt.Printf("%v-%v: Trying (%v) %v: possible=%v\n", t.Movement, t.Pos, pos, m, possible)
		if possible {
			bStateID := t.Nav.World.GetBlockStatus(pos.X, pos.Y, pos.Z)
			possibles = append(possibles, Tile{
				Nav:         t.Nav,
				Movement:    m,
				Pos:         pos,
				BlockStatus: bStateID,
				// A bottom slab leaves the player half a block lower.
				HalfBlock: IsSlab(bStateID) && SlabIsBottom(bStateID),
			})
		}
	}
	// fmt.Printf("%v.Neighbours(): %+v\n", t.Pos, possibles)
	return possibles
}
// Inputs computes the controls (throttle, yaw, pitch, jump) to execute this
// tile's movement, given the player's position, the remaining delta to the
// target (deltaPos), the current velocity, and how long the movement has
// been running.
//
// NOTE(review): the dist2 locals below hold an actual distance (Sqrt is
// applied) yet several thresholds look like squared values (0.22*0.22*2,
// 0.8*0.8, 0.9*0.9) — confirm the intended units are consistent.
func (t Tile) Inputs(pos, deltaPos, vel Point, runTime time.Duration) Inputs {
	// Sufficient for simple movements.
	at := math.Atan2(-deltaPos.X, -deltaPos.Z)
	mdX, _, mdZ := t.Movement.Offset()
	// Face the direction of the movement offset (degrees).
	wantYaw := -math.Atan2(float64(mdX), float64(mdZ)) * 180 / math.Pi
	out := Inputs{
		ThrottleX: math.Sin(at),
		ThrottleZ: math.Cos(at),
		Yaw:       wantYaw,
	}
	// A vertical-only movement has no meaningful yaw.
	if mdX == 0 && mdZ == 0 {
		out.Yaw = math.NaN()
	}
	// Occasionally jitter the pitch a little (looks less robotic).
	if (rand.Int() % 14) == 0 {
		out.Pitch = float64((rand.Int() % 4) - 2)
	}
	switch t.Movement {
	case DescendLadder, DescendLadderEast, DescendLadderWest, DescendLadderNorth, DescendLadderSouth:
		// Deadzone the throttle to prevent an accidental ascend.
		if dist2 := math.Sqrt(deltaPos.X*deltaPos.X + deltaPos.Z*deltaPos.Z); dist2 < (0.22 * 0.22 * 2) {
			out.ThrottleX, out.ThrottleZ = 0, 0
		}
	case AscendLadder:
		dist2 := math.Sqrt(deltaPos.X*deltaPos.X + deltaPos.Z*deltaPos.Z)
		// Aim slightly off the ladder block center depending on approach.
		if x, _, z := LadderDirection(t.BlockStatus).Offset(); dist2 > (0.8*0.8) && deltaPos.Y < 0 {
			pos.X -= (0.25 * float64(x))
			pos.Z -= (0.25 * float64(z))
		} else {
			pos.X += (0.42 * float64(x))
			pos.Z += (0.42 * float64(z))
		}
		at = math.Atan2(-pos.X+float64(t.Pos.X)+0.5, -pos.Z+float64(t.Pos.Z)+0.5)
		out = Inputs{
			ThrottleX: math.Sin(at),
			ThrottleZ: math.Cos(at),
			Yaw:       math.NaN(),
		}
	case AscendNorth, AscendSouth, AscendEast, AscendWest:
		var (
			b           = block.ByID[block.StateID[uint32(t.BlockStatus)]]
			_, isStairs = stairs[b.ID]
			_, isSlab   = slabs[b.ID]
			maybeStuck  = runTime < 1250*time.Millisecond
			dist2       = math.Sqrt(deltaPos.X*deltaPos.X + deltaPos.Z*deltaPos.Z)
		)
		out.Jump = dist2 < 1.75 && deltaPos.Y < -0.81
		switch {
		case isStairs:
			// Special logic for stairs: Try to go towards the downwards edge initially.
			if dist2 > (0.9*0.9) && deltaPos.Y < 0 {
				if x, _, z := StairsDirection(t.BlockStatus).Offset(); dist2 > (0.9*0.9) && deltaPos.Y < 0 {
					pos.X += (0.49 * float64(x))
					pos.Z += (0.49 * float64(z))
				}
				at = math.Atan2(-pos.X+float64(t.Pos.X)+0.5, -pos.Z+float64(t.Pos.Z)+0.5)
				out = Inputs{
					ThrottleX: math.Sin(at),
					ThrottleZ: math.Cos(at),
					Yaw:       math.NaN(),
					Jump:      out.Jump && !maybeStuck,
				}
			}
		// We dont need to jump for slabs, so only jump if we get stuck.
		case isSlab:
			out.Jump = out.Jump && !maybeStuck
		}
		// Turn off the throttle if we get stuck on the jump.
		if dist2 < 1 && deltaPos.Y < 0 && vel.Y == 0 {
			out.ThrottleX, out.ThrottleZ = 0, 0
		}
	case JumpCrossEast, JumpCrossWest, JumpCrossNorth, JumpCrossSouth:
		// Only jump inside the takeoff window.
		dist2 := math.Sqrt(deltaPos.X*deltaPos.X + deltaPos.Z*deltaPos.Z)
		out.Jump = dist2 > 1.5 && dist2 < 1.78
	}
	return out
}
// IsComplete reports whether this tile's movement has finished, given d,
// the remaining delta from the player to the tile's target position. The
// tolerances differ by movement kind.
func (t Tile) IsComplete(d Point) bool {
	switch t.Movement {
	case DescendLadder, DescendLadderNorth, DescendLadderSouth, DescendLadderWest, DescendLadderEast,
		DropNorth, DropSouth, DropEast, DropWest:
		return (d.X*d.X+d.Z*d.Z) < (2*0.2*0.25) && d.Y <= 0.05
	case AscendLadder:
		return d.Y >= 0
	case JumpCrossEast, JumpCrossWest, JumpCrossNorth, JumpCrossSouth:
		return (d.X*d.X+d.Z*d.Z) < (0.22*0.22) && d.Y >= -0.065
	}
	yLowerCutoff := -0.065
	if t.HalfBlock {
		// Standing on a bottom slab leaves the player half a block lower.
		yLowerCutoff -= 0.5
	}
	// fmt.Println(t.HalfBlock, d.Y, d.Y >= yLowerCutoff, d.Y <= 0.08)
	return (d.X*d.X+d.Z*d.Z) < (0.18*0.18) && d.Y >= yLowerCutoff && d.Y <= 0.08
}
package stats
import (
"fmt"
"runtime"
"strings"
"sync/atomic"
hist "github.com/samaritan-proxy/circonusllhist"
)
// The default values are enough in most scenarios.
var (
defaultSupportedQuantiles = []float64{
0.0, 0.25, 0.5, 0.9, 0.95, 0.99, 1.0,
}
defaultSupportedBuckets = []float64{
0.5, 1, 5, 10, 25, 50, 100, 250, 500, 1000,
2500, 5000, 10000, 30000, 60000, 300000, 600000, 1800000, 3600000,
}
)
// HistogramStatistics holds the computed statistics of a histogram.
type HistogramStatistics struct {
	*hist.Histogram
	sampleCount uint64
	sampleSum   float64

	// quantile related
	sqs []float64 // supported
	cqs []float64 // computed

	// bucket related
	sbs []float64 // supported
	cbs []uint64  // computed
}

// newHistogramStatistics takes a snapshot of h: the sample count and sum,
// the default quantiles, and the default bucket counts.
func newHistogramStatistics(h *hist.Histogram) *HistogramStatistics {
	s := &HistogramStatistics{
		Histogram:   h,
		sampleCount: h.SampleCount(),
		sampleSum:   h.ApproxSum(),
	}
	// quantiles
	s.sqs = defaultSupportedQuantiles
	s.cqs, _ = h.ApproxQuantile(s.sqs)
	// buckets: cumulative count of samples below each bucket boundary
	s.sbs = defaultSupportedBuckets
	for _, b := range s.sbs {
		s.cbs = append(s.cbs, h.ApproxCountBelow(b))
	}
	return s
}

// SupportedQuantiles returns the supported quantiles.
func (s *HistogramStatistics) SupportedQuantiles() []float64 {
	return s.sqs
}

// ComputedQuantiles returns the computed quantile values during the period.
// A zero-filled slice of the same length as the supported quantiles is
// returned when nothing was computed.
func (s *HistogramStatistics) ComputedQuantiles() []float64 {
	if len(s.cqs) == 0 {
		return make([]float64, len(s.sqs))
	}
	return s.cqs
}

// SupportedBuckets returns the supported buckets.
func (s *HistogramStatistics) SupportedBuckets() []float64 {
	return s.sbs
}
// ComputedBuckets returns the computed bucket values during the period.
// A zero-filled slice matching the supported buckets is returned when
// nothing was computed, mirroring ComputedQuantiles.
func (s *HistogramStatistics) ComputedBuckets() []uint64 {
	// Fix: the guard previously tested len(s.sbs), which is always the
	// non-empty default bucket list, so the fallback branch was dead and a
	// nil cbs slice could leak out. Test the computed slice instead, as
	// ComputedQuantiles does.
	if len(s.cbs) == 0 {
		return make([]uint64, len(s.sbs))
	}
	return s.cbs
}
// SampleCount returns the total number of values recorded during the period.
func (s *HistogramStatistics) SampleCount() uint64 {
	return s.sampleCount
}

// SampleSum returns the (approximate) sum of all values during the period.
func (s *HistogramStatistics) SampleSum() float64 {
	return s.sampleSum
}

// A Histogram records values one at a time.
type Histogram struct {
	metric
	store *Store

	sampleCount uint64            // total records; also selects the raw shard (updated atomically)
	rawCount    uint64            // number of raw shards (GOMAXPROCS at creation)
	raws        []*hist.Histogram // per-shard raw hists, reduces write contention
	itl         *hist.Histogram   // interval hist
	cum         *hist.Histogram   // cumulative hist
}
// NewHistogram creates a histogram with given params.
// NOTE: It should only be used in unit tests.
func NewHistogram(store *Store, name, tagExtractedName string, tags []*Tag) *Histogram {
	h := &Histogram{
		store:    store,
		metric:   newMetric(name, tagExtractedName, tags),
		itl:      hist.NewNoLocks(),
		cum:      hist.New(),
		rawCount: uint64(runtime.GOMAXPROCS(0)),
	}
	// One raw histogram per P so that concurrent writers rarely contend.
	h.raws = make([]*hist.Histogram, h.rawCount)
	for i := uint64(0); i < h.rawCount; i++ {
		h.raws[i] = hist.New()
	}
	return h
}

// Record records a value to the Histogram.
func (h *Histogram) Record(val uint64) {
	// Pick a shard round-robin via the atomic sample counter.
	raw := h.raws[atomic.AddUint64(&h.sampleCount, 1)%h.rawCount]
	raw.RecordIntScale(int64(val), 0)
	if h.store != nil {
		h.store.deliverHistogramSampleToSinks(h, val)
	}
	h.markUsed()
}
// RefreshIntervalStatistics refreshes the interval statistics of the
// histogram by merging all raw shards into the interval hist and folding
// them into the cumulative hist.
// NOTE: It should only be used in unit tests.
func (h *Histogram) RefreshIntervalStatistics() {
	// merge and reset all raw hists
	merged := hist.NewNoLocks()
	for _, raw := range h.raws {
		merged.Merge(raw)
		// NOTE: Merge and reset is not atomic, maybe some
		// samples would be dropped.
		raw.FullReset()
	}
	h.itl = merged.Copy()
	h.cum.Merge(merged)
}

// IntervalStatistics returns the interval statistics of the Histogram.
func (h *Histogram) IntervalStatistics() *HistogramStatistics {
	return newHistogramStatistics(h.itl.Copy())
}

// CumulativeStatistics returns the cumulative statistics of the Histogram.
func (h *Histogram) CumulativeStatistics() *HistogramStatistics {
	return newHistogramStatistics(h.cum.Copy())
}
// Summary returns a human-readable summary of the histogram: for each
// supported quantile, "P<q>(<interval>,<cumulative>)" joined by spaces.
func (h *Histogram) Summary() string {
	if h.cum.SampleCount() == 0 {
		return "No recorded values"
	}
	itlStat := newHistogramStatistics(h.itl)
	cumStat := newHistogramStatistics(h.cum)
	var summary []string
	for i, q := range cumStat.SupportedQuantiles() {
		summary = append(summary,
			fmt.Sprintf("P%d(%d,%d)", int(100*q),
				int64(itlStat.ComputedQuantiles()[i]),
				int64(cumStat.ComputedQuantiles()[i]),
			),
		)
	}
	return strings.Join(summary, " ")
}
package simulation
import (
. "github.com/openthread/ot-ns/types"
"github.com/openthread/ot-ns/visualize"
"github.com/pkg/errors"
"github.com/simonlingoogle/go-simplelogger"
)
// simulationController forwards control actions to a writable simulation.
// Each action is posted asynchronously onto the simulation's own goroutine,
// so the methods themselves never fail and always return nil.
type simulationController struct {
	sim *Simulation
}

// CtrlSetSpeed sets the simulation speed.
func (sc *simulationController) CtrlSetSpeed(speed float64) error {
	sim := sc.sim
	sim.PostAsync(true, func() {
		sim.SetSpeed(speed)
	})
	return nil
}

// CtrlSetNodeFailed marks a node as failed or recovered.
func (sc *simulationController) CtrlSetNodeFailed(nodeid NodeId, failed bool) error {
	sim := sc.sim
	sim.PostAsync(true, func() {
		sim.SetNodeFailed(nodeid, failed)
	})
	return nil
}

// CtrlDeleteNode removes a node from the simulation; any deletion error is
// deliberately discarded because the call is asynchronous.
func (sc *simulationController) CtrlDeleteNode(nodeid NodeId) error {
	sim := sc.sim
	sim.PostAsync(true, func() {
		_ = sim.DeleteNode(nodeid)
	})
	return nil
}

// CtrlMoveNodeTo moves a node to position (x, y).
func (sc *simulationController) CtrlMoveNodeTo(nodeid NodeId, x, y int) error {
	sim := sc.sim
	sim.PostAsync(true, func() {
		sim.MoveNodeTo(nodeid, x, y)
	})
	return nil
}

// CtrlAddNode adds a node at (x, y) based on the default node config,
// adjusted for the requested router role and mode flags. Failures are
// logged from the simulation goroutine rather than returned.
func (sc *simulationController) CtrlAddNode(x, y int, isRouter bool, mode NodeMode) error {
	sim := sc.sim
	nodeCfg := DefaultNodeConfig()
	nodeCfg.IsRouter = isRouter
	nodeCfg.IsMtd = !isRouter && !mode.FullThreadDevice
	nodeCfg.RxOffWhenIdle = !isRouter && !mode.RxOnWhenIdle
	nodeCfg.X, nodeCfg.Y = x, y
	sim.PostAsync(true, func() {
		simplelogger.Infof("CtrlAddNode: %+v", nodeCfg)
		_, err := sim.AddNode(nodeCfg)
		if err != nil {
			simplelogger.Errorf("add node failed: %v", err)
			return
		}
	})
	return nil
}
// readonlySimulationController rejects every control action with
// readonlySimulationError; it is used when the simulation runs in
// read-only mode.
type readonlySimulationController struct {
}

func (r readonlySimulationController) CtrlSetSpeed(speed float64) error {
	return readonlySimulationError
}

func (r readonlySimulationController) CtrlSetNodeFailed(nodeid NodeId, failed bool) error {
	return readonlySimulationError
}

// readonlySimulationError is returned by every readonlySimulationController method.
var readonlySimulationError = errors.Errorf("simulation is readonly")

func (r readonlySimulationController) CtrlAddNode(x, y int, router bool, mode NodeMode) error {
	return readonlySimulationError
}

func (r readonlySimulationController) CtrlMoveNodeTo(nodeid NodeId, x, y int) error {
	return readonlySimulationError
}

func (r readonlySimulationController) CtrlDeleteNode(nodeid NodeId) error {
	return readonlySimulationError
}
func NewSimulationController(sim *Simulation) visualize.SimulationController {
if !sim.cfg.ReadOnly {
return &simulationController{sim}
} else {
return readonlySimulationController{}
}
} | simulation/simulationController.go | 0.575707 | 0.413596 | simulationController.go | starcoder |
package tsm1
/*
A TSM file is composed for four sections: header, blocks, index and the footer.
┌────────┬────────────────────────────────────┬─────────────┬──────────────┐
│ Header │ Blocks │ Index │ Footer │
│5 bytes │ N bytes │ N bytes │ 4 bytes │
└────────┴────────────────────────────────────┴─────────────┴──────────────┘
Header is composed of a magic number to identify the file type and a version
number.
┌───────────────────┐
│ Header │
├─────────┬─────────┤
│ Magic │ Version │
│ 4 bytes │ 1 byte │
└─────────┴─────────┘
Blocks are sequences of pairs of CRC32 and data. The block data is opaque to the
file. The CRC32 is used for block level error detection. The length of the blocks
is stored in the index.
┌───────────────────────────────────────────────────────────┐
│ Blocks │
├───────────────────┬───────────────────┬───────────────────┤
│ Block 1 │ Block 2 │ Block N │
├─────────┬─────────┼─────────┬─────────┼─────────┬─────────┤
│ CRC │ Data │ CRC │ Data │ CRC │ Data │
│ 4 bytes │ N bytes │ 4 bytes │ N bytes │ 4 bytes │ N bytes │
└─────────┴─────────┴─────────┴─────────┴─────────┴─────────┘
Following the blocks is the index for the blocks in the file. The index is
composed of a sequence of index entries ordered lexicographically by key and
then by time. Each index entry starts with a key length and key followed by a
count of the number of blocks in the file. Each block entry is composed of
the min and max time for the block, the offset into the file where the block
is located and the size of the block.
The index structure can provide efficient access to all blocks as well as the
ability to determine the cost associated with accessing a given key. Given a key
and timestamp, we can determine whether a file contains the block for that
timestamp as well as where that block resides and how much data to read to
retrieve the block. If we know we need to read all or multiple blocks in a
file, we can use the size to determine how much to read in a given IO.
┌────────────────────────────────────────────────────────────────────────────┐
│ Index │
├─────────┬─────────┬──────┬───────┬─────────┬─────────┬────────┬────────┬───┤
│ Key Len │ Key │ Type │ Count │Min Time │Max Time │ Offset │ Size │...│
│ 2 bytes │ N bytes │1 byte│2 bytes│ 8 bytes │ 8 bytes │8 bytes │4 bytes │ │
└─────────┴─────────┴──────┴───────┴─────────┴─────────┴────────┴────────┴───┘
The last section is the footer that stores the offset of the start of the index.
┌─────────┐
│ Footer │
├─────────┤
│Index Ofs│
│ 8 bytes │
└─────────┘
*/
import (
"bufio"
"bytes"
"encoding/binary"
"fmt"
"hash/crc32"
"io"
"os"
"sort"
"sync"
"time"
)
const (
// MagicNumber is written as the first 4 bytes of a data file to
// identify the file as a tsm1 formatted file
MagicNumber uint32 = 0x16D116D1
// Version indicates the version of the TSM file format.
Version byte = 1
// Size in bytes of an index entry
indexEntrySize = 28
// Size in bytes used to store the count of index entries for a key
indexCountSize = 2
// Size in bytes used to store the type of block encoded
indexTypeSize = 1
// Max number of blocks for a given key that can exist in a single file
maxIndexEntries = (1 << (indexCountSize * 8)) - 1
// max length of a key in an index entry (measurement + tags)
maxKeyLength = (1 << (2 * 8)) - 1
)
var (
//ErrNoValues is returned when TSMWriter.WriteIndex is called and there are no values to write.
ErrNoValues = fmt.Errorf("no values written")
// ErrTSMClosed is returned when performing an operation against a closed TSM file.
ErrTSMClosed = fmt.Errorf("tsm file closed")
// ErrMaxKeyLengthExceeded is returned when attempting to write a key that is too long.
ErrMaxKeyLengthExceeded = fmt.Errorf("max key length exceeded")
// ErrMaxBlocksExceeded is returned when attempting to write a block past the allowed number.
ErrMaxBlocksExceeded = fmt.Errorf("max blocks exceeded")
)
// TSMWriter writes TSM formatted key and values.
type TSMWriter interface {
	// Write writes a new block for key containing and values. Writes append
	// blocks in the order that the Write function is called. The caller is
	// responsible for ensuring keys and blocks are sorted appropriately.
	// Values are encoded as a full block. The caller is responsible for
	// ensuring a fixed number of values are encoded in each block as well as
	// ensuring the Values are sorted. The first and last timestamp values are
	// used as the minimum and maximum values for the index entry.
	Write(key string, values Values) error

	// WriteBlock writes a new block for key containing the bytes in block. WriteBlock appends
	// blocks in the order that the WriteBlock function is called. The caller is
	// responsible for ensuring keys and blocks are sorted appropriately, and that the
	// block and index information is correct for the block. The minTime and maxTime
	// timestamp values are used as the minimum and maximum values for the index entry.
	WriteBlock(key string, minTime, maxTime int64, block []byte) error

	// WriteIndex finishes the TSM write streams and writes the index.
	WriteIndex() error

	// Flush flushes all pending changes to the underlying file resources.
	Flush() error

	// Close closes any underlying file resources.
	Close() error

	// Size returns the current size in bytes of the file.
	Size() uint32
}

// IndexWriter writes a TSMIndex.
type IndexWriter interface {
	// Add records a new block entry for a key in the index.
	Add(key string, blockType byte, minTime, maxTime int64, offset int64, size uint32)

	// Entries returns all index entries for a key.
	Entries(key string) []IndexEntry

	// Keys returns the unique set of keys in the index.
	Keys() []string

	// KeyCount returns the count of unique keys in the index.
	KeyCount() int

	// Size returns the size of the current index in bytes.
	Size() uint32

	// MarshalBinary returns a byte slice encoded version of the index.
	MarshalBinary() ([]byte, error)

	// WriteTo writes the index contents to a writer.
	WriteTo(w io.Writer) (int64, error)
}

// IndexEntry is the index information for a given block in a TSM file.
type IndexEntry struct {
	// The min and max time of all points stored in the block.
	MinTime, MaxTime int64

	// The absolute position in the file where this block is located.
	Offset int64

	// The size in bytes of the block in the file.
	Size uint32
}
// UnmarshalBinary decodes an IndexEntry from a byte slice. The input must
// be exactly indexEntrySize (28) bytes: min time, max time, offset (8 bytes
// each, big-endian) followed by a 4-byte size.
func (e *IndexEntry) UnmarshalBinary(b []byte) error {
	if len(b) != indexEntrySize {
		return fmt.Errorf("unmarshalBinary: short buf: %v != %v", indexEntrySize, len(b))
	}
	minTime := binary.BigEndian.Uint64(b[0:8])
	maxTime := binary.BigEndian.Uint64(b[8:16])
	offset := binary.BigEndian.Uint64(b[16:24])
	size := binary.BigEndian.Uint32(b[24:28])
	e.MinTime, e.MaxTime = int64(minTime), int64(maxTime)
	e.Offset, e.Size = int64(offset), size
	return nil
}
// AppendTo writes a binary-encoded version of IndexEntry to b, allocating
// and returning a new slice, if necessary.
func (e *IndexEntry) AppendTo(b []byte) []byte {
	// Ensure b has room for a full entry, reusing existing capacity.
	if len(b) < indexEntrySize {
		if cap(b) < indexEntrySize {
			b = make([]byte, indexEntrySize)
		} else {
			b = b[:indexEntrySize]
		}
	}
	binary.BigEndian.PutUint64(b[:8], uint64(e.MinTime))
	binary.BigEndian.PutUint64(b[8:16], uint64(e.MaxTime))
	binary.BigEndian.PutUint64(b[16:24], uint64(e.Offset))
	// e.Size is already a uint32; the redundant conversion is removed.
	binary.BigEndian.PutUint32(b[24:28], e.Size)
	return b
}
// Contains returns true if this IndexEntry may contain values for the given time.
// The min and max times are inclusive.
func (e *IndexEntry) Contains(t int64) bool {
	return e.MinTime <= t && e.MaxTime >= t
}

// OverlapsTimeRange returns true if the given time range [min, max]
// overlaps the entry's time bounds. (The previous comment claimed
// "completely within"; the condition is a standard interval-overlap test.)
func (e *IndexEntry) OverlapsTimeRange(min, max int64) bool {
	return e.MinTime <= max && e.MaxTime >= min
}

// String returns a string representation of the entry.
func (e *IndexEntry) String() string {
	return fmt.Sprintf("min=%s max=%s ofs=%d siz=%d",
		time.Unix(0, e.MinTime).UTC(), time.Unix(0, e.MaxTime).UTC(), e.Offset, e.Size)
}

// NewIndexWriter returns a new IndexWriter.
func NewIndexWriter() IndexWriter {
	return &directIndex{
		blocks: map[string]*indexEntries{},
	}
}
// directIndex is a simple in-memory index implementation for a TSM file. The full index
// must fit in memory.
type directIndex struct {
	mu     sync.RWMutex
	size   uint32 // encoded size of the index in bytes; guarded by mu
	blocks map[string]*indexEntries
}
// Add records a new block entry for a key in the index and updates the
// running encoded-size estimate to match what WriteTo will emit.
func (d *directIndex) Add(key string, blockType byte, minTime, maxTime int64, offset int64, size uint32) {
	d.mu.Lock()
	defer d.mu.Unlock()
	entries := d.blocks[key]
	if entries == nil {
		entries = &indexEntries{
			Type: blockType,
		}
		d.blocks[key] = entries
		// size of the key length and key stored in the index
		d.size += uint32(2 + len(key))
		// size of the block type byte stored in the index. WriteTo encodes
		// keylen(2) + key + type(1) + count(2) per key; the type byte was
		// previously omitted here, making Size() underestimate by one byte
		// per key.
		d.size += indexTypeSize
		// size of the count of entries stored in the index
		d.size += indexCountSize
	}
	entries.entries = append(entries.entries, IndexEntry{
		MinTime: minTime,
		MaxTime: maxTime,
		Offset:  offset,
		Size:    size,
	})
	// size of the encoded index entry
	d.size += indexEntrySize
}
// entries returns all index entries for a key. Callers must hold d.mu.
func (d *directIndex) entries(key string) []IndexEntry {
	entries := d.blocks[key]
	if entries == nil {
		return nil
	}
	return entries.entries
}

// Entries returns all index entries for a key.
func (d *directIndex) Entries(key string) []IndexEntry {
	d.mu.RLock()
	defer d.mu.RUnlock()
	return d.entries(key)
}

// Entry returns the first index entry for key whose time range contains t,
// or nil if there is none.
func (d *directIndex) Entry(key string, t int64) *IndexEntry {
	d.mu.RLock()
	defer d.mu.RUnlock()
	entries := d.entries(key)
	for _, entry := range entries {
		if entry.Contains(t) {
			// Returns a pointer to the loop's copy, not into the slice.
			return &entry
		}
	}
	return nil
}
// Keys returns the unique set of keys in the index, sorted.
func (d *directIndex) Keys() []string {
	d.mu.RLock()
	defer d.mu.RUnlock()
	// Pre-size the slice to avoid repeated growth (matches WriteTo).
	keys := make([]string, 0, len(d.blocks))
	for k := range d.blocks {
		keys = append(keys, k)
	}
	sort.Strings(keys)
	return keys
}
// KeyCount returns the count of unique keys in the index.
func (d *directIndex) KeyCount() int {
	d.mu.RLock()
	n := len(d.blocks)
	d.mu.RUnlock()
	return n
}

// addEntries merges entries for key into the index. Callers must hold d.mu.
func (d *directIndex) addEntries(key string, entries *indexEntries) {
	existing := d.blocks[key]
	if existing == nil {
		d.blocks[key] = entries
		return
	}
	existing.entries = append(existing.entries, entries.entries...)
}
// WriteTo writes the index contents to w, returning the number of bytes
// written. Keys are emitted in sorted order; for each key the header is
// keylen(2) + key + type(1) + count(2), followed by the encoded entries.
func (d *directIndex) WriteTo(w io.Writer) (int64, error) {
	d.mu.RLock()
	defer d.mu.RUnlock()
	// Index blocks are written sorted by key
	keys := make([]string, 0, len(d.blocks))
	for k := range d.blocks {
		keys = append(keys, k)
	}
	sort.Strings(keys)
	var (
		n   int
		err error
		buf [5]byte
		N   int64 // running total of bytes written
	)
	// For each key, individual entries are sorted by time
	for _, key := range keys {
		entries := d.blocks[key]
		if entries.Len() > maxIndexEntries {
			return N, fmt.Errorf("key '%s' exceeds max index entries: %d > %d", key, entries.Len(), maxIndexEntries)
		}
		sort.Sort(entries)
		binary.BigEndian.PutUint16(buf[0:2], uint16(len(key)))
		buf[2] = entries.Type
		binary.BigEndian.PutUint16(buf[3:5], uint16(entries.Len()))
		// Append the key length and key
		if n, err = w.Write(buf[0:2]); err != nil {
			return int64(n) + N, fmt.Errorf("write: writer key length error: %v", err)
		}
		N += int64(n)
		if n, err = io.WriteString(w, key); err != nil {
			return int64(n) + N, fmt.Errorf("write: writer key error: %v", err)
		}
		N += int64(n)
		// Append the block type and count
		if n, err = w.Write(buf[2:5]); err != nil {
			return int64(n) + N, fmt.Errorf("write: writer block type and count error: %v", err)
		}
		N += int64(n)
		// Append each index entry for all blocks for this key
		var n64 int64
		if n64, err = entries.WriteTo(w); err != nil {
			return n64 + N, fmt.Errorf("write: writer entries error: %v", err)
		}
		N += n64
	}
	return N, nil
}
// MarshalBinary returns a byte slice encoded version of the index.
func (d *directIndex) MarshalBinary() ([]byte, error) {
	var b bytes.Buffer
	if _, err := d.WriteTo(&b); err != nil {
		return nil, err
	}
	return b.Bytes(), nil
}

// UnmarshalBinary populates the index from its binary encoding b, merging
// decoded entries into any already present.
func (d *directIndex) UnmarshalBinary(b []byte) error {
	d.mu.Lock()
	defer d.mu.Unlock()
	d.size = uint32(len(b))
	var pos int
	for pos < len(b) {
		n, key, err := readKey(b[pos:])
		if err != nil {
			return fmt.Errorf("readIndex: read key error: %v", err)
		}
		pos += n
		var entries indexEntries
		n, err = readEntries(b[pos:], &entries)
		if err != nil {
			return fmt.Errorf("readIndex: read entries error: %v", err)
		}
		pos += n
		d.addEntries(string(key), &entries)
	}
	return nil
}
// Size returns the size of the current index in bytes.
func (d *directIndex) Size() uint32 {
	// Fix: size is mutated under d.mu (see Add/UnmarshalBinary); the
	// previous unlocked read was a data race.
	d.mu.RLock()
	defer d.mu.RUnlock()
	return d.size
}
// tsmWriter writes keys and values in the TSM format.
type tsmWriter struct {
	wrapped io.Writer     // underlying writer; synced/closed when it supports it
	w       *bufio.Writer // buffered writer over wrapped
	index   IndexWriter   // accumulates index entries for WriteIndex
	n       int64         // bytes written so far (current file offset)
}

// NewTSMWriter returns a new TSMWriter writing to w.
func NewTSMWriter(w io.Writer) (TSMWriter, error) {
	index := &directIndex{
		blocks: map[string]*indexEntries{},
	}
	return &tsmWriter{wrapped: w, w: bufio.NewWriterSize(w, 1024*1024), index: index}, nil
}

// writeHeader writes the 5-byte file header (magic number + version) and
// initializes the file position counter.
func (t *tsmWriter) writeHeader() error {
	var buf [5]byte
	binary.BigEndian.PutUint32(buf[0:4], MagicNumber)
	buf[4] = Version
	n, err := t.w.Write(buf[:])
	if err != nil {
		return err
	}
	t.n = int64(n)
	return nil
}
// Write encodes values as a single block (prefixed by a CRC32 of the block
// data), appends it to the file, and records it in the index keyed by the
// first and last value timestamps.
func (t *tsmWriter) Write(key string, values Values) error {
	if len(key) > maxKeyLength {
		return ErrMaxKeyLengthExceeded
	}
	// Nothing to write
	if len(values) == 0 {
		return nil
	}
	// Write header only after we have some data to write.
	if t.n == 0 {
		if err := t.writeHeader(); err != nil {
			return err
		}
	}
	block, err := values.Encode(nil)
	if err != nil {
		return err
	}
	blockType, err := BlockType(block)
	if err != nil {
		return err
	}
	// CRC32 precedes the block data for block-level error detection.
	var checksum [crc32.Size]byte
	binary.BigEndian.PutUint32(checksum[:], crc32.ChecksumIEEE(block))
	_, err = t.w.Write(checksum[:])
	if err != nil {
		return err
	}
	n, err := t.w.Write(block)
	if err != nil {
		return err
	}
	n += len(checksum)
	// Record this block in index
	t.index.Add(key, blockType, values[0].UnixNano(), values[len(values)-1].UnixNano(), t.n, uint32(n))
	// Increment file position pointer
	t.n += int64(n)
	return nil
}
// WriteBlock writes block for the given key and time range to the TSM file. If the write
// exceeds max entries for a given key, ErrMaxBlocksExceeded is returned. This indicates
// that the index is now full for this key and no future writes to this key will succeed.
func (t *tsmWriter) WriteBlock(key string, minTime, maxTime int64, block []byte) error {
	if len(key) > maxKeyLength {
		return ErrMaxKeyLengthExceeded
	}
	// Nothing to write
	if len(block) == 0 {
		return nil
	}
	blockType, err := BlockType(block)
	if err != nil {
		return err
	}
	// Write header only after we have some data to write.
	if t.n == 0 {
		if err := t.writeHeader(); err != nil {
			return err
		}
	}
	// CRC32 precedes the block data for block-level error detection.
	var checksum [crc32.Size]byte
	binary.BigEndian.PutUint32(checksum[:], crc32.ChecksumIEEE(block))
	_, err = t.w.Write(checksum[:])
	if err != nil {
		return err
	}
	n, err := t.w.Write(block)
	if err != nil {
		return err
	}
	n += len(checksum)
	// Record this block in index
	t.index.Add(key, blockType, minTime, maxTime, t.n, uint32(n))
	// Increment file position pointer (checksum + block len)
	t.n += int64(n)
	// NOTE: the block was already written even when this error fires; the
	// error only signals that no further blocks may be added for this key.
	if len(t.index.Entries(key)) >= maxIndexEntries {
		return ErrMaxBlocksExceeded
	}
	return nil
}
// WriteIndex writes the index section of the file. If there are no index entries to write,
// this returns ErrNoValues.
func (t *tsmWriter) WriteIndex() error {
	indexPos := t.n
	if t.index.KeyCount() == 0 {
		return ErrNoValues
	}
	// Write the index
	if _, err := t.index.WriteTo(t.w); err != nil {
		return err
	}
	var buf [8]byte
	binary.BigEndian.PutUint64(buf[:], uint64(indexPos))
	// Write the 8-byte footer holding the index's start offset.
	_, err := t.w.Write(buf[:])
	return err
}

// Flush flushes buffered writes to the underlying writer and, when the
// target is an *os.File, syncs it to disk.
func (t *tsmWriter) Flush() error {
	if err := t.w.Flush(); err != nil {
		return err
	}
	if f, ok := t.wrapped.(*os.File); ok {
		if err := f.Sync(); err != nil {
			return err
		}
	}
	return nil
}

// Close flushes pending writes and closes the underlying writer if it
// implements io.Closer.
func (t *tsmWriter) Close() error {
	if err := t.Flush(); err != nil {
		return err
	}
	if c, ok := t.wrapped.(io.Closer); ok {
		return c.Close()
	}
	return nil
}

// Size returns the current size in bytes of the file: the data written so
// far plus the in-memory index's encoded size.
func (t *tsmWriter) Size() uint32 {
	return uint32(t.n) + t.index.Size()
}
// verifyVersion verifies that the reader's bytes are a TSM byte
// stream of the correct version (1)
func verifyVersion(r io.ReadSeeker) error {
_, err := r.Seek(0, 0)
if err != nil {
return fmt.Errorf("init: failed to seek: %v", err)
}
var b [4]byte
_, err = io.ReadFull(r, b[:])
if err != nil {
return fmt.Errorf("init: error reading magic number of file: %v", err)
}
if binary.BigEndian.Uint32(b[:]) != MagicNumber {
return fmt.Errorf("can only read from tsm file")
}
_, err = io.ReadFull(r, b[:1])
if err != nil {
return fmt.Errorf("init: error reading version: %v", err)
}
if b[0] != Version {
return fmt.Errorf("init: file is version %b. expected %b", b[0], Version)
}
return nil
} | Beam/go/vendor/github.com/influxdata/influxdb/tsdb/engine/tsm1/writer.go | 0.679817 | 0.751853 | writer.go | starcoder |
package ride
import (
"encoding/binary"
"math/big"
"strconv"
"github.com/ericlagergren/decimal"
"github.com/pkg/errors"
"github.com/wavesplatform/gowaves/pkg/ride/math"
)
// intArg extracts a single 'Int' argument from args, failing if the count,
// nilness, or type is wrong.
func intArg(args []rideType) (rideInt, error) {
	if len(args) != 1 {
		return 0, errors.Errorf("%d is invalid number of arguments, expected 1", len(args))
	}
	if args[0] == nil {
		return 0, errors.Errorf("argument 1 is empty")
	}
	l, ok := args[0].(rideInt)
	if !ok {
		return 0, errors.Errorf("argument 1 is not of type 'Int' but '%s'", args[0].instanceOf())
	}
	return l, nil
}

// twoIntArgs extracts exactly two 'Int' arguments from args.
func twoIntArgs(args []rideType) (rideInt, rideInt, error) {
	if len(args) != 2 {
		return 0, 0, errors.Errorf("%d is invalid number of arguments, expected 2", len(args))
	}
	if args[0] == nil {
		return 0, 0, errors.Errorf("argument 1 is empty")
	}
	if args[1] == nil {
		return 0, 0, errors.Errorf("argument 2 is empty")
	}
	l1, ok := args[0].(rideInt)
	if !ok {
		return 0, 0, errors.Errorf("argument 1 is not of type 'Int' but '%s'", args[0].instanceOf())
	}
	l2, ok := args[1].(rideInt)
	if !ok {
		return 0, 0, errors.Errorf("argument 2 is not of type 'Int' but '%s'", args[1].instanceOf())
	}
	return l1, l2, nil
}

// intArgs extracts exactly count 'Int' arguments from args.
func intArgs(args []rideType, count int) ([]rideInt, error) {
	if len(args) != count {
		return nil, errors.Errorf("%d is invalid number of arguments, expected %d", len(args), count)
	}
	r := make([]rideInt, len(args))
	for n, arg := range args {
		if arg == nil {
			return nil, errors.Errorf("argument %d is empty", n+1)
		}
		l, ok := arg.(rideInt)
		if !ok {
			return nil, errors.Errorf("argument %d is not of type 'Int' but '%s'", n+1, arg.instanceOf())
		}
		r[n] = l
	}
	return r, nil
}
// ge implements the Ride '>=' operator for two Int arguments.
func ge(_ environment, args ...rideType) (rideType, error) {
	l1, l2, err := twoIntArgs(args)
	if err != nil {
		return nil, errors.Wrap(err, "ge")
	}
	return rideBoolean(l1 >= l2), nil
}

// gt implements the Ride '>' operator for two Int arguments.
func gt(_ environment, args ...rideType) (rideType, error) {
	l1, l2, err := twoIntArgs(args)
	if err != nil {
		return nil, errors.Wrap(err, "gt")
	}
	return rideBoolean(l1 > l2), nil
}
// intToString converts an Int argument to its decimal string representation.
func intToString(_ environment, args ...rideType) (rideType, error) {
	l, err := intArg(args)
	if err != nil {
		return nil, errors.Wrap(err, "intToString")
	}
	// Use FormatInt instead of Itoa(int(l)): if rideInt is 64-bit, the
	// int(l) conversion would silently truncate on 32-bit platforms.
	return rideString(strconv.FormatInt(int64(l), 10)), nil
}
// unaryMinus negates an Int argument.
func unaryMinus(_ environment, args ...rideType) (rideType, error) {
	l, err := intArg(args)
	if err != nil {
		return nil, errors.Wrap(err, "unaryMinus")
	}
	return -l, nil
}

// sum adds two Int arguments.
// NOTE(review): overflow wraps silently — confirm this matches the Ride spec.
func sum(_ environment, args ...rideType) (rideType, error) {
	l1, l2, err := twoIntArgs(args)
	if err != nil {
		return nil, errors.Wrap(err, "sum")
	}
	return l1 + l2, nil
}

// sub subtracts the second Int argument from the first.
func sub(_ environment, args ...rideType) (rideType, error) {
	l1, l2, err := twoIntArgs(args)
	if err != nil {
		return nil, errors.Wrap(err, "sub")
	}
	return l1 - l2, nil
}

// mul multiplies two Int arguments.
func mul(_ environment, args ...rideType) (rideType, error) {
	l1, l2, err := twoIntArgs(args)
	if err != nil {
		return nil, errors.Wrap(err, "mul")
	}
	return l1 * l2, nil
}

// div divides the first Int argument by the second using floored division.
func div(_ environment, args ...rideType) (rideType, error) {
	l1, l2, err := twoIntArgs(args)
	if err != nil {
		return nil, errors.Wrap(err, "div")
	}
	if l2 == 0 {
		return nil, errors.New("div: division by zero")
	}
	return rideInt(math.FloorDiv(int64(l1), int64(l2))), nil
}

// mod returns the remainder of the first Int argument modulo the second,
// using the modulo that pairs with floored division.
func mod(_ environment, args ...rideType) (rideType, error) {
	i1, i2, err := twoIntArgs(args)
	if err != nil {
		return nil, errors.Wrap(err, "mod")
	}
	if i2 == 0 {
		return nil, errors.New("mod: division by zero")
	}
	return rideInt(math.ModDivision(int64(i1), int64(i2))), nil
}
func fraction(_ environment, args ...rideType) (rideType, error) {
values, err := intArgs(args, 3)
if err != nil {
return nil, errors.Wrap(err, "fraction")
}
res, err := math.Fraction(int64(values[0]), int64(values[1]), int64(values[2]))
if err != nil {
return nil, errors.Wrap(err, "fraction")
}
return rideInt(res), nil
}
// fractionIntRounds computes value * numerator / denominator with an explicit
// rounding mode (4th argument), using big.Int arithmetic so the intermediate
// product cannot overflow; only the final result must fit in int64.
// Errors are deliberately reported under the RIDE function name "fraction".
func fractionIntRounds(_ environment, args ...rideType) (rideType, error) {
	if err := checkArgs(args, 4); err != nil {
		return nil, errors.Wrap(err, "fraction")
	}
	value, ok := args[0].(rideInt)
	if !ok {
		return nil, errors.Errorf("fraction: unexpected argument type '%s'", args[0].instanceOf())
	}
	v := big.NewInt(int64(value))
	numerator, ok := args[1].(rideInt)
	if !ok {
		return nil, errors.Errorf("fraction: unexpected argument type '%s'", args[1].instanceOf())
	}
	n := big.NewInt(int64(numerator))
	denominator, ok := args[2].(rideInt)
	if !ok {
		return nil, errors.Errorf("fraction: unexpected argument type '%s'", args[2].instanceOf())
	}
	d := big.NewInt(int64(denominator))
	round, err := roundingMode(args[3])
	if err != nil {
		return nil, errors.Wrap(err, "fraction")
	}
	// fractionBigIntLikeInScala mirrors the reference (Scala) node's
	// rounding behavior for this operation.
	r, err := fractionBigIntLikeInScala(v, n, d, round)
	if err != nil {
		return nil, errors.Wrap(err, "fraction")
	}
	if !r.IsInt64() {
		return nil, errors.New("fraction: result is out of int64 range")
	}
	return rideInt(r.Int64()), nil
}
// intToBytes serializes its single integer argument as 8 big-endian bytes
// (two's-complement for negative values, via the uint64 conversion).
func intToBytes(_ environment, args ...rideType) (rideType, error) {
	i, err := intArg(args)
	if err != nil {
		return nil, errors.Wrap(err, "intToBytes")
	}
	out := make([]byte, 8)
	binary.BigEndian.PutUint64(out, uint64(i))
	return rideBytes(out), nil
}
// pow computes base^exponent in fixed-point arithmetic.
// Arguments: base, base precision (bp), exponent, exponent precision (ep),
// result precision (rp), and a rounding-mode object.
func pow(env environment, args ...rideType) (rideType, error) {
	if err := checkArgs(args, 6); err != nil {
		return nil, errors.Wrap(err, "pow")
	}
	base, ok := args[0].(rideInt)
	if !ok {
		return nil, errors.Errorf("pow: unexpected argument type '%s'", args[0].instanceOf())
	}
	bp, ok := args[1].(rideInt)
	if !ok {
		return nil, errors.Errorf("pow: unexpected argument type '%s'", args[1].instanceOf())
	}
	exponent, ok := args[2].(rideInt)
	if !ok {
		return nil, errors.Errorf("pow: unexpected argument type '%s'", args[2].instanceOf())
	}
	ep, ok := args[3].(rideInt)
	if !ok {
		return nil, errors.Errorf("pow: unexpected argument type '%s'", args[3].instanceOf())
	}
	rp, ok := args[4].(rideInt)
	if !ok {
		return nil, errors.Errorf("pow: unexpected argument type '%s'", args[4].instanceOf())
	}
	round, err := roundingMode(args[5])
	if err != nil {
		return nil, errors.Wrap(err, "pow")
	}
	// NOTE(review): validateInternalPayments() appears to be reused here as
	// a feature/version switch selecting the newer pow implementation —
	// confirm this coupling against the feature-activation logic.
	f := math.PowV1
	if env.validateInternalPayments() {
		f = math.PowV2
	}
	r, err := f(int64(base), int64(exponent), int(bp), int(ep), int(rp), round)
	if err != nil {
		return nil, errors.Wrap(err, "pow")
	}
	return rideInt(r), nil
}
// log computes log_base(exponent) in fixed-point arithmetic.
// Arguments mirror pow: base, base precision (bp), exponent, exponent
// precision (ep), result precision (rp), and a rounding-mode object.
func log(_ environment, args ...rideType) (rideType, error) {
	if err := checkArgs(args, 6); err != nil {
		return nil, errors.Wrap(err, "log")
	}
	base, ok := args[0].(rideInt)
	if !ok {
		return nil, errors.Errorf("log: unexpected argument type '%s'", args[0].instanceOf())
	}
	bp, ok := args[1].(rideInt)
	if !ok {
		return nil, errors.Errorf("log: unexpected argument type '%s'", args[1].instanceOf())
	}
	exponent, ok := args[2].(rideInt)
	if !ok {
		return nil, errors.Errorf("log: unexpected argument type '%s'", args[2].instanceOf())
	}
	ep, ok := args[3].(rideInt)
	if !ok {
		return nil, errors.Errorf("log: unexpected argument type '%s'", args[3].instanceOf())
	}
	rp, ok := args[4].(rideInt)
	if !ok {
		return nil, errors.Errorf("log: unexpected argument type '%s'", args[4].instanceOf())
	}
	round, err := roundingMode(args[5])
	if err != nil {
		return nil, errors.Wrap(err, "log")
	}
	r, err := math.Log(int64(base), int64(exponent), int(bp), int(ep), int(rp), round)
	if err != nil {
		return nil, errors.Wrap(err, "log")
	}
	return rideInt(r), nil
}
// sqrt computes the square root of a fixed-point number.
// Arguments: number, its precision (np), result precision (rp), and a
// rounding-mode object.
func sqrt(_ environment, args ...rideType) (rideType, error) {
	if err := checkArgs(args, 4); err != nil {
		return nil, errors.Wrap(err, "sqrt")
	}
	n, ok := args[0].(rideInt)
	if !ok {
		return nil, errors.Errorf("sqrt: unexpected argument type '%s'", args[0].instanceOf())
	}
	np, ok := args[1].(rideInt)
	if !ok {
		return nil, errors.Errorf("sqrt: unexpected argument type '%s'", args[1].instanceOf())
	}
	rp, ok := args[2].(rideInt)
	if !ok {
		return nil, errors.Errorf("sqrt: unexpected argument type '%s'", args[2].instanceOf())
	}
	round, err := roundingMode(args[3])
	if err != nil {
		return nil, errors.Wrap(err, "sqrt")
	}
	r, err := math.Sqrt(int64(n), int(np), int(rp), round)
	if err != nil {
		return nil, errors.Wrap(err, "sqrt")
	}
	return rideInt(r), nil
}
func roundingMode(v rideType) (decimal.RoundingMode, error) {
switch v.instanceOf() {
case "Ceiling":
return decimal.ToPositiveInf, nil
case "Floor":
return decimal.ToNegativeInf, nil
case "HalfEven":
return decimal.ToNearestEven, nil
case "Down":
return decimal.ToZero, nil
case "Up": // round-up v2-v4
return decimal.AwayFromZero, nil
case "HalfUp":
return decimal.ToNearestAway, nil
case "HalfDown": // round-half-down v2-v4
return decimal.ToNearestTowardZero, nil
default:
return 0, errors.Errorf("unable to get rounding mode from '%s'", v.instanceOf())
}
} | pkg/ride/functions_int.go | 0.551574 | 0.445891 | functions_int.go | starcoder |
package onshape
import (
"encoding/json"
)
// BTPStatementBlock271AllOf struct for BTPStatementBlock271AllOf
// (generated API model; both fields are optional pointers that are nil when unset).
type BTPStatementBlock271AllOf struct {
	// BtType is the serialized type discriminator for this model.
	BtType *string `json:"btType,omitempty"`
	// SpaceAfterOpen presumably captures formatting whitespace after an
	// opening token — confirm against the Onshape API docs.
	SpaceAfterOpen *BTPSpace10 `json:"spaceAfterOpen,omitempty"`
}
// NewBTPStatementBlock271AllOf instantiates a new BTPStatementBlock271AllOf object
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed
func NewBTPStatementBlock271AllOf() *BTPStatementBlock271AllOf {
	this := BTPStatementBlock271AllOf{}
	return &this
}

// NewBTPStatementBlock271AllOfWithDefaults instantiates a new BTPStatementBlock271AllOf object
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set
// (currently identical to NewBTPStatementBlock271AllOf since no defaults are defined).
func NewBTPStatementBlock271AllOfWithDefaults() *BTPStatementBlock271AllOf {
	this := BTPStatementBlock271AllOf{}
	return &this
}
// GetBtType returns the BtType field value if set, zero value otherwise.
// Safe to call on a nil receiver.
func (o *BTPStatementBlock271AllOf) GetBtType() string {
	if o == nil || o.BtType == nil {
		var ret string
		return ret
	}
	return *o.BtType
}

// GetBtTypeOk returns a tuple with the BtType field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BTPStatementBlock271AllOf) GetBtTypeOk() (*string, bool) {
	if o == nil || o.BtType == nil {
		return nil, false
	}
	return o.BtType, true
}
// HasBtType returns a boolean if a field has been set.
// A nil receiver is treated as "not set", mirroring the Get/GetOk accessors.
func (o *BTPStatementBlock271AllOf) HasBtType() bool {
	return o != nil && o.BtType != nil
}
// SetBtType gets a reference to the given string and assigns it to the BtType field.
// Note: the stored pointer refers to a copy of v, not the caller's variable.
func (o *BTPStatementBlock271AllOf) SetBtType(v string) {
	o.BtType = &v
}
// GetSpaceAfterOpen returns the SpaceAfterOpen field value if set, zero value otherwise.
// Safe to call on a nil receiver.
func (o *BTPStatementBlock271AllOf) GetSpaceAfterOpen() BTPSpace10 {
	if o == nil || o.SpaceAfterOpen == nil {
		var ret BTPSpace10
		return ret
	}
	return *o.SpaceAfterOpen
}

// GetSpaceAfterOpenOk returns a tuple with the SpaceAfterOpen field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BTPStatementBlock271AllOf) GetSpaceAfterOpenOk() (*BTPSpace10, bool) {
	if o == nil || o.SpaceAfterOpen == nil {
		return nil, false
	}
	return o.SpaceAfterOpen, true
}
// HasSpaceAfterOpen returns a boolean if a field has been set.
// A nil receiver is treated as "not set", mirroring the Get/GetOk accessors.
func (o *BTPStatementBlock271AllOf) HasSpaceAfterOpen() bool {
	return o != nil && o.SpaceAfterOpen != nil
}
// SetSpaceAfterOpen gets a reference to the given BTPSpace10 and assigns it to the SpaceAfterOpen field.
// Note: the stored pointer refers to a copy of v, not the caller's variable.
func (o *BTPStatementBlock271AllOf) SetSpaceAfterOpen(v BTPSpace10) {
	o.SpaceAfterOpen = &v
}
// MarshalJSON serializes only the fields that have been set, so unset
// optional properties are omitted from the output entirely.
func (o BTPStatementBlock271AllOf) MarshalJSON() ([]byte, error) {
	toSerialize := map[string]interface{}{}
	if o.BtType != nil {
		toSerialize["btType"] = o.BtType
	}
	if o.SpaceAfterOpen != nil {
		toSerialize["spaceAfterOpen"] = o.SpaceAfterOpen
	}
	return json.Marshal(toSerialize)
}
// NullableBTPStatementBlock271AllOf wraps a BTPStatementBlock271AllOf pointer
// together with an explicit "set" flag so that absent, null, and present JSON
// values can be distinguished.
type NullableBTPStatementBlock271AllOf struct {
	value *BTPStatementBlock271AllOf
	isSet bool
}

// Get returns the wrapped value (may be nil).
func (v NullableBTPStatementBlock271AllOf) Get() *BTPStatementBlock271AllOf {
	return v.value
}

// Set stores val and marks the wrapper as set.
func (v *NullableBTPStatementBlock271AllOf) Set(val *BTPStatementBlock271AllOf) {
	v.value = val
	v.isSet = true
}

// IsSet reports whether a value has been explicitly assigned.
func (v NullableBTPStatementBlock271AllOf) IsSet() bool {
	return v.isSet
}

// Unset clears both the value and the set flag.
func (v *NullableBTPStatementBlock271AllOf) Unset() {
	v.value = nil
	v.isSet = false
}

// NewNullableBTPStatementBlock271AllOf returns a wrapper already marked as set.
func NewNullableBTPStatementBlock271AllOf(val *BTPStatementBlock271AllOf) *NullableBTPStatementBlock271AllOf {
	return &NullableBTPStatementBlock271AllOf{value: val, isSet: true}
}

// MarshalJSON encodes the wrapped value (JSON null when the value is nil).
func (v NullableBTPStatementBlock271AllOf) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}

// UnmarshalJSON decodes into the wrapped value and marks the wrapper as set,
// even when the input is JSON null (value stays nil in that case).
func (v *NullableBTPStatementBlock271AllOf) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
}
package assert
import (
"bytes"
"fmt"
"strings"
"text/tabwriter"
"unicode"
"github.com/google/gapid/core/data/compare"
)
type (
	// level controls which Output method receives the buffered assertion
	// text when it is flushed (see Commit).
	level int

	// Assertion is the type for the start of an assertion line.
	// You construct an assertion from an Output using assert.For.
	Assertion struct {
		level level         // severity applied when the buffer is committed
		out   *bytes.Buffer // pending, tab-separated assertion text
		to    Output        // destination that receives the committed message
	}
)
// Severity levels, in increasing order. The ordering matters: Test compares
// with <= to promote the level to at least Error on failure.
const (
	// Log is the informational level.
	Log = level(iota)
	// Error is used for things that cause test failures but do not abort.
	Error
	// Fatal is used for failures that cause the running test to immediately stop.
	Fatal
)
// String returns the human-readable name of the level; this name is used as
// the prefix of committed assertion messages. Unknown values map to "Unknown".
func (l level) String() string {
	switch l {
	case Fatal:
		return "Critical"
	case Error:
		return "Error"
	case Log:
		return "Info"
	}
	return "Unknown"
}
// Critical switches this assertion from Error to Fatal.
func (a *Assertion) Critical() *Assertion {
a.level = Fatal
return a
}
// Log appends the supplied message to the cached output, and then flushes to the underlying output at Log level.
func (a *Assertion) Log(args ...interface{}) {
fmt.Fprint(a.out, args...)
a.level = Log
a.Commit()
}
// Error appends the supplied message to the cached output, and then flushes to the underlying output at Error level.
func (a *Assertion) Error(args ...interface{}) {
fmt.Fprint(a.out, args...)
a.level = Error
a.Commit()
}
// Fatal appends the supplied message to the cached output, and then flushes to the underlying output at Fatal level.
func (a *Assertion) Fatal(args ...interface{}) {
fmt.Fprint(a.out, args...)
a.level = Fatal
a.Commit()
}
// PrintPretty writes a value to the output buffer.
// It performs standardised transformations (mostly quoting):
// errors and strings are wrapped in backquotes; everything else is printed
// with fmt's default formatting. The value receiver is fine here because
// out is a pointer and the method only writes through it.
func (a Assertion) PrintPretty(value interface{}) {
	switch value := value.(type) {
	case error:
		a.out.WriteRune('`')
		fmt.Fprint(a.out, value)
		a.out.WriteRune('`')
	case string:
		a.out.WriteRune('`')
		a.out.WriteString(value)
		a.out.WriteRune('`')
	default:
		fmt.Fprint(a.out, value)
	}
}
// Print writes a set of values to the output buffer, joined by tabs.
// The values will be printed with PrintValue.
func (a *Assertion) Print(args ...interface{}) *Assertion {
if len(args) == 0 {
return a
}
for i, v := range args {
if i != 0 {
a.out.WriteString("\t")
}
a.PrintPretty(v)
}
return a
}
// Raw writes a set of values to the output buffer, joined by tabs.
// It does not use the pretty printer.
func (a *Assertion) Raw(args ...interface{}) *Assertion {
if len(args) == 0 {
return a
}
for i, v := range args {
if i != 0 {
a.out.WriteString("\t")
}
fmt.Fprint(a.out, v)
}
return a
}
// Println prints the values using Print and then starts a new indented line.
func (a *Assertion) Println(args ...interface{}) *Assertion {
a.Print(args...)
a.out.WriteString("\n ")
return a
}
// Println prints the values using Print and then starts a new indented line.
func (a *Assertion) Rawln(args ...interface{}) *Assertion {
a.Raw(args...)
a.out.WriteString("\n ")
return a
}
// Printf writes a formatted unquoted string to the output buffer.
// It bypasses the pretty printer entirely.
func (a *Assertion) Printf(format string, args ...interface{}) *Assertion {
	fmt.Fprintf(a.out, format, args...)
	return a
}
// Add appends a key value pair to the output buffer.
// The double tab separates the key column from the value column for tabwriter.
func (a *Assertion) Add(key string, values ...interface{}) *Assertion {
	a.out.WriteString(key)
	a.out.WriteString("\t\t")
	a.Println(values...)
	return a
}

// Got adds the standard "Got" entry to the output buffer.
func (a *Assertion) Got(values ...interface{}) *Assertion {
	a.out.WriteString("Got\t\t")
	a.Println(values...)
	return a
}

// Expect adds the standard "Expect" entry to the output buffer, with the
// operator in its own tab column.
func (a *Assertion) Expect(op string, values ...interface{}) *Assertion {
	a.out.WriteString("Expect\t")
	a.out.WriteString(op)
	a.out.WriteString("\t")
	a.Println(values...)
	return a
}

// ExpectRaw adds the standard "Expect" entry to the output buffer, without pretty printing.
func (a *Assertion) ExpectRaw(op string, values ...interface{}) *Assertion {
	a.out.WriteString("Expect\t")
	a.out.WriteString(op)
	a.out.WriteString("\t")
	a.Rawln(values...)
	return a
}
// Compare adds both the "Got" and "Expect" entries to the output buffer, with the operator being
// prepended to the expect list.
func (a *Assertion) Compare(value interface{}, op string, expect ...interface{}) *Assertion {
	return a.Got(value).Expect(op, expect...)
}

// CompareRaw is like Compare except it does not push the values through the pretty printer.
func (a *Assertion) CompareRaw(value interface{}, op string, expect ...interface{}) *Assertion {
	return a.Got(value).ExpectRaw(op, expect...)
}
// Test commits the pending output if the condition is not true.
// On failure the level is promoted to at least Error before committing.
// The condition is returned unchanged so callers can chain on it.
func (a *Assertion) Test(condition bool) bool {
	if !condition {
		if a.level <= Error {
			a.level = Error
		}
		a.Commit()
	}
	return condition
}
// TestDeepEqual adds the entries for Got and Expect, then tests if they are the same using
// compare.DeepEqual, committing if they are not.
func (a *Assertion) TestDeepEqual(value interface{}, expect interface{}) bool {
	return a.Compare(value, "deep ==", expect).Test(compare.DeepEqual(value, expect))
}

// TestDeepNotEqual adds the entries for Got and Expect, then tests if they are the same using
// compare.DeepEqual, committing if they are.
func (a *Assertion) TestDeepNotEqual(value interface{}, expect interface{}) bool {
	return a.Compare(value, "deep !=", expect).Test(!compare.DeepEqual(value, expect))
}
// TestDeepDiff is equivalent to TestDeepEqual except it also prints a diff
// (up to 10 entries) instead of the Got/Expect pair.
// NOTE(review): unlike Test, the failure path here does not promote the
// level to Error before Commit — confirm whether that is intentional.
func (a *Assertion) TestDeepDiff(value interface{}, expect interface{}) bool {
	diff := compare.Diff(value, expect, 10)
	if len(diff) == 0 {
		return true
	}
	for _, diff := range diff {
		a.Println(diff)
	}
	a.Commit()
	return false
}
// Commit writes the output lines to the main output object.
// The buffered text is first run through a tabwriter so the tab-separated
// columns produced by Print/Add/Got/Expect line up; trailing whitespace is
// trimmed and the level name is prepended. Errors from Write/Flush are
// ignored: both target an in-memory bytes.Buffer and cannot fail.
func (a Assertion) Commit() {
	// push the output buffer through a tabwriter to align columns
	buf := &bytes.Buffer{}
	tabs := tabwriter.NewWriter(buf, 1, 4, 1, ' ', tabwriter.StripEscape)
	tabs.Write(a.out.Bytes())
	tabs.Flush()
	message := a.level.String() + ":" + strings.TrimRightFunc(buf.String(), unicode.IsSpace)
	switch a.level {
	case Log:
		a.to.Log(message)
	case Error:
		a.to.Error(message)
	case Fatal:
		a.to.Fatal(message)
	default:
		a.to.Log(message)
	}
}
package matrixexp
import (
"github.com/gonum/blas/blas64"
)
// Add represents matrix addition as a lazy expression node: the sum is not
// computed until Eval is called.
type Add struct {
	Left  MatrixExp
	Right MatrixExp
}
// String implements the Stringer interface, rendering the expression in
// method-call form, e.g. "A.Add(B)".
func (m1 *Add) String() string {
	return m1.Left.String() + ".Add(" + m1.Right.String() + ")"
}
// Dims returns the matrix dimensions. Addition requires both operands to
// share a shape (checked in Err), so the left operand's dimensions are used.
func (m1 *Add) Dims() (r, c int) {
	return m1.Left.Dims()
}
// At returns the value at a given row, column index, computed lazily as the
// element-wise sum of the operands.
func (m1 *Add) At(r, c int) float64 {
	return m1.Left.At(r, c) + m1.Right.At(r, c)
}
// Eval returns a matrix literal holding the element-wise sum of both
// operands. The left operand's vector is reused as the result storage.
func (m1 *Add) Eval() MatrixLiteral {
	rows, cols := m1.Dims()
	left := m1.Left.Eval().AsVector()
	right := m1.Right.Eval().AsVector()
	for i := range right {
		left[i] += right[i]
	}
	return &General{blas64.General{
		Rows:   rows,
		Cols:   cols,
		Stride: cols,
		Data:   left,
	}}
}
// Copy creates a (deep) copy of the Matrix Expression by recursively copying
// both operands.
func (m1 *Add) Copy() MatrixExp {
	return &Add{
		Left:  m1.Left.Copy(),
		Right: m1.Right.Copy(),
	}
}
// Err returns the first error encountered while constructing the matrix expression.
// Operand errors take priority; otherwise the operand shapes must agree
// element-wise for addition to be valid.
func (m1 *Add) Err() error {
	if err := m1.Left.Err(); err != nil {
		return err
	}
	if err := m1.Right.Err(); err != nil {
		return err
	}
	r1, c1 := m1.Left.Dims()
	r2, c2 := m1.Right.Dims()
	if r1 != r2 || c1 != c2 {
		return ErrDimMismatch{
			R1: r1,
			C1: c1,
			R2: r2,
			C2: c2,
		}
	}
	return nil
}
// T transposes a matrix (lazily, by wrapping in a T node).
func (m1 *Add) T() MatrixExp {
	return &T{m1}
}

// Add two matrices together (lazily).
func (m1 *Add) Add(m2 MatrixExp) MatrixExp {
	return &Add{
		Left:  m1,
		Right: m2,
	}
}

// Sub subtracts the right matrix from the left matrix (lazily).
func (m1 *Add) Sub(m2 MatrixExp) MatrixExp {
	return &Sub{
		Left:  m1,
		Right: m2,
	}
}

// Scale performs scalar multiplication (lazily).
func (m1 *Add) Scale(c float64) MatrixExp {
	return &Scale{
		C: c,
		M: m1,
	}
}

// Mul performs matrix multiplication (lazily).
func (m1 *Add) Mul(m2 MatrixExp) MatrixExp {
	return &Mul{
		Left:  m1,
		Right: m2,
	}
}

// MulElem performs element-wise multiplication (lazily).
func (m1 *Add) MulElem(m2 MatrixExp) MatrixExp {
	return &MulElem{
		Left:  m1,
		Right: m2,
	}
}

// DivElem performs element-wise division (lazily).
func (m1 *Add) DivElem(m2 MatrixExp) MatrixExp {
	return &DivElem{
		Left:  m1,
		Right: m2,
	}
}
package secp256k1
import (
"crypto/elliptic"
"math/big"
"sync"
)
var (
	initonce  sync.Once                    // guards one-time curve initialization
	secp256k1 *secp256k1Curve              // lazily initialized singleton
	three     = new(big.Int).SetUint64(3) // constant 3, used in point doubling (e = 3a)
)

// secp256k1Curve implements elliptic.Curve for secp256k1, whose equation is
// y² = x³ + 7 (a = 0). The generic CurveParams arithmetic assumes a = -3, so
// the group operations are overridden below.
type secp256k1Curve struct {
	elliptic.CurveParams
}
// initSECP256K1 fills in the secp256k1 domain parameters from SEC 2 v2
// (http://www.secg.org/sec2-v2.pdf). The SetString ok results are ignored:
// the hex literals are fixed, known-valid constants.
func initSECP256K1() {
	// http://www.secg.org/sec2-v2.pdf
	secp256k1 = &secp256k1Curve{elliptic.CurveParams{Name: "secp256k1"}}
	secp256k1.P, _ = new(big.Int).SetString("fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f", 16)
	secp256k1.N, _ = new(big.Int).SetString("fffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141", 16)
	secp256k1.B, _ = new(big.Int).SetString("0000000000000000000000000000000000000000000000000000000000000007", 16)
	secp256k1.Gx, _ = new(big.Int).SetString("79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798", 16)
	secp256k1.Gy, _ = new(big.Int).SetString("483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8", 16)
	secp256k1.BitSize = 256
}
// SECP256K1 -- returns the secp256k1 curve singleton, initializing it on
// first use (safe for concurrent callers via sync.Once).
func SECP256K1() elliptic.Curve {
	initonce.Do(initSECP256K1)
	return secp256k1
}

// Params -- returns elliptic curve params
func (curve *secp256k1Curve) Params() *elliptic.CurveParams {
	return &curve.CurveParams
}
// IsOnCurve -- check the point (x,y) is on the curve or not.
// For secp256k1 the test is y² ≡ x³ + B (mod P); there is no a·x term.
func (curve *secp256k1Curve) IsOnCurve(x, y *big.Int) bool {
	// y² = x³ + b
	var y2, x3 big.Int

	y2.Mul(y, y)         //y²
	y2.Mod(&y2, curve.P) //y²%P

	x3.Mul(x, x)         //x²
	x3.Mul(&x3, x)       //x³
	x3.Add(&x3, curve.B) //x³+B
	x3.Mod(&x3, curve.P) //(x³+B)%P

	return x3.Cmp(&y2) == 0
}
// zForAffine --
// returns a Jacobian Z value for the affine point (x, y). If x and
// y are zero, it assumes that they represent the point at infinity because (0,
// 0) is not on the any of the curves handled here.
func zForAffine(x, y *big.Int) *big.Int {
z := new(big.Int)
if x.Sign() != 0 || y.Sign() != 0 {
z.SetInt64(1)
}
return z
}
// affineFromJacobian --
// reverses the Jacobian transform. If the point is ∞ (z == 0) it returns 0, 0.
// For a given (x, y) position on the curve, the Jacobian coordinates are (x1, y1, z1)
// where x = x1/z1² and y = y1/z1³. The greatest speedups come when the whole
// calculation can be performed within the transform (as in ScalarMult and
// ScalarBaseMult). But even for Add and Double, it's faster to apply and
// reverse the transform than to operate in affine coordinates.
func (curve *secp256k1Curve) affineFromJacobian(x, y, z *big.Int) (xOut, yOut *big.Int) {
	if z.Sign() == 0 {
		return new(big.Int), new(big.Int)
	}

	// z is non-zero here, so the modular inverse exists (P is prime).
	zinv := new(big.Int).ModInverse(z, curve.P)
	zinvsq := new(big.Int).Mul(zinv, zinv)

	xOut = new(big.Int).Mul(x, zinvsq) // x/z²
	xOut.Mod(xOut, curve.P)
	zinvsq.Mul(zinvsq, zinv)
	yOut = new(big.Int).Mul(y, zinvsq) // y/z³
	yOut.Mod(yOut, curve.P)
	return
}
// Add -- returns the sum of (x1,y1) and (x2,y2).
// Both points are lifted to Jacobian coordinates, added there, and projected
// back to affine form.
func (curve *secp256k1Curve) Add(x1, y1, x2, y2 *big.Int) (*big.Int, *big.Int) {
	z1 := zForAffine(x1, y1)
	z2 := zForAffine(x2, y2)
	return curve.affineFromJacobian(curve.addJacobian(x1, y1, z1, x2, y2, z2))
}
// addJacobian --
// takes two points in Jacobian coordinates, (x1, y1, z1) and
// (x2, y2, z2) and returns their sum, also in Jacobian form.
// If the inputs are the same point it falls back to doubleJacobian, since
// the addition formula degenerates in that case.
func (curve *secp256k1Curve) addJacobian(x1, y1, z1, x2, y2, z2 *big.Int) (*big.Int, *big.Int, *big.Int) {
	// See http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian-0.html#addition-add-2007-bl
	x3, y3, z3 := new(big.Int), new(big.Int), new(big.Int)

	// ∞ + Q = Q
	if z1.Sign() == 0 {
		x3.Set(x2)
		y3.Set(y2)
		z3.Set(z2)
		return x3, y3, z3
	}
	// P + ∞ = P
	if z2.Sign() == 0 {
		x3.Set(x1)
		y3.Set(y1)
		z3.Set(z1)
		return x3, y3, z3
	}

	z1z1 := new(big.Int).Mul(z1, z1) //z1²
	z1z1.Mod(z1z1, curve.P)          //z1²%P
	z2z2 := new(big.Int).Mul(z2, z2) //z2²
	z2z2.Mod(z2z2, curve.P)          //z2²%P

	u1 := new(big.Int).Mul(x1, z2z2) //x1*z2²
	u1.Mod(u1, curve.P)              //(x1*z2²)%P
	u2 := new(big.Int).Mul(x2, z1z1) //x2*z1²
	u2.Mod(u2, curve.P)              //(x2*z1²)%P
	h := new(big.Int).Sub(u2, u1)    //u2-u1
	xEqual := h.Sign() == 0
	if h.Sign() == -1 {
		h.Add(h, curve.P)
	}
	i := new(big.Int).Lsh(h, 1) // i = (2h)²
	i.Mul(i, i)
	j := new(big.Int).Mul(h, i)

	s1 := new(big.Int).Mul(y1, z2) // s1 = y1*z2³
	s1.Mul(s1, z2z2)
	s1.Mod(s1, curve.P)
	s2 := new(big.Int).Mul(y2, z1) // s2 = y2*z1³
	s2.Mul(s2, z1z1)
	s2.Mod(s2, curve.P)
	r := new(big.Int).Sub(s2, s1)
	if r.Sign() == -1 {
		r.Add(r, curve.P)
	}
	yEqual := r.Sign() == 0
	// Same x and same y: the points are identical, so double instead.
	if xEqual && yEqual {
		return curve.doubleJacobian(x1, y1, z1)
	}
	r.Lsh(r, 1) // r = 2*(s2-s1)
	v := new(big.Int).Mul(u1, i)

	x3.Set(r)
	x3.Mul(x3, x3)
	x3.Sub(x3, j)
	x3.Sub(x3, v)
	x3.Sub(x3, v)
	x3.Mod(x3, curve.P)

	y3.Set(r)
	v.Sub(v, x3)
	y3.Mul(y3, v)
	s1.Mul(s1, j)
	s1.Lsh(s1, 1)
	y3.Sub(y3, s1)
	y3.Mod(y3, curve.P)

	z3.Add(z1, z2)
	z3.Mul(z3, z3)
	z3.Sub(z3, z1z1)
	z3.Sub(z3, z2z2)
	z3.Mul(z3, h)
	z3.Mod(z3, curve.P)

	return x3, y3, z3
}
// Double -- returns 2*(x,y).
// The point is lifted to Jacobian coordinates, doubled, and projected back.
func (curve *secp256k1Curve) Double(x1, y1 *big.Int) (*big.Int, *big.Int) {
	z1 := zForAffine(x1, y1)
	return curve.affineFromJacobian(curve.doubleJacobian(x1, y1, z1))
}
// doubleJacobian -- takes a point in Jacobian coordinates, (x, y, z),
// and returns its double, also in Jacobian form.
// The intermediate subtractions can go negative, hence the Sign checks that
// add P back before reducing.
func (curve *secp256k1Curve) doubleJacobian(x, y, z *big.Int) (*big.Int, *big.Int, *big.Int) {
	// See http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian-0.html#doubling-dbl-2009-l
	var a, b, c, d, e, f, x3, y3, z3 big.Int

	a.Mul(x, x) // a = x²
	a.Mod(&a, curve.P)
	b.Mul(y, y) // b = y²
	b.Mod(&b, curve.P)
	c.Mul(&b, &b) // c = b²
	c.Mod(&c, curve.P)

	d.Add(x, &b) // d = 2*((x+b)² - a - c)
	d.Mul(&d, &d)
	d.Sub(&d, &a)
	d.Sub(&d, &c)
	d.Lsh(&d, 1)
	if d.Sign() < 0 {
		d.Add(&d, curve.P)
	} else {
		d.Mod(&d, curve.P)
	}

	e.Mul(three, &a) // e = 3a
	e.Mod(&e, curve.P)
	f.Mul(&e, &e) // f = e²
	f.Mod(&f, curve.P)

	x3.Lsh(&d, 1) // x3 = f - 2d
	x3.Sub(&f, &x3)
	if x3.Sign() < 0 {
		x3.Add(&x3, curve.P)
	} else {
		x3.Mod(&x3, curve.P)
	}

	y3.Sub(&d, &x3) // y3 = e*(d - x3) - 8c
	y3.Mul(&e, &y3)
	c.Lsh(&c, 3)
	y3.Sub(&y3, &c)
	if y3.Sign() < 0 {
		y3.Add(&y3, curve.P)
	} else {
		y3.Mod(&y3, curve.P)
	}

	z3.Mul(y, z) // z3 = 2*y*z
	z3.Lsh(&z3, 1)
	z3.Mod(&z3, curve.P)

	return &x3, &y3, &z3
}
// ScalarMult -- returns k*(Bx,By) where k is a number in big-endian form.
// It walks the scalar bits most-significant first with a double-and-add
// ladder over Jacobian coordinates. Note: the add is conditional on each
// secret bit, so this implementation is not constant-time.
func (curve *secp256k1Curve) ScalarMult(Bx, By *big.Int, k []byte) (*big.Int, *big.Int) {
	Bz := new(big.Int).SetInt64(1)
	x, y, z := new(big.Int), new(big.Int), new(big.Int)

	for _, b := range k {
		// Renamed from 'byte', which shadowed the predeclared type identifier.
		for bitNum := 0; bitNum < 8; bitNum++ {
			x, y, z = curve.doubleJacobian(x, y, z)
			if b&0x80 == 0x80 {
				x, y, z = curve.addJacobian(Bx, By, Bz, x, y, z)
			}
			b <<= 1
		}
	}
	return curve.affineFromJacobian(x, y, z)
}
// ScalarBaseMult -- returns k*G, where G is the curve's base point (Gx, Gy).
func (curve *secp256k1Curve) ScalarBaseMult(k []byte) (*big.Int, *big.Int) {
	return curve.ScalarMult(curve.Gx, curve.Gy, k)
}
package parser
import (
"fmt"
"github.com/authzed/spicedb/pkg/schemadsl/dslshape"
"github.com/authzed/spicedb/pkg/schemadsl/input"
"github.com/authzed/spicedb/pkg/schemadsl/lexer"
)
// AstNode defines an interface for working with nodes created by this parser.
// All three methods return the receiver so calls can be chained.
type AstNode interface {
	// Connect connects this AstNode to another AstNode with the given predicate,
	// and returns the same AstNode.
	Connect(predicate string, other AstNode) AstNode

	// Decorate decorates this AstNode with the given property and string value,
	// and returns the same AstNode.
	Decorate(property string, value string) AstNode

	// DecorateWithInt decorates this AstNode with the given property and int value,
	// and returns the same AstNode.
	DecorateWithInt(property string, value int) AstNode
}
// NodeBuilder is a function for building AST nodes.
type NodeBuilder func(source input.Source, kind dslshape.NodeType) AstNode

// tryParserFn is a function that attempts to build an AST node,
// reporting via its second result whether it succeeded.
type tryParserFn func() (AstNode, bool)

// lookaheadParserFn is a function that performs lookahead on the current
// token, reporting whether parsing should proceed.
type lookaheadParserFn func(currentToken lexer.Lexeme) bool

// rightNodeConstructor is a function which takes in a left expr node and the
// token consumed for a left-recursive operator, and returns a newly constructed
// operator expression if a right expression could be found.
type rightNodeConstructor func(AstNode, lexer.Lexeme) (AstNode, bool)

// commentedLexeme is a lexer.Lexeme with comments attached.
type commentedLexeme struct {
	lexer.Lexeme
	comments []string
}
// sourceParser holds the state of the parser.
type sourceParser struct {
	source        input.Source         // the name of the input; used only for error reports
	lex           *lexer.PeekableLexer // a reference to the lexer used for tokenization
	builder       NodeBuilder          // the builder function for creating AstNode instances
	nodes         *nodeStack           // the stack of the current nodes
	currentToken  commentedLexeme      // the current token
	previousToken commentedLexeme      // the previous token
}
// buildParser returns a new sourceParser instance.
func buildParser(lx *lexer.Lexer, builder NodeBuilder, source input.Source, input string) *sourceParser {
l := lexer.NewPeekableLexer(lx)
return &sourceParser{
source: source,
lex: l,
builder: builder,
nodes: &nodeStack{},
currentToken: commentedLexeme{lexer.Lexeme{Kind: lexer.TokenTypeEOF}, make([]string, 0)},
previousToken: commentedLexeme{lexer.Lexeme{Kind: lexer.TokenTypeEOF}, make([]string, 0)},
}
}
// close releases the underlying lexer's resources.
func (p *sourceParser) close() {
	p.lex.Close()
}

// createNode creates a new AstNode of the given kind and returns it.
func (p *sourceParser) createNode(kind dslshape.NodeType) AstNode {
	return p.builder(p.source, kind)
}
// createErrorNodef creates a new error node carrying the formatted message
// and returns it. The node is start/end decorated at the current token.
func (p *sourceParser) createErrorNodef(format string, args ...interface{}) AstNode {
	message := fmt.Sprintf(format, args...)
	node := p.startNode(dslshape.NodeTypeError).Decorate(dslshape.NodePredicateErrorMessage, message)
	p.finishNode()
	return node
}
// startNode creates a new node of the given type, decorates it with the current token's
// position as its start position, and pushes it onto the nodes stack.
// Every startNode must be balanced by a later finishNode.
func (p *sourceParser) startNode(kind dslshape.NodeType) AstNode {
	node := p.createNode(kind)
	p.decorateStartRuneAndComments(node, p.currentToken)
	p.nodes.push(node)
	return node
}

// decorateStartRuneAndComments decorates the given node with the location of the given token as its
// starting rune, as well as any comments attached to the token.
func (p *sourceParser) decorateStartRuneAndComments(node AstNode, token commentedLexeme) {
	node.Decorate(dslshape.NodePredicateSource, string(p.source))
	node.DecorateWithInt(dslshape.NodePredicateStartRune, int(token.Position))
	p.decorateComments(node, token.comments)
}
// decorateComments decorates the given node with the specified comments,
// each attached as a child comment node.
func (p *sourceParser) decorateComments(node AstNode, comments []string) {
	for _, comment := range comments {
		commentNode := p.createNode(dslshape.NodeTypeComment)
		commentNode.Decorate(dslshape.NodeCommentPredicateValue, comment)
		node.Connect(dslshape.NodePredicateChild, commentNode)
	}
}

// decorateEndRune decorates the given node with the location of the given token as its
// ending rune (the rune of the token's last character, inclusive).
func (p *sourceParser) decorateEndRune(node AstNode, token commentedLexeme) {
	position := int(token.Position) + len(token.Value) - 1
	node.DecorateWithInt(dslshape.NodePredicateEndRune, position)
}
// currentNode returns the node at the top of the stack.
func (p *sourceParser) currentNode() AstNode {
	return p.nodes.topValue()
}

// finishNode pops the current node from the top of the stack and decorates it with
// the previous token's end position as its end position. Panics if there is
// no matching startNode (a parser bug, not a user input error).
func (p *sourceParser) finishNode() {
	if p.currentNode() == nil {
		panic(fmt.Sprintf("No current node on stack. Token: %s", p.currentToken.Value))
	}

	p.decorateEndRune(p.currentNode(), p.previousToken)
	p.nodes.pop()
}
// consumeToken advances the lexer forward, returning the next non-ignored
// token. Comment tokens encountered on the way are collected and attached to
// the returned token so they can later be connected to AST nodes.
func (p *sourceParser) consumeToken() commentedLexeme {
	comments := make([]string, 0)

	for {
		token := p.lex.NextToken()

		if token.Kind == lexer.TokenTypeSinglelineComment || token.Kind == lexer.TokenTypeMultilineComment {
			comments = append(comments, token.Value)
		}

		// Skip tokens in the ignored set (presumably whitespace and
		// comments — defined elsewhere); anything else becomes current.
		if _, ok := ignoredTokenTypes[token.Kind]; !ok {
			p.previousToken = p.currentToken
			p.currentToken = commentedLexeme{token, comments}
			return p.currentToken
		}
	}
}
// isToken returns true if the current token matches one of the types given.
func (p *sourceParser) isToken(types ...lexer.TokenType) bool {
	for _, kind := range types {
		if p.currentToken.Kind == kind {
			return true
		}
	}

	return false
}

// isKeyword returns true if the current token is a keyword matching that given.
func (p *sourceParser) isKeyword(keyword string) bool {
	return p.isToken(lexer.TokenTypeKeyword) && p.currentToken.Value == keyword
}
// emitErrorf creates a new error node and attaches it as a child of the current
// node, recording the offending token text when available.
func (p *sourceParser) emitErrorf(format string, args ...interface{}) {
	errorNode := p.createErrorNodef(format, args...)
	if len(p.currentToken.Value) > 0 {
		errorNode.Decorate(dslshape.NodePredicateErrorSource, p.currentToken.Value)
	}
	p.currentNode().Connect(dslshape.NodePredicateChild, errorNode)
}

// consumeKeyword consumes an expected keyword token or adds an error node,
// returning whether the keyword was found.
func (p *sourceParser) consumeKeyword(keyword string) bool {
	if !p.tryConsumeKeyword(keyword) {
		p.emitErrorf("Expected keyword %s, found token %v", keyword, p.currentToken.Kind)
		return false
	}
	return true
}
// tryConsumeKeyword attempts to consume an expected keyword token,
// leaving the parser position unchanged on mismatch.
func (p *sourceParser) tryConsumeKeyword(keyword string) bool {
	if !p.isKeyword(keyword) {
		return false
	}

	p.consumeToken()
	return true
}

// consumeIdentifier consumes an expected identifier token or adds an error node.
func (p *sourceParser) consumeIdentifier() (string, bool) {
	token, ok := p.tryConsume(lexer.TokenTypeIdentifier)
	if !ok {
		p.emitErrorf("Expected identifier, found token %v", p.currentToken.Kind)
		return "", false
	}
	return token.Value, true
}
// consume performs consumption of the next token if it matches any of the given
// types and returns it. If no matching type is found, adds an error node.
func (p *sourceParser) consume(types ...lexer.TokenType) (lexer.Lexeme, bool) {
	token, ok := p.tryConsume(types...)
	if !ok {
		p.emitErrorf("Expected one of: %v, found: %v", types, p.currentToken.Kind)
	}
	return token, ok
}

// tryConsume performs consumption of the next token if it matches any of the given
// types and returns it (without its attached comments).
func (p *sourceParser) tryConsume(types ...lexer.TokenType) (lexer.Lexeme, bool) {
	token, found := p.tryConsumeWithComments(types...)
	return token.Lexeme, found
}

// tryConsumeWithComments performs consumption of the next token if it matches
// any of the given types, returning the lexeme together with its attached
// comments. On mismatch an error-kind lexeme is returned and the parser
// position is unchanged.
func (p *sourceParser) tryConsumeWithComments(types ...lexer.TokenType) (commentedLexeme, bool) {
	if p.isToken(types...) {
		token := p.currentToken
		p.consumeToken()
		return token, true
	}

	return commentedLexeme{lexer.Lexeme{
		Kind: lexer.TokenTypeError,
	}, make([]string, 0)}, false
}
// performLeftRecursiveParsing performs left-recursive parsing of a set of operators. This method
// first performs the parsing via the subTryExprFn and then checks for one of the left-recursive
// operator token types found. If none found, the left expression is returned. Otherwise, the
// rightNodeBuilder is called to attempt to construct an operator expression. This method also
// properly handles decoration of the nodes with their proper start and end run locations and
// comments.
func (p *sourceParser) performLeftRecursiveParsing(subTryExprFn tryParserFn, rightNodeBuilder rightNodeConstructor, rightTokenTester lookaheadParserFn, operatorTokens ...lexer.TokenType) (AstNode, bool) {
	// Remember the token at which the expression started; it anchors the
	// start-rune decoration of every operator node built below.
	var currentLeftToken commentedLexeme
	currentLeftToken = p.currentToken
	// Consume the left side of the expression.
	leftNode, ok := subTryExprFn()
	if !ok {
		return nil, false
	}
	// Check for an operator token. If none found, then we've found just the left side of the
	// expression and so we return that node.
	if !p.isToken(operatorTokens...) {
		return leftNode, true
	}
	// Keep consuming pairs of operators and child expressions until such
	// time as no more can be consumed. We use this loop+custom build rather than recursion
	// because these operators are *left* recursive, not right.
	var currentLeftNode AstNode
	currentLeftNode = leftNode
	for {
		// Check for an operator.
		if !p.isToken(operatorTokens...) {
			break
		}
		// If a lookahead function is defined, check the lookahead for the matched token.
		if rightTokenTester != nil && !rightTokenTester(p.currentToken.Lexeme) {
			break
		}
		// Consume the operator.
		operatorToken, ok := p.tryConsumeWithComments(operatorTokens...)
		if !ok {
			break
		}
		// Consume the right hand expression and build an expression node (if applicable).
		exprNode, ok := rightNodeBuilder(currentLeftNode, operatorToken.Lexeme)
		if !ok {
			// Report the error but keep what was parsed so far.
			p.emitErrorf("Expected right hand expression, found: %v", p.currentToken.Kind)
			return currentLeftNode, true
		}
		// Decorate the new node with the span it covers, then fold it in as
		// the left side of the next iteration.
		p.decorateStartRuneAndComments(exprNode, currentLeftToken)
		p.decorateEndRune(exprNode, p.previousToken)
		currentLeftNode = exprNode
		currentLeftToken = operatorToken
	}
	return currentLeftNode, true
}
// tryConsumeStatementTerminator tries to consume a statement terminator:
// a synthetic semicolon, an explicit semicolon, or end-of-file.
func (p *sourceParser) tryConsumeStatementTerminator() (lexer.Lexeme, bool) {
	return p.tryConsume(lexer.TokenTypeSyntheticSemicolon, lexer.TokenTypeSemicolon, lexer.TokenTypeEOF)
}
// consumeStatementTerminator consumes a statement terminator, reporting a
// parse error when none is present.
func (p *sourceParser) consumeStatementTerminator() bool {
	if _, ok := p.tryConsumeStatementTerminator(); ok {
		return true
	}
	p.emitErrorf("Expected end of statement or definition, found: %s", p.currentToken.Kind)
	return false
}
// binaryOpDefinition represents information about a binary operator token and
// its associated node type.
type binaryOpDefinition struct {
	// BinaryOperatorToken is the token representing the binary expression's operator.
	BinaryOperatorToken lexer.TokenType
	// BinaryExpressionNodeType is the type of node to create for this expression.
	BinaryExpressionNodeType dslshape.NodeType
}
// buildBinaryOperatorExpressionFnTree builds a tree of functions to try to consume a set of binary
// operator expressions.
func (p *sourceParser) buildBinaryOperatorExpressionFnTree(ops []binaryOpDefinition) tryParserFn {
// Start with a base expression function.
var currentParseFn tryParserFn
currentParseFn = func() (AstNode, bool) {
arrowExpr, ok := p.tryConsumeArrowExpression()
if !ok {
return p.tryConsumeBaseExpression()
}
return arrowExpr, true
}
for i := range ops {
// Note: We have to reverse this to ensure we have proper precedence.
currentParseFn = func(operatorInfo binaryOpDefinition, currentFn tryParserFn) tryParserFn {
return (func() (AstNode, bool) {
return p.tryConsumeComputeExpression(currentFn, operatorInfo.BinaryOperatorToken, operatorInfo.BinaryExpressionNodeType)
})
}(ops[len(ops)-i-1], currentParseFn)
}
return currentParseFn
} | pkg/schemadsl/parser/parser_impl.go | 0.720467 | 0.572065 | parser_impl.go | starcoder |
package typesutil
import (
"reflect"
)
// FromRType wraps a reflect.Type in an *RType so it can be used through this
// package's Type abstraction.
func FromRType(rtype reflect.Type) *RType {
	return &RType{
		Type: rtype,
	}
}
// RType adapts a reflect.Type to this package's Type abstraction by embedding
// it; the methods below re-wrap every reflect result that must itself satisfy
// the package's Type, Method, or StructField interfaces.
type RType struct {
	reflect.Type
}

// Method returns the i'th method of the type, wrapped as a Method.
func (rtype *RType) Method(i int) Method {
	return &RMethod{Method: rtype.Type.Method(i)}
}

// MethodByName returns the method with the given name, wrapped as a Method,
// and whether such a method exists.
func (rtype *RType) MethodByName(name string) (Method, bool) {
	if m, ok := rtype.Type.MethodByName(name); ok {
		return &RMethod{Method: m}, true
	}
	return nil, false
}

// In returns the type of the i'th input parameter, wrapped as a Type.
func (rtype *RType) In(i int) Type {
	return FromRType(rtype.Type.In(i))
}

// Out returns the type of the i'th output parameter, wrapped as a Type.
func (rtype *RType) Out(i int) Type {
	return FromRType(rtype.Type.Out(i))
}

// Implements reports whether the type implements u.
// u must be an *RType; other implementations cause a type-assertion panic.
func (rtype *RType) Implements(u Type) bool {
	return rtype.Type.Implements(u.(*RType).Type)
}

// AssignableTo reports whether the type is assignable to u (an *RType).
func (rtype *RType) AssignableTo(u Type) bool {
	return rtype.Type.AssignableTo(u.(*RType).Type)
}

// ConvertibleTo reports whether the type is convertible to u (an *RType).
func (rtype *RType) ConvertibleTo(u Type) bool {
	return rtype.Type.ConvertibleTo(u.(*RType).Type)
}

// Field returns the i'th struct field, wrapped as a StructField.
func (rtype *RType) Field(i int) StructField {
	return &RStructField{
		StructField: rtype.Type.Field(i),
	}
}

// FieldByName returns the struct field with the given name, wrapped as a
// StructField, and whether such a field exists.
func (rtype *RType) FieldByName(name string) (StructField, bool) {
	if sf, ok := rtype.Type.FieldByName(name); ok {
		return &RStructField{
			StructField: sf,
		}, true
	}
	return nil, false
}

// FieldByNameFunc returns the struct field whose name satisfies match,
// wrapped as a StructField, and whether such a field exists.
func (rtype *RType) FieldByNameFunc(match func(string) bool) (StructField, bool) {
	if sf, ok := rtype.Type.FieldByNameFunc(match); ok {
		return &RStructField{
			StructField: sf,
		}, true
	}
	return nil, false
}

// Key returns the map key type, wrapped as a Type.
func (rtype *RType) Key() Type {
	return FromRType(rtype.Type.Key())
}

// Elem returns the element type, wrapped as a Type.
func (rtype *RType) Elem() Type {
	return FromRType(rtype.Type.Elem())
}
// RStructField adapts a reflect.StructField to this package's StructField
// abstraction.
type RStructField struct {
	StructField reflect.StructField
}

// PkgPath returns the package path qualifying an unexported field name.
func (f *RStructField) PkgPath() string {
	return f.StructField.PkgPath
}

// Name returns the field's name.
func (f *RStructField) Name() string {
	return f.StructField.Name
}

// Tag returns the field's struct tag.
func (f *RStructField) Tag() reflect.StructTag {
	return f.StructField.Tag
}

// Type returns the field's type, wrapped as a Type.
func (f *RStructField) Type() Type {
	return FromRType(f.StructField.Type)
}

// Anonymous reports whether the field is an embedded field.
func (f *RStructField) Anonymous() bool {
	return f.StructField.Anonymous
}
type RMethod struct {
Method reflect.Method
}
func (m *RMethod) PkgPath() string {
return m.Method.PkgPath
}
func (m *RMethod) Name() string {
return m.Method.Name
}
func (m *RMethod) Type() Type {
return FromRType(m.Method.Type)
} | typesutil/rtype.go | 0.667473 | 0.462594 | rtype.go | starcoder |
package main
var openapi = `{
"openapi": "3.0.2",
"info": {
"description": "This is the SpaceApi Validator api",
"version": "1.2.0",
"title": "SpaceApi Validator"
},
"servers": [
{
"url": "https://validator.spaceapi.io",
"description": "The SpaceApi Validator Service"
}
],
"paths": {
"/v1": {
"get": {
"deprecated": true,
"tags": [
"v1"
],
"responses": {
"200": {
"description": "get default information about the server",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ServerInformation"
}
}
}
}
}
}
},
"/v1/validate/": {
"post": {
"deprecated": true,
"tags": [
"v1"
],
"summary": "validate an input against the SpaceApi schema",
"requestBody": {
"required": true,
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ValidateV1"
}
}
}
},
"responses": {
"200": {
"description": "successful operation",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ValidateV1Response"
}
}
}
},
"400": {
"description": "request body is malformed"
},
"500": {
"description": "something went wrong"
}
}
}
},
"/v2": {
"get": {
"tags": [
"v2"
],
"responses": {
"200": {
"description": "get default information about the server",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ServerInformation"
}
}
}
}
}
}
},
"/v2/validateURL": {
"post": {
"tags": [
"v2"
],
"summary": "validate an input against the SpaceApi schema",
"requestBody": {
"required": true,
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ValidateUrlV2"
}
}
}
},
"responses": {
"200": {
"description": "successful operation",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ValidateUrlV2Response"
}
}
}
},
"400": {
"description": "request body is malformed"
},
"500": {
"description": "something went wrong"
}
}
}
},
"/v2/validateJSON": {
"post": {
"tags": [
"v2"
],
"summary": "validate an input against the SpaceApi schema",
"requestBody": {
"required": true,
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ValidateJsonV2"
}
}
}
},
"responses": {
"200": {
"description": "successful operation",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ValidateJsonV2Response"
}
}
}
},
"400": {
"description": "request body is malformed"
},
"500": {
"description": "something went wrong"
}
}
}
}
},
"components": {
"schemas": {
"ServerInformation": {
"properties": {
"description": {
"type": "string"
},
"usage": {
"type": "string"
},
"version": {
"type": "string"
}
},
"required": [
"description",
"usage",
"version"
]
},
"ValidateV1": {
"properties": {
"data": {
"type": "object"
}
},
"required": [
"data"
]
},
"ValidateUrlV2": {
"properties": {
"url": {
"type": "string",
"pattern": "uri"
}
},
"required": [
"url"
]
},
"ValidateJsonV2": {
"type": "object"
},
"ValidateV1Response": {
"properties": {
"valid": {
"type": "boolean"
},
"message": {
"type": "string"
}
},
"required": [
"valid",
"message"
]
},
"ValidateUrlV2Response": {
"properties": {
"valid": {
"type": "boolean"
},
"message": {
"type": "string"
},
"isHttps": {
"type": "boolean"
},
"httpsForward": {
"type": "boolean"
},
"reachable": {
"type": "boolean"
},
"cors": {
"type": "boolean"
},
"contentType": {
"type": "boolean"
},
"certValid": {
"type": "boolean"
},
"checkedVersions": {
"type": "array",
"items": {
"type": "string"
}
},
"validatedJson": {
"type": "object"
},
"schemaErrors": {
"type": "array",
"items": {
"$ref": "#/components/schemas/SchemaError"
}
}
},
"required": [
"valid",
"isHttps",
"httpsForward",
"reachable",
"cors",
"contentType",
"certValid"
]
},
"ValidateJsonV2Response": {
"properties": {
"valid": {
"type": "boolean"
},
"message": {
"type": "string"
},
"checkedVersions": {
"type": "array",
"items": {
"type": "string"
}
},
"validatedJson": {
"type": "object"
},
"schemaErrors": {
"type": "array",
"items": {
"$ref": "#/components/schemas/SchemaError"
}
}
},
"required": [
"valid",
"message"
]
},
"SchemaError": {
"properties": {
"field": {
"type": "string"
},
"message": {
"type": "string"
}
},
"required": [
"field",
"message"
]
}
}
}
}`
package lexer
import (
"bytes"
"fmt"
"unicode"
)
// LexemeType represents both a token class and a lexer state.
type LexemeType = int

const (
	// unspecified is the default token type (no lexeme in progress).
	unspecified LexemeType = iota
	// identifier starts with an alphabetic character.
	identifier
	// leftSeparator is a sequence of left parens only.
	leftSeparator
	// rightSeparator is a sequence of right parens only.
	rightSeparator
	// numberLiteral starts with a digit.
	numberLiteral
	// operator starts with a symbol that isn't a paren, letter or number.
	operator
)

// Lexer holds binds to methods for the lexer.
type Lexer struct{}

// lexeme pairs a token class with the literal text that produced it.
type lexeme struct {
	typ LexemeType
	str string
}

// scan performs a single pass over the input, grouping consecutive runes of
// the same class (identifier, number, parens, operator) into lexemes.
// Whitespace terminates the current lexeme; a rune that cannot legally extend
// the current state yields an error naming the offending rune index.
func scan(in string) ([]lexeme, error) {
	var (
		buf   bytes.Buffer // text of the lexeme currently being accumulated
		state LexemeType   // current lexer state; unspecified between lexemes
		out   []lexeme
	)
	seq := bytes.NewBufferString(in)
	fmt.Printf("processing sequence: %q\n", seq.String())
	var i int
	for r, _, err := seq.ReadRune(); err == nil; r, _, err = seq.ReadRune() {
		switch {
		case unicode.IsSpace(r):
			switch state {
			case unspecified:
				// Consecutive whitespace: nothing buffered, nothing to do.
			case identifier, leftSeparator, rightSeparator, numberLiteral, operator:
				// Whitespace closes whatever lexeme was in flight.
				out = append(out, lexeme{typ: state, str: buf.String()})
				buf.Reset()
				state = unspecified
			default:
				return nil, fmt.Errorf("unexpected space at index %d", i)
			}
		case unicode.IsLetter(r):
			switch state {
			case unspecified:
				state = identifier
				fallthrough
			case identifier:
				buf.WriteRune(r)
			default:
				return nil, fmt.Errorf("unexpected letter %q at index %d", r, i)
			}
		case unicode.IsNumber(r):
			switch state {
			case unspecified:
				state = numberLiteral
				fallthrough
			case identifier, numberLiteral:
				// A digit may continue an identifier as well as a number.
				buf.WriteRune(r)
			default:
				return nil, fmt.Errorf("unexpected number %q at index %d", r, i)
			}
		case r == '(':
			switch state {
			case unspecified:
				state = leftSeparator
				fallthrough
			case leftSeparator:
				buf.WriteRune(r)
			default:
				return nil, fmt.Errorf("unexpected left paren at index %d", i)
			}
		case r == ')':
			switch state {
			case unspecified:
				state = rightSeparator
				fallthrough
			case rightSeparator:
				buf.WriteRune(r)
			default:
				return nil, fmt.Errorf("unexpected right paren at index %d", i)
			}
		case unicode.IsSymbol(r) || unicode.IsPunct(r):
			switch r {
			case '.', '+', '-', '*', '/', '%', '=', '&', '|', '^', '!', '<', '>':
				switch state {
				case unspecified:
					state = operator
					fallthrough
				case operator:
					buf.WriteRune(r)
				default:
					return nil, fmt.Errorf("unexpected symbol %q at index %d", r, i)
				}
			default:
				return nil, fmt.Errorf("unrecognized symbol %q at index %d", r, i)
			}
		default:
			return nil, fmt.Errorf("encountered invalid character %q at index %d", r, i)
		}
		i++
	}
	// Flush the trailing lexeme: without this, input that does not end in
	// whitespace silently dropped its final token.
	if state != unspecified {
		out = append(out, lexeme{typ: state, str: buf.String()})
	}
	return out, nil
}
//Tokenize tokenizes a string, producing a list of tokens
func (l Lexer) Tokenize(in string) ([]Token, error) {
fmt.Printf("tokenize %s\n", in)
lex, err := scan(in)
if err != nil {
return nil, err
}
fmt.Println(lex)
return evaluate(lex)
} | lexer/lexer.go | 0.535584 | 0.434581 | lexer.go | starcoder |
package storagehosttree
import (
"github.com/DxChainNetwork/godx/storage"
)
// node defines the storage host tree node
type node struct {
	parent *node
	left   *node
	right  *node
	// count includes the amount of storage hosts including the sum of all its' children
	// and the node itself
	count int
	// indicates if the node contained any storage host information
	occupied bool
	// total evaluation of the storage hosts, including the sum of all its' child's
	// evaluation and the evaluation of node it self
	evalTotal int64
	// entry holds the host information stored at this node (meaningful only
	// while occupied is true)
	entry *nodeEntry
}

// nodeEntry is the information stored in a storage host tree node
type nodeEntry struct {
	storage.HostInfo
	// eval is the evaluation (weight) assigned to this host
	eval int64
}

// nodeEntries defines a collection of node entry that implemented the sorting
// methods; the sorting will be ranked from the higher evaluation to lower evaluation
type nodeEntries []nodeEntry

// Len, Less and Swap implement sort.Interface; the storage host with higher
// weight will be placed in the front of the list
func (ne nodeEntries) Len() int           { return len(ne) }
func (ne nodeEntries) Less(i, j int) bool { return ne[i].eval > ne[j].eval }
func (ne nodeEntries) Swap(i, j int)      { ne[i], ne[j] = ne[j], ne[i] }
// newNode builds an occupied tree node holding the given entry, attached to
// the given parent, ready to be linked into the StorageHostTree.
func newNode(parent *node, entry *nodeEntry) *node {
	n := &node{
		parent:    parent,
		entry:     entry,
		occupied:  true,
		count:     1,
		evalTotal: entry.eval,
	}
	return n
}
// nodeRemove marks this node as vacant and subtracts its entry's evaluation
// from the node and from every ancestor; the node itself stays in the tree.
func (n *node) nodeRemove() {
	n.evalTotal -= n.entry.eval
	n.occupied = false
	for p := n.parent; p != nil; p = p.parent {
		p.evalTotal -= n.entry.eval
	}
}
// nodeInsert will insert the node entry into the StorageHostTree, returning
// the number of tree nodes created (0 when an existing node was reused,
// 1 otherwise) and the node that now holds the entry.
func (n *node) nodeInsert(entry *nodeEntry) (nodesAdded int, nodeInserted *node) {
	// 1. check if the node is an empty root (no parent, vacant, no children);
	//    if so, store the entry here without creating a node
	if n.parent == nil && !n.occupied && n.left == nil && n.right == nil {
		n.occupied = true
		n.entry = entry
		n.evalTotal = entry.eval
		nodesAdded = 0
		nodeInserted = n
		return
	}
	// 2. the entry will live in this subtree, so fold its evaluation into
	//    this node's running total
	n.evalTotal = n.evalTotal + entry.eval
	// 3. check if the node is occupied; reuse it when vacant
	if !n.occupied {
		n.occupied = true
		n.entry = entry
		nodesAdded = 0
		nodeInserted = n
		return nodesAdded, nodeInserted
	}
	// 4. insert new node into the binary tree: fill a missing child first,
	//    otherwise recurse into the smaller subtree to keep the tree balanced
	if n.left == nil {
		n.left = newNode(n, entry)
		nodesAdded = 1
		nodeInserted = n.left
	} else if n.right == nil {
		n.right = newNode(n, entry)
		nodesAdded = 1
		nodeInserted = n.right
	} else if n.left.count <= n.right.count {
		nodesAdded, nodeInserted = n.left.nodeInsert(entry)
	} else {
		nodesAdded, nodeInserted = n.right.nodeInsert(entry)
	}
	// 5. update the node count with whatever was created below
	n.count += nodesAdded
	return
}
// nodeWithEval will retrieve node with the specific evaluation
func (n *node) nodeWithEval(eval int64) (*node, error) {
if eval > n.evalTotal {
return nil, ErrEvaluationTooLarge
}
if n.left != nil {
if eval < n.left.evalTotal {
return n.left.nodeWithEval(eval)
}
eval = eval - n.left.evalTotal
}
if n.right != nil && eval < n.right.evalTotal {
return n.right.nodeWithEval(eval)
}
if !n.occupied {
return nil, ErrNodeNotOccupied
}
return n, nil
} | storage/storageclient/storagehosttree/node.go | 0.7011 | 0.526586 | node.go | starcoder |
package dsp
import "math"
// Linear interpolates using linear interpolation. Positions past the last
// sample interpolate toward zero; positions at or beyond len(samples) yield 0.
func Linear(samples []float64, x float64) float64 {
	floorX := math.Floor(x)
	idx := int(floorX)
	if idx >= len(samples) {
		return 0
	}
	y0 := samples[idx]
	y1 := 0.0
	if next := idx + 1; next < len(samples) {
		y1 = samples[next]
	}
	return y0 + (x-floorX)*(y1-y0)
}
// LinearF32 interpolates using linear interpolation over float32 samples.
// Positions past the last sample interpolate toward zero; positions at or
// beyond len(samples) yield 0.
func LinearF32(samples []float32, x float32) float32 {
	floorX := float32(math.Floor(float64(x)))
	idx := int(floorX)
	if idx >= len(samples) {
		return 0
	}
	y0 := samples[idx]
	var y1 float32
	if next := idx + 1; next < len(samples) {
		y1 = samples[next]
	}
	return y0 + (x-floorX)*(y1-y0)
}
// Hermite4p3o interpolates using 4-point, 3rd-order Hermite (x-form)
func Hermite4p3o(samples []float64, x float64) float64 {
	xi := int(x)
	// Gather the four taps around x; out-of-range taps read as zero.
	var s [4]float64
	for i := -1; i <= 2; i++ {
		if j := xi + i; j >= 0 && j < len(samples) {
			s[i+1] = samples[j]
		}
	}
	// Reduce x to the fractional position within the center interval.
	x -= math.Floor(x)
	c0 := s[1]
	c1 := 1.0 / 2.0 * (s[2] - s[0])
	c2 := s[0] - 5.0/2.0*s[1] + 2.0*s[2] - 1.0/2.0*s[3]
	c3 := 1.0/2.0*(s[3]-s[0]) + 3.0/2.0*(s[1]-s[2])
	// Evaluate the cubic via Horner's rule.
	return ((c3*x+c2)*x+c1)*x + c0
}
// Hermite4p3oF32 interpolates using 4-point, 3rd-order Hermite (x-form)
// over float32 samples; out-of-range taps read as zero.
func Hermite4p3oF32(samples []float32, x float32) float32 {
	xi := int(x)
	// Gather the four taps around x.
	var s [4]float32
	for i := -1; i <= 2; i++ {
		if j := xi + i; j >= 0 && j < len(samples) {
			s[i+1] = samples[j] // samples are already float32; no conversion needed
		}
	}
	// Reduce x to the fractional position within the center interval.
	x -= float32(math.Floor(float64(x)))
	c0 := s[1]
	c1 := 1.0 / 2.0 * (s[2] - s[0])
	c2 := s[0] - 5.0/2.0*s[1] + 2.0*s[2] - 1.0/2.0*s[3]
	c3 := 1.0/2.0*(s[3]-s[0]) + 3.0/2.0*(s[1]-s[2])
	// Evaluate the cubic via Horner's rule.
	return ((c3*x+c2)*x+c1)*x + c0
}
// Optimal2x4p4o interpolates using optimal 2x (4-point, 4th-order) (z-form)
func Optimal2x4p4o(samples []float64, x float64) float64 {
	const middle = 1
	xi := int(x)
	// Gather the four taps around x; out-of-range taps read as zero.
	// (The array is sized 6, but only indices 0..3 are written here.)
	var s [6]float64
	for i := -1; i <= 2; i++ {
		if j := xi + i; j >= 0 && j < len(samples) {
			s[middle+i] = samples[j]
		}
	}
	// Split the taps into even/odd combinations around the interval center.
	even1 := s[middle+1] + s[middle]
	odd1 := s[middle+1] - s[middle]
	even2 := s[middle+2] + s[middle-1]
	odd2 := s[middle+2] - s[middle-1]
	c0 := even1*0.45645918406487612 + even2*0.04354173901996461
	c1 := odd1*0.47236675362442071 + odd2*0.17686613581136501
	c2 := even1*-0.253674794204558521 + even2*0.25371918651882464
	c3 := odd1*-0.37917091811631082 + odd2*0.11952965967158000
	c4 := even1*0.04252164479749607 + even2*-0.04289144034653719
	// z is the position relative to the interval midpoint (z-form).
	z := x - math.Floor(x) - 1.0/2.0
	return (((c4*z+c3)*z+c2)*z+c1)*z + c0
}
// Optimal2x4p4oF32 interpolates using optimal 2x (4-point, 4th-order) (z-form)
func Optimal2x4p4oF32(samples []float32, x float32) float32 {
	const middle = 1
	xi := int(x)
	// Gather the four taps around x; out-of-range taps read as zero.
	// (The array is sized 6, but only indices 0..3 are written here.)
	var s [6]float32
	for i := -1; i <= 2; i++ {
		if j := xi + i; j >= 0 && j < len(samples) {
			s[middle+i] = samples[j]
		}
	}
	// Split the taps into even/odd combinations around the interval center.
	even1 := s[middle+1] + s[middle]
	odd1 := s[middle+1] - s[middle]
	even2 := s[middle+2] + s[middle-1]
	odd2 := s[middle+2] - s[middle-1]
	c0 := even1*0.45645918406487612 + even2*0.04354173901996461
	c1 := odd1*0.47236675362442071 + odd2*0.17686613581136501
	c2 := even1*-0.253674794204558521 + even2*0.25371918651882464
	c3 := odd1*-0.37917091811631082 + odd2*0.11952965967158000
	c4 := even1*0.04252164479749607 + even2*-0.04289144034653719
	// z is the position relative to the interval midpoint (z-form).
	z := x - float32(math.Floor(float64(x))) - 1.0/2.0
	return (((c4*z+c3)*z+c2)*z+c1)*z + c0
}
// Optimal2x6p5o interpolates using optimal 2x (6-point, 5th-order) (z-form)
func Optimal2x6p5o(samples []float64, x float64) float64 {
	const middle = 2
	xi := int(x)
	// Gather the six taps around x; out-of-range taps read as zero.
	var s [6]float64
	for i := -2; i <= 3; i++ {
		if j := xi + i; j >= 0 && j < len(samples) {
			s[middle+i] = samples[j]
		}
	}
	// Split the taps into even/odd combinations around the interval center.
	even1 := s[middle+1] + s[middle]
	odd1 := s[middle+1] - s[middle]
	even2 := s[middle+2] + s[middle-1]
	odd2 := s[middle+2] - s[middle-1]
	even3 := s[middle+3] + s[middle-2]
	odd3 := s[middle+3] - s[middle-2]
	c0 := even1*0.40513396007145713 + even2*0.09251794438424393 + even3*0.00234806603570670
	c1 := odd1*0.28342806338906690 + odd2*0.21703277024054901 + odd3*0.01309294748731515
	c2 := even1*-0.191337682540351941 + even2*0.16187844487943592 + even3*0.02946017143111912
	c3 := odd1*-0.16471626190554542 + odd2*-0.00154547203542499 + odd3*0.03399271444851909
	c4 := even1*0.03845798729588149 + even2*-0.05712936104242644 + even3*0.01866750929921070
	c5 := odd1*0.04317950185225609 + odd2*-0.01802814255926417 + odd3*0.00152170021558204
	// z is the position relative to the interval midpoint (z-form).
	z := x - math.Floor(x) - 1.0/2.0
	return ((((c5*z+c4)*z+c3)*z+c2)*z+c1)*z + c0
}
// Optimal2x6p5oF32 interpolates using optimal 2x (6-point, 5th-order) (z-form)
func Optimal2x6p5oF32(samples []float32, x float32) float32 {
const middle = 2
xi := int(x)
var s [6]float32
for i := -2; i <= 3; i++ {
if j := xi + i; j >= 0 && j < len(samples) {
s[middle+i] = float32(samples[j])
}
}
even1 := s[middle+1] + s[middle]
odd1 := s[middle+1] - s[middle]
even2 := s[middle+2] + s[middle-1]
odd2 := s[middle+2] - s[middle-1]
even3 := s[middle+3] + s[middle-2]
odd3 := s[middle+3] - s[middle-2]
c0 := even1*0.40513396007145713 + even2*0.09251794438424393 + even3*0.00234806603570670
c1 := odd1*0.28342806338906690 + odd2*0.21703277024054901 + odd3*0.01309294748731515
c2 := even1*-0.191337682540351941 + even2*0.16187844487943592 + even3*0.02946017143111912
c3 := odd1*-0.16471626190554542 + odd2*-0.00154547203542499 + odd3*0.03399271444851909
c4 := even1*0.03845798729588149 + even2*-0.05712936104242644 + even3*0.01866750929921070
c5 := odd1*0.04317950185225609 + odd2*-0.01802814255926417 + odd3*0.00152170021558204
z := x - float32(math.Floor(float64(x))) - 1.0/2.0
return ((((c5*z+c4)*z+c3)*z+c2)*z+c1)*z + c0
} | dsp/interpolate.go | 0.679923 | 0.546375 | interpolate.go | starcoder |
package gaia
import (
"fmt"
"github.com/globalsign/mgo/bson"
"github.com/mitchellh/copystructure"
"go.aporeto.io/elemental"
)
// NamespaceTypeIdentity represents the Identity of the object.
var NamespaceTypeIdentity = elemental.Identity{
Name: "namespacetype",
Category: "namespacetypes",
Package: "squall",
Private: false,
}
// NamespaceTypesList represents a list of NamespaceTypes
type NamespaceTypesList []*NamespaceType
// Identity returns the identity of the objects in the list.
func (o NamespaceTypesList) Identity() elemental.Identity {
return NamespaceTypeIdentity
}
// Copy returns a pointer to a copy the NamespaceTypesList.
func (o NamespaceTypesList) Copy() elemental.Identifiables {
copy := append(NamespaceTypesList{}, o...)
return ©
}
// Append appends the objects to the a new copy of the NamespaceTypesList.
func (o NamespaceTypesList) Append(objects ...elemental.Identifiable) elemental.Identifiables {
out := append(NamespaceTypesList{}, o...)
for _, obj := range objects {
out = append(out, obj.(*NamespaceType))
}
return out
}
// List converts the object to an elemental.IdentifiablesList.
func (o NamespaceTypesList) List() elemental.IdentifiablesList {
out := make(elemental.IdentifiablesList, len(o))
for i := 0; i < len(o); i++ {
out[i] = o[i]
}
return out
}
// DefaultOrder returns the default ordering fields of the content.
func (o NamespaceTypesList) DefaultOrder() []string {
return []string{}
}
// ToSparse returns the NamespaceTypesList converted to SparseNamespaceTypesList.
// Objects in the list will only contain the given fields. No field means entire field set.
func (o NamespaceTypesList) ToSparse(fields ...string) elemental.Identifiables {
out := make(SparseNamespaceTypesList, len(o))
for i := 0; i < len(o); i++ {
out[i] = o[i].ToSparse(fields...).(*SparseNamespaceType)
}
return out
}
// Version returns the version of the content.
func (o NamespaceTypesList) Version() int {
return 1
}
// NamespaceType represents the model of a namespacetype
type NamespaceType struct {
// the namespace type for the current namespace.
Type string `json:"type" msgpack:"type" bson:"-" mapstructure:"type,omitempty"`
ModelVersion int `json:"-" msgpack:"-" bson:"_modelversion"`
}
// NewNamespaceType returns a new *NamespaceType
func NewNamespaceType() *NamespaceType {
return &NamespaceType{
ModelVersion: 1,
}
}
// Identity returns the Identity of the object.
func (o *NamespaceType) Identity() elemental.Identity {
return NamespaceTypeIdentity
}
// Identifier returns the value of the object's unique identifier.
func (o *NamespaceType) Identifier() string {
return ""
}
// SetIdentifier sets the value of the object's unique identifier.
func (o *NamespaceType) SetIdentifier(id string) {
}
// GetBSON implements the bson marshaling interface.
// This is used to transparently convert ID to MongoDBID as ObectID.
func (o *NamespaceType) GetBSON() (interface{}, error) {
if o == nil {
return nil, nil
}
s := &mongoAttributesNamespaceType{}
return s, nil
}
// SetBSON implements the bson marshaling interface.
// This is used to transparently convert ID to MongoDBID as ObectID.
func (o *NamespaceType) SetBSON(raw bson.Raw) error {
if o == nil {
return nil
}
s := &mongoAttributesNamespaceType{}
if err := raw.Unmarshal(s); err != nil {
return err
}
return nil
}
// Version returns the hardcoded version of the model.
func (o *NamespaceType) Version() int {
return 1
}
// BleveType implements the bleve.Classifier Interface.
func (o *NamespaceType) BleveType() string {
return "namespacetype"
}
// DefaultOrder returns the list of default ordering fields.
func (o *NamespaceType) DefaultOrder() []string {
return []string{}
}
// Doc returns the documentation for the object
func (o *NamespaceType) Doc() string {
return `Returns the type of the specified namespace.`
}
func (o *NamespaceType) String() string {
return fmt.Sprintf("<%s:%s>", o.Identity().Name, o.Identifier())
}
// ToSparse returns the sparse version of the model.
// The returned object will only contain the given fields. No field means entire field set.
func (o *NamespaceType) ToSparse(fields ...string) elemental.SparseIdentifiable {
if len(fields) == 0 {
// nolint: goimports
return &SparseNamespaceType{
Type: &o.Type,
}
}
sp := &SparseNamespaceType{}
for _, f := range fields {
switch f {
case "type":
sp.Type = &(o.Type)
}
}
return sp
}
// Patch apply the non nil value of a *SparseNamespaceType to the object.
func (o *NamespaceType) Patch(sparse elemental.SparseIdentifiable) {
if !sparse.Identity().IsEqual(o.Identity()) {
panic("cannot patch from a parse with different identity")
}
so := sparse.(*SparseNamespaceType)
if so.Type != nil {
o.Type = *so.Type
}
}
// DeepCopy returns a deep copy if the NamespaceType.
func (o *NamespaceType) DeepCopy() *NamespaceType {
if o == nil {
return nil
}
out := &NamespaceType{}
o.DeepCopyInto(out)
return out
}
// DeepCopyInto copies the receiver into the given *NamespaceType.
func (o *NamespaceType) DeepCopyInto(out *NamespaceType) {
target, err := copystructure.Copy(o)
if err != nil {
panic(fmt.Sprintf("Unable to deepcopy NamespaceType: %s", err))
}
*out = *target.(*NamespaceType)
}
// Validate valides the current information stored into the structure.
func (o *NamespaceType) Validate() error {
errors := elemental.Errors{}
requiredErrors := elemental.Errors{}
if len(requiredErrors) > 0 {
return requiredErrors
}
if len(errors) > 0 {
return errors
}
return nil
}
// SpecificationForAttribute returns the AttributeSpecification for the given attribute name key.
func (*NamespaceType) SpecificationForAttribute(name string) elemental.AttributeSpecification {
if v, ok := NamespaceTypeAttributesMap[name]; ok {
return v
}
// We could not find it, so let's check on the lower case indexed spec map
return NamespaceTypeLowerCaseAttributesMap[name]
}
// AttributeSpecifications returns the full attribute specifications map.
func (*NamespaceType) AttributeSpecifications() map[string]elemental.AttributeSpecification {
return NamespaceTypeAttributesMap
}
// ValueForAttribute returns the value for the given attribute.
// This is a very advanced function that you should not need but in some
// very specific use cases.
func (o *NamespaceType) ValueForAttribute(name string) interface{} {
switch name {
case "type":
return o.Type
}
return nil
}
// NamespaceTypeAttributesMap represents the map of attribute for NamespaceType.
var NamespaceTypeAttributesMap = map[string]elemental.AttributeSpecification{
"Type": {
AllowedChoices: []string{},
Autogenerated: true,
ConvertedName: "Type",
Description: `the namespace type for the current namespace.`,
Exposed: true,
Name: "type",
ReadOnly: true,
Type: "string",
},
}
// NamespaceTypeLowerCaseAttributesMap represents the map of attribute for NamespaceType.
var NamespaceTypeLowerCaseAttributesMap = map[string]elemental.AttributeSpecification{
"type": {
AllowedChoices: []string{},
Autogenerated: true,
ConvertedName: "Type",
Description: `the namespace type for the current namespace.`,
Exposed: true,
Name: "type",
ReadOnly: true,
Type: "string",
},
}
// SparseNamespaceTypesList represents a list of SparseNamespaceTypes
type SparseNamespaceTypesList []*SparseNamespaceType
// Identity returns the identity of the objects in the list.
func (o SparseNamespaceTypesList) Identity() elemental.Identity {
return NamespaceTypeIdentity
}
// Copy returns a pointer to a copy the SparseNamespaceTypesList.
func (o SparseNamespaceTypesList) Copy() elemental.Identifiables {
copy := append(SparseNamespaceTypesList{}, o...)
return ©
}
// Append appends the objects to the a new copy of the SparseNamespaceTypesList.
func (o SparseNamespaceTypesList) Append(objects ...elemental.Identifiable) elemental.Identifiables {
out := append(SparseNamespaceTypesList{}, o...)
for _, obj := range objects {
out = append(out, obj.(*SparseNamespaceType))
}
return out
}
// List converts the object to an elemental.IdentifiablesList.
func (o SparseNamespaceTypesList) List() elemental.IdentifiablesList {
out := make(elemental.IdentifiablesList, len(o))
for i := 0; i < len(o); i++ {
out[i] = o[i]
}
return out
}
// DefaultOrder returns the default ordering fields of the content.
func (o SparseNamespaceTypesList) DefaultOrder() []string {
return []string{}
}
// ToPlain returns the SparseNamespaceTypesList converted to NamespaceTypesList.
func (o SparseNamespaceTypesList) ToPlain() elemental.IdentifiablesList {
out := make(elemental.IdentifiablesList, len(o))
for i := 0; i < len(o); i++ {
out[i] = o[i].ToPlain()
}
return out
}
// Version returns the version of the content.
func (o SparseNamespaceTypesList) Version() int {
return 1
}
// SparseNamespaceType represents the sparse version of a namespacetype.
type SparseNamespaceType struct {
// the namespace type for the current namespace.
Type *string `json:"type,omitempty" msgpack:"type,omitempty" bson:"-" mapstructure:"type,omitempty"`
ModelVersion int `json:"-" msgpack:"-" bson:"_modelversion"`
}
// NewSparseNamespaceType returns a new SparseNamespaceType.
func NewSparseNamespaceType() *SparseNamespaceType {
return &SparseNamespaceType{}
}
// Identity returns the Identity of the sparse object.
func (o *SparseNamespaceType) Identity() elemental.Identity {
return NamespaceTypeIdentity
}
// Identifier returns the value of the sparse object's unique identifier.
func (o *SparseNamespaceType) Identifier() string {
return ""
}
// SetIdentifier sets the value of the sparse object's unique identifier.
func (o *SparseNamespaceType) SetIdentifier(id string) {
}
// GetBSON implements the bson marshaling interface.
// This is used to transparently convert ID to MongoDBID as ObectID.
func (o *SparseNamespaceType) GetBSON() (interface{}, error) {
if o == nil {
return nil, nil
}
s := &mongoAttributesSparseNamespaceType{}
return s, nil
}
// SetBSON implements the bson unmarshaling interface.
// This is used to transparently convert ID to MongoDBID as ObjectID.
func (o *SparseNamespaceType) SetBSON(raw bson.Raw) error {
	if o == nil {
		return nil
	}
	// The mongo attribute struct currently has no fields; decoding only
	// validates the raw document.
	s := &mongoAttributesSparseNamespaceType{}
	return raw.Unmarshal(s)
}
// Version returns the hardcoded version of the model (always 1).
func (o *SparseNamespaceType) Version() int {
	return 1
}
// ToPlain returns the plain version of the sparse model, copying only the
// fields that are actually set (non-nil).
func (o *SparseNamespaceType) ToPlain() elemental.PlainIdentifiable {
	out := NewNamespaceType()
	if t := o.Type; t != nil {
		out.Type = *t
	}
	return out
}
// DeepCopy returns a deep copy of the SparseNamespaceType, or nil when the
// receiver is nil.
func (o *SparseNamespaceType) DeepCopy() *SparseNamespaceType {
	if o == nil {
		return nil
	}
	copied := &SparseNamespaceType{}
	o.DeepCopyInto(copied)
	return copied
}
// DeepCopyInto copies the receiver into the given *SparseNamespaceType.
// It panics if the underlying copystructure.Copy fails, which indicates a
// programming error rather than a recoverable condition.
func (o *SparseNamespaceType) DeepCopyInto(out *SparseNamespaceType) {
	copied, err := copystructure.Copy(o)
	if err != nil {
		panic(fmt.Sprintf("Unable to deepcopy SparseNamespaceType: %s", err))
	}
	*out = *copied.(*SparseNamespaceType)
}
// mongoAttributesNamespaceType is the BSON shape persisted for NamespaceType.
// It currently carries no fields.
type mongoAttributesNamespaceType struct {
}

// mongoAttributesSparseNamespaceType is the BSON shape persisted for
// SparseNamespaceType. It currently carries no fields.
type mongoAttributesSparseNamespaceType struct {
}
package jwe
import "github.com/urfave/cli"
// Command returns the jwe subcommand group for the CLI.
//
// It only wires up metadata, help text, and the subcommand list; the actual
// work happens in encryptCommand and decryptCommand. The Description below
// is a raw string literal rendered as help output and must not be altered.
func Command() cli.Command {
	return cli.Command{
		Name: "jwe",
		Usage: "encrypt and decrypt data and keys using JSON Web Encryption (JWE)",
		UsageText: "step crypto jwe <subcommand> [arguments] [global-flags] [subcommand-flags]",
		Description: `The **step crypto jwe** command group provides facilities for encrypting and
decrypting content and representing encrypted content using JSON-based data
structures as defined by the JSON Web Encryption (JWE) specification in
RFC7516, using algorithms defined in the JSON Web Algorithms (JWA)
specification in RFC7518. A JWE is a data structure representing an encrypted
and integrity-protected message.
There are two JWE serializations: the compact serialization is a small, URL-
safe representation that base64 encodes the JWE components. The compact
serialization is a URL-safe string, suitable for space-constrained
environments such as HTTP headers and URI query parameters. The JSON
serialization represents JWEs as JSON objects and allows the same content to
be encrypted to multiple parties (using multiple keys).
A typical JWE in compact serialization is a dot-separated string with five
parts:
* Header: metadata describing how the plaintext payload was processed to
produce ciphertext (e.g., which algorithms were used to encrypt the
content encryption key and the plaintext payload)
* Encrypted Key: the "content encryption key" that was used to encrypt the
plaintext payload, encrypted for the JWE recipient(s) (see: "what's with
the encrypted key" below)
* Initialization Vector: an initialization vector for use with the specified
encryption algorithm, if applicable
* Ciphertext: the ciphertext value resulting produced from authenticated
encryption of the plaintext with additional authenticated data
* Authentication Tag: value resulting fromthe authenticated encryption of
the plaintext with additional authenticated data
## What's with encrypted key?
This is somewhat confusing. Instead of directly encrypting the plaintext
payload, JWE typically generates a new "content encryption key" then encrypts
*that key* for the intended recipient(s).
While versatile, JWE is easy to use incorrectly. Therefore, any use of this
subcommand requires the use of the '--subtle' flag as a misuse prevention
mechanism. You should only use this subcommand if you know what you're doing.
If possible, you're better off using the higher level 'crypto nacl' command
group.
## EXAMPLES
This example demonstrates how to produce a JWE for a recipient using the
RSA-OAEP algorithm to encrypt the content encryption key (producing the
encrypted key), and the A256GCM (AES GCM with 256-bit key) algorithm to
produce the ciphertext and authentication tag.
1. Encode the JWE header with the desired "alg" and "enc" members then
encode it producing the *header*
'''raw
BASE64URL(UTF8({"alg":"RSA-OAEP","enc":"A256GCM"}))
=> <KEY>
'''
2. Generate a random content encryption key (CEK), encrypt it using
RSA-OAEP, producing the *encrypted key*
3. Generate a random initialization vector
4. Perform authenticated encryption over the plaintext using the content
encryption key and A256GCM algorithm with the base64-encoded JWE headers
provided as additional authenticated data producing the *ciphertext* and
*authentication tag*
5. Assemble the final result (compact serialization) to produce the string:
'''raw
BASE64URL(UTF8(header)) || '.'
|| BASE64URL(encrypted key) || '.'
|| BASE64URL(initialization vector) || '.'
|| BASE64URL(ciphertext) || '.'
|| BASE64URL(authentication tag)
'''
Producing a result like (line breaks for display purposes only):
'''raw
<KEY>
OKOawDo13gRp2ojaHV7LFpZcgV7T6DVZKTyKOMTYUmKoTCVJRgckCL9kiMT03JGe
ipsEdY3mx_etLbbWSrFr05kLzcSr4qKAq7YN7e9jwQRb23nfa6c9d-StnImGyFDb
Sv04uVuxIp5Zms1gNxKKK2Da14B8S4rzVRltdYwam_lDp5XnZAYpQdb76FdIKLaV
mqgfwX7XWRxv2322i-vDxRfqNzo_tETKzpVLzfiwQyeyPGLBIO56YJ7eObdv0je8
<KEY>.
<KEY>6UG9oMo4vpzs9tX_EFShS8iB7j6ji
SdiwkIr3ajwQzaBtQD_A.
XFBoMYUZodetZdvTiFvSkQ
'''
Create a JWK for encryption use:
'''
$ step crypto jwk create --use enc p256.enc.pub p256.enc.priv
'''
Encrypt a message using the previous public key (output indented for display purposes):
'''
$ echo The message | step crypto jwe encrypt --key p256.enc.pub
{
"protected":"eyJhbGciOiJFQ0RILUVTIiwiZW5jIjoiQTI1NkdDTSIsImVwayI6eyJrdHkiOiJFQyIsImNydiI6IlAtMjU2IiwieCI6Ii1hakZFVlZaSWNRa0RxbkhpZ0NOWU5fa29nZkhxZnRGX1N3c2ZQeXlSRUUiLCJ5IjoicGpjVnJJZHRHSVpka05HS1FETEpIdG5SLUxudUI2V3k4bHpuX3REdm9BUSJ9LCJraWQiOiJHd0tSTUdXY1pWNFE2dGZZblpjZm90N090N2hjQ0t2cUJPVWljX0JoZ0gwIn0",
"iv":"-10PlAIteHLVABtt",
"ciphertext":"_xnGoE7vPCrXRRlK",
"tag":"wcvj4sXXMc9qII_ySYNYGA"
}
'''
Decrypt the previous message using the private key:
'''
$ step crypto jwe decrypt --key p256.enc.priv \< message.json
Please enter the password to decrypt p256.enc.priv: ********
The message
'''
Encrypt a message using a shared password:
'''
$ echo The message | step crypto jwe encrypt --alg PBES2-HS256+A128KW
Please enter the password to encrypt the content encryption key: ********
{
"protected":"<KEY>",
"encrypted_key":"<KEY>",
"iv":"Jw4JCCr-lLrE0irT",
"ciphertext":"jcb3wKopsHmClh7s",
"tag":"7ttDDDfuqA45puDu7KbVkA"
}
'''
Decrypt a message protected with shared password:
'''
$ step crypto jwe decrypt \< message.json
Please enter the password to decrypt the content encryption key: ********
The message
'''`,
		// The actual encrypt/decrypt implementations live in their own files.
		Subcommands: cli.Commands{
			encryptCommand(),
			decryptCommand(),
		},
	}
}
package lua
import (
"fmt"
"math"
)
// Implementation of tables (aka arrays, objects, or hash tables). Tables keep
// their elements in two parts: an array part and a hash part. Non-negative
// integer keys are all candidates to be kept in the array part. The actual
// size of the array is the largest 'n' such that more than half the slots
// between 1 and n are in use. Hash uses a mix of chained scatter table with
// Brent's variation. A main invariant of these tables is that, if an element
// is not in its main position (i.e. the 'original' position that its hash
// gives to it), then the colliding element is in its own main position.
// Hence even when the load factor reaches 100%, performance remains good.
type table struct {
	// thread state
	state *State

	// table state: list holds the 1-based array part (key k lives at
	// list[k-1]), hash holds everything else, meta is the metatable.
	hash map[Value]Value
	list []Value
	meta *table

	// iterator state, populated lazily by next: iter is a stable ordering
	// of the hash keys, keys maps each hash key to its position in iter.
	iter []Value
	keys map[Value]int
}
// String identifies the table by its address, matching Lua's default tostring.
func (x *table) String() string { return fmt.Sprintf("table: %p", x) }

// Length reports the size of the array part only.
func (x *table) Length() int { return len(x.list) }

// Type reports TableType for the Value interface.
func (x *table) Type() Type { return TableType }
// ForEach invokes fn once for every key/value pair in the table: first the
// array part, then the hash part (in Go map iteration order).
//
// Fix: array keys are exposed 1-based (Int(i+1)), matching get/set and next,
// which map key k to list[k-1]. Previously the 0-based slice index was used
// as the key, so keys yielded here did not round-trip through Index.
func (x *table) ForEach(fn func(Value, Value)) {
	if x.list != nil {
		for i, v := range x.list {
			fn(Int(i+1), v)
		}
	}
	if x.hash != nil {
		for k, v := range x.hash {
			fn(k, v)
		}
	}
}
// Index returns the value stored under index, or None when absent.
func (x *table) Index(index Value) Value {
	return x.get(index)
}
// newTable returns a table whose array and hash parts are pre-sized with
// arrayN and hashN respectively. The hash part is always allocated so that
// set never writes to a nil map.
func newTable(state *State, arrayN, hashN int) *table {
	t := &table{state: state}
	if arrayN > 0 {
		t.list = make([]Value, 0, arrayN)
	}
	if hashN > 0 {
		t.hash = make(map[Value]Value, hashN)
	} else {
		t.hash = make(map[Value]Value)
	}
	return t
}
// set stores v under key k. None keys are ignored. Positive integer keys
// that fall inside (or immediately extend) the array part are stored there,
// 1-based; everything else goes to the hash part, where storing None deletes
// the entry.
func (t *table) set(k, v Value) {
	if IsNone(k) {
		return
	}
	if n, ok := k.(Number); ok {
		// arrayIndex yields the positive integer value of k, or 0 if k is
		// not an exact positive integer; keys are 1-based in the list.
		i := arrayIndex(n) - 1
		if i >= 0 && i < len(t.list) {
			t.list[i] = v
			return
		}
		if i == len(t.list) {
			// Key extends the array part by exactly one slot.
			t.list = append(t.list, v)
			return
		}
		// Numeric key outside the array range falls through to the hash
		// part below.
		// TODO: resize & rehash
	}
	if IsNone(v) {
		// Assigning None removes the key, mirroring Lua semantics.
		delete(t.hash, k)
		return
	}
	t.hash[k] = v
}
// get returns the value stored under key k, consulting the 1-based array
// part first and then the hash part; absent keys yield None.
func (t *table) get(k Value) Value {
	if IsNone(k) {
		return None
	}
	if n, isNum := k.(Number); isNum {
		if i := arrayIndex(n) - 1; i >= 0 && i < len(t.list) {
			return t.list[i]
		}
	}
	if v, found := t.hash[k]; found {
		return v
	}
	return None
}
// getStr fetches the value stored under the string key.
func (t *table) getStr(key string) Value {
	return t.get(String(key))
}

// getInt fetches the value stored under the integer key.
func (t *table) getInt(key int64) Value {
	return t.get(Int(key))
}

// setStr stores value under the string key.
func (t *table) setStr(key string, value Value) {
	t.set(String(key), value)
}

// setInt stores value under the integer key.
func (t *table) setInt(key int64, value Value) {
	t.set(Int(key), value)
}
// exists reports whether key maps to a non-None value.
func (t *table) exists(key Value) bool {
	return !IsNone(t.get(key))
}

// length returns the size of the array part.
func (t *table) length() int {
	return len(t.list)
}
// next advances a table traversal: given the previous key (None to start),
// it returns the following key/value pair and more=true, or (None, None,
// false) when iteration ends. Array elements are visited first (keys are
// 1-based Ints), then hash elements in the snapshot order captured on the
// first call.
func (t *table) next(key Value) (k, v Value, more bool) {
	if IsNone(key) || t.keys == nil { // first iteration?
		// Snapshot the hash keys so later calls see a stable ordering.
		t.keys = make(map[Value]int, len(t.hash))
		t.iter = make([]Value, 0, len(t.hash))
		for k := range t.hash {
			t.iter = append(t.iter, k)
			t.keys[k] = len(t.iter) - 1
		}
	}
	if index := t.iterKey(key); index < len(t.list) {
		// Still in the array part: scan forward for the next non-None slot.
		// NOTE(review): if every remaining array slot is None, this falls
		// through to the cleanup below without visiting the hash part —
		// confirm array slots can never hold None here.
		for index++; index <= len(t.list); index++ {
			if !IsNone(t.list[index-1]) {
				k = Int(index)
				v = t.list[index-1]
				return k, v, true
			}
		}
	} else {
		// Hash part: iterKey numbers hash keys after the array ones, so
		// subtracting len(list) yields the next position in the snapshot.
		if index = index - len(t.list); index < len(t.iter) {
			k := t.iter[index]
			v := t.hash[k]
			return k, v, true
		}
	}
	// Key did not exist or iteration ended; drop the snapshot.
	t.iter = nil
	t.keys = nil
	return None, None, false
}
// iterKey returns the index of a 'key' for table traversals. First go
// all elements in the array part, then elements in the hash part. The
// beginning of a traversal is signaled by 0. Panics (runtime error) when
// the key belongs to neither part, mirroring Lua's 'invalid key to next'.
func (t *table) iterKey(key Value) (index int) {
	if IsNone(key) {
		return 0
	} // first iteration?
	index = arrayIndex(key)
	if index != 0 && index <= len(t.list) { // key in array?
		return index // found index
	}
	// otherwise key is in hash part (snapshot built by next).
	var found bool
	if index, found = t.keys[key]; !found {
		panic(runtimeErr(fmt.Errorf("invalid key (%v) to 'next'", key)))
	}
	// hash elements are numbered after array ones.
	return index + 1 + len(t.list)
}
// maxInt is the largest value representable by the platform int.
const maxInt = int(^uint(0) >> 1)
// arrayIndex returns val as a positive int suitable for the array part, or
// 0 when val is not an exact positive integer in range.
func arrayIndex(val Value) int {
	switch v := val.(type) {
	case Float:
		if i, ok := float2int(float64(v)); ok {
			return i
		}
	case Int:
		if i := int(v); i > 0 && i < maxInt {
			return i
		}
	}
	return 0
}
// float2int converts f64 to an int, reporting whether the conversion is
// exact. Infinities, NaN, and values that do not round-trip through int64
// unchanged (fractional or out-of-range values) report false.
//
// Cleanup: removed the else-after-return nesting; behavior is unchanged.
func float2int(f64 float64) (int, bool) {
	if math.IsInf(f64, 0) || math.IsNaN(f64) {
		return 0, false
	}
	// Exact only if the value survives the int64 round trip.
	if i64 := int64(f64); float64(i64) == f64 {
		return int(i64), true
	}
	return 0, false
}
package main
import (
"bufio"
"fmt"
"sort"
"strings"
)
// n is the number of pair-insertion steps to precompute.
const n = 40

// countsMap tallies occurrences of each element (byte) in a polymer.
type countsMap map[byte]int
// merge adds every count from m2 into m in place.
func (m countsMap) merge(m2 countsMap) {
	for key, cnt := range m2 {
		m[key] += cnt
	}
}
// toCounts converts the map into a counts slice sorted ascending by count.
func (m countsMap) toCounts() counts {
	cc := make(counts, 0, len(m))
	for key, cnt := range m {
		cc = append(cc, count{b: key, c: cnt})
	}
	sort.Sort(cc)
	return cc
}
// count pairs an element byte with its occurrence count.
type count struct {
	b byte
	c int
}

// String renders the pair as "<element>:<count>" for debug output.
func (c count) String() string {
	return fmt.Sprintf("%s:%d", []byte{c.b}, c.c)
}
// counts is a slice of element counts, sortable ascending by count.
type counts []count

func (c counts) Len() int { return len(c) }
func (c counts) Swap(i, j int) { c[i], c[j] = c[j], c[i] }
func (c counts) Less(i, j int) bool { return c[i].c < c[j].c }

// result returns the puzzle answer: most common count minus least common.
// Requires the slice to be sorted and non-empty.
func (c counts) result() int {
	return c[len(c)-1].c - c[0].c
}
// makeKey builds the two-character pair key for bytes a and b.
func makeKey(a, b byte) string {
	pair := []byte{a, b}
	return string(pair)
}
// main solves the pair-insertion (polymerization) puzzle: it parses the
// template and rules from input, precomputes per-pair element counts for
// every step up to n via dynamic programming, then prints the answer
// (max count - min count) for each step count.
func main() {
	scanner := bufio.NewScanner(strings.NewReader(input))
	// Skip the leading blank line, then read the polymer template.
	scanner.Scan()
	scanner.Scan()
	start := strings.TrimSpace(scanner.Text())
	scanner.Scan()
	// Parse "XY -> Z" insertion rules; malformed lines are skipped.
	rules := make(map[string]byte)
	for scanner.Scan() {
		split := strings.Split(strings.TrimSpace(scanner.Text()), " -> ")
		if len(split) != 2 {
			continue
		}
		from := split[0]
		to := split[1][0]
		rules[from] = to
	}
	// cache[pair][s] holds the element counts produced by expanding the
	// two-character pair for s steps (both endpoint characters included).
	cache := make(map[string][]countsMap)
	// Step 0: a bare pair contributes just its two characters.
	for a := byte('A'); a <= byte('Z'); a++ {
		for b := byte('A'); b <= byte('Z'); b++ {
			key := makeKey(a, b)
			cache[key] = make([]countsMap, n+1)
			v := make(countsMap)
			v[a]++
			v[b]++
			cache[key][0] = v
		}
	}
	// Build step s from step s-1: expanding "ab" with rule ab->t is the
	// union of expansions of "at" and "tb" (t counted once, not twice).
	for s := 1; s <= n; s++ {
		for a := byte('A'); a <= byte('Z'); a++ {
			for b := byte('A'); b <= byte('Z'); b++ {
				key := makeKey(a, b)
				if to, ok := rules[key]; ok {
					v := make(countsMap)
					v.merge(cache[makeKey(a, to)][s-1])
					v.merge(cache[makeKey(to, b)][s-1])
					v[to]-- // overlaps and double counted
					cache[key][s] = v
				} else {
					// No rule: the pair never expands.
					v := make(countsMap)
					v[a]++
					v[b]++
					cache[key][s] = v
				}
			}
		}
	}
	// Combine per-pair results over the template's adjacent pairs; shared
	// middle characters are subtracted so each is counted exactly once.
	for nn := 1; nn <= n; nn++ {
		m := make(countsMap)
		m[start[0]]++
		for i := 1; i < len(start); i++ {
			key := makeKey(start[i-1], start[i])
			v := cache[key][nn]
			m.merge(v)
			m[start[i-1]]-- // overlaps and double counted
		}
		fmt.Println(nn, m.toCounts().result(), m.toCounts())
	}
}
// input is the raw puzzle input: a blank line, the polymer template, a
// blank line, then one "XY -> Z" insertion rule per line. The contents are
// data and must not be modified.
const input = `
CVKKFSSNNHNPSPPKBHPB
OF -> S
VO -> F
BP -> S
FC -> S
PN -> K
HC -> P
PP -> N
FK -> V
KN -> C
BO -> O
KS -> B
FF -> S
KC -> B
FV -> C
VF -> N
HS -> H
OS -> F
VC -> S
VP -> P
BC -> O
HF -> F
HO -> F
PC -> B
CC -> K
NB -> N
KK -> N
KP -> V
BH -> H
BF -> O
OB -> F
VK -> P
FB -> O
NP -> B
CB -> C
PS -> S
KO -> V
SP -> C
BK -> O
NN -> O
OC -> F
VB -> B
ON -> K
NK -> B
CK -> H
NH -> N
CV -> C
PF -> P
PV -> V
CP -> N
FP -> N
SB -> B
SN -> N
KF -> F
HP -> S
BN -> V
NF -> B
PO -> O
CH -> O
VV -> S
OV -> V
SF -> P
BV -> S
FH -> V
CN -> H
VH -> V
HB -> B
FN -> P
OH -> S
SK -> H
OP -> H
VN -> V
HN -> P
BS -> S
CF -> B
PB -> H
SS -> K
NV -> P
FS -> N
CS -> O
OK -> B
CO -> O
VS -> F
OO -> B
NO -> H
SO -> F
HH -> K
FO -> H
SH -> O
HV -> B
SV -> N
PH -> F
BB -> P
KV -> B
KB -> H
KH -> N
NC -> P
SC -> S
PK -> B
NS -> V
HK -> B
`
package types
import (
"log"
"github.com/golang/geo/s2"
"github.com/twpayne/go-geom"
"github.com/dgraph-io/dgraph/x"
)
// parentCoverTokens concatenates the tokens for the parent cells and the
// cover cells. Parents and cover carry distinct prefixes so that queries
// can look up only the part they need (parents, cover, or both).
func parentCoverTokens(parents s2.CellUnion, cover s2.CellUnion) []string {
	toks := make([]string, 0, len(parents)+len(cover))
	toks = append(toks, createTokens(parents, parentPrefix)...)
	toks = append(toks, createTokens(cover, coverPrefix)...)
	x.AssertTruef(len(toks) == len(parents)+len(cover), "%d %d %d",
		len(toks), len(parents), len(cover))
	return toks
}
// IndexGeoTokens returns the tokens to be used in a geospatial index for the
// given geometry (parent-cell tokens followed by cover-cell tokens). If the
// geometry is not supported it returns an error.
func IndexGeoTokens(g geom.T) ([]string, error) {
	parents, cover, err := indexCells(g)
	if err != nil {
		return nil, err
	}
	return parentCoverTokens(parents, cover), nil
}
// indexCellsForCap returns the covering cells to be used in a geospatial
// index for a Cap, using the package-wide level and cell-count limits.
func indexCellsForCap(c s2.Cap) s2.CellUnion {
	rc := &s2.RegionCoverer{
		MinLevel: MinCellLevel,
		MaxLevel: MaxCellLevel,
		LevelMod: 0,
		MaxCells: MaxCells,
	}
	return rc.Covering(c)
}
const (
	// parentPrefix marks tokens derived from parent cells.
	parentPrefix = "p/"
	// coverPrefix marks tokens derived from covering cells.
	coverPrefix = "c/"
)
// indexCells returns two cellunions. The first is a list of parents, which
// are all the cells up to the min level that contain this geometry. The
// second is the cover, which are the smallest possible cells required to
// cover the region. This makes it easier at query time to query only the
// parents or only the cover or both depending on whether it is a within,
// contains or intersects query. Only 2D points, polygons, and
// multi-polygons are supported.
func indexCells(g geom.T) (parents, cover s2.CellUnion, err error) {
	if g.Stride() != 2 {
		return nil, nil, x.Errorf("Covering only available for 2D co-ordinates.")
	}
	switch v := g.(type) {
	case *geom.Point:
		p, c := indexCellsForPoint(v, MinCellLevel, MaxCellLevel)
		return p, c, nil
	case *geom.Polygon:
		l, err := loopFromPolygon(v)
		if err != nil {
			return nil, nil, err
		}
		cover := coverLoop(l, MinCellLevel, MaxCellLevel, MaxCells)
		parents := getParentCells(cover, MinCellLevel)
		return parents, cover, nil
	case *geom.MultiPolygon:
		var cover s2.CellUnion
		// Convert each polygon to loop. Get cover for each and append to cover.
		for i := 0; i < v.NumPolygons(); i++ {
			p := v.Polygon(i)
			l, err := loopFromPolygon(p)
			if err != nil {
				return nil, nil, err
			}
			cover = append(cover, coverLoop(l, MinCellLevel, MaxCellLevel, MaxCells)...)
		}
		// Get parents for all cells in cover.
		parents := getParentCells(cover, MinCellLevel)
		return parents, cover, nil
	default:
		return nil, nil, x.Errorf("Cannot index geometry of type %T", v)
	}
}
const (
	// MinCellLevel is the smallest cell level (largest cell size) used by indexing
	MinCellLevel = 5 // Approx 250km x 380km
	// MaxCellLevel is the largest cell level (smallest cell size) used by indexing
	MaxCellLevel = 16 // Approx 120m x 180m
	// MaxCells is the maximum number of cells to use when indexing regions.
	MaxCells = 18
)
// pointFromCoord converts a geojson-ordered coordinate to an s2.Point.
// The geojson spec says coordinates are [long, lat]; s2 wants (lat, lng),
// so the axes are swapped here.
func pointFromCoord(r geom.Coord) s2.Point {
	return s2.PointFromLatLng(s2.LatLngFromDegrees(r.Y(), r.X()))
}
// pointFromPoint converts a geom.Point to a s2.Point.
func pointFromPoint(p *geom.Point) s2.Point {
	return pointFromCoord(p.Coords())
}
// loopFromPolygon converts a geom.Polygon to a s2.Loop. We use loops instead
// of s2.Polygon as the s2.Polygon implementation is incomplete. Holes in the
// polygon are ignored; only the outer ring is used.
func loopFromPolygon(p *geom.Polygon) (*s2.Loop, error) {
	// go implementation of s2 does not support more than one loop (and will
	// panic if the size of the loops array > 1). So we will skip the holes in
	// the polygon and just use the outer loop.
	r := p.LinearRing(0)
	n := r.NumCoords()
	if n < 4 {
		return nil, x.Errorf("Can't convert ring with less than 4 pts")
	}
	// WKB/geojson rings repeat the first coordinate at the end.
	if !r.Coord(0).Equal(geom.XY, r.Coord(n-1)) {
		return nil, x.Errorf("Last coordinate not same as first for polygon: %+v\n", p)
	}
	// S2 specifies that the orientation of the polygons should be CCW.
	// However there is no restriction on the orientation in WKB (or geojson).
	// To get the correct orientation we assume that the polygons are always
	// less than one hemisphere. If they are bigger, we flip the orientation.
	reverse := isClockwise(r)
	l := loopFromRing(r, reverse)
	// Since our clockwise check was approximate, we check the cap and
	// reverse if needed.
	if l.CapBound().Radius().Degrees() > 90 {
		l = loopFromRing(r, !reverse)
	}
	return l, nil
}
// isClockwise reports whether the ring winds clockwise, using the shoelace
// formula for planar polygons. This is only an approximation on the sphere:
// it does not handle rings containing the poles or crossing the
// antimeridian, but is fast enough for a first guess.
func isClockwise(r *geom.LinearRing) bool {
	// See https://en.wikipedia.org/wiki/Shoelace_formula
	n := r.NumCoords()
	var area float64
	for i := 0; i < n; i++ {
		c1 := r.Coord(i)
		c2 := r.Coord((i + 1) % n)
		area += (c2.X() - c1.X()) * (c1.Y() + c2.Y())
	}
	return area > 0
}
// loopFromRing converts the ring's coordinates to an s2.Loop, optionally
// walking the ring backwards to flip its orientation.
func loopFromRing(r *geom.LinearRing, reverse bool) *s2.Loop {
	// In WKB, the last coordinate is repeated for a ring to form a closed
	// loop. For s2 the points aren't allowed to repeat and the loop is
	// assumed to be closed, so we skip the last point.
	n := r.NumCoords()
	pts := make([]s2.Point, n-1)
	for i := 0; i < n-1; i++ {
		var c geom.Coord
		if reverse {
			// Walk from the end to invert the winding order.
			c = r.Coord(n - 1 - i)
		} else {
			c = r.Coord(i)
		}
		p := pointFromCoord(c)
		pts[i] = p
	}
	return s2.LoopFromPoints(pts)
}
// indexCellsForPoint creates the index cells for a point: the parents are
// the point's containing cells at every level from minLevel to maxLevel
// (both inclusive), and the cover is the single cell at maxLevel.
func indexCellsForPoint(p *geom.Point, minLevel, maxLevel int) (s2.CellUnion, s2.CellUnion) {
	if maxLevel < minLevel {
		log.Fatalf("Maxlevel should be greater than minLevel")
	}
	c := s2.CellIDFromLatLng(s2.LatLngFromDegrees(p.Y(), p.X()))
	parents := make([]s2.CellID, 0, maxLevel-minLevel+1)
	for l := minLevel; l <= maxLevel; l++ {
		parents = append(parents, c.Parent(l))
	}
	return parents, []s2.CellID{c.Parent(maxLevel)}
}
// getParentCells returns the deduplicated set of ancestor cells (down to
// minLevel) of every cell in cu, including the cells themselves.
func getParentCells(cu s2.CellUnion, minLevel int) s2.CellUnion {
	seen := make(map[s2.CellID]bool)
	for _, c := range cu {
		for l := c.Level(); l >= minLevel; l-- {
			seen[c.Parent(l)] = true
		}
	}
	// Flatten the set into a cell union (order is unspecified).
	cells := make([]s2.CellID, 0, len(seen))
	for id := range seen {
		cells = append(cells, id)
	}
	return cells
}
// coverLoop returns a covering of the loop with at most maxCells cells,
// restricted to levels between minLevel and maxLevel.
func coverLoop(l *s2.Loop, minLevel int, maxLevel int, maxCells int) s2.CellUnion {
	rc := &s2.RegionCoverer{
		MinLevel: minLevel,
		MaxLevel: maxLevel,
		LevelMod: 0,
		MaxCells: maxCells,
	}
	return rc.Covering(l)
}
// appendTokens creates tokens with a certain prefix and append.
func createTokens(cu s2.CellUnion, prefix string) (toks []string) {
for _, c := range cu {
toks = append(toks, prefix+c.ToToken())
}
return toks
} | types/s2index.go | 0.777046 | 0.5794 | s2index.go | starcoder |
package endpoints
import (
"strings"
)
// Lookup returns the endpoint for the given service in the given region plus
// any overrides for the service name and region. Service-specific special
// cases are handled first; anything not matched falls through to the generic
// {service}.{region}.amazonaws.com pattern (with the .cn suffix for China
// regions). Panics when no endpoint can be derived.
func Lookup(service, region string) (uri, newService, newRegion string) {
	switch service {
	case "cloudfront":
		if !strings.HasPrefix(region, "cn-") {
			return format("https://cloudfront.amazonaws.com", service, region), service, "us-east-1"
		}
	case "dynamodb":
		// "local" targets a DynamoDB Local instance for development.
		if region == "local" {
			return format("http://localhost:8000", service, region), "dynamodb", "us-east-1"
		}
	case "elasticmapreduce":
		if strings.HasPrefix(region, "cn-") {
			return format("https://elasticmapreduce.cn-north-1.amazonaws.com.cn", service, region), service, region
		}
		if region == "eu-central-1" {
			return format("https://elasticmapreduce.eu-central-1.amazonaws.com", service, region), service, region
		}
		if region == "us-east-1" {
			return format("https://elasticmapreduce.us-east-1.amazonaws.com", service, region), service, region
		}
		if region != "" {
			return format("https://{region}.elasticmapreduce.amazonaws.com", service, region), service, region
		}
	case "iam":
		if strings.HasPrefix(region, "cn-") {
			return format("https://{service}.cn-north-1.amazonaws.com.cn", service, region), service, region
		}
		if strings.HasPrefix(region, "us-gov") {
			return format("https://{service}.us-gov.amazonaws.com", service, region), service, region
		}
		// IAM is a global service outside China/GovCloud.
		return format("https://iam.amazonaws.com", service, region), service, "us-east-1"
	case "importexport":
		if !strings.HasPrefix(region, "cn-") {
			return format("https://importexport.amazonaws.com", service, region), service, region
		}
	case "rds":
		if region == "us-east-1" {
			return format("https://rds.amazonaws.com", service, region), service, region
		}
	case "route53":
		if !strings.HasPrefix(region, "cn-") {
			return format("https://route53.amazonaws.com", service, region), service, region
		}
	case "s3":
		if region == "us-east-1" || region == "" {
			return format("{scheme}://s3.amazonaws.com", service, region), service, "us-east-1"
		}
		if strings.HasPrefix(region, "cn-") {
			return format("{scheme}://{service}.{region}.amazonaws.com.cn", service, region), service, region
		}
		// Regions using the legacy dash-style s3-{region} endpoint.
		// NOTE(review): the "us-east-1" clause below is unreachable — it is
		// already returned above; consider removing it.
		if region == "us-east-1" || region == "ap-northeast-1" || region == "sa-east-1" || region == "ap-southeast-1" || region == "ap-southeast-2" || region == "us-west-2" || region == "us-west-1" || region == "eu-west-1" || region == "us-gov-west-1" || region == "fips-us-gov-west-1" {
			return format("{scheme}://{service}-{region}.amazonaws.com", service, region), service, region
		}
		if region != "" {
			return format("{scheme}://{service}.{region}.amazonaws.com", service, region), service, region
		}
	case "sdb":
		if region == "us-east-1" {
			return format("https://sdb.amazonaws.com", service, region), service, region
		}
	case "sqs":
		// SQS uses the historical "queue" hostnames.
		if region == "us-east-1" {
			return format("https://queue.amazonaws.com", service, region), service, region
		}
		if strings.HasPrefix(region, "cn-") {
			return format("https://{region}.queue.amazonaws.com.cn", service, region), service, region
		}
		if region != "" {
			return format("https://{region}.queue.amazonaws.com", service, region), service, region
		}
	case "sts":
		if strings.HasPrefix(region, "cn-") {
			return format("{scheme}://{service}.cn-north-1.amazonaws.com.cn", service, region), service, region
		}
		if strings.HasPrefix(region, "us-gov") {
			return format("https://{service}.{region}.amazonaws.com", service, region), service, region
		}
		// STS is a global service outside China/GovCloud.
		return format("https://sts.amazonaws.com", service, region), service, "us-east-1"
	}
	// Generic fallback patterns.
	if strings.HasPrefix(region, "cn-") {
		return format("{scheme}://{service}.{region}.amazonaws.com.cn", service, region), service, region
	}
	if region != "" {
		return format("{scheme}://{service}.{region}.amazonaws.com", service, region), service, region
	}
	panic("unknown endpoint for " + service + " in " + region)
}
// format expands the {scheme}, {service}, and {region} placeholders in uri.
// The scheme is always https.
func format(uri, service, region string) string {
	uri = strings.ReplaceAll(uri, "{scheme}", "https")
	uri = strings.ReplaceAll(uri, "{service}", service)
	uri = strings.ReplaceAll(uri, "{region}", region)
	return uri
}
package binarytree
// TreeNode is a binary tree node holding an int value and optional children.
type TreeNode struct {
	Val int
	Left *TreeNode
	Right *TreeNode
}
// PreorderTraversal returns the tree's values in root-left-right order.
// A nil root yields a nil slice.
func PreorderTraversal(root *TreeNode) []int {
	if root == nil {
		return nil
	}
	res := []int{root.Val}
	res = append(res, PreorderTraversal(root.Left)...)
	res = append(res, PreorderTraversal(root.Right)...)
	return res
}
// InorderTraversal returns the tree's values in left-root-right order.
// A nil root yields a nil slice.
func InorderTraversal(root *TreeNode) []int {
	if root == nil {
		return nil
	}
	res := append(InorderTraversal(root.Left), root.Val)
	return append(res, InorderTraversal(root.Right)...)
}
// PostorderTraversal returns the tree's values in left-right-root order.
// A nil root yields a nil slice.
func PostorderTraversal(root *TreeNode) []int {
	if root == nil {
		return nil
	}
	res := PostorderTraversal(root.Left)
	res = append(res, PostorderTraversal(root.Right)...)
	return append(res, root.Val)
}
// LevelOrder returns the tree's values grouped level by level (BFS).
// A nil root yields a nil result.
//
// Cleanup: replaced the `for ; !isEnd;` loop, the yoda `nil == root`
// comparison, and the isEnd flag with an idiomatic queue-driven BFS loop.
// Behavior is unchanged.
func LevelOrder(root *TreeNode) [][]int {
	var res [][]int
	if root == nil {
		return res
	}
	queue := []*TreeNode{root}
	for len(queue) > 0 {
		// Collect this level's values and gather the next level's nodes.
		level := make([]int, 0, len(queue))
		var next []*TreeNode
		for _, node := range queue {
			level = append(level, node.Val)
			if node.Left != nil {
				next = append(next, node.Left)
			}
			if node.Right != nil {
				next = append(next, node.Right)
			}
		}
		res = append(res, level)
		queue = next
	}
	return res
}
// maxDepth returns the number of nodes on the longest root-to-leaf path.
func maxDepth(root *TreeNode) int {
	if root == nil {
		return 0
	}
	l, r := maxDepth(root.Left), maxDepth(root.Right)
	if l >= r {
		return l + 1
	}
	return r + 1
}
// IsSymmetric reports whether the tree is a mirror image of itself.
// The empty tree is symmetric.
func IsSymmetric(root *TreeNode) bool {
	return root == nil || is(root.Left, root.Right)
}
// is reports whether the subtrees rooted at left and right are mirror
// images of each other.
func is(left, right *TreeNode) bool {
	switch {
	case left == nil || right == nil:
		// Mirrors only if both sides are empty.
		return left == right
	case left.Val != right.Val:
		return false
	default:
		return is(left.Left, right.Right) && is(left.Right, right.Left)
	}
}
func AsPathSum(root *TreeNode, sum int) bool {
var fun func(node *TreeNode, tem int) bool
fun = func(node *TreeNode, tem int) bool {
if node == nil {
return false
}
if tem+node.Val == sum && node.Left == nil && node.Right == nil {
return true
}
return fun(node.Left, tem+node.Val) || fun(node.Right, tem+node.Val)
}
return fun(root, 0)
} | gofun/letcode/binarytree/binarytree.go | 0.79538 | 0.499451 | binarytree.go | starcoder |
package pendingtxs
import (
"fmt"
"github.com/spacemeshos/go-spacemesh/common/types"
)
// meshProjector projects nonce and balance over unapplied mesh transactions;
// it can fail (e.g. on database errors).
type meshProjector interface {
	GetProjection(addr types.Address, prevNonce, prevBalance uint64) (nonce, balance uint64, err error)
}

// poolProjector projects nonce and balance over mempool transactions;
// it cannot fail.
type poolProjector interface {
	GetProjection(addr types.Address, prevNonce, prevBalance uint64) (nonce, balance uint64)
}
// MeshAndPoolProjector provides nonce and balance projections based on
// unapplied transactions from the mesh and the mempool, applied in that
// order.
type MeshAndPoolProjector struct {
	mesh meshProjector
	pool poolProjector
}
// GetProjection returns a projected nonce and balance after applying
// transactions from the mesh and then the mempool, given the previous
// values. Errors can stem from database errors in the mesh (IO or
// deserialization errors).
func (p *MeshAndPoolProjector) GetProjection(addr types.Address, prevNonce, prevBalance uint64) (nonce, balance uint64, err error) {
	meshNonce, meshBalance, err := p.mesh.GetProjection(addr, prevNonce, prevBalance)
	if err != nil {
		return 0, 0, fmt.Errorf("get mesh projection: %w", err)
	}
	nonce, balance = p.pool.GetProjection(addr, meshNonce, meshBalance)
	return nonce, balance, nil
}
// NewMeshAndPoolProjector returns a new MeshAndPoolProjector backed by the
// given mesh and pool projectors.
func NewMeshAndPoolProjector(mesh meshProjector, pool poolProjector) *MeshAndPoolProjector {
	return &MeshAndPoolProjector{mesh: mesh, pool: pool}
}
// globalState exposes the committed balance and nonce for an address.
type globalState interface {
	GetBalance(addr types.Address) uint64
	GetNonce(addr types.Address) uint64
}
// StateAndMeshProjector provides nonce and balance projections based on the
// global state and unapplied transactions on the mesh.
type StateAndMeshProjector struct {
	state globalState
	mesh meshProjector
}
// NewStateAndMeshProjector returns a new StateAndMeshProjector backed by the
// given global state and mesh projector.
func NewStateAndMeshProjector(state globalState, mesh meshProjector) *StateAndMeshProjector {
	return &StateAndMeshProjector{state: state, mesh: mesh}
}
// GetProjection returns a projected nonce and balance after applying transactions from the mesh on top of the global
// state. Errors can stem from database errors in the mesh (IO or deserialization errors).
func (p *StateAndMeshProjector) GetProjection(addr types.Address) (nonce, balance uint64, err error) {
nonce, balance, err = p.mesh.GetProjection(addr, p.state.GetNonce(addr), p.state.GetBalance(addr))
if err != nil {
return nonce, balance, fmt.Errorf("get mesh projection: %w", err)
}
return nonce, balance, nil
} | pendingtxs/projector.go | 0.744099 | 0.438424 | projector.go | starcoder |
package some_structures
import (
"fmt"
"sync"
)
// TreeNode is a node of the binary search tree, ordered by key.
type TreeNode struct {
	key int
	value int
	leftNode *TreeNode
	rightNode *TreeNode
}
// BinarySearchTree is a binary search tree guarded by a read/write lock for
// concurrent use.
type BinarySearchTree struct {
	rootNode *TreeNode
	lock sync.RWMutex
}
// InsertElement inserts a key/value pair into the tree, holding the write
// lock for the duration of the insert.
//
// Cleanup: the separate `var` declaration followed by assignment is folded
// into a single short declaration; behavior is unchanged.
func (tree *BinarySearchTree) InsertElement(key int, value int) {
	tree.lock.Lock()
	defer tree.lock.Unlock()
	node := &TreeNode{key, value, nil, nil}
	if tree.rootNode == nil {
		tree.rootNode = node
		return
	}
	insertTreeNode(tree.rootNode, node)
}
// insertTreeNode places newTreeNode into the subtree rooted at rootNode,
// keeping the BST invariant: smaller keys go left, larger-or-equal keys go
// right.
//
// Fix: the original nested the right-subtree handling inside the
// "key < rootNode.key" branch, so keys >= the root key were silently
// dropped and smaller keys could be routed into the right subtree.
func insertTreeNode(rootNode *TreeNode, newTreeNode *TreeNode) {
	if newTreeNode.key < rootNode.key {
		if rootNode.leftNode == nil {
			rootNode.leftNode = newTreeNode
		} else {
			insertTreeNode(rootNode.leftNode, newTreeNode)
		}
		return
	}
	if rootNode.rightNode == nil {
		rootNode.rightNode = newTreeNode
	} else {
		insertTreeNode(rootNode.rightNode, newTreeNode)
	}
}
// InOrderTraverseTree visits every node in ascending key order, calling
// function with each node's value. It holds the read lock while walking.
//
// Fix: a lock acquired with RLock must be released with RUnlock; the
// original deferred Unlock, which releases the writer side of the RWMutex.
func (tree *BinarySearchTree) InOrderTraverseTree(function func(int)) {
	tree.lock.RLock()
	defer tree.lock.RUnlock()
	inOrderTraverseTree(tree.rootNode, function)
}
// inOrderTraverseTree walks the subtree left-root-right, invoking function
// with each node's value.
func inOrderTraverseTree(treeNode *TreeNode, function func(int)) {
	if treeNode == nil {
		return
	}
	inOrderTraverseTree(treeNode.leftNode, function)
	function(treeNode.value)
	inOrderTraverseTree(treeNode.rightNode, function)
}
// PreOrderTraverseTree visits every node root-left-right, calling function
// with each node's value.
//
// Improvement: traversal only reads the tree, so it takes the read lock
// (RLock/RUnlock) instead of the exclusive write lock, consistent with the
// other read-only methods and allowing concurrent readers.
func (tree *BinarySearchTree) PreOrderTraverseTree(function func(int)) {
	tree.lock.RLock()
	defer tree.lock.RUnlock()
	preOrderTraverseTree(tree.rootNode, function)
}
// preOrderTraverseTree walks the subtree root-left-right, invoking function
// with each node's value.
func preOrderTraverseTree(treeNode *TreeNode, function func(int)) {
	if treeNode == nil {
		return
	}
	function(treeNode.value)
	preOrderTraverseTree(treeNode.leftNode, function)
	preOrderTraverseTree(treeNode.rightNode, function)
}
// PostOrderTraverseTree visits every node left-right-root, calling function
// with each node's value.
//
// Improvement: traversal only reads the tree, so it takes the read lock
// (RLock/RUnlock) instead of the exclusive write lock, consistent with the
// other read-only methods and allowing concurrent readers.
func (tree *BinarySearchTree) PostOrderTraverseTree(function func(int)) {
	tree.lock.RLock()
	defer tree.lock.RUnlock()
	postOrderTraverseTree(tree.rootNode, function)
}
// postOrderTraverseTree walks the subtree left-right-root, invoking function
// with each node's value.
func postOrderTraverseTree(treeNode *TreeNode, function func(int)) {
	if treeNode == nil {
		return
	}
	postOrderTraverseTree(treeNode.leftNode, function)
	postOrderTraverseTree(treeNode.rightNode, function)
	function(treeNode.value)
}
// MinNode returns a pointer to the smallest value in the tree, or nil
// when the tree is empty. The minimum lives at the leftmost node.
//
// BUG FIX: the original took Lock() but deferred RUnlock(), a mismatched
// pair — sync.RWMutex panics with "RUnlock of unlocked RWMutex" and the
// write lock was never released. Read-only access uses RLock/RUnlock.
func (tree *BinarySearchTree) MinNode() *int {
	tree.lock.RLock()
	defer tree.lock.RUnlock()
	node := tree.rootNode
	if node == nil {
		return nil
	}
	for node.leftNode != nil {
		node = node.leftNode
	}
	return &node.value
}
// MaxNode returns a pointer to the largest value stored in the tree, or a
// nil pointer when the tree is empty. The maximum is the rightmost node.
func (tree *BinarySearchTree) MaxNode() *int {
	tree.lock.RLock()
	defer tree.lock.RUnlock()
	node := tree.rootNode
	if node == nil {
		return nil
	}
	for node.rightNode != nil {
		node = node.rightNode
	}
	return &node.value
}
// SearchNode reports whether a node with the given key exists in the tree.
func (tree *BinarySearchTree) SearchNode(key int) bool {
	tree.lock.RLock()
	found := searchNode(tree.rootNode, key)
	tree.lock.RUnlock()
	return found
}
// searchNode walks the subtree rooted at treeNode looking for key,
// descending left for smaller keys and right for larger ones.
func searchNode(treeNode *TreeNode, key int) bool {
	for treeNode != nil {
		switch {
		case key < treeNode.key:
			treeNode = treeNode.leftNode
		case key > treeNode.key:
			treeNode = treeNode.rightNode
		default:
			return true
		}
	}
	return false
}
// removeNode deletes the node with the given key from the subtree rooted
// at treeNode and returns the (possibly new) subtree root.
//
// BUG FIX: the original was missing `return treeNode` after recursing into
// the left subtree, so control fell through to the deletion cases and
// could wrongly delete (or replace) the current node itself.
func removeNode(treeNode *TreeNode, key int) *TreeNode {
	if treeNode == nil {
		return nil
	}
	if key < treeNode.key {
		treeNode.leftNode = removeNode(treeNode.leftNode, key)
		return treeNode
	}
	if key > treeNode.key {
		treeNode.rightNode = removeNode(treeNode.rightNode, key)
		return treeNode
	}
	// key == treeNode.key: this is the node to delete.
	if treeNode.leftNode == nil && treeNode.rightNode == nil {
		// Leaf: simply drop it.
		return nil
	}
	if treeNode.leftNode == nil {
		// Only a right child: promote it.
		return treeNode.rightNode
	}
	if treeNode.rightNode == nil {
		// Only a left child: promote it.
		return treeNode.leftNode
	}
	// Two children: copy in the in-order successor (leftmost node of the
	// right subtree), then delete that successor from the right subtree.
	leftmostrightNode := treeNode.rightNode
	for leftmostrightNode.leftNode != nil {
		leftmostrightNode = leftmostrightNode.leftNode
	}
	treeNode.key, treeNode.value = leftmostrightNode.key, leftmostrightNode.value
	treeNode.rightNode = removeNode(treeNode.rightNode, treeNode.key)
	return treeNode
}
// String pretty-prints the tree to stdout between banner lines.
//
// BUG FIX: the original did `defer tree.lock.Lock()`, which tries to
// ACQUIRE the lock again on return instead of releasing it — the second
// call to String (or any later write) would deadlock. Printing is
// read-only, so the read lock is used and properly released.
func (tree *BinarySearchTree) String() {
	tree.lock.RLock()
	defer tree.lock.RUnlock()
	fmt.Println("************************************************")
	stringify(tree.rootNode, 0)
	fmt.Println("************************************************")
}
// stringify prints the subtree sideways: the left subtree is printed
// above and the right below, each key prefixed by "***> " and indented
// one extra space per tree level.
func stringify(treeNode *TreeNode, level int) {
	if treeNode == nil {
		return
	}
	indent := ""
	for i := 0; i < level; i++ {
		indent += " "
	}
	stringify(treeNode.leftNode, level+1)
	fmt.Printf(indent+"***> "+"%d\n", treeNode.key)
	stringify(treeNode.rightNode, level+1)
}
// print dumps the whole tree to stdout for debugging.
//
// BUG FIX: the original dereferenced tree.rootNode.value whenever the
// tree pointer was non-nil, panicking on an empty (rootless) tree; the
// nil check now covers both cases.
func print(tree *BinarySearchTree) {
	if tree != nil && tree.rootNode != nil {
		fmt.Println(" Value", tree.rootNode.value)
		fmt.Printf("Root Tree Node")
		printTreeNode(tree.rootNode)
	} else {
		fmt.Printf("Nil\n")
	}
}
// printTreeNode recursively dumps a node's value followed by its left and
// right subtrees; nil nodes print "Nil".
func printTreeNode(treeNode *TreeNode) {
	if treeNode == nil {
		fmt.Printf("Nil\n")
		return
	}
	fmt.Println(" Value", treeNode.value)
	fmt.Printf("TreeNode Left")
	printTreeNode(treeNode.leftNode)
	fmt.Printf("TreeNode Right")
	printTreeNode(treeNode.rightNode)
}
// handleRunBinarySearchTree demonstrates the tree: it inserts five sample
// key/value pairs and pretty-prints the resulting structure.
func handleRunBinarySearchTree() {
	var tree *BinarySearchTree = &BinarySearchTree{}
	tree.InsertElement(8, 8)
	tree.InsertElement(3, 3)
	tree.InsertElement(10, 10)
	tree.InsertElement(1, 1)
	tree.InsertElement(6, 6)
	tree.String()
} | some-structures/binary_search_tree.go | 0.53607 | 0.453867 | binary_search_tree.go | starcoder |
package memory
import (
"sort"
"time"
info "github.com/google/cadvisor/info/v1"
)
// A circular buffer for ContainerStats.
// Samples are kept in a fixed-size ring; index points at the most recently
// added element, so elements are effectively stored newest-first.
type StatsBuffer struct {
	buffer []*info.ContainerStats // fixed-size ring storage
	size int // number of valid samples currently stored
	index int // slot of the most recently added sample
}
// NewStatsBuffer returns a new thread-compatible StatsBuffer that can
// hold up to size samples. index starts at size-1 so the first Add wraps
// to slot 0.
func NewStatsBuffer(size int) *StatsBuffer {
	b := &StatsBuffer{
		buffer: make([]*info.ContainerStats, size),
		size:   0,
		index:  size - 1,
	}
	return b
}
// Adds an element to the start of the buffer (removing one from the end if necessary).
// The item is copied, so the caller may mutate or reuse it afterwards.
func (self *StatsBuffer) Add(item *info.ContainerStats) {
	// Grow the logical size until the ring is full; after that, writes
	// overwrite the oldest sample.
	if self.size < len(self.buffer) {
		self.size++
	}
	// Advance the head, wrapping around the fixed-size ring.
	self.index = (self.index + 1) % len(self.buffer)
	copied := *item
	self.buffer[self.index] = &copied
}
// Returns up to maxResults elements in the specified time period (inclusive).
// Results are ordered from first (oldest) to last (newest). maxResults of -1
// means no limit. When both first and last are specified, maxResults is
// ignored. A zero start or end means "unbounded" on that side.
func (self *StatsBuffer) InTimeRange(start, end time.Time, maxResults int) []*info.ContainerStats {
	// No stats, return empty.
	if self.size == 0 {
		return []*info.ContainerStats{}
	}
	// Return all results in a time range if specified.
	if !start.IsZero() && !end.IsZero() {
		maxResults = -1
	}
	// NOTE: Since we store the elements in descending timestamp order "start" will
	// be a higher index than "end" (Get(0) is the newest sample).
	var startIndex int
	if start.IsZero() {
		// None specified, start at the beginning.
		startIndex = self.size - 1
	} else {
		// Start is the index before the elements smaller than it. We do this by
		// finding the first element smaller than start and taking the index
		// before that element.
		startIndex = sort.Search(self.size, func(index int) bool {
			// buffer[index] < start
			return self.Get(index).Timestamp.Before(start)
		}) - 1
		// Check if start is after all the data we have.
		if startIndex < 0 {
			return []*info.ContainerStats{}
		}
	}
	var endIndex int
	if end.IsZero() {
		// None specified, end with the latest stats.
		endIndex = 0
	} else {
		// End is the first index smaller than or equal to it (so, not larger).
		endIndex = sort.Search(self.size, func(index int) bool {
			// buffer[index] <= t -> !(buffer[index] > t)
			return !self.Get(index).Timestamp.After(end)
		})
		// Check if end is before all the data we have.
		if endIndex == self.size {
			return []*info.ContainerStats{}
		}
	}
	// Trim to maxResults size, dropping the oldest entries first.
	numResults := startIndex - endIndex + 1
	if maxResults != -1 && numResults > maxResults {
		startIndex -= numResults - maxResults
		numResults = maxResults
	}
	// Return in sorted timestamp order so from the "back" to "front".
	result := make([]*info.ContainerStats, numResults)
	for i := 0; i < numResults; i++ {
		result[i] = self.Get(startIndex - i)
	}
	return result
}
// TODO(vmarmol): Remove this function as it will no longer be needed.
// Returns the first N elements in the buffer. If N > size of buffer, size of buffer elements are returned.
// Returns the elements in ascending timestamp order.
func (self *StatsBuffer) FirstN(n int) []*info.ContainerStats {
	// Cap n at the number of elements we have.
	if n > self.size {
		n = self.size
	}
	// index points to the latest element, get n before that one (keeping in mind we may have gone through 0).
	start := self.index - (n - 1)
	if start < 0 {
		start += len(self.buffer)
	}
	// Copy the elements oldest-first, wrapping around the ring.
	res := make([]*info.ContainerStats, n)
	for i := 0; i < n; i++ {
		index := (start + i) % len(self.buffer)
		res[i] = self.buffer[index]
	}
	return res
}
// Get returns the index-th most recent element; elements are stored in
// LIFO order, so index 0 is the newest sample.
func (self *StatsBuffer) Get(index int) *info.ContainerStats {
	i := self.index - index
	if i < 0 {
		i += len(self.buffer)
	}
	return self.buffer[i]
}
// Size returns the number of samples currently stored in the buffer.
func (self *StatsBuffer) Size() int {
	return self.size
} | Godeps/_workspace/src/github.com/google/cadvisor/storage/memory/stats_buffer.go | 0.689096 | 0.407746 | stats_buffer.go | starcoder |
package ds;
// SparseList is a growable list whose removed slots are recycled: freed
// indices are remembered and handed back out by Take/Add, so indices of
// live items stay stable until Compress is called.
type SparseList[T any] struct {
	items []T; // backing storage, may contain stale values at freed slots
	free []uint32; // indices of slots available for reuse
}
// NewSparseList allocates an empty SparseList with the given initial
// capacities for the item storage and the free-index list.
func NewSparseList[T any](capacity uint32, freeCapacity uint32) *SparseList[T] {
	return &SparseList[T]{
		items: make([]T, 0, capacity),
		free:  make([]uint32, 0, freeCapacity),
	}
}
// At returns a pointer to the item stored at the given slot index.
func (this *SparseList[T]) At(index uint32) *T {
	item := &this.items[index]
	return item
}
// Take hands out a slot and its index: a previously freed index when one
// exists, otherwise a fresh zero-valued slot appended to the end. A
// recycled slot may still hold its previous contents, so the value could
// be in any state.
func (this *SparseList[T]) Take() (*T, uint32) {
	if n := len(this.free); n > 0 {
		index := this.free[n-1]
		this.free = this.free[:n-1]
		return &this.items[index], index
	}
	index := uint32(len(this.items))
	var zero T
	this.items = append(this.items, zero)
	return &this.items[index], index
}
// Add stores value in a free slot (reusing a freed index when available)
// and returns the index it was stored at.
func (this *SparseList[T]) Add(value T) uint32 {
	slot, index := this.Take()
	*slot = value
	return index
}
// Free marks the slot at index as reusable; its current value is left in
// place until the slot is handed out again.
func (this *SparseList[T]) Free(index uint32) {
	this.free = append(this.free, index)
}
// Removes the value at the given index and replaces it with the value at the end of the list and returns
// the index of that last item (swap-remove).
// NOTE(review): index is an int here while the rest of this type uses
// uint32, and this method does not consult the free list — if the last
// slot happens to be a freed slot, its stale value is moved into index.
// Confirm callers only use Remove when no freed slots are outstanding.
func (this *SparseList[T]) Remove(index int) int {
	replacedWith := len(this.items) - 1;
	this.items[index] = this.items[replacedWith];
	this.items = this.items[:replacedWith];
	return replacedWith;
}
// Compress removes the gaps left by freed slots, shifting live items down
// so the list becomes dense again. For every item whose index changes,
// moved is invoked with the new index, the old index, and a pointer to
// the item. The free list is emptied afterwards, so indices handed out
// before the call are invalidated.
func (this *SparseList[T]) Compress(moved func (newIndex uint32, oldIndex uint32, item *T)) {
	if (len(this.free) > 0) {
		freeMap := this.FreeMap();
		var newIndex uint32;
		for oi, item := range this.items {
			oldIndex := uint32(oi);
			if (!freeMap[oldIndex]) {
				if (newIndex != oldIndex) {
					// NOTE(review): moved receives a pointer to the range
					// loop's copy, not to the slice element — mutations the
					// callback makes through *item are not reflected in the
					// list. Confirm this is intended.
					moved(newIndex, oldIndex, &item);
					this.items[newIndex] = item;
				}
				newIndex++;
			}
		}
		this.items = this.items[:newIndex];
		this.free = this.free[:0];
	}
}
// FreeMap builds a set of the currently freed slot indices.
func (this *SparseList[T]) FreeMap() map[uint32]bool {
	set := make(map[uint32]bool, len(this.free))
	for _, index := range this.free {
		set[index] = true
	}
	return set
}
// Iterate calls handle for every live item with a pointer to the item,
// its slot index, and its ordinal among live items (liveIndex). Freed
// slots are skipped; when there are none, a fast path avoids building
// the free-index set and slot index equals liveIndex.
func (this *SparseList[T]) Iterate(handle func (item *T, index uint32, liveIndex uint32)) {
	if (len(this.free) == 0) {
		for i := range this.items {
			index := uint32(i);
			handle(&this.items[index], index, index);
		}
	} else {
		freeMap := this.FreeMap();
		liveIndex := uint32(0);
		for i := range this.items {
			index := uint32(i);
			if (!freeMap[index]) {
				handle(&this.items[index], index, liveIndex);
				liveIndex++;
			}
		}
	}
}
// Pointers collects pointers to every live item, in iteration order.
func (this *SparseList[T]) Pointers() []*T {
	result := make([]*T, 0, this.Size())
	this.Iterate(func(item *T, _ uint32, _ uint32) {
		result = append(result, item)
	})
	return result
}
// Values collects a copy of every live item, in iteration order.
func (this *SparseList[T]) Values() []T {
	result := make([]T, 0, this.Size())
	this.Iterate(func(item *T, _ uint32, _ uint32) {
		result = append(result, *item)
	})
	return result
}
// Size returns the number of live (non-freed) items in the list.
func (this *SparseList[T]) Size() int {
	return len(this.items) - len(this.free)
}
// Remaining reports how many items can be added before the backing slice
// must grow: unused capacity plus the number of freed, reusable slots.
func (this *SparseList[T]) Remaining() int {
	return cap(this.items) - len(this.items) + len(this.free);
} | pkg/ds/sparse.go | 0.641422 | 0.402451 | sparse.go | starcoder |
package main
import "log"
// Dense is a fully connected neural-network layer computing
// act(x·Weights + Bias). Layers form a doubly linked chain via prev/next.
type Dense struct {
	Weights Matrix // connection weights, prev.NumCols() rows x Cols columns
	Bias Matrix // per-neuron bias, 1 x Cols
	Cols int // number of neurons (output width)
	prev Layer // preceding layer in the chain
	next Layer // following layer; nil marks the output layer
	Activation func(float32) float32 // activation function applied to z
	ActivationDerivative func(float32) float32 // derivative of Activation, used in backprop
	name string // human-readable layer name
}
// MakeDense creates a Dense layer with cols neurons on top of prev, with
// randomly initialized weights and bias, and wires it into the chain by
// setting prev's next pointer to the new layer.
func MakeDense(prev Layer, cols int, activation func(float32) float32, derAct func(float32) float32, name string) *Dense {
	d := &Dense{
		MakeRandomMatrix(prev.NumCols(), cols),
		MakeRandomMatrix(1, cols),
		cols,
		prev,
		nil, // next is wired up when a subsequent layer is created
		activation,
		derAct,
		name,
	}
	prev.SetNext(d)
	return d
}
// Predict runs a forward pass through this layer and, when one exists,
// all following layers, returning the final activations.
func (d *Dense) Predict(m Matrix) Matrix {
	out := m.DotPlusAct(d.Weights, d.Bias, d.Activation)
	if d.next == nil {
		return out
	}
	return d.next.Predict(out)
}
// Forward computes this layer's pre-activation z = m·W + b and its
// activation a = act(z), returning both (z is needed by backprop).
func (d *Dense) Forward(m Matrix) (Matrix, Matrix) {
	z := m.DotPlusAct(d.Weights, d.Bias, nil)
	a := z.Activation(d.Activation)
	return z, a
}
// Train runs a forward pass on x and then backpropagates. The output
// layer derives its gradient from the loss derivative against y; hidden
// layers receive the gradient returned by the next layer's Train. It
// returns the gradient to propagate to the previous layer.
//
// Cleanup: removed a commented-out debug block that logged the training
// loss (dead code).
func (d *Dense) Train(x Matrix, y Matrix, params TrainParams) Matrix {
	z, a := d.Forward(x)
	if d.next == nil {
		// Output layer: gradient of the loss w.r.t. the activations.
		gradient := params.LossDerivative(a, y)
		return d.Back(x, z, a, gradient, params.LearningRate)
	}
	// Hidden layer: recurse forward, then backpropagate the returned gradient.
	gradient := d.next.Train(a, y, params)
	return d.Back(x, z, a, gradient, params.LearningRate)
}
// Back performs this layer's backpropagation step.
// x is the layer input, z the pre-activation, a the activation (kept for
// interface symmetry), gradient is dLoss/da for this layer, lr the
// learning rate. Bias and Weights are updated in place; the returned
// matrix is dLoss/da for the previous layer, computed with the weights
// as they were BEFORE the update.
//
// Cleanup: removed an `if false { ... }` dead debug block and stale
// commented-out logging statements; the live logic is unchanged.
func (d *Dense) Back(x Matrix, z Matrix, a Matrix, gradient Matrix, lr float32) Matrix {
	// Keep the pre-update weights: the gradient passed back to the
	// previous layer must be computed with them.
	weightsCopy := d.Weights.Clone()
	// delta = act'(z) ⊙ dLoss/da, shared by the bias and weight updates.
	sigPrime := z.Activation(d.ActivationDerivative)
	zPrimeLossPrime := sigPrime.Hadamard(gradient)
	// Bias update: dLoss/db = delta.
	d.Bias = d.Bias.Diff(zPrimeLossPrime.Mult(lr))
	// Weight update: outer product of the input with delta (see weightUpdate).
	d.Weights = d.Weights.Diff(d.weightUpdate(x, zPrimeLossPrime).Mult(lr))
	// Gradient for the previous layer: W · deltaᵀ, transposed back to a row.
	return weightsCopy.Dot(zPrimeLossPrime.T()).T()
}
// weightUpdate builds the weight-gradient matrix where entry [i][j] is
// zPrimeLossPrime[0][j] * aPrev[0][i] — the outer product of the previous
// layer's activations with the delta row.
// NOTE(review): the result starts as a clone of d.Weights, so any entries
// not covered by the loops keep old WEIGHT values rather than zeros —
// confirm aPrev/zPrimeLossPrime always span the full weight dimensions.
func (d Dense) weightUpdate(aPrev Matrix, zPrimeLossPrime Matrix) Matrix {
	ret := d.Weights.Clone()
	for i := 0; i < aPrev.Cols(); i++ {
		for j := 0; j < zPrimeLossPrime.Cols(); j++ {
			ret[i][j] = zPrimeLossPrime[0][j] * aPrev[0][i]
		}
	}
	return ret
}
// Prev returns the preceding layer in the chain.
func (d Dense) Prev() Layer {
	return d.prev
}
// Next returns the following layer, or nil for the output layer.
func (d Dense) Next() Layer {
	return d.next
}
// SetNext wires next as the layer following this one.
func (d *Dense) SetNext(next Layer) {
	d.next = next
}
// NumCols returns the layer's output width (number of neurons).
func (d Dense) NumCols() int {
	return d.Cols
}
// NumRows returns the layer's input width (rows of the weight matrix).
func (d Dense) NumRows() int {
	return len(d.Weights)
}
// Name returns the layer's human-readable name.
func (d Dense) Name() string {
	return d.name
} | dense.go | 0.756537 | 0.501465 | dense.go | starcoder |
package cryptoapis
import (
"encoding/json"
)
// AddressCoinsTransactionUnconfirmedDataItem Defines an `item` as one result.
type AddressCoinsTransactionUnconfirmedDataItem struct {
// Represents the specific blockchain protocol name, e.g. Ethereum, Bitcoin, etc.
Blockchain string `json:"blockchain"`
// Represents the name of the blockchain network used; blockchain networks are usually identical as technology and software, but they differ in data, e.g. - \"mainnet\" is the live network with actual data while networks like \"testnet\", \"ropsten\", \"rinkeby\" are test networks.
Network string `json:"network"`
// Defines the specific address to which the coin transaction has been sent and is pending confirmation.
Address string `json:"address"`
// Defines the unique ID of the specific transaction, i.e. its identification number.
TransactionId string `json:"transactionId"`
// Defines the amount of coins sent with the transaction that is pending confirmation.
Amount string `json:"amount"`
// Defines the unit of the transaction, e.g. BTC.
Unit string `json:"unit"`
// Defines whether the transaction is \"incoming\" or \"outgoing\".
Direction string `json:"direction"`
// Defines the exact time the transaction has been first accepted into the mempool to await confirmation as timestamp.
FirstSeenInMempoolTimestamp int32 `json:"firstSeenInMempoolTimestamp"`
}
// NewAddressCoinsTransactionUnconfirmedDataItem instantiates a new AddressCoinsTransactionUnconfirmedDataItem object.
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed.
func NewAddressCoinsTransactionUnconfirmedDataItem(blockchain string, network string, address string, transactionId string, amount string, unit string, direction string, firstSeenInMempoolTimestamp int32) *AddressCoinsTransactionUnconfirmedDataItem {
	return &AddressCoinsTransactionUnconfirmedDataItem{
		Blockchain:                  blockchain,
		Network:                     network,
		Address:                     address,
		TransactionId:               transactionId,
		Amount:                      amount,
		Unit:                        unit,
		Direction:                   direction,
		FirstSeenInMempoolTimestamp: firstSeenInMempoolTimestamp,
	}
}
// NewAddressCoinsTransactionUnconfirmedDataItemWithDefaults instantiates a new AddressCoinsTransactionUnconfirmedDataItem object
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set
func NewAddressCoinsTransactionUnconfirmedDataItemWithDefaults() *AddressCoinsTransactionUnconfirmedDataItem {
this := AddressCoinsTransactionUnconfirmedDataItem{}
return &this
}
// GetBlockchain returns the Blockchain field value
func (o *AddressCoinsTransactionUnconfirmedDataItem) GetBlockchain() string {
if o == nil {
var ret string
return ret
}
return o.Blockchain
}
// GetBlockchainOk returns a tuple with the Blockchain field value
// and a boolean to check if the value has been set.
func (o *AddressCoinsTransactionUnconfirmedDataItem) GetBlockchainOk() (*string, bool) {
if o == nil {
return nil, false
}
return &o.Blockchain, true
}
// SetBlockchain sets field value
func (o *AddressCoinsTransactionUnconfirmedDataItem) SetBlockchain(v string) {
o.Blockchain = v
}
// GetNetwork returns the Network field value
func (o *AddressCoinsTransactionUnconfirmedDataItem) GetNetwork() string {
if o == nil {
var ret string
return ret
}
return o.Network
}
// GetNetworkOk returns a tuple with the Network field value
// and a boolean to check if the value has been set.
func (o *AddressCoinsTransactionUnconfirmedDataItem) GetNetworkOk() (*string, bool) {
if o == nil {
return nil, false
}
return &o.Network, true
}
// SetNetwork sets field value
func (o *AddressCoinsTransactionUnconfirmedDataItem) SetNetwork(v string) {
o.Network = v
}
// GetAddress returns the Address field value
func (o *AddressCoinsTransactionUnconfirmedDataItem) GetAddress() string {
if o == nil {
var ret string
return ret
}
return o.Address
}
// GetAddressOk returns a tuple with the Address field value
// and a boolean to check if the value has been set.
func (o *AddressCoinsTransactionUnconfirmedDataItem) GetAddressOk() (*string, bool) {
if o == nil {
return nil, false
}
return &o.Address, true
}
// SetAddress sets field value
func (o *AddressCoinsTransactionUnconfirmedDataItem) SetAddress(v string) {
o.Address = v
}
// GetTransactionId returns the TransactionId field value
func (o *AddressCoinsTransactionUnconfirmedDataItem) GetTransactionId() string {
if o == nil {
var ret string
return ret
}
return o.TransactionId
}
// GetTransactionIdOk returns a tuple with the TransactionId field value
// and a boolean to check if the value has been set.
func (o *AddressCoinsTransactionUnconfirmedDataItem) GetTransactionIdOk() (*string, bool) {
if o == nil {
return nil, false
}
return &o.TransactionId, true
}
// SetTransactionId sets field value
func (o *AddressCoinsTransactionUnconfirmedDataItem) SetTransactionId(v string) {
o.TransactionId = v
}
// GetAmount returns the Amount field value
func (o *AddressCoinsTransactionUnconfirmedDataItem) GetAmount() string {
if o == nil {
var ret string
return ret
}
return o.Amount
}
// GetAmountOk returns a tuple with the Amount field value
// and a boolean to check if the value has been set.
func (o *AddressCoinsTransactionUnconfirmedDataItem) GetAmountOk() (*string, bool) {
if o == nil {
return nil, false
}
return &o.Amount, true
}
// SetAmount sets field value
func (o *AddressCoinsTransactionUnconfirmedDataItem) SetAmount(v string) {
o.Amount = v
}
// GetUnit returns the Unit field value
func (o *AddressCoinsTransactionUnconfirmedDataItem) GetUnit() string {
if o == nil {
var ret string
return ret
}
return o.Unit
}
// GetUnitOk returns a tuple with the Unit field value
// and a boolean to check if the value has been set.
func (o *AddressCoinsTransactionUnconfirmedDataItem) GetUnitOk() (*string, bool) {
if o == nil {
return nil, false
}
return &o.Unit, true
}
// SetUnit sets field value
func (o *AddressCoinsTransactionUnconfirmedDataItem) SetUnit(v string) {
o.Unit = v
}
// GetDirection returns the Direction field value
func (o *AddressCoinsTransactionUnconfirmedDataItem) GetDirection() string {
if o == nil {
var ret string
return ret
}
return o.Direction
}
// GetDirectionOk returns a tuple with the Direction field value
// and a boolean to check if the value has been set.
func (o *AddressCoinsTransactionUnconfirmedDataItem) GetDirectionOk() (*string, bool) {
if o == nil {
return nil, false
}
return &o.Direction, true
}
// SetDirection sets field value
func (o *AddressCoinsTransactionUnconfirmedDataItem) SetDirection(v string) {
o.Direction = v
}
// GetFirstSeenInMempoolTimestamp returns the FirstSeenInMempoolTimestamp field value
func (o *AddressCoinsTransactionUnconfirmedDataItem) GetFirstSeenInMempoolTimestamp() int32 {
if o == nil {
var ret int32
return ret
}
return o.FirstSeenInMempoolTimestamp
}
// GetFirstSeenInMempoolTimestampOk returns a tuple with the FirstSeenInMempoolTimestamp field value
// and a boolean to check if the value has been set.
func (o *AddressCoinsTransactionUnconfirmedDataItem) GetFirstSeenInMempoolTimestampOk() (*int32, bool) {
if o == nil {
return nil, false
}
return &o.FirstSeenInMempoolTimestamp, true
}
// SetFirstSeenInMempoolTimestamp sets field value
func (o *AddressCoinsTransactionUnconfirmedDataItem) SetFirstSeenInMempoolTimestamp(v int32) {
o.FirstSeenInMempoolTimestamp = v
}
// MarshalJSON serializes the item to JSON. Every field is required by the
// API, so each is always emitted.
//
// Cleanup: the generator emitted vacuous `if true { ... }` guards around
// each field; they are collapsed into a single map literal with identical
// output.
func (o AddressCoinsTransactionUnconfirmedDataItem) MarshalJSON() ([]byte, error) {
	toSerialize := map[string]interface{}{
		"blockchain":                  o.Blockchain,
		"network":                     o.Network,
		"address":                     o.Address,
		"transactionId":               o.TransactionId,
		"amount":                      o.Amount,
		"unit":                        o.Unit,
		"direction":                   o.Direction,
		"firstSeenInMempoolTimestamp": o.FirstSeenInMempoolTimestamp,
	}
	return json.Marshal(toSerialize)
}
type NullableAddressCoinsTransactionUnconfirmedDataItem struct {
value *AddressCoinsTransactionUnconfirmedDataItem
isSet bool
}
func (v NullableAddressCoinsTransactionUnconfirmedDataItem) Get() *AddressCoinsTransactionUnconfirmedDataItem {
return v.value
}
func (v *NullableAddressCoinsTransactionUnconfirmedDataItem) Set(val *AddressCoinsTransactionUnconfirmedDataItem) {
v.value = val
v.isSet = true
}
func (v NullableAddressCoinsTransactionUnconfirmedDataItem) IsSet() bool {
return v.isSet
}
func (v *NullableAddressCoinsTransactionUnconfirmedDataItem) Unset() {
v.value = nil
v.isSet = false
}
func NewNullableAddressCoinsTransactionUnconfirmedDataItem(val *AddressCoinsTransactionUnconfirmedDataItem) *NullableAddressCoinsTransactionUnconfirmedDataItem {
return &NullableAddressCoinsTransactionUnconfirmedDataItem{value: val, isSet: true}
}
func (v NullableAddressCoinsTransactionUnconfirmedDataItem) MarshalJSON() ([]byte, error) {
return json.Marshal(v.value)
}
func (v *NullableAddressCoinsTransactionUnconfirmedDataItem) UnmarshalJSON(src []byte) error {
v.isSet = true
return json.Unmarshal(src, &v.value)
} | model_address_coins_transaction_unconfirmed_data_item.go | 0.843799 | 0.451871 | model_address_coins_transaction_unconfirmed_data_item.go | starcoder |
package losses
import (
"github.com/nlpodyssey/spago/ag"
"github.com/nlpodyssey/spago/mat"
)
// MAE measures the mean absolute error (a.k.a. L1 loss) between each
// element of the input x and the target y; reduceMean selects mean vs sum.
func MAE[T mat.DType](x ag.Node[T], y ag.Node[T], reduceMean bool) ag.Node[T] {
	absDiff := ag.Abs(ag.Sub(x, y))
	if reduceMean {
		return ag.ReduceMean(absDiff)
	}
	return ag.ReduceSum(absDiff)
}
// MSE measures the mean squared error (squared L2 norm) between each
// element in the input x and target y, scaled by 1/2.
func MSE[T mat.DType](x ag.Node[T], y ag.Node[T], reduceMean bool) ag.Node[T] {
	half := ag.Constant[T](0.5)
	scaledSquare := ag.ProdScalar(ag.Square(ag.Sub(x, y)), half)
	if reduceMean {
		return ag.ReduceMean(scaledSquare)
	}
	return ag.ReduceSum(scaledSquare)
}
// NLL returns the negative log-likelihood loss of the input x with
// respect to the target y, which is expected to be a one-hot vector.
func NLL[T mat.DType](x ag.Node[T], y ag.Node[T]) ag.Node[T] {
	logLikelihood := ag.ReduceSum(ag.Prod(y, ag.Log(x)))
	return ag.Neg(logLikelihood)
}
// CrossEntropy implements a cross-entropy loss function.
// x holds the raw scores for each class (logits); c is the gold class index.
func CrossEntropy[T mat.DType](x ag.Node[T], c int) ag.Node[T] {
	negGoldScore := ag.Neg(ag.AtVec(x, c))
	return ag.Add(negGoldScore, ag.LogSumExp(x))
}
// WeightedCrossEntropy returns a cross-entropy loss function scaled by
// the per-class weighting factor weights[class] ∈ [0,1].
// x holds the logits; c is the gold class index.
func WeightedCrossEntropy[T mat.DType](weights []T) func(x ag.Node[T], c int) ag.Node[T] {
	return func(x ag.Node[T], c int) ag.Node[T] {
		classWeight := ag.NewScalar[T](weights[c])
		return ag.ProdScalar(CrossEntropy(x, c), classWeight)
	}
}
// FocalLoss is a CrossEntropy variant that reduces the loss contribution
// from "easy" examples and emphasizes misclassified ones:
// FL = (1 - p)^gamma * CE, with focusing parameter gamma ≥ 0.
// x holds the logits; c is the gold class index.
func FocalLoss[T mat.DType](x ag.Node[T], c int, gamma T) ag.Node[T] {
	ce := CrossEntropy(x, c)
	p := ag.Exp(ag.Neg(ce))
	oneMinusP := ag.ReverseSub(p, ag.NewScalar[T](1.0))
	modulation := ag.Pow(oneMinusP, gamma)
	return ag.Prod(modulation, ce)
}
// WeightedFocalLoss is FocalLoss additionally scaled by the per-class
// weighting factor weights[class] ∈ [0,1].
// x holds the logits; c is the gold class index; gamma ≥ 0 focuses the loss.
func WeightedFocalLoss[T mat.DType](weights []T) func(x ag.Node[T], c int, gamma T) ag.Node[T] {
	return func(x ag.Node[T], c int, gamma T) ag.Node[T] {
		ce := CrossEntropy(x, c)
		p := ag.Exp(ag.Neg(ce))
		oneMinusP := ag.ReverseSub(p, ag.NewScalar[T](1.0))
		focal := ag.Prod(ag.Pow(oneMinusP, gamma), ce)
		return ag.ProdScalar(focal, ag.NewScalar[T](weights[c]))
	}
}
// Perplexity computes the perplexity as exp over the cross-entropy.
func Perplexity[T mat.DType](x ag.Node[T], c int) ag.Node[T] {
	ce := CrossEntropy(x, c)
	return ag.Exp(ce)
}
// ZeroOneQuantization is minimized when every component of x satisfies
// [x]i ∈ {0, 1}, via the penalty sum(x² ⊙ (1-x)²).
func ZeroOneQuantization[T mat.DType](x ag.Node[T]) ag.Node[T] {
	oneMinusX := ag.ReverseSub(x, ag.NewScalar[T](1.0))
	return ag.ReduceSum(ag.Prod(ag.Square(x), ag.Square(oneMinusX)))
}
// Norm2Quantization is minimized when the squared L2 norm of x equals 1.
func Norm2Quantization[T mat.DType](x ag.Node[T]) ag.Node[T] {
	squaredNorm := ag.ReduceSum(ag.Square(x))
	return ag.Square(ag.SubScalar(squaredNorm, ag.NewScalar[T](1.0)))
}
// OneHotQuantization pushes the vector x towards a one-hot encoding;
// q is the quantization regularizer weight (suggested 0.00001).
func OneHotQuantization[T mat.DType](x ag.Node[T], q T) ag.Node[T] {
	penalty := ag.Add(ZeroOneQuantization(x), Norm2Quantization(x))
	return ag.ProdScalar(penalty, ag.NewScalar[T](q))
}
// Distance returns the absolute difference between target and x.
func Distance[T mat.DType](x ag.Node[T], target T) ag.Node[T] {
	diff := ag.Sub(ag.NewScalar[T](target), x)
	return ag.Abs(diff)
}
// MSESeq accumulates the MSE loss over a sequence of predictions,
// optionally averaging over the sequence length.
func MSESeq[T mat.DType](predicted []ag.Node[T], target []ag.Node[T], reduceMean bool) ag.Node[T] {
	loss := MSE(predicted[0], target[0], false)
	for i := 1; i < len(predicted); i++ {
		loss = ag.Add(loss, MSE(predicted[i], target[i], false))
	}
	if !reduceMean {
		return loss
	}
	return ag.DivScalar(loss, ag.NewScalar[T](T(len(predicted))))
}
// MAESeq accumulates the MAE loss over a sequence of predictions,
// optionally averaging over the sequence length.
func MAESeq[T mat.DType](predicted []ag.Node[T], target []ag.Node[T], reduceMean bool) ag.Node[T] {
	loss := MAE(predicted[0], target[0], false)
	for i := 1; i < len(predicted); i++ {
		loss = ag.Add(loss, MAE(predicted[i], target[i], false))
	}
	if !reduceMean {
		return loss
	}
	return ag.DivScalar(loss, ag.NewScalar[T](T(len(predicted))))
}
// CrossEntropySeq accumulates the CrossEntropy loss over a sequence of
// predictions against their gold class indices, optionally averaging.
func CrossEntropySeq[T mat.DType](predicted []ag.Node[T], target []int, reduceMean bool) ag.Node[T] {
	loss := CrossEntropy(predicted[0], target[0])
	for i := 1; i < len(predicted); i++ {
		loss = ag.Add(loss, CrossEntropy(predicted[i], target[i]))
	}
	if !reduceMean {
		return loss
	}
	return ag.DivScalar(loss, ag.NewScalar[T](T(len(predicted))))
}
// SPG (Softmax Policy Gradient) is a Gradient Policy used in Reinforcement Learning.
// logPropActions are the log-probability of the chosen action by the Agent at each time;
// logProbTargets are results of the reward function i.e. the predicted log-likelihood of the ground truth at each time.
// It returns -sum_t(logPropActions[t] * logProbTargets[t]).
// NOTE(review): loss starts as a nil Node and is passed to ag.Add on the
// first iteration — this relies on ag.Add treating a nil operand as the
// identity; confirm against the spago ag package.
func SPG[T mat.DType](logPropActions []ag.Node[T], logProbTargets []ag.Node[T]) ag.Node[T] {
	var loss ag.Node[T]
	for t := 0; t < len(logPropActions); t++ {
		loss = ag.Add(loss, ag.Prod(logPropActions[t], logProbTargets[t]))
	}
	return ag.Neg(loss)
} | losses/losses.go | 0.906815 | 0.734739 | losses.go | starcoder |
package events
import (
"bytes"
"fmt"
"strings"
"text/template"
)
// MarkdownRenderer renders responses as markdown.
type MarkdownRenderer struct{}
// CommonData is data that all responses have.
type CommonData struct {
	Command string // title-cased command name, e.g. "Plan" or "Apply"
	Verbose bool // when true, templates append the collapsible log section
	Log string // raw log output shown in verbose mode
}
// ErrData is data about an error response.
type ErrData struct {
	Error string // the error message to render
	CommonData
}
// FailureData is data about a failure response.
type FailureData struct {
	Failure string // the failure explanation to render
	CommonData
}
// ResultData is data about a successful response.
type ResultData struct {
	Results map[string]string // rendered markdown keyed by project path
	CommonData
}
// Render formats the data into a markdown string: the help text for the
// Help command, an error/failure section when present, otherwise the
// per-project results.
// nolint: interfacer
func (g *MarkdownRenderer) Render(res CommandResponse, cmdName CommandName, log string, verbose bool) string {
	if cmdName == Help {
		return g.renderTemplate(helpTmpl, nil)
	}
	common := CommonData{strings.Title(cmdName.String()), verbose, log}
	switch {
	case res.Error != nil:
		return g.renderTemplate(errWithLogTmpl, ErrData{res.Error.Error(), common})
	case res.Failure != "":
		return g.renderTemplate(failureWithLogTmpl, FailureData{res.Failure, common})
	default:
		return g.renderProjectResults(res.ProjectResults, common)
	}
}
// renderProjectResults renders one markdown section per project result —
// choosing the error, failure, plan-success or apply-success template —
// then wraps them in the single- or multi-project layout.
func (g *MarkdownRenderer) renderProjectResults(pathResults []ProjectResult, common CommonData) string {
	results := make(map[string]string)
	for _, result := range pathResults {
		if result.Error != nil {
			results[result.Path] = g.renderTemplate(errTmpl, struct {
				Command string
				Error   string
			}{
				Command: common.Command,
				Error:   result.Error.Error(),
			})
		} else if result.Failure != "" {
			results[result.Path] = g.renderTemplate(failureTmpl, struct {
				Command string
				Failure string
			}{
				Command: common.Command,
				Failure: result.Failure,
			})
		} else if result.PlanSuccess != nil {
			results[result.Path] = g.renderTemplate(planSuccessTmpl, *result.PlanSuccess)
		} else if result.ApplySuccess != "" {
			results[result.Path] = g.renderTemplate(applySuccessTmpl, struct{ Output string }{result.ApplySuccess})
		} else {
			// Every ProjectResult should carry exactly one outcome.
			results[result.Path] = "Found no template. This is a bug!"
		}
	}
	// A single project omits the per-path headings.
	var tmpl *template.Template
	if len(results) == 1 {
		tmpl = singleProjectTmpl
	} else {
		tmpl = multiProjectTmpl
	}
	return g.renderTemplate(tmpl, ResultData{results, common})
}
// renderTemplate executes tmpl with data; if execution fails (which would
// indicate a bug in a template), the error text itself is returned so it
// surfaces in the rendered output.
func (g *MarkdownRenderer) renderTemplate(tmpl *template.Template, data interface{}) string {
	var buf bytes.Buffer
	if err := tmpl.Execute(&buf, data); err != nil {
		return fmt.Sprintf("Failed to render template, this is a bug: %v", err)
	}
	return buf.String()
}
// helpTmpl renders the static usage/help text. The "cmake" tag on the code
// fence is only used for syntax highlighting of the rendered comment.
// NOTE(review): the opening ``` fence is never closed inside the template —
// confirm whether the renderer or the platform tolerates this.
var helpTmpl = template.Must(template.New("").Parse("```cmake\n" +
	`atlantis - Terraform collaboration tool that enables you to collaborate on infrastructure
safely and securely.
Usage: atlantis <command> [workspace] [--verbose]
Commands:
plan Runs 'terraform plan' on the files changed in the pull request
apply Runs 'terraform apply' using the plans generated by 'atlantis plan'
help Get help
Examples:
# Generates a plan for staging workspace
atlantis plan staging
# Generates a plan for a standalone terraform project
atlantis plan
# Applies a plan for staging workspace
atlantis apply staging
# Applies a plan for a standalone terraform project
atlantis apply
`))

// singleProjectTmpl renders the result of a run that touched a single
// project: each result followed by the optional verbose-log section.
var singleProjectTmpl = template.Must(template.New("").Parse("{{ range $result := .Results }}{{$result}}{{end}}\n" + logTmpl))

// multiProjectTmpl renders a run spanning several directories: a bullet list
// of the affected paths, then one "## path/" section per result, separated by
// horizontal rules, followed by the optional verbose-log section.
var multiProjectTmpl = template.Must(template.New("").Parse(
	"Ran {{.Command}} in {{ len .Results }} directories:\n" +
		"{{ range $path, $result := .Results }}" +
		" * `{{$path}}`\n" +
		"{{end}}\n" +
		"{{ range $path, $result := .Results }}" +
		"## {{$path}}/\n" +
		"{{$result}}\n" +
		"---\n{{end}}" +
		logTmpl))

// planSuccessTmpl renders a successful plan: the terraform output in a diff
// block plus a link that discards the plan (and releases its lock).
var planSuccessTmpl = template.Must(template.New("").Parse(
	"```diff\n" +
		"{{.TerraformOutput}}\n" +
		"```\n\n" +
		"* To **discard** this plan click [here]({{.LockURL}})."))

// applySuccessTmpl renders a successful apply: just the output in a diff block.
var applySuccessTmpl = template.Must(template.New("").Parse(
	"```diff\n" +
		"{{.Output}}\n" +
		"```"))

// errTmplText is the shared body for error rendering; errTmpl renders it
// alone and errWithLogTmpl appends the optional verbose-log section.
var errTmplText = "**{{.Command}} Error**\n" +
	"```\n" +
	"{{.Error}}\n" +
	"```\n"
var errTmpl = template.Must(template.New("").Parse(errTmplText))
var errWithLogTmpl = template.Must(template.New("").Parse(errTmplText + logTmpl))

// failureTmplText is the shared body for failure rendering; failureTmpl
// renders it alone and failureWithLogTmpl appends the verbose-log section.
var failureTmplText = "**{{.Command}} Failed**: {{.Failure}}\n"
var failureTmpl = template.Must(template.New("").Parse(failureTmplText))
var failureWithLogTmpl = template.Must(template.New("").Parse(failureTmplText + logTmpl))
var logTmpl = "{{if .Verbose}}\n<details><summary>Log</summary>\n <p>\n\n```\n{{.Log}}```\n</p></details>{{end}}\n" | server/events/markdown_renderer.go | 0.614741 | 0.412826 | markdown_renderer.go | starcoder |
package tree
import (
"fmt"
"log"
"math"
"sort"
"github.com/m1gwings/treedrawer/drawer"
)
// stringify takes a pointer to a node and draws all the tree below in a drawer.
// Returns the drawn drawer.
// This function is called recursively.
//
// Invariant: the returned drawer always has an odd width, so a true middle
// column exists on which the connector glyphs (┬, │, ┴) can be anchored.
// Any error from the underlying drawer is treated as a programming bug and
// aborts the process via log.Fatal.
func stringify(t *Tree) *drawer.Drawer {
	// Getting drawer and dimensions of this NodeValue
	dVal := t.val.Draw()
	dValW, dValH := dVal.Dimens()
	// No children: draw the value inside a box and return.
	if len(t.Children()) == 0 {
		// Allocating new drawer to return
		// Ensuring that width is odd (the +2 accounts for the box border)
		d, err := drawer.NewDrawer(dValW+2+1-dValW%2, dValH+2)
		if err != nil {
			log.Fatal(fmt.Errorf("error while allocating new drawer with no children: %v", err))
		}
		// Drawing dVal drawer onto the drawer to return
		err = d.DrawDrawer(dVal, 1, 1)
		if err != nil {
			log.Fatal(fmt.Errorf("error while drawing val with no children: %v", err))
		}
		// Adding a box in the drawer to return, around where the dVal drawer has been drawn
		err = addBoxAround(d, 0, 0, dValW+1, dValH+1)
		if err != nil {
			log.Fatal(fmt.Errorf("error while adding box with no children: %v", err))
		}
		return d
	}
	// One child: draw the boxed value, a single vertical pipe, and the
	// child's subtree centered underneath.
	if len(t.Children()) == 1 {
		// Drawer of the child
		var dChild *drawer.Drawer
		// Recursively calling stringify of the child and initializing dChild drawer
		tChild, err := t.Child(0)
		if err != nil {
			log.Fatal(fmt.Errorf("error while getting child 0 with one child: %v", err))
		}
		dChild = stringify(tChild)
		// Getting dimensions of dChild drawer
		dChildW, dChildH := dChild.Dimens()
		// w and h represent respectively width and height of the drawer to return
		// w is the max between the width of dVal + 2 (considering the box) and the width of the one child
		// h is equal to the height of dVal + 2 (considering the box) + 1 (considering the "pipe") + the height of dChild
		w := int(math.Max(float64(dValW+2), float64(dChildW)))
		// Ensuring that w is odd
		w += 1 - w%2
		h := dValH + 3 + dChildH
		// Allocating new drawer to return
		d, err := drawer.NewDrawer(w, h)
		if err != nil {
			log.Fatal(fmt.Errorf("error while allocating new drawer with one child: %v", err))
		}
		// Drawing dVal onto the drawer to return with x in (w-dValW)/2 to put dVal in the middle
		// (remember that drawer.DrawDrawer takes coordinates of top left corner)
		// and y in 1 (considering the box)
		err = d.DrawDrawer(dVal, (w-dValW)/2, 1)
		if err != nil {
			log.Fatal(fmt.Errorf("error while drawing val with one child: %v", err))
		}
		// Adding a box in the drawer to return, around where the dVal drawer has been drawn
		// start coordinates are taken considering d.DrawDrawer above - 1 in order to not overwrite
		// end coordinates are just start coordinates plus respectively dValW+1 and dValH+1 in order to not overwrite
		err = addBoxAround(d, (w-dValW)/2-1, 0, (w-dValW)/2+dValW, dValH+1)
		if err != nil {
			log.Fatal(fmt.Errorf("error while adding box with one child: %v", err))
		}
		// Drawing the upper-link onto the drawer to return with x in the middle
		// and y just above the pipe
		err = d.DrawRune('┬', w/2, dValH+1)
		if err != nil {
			log.Fatal(fmt.Errorf("error while drawing ┬ with one child: %v", err))
		}
		// Drawing the pipe onto the drawer to return with x in the middle
		// and y in dValH + 2 (considering the box)
		err = d.DrawRune('│', w/2, dValH+2)
		if err != nil {
			log.Fatal(fmt.Errorf("error while drawing | with one child: %v", err))
		}
		// Drawing dChild onto the drawer to return with x in (w-dChildW)/2 to put dChild in the middle
		// (remember that drawer.DrawDrawer takes coordinates of top left corner)
		// and y in dValH + 3 (considering the box and pipe)
		err = d.DrawDrawer(dChild, (w-dChildW)/2, dValH+3)
		if err != nil {
			log.Fatal(fmt.Errorf("error while drawing child drawer with one child: %v", err))
		}
		// Drawing the lower-link onto the drawer to return with x in the middle
		// and y just below the pipe
		// this drawing must be the latest because it has to overwrite dChild
		err = d.DrawRune('┴', w/2, dValH+3)
		if err != nil {
			log.Fatal(fmt.Errorf("error while drawing ┴ with one child: %v", err))
		}
		return d
	}
	// More children: lay the subtrees side by side and connect them to the
	// parent with a horizontal pipe.
	// nChildren is the number of children of t
	nChildren := len(t.Children())
	// dChildren stores the result of recursively call of stringify for each child
	dChildren := make([]*drawer.Drawer, 0, nChildren)
	// childrenLeft is a slice with the x coordinate of the upper-left corner of each child drawer to draw onto d
	childrenLeft := make([]int, 0, nChildren)
	// childrenMiddle is a slice with the x coordinate of the middle of each child drawer to draw onto d
	childrenMiddle := make([]int, 0, nChildren)
	// childrenW is the width required to draw children
	// it is incremented child by child to obtain the x coordinate of the upper-left corner for each child
	childrenW := 0
	// maxChildH is the maximum height of a child
	maxChildH := 0
	// Iterates over children to calculate maxChildH, childrenLeft and childrenMiddle
	for i, tChild := range t.Children() {
		dChild := stringify(tChild)
		dChildren = append(dChildren, dChild)
		dChildW, dChildH := dChild.Dimens()
		maxChildH = int(math.Max(float64(maxChildH), float64(dChildH)))
		if i == nChildren-1 {
			// When the child is the last
			if (childrenW+dChildW)%2 == 1 {
				// If final childrenW (notice that childrenW gets incremented at the end) is odd than we just have to add dChildW
				childrenLeft = append(childrenLeft, childrenW)
				childrenMiddle = append(childrenMiddle, childrenW+dChildW/2)
				childrenW += dChildW
			} else {
				// Otherwise we add one more space to make childrenW odd
				childrenLeft = append(childrenLeft, childrenW+1)
				childrenMiddle = append(childrenMiddle, childrenW+1+dChildW/2)
				childrenW += dChildW + 1
			}
		} else {
			// When the child isn't the last just add it to the left of the child before with a space in between
			childrenLeft = append(childrenLeft, childrenW)
			childrenMiddle = append(childrenMiddle, childrenW+dChildW/2)
			childrenW += dChildW + 1
		}
	}
	// Assert that childrenLeft and childrenMiddle are sorted, this is required because we are going to use binary search later
	sorted := sort.SliceIsSorted(childrenLeft, func(i, j int) bool { return childrenLeft[i] < childrenLeft[j] })
	if !sorted {
		log.Fatal(fmt.Errorf("childrenLeft is not sorted"))
	}
	sorted = sort.SliceIsSorted(childrenMiddle, func(i, j int) bool { return childrenMiddle[i] < childrenMiddle[j] })
	if !sorted {
		log.Fatal(fmt.Errorf("childrenMiddle is not sorted"))
	}
	// w is the width of the final drawer and is equal to the maximum between dValW+2 and childrenW
	var w int
	if dValW+2 > childrenW {
		w = dValW + 2
		// If parent width is greater than children width, children get centered by shifting each child
		for i := 0; i < nChildren; i++ {
			childrenLeft[i] += (w - childrenW) / 2
			childrenMiddle[i] += (w - childrenW) / 2
		}
	} else {
		w = childrenW
	}
	h := dValH + 3 + maxChildH
	// Allocating new drawer to return
	d, err := drawer.NewDrawer(w, h)
	if err != nil {
		log.Fatal(fmt.Errorf("error while allocating new drawer with more children: %v", err))
	}
	// Drawing dVal onto the drawer to return with x in (w-dValW)/2 to put dVal in the middle
	// (remember that drawer.DrawDrawer takes coordinates of top left corner)
	// and y in 1 (considering the box)
	err = d.DrawDrawer(dVal, (w-dValW)/2, 1)
	if err != nil {
		log.Fatal(fmt.Errorf("error while drawing val with more children: %v", err))
	}
	// Adding a box in the drawer to return, around where the dVal drawer has been drawn
	// start coordinates are taken considering d.DrawDrawer above - 1 in order to not overwrite
	// end coordinates are just start coordinates plus respectively dValW+1 and dValH+1 in order to not overwrite
	err = addBoxAround(d, (w-dValW)/2-1, 0, (w-dValW)/2+dValW, dValH+1)
	if err != nil {
		log.Fatal(fmt.Errorf("error while adding box with more children: %v", err))
	}
	// Drawing children onto the drawer to return
	for i := 0; i < nChildren; i++ {
		err = d.DrawDrawer(dChildren[i], childrenLeft[i], dValH+3)
		if err != nil {
			log.Fatal(fmt.Errorf("error while drawing %d child: %v", i, err))
		}
	}
	// Drawing upper-link ┬ under the parent
	err = d.DrawRune('┬', w/2, dValH+1)
	if err != nil {
		log.Fatal(fmt.Errorf("error while drawing upper-link ┬ under the parent: %v", err))
	}
	// Drawing lower-link ┴ above the children
	for i, x := range childrenMiddle {
		err = d.DrawRune('┴', x, dValH+3)
		if err != nil {
			log.Fatal(fmt.Errorf("error while drawing lower-link ┴ above the %dth child: %v", i, err))
		}
	}
	// Drawing left-corner ╭ above the left most child
	err = d.DrawRune('╭', childrenMiddle[0], dValH+2)
	if err != nil {
		log.Fatal(fmt.Errorf("error while drawing left-corner ╭ above the left most child: %v", err))
	}
	// Drawing right-corner ╮ above the right most child
	err = d.DrawRune('╮', childrenMiddle[len(childrenMiddle)-1], dValH+2)
	if err != nil {
		log.Fatal(fmt.Errorf("error while drawing right-corner ╮ above the right most child: %v", err))
	}
	// Finish to connect the pipe: fill the row between the two corners,
	// choosing the glyph per column depending on whether it sits under the
	// parent link, above a child link, both, or neither.
	for x := childrenMiddle[0] + 1; x < childrenMiddle[len(childrenMiddle)-1]; x++ {
		underParent := x == w/2
		// Binary search works because childrenMiddle is sorted (asserted above).
		shouldBeAt := sort.SearchInts(childrenMiddle, x)
		aboveChild := shouldBeAt < len(childrenMiddle) && childrenMiddle[shouldBeAt] == x
		var connection rune
		switch {
		case underParent && aboveChild:
			connection = '┼'
		case underParent:
			connection = '┴'
		case aboveChild:
			connection = '┬'
		default:
			connection = '─'
		}
		err = d.DrawRune(connection, x, dValH+2)
		if err != nil {
			log.Fatal(fmt.Errorf("error while drawing %c at position %d to finish connection: %v", connection, x, err))
		}
	}
	return d
}
// addBoxAround draws a box onto d
// the box starts at startX and startY coordinates
// and ends at endX and endY
func addBoxAround(d *drawer.Drawer, startX, startY, endX, endY int) error {
// Checking that start and end coordinates are valid
if startX < 0 || startY < 0 || endX < 0 || endY < 0 {
return fmt.Errorf("can't draw on negative coordinates %d %d %d %d", startX, startY, endX, endY)
}
if startX > endX || startY > endY {
return fmt.Errorf("start should be before end %d %d %d %d", startX, startY, endX, endY)
}
dW, dH := d.Dimens()
if endX >= dW || endY >= dH {
return fmt.Errorf("end overflows the drawer with dimes %d %d, %d %d %d %d", dW, dH, startX, startY, endX, endY)
}
// Drawing corners
err := d.DrawRune('╭', startX, startY)
if err != nil {
return fmt.Errorf("error while drawing ╭: %v", err)
}
err = d.DrawRune('╮', endX, startY)
if err != nil {
return fmt.Errorf("error while drawing ╮: %v", err)
}
err = d.DrawRune('╰', startX, endY)
if err != nil {
return fmt.Errorf("error while drawing ╰: %v", err)
}
err = d.DrawRune('╯', endX, endY)
if err != nil {
return fmt.Errorf("error while drawing ╯: %v", err)
}
// Drawing edges
for x := startX + 1; x < endX; x++ {
for yMul := 0; yMul <= 1; yMul++ {
err = d.DrawRune('─', x, yMul*(endY-startY)+startY)
if err != nil {
return fmt.Errorf("error while drawing ─: %v", err)
}
}
}
for y := startY + 1; y < endY; y++ {
for xMul := 0; xMul <= 1; xMul++ {
err = d.DrawRune('│', xMul*(endX-startX)+startX, y)
if err != nil {
return fmt.Errorf("error while drawing │: %v", err)
}
}
}
return nil
} | tree/stringify.go | 0.661595 | 0.410697 | stringify.go | starcoder |
package router
import (
"errors"
"reflect"
"strconv"
"strings"
)
var (
	// ErrInvalidBool is returned when a boolean argument cannot be
	// interpreted as either true or false.
	ErrInvalidBool = errors.New("invalid bool [true/false]")
)

// Parseable represents a parseable argument.
type Parseable interface {
	Parse(arg string) error
}

// ManualParseable represents a manually parseable argument.
type ManualParseable interface {
	ParseContent(arg string) error
}

// Formatter represents an argument that can be formatted for use in a usage string.
type Formatter interface {
	Format(field string) string
}

// argumentValueFn represents an argument value function.
// It converts one raw string argument into a reflect.Value of the target
// type, returning an error when the input cannot be converted.
type argumentValueFn func(string) (reflect.Value, error)
// getArgumentValueFn returns an argument value function for the given type
// that handles the type conversion of a string argument.
// It returns an error for types that have no known conversion.
func getArgumentValueFn(t reflect.Type) (argumentValueFn, error) {
	// Types implementing a parsing interface take priority over the
	// kind-based conversions below.
	if t.Implements(typeIParseable) {
		return parseableArgumentValue(t), nil
	}
	if t.Implements(typeIManualParseable) {
		return manualParseableArgumentValue(t), nil
	}
	switch t.Kind() {
	case reflect.String:
		return stringArgumentValue(), nil
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return intArgumentValue(t), nil
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
		return uintArgumentValue(t), nil
	case reflect.Float32, reflect.Float64:
		return floatArgumentValue(t), nil
	case reflect.Bool:
		return boolArgumentValue(), nil
	}
	return nil, errors.New("invalid type: " + t.String())
}
// parseableArgumentValue builds an argumentValueFn that allocates a fresh
// value of type t and parses the input by invoking its Parse method.
// It panics immediately if t does not expose Parse, since that indicates a
// programming error in the interface check that selected this path.
func parseableArgumentValue(t reflect.Type) argumentValueFn {
	method, ok := t.MethodByName("Parse")
	if !ok {
		panic("router: type IParseable does not implement Parse")
	}
	return func(input string) (reflect.Value, error) {
		// t is expected to be a pointer type here (t.Elem() would panic
		// otherwise); allocate the pointed-to value to parse into.
		target := reflect.New(t.Elem())
		results := method.Func.Call([]reflect.Value{
			target, reflect.ValueOf(input),
		})
		if err := errorReturns(results); err != nil {
			return nilV, err
		}
		return target, nil
	}
}
// manualParseableArgumentValue builds an argumentValueFn that allocates a
// fresh value of type t and parses the input by invoking its ParseContent
// method. It panics immediately if t does not expose ParseContent, since that
// indicates a programming error in the interface check that selected this path.
func manualParseableArgumentValue(t reflect.Type) argumentValueFn {
	method, ok := t.MethodByName("ParseContent")
	if !ok {
		panic("router: type IManualParseable does not implement ParseContent")
	}
	return func(input string) (reflect.Value, error) {
		// t is expected to be a pointer type here (t.Elem() would panic
		// otherwise); allocate the pointed-to value to parse into.
		target := reflect.New(t.Elem())
		results := method.Func.Call([]reflect.Value{
			target, reflect.ValueOf(input),
		})
		if err := errorReturns(results); err != nil {
			return nilV, err
		}
		return target, nil
	}
}
// stringArgumentValue returns an argumentValueFn for string arguments.
// Strings require no conversion, so the input is wrapped as-is.
func stringArgumentValue() argumentValueFn {
	return func(input string) (reflect.Value, error) {
		return reflect.ValueOf(input), nil
	}
}
// intArgumentValue returns an argumentValueFn that parses a signed integer
// argument into the given integer type t.
func intArgumentValue(t reflect.Type) argumentValueFn {
	return func(input string) (reflect.Value, error) {
		// Parse with the target type's own bit size so input that does
		// not fit (e.g. "300" for an int8) is rejected with a range
		// error here instead of being silently truncated by the later
		// conversion to t.
		i, err := strconv.ParseInt(input, 10, t.Bits())
		return quickRet(i, err, t)
	}
}
// uintArgumentValue returns an argumentValueFn that parses an unsigned
// integer argument into the given integer type t.
func uintArgumentValue(t reflect.Type) argumentValueFn {
	return func(input string) (reflect.Value, error) {
		// Parse with the target type's own bit size so input that does
		// not fit (e.g. "300" for a uint8) is rejected with a range
		// error here instead of being silently truncated by the later
		// conversion to t.
		u, err := strconv.ParseUint(input, 10, t.Bits())
		return quickRet(u, err, t)
	}
}
// floatArgumentValue returns an argumentValueFn that parses a floating-point
// argument into the given float type t.
func floatArgumentValue(t reflect.Type) argumentValueFn {
	return func(input string) (reflect.Value, error) {
		// Parse with the target type's own bit size so a float32 target
		// gets float32 rounding and range checking from ParseFloat,
		// rather than a silent lossy conversion from float64 later.
		f, err := strconv.ParseFloat(input, t.Bits())
		return quickRet(f, err, t)
	}
}
func boolArgumentValue() argumentValueFn {
return func(input string) (reflect.Value, error) {
switch strings.ToLower(input) {
case "true", "yes", "y", "1":
return reflect.ValueOf(true), nil
case "false", "no", "n", "0":
return reflect.ValueOf(false), nil
default:
return nilV, ErrInvalidBool
}
}
} | argument.go | 0.68595 | 0.438545 | argument.go | starcoder |
package models
import "github.com/go-gl/mathgl/mgl32"
// MinimalAABB is a lightweight axis-aligned bounding box represented only by
// its two extreme corners.
type MinimalAABB struct {
	Min mgl32.Vec3 // corner with the smallest x, y and z
	Max mgl32.Vec3 // corner with the largest x, y and z
}

// Axis-aligned bounding box
type AABB struct {
	Position mgl32.Vec3 // center of the box
	Width float32 // extent along the x axis
	Height float32 // extent along the y axis
	Length float32 // extent along the z axis
	Min mgl32.Vec3 // corner with the smallest x, y and z
	Max mgl32.Vec3 // corner with the largest x, y and z
	// Contains Min and Max for indexing
	Bounds [2]mgl32.Vec3
}
// NewAABBParametric constructs an AABB centered at position with the given
// width (x extent), height (y extent) and length (z extent).
func NewAABBParametric(position mgl32.Vec3, width float32, height float32, length float32) *AABB {
	// Half extents measured from the center to each face.
	halfExtents := mgl32.Vec3{width / 2.0, height / 2.0, length / 2.0}
	min := position.Sub(halfExtents)
	max := position.Add(halfExtents)
	return &AABB{
		Position: position,
		Width:    width,
		Height:   height,
		Length:   length,
		Min:      min,
		Max:      max,
		Bounds:   [2]mgl32.Vec3{min, max},
	}
}
// NewAABBMinMax constructs an AABB from its two extreme corners.
func NewAABBMinMax(min mgl32.Vec3, max mgl32.Vec3) *AABB {
	size := max.Sub(min)
	return &AABB{
		// The center is the midpoint between the two corners.
		Position: min.Add(max).Mul(0.5),
		Width:    size.X(),
		Height:   size.Y(),
		Length:   size.Z(),
		Min:      min,
		Max:      max,
		Bounds:   [2]mgl32.Vec3{min, max},
	}
}
// NewAABBFromMinimal expands a MinimalAABB into a full AABB by delegating to
// NewAABBMinMax with the minimal box's corners.
func NewAABBFromMinimal(aabb MinimalAABB) *AABB {
	return NewAABBMinMax(aabb.Min, aabb.Max)
}
// RayIntersect tests the ray against this box using the slab method.
// It returns whether the ray's line intersects the box together with the
// entry (tmin) and exit (tmax) parameters along the ray; both are 0 on a miss.
// NOTE(review): tmin/tmax may be negative when the box lies behind the ray's
// origin — callers that need a forward-only hit must check the sign.
func (aabb *AABB) RayIntersect(ray *Ray) (bool, float32, float32) {
	// From https://www.scratchapixel.com/lessons/3d-basic-rendering/minimal-ray-tracer-rendering-simple-shapes/ray-box-intersection
	var tmin, tmax, tymin, tymax, tzmin, tzmax float32
	// X slab: ray.Sign selects the near/far bounds per axis so the
	// interval stays ordered even for negative ray directions.
	tmin = (aabb.Bounds[ray.Sign[0]].X() - ray.Origin.X()) * ray.InvDirection.X()
	tmax = (aabb.Bounds[1-ray.Sign[0]].X() - ray.Origin.X()) * ray.InvDirection.X()
	// Y slab.
	tymin = (aabb.Bounds[ray.Sign[1]].Y() - ray.Origin.Y()) * ray.InvDirection.Y()
	tymax = (aabb.Bounds[1-ray.Sign[1]].Y() - ray.Origin.Y()) * ray.InvDirection.Y()
	// The slabs' intervals must overlap; otherwise the ray misses.
	if tmin > tymax || tymin > tmax {
		return false, 0, 0
	}
	// Narrow the running interval to the intersection with the Y slab.
	if tymin > tmin {
		tmin = tymin
	}
	if tymax < tmax {
		tmax = tymax
	}
	// Z slab, same procedure.
	tzmin = (aabb.Bounds[ray.Sign[2]].Z() - ray.Origin.Z()) * ray.InvDirection.Z()
	tzmax = (aabb.Bounds[1-ray.Sign[2]].Z() - ray.Origin.Z()) * ray.InvDirection.Z()
	if tmin > tzmax || tzmin > tmax {
		return false, 0, 0
	}
	if tzmin > tmin {
		tmin = tzmin
	}
	if tzmax < tmax {
		tmax = tzmax
	}
	return true, tmin, tmax
}
// Area returns the total surface area of the box.
func (aabb *AABB) Area() float32 {
	ext := aabb.Max.Sub(aabb.Min)
	x, y, z := ext.X(), ext.Y(), ext.Z()
	return 2 * (x*y + x*z + y*z)
}
func (aabb *MinimalAABB) Area() float32 {
d := aabb.Max.Sub(aabb.Min)
return 2 * (d.X()*d.Y() + d.X()*d.Z() + d.Y()*d.Z())
} | src/backend/models/aabb.go | 0.836888 | 0.492432 | aabb.go | starcoder |
package sde
var MarketGroups = map[int]*MarketGroup{
2: &MarketGroup{
ID: 2,
ParentID: 0,
Name: `Blueprints & Reactions`,
Description: `Blueprints are data items used in industry for manufacturing, research and invention jobs`,
},
4: &MarketGroup{
ID: 4,
ParentID: 0,
Name: `Ships`,
Description: `Capsuleer spaceships of all sizes and roles, including advanced and faction variants of many hull types`,
},
5: &MarketGroup{
ID: 5,
ParentID: 1361,
Name: `Standard Frigates`,
Description: `Small, fast vessels suited to a variety of purposes.`,
},
6: &MarketGroup{
ID: 6,
ParentID: 1367,
Name: `Standard Cruisers`,
Description: `The middle children of the starship industry, but not to be underestimated.`,
},
7: &MarketGroup{
ID: 7,
ParentID: 1376,
Name: `Standard Battleships`,
Description: `The foundations of any respectable fighting force.`,
},
8: &MarketGroup{
ID: 8,
ParentID: 1382,
Name: `Standard Industrial Ships`,
Description: `Built for the transportation of vast amounts of cargo.`,
},
9: &MarketGroup{
ID: 9,
ParentID: 0,
Name: `Ship Equipment`,
Description: `Equipment modules for spaceships, including many types of weapon, electronic, engineering, armor, shield and propulsion systems`,
},
10: &MarketGroup{
ID: 10,
ParentID: 9,
Name: `Turrets & Launchers`,
Description: `Weapon systems to inflict damage on enemy targets`,
},
11: &MarketGroup{
ID: 11,
ParentID: 0,
Name: `Ammunition & Charges`,
Description: `Ammunition for turret, missile and bomb systems can be found in this category, together with cap booster charges, mining crystals, probes, nanite repair paste and module scripts`,
},
14: &MarketGroup{
ID: 14,
ParentID: 9,
Name: `Hull & Armor `,
Description: `Modules that strengthen the physical frame of a spaceship against external dangers`,
},
19: &MarketGroup{
ID: 19,
ParentID: 0,
Name: `Trade Goods`,
Description: `Many unusual goods are traded by capsuleers, including various tags, chips, tokens, charters and tools that are used in special activities or have exchange value with non-capsuleers`,
},
20: &MarketGroup{
ID: 20,
ParentID: 19,
Name: `Industrial Goods`,
Description: `Various products used in a variety of industries`,
},
22: &MarketGroup{
ID: 22,
ParentID: 19,
Name: `Radioactive Goods`,
Description: `Chemicals and waste, pulsing with radiation. Can be bought and sold for profit`,
},
23: &MarketGroup{
ID: 23,
ParentID: 19,
Name: `Passengers`,
Description: `Ferrying passengers can give a nice profit`,
},
24: &MarketGroup{
ID: 24,
ParentID: 0,
Name: `Implants & Boosters`,
Description: `Capsuleers have several options for modifying their personal capabilities, including cybernetic implants and biochemical boosters`,
},
27: &MarketGroup{
ID: 27,
ParentID: 24,
Name: `Implants`,
Description: `Implants improve a pilot's performance in various areas. They cannot be removed once inserted`,
},
52: &MarketGroup{
ID: 52,
ParentID: 9,
Name: `Propulsion `,
Description: `Modules that affect the navigational properties of a spaceship`,
},
54: &MarketGroup{
ID: 54,
ParentID: 1031,
Name: `Standard Ores`,
Description: `Chunks of raw material harvested from asteroids.`,
},
61: &MarketGroup{
ID: 61,
ParentID: 5,
Name: `Caldari`,
Description: `Caldari frigate designs.`,
},
64: &MarketGroup{
ID: 64,
ParentID: 5,
Name: `Minmatar`,
Description: `Minmatar frigate designs.`,
},
65: &MarketGroup{
ID: 65,
ParentID: 1035,
Name: `Advanced Components`,
Description: `Components used in the manufacture of Tech II vessels and equipment.`,
},
72: &MarketGroup{
ID: 72,
ParentID: 5,
Name: `Amarr`,
Description: `Amarr frigate designs.`,
},
73: &MarketGroup{
ID: 73,
ParentID: 6,
Name: `Minmatar`,
Description: `Minmatar cruiser designs.`,
},
74: &MarketGroup{
ID: 74,
ParentID: 6,
Name: `Amarr`,
Description: `Amarr cruiser designs.`,
},
75: &MarketGroup{
ID: 75,
ParentID: 6,
Name: `Caldari`,
Description: `Caldari cruiser designs.`,
},
76: &MarketGroup{
ID: 76,
ParentID: 6,
Name: `Gallente`,
Description: `Gallente cruiser designs.`,
},
77: &MarketGroup{
ID: 77,
ParentID: 5,
Name: `Gallente`,
Description: `Gallente frigate designs.`,
},
78: &MarketGroup{
ID: 78,
ParentID: 7,
Name: `Minmatar`,
Description: `Minmatar battleship designs.`,
},
79: &MarketGroup{
ID: 79,
ParentID: 7,
Name: `Amarr`,
Description: `Amarr battleship designs.`,
},
80: &MarketGroup{
ID: 80,
ParentID: 7,
Name: `Caldari`,
Description: `Caldari battleship designs.`,
},
81: &MarketGroup{
ID: 81,
ParentID: 7,
Name: `Gallente`,
Description: `Gallente battleship designs.`,
},
82: &MarketGroup{
ID: 82,
ParentID: 8,
Name: `Minmatar`,
Description: `Minmatar industrial designs.`,
},
83: &MarketGroup{
ID: 83,
ParentID: 8,
Name: `Gallente`,
Description: `Gallente industrial designs.`,
},
84: &MarketGroup{
ID: 84,
ParentID: 8,
Name: `Caldari`,
Description: `Caldari industrial designs.`,
},
85: &MarketGroup{
ID: 85,
ParentID: 8,
Name: `Amarr`,
Description: `Amarr industrial designs.`,
},
86: &MarketGroup{
ID: 86,
ParentID: 10,
Name: `Hybrid Turrets`,
Description: `Hybrid turrets are specifically designed to house railguns and blasters.`,
},
87: &MarketGroup{
ID: 87,
ParentID: 10,
Name: `Projectile Turrets`,
Description: `Built to house cannons capable of firing superheated gunshells.`,
},
88: &MarketGroup{
ID: 88,
ParentID: 10,
Name: `Energy Turrets`,
Description: `Laser turrets are built to house beam and pulse lasers.`,
},
99: &MarketGroup{
ID: 99,
ParentID: 11,
Name: `Projectile Ammo`,
Description: `Metal-cased shells, fired by artillery guns and autocannons`,
},
100: &MarketGroup{
ID: 100,
ParentID: 11,
Name: `Hybrid Charges`,
Description: `Charges with a plasma core designed for railguns and blasters`,
},
101: &MarketGroup{
ID: 101,
ParentID: 11,
Name: `Frequency Crystals`,
Description: `Modulate energy into several different frequencies for beam lasers and pulse lasers `,
},
102: &MarketGroup{
ID: 102,
ParentID: 851,
Name: `Small`,
Description: `Small frequency crystals, for use with frigate-sized lasers.`,
},
103: &MarketGroup{
ID: 103,
ParentID: 851,
Name: `Medium`,
Description: `Medium frequency crystals, for use with cruiser-sized lasers.`,
},
105: &MarketGroup{
ID: 105,
ParentID: 851,
Name: `Large`,
Description: `Large frequency crystals, for use with battleship-sized lasers.`,
},
106: &MarketGroup{
ID: 106,
ParentID: 848,
Name: `Large`,
Description: `Large hybrid shells, fired by battleship-sized guns.`,
},
107: &MarketGroup{
ID: 107,
ParentID: 848,
Name: `Small`,
Description: `Small hybrid shells, fired by frigate-sized guns.`,
},
108: &MarketGroup{
ID: 108,
ParentID: 848,
Name: `Medium`,
Description: `Medium hybrid shells, fired by cruiser-sized guns.`,
},
109: &MarketGroup{
ID: 109,
ParentID: 845,
Name: `Large`,
Description: `Large projectile shells, fired by battleship-sized guns.`,
},
112: &MarketGroup{
ID: 112,
ParentID: 845,
Name: `Medium`,
Description: `Medium projectile shells, fired by cruiser-sized guns.`,
},
113: &MarketGroup{
ID: 113,
ParentID: 845,
Name: `Small`,
Description: `Small projectile shells, fired by frigate-sized guns.`,
},
114: &MarketGroup{
ID: 114,
ParentID: 11,
Name: `Missiles`,
Description: `Self-propelled projectiles with various payloads`,
},
115: &MarketGroup{
ID: 115,
ParentID: 114,
Name: `Auto-Targeting`,
Description: `For when targeting systems fail.`,
},
116: &MarketGroup{
ID: 116,
ParentID: 114,
Name: `Defender`,
Description: `The anti-missile missile.`,
},
117: &MarketGroup{
ID: 117,
ParentID: 114,
Name: `Light Missiles`,
Description: `Jet-propelled projectiles for standard-sized launchers.`,
},
118: &MarketGroup{
ID: 118,
ParentID: 114,
Name: `Rockets`,
Description: `Hundreds of tiny bites will wear away even the toughest opponent.`,
},
120: &MarketGroup{
ID: 120,
ParentID: 11,
Name: `Probes`,
Description: `Deployed into space for scanning or disruption purposes`,
},
126: &MarketGroup{
ID: 126,
ParentID: 554,
Name: `Shield Rechargers`,
Description: `Provide an increase to the shield's recharge rate.`,
},
128: &MarketGroup{
ID: 128,
ParentID: 554,
Name: `Remote Shield Boosters`,
Description: `Transfer shields to another ship.`,
},
131: &MarketGroup{
ID: 131,
ParentID: 52,
Name: `Microwarpdrives`,
Description: `Ultra-advanced propulsion subsystems giving a massive speed boost.`,
},
132: &MarketGroup{
ID: 132,
ParentID: 52,
Name: `Propulsion Upgrades`,
Description: `Hull modifications designed to increase a ship's speed.`,
},
133: &MarketGroup{
ID: 133,
ParentID: 14,
Name: `Armor Plates`,
Description: `Reinforced armor plates for additional protection.`,
},
134: &MarketGroup{
ID: 134,
ParentID: 14,
Name: `Armor Repairers`,
Description: `Nano-assembler technology at its best. Repair armor on the fly.`,
},
135: &MarketGroup{
ID: 135,
ParentID: 14,
Name: `Hull Upgrades`,
Description: `Hull modifications with various applications.`,
},
139: &MarketGroup{
ID: 139,
ParentID: 11,
Name: `Cap Booster Charges`,
Description: `Give an instant boost to a spaceship's capacitor`,
},
140: &MarketGroup{
ID: 140,
ParentID: 10,
Name: `Missile Launchers`,
Description: `For the launching of jet-propelled guided missiles.`,
},
141: &MarketGroup{
ID: 141,
ParentID: 9,
Name: `Smartbombs`,
Description: `Omnidirectional energy blasts that damage everything in a radius. Good against drones`,
},
143: &MarketGroup{
ID: 143,
ParentID: 10,
Name: `Weapon Upgrades`,
Description: `Subsystems designed to improve weapon efficiency.`,
},
150: &MarketGroup{
ID: 150,
ParentID: 0,
Name: `Skills`,
Description: `Skills are learned by capsuleer pilots using special data chips known as skillbooks, and these are available for almost any imaginable activity that can be carried out in space`,
},
157: &MarketGroup{
ID: 157,
ParentID: 0,
Name: `Drones`,
Description: `Drones are semi-autonomous robotic devices used for military and industrial purposes throughout space`,
},
158: &MarketGroup{
ID: 158,
ParentID: 157,
Name: `Mining Drones`,
Description: `Drones capable of mining asteroids for raw materials`,
},
159: &MarketGroup{
ID: 159,
ParentID: 157,
Name: `Combat Drones`,
Description: `Drones with combat capabilities`,
},
204: &MarketGroup{
ID: 204,
ParentID: 2,
Name: `Ships`,
Description: `Blueprints of capsuleer spaceships`,
},
205: &MarketGroup{
ID: 205,
ParentID: 204,
Name: `Frigates`,
Description: `Blueprints of frigate-class vessels.`,
},
206: &MarketGroup{
ID: 206,
ParentID: 204,
Name: `Cruisers`,
Description: `Blueprints of cruiser-class vessels.`,
},
207: &MarketGroup{
ID: 207,
ParentID: 204,
Name: `Battleships`,
Description: `Blueprints of battleship-class vessels.`,
},
208: &MarketGroup{
ID: 208,
ParentID: 204,
Name: `Industrial Ships`,
Description: `Blueprints of industrial-class vessels.`,
},
209: &MarketGroup{
ID: 209,
ParentID: 2,
Name: `Ship Equipment`,
Description: `Blueprints of modules and equipment for capsuleer spaceships`,
},
210: &MarketGroup{
ID: 210,
ParentID: 209,
Name: `Turrets & Bays`,
Description: `Blueprints of weapons systems for capsule-fitted vessels.`,
},
211: &MarketGroup{
ID: 211,
ParentID: 2,
Name: `Ammunition & Charges`,
Description: `Blueprints of ammunition and charges`,
},
214: &MarketGroup{
ID: 214,
ParentID: 209,
Name: `Hull & Armor `,
Description: `Blueprints of hull and armor systems.`,
},
252: &MarketGroup{
ID: 252,
ParentID: 209,
Name: `Propulsion `,
Description: `Blueprints of propulsion systems.`,
},
261: &MarketGroup{
ID: 261,
ParentID: 205,
Name: `Caldari`,
Description: `Blueprints of Caldari frigate designs.`,
},
264: &MarketGroup{
ID: 264,
ParentID: 205,
Name: `Minmatar`,
Description: `Blueprints of Minmatar frigate designs.`,
},
272: &MarketGroup{
ID: 272,
ParentID: 205,
Name: `Amarr`,
Description: `Blueprints of Amarr frigate designs.`,
},
273: &MarketGroup{
ID: 273,
ParentID: 206,
Name: `Minmatar`,
Description: `Blueprints of Minmatar cruiser designs.`,
},
274: &MarketGroup{
ID: 274,
ParentID: 206,
Name: `Amarr`,
Description: `Blueprints of Amarr cruiser designs.`,
},
275: &MarketGroup{
ID: 275,
ParentID: 206,
Name: `Caldari`,
Description: `Blueprints of Caldari cruiser designs.`,
},
276: &MarketGroup{
ID: 276,
ParentID: 206,
Name: `Gallente`,
Description: `Blueprints of Gallente cruiser designs.`,
},
277: &MarketGroup{
ID: 277,
ParentID: 205,
Name: `Gallente`,
Description: `Blueprints of Gallente frigate designs.`,
},
278: &MarketGroup{
ID: 278,
ParentID: 207,
Name: `Minmatar`,
Description: `Blueprints of Minmatar battleship designs.`,
},
279: &MarketGroup{
ID: 279,
ParentID: 207,
Name: `Amarr`,
Description: `Blueprints of Amarr battleship designs.`,
},
280: &MarketGroup{
ID: 280,
ParentID: 207,
Name: `Caldari`,
Description: `Blueprints of Caldari battleship designs.`,
},
281: &MarketGroup{
ID: 281,
ParentID: 207,
Name: `Gallente`,
Description: `Blueprints of Gallente battleship designs.`,
},
282: &MarketGroup{
ID: 282,
ParentID: 208,
Name: `Minmatar`,
Description: `Blueprints of Minmatar industrial designs.`,
},
283: &MarketGroup{
ID: 283,
ParentID: 208,
Name: `Gallente`,
Description: `Blueprints of Gallente industrial designs.`,
},
284: &MarketGroup{
ID: 284,
ParentID: 208,
Name: `Caldari`,
Description: `Blueprints of Caldari industrial designs.`,
},
285: &MarketGroup{
ID: 285,
ParentID: 208,
Name: `Amarr`,
Description: `Blueprints of Amarr industrial designs.`,
},
286: &MarketGroup{
ID: 286,
ParentID: 210,
Name: `Hybrid Turrets`,
Description: `Blueprints of hybrid turrets for capsule-fitted vessels.`,
},
287: &MarketGroup{
ID: 287,
ParentID: 210,
Name: `Projectile Turrets`,
Description: `Blueprints of projectile turrets for capsule-fitted vessels.`,
},
288: &MarketGroup{
ID: 288,
ParentID: 210,
Name: `Energy Turrets`,
Description: `Blueprints of energy turrets for capsule-fitted ships.`,
},
289: &MarketGroup{
ID: 289,
ParentID: 286,
Name: `Large`,
Description: `Blueprints of large hybrid turrets.`,
},
290: &MarketGroup{
ID: 290,
ParentID: 286,
Name: `Medium`,
Description: `Blueprints of medium hybrid turrets.`,
},
291: &MarketGroup{
ID: 291,
ParentID: 286,
Name: `Small`,
Description: `Blueprints of small hybrid turrets.`,
},
292: &MarketGroup{
ID: 292,
ParentID: 288,
Name: `Small`,
Description: `Blueprints of small laser turrets.`,
},
293: &MarketGroup{
ID: 293,
ParentID: 288,
Name: `Medium`,
Description: `Blueprints of medium laser turrets.`,
},
295: &MarketGroup{
ID: 295,
ParentID: 288,
Name: `Large`,
Description: `Blueprints of large laser turrets.`,
},
296: &MarketGroup{
ID: 296,
ParentID: 287,
Name: `Small`,
Description: `Blueprints of small projectile turrets.`,
},
297: &MarketGroup{
ID: 297,
ParentID: 287,
Name: `Medium`,
Description: `Blueprints of medium projectile turrets.`,
},
298: &MarketGroup{
ID: 298,
ParentID: 287,
Name: `Large`,
Description: `Blueprints of large projectile turrets.`,
},
299: &MarketGroup{
ID: 299,
ParentID: 211,
Name: `Projectile Ammo`,
Description: `Projectile ammo blueprints.`,
},
300: &MarketGroup{
ID: 300,
ParentID: 211,
Name: `Hybrid Charges`,
Description: `Hybrid charge blueprints.`,
},
301: &MarketGroup{
ID: 301,
ParentID: 211,
Name: `Frequency Crystals`,
Description: `Frequency crystal blueprints.`,
},
302: &MarketGroup{
ID: 302,
ParentID: 301,
Name: `Small`,
Description: `Blueprints of small frequency crystals.`,
},
303: &MarketGroup{
ID: 303,
ParentID: 301,
Name: `Medium`,
Description: `Blueprints of medium frequency crystals.`,
},
305: &MarketGroup{
ID: 305,
ParentID: 301,
Name: `Large`,
Description: `Blueprints of large frequency crystals.`,
},
306: &MarketGroup{
ID: 306,
ParentID: 300,
Name: `Large`,
Description: `Blueprints of large hybrid ammunition.`,
},
307: &MarketGroup{
ID: 307,
ParentID: 300,
Name: `Small`,
Description: `Blueprints of small hybrid ammunition.`,
},
308: &MarketGroup{
ID: 308,
ParentID: 300,
Name: `Medium`,
Description: `Blueprints of medium hybrid ammunition.`,
},
309: &MarketGroup{
ID: 309,
ParentID: 299,
Name: `Large`,
Description: `Blueprints of large projectile ammunition.`,
},
312: &MarketGroup{
ID: 312,
ParentID: 299,
Name: `Medium`,
Description: `Blueprints of medium projectile ammunition.`,
},
313: &MarketGroup{
ID: 313,
ParentID: 299,
Name: `Small`,
Description: `Blueprints of small projectile ammunition.`,
},
314: &MarketGroup{
ID: 314,
ParentID: 211,
Name: `Missiles`,
Description: `Missile blueprints.`,
},
315: &MarketGroup{
ID: 315,
ParentID: 314,
Name: `Auto-Targeting`,
Description: `Blueprints of friend-or-foe missiles.`,
},
316: &MarketGroup{
ID: 316,
ParentID: 314,
Name: `Defender`,
Description: `Blueprints of defender missiles.`,
},
318: &MarketGroup{
ID: 318,
ParentID: 314,
Name: `Rockets`,
Description: `Blueprints of rockets.`,
},
320: &MarketGroup{
ID: 320,
ParentID: 1520,
Name: `Scan Probes`,
Description: `Blueprints of scan probes.`,
},
325: &MarketGroup{
ID: 325,
ParentID: 1710,
Name: `Scanners`,
Description: `Blueprints of scanners.`,
},
331: &MarketGroup{
ID: 331,
ParentID: 252,
Name: `Microwarpdrives`,
Description: `Blueprints of microwarpdrives.`,
},
332: &MarketGroup{
ID: 332,
ParentID: 252,
Name: `Propulsion Upgrades`,
Description: `Blueprints of propulsion upgrades.`,
},
335: &MarketGroup{
ID: 335,
ParentID: 214,
Name: `Hull Upgrades`,
Description: `Blueprints of hull upgrades.`,
},
338: &MarketGroup{
ID: 338,
ParentID: 1711,
Name: `Mining Lasers`,
Description: `Blueprints of mining lasers.`,
},
339: &MarketGroup{
ID: 339,
ParentID: 211,
Name: `Cap Booster Charges`,
Description: `Blueprints of cap booster charges.`,
},
340: &MarketGroup{
ID: 340,
ParentID: 210,
Name: `Missile Launchers`,
Description: `Blueprints of missile launchers.`,
},
341: &MarketGroup{
ID: 341,
ParentID: 209,
Name: `Smartbombs`,
Description: `Blueprints of smartbombs.`,
},
343: &MarketGroup{
ID: 343,
ParentID: 210,
Name: `Weapon Upgrades`,
Description: `Blueprints of weapon upgrades.`,
},
357: &MarketGroup{
ID: 357,
ParentID: 2,
Name: `Drones`,
Description: `Blueprints of drone designs`,
},
358: &MarketGroup{
ID: 358,
ParentID: 357,
Name: `Mining Drones`,
Description: `Blueprints of mining drone designs.`,
},
359: &MarketGroup{
ID: 359,
ParentID: 1530,
Name: `Heavy Attack Drones`,
Description: `Blueprints of heavy attack drone designs.`,
},
364: &MarketGroup{
ID: 364,
ParentID: 150,
Name: `Gunnery`,
Description: `Skills pertaining to the efficient use of turret-based weapon systems`,
},
365: &MarketGroup{
ID: 365,
ParentID: 150,
Name: `Corporation Management`,
Description: `Skills pertaining to management of large social groups`,
},
366: &MarketGroup{
ID: 366,
ParentID: 150,
Name: `Drones`,
Description: `Skills pertaining to the efficient operation of drones`,
},
367: &MarketGroup{
ID: 367,
ParentID: 150,
Name: `Electronic Systems`,
Description: `Skills pertaining to management of a spaceship's electronic systems`,
},
368: &MarketGroup{
ID: 368,
ParentID: 150,
Name: `Engineering`,
Description: `Skills pertaining to management of a spaceship's hardware`,
},
369: &MarketGroup{
ID: 369,
ParentID: 150,
Name: `Production`,
Description: `Skills pertaining to the efficient use of manufacturing facilities and industrial devices`,
},
370: &MarketGroup{
ID: 370,
ParentID: 150,
Name: `Fleet Support`,
Description: `Skills pertaining to the operation of Command Burst modules`,
},
372: &MarketGroup{
ID: 372,
ParentID: 150,
Name: `Rigging`,
Description: `Skills pertaining to the fine tuning of a spaceship's installed systems`,
},
373: &MarketGroup{
ID: 373,
ParentID: 150,
Name: `Missiles`,
Description: `Skills pertaining to the efficient use of self-propelled warheads`,
},
374: &MarketGroup{
ID: 374,
ParentID: 150,
Name: `Navigation`,
Description: `Skills pertaining to navigating your spaceship as quickly and efficiently as possible`,
},
375: &MarketGroup{
ID: 375,
ParentID: 150,
Name: `Science`,
Description: `Skills pertaining to various fields of scientific knowledge`,
},
376: &MarketGroup{
ID: 376,
ParentID: 150,
Name: `Social`,
Description: `Skills pertaining to efficient navigation through the social landscape`,
},
377: &MarketGroup{
ID: 377,
ParentID: 150,
Name: `Spaceship Command`,
Description: `Skills required for commanding all shapes and sizes of spaceships`,
},
378: &MarketGroup{
ID: 378,
ParentID: 150,
Name: `Trade`,
Description: `Skills pertaining to managing commercial activities`,
},
379: &MarketGroup{
ID: 379,
ParentID: 404,
Name: `Cargo Containers`,
Description: `For deep-space storage of valuable materials.`,
},
380: &MarketGroup{
ID: 380,
ParentID: 141,
Name: `Micro`,
Description: `The smallest smartbombs available, for when powergrid and CPU are scarce.`,
},
381: &MarketGroup{
ID: 381,
ParentID: 141,
Name: `Large`,
Description: `Battleship-sized smartbombs.`,
},
382: &MarketGroup{
ID: 382,
ParentID: 141,
Name: `Small`,
Description: `Frigate-sized smartbombs.`,
},
383: &MarketGroup{
ID: 383,
ParentID: 141,
Name: `Medium`,
Description: `Cruiser-sized smartbombs.`,
},
387: &MarketGroup{
ID: 387,
ParentID: 114,
Name: `Torpedoes`,
Description: `Devastating jet-propelled projectiles carrying huge payloads.`,
},
390: &MarketGroup{
ID: 390,
ParentID: 314,
Name: `Torpedoes`,
Description: `Blueprints of torpedoes.`,
},
391: &MarketGroup{
ID: 391,
ParentID: 4,
Name: `Shuttles`,
Description: `Fast and cheap vessels for easy transport`,
},
393: &MarketGroup{
ID: 393,
ParentID: 391,
Name: `Amarr`,
Description: `Amarr shuttle designs.`,
},
394: &MarketGroup{
ID: 394,
ParentID: 391,
Name: `Minmatar`,
Description: `Minmatar shuttle designs.`,
},
395: &MarketGroup{
ID: 395,
ParentID: 391,
Name: `Gallente`,
Description: `Gallente shuttle designs.`,
},
396: &MarketGroup{
ID: 396,
ParentID: 391,
Name: `Caldari`,
Description: `Caldari shuttle designs.`,
},
399: &MarketGroup{
ID: 399,
ParentID: 1364,
Name: `Interceptors`,
Description: `Lightning-fast, highly maneuverable frigates.`,
},
400: &MarketGroup{
ID: 400,
ParentID: 399,
Name: `Amarr`,
Description: `Amarr interceptor designs.`,
},
401: &MarketGroup{
ID: 401,
ParentID: 399,
Name: `Caldari`,
Description: `Caldari interceptor designs.`,
},
402: &MarketGroup{
ID: 402,
ParentID: 399,
Name: `Gallente`,
Description: `Gallente interceptor designs.`,
},
403: &MarketGroup{
ID: 403,
ParentID: 399,
Name: `Minmatar`,
Description: `Minmatar interceptor designs.`,
},
404: &MarketGroup{
ID: 404,
ParentID: 477,
Name: `Deployable Structures`,
Description: `Various structures that can be deployed on behalf of an individual or group of capsuleers`,
},
405: &MarketGroup{
ID: 405,
ParentID: 404,
Name: `Warp Disruption Fields `,
Description: `Trap a fly in your web.`,
},
406: &MarketGroup{
ID: 406,
ParentID: 1338,
Name: `Deployable Structures`,
Description: `Blueprints of deployable structures.`,
},
407: &MarketGroup{
ID: 407,
ParentID: 406,
Name: `Warp Disruption Fields`,
Description: `Blueprints of Warp disruption fields.`,
},
408: &MarketGroup{
ID: 408,
ParentID: 204,
Name: `Interceptors`,
Description: `Blueprints of interceptor-class vessels.`,
},
410: &MarketGroup{
ID: 410,
ParentID: 408,
Name: `Amarr`,
Description: `Blueprints of Amarr interceptor designs.`,
},
411: &MarketGroup{
ID: 411,
ParentID: 408,
Name: `Caldari`,
Description: `Blueprints of Caldari interceptor designs.`,
},
412: &MarketGroup{
ID: 412,
ParentID: 408,
Name: `Gallente`,
Description: `Blueprints of Gallente interceptor designs.`,
},
413: &MarketGroup{
ID: 413,
ParentID: 408,
Name: `Minmatar`,
Description: `Blueprints of Minmatar interceptor designs.`,
},
414: &MarketGroup{
ID: 414,
ParentID: 204,
Name: `Shuttles`,
Description: `Blueprints of shuttle-class vessels.`,
},
415: &MarketGroup{
ID: 415,
ParentID: 414,
Name: `Amarr`,
Description: `Blueprints of Amarr shuttle designs.`,
},
416: &MarketGroup{
ID: 416,
ParentID: 414,
Name: `Caldari`,
Description: `Blueprints of Caldari shuttle designs.`,
},
417: &MarketGroup{
ID: 417,
ParentID: 414,
Name: `Gallente`,
Description: `Blueprints of Gallente shuttle designs.`,
},
418: &MarketGroup{
ID: 418,
ParentID: 414,
Name: `Minmatar`,
Description: `Blueprints of Minmatar shuttle designs.`,
},
419: &MarketGroup{
ID: 419,
ParentID: 204,
Name: `Covert Ops`,
Description: `Blueprints of covert ops-class vessels.`,
},
420: &MarketGroup{
ID: 420,
ParentID: 1364,
Name: `<NAME>`,
Description: `Frigates designed for clandestine operations.`,
},
421: &MarketGroup{
ID: 421,
ParentID: 420,
Name: `Amarr`,
Description: `Amarr covert ops designs.`,
},
422: &MarketGroup{
ID: 422,
ParentID: 420,
Name: `Caldari`,
Description: `Caldari covert ops designs.`,
},
423: &MarketGroup{
ID: 423,
ParentID: 420,
Name: `Gallente`,
Description: `Gallente covert ops designs.`,
},
424: &MarketGroup{
ID: 424,
ParentID: 420,
Name: `Minmatar`,
Description: `Minmatar covert ops designs.`,
},
425: &MarketGroup{
ID: 425,
ParentID: 419,
Name: `Amarr`,
Description: `Blueprints of Amarr covert ops designs.`,
},
427: &MarketGroup{
ID: 427,
ParentID: 419,
Name: `Caldari`,
Description: `Blueprints of Caldari covert ops designs.`,
},
428: &MarketGroup{
ID: 428,
ParentID: 419,
Name: `Gallente`,
Description: `Blueprints of Gallente covert ops designs.`,
},
429: &MarketGroup{
ID: 429,
ParentID: 419,
Name: `Minmatar`,
Description: `Blueprints of Minmatar covert ops designs.`,
},
430: &MarketGroup{
ID: 430,
ParentID: 1578,
Name: `Cloaking Devices`,
Description: `Blueprints of cloaking devices.`,
},
432: &MarketGroup{
ID: 432,
ParentID: 1364,
Name: `Assault Frigates`,
Description: `Sturdy and powerful frigates built for all-out combat.`,
},
433: &MarketGroup{
ID: 433,
ParentID: 432,
Name: `Amarr`,
Description: `Amarr assault frigate designs.`,
},
434: &MarketGroup{
ID: 434,
ParentID: 432,
Name: `Caldari`,
Description: `Caldari assault frigate designs.`,
},
435: &MarketGroup{
ID: 435,
ParentID: 432,
Name: `Gallente`,
Description: `Gallente assault frigate designs.`,
},
436: &MarketGroup{
ID: 436,
ParentID: 432,
Name: `Minmatar`,
Description: `Minmatar assault frigate designs.`,
},
437: &MarketGroup{
ID: 437,
ParentID: 1368,
Name: `Logistics`,
Description: `Support ships, engineered for field assistance in large engagements.`,
},
438: &MarketGroup{
ID: 438,
ParentID: 437,
Name: `Amarr`,
Description: `Amarr logistics vessel designs.`,
},
439: &MarketGroup{
ID: 439,
ParentID: 437,
Name: `Caldari`,
Description: `Caldari logistics vessel designs.`,
},
440: &MarketGroup{
ID: 440,
ParentID: 437,
Name: `Gallente`,
Description: `Gallente logistics vessel designs.`,
},
441: &MarketGroup{
ID: 441,
ParentID: 437,
Name: `Minmatar`,
Description: `Minmatar logistics vessel designs.`,
},
442: &MarketGroup{
ID: 442,
ParentID: 204,
Name: `Logistics`,
Description: `Blueprints of Logistics-class vessels.`,
},
443: &MarketGroup{
ID: 443,
ParentID: 442,
Name: `Amarr`,
Description: `Blueprints of Amarr logistics designs.`,
},
444: &MarketGroup{
ID: 444,
ParentID: 442,
Name: `Caldari`,
Description: `Blueprints of Caldari logistics designs.`,
},
445: &MarketGroup{
ID: 445,
ParentID: 442,
Name: `Gallente`,
Description: `Blueprints of Gallente logistics designs.`,
},
446: &MarketGroup{
ID: 446,
ParentID: 442,
Name: `Minmatar`,
Description: `Blueprints of Minmatar logistics designs.`,
},
448: &MarketGroup{
ID: 448,
ParentID: 1368,
Name: `Heavy Assault Cruisers`,
Description: `Sturdy and powerful cruisers built for all-out combat.`,
},
449: &MarketGroup{
ID: 449,
ParentID: 448,
Name: `Amarr`,
Description: `Amarr heavy assault cruiser designs.`,
},
450: &MarketGroup{
ID: 450,
ParentID: 448,
Name: `Caldari`,
Description: `Caldari heavy assault cruiser designs.`,
},
451: &MarketGroup{
ID: 451,
ParentID: 448,
Name: `Gallente`,
Description: `Gallente heavy assault cruiser designs.`,
},
452: &MarketGroup{
ID: 452,
ParentID: 448,
Name: `Minmatar`,
Description: `Minmatar heavy assault cruiser designs.`,
},
453: &MarketGroup{
ID: 453,
ParentID: 204,
Name: `Heavy Assault Cruisers`,
Description: `Blueprints of heavy assault-class vessels.`,
},
454: &MarketGroup{
ID: 454,
ParentID: 453,
Name: `Amarr`,
Description: `Blueprints of Amarr heavy assault cruiser designs.`,
},
455: &MarketGroup{
ID: 455,
ParentID: 453,
Name: `Caldari`,
Description: `Blueprints of Caldari heavy assault cruiser designs.`,
},
456: &MarketGroup{
ID: 456,
ParentID: 453,
Name: `Gallente`,
Description: `Blueprints of Gallente heavy assault cruiser designs.`,
},
457: &MarketGroup{
ID: 457,
ParentID: 453,
Name: `Minmatar`,
Description: `Blueprints of Minmatar heavy assault cruiser designs.`,
},
458: &MarketGroup{
ID: 458,
ParentID: 204,
Name: `Assault Frigates`,
Description: `Blueprints of assault-class vessels.`,
},
459: &MarketGroup{
ID: 459,
ParentID: 458,
Name: `Amarr`,
Description: `Blueprints of Amarr assault frigate designs.`,
},
461: &MarketGroup{
ID: 461,
ParentID: 458,
Name: `Caldari`,
Description: `Blueprints of Caldari assault frigate designs.`,
},
462: &MarketGroup{
ID: 462,
ParentID: 458,
Name: `Gallente`,
Description: `Blueprints of Gallente assault frigate designs.`,
},
463: &MarketGroup{
ID: 463,
ParentID: 458,
Name: `Minmatar`,
Description: `Blueprints of Minmatar assault frigate designs.`,
},
464: &MarketGroup{
ID: 464,
ParentID: 1372,
Name: `Standard Destroyers`,
Description: `Anti-frigate gunboats. The middle ground between a frigate and a cruiser.`,
},
465: &MarketGroup{
ID: 465,
ParentID: 464,
Name: `Amarr`,
Description: `Amarr destroyer designs.`,
},
466: &MarketGroup{
ID: 466,
ParentID: 464,
Name: `Caldari`,
Description: `Caldari destroyer designs.`,
},
467: &MarketGroup{
ID: 467,
ParentID: 464,
Name: `Gallente`,
Description: `Gallente destroyer designs.`,
},
468: &MarketGroup{
ID: 468,
ParentID: 464,
Name: `Minmatar`,
Description: `Minmatar destroyer designs.`,
},
469: &MarketGroup{
ID: 469,
ParentID: 1374,
Name: `Standard Battlecruisers`,
Description: `A class of powerful combat vessels midway between cruisers and battleships.`,
},
470: &MarketGroup{
ID: 470,
ParentID: 469,
Name: `Amarr`,
Description: `Amarr battlecruiser designs.`,
},
471: &MarketGroup{
ID: 471,
ParentID: 469,
Name: `Caldari`,
Description: `Caldari battlecruiser designs.`,
},
472: &MarketGroup{
ID: 472,
ParentID: 469,
Name: `Gallente`,
Description: `Gallente battlecruiser designs.`,
},
473: &MarketGroup{
ID: 473,
ParentID: 469,
Name: `Minmatar`,
Description: `Minmatar battlecruiser designs.`,
},
475: &MarketGroup{
ID: 475,
ParentID: 0,
Name: `Manufacture & Research`,
Description: `The perpetual engine of space industry is driven by vast quantities of materials, components and research equipment that are constantly being traded on the capsuleer market`,
},
477: &MarketGroup{
ID: 477,
ParentID: 0,
Name: `Structures`,
Description: `Capsuleers have many options when they decide to set up a home in space, from personal deployables to capsuleer-controlled outpost stations`,
},
478: &MarketGroup{
ID: 478,
ParentID: 1285,
Name: `Control Towers`,
Description: `The starbase's backbone.`,
},
479: &MarketGroup{
ID: 479,
ParentID: 480,
Name: `Missile Batteries`,
Description: `Stationary missile batteries for starbase defense.`,
},
480: &MarketGroup{
ID: 480,
ParentID: 1285,
Name: `Weapon Batteries`,
Description: `Stationary weapon batteries for starbase defense.`,
},
481: &MarketGroup{
ID: 481,
ParentID: 480,
Name: `Electronic Warfare Batteries`,
Description: `Batteries providing electronic countermeasures for starbase defense.`,
},
482: &MarketGroup{
ID: 482,
ParentID: 1285,
Name: `Reprocessing Arrays`,
Description: `Anchorable reprocessing structures.`,
},
483: &MarketGroup{
ID: 483,
ParentID: 1285,
Name: `Silos`,
Description: `For storing or providing resources, or regulating material flow in industrial processes. `,
},
484: &MarketGroup{
ID: 484,
ParentID: 1285,
Name: `Ship Maintenance Arrays`,
Description: `Mobile hangar and fitting structures.`,
},
485: &MarketGroup{
ID: 485,
ParentID: 1285,
Name: `Shield Hardening Arrays`,
Description: `Anchorable structures for the boosting of a control tower's shield resistances.`,
},
488: &MarketGroup{
ID: 488,
ParentID: 1285,
Name: `Moon Harvesting Arrays`,
Description: `Deployable arrays designed to gather minerals from moons.`,
},
490: &MarketGroup{
ID: 490,
ParentID: 1285,
Name: `Reactors`,
Description: `Deployable structures where chemical processes take place.`,
},
491: &MarketGroup{
ID: 491,
ParentID: 19,
Name: `Narcotics`,
Description: `Illegal substances that alter brain chemistry, for good or ill. Transport with caution`,
},
492: &MarketGroup{
ID: 492,
ParentID: 19,
Name: `Consumer Products`,
Description: `Products in high demand with the materialistic public. Can be bought and sold for profit`,
},
494: &MarketGroup{
ID: 494,
ParentID: 1384,
Name: `Mining Barges`,
Description: `Lumbering, voracious hulks, expressly created for ore accumulation.`,
},
496: &MarketGroup{
ID: 496,
ParentID: 204,
Name: `Mining Barges`,
Description: `Blueprints of mining barge-class vessels.`,
},
497: &MarketGroup{
ID: 497,
ParentID: 496,
Name: `ORE`,
Description: `Blueprints of ORE mining barge designs.`,
},
499: &MarketGroup{
ID: 499,
ParentID: 1034,
Name: `Advanced Moon Materials`,
Description: `Materials used for Tech II vessel and equipment production.`,
},
500: &MarketGroup{
ID: 500,
ParentID: 1034,
Name: `Processed Moon Materials`,
Description: `Compounds used in complex reactions.`,
},
501: &MarketGroup{
ID: 501,
ParentID: 1034,
Name: `Raw Moon Materials`,
Description: `Pure elements and other raw materials from nature.`,
},
502: &MarketGroup{
ID: 502,
ParentID: 845,
Name: `Extra Large`,
Description: `Fired by dreadnaught-sized guns and stationary defense systems.`,
},
503: &MarketGroup{
ID: 503,
ParentID: 851,
Name: `Extra Large`,
Description: `For use with dreadnought-sized lasers and stationary defense systems.`,
},
504: &MarketGroup{
ID: 504,
ParentID: 848,
Name: `Extra Large`,
Description: `Fired by dreadnought-sized guns and stationary defense systems.`,
},
505: &MarketGroup{
ID: 505,
ParentID: 114,
Name: `XL Torpedoes`,
Description: `The most powerful missiles to be found anywhere.`,
},
506: &MarketGroup{
ID: 506,
ParentID: 1285,
Name: `Corporate Hangar Array`,
Description: `Communal hangar structures with divisional compartments.`,
},
512: &MarketGroup{
ID: 512,
ParentID: 54,
Name: `Arkonor`,
Description: `Sub-types of arkonor ore.`,
},
514: &MarketGroup{
ID: 514,
ParentID: 54,
Name: `Bistot`,
Description: `Sub-types of bistot ore.`,
},
515: &MarketGroup{
ID: 515,
ParentID: 54,
Name: `Pyroxeres`,
Description: `Sub-types of Pyroxeres ore.`,
},
516: &MarketGroup{
ID: 516,
ParentID: 54,
Name: `Plagioclase`,
Description: `Sub-types of plagioclase ore.`,
},
517: &MarketGroup{
ID: 517,
ParentID: 54,
Name: `Spodumain`,
Description: `Sub-types of spodumain ore.`,
},
518: &MarketGroup{
ID: 518,
ParentID: 54,
Name: `Veldspar`,
Description: `Sub-types of veldspar ore.
`,
},
519: &MarketGroup{
ID: 519,
ParentID: 54,
Name: `Scordite`,
Description: `Sub-types of scordite ore.
`,
},
521: &MarketGroup{
ID: 521,
ParentID: 54,
Name: `Crokite`,
Description: `Sub-types of crokite ore.`,
},
522: &MarketGroup{
ID: 522,
ParentID: 54,
Name: `<NAME>`,
Description: `Sub-types of dark ochre ore.`,
},
523: &MarketGroup{
ID: 523,
ParentID: 54,
Name: `Kernite`,
Description: `Sub-types of kernite ore.`,
},
525: &MarketGroup{
ID: 525,
ParentID: 54,
Name: `Gneiss`,
Description: `Sub-types of gneiss ore.`,
},
526: &MarketGroup{
ID: 526,
ParentID: 54,
Name: `Omber`,
Description: `Sub-types of omber ore.`,
},
527: &MarketGroup{
ID: 527,
ParentID: 54,
Name: `Hedbergite`,
Description: `Sub-types of hedbergite ore.`,
},
528: &MarketGroup{
ID: 528,
ParentID: 54,
Name: `Hemorphite`,
Description: `Sub-types of hemorphite ore.`,
},
529: &MarketGroup{
ID: 529,
ParentID: 54,
Name: `Jaspet`,
Description: `Sub-types of jaspet ore.`,
},
530: &MarketGroup{
ID: 530,
ParentID: 54,
Name: `Mercoxit`,
Description: `An extremely rare and valuable ore, yielding a unique mineral.`,
},
531: &MarketGroup{
ID: 531,
ParentID: 27,
Name: `Skill Hardwiring`,
Description: `Implants designed to increase a pilot's aptitude in a specialized area.`,
},
532: &MarketGroup{
ID: 532,
ParentID: 27,
Name: `Attribute Enhancers`,
Description: `Implants designed to augment specific aspects of the pilot's mental make-up.`,
},
533: &MarketGroup{
ID: 533,
ParentID: 475,
Name: `Materials`,
Description: `Various materials used in manufacturing`,
},
535: &MarketGroup{
ID: 535,
ParentID: 14,
Name: `Armor Hardeners`,
Description: `Electronically enhanced armor plating augmentation.`,
},
537: &MarketGroup{
ID: 537,
ParentID: 14,
Name: `Remote Armor Repairers`,
Description: `Patch up your comrades at a distance.`,
},
538: &MarketGroup{
ID: 538,
ParentID: 14,
Name: `Hull Repairers`,
Description: `For on-the-fly structural restoration.`,
},
540: &MarketGroup{
ID: 540,
ParentID: 14,
Name: `Armor Resistance Coatings`,
Description: `Armor Resistance Coatings`,
},
541: &MarketGroup{
ID: 541,
ParentID: 14,
Name: `Energized Armor Resistance Membranes`,
Description: `Energized Armor Resistance Membranes`,
},
542: &MarketGroup{
ID: 542,
ParentID: 52,
Name: `Afterburners`,
Description: `Provide additional thrust to the ship's engines.`,
},
550: &MarketGroup{
ID: 550,
ParentID: 554,
Name: `Shield Resistance Amplifiers`,
Description: `Strengthen various aspects of the shields' subatomic containment fields.`,
},
551: &MarketGroup{
ID: 551,
ParentID: 554,
Name: `Shield Extenders`,
Description: `Boost the maximum strength of the ship's shield system.`,
},
552: &MarketGroup{
ID: 552,
ParentID: 554,
Name: `Shield Boosters`,
Description: `Convert energy from the ship's core into additional shield power.`,
},
553: &MarketGroup{
ID: 553,
ParentID: 554,
Name: `Shield Hardeners`,
Description: `Computerized systems designed to give a temporary increase in shield resistance.`,
},
554: &MarketGroup{
ID: 554,
ParentID: 9,
Name: `Shield`,
Description: `Modules that strengthen the energy barrier surrounding spaceships against external dangers`,
},
555: &MarketGroup{
ID: 555,
ParentID: 86,
Name: `Railguns`,
Description: `Railguns use magnetic rails to fire solid chunks of matter at hypersonic speed.`,
},
556: &MarketGroup{
ID: 556,
ParentID: 86,
Name: `Blasters`,
Description: `Blasters fire magnetically contained balls of subatomic particles.`,
},
557: &MarketGroup{
ID: 557,
ParentID: 88,
Name: `Beam Lasers`,
Description: `Beam lasers fire a concentrated, persistent stream of energy at their target.`,
},
558: &MarketGroup{
ID: 558,
ParentID: 88,
Name: `Pulse Lasers`,
Description: `Pulse lasers emit an oscillating energy beam at their target.`,
},
559: &MarketGroup{
ID: 559,
ParentID: 87,
Name: `Autocannons`,
Description: `Autocannons are effective at close ranges and possess great tracking ability.`,
},
560: &MarketGroup{
ID: 560,
ParentID: 87,
Name: `<NAME>`,
Description: `Slow-firing long-range cannons, able to deal crippling blows with great economy.`,
},
561: &MarketGroup{
ID: 561,
ParentID: 556,
Name: `Small`,
Description: `Fast-tracking, low-damage blasters, for use on frigates.`,
},
562: &MarketGroup{
ID: 562,
ParentID: 556,
Name: `Medium`,
Description: `Medium-sized blasters, for use on cruisers.`,
},
563: &MarketGroup{
ID: 563,
ParentID: 556,
Name: `Large`,
Description: `Slow-tracking, high-damage blasters, for use on battleships.`,
},
564: &MarketGroup{
ID: 564,
ParentID: 555,
Name: `Small`,
Description: `Fast-tracking, low-damage railguns, for use on frigates.`,
},
565: &MarketGroup{
ID: 565,
ParentID: 555,
Name: `Medium`,
Description: `Medium-sized railguns, for use on cruisers.`,
},
566: &MarketGroup{
ID: 566,
ParentID: 555,
Name: `Large`,
Description: `Slow-tracking, high-damage railguns, for use on battleships.`,
},
567: &MarketGroup{
ID: 567,
ParentID: 557,
Name: `Small`,
Description: `Small beam lasers, suitable for frigate use.`,
},
568: &MarketGroup{
ID: 568,
ParentID: 557,
Name: `Medium`,
Description: `Medium beam lasers, suitable for cruiser use.`,
},
569: &MarketGroup{
ID: 569,
ParentID: 557,
Name: `Large`,
Description: `Large beam lasers, suitable for battleship use.`,
},
570: &MarketGroup{
ID: 570,
ParentID: 558,
Name: `Small`,
Description: `Small pulse lasers, suitable for frigate use.`,
},
572: &MarketGroup{
ID: 572,
ParentID: 558,
Name: `Medium`,
Description: `Medium pulse lasers, suitable for cruiser use.`,
},
573: &MarketGroup{
ID: 573,
ParentID: 558,
Name: `Large`,
Description: `Large pulse lasers, suitable for battleship use.`,
},
574: &MarketGroup{
ID: 574,
ParentID: 559,
Name: `Small`,
Description: `Frigate-sized autocannons, effective at very close ranges.`,
},
575: &MarketGroup{
ID: 575,
ParentID: 559,
Name: `Medium`,
Description: `Cruiser-sized weapons. Deliver a rapid-fire barrage of shells at close ranges.`,
},
576: &MarketGroup{
ID: 576,
ParentID: 559,
Name: `Large`,
Description: `Battleship-sized autocannons. Deal massive damage, up close and personal.`,
},
577: &MarketGroup{
ID: 577,
ParentID: 560,
Name: `Small`,
Description: `Small artillery cannons, suitable for frigate use.`,
},
578: &MarketGroup{
ID: 578,
ParentID: 560,
Name: `Medium`,
Description: `Medium artillery cannons, suitable for cruiser use.`,
},
579: &MarketGroup{
ID: 579,
ParentID: 560,
Name: `Large`,
Description: `Large artillery cannons, suitable for battleship use.`,
},
580: &MarketGroup{
ID: 580,
ParentID: 114,
Name: `Cruise Missiles`,
Description: `Lifting-wing, jet-propelled guided missiles with heavy payloads.`,
},
581: &MarketGroup{
ID: 581,
ParentID: 114,
Name: `Heavy Missiles`,
Description: `Jet-propelled projectiles for heavy launchers.`,
},
582: &MarketGroup{
ID: 582,
ParentID: 204,
Name: `Destroyers`,
Description: `Blueprints of destroyer-class vessels.`,
},
583: &MarketGroup{
ID: 583,
ParentID: 582,
Name: `Amarr`,
Description: `Blueprints of Amarr destroyer designs.`,
},
584: &MarketGroup{
ID: 584,
ParentID: 582,
Name: `Caldari`,
Description: `Blueprints of Caldari destroyer designs.`,
},
585: &MarketGroup{
ID: 585,
ParentID: 582,
Name: `Gallente`,
Description: `Blueprints of Gallente destroyer designs.`,
},
586: &MarketGroup{
ID: 586,
ParentID: 582,
Name: `Minmatar`,
Description: `Blueprints of Minmatar destroyer designs.`,
},
588: &MarketGroup{
ID: 588,
ParentID: 204,
Name: `Battlecruisers`,
Description: `Blueprints of battlecruiser-class vessels.`,
},
589: &MarketGroup{
ID: 589,
ParentID: 588,
Name: `Amarr`,
Description: `Blueprints of Amarr battlecruiser designs.`,
},
590: &MarketGroup{
ID: 590,
ParentID: 588,
Name: `Caldari`,
Description: `Blueprints of Caldari battlecruiser designs.`,
},
591: &MarketGroup{
ID: 591,
ParentID: 588,
Name: `Gallente`,
Description: `Blueprints of Gallente battlecruiser designs.`,
},
592: &MarketGroup{
ID: 592,
ParentID: 588,
Name: `Minmatar`,
Description: `Blueprints of Minmatar battlecruiser designs.`,
},
593: &MarketGroup{
ID: 593,
ParentID: 11,
Name: `Mining Crystals`,
Description: `Frequency crystals custom-cut for different ore types`,
},
594: &MarketGroup{
ID: 594,
ParentID: 480,
Name: `Projectile Batteries`,
Description: `Projectile turret batteries.`,
},
595: &MarketGroup{
ID: 595,
ParentID: 480,
Name: `Hybrid Batteries`,
Description: `Hybrid turret batteries.`,
},
596: &MarketGroup{
ID: 596,
ParentID: 480,
Name: `Laser Batteries`,
Description: `Laser turret batteries.`,
},
597: &MarketGroup{
ID: 597,
ParentID: 299,
Name: `Extra Large`,
Description: `Blueprints of capital-sized projectile ammunition.`,
},
598: &MarketGroup{
ID: 598,
ParentID: 300,
Name: `Extra Large`,
Description: `Blueprints of capital-sized hybrid ammunition.`,
},
599: &MarketGroup{
ID: 599,
ParentID: 301,
Name: `Extra Large`,
Description: `Blueprints of capital-sized frequency crystals.`,
},
600: &MarketGroup{
ID: 600,
ParentID: 128,
Name: `Capital`,
Description: `Capital ship-sized shield transport units.`,
},
601: &MarketGroup{
ID: 601,
ParentID: 128,
Name: `Large`,
Description: `Battleship-sized shield transport units.`,
},
602: &MarketGroup{
ID: 602,
ParentID: 128,
Name: `Medium`,
Description: `Cruiser-sized shield transport units.`,
},
603: &MarketGroup{
ID: 603,
ParentID: 128,
Name: `Small`,
Description: `Frigate-sized shield transport units.`,
},
604: &MarketGroup{
ID: 604,
ParentID: 128,
Name: `Micro`,
Description: `The smallest of the shield transporters, for when CPU and powergrid are scarce.`,
},
605: &MarketGroup{
ID: 605,
ParentID: 551,
Name: `Small`,
Description: `Frigate-class shield extension systems.`,
},
606: &MarketGroup{
ID: 606,
ParentID: 551,
Name: `Medium`,
Description: `Cruiser-class shield extension systems.`,
},
608: &MarketGroup{
ID: 608,
ParentID: 551,
Name: `Large`,
Description: `Battleship-class shield extension systems.`,
},
609: &MarketGroup{
ID: 609,
ParentID: 552,
Name: `Small`,
Description: `Frigate-class shield boosters.`,
},
610: &MarketGroup{
ID: 610,
ParentID: 552,
Name: `Medium`,
Description: `Cruiser-class shield boosters.`,
},
611: &MarketGroup{
ID: 611,
ParentID: 552,
Name: `Large`,
Description: `Battleship-class shield boosters.`,
},
612: &MarketGroup{
ID: 612,
ParentID: 552,
Name: `Extra Large`,
Description: `Supercharged shield boosting units for advanced battleship pilots.`,
},
613: &MarketGroup{
ID: 613,
ParentID: 552,
Name: `Boost Amplifiers`,
Description: `Focus and amplify the efficiency of shield boosting modules.`,
},
614: &MarketGroup{
ID: 614,
ParentID: 19,
Name: `Criminal Evidence`,
Description: `Proof positive of a bounty hunter's job well done`,
},
615: &MarketGroup{
ID: 615,
ParentID: 14,
Name: `Damage Controls`,
Description: `Containment field emitters and redundancy systems for prevention of critical system damage. `,
},
616: &MarketGroup{
ID: 616,
ParentID: 19,
Name: `Insignias`,
Description: `DNA-imprinted personal insignias, used by empire navies. Transport with caution`,
},
617: &MarketGroup{
ID: 617,
ParentID: 314,
Name: `XL Torpedoes`,
Description: `Blueprints of XL torpedoes.`,
},
618: &MarketGroup{
ID: 618,
ParentID: 532,
Name: `Implant Slot 01`,
Description: `Implants intended for Subcervical Processing Slot 1.`,
},
619: &MarketGroup{
ID: 619,
ParentID: 532,
Name: `Implant Slot 02`,
Description: `Implants intended for Subcervical Processing Slot 2.`,
},
620: &MarketGroup{
ID: 620,
ParentID: 532,
Name: `Implant Slot 03`,
Description: `Implants intended for Subcervical Processing Slot 3.`,
},
621: &MarketGroup{
ID: 621,
ParentID: 532,
Name: `Implant Slot 04`,
Description: `Implants intended for Subcervical Processing Slot 4.`,
},
622: &MarketGroup{
ID: 622,
ParentID: 532,
Name: `Implant Slot 05`,
Description: `Implants intended for Subcervical Processing Slot 5.`,
},
629: &MarketGroup{
ID: 629,
ParentID: 1385,
Name: `Transport Ships`,
Description: `Advanced technologies for the transportation of valuable materials.`,
},
630: &MarketGroup{
ID: 630,
ParentID: 629,
Name: `Amarr`,
Description: `Amarr transport ship designs.`,
},
631: &MarketGroup{
ID: 631,
ParentID: 629,
Name: `Caldari`,
Description: `Caldari transport ship designs.`,
},
632: &MarketGroup{
ID: 632,
ParentID: 629,
Name: `Gallente`,
Description: `Gallente transport ship designs.`,
},
633: &MarketGroup{
ID: 633,
ParentID: 629,
Name: `Minmatar`,
Description: `Minmatar transport ship designs.`,
},
634: &MarketGroup{
ID: 634,
ParentID: 204,
Name: `Transport Ships`,
Description: `Blueprints of transport-class vessels.`,
},
635: &MarketGroup{
ID: 635,
ParentID: 634,
Name: `Amarr`,
Description: `Blueprints of Amarr transport designs.`,
},
636: &MarketGroup{
ID: 636,
ParentID: 634,
Name: `Caldari`,
Description: `Blueprints of Caldari transport designs.`,
},
637: &MarketGroup{
ID: 637,
ParentID: 634,
Name: `Gallente`,
Description: `Blueprints of Gallente transport designs.`,
},
638: &MarketGroup{
ID: 638,
ParentID: 634,
Name: `Minmatar`,
Description: `Blueprints of Minmatar transport designs.`,
},
639: &MarketGroup{
ID: 639,
ParentID: 140,
Name: `Rocket Launchers`,
Description: `For the launching of swift-moving rocket projectiles.`,
},
640: &MarketGroup{
ID: 640,
ParentID: 140,
Name: `Light Missile Launchers`,
Description: `For the launching of light missiles.`,
},
641: &MarketGroup{
ID: 641,
ParentID: 140,
Name: `Rapid Light Missile Launchers`,
Description: `Fast-launching, cruiser-sized standard missile bays.`,
},
642: &MarketGroup{
ID: 642,
ParentID: 140,
Name: `Heavy Launchers`,
Description: `For the launching of heavy missiles.`,
},
643: &MarketGroup{
ID: 643,
ParentID: 140,
Name: `Cruise Launchers`,
Description: `For the launching of cruise missiles. Can also fit torpedoes.`,
},
644: &MarketGroup{
ID: 644,
ParentID: 140,
Name: `Torpedo Launchers`,
Description: `For the launching of torpedoes and cruise missiles.`,
},
645: &MarketGroup{
ID: 645,
ParentID: 143,
Name: `Ballistic Control Systems`,
Description: `Systems designed to improve missile launcher efficiency.`,
},
646: &MarketGroup{
ID: 646,
ParentID: 143,
Name: `Gyrostabilizers`,
Description: `Systems designed to improve projectile weapon efficiency.`,
},
647: &MarketGroup{
ID: 647,
ParentID: 143,
Name: `Heat Sinks`,
Description: `Systems designed to improve laser weapon efficiency.`,
},
648: &MarketGroup{
ID: 648,
ParentID: 143,
Name: `Magnetic Field Stabilizers`,
Description: `Systems designed to improve hybrid weapon efficiency.`,
},
655: &MarketGroup{
ID: 655,
ParentID: 9,
Name: `Engineering Equipment`,
Description: `Modules that affect a spaceship's capacitor and modify fitting capabilities`,
},
656: &MarketGroup{
ID: 656,
ParentID: 9,
Name: `Electronics and Sensor Upgrades`,
Description: `Modules that strengthen the sensory capabilities of a spaceship`,
},
657: &MarketGroup{
ID: 657,
ParentID: 9,
Name: `Electronic Warfare`,
Description: `Modules that can disrupt capabilities of enemy targets and defend against same`,
},
658: &MarketGroup{
ID: 658,
ParentID: 655,
Name: `Power Diagnostic Systems`,
Description: `Monitor and optimize the power grid and capacitor systems.`,
},
659: &MarketGroup{
ID: 659,
ParentID: 655,
Name: `Reactor Control Units`,
Description: `Run power core optimization subroutines for maximum efficiency.`,
},
660: &MarketGroup{
ID: 660,
ParentID: 655,
Name: `Auxiliary Power Controls`,
Description: `Supplementary systems providing a flat boost to powercore energy.`,
},
661: &MarketGroup{
ID: 661,
ParentID: 655,
Name: `Energy Neutralizers`,
Description: `Neutralize a portion of the target ship's capacitor energy.`,
},
662: &MarketGroup{
ID: 662,
ParentID: 655,
Name: `Energy Nosferatu`,
Description: `Steal a portion of the target ship's capacitor energy.`,
},
663: &MarketGroup{
ID: 663,
ParentID: 655,
Name: `Remote Capacitor Transmitters`,
Description: `Systems allowing transfer of capacitor energy to another ship.`,
},
664: &MarketGroup{
ID: 664,
ParentID: 655,
Name: `Capacitor Batteries`,
Description: `Allow the capacitor to store more energy.`,
},
665: &MarketGroup{
ID: 665,
ParentID: 655,
Name: `Capacitor Rechargers`,
Description: `Provide a boost to the capacitor's recharge rate.`,
},
666: &MarketGroup{
ID: 666,
ParentID: 655,
Name: `Capacitor Flux Coils`,
Description: `Increase capacitor recharge rate while lowering maximum capacitor capacity.`,
},
667: &MarketGroup{
ID: 667,
ParentID: 655,
Name: `Capacitor Power Relays`,
Description: `Increase capacitor recharge rate at the expense of shield boosting capability.`,
},
668: &MarketGroup{
ID: 668,
ParentID: 655,
Name: `Capacitor Boosters`,
Description: `Provide quick injections of power into the capacitor.`,
},
669: &MarketGroup{
ID: 669,
ParentID: 656,
Name: `Signal Amplifiers`,
Description: `Augment a ship's targeting range and target acquisition time.`,
},
670: &MarketGroup{
ID: 670,
ParentID: 656,
Name: `Automated Targeting Systems`,
Description: `For automated targeting of hostiles.`,
},
671: &MarketGroup{
ID: 671,
ParentID: 656,
Name: `Sensor Boosters`,
Description: `Provide a boost to a ship's targeting range and target acquisition time.`,
},
672: &MarketGroup{
ID: 672,
ParentID: 656,
Name: `Passive Targeting Systems`,
Description: `Allow for surreptitious targeting of hostiles.`,
},
673: &MarketGroup{
ID: 673,
ParentID: 656,
Name: `Remote Sensor Boosters`,
Description: `Augment target ship's targeting range and target acquisition time.`,
},
675: &MarketGroup{
ID: 675,
ParentID: 656,
Name: `Cloaking Devices`,
Description: `Use advanced spatial distortion technology to render ships invisible.`,
},
676: &MarketGroup{
ID: 676,
ParentID: 656,
Name: `CPU Upgrades`,
Description: `Provide an increase in the ship's CPU output.`,
},
677: &MarketGroup{
ID: 677,
ParentID: 657,
Name: `Electronic Counter Measures`,
Description: `Designed to disrupt and lock down enemy ships' sensor arrays.`,
},
678: &MarketGroup{
ID: 678,
ParentID: 657,
Name: `ECM Bursts`,
Description: `Targeting jammers emitting multi-frequency disruptive signals.`,
},
679: &MarketGroup{
ID: 679,
ParentID: 657,
Name: `Remote Sensor Dampeners`,
Description: `Decrease the targeting speed and range of target ship.`,
},
680: &MarketGroup{
ID: 680,
ParentID: 657,
Name: `Weapon Disruptors`,
Description: `Weapon Disruptors`,
},
681: &MarketGroup{
ID: 681,
ParentID: 657,
Name: `Sensor Backup Arrays`,
Description: `Beef up your sensor strength to resist target jamming.`,
},
683: &MarketGroup{
ID: 683,
ParentID: 657,
Name: `Stasis Webifiers`,
Description: `Slow them down, then smack them down.`,
},
685: &MarketGroup{
ID: 685,
ParentID: 657,
Name: `ECCM`,
Description: `Auxiliary systems giving a boost to sensor strength for a short time.`,
},
686: &MarketGroup{
ID: 686,
ParentID: 657,
Name: `Projected ECCM`,
Description: `Boost a target ship's sensor strength for a short time.`,
},
687: &MarketGroup{
ID: 687,
ParentID: 554,
Name: `Shield Flux Coils`,
Description: `Increase shield recharge rate while lowering maximum shield capacity.`,
},
688: &MarketGroup{
ID: 688,
ParentID: 554,
Name: `Shield Power Relays`,
Description: `Divert power from ship's core to shields, increasing shield recharge rate.`,
},
689: &MarketGroup{
ID: 689,
ParentID: 661,
Name: `Small`,
Description: `Frigate-sized energy destabilizers.`,
},
690: &MarketGroup{
ID: 690,
ParentID: 661,
Name: `Medium`,
Description: `Cruiser-sized energy destabilizers.`,
},
691: &MarketGroup{
ID: 691,
ParentID: 661,
Name: `Heavy`,
Description: `Battleship-sized energy destabilizers.`,
},
692: &MarketGroup{
ID: 692,
ParentID: 662,
Name: `Small`,
Description: `Frigate-sized energy vampires.`,
},
693: &MarketGroup{
ID: 693,
ParentID: 662,
Name: `Medium`,
Description: `Cruiser-sized energy vampires.`,
},
694: &MarketGroup{
ID: 694,
ParentID: 662,
Name: `Heavy`,
Description: `Battleship-sized energy vampires.`,
},
695: &MarketGroup{
ID: 695,
ParentID: 663,
Name: `Small`,
Description: `Frigate-sized energy transfer arrays.`,
},
696: &MarketGroup{
ID: 696,
ParentID: 663,
Name: `Medium`,
Description: `Cruiser-sized energy transfer arrays.`,
},
697: &MarketGroup{
ID: 697,
ParentID: 663,
Name: `Large`,
Description: `Battleship-sized energy transfer arrays.`,
},
698: &MarketGroup{
ID: 698,
ParentID: 668,
Name: `Micro`,
Description: `Small capacitor boosters, for when powergrid and CPU are scarce.`,
},
699: &MarketGroup{
ID: 699,
ParentID: 668,
Name: `Small`,
Description: `Frigate-sized capacitor boosters.`,
},
700: &MarketGroup{
ID: 700,
ParentID: 668,
Name: `Medium`,
Description: `Cruiser-sized capacitor boosters.`,
},
701: &MarketGroup{
ID: 701,
ParentID: 668,
Name: `Heavy`,
Description: `Battleship-sized capacitor boosters.`,
},
702: &MarketGroup{
ID: 702,
ParentID: 664,
Name: `Micro`,
Description: `Give a small increase in capacitor energy.`,
},
703: &MarketGroup{
ID: 703,
ParentID: 664,
Name: `Small`,
Description: `Frigate-sized capacitor batteries.`,
},
704: &MarketGroup{
ID: 704,
ParentID: 664,
Name: `Medium`,
Description: `Cruiser-sized capacitor batteries.`,
},
705: &MarketGroup{
ID: 705,
ParentID: 664,
Name: `Large`,
Description: `Battleship-sized capacitor batteries.`,
},
706: &MarketGroup{
ID: 706,
ParentID: 143,
Name: `Tracking Computers`,
Description: `Computerized systems designed to improve turret tracking.`,
},
707: &MarketGroup{
ID: 707,
ParentID: 143,
Name: `Tracking Enhancers`,
Description: `Hardwired systems designed to improve turret tracking.`,
},
708: &MarketGroup{
ID: 708,
ParentID: 143,
Name: `Remote Tracking Computers`,
Description: `Designed to give a boost to another ship's tracking speed.`,
},
711: &MarketGroup{
ID: 711,
ParentID: 1708,
Name: `Cargo Scanners`,
Description: `For remote scanning of a target ship's cargo hold.`,
},
712: &MarketGroup{
ID: 712,
ParentID: 1708,
Name: `Scan Probe Launchers`,
Description: `Launcher bays designed to fire scan probes for stellar triangulation.`,
},
713: &MarketGroup{
ID: 713,
ParentID: 1708,
Name: `Ship Scanners`,
Description: `Provide tactical analysis of a target ship's capabilities.`,
},
714: &MarketGroup{
ID: 714,
ParentID: 1708,
Name: `Survey Scanners`,
Description: `For analysis of the composition of asteroids and other celestial objects.`,
},
715: &MarketGroup{
ID: 715,
ParentID: 677,
Name: `Magnetometric Jammers`,
Description: `Targeting jammers specialized for use against magnetometric sensors.`,
},
716: &MarketGroup{
ID: 716,
ParentID: 677,
Name: `Ladar Jammers`,
Description: `Targeting jammers specialized for use against Ladar sensors.`,
},
717: &MarketGroup{
ID: 717,
ParentID: 677,
Name: `Gravimetric Jammers`,
Description: `Targeting jammers specialized for use against gravimetric sensors.`,
},
718: &MarketGroup{
ID: 718,
ParentID: 677,
Name: `Radar Jammers`,
Description: `Targeting jammers specialized for use against radar sensors.`,
},
719: &MarketGroup{
ID: 719,
ParentID: 677,
Name: `Multi Spectrum Jammers`,
Description: `Targeting jammers that induce multispectral sensor disruption.`,
},
720: &MarketGroup{
ID: 720,
ParentID: 681,
Name: `Gravimetric Backup Arrays`,
Description: `Reduces the sensors' vulnerability to gravimetric jamming.`,
},
721: &MarketGroup{
ID: 721,
ParentID: 681,
Name: `Ladar Backup Arrays`,
Description: `Reduces the sensors' vulnerability to Ladar jamming.`,
},
722: &MarketGroup{
ID: 722,
ParentID: 681,
Name: `Radar Backup Arrays`,
Description: `Reduces the sensors' vulnerability to Radar jamming.`,
},
723: &MarketGroup{
ID: 723,
ParentID: 681,
Name: `Magnetometric Backup Arrays`,
Description: `Reduces the sensors' vulnerability to magnetometric jamming.`,
},
724: &MarketGroup{
ID: 724,
ParentID: 681,
Name: `Multi-Frequency Backup Arrays`,
Description: `Reduces the sensors' vulnerability to multiple forms of jamming.`,
},
725: &MarketGroup{
ID: 725,
ParentID: 685,
Name: `Gravimetric Sensors`,
Description: `Boost a ship's gravimetric sensor strength for a short duration.`,
},
726: &MarketGroup{
ID: 726,
ParentID: 685,
Name: `Ladar Sensors`,
Description: `Boost a ship's Ladar sensor strength for a short duration.`,
},
727: &MarketGroup{
ID: 727,
ParentID: 685,
Name: `Magnetometric Sensors`,
Description: `Boost a ship's magnetometric sensor strength for a short duration.`,
},
728: &MarketGroup{
ID: 728,
ParentID: 685,
Name: `Multi-Spectrum Sensors`,
Description: `Boost a ship's sensors across the spectrum for a short duration.`,
},
729: &MarketGroup{
ID: 729,
ParentID: 685,
Name: `Radar Sensors`,
Description: `Boost a ship's radar sensor strength for a short duration.`,
},
730: &MarketGroup{
ID: 730,
ParentID: 616,
Name: `Amarr Navy`,
Description: `Officially sanctioned Imperial Navy insignias.`,
},
731: &MarketGroup{
ID: 731,
ParentID: 616,
Name: `Ammatar Navy`,
Description: `Officially sanctioned Ammatar Navy insignias.`,
},
732: &MarketGroup{
ID: 732,
ParentID: 616,
Name: `Caldari Navy`,
Description: `Officially sanctioned Caldari Navy insignias.`,
},
733: &MarketGroup{
ID: 733,
ParentID: 616,
Name: `CONCORD`,
Description: `Officially sanctioned CONCORD insignias.`,
},
734: &MarketGroup{
ID: 734,
ParentID: 616,
Name: `Gallente Navy`,
Description: `Officially sanctioned Gallente Navy insignias.`,
},
735: &MarketGroup{
ID: 735,
ParentID: 616,
Name: `Khanid Navy`,
Description: `Officially sanctioned Khanid Navy insignias.`,
},
736: &MarketGroup{
ID: 736,
ParentID: 616,
Name: `Minmatar Fleet`,
Description: `Officially sanctioned Minmatar Fleet insignias.`,
},
737: &MarketGroup{
ID: 737,
ParentID: 616,
Name: `Individuals`,
Description: `Individual identification tags.`,
},
738: &MarketGroup{
ID: 738,
ParentID: 19,
Name: `Nexus Chips`,
Description: `Stores artificial intelligence systems. Used when acquiring special ships from Loyalty Point stores`,
},
739: &MarketGroup{
ID: 739,
ParentID: 19,
Name: `Criminal Dog Tags`,
Description: `Criminal organizations' identification tags. Can be traded to the empires`,
},
740: &MarketGroup{
ID: 740,
ParentID: 739,
Name: `Angels`,
Description: `Angel Cartel identification tags.`,
},
741: &MarketGroup{
ID: 741,
ParentID: 739,
Name: `Blood Raiders`,
Description: `Blood Raider identification tags.`,
},
742: &MarketGroup{
ID: 742,
ParentID: 739,
Name: `Dark Blood`,
Description: `Dark Blood identification tags.`,
},
743: &MarketGroup{
ID: 743,
ParentID: 739,
Name: `Domination`,
Description: `Angel Dominations identification tags.`,
},
744: &MarketGroup{
ID: 744,
ParentID: 739,
Name: `Dread Guristas`,
Description: `Dread Guristas identification tags.`,
},
745: &MarketGroup{
ID: 745,
ParentID: 739,
Name: `Guristas`,
Description: `Guristas identification tags.`,
},
746: &MarketGroup{
ID: 746,
ParentID: 739,
Name: `Sansha`,
Description: `Sansha's Nation identification tags.`,
},
747: &MarketGroup{
ID: 747,
ParentID: 739,
Name: `Serpentis`,
Description: `Serpentis identification tags.`,
},
748: &MarketGroup{
ID: 748,
ParentID: 739,
Name: `Shadow Serpentis`,
Description: `Shadow Serpentis identification tags.`,
},
749: &MarketGroup{
ID: 749,
ParentID: 739,
Name: `True Sansha`,
Description: `True Sansha identification tags.`,
},
750: &MarketGroup{
ID: 750,
ParentID: 739,
Name: `Commanders`,
Description: `The identification tags of CONCORD's most wanted.`,
},
751: &MarketGroup{
ID: 751,
ParentID: 614,
Name: `Overseer's Personal Effects`,
Description: `Proof that there's one less evil bastard in the world.`,
},
752: &MarketGroup{
ID: 752,
ParentID: 614,
Name: `Criminal DNA Patterns`,
Description: `Physical proof of a dastardly villain's demise.`,
},
753: &MarketGroup{
ID: 753,
ParentID: 211,
Name: `Mining Crystals`,
Description: `Blueprints of mining crystals.`,
},
754: &MarketGroup{
ID: 754,
ParentID: 19,
Name: `Political Paraphernalia`,
Description: `Political propaganda and those that engage in it`,
},
757: &MarketGroup{
ID: 757,
ParentID: 657,
Name: `Target Painters`,
Description: `Project an electronic signal towards a target, increasing its signature radius.`,
},
761: &MarketGroup{
ID: 761,
ParentID: 1381,
Name: `Dreadnoughts`,
Description: `Capital ships created for extended sieges of stationary installations.`,
},
762: &MarketGroup{
ID: 762,
ParentID: 761,
Name: `Amarr`,
Description: `Amarr dreadnought designs.`,
},
763: &MarketGroup{
ID: 763,
ParentID: 761,
Name: `Caldari`,
Description: `Caldari dreadnought designs.`,
},
764: &MarketGroup{
ID: 764,
ParentID: 761,
Name: `Gallente`,
Description: `Gallente dreadnought designs.`,
},
765: &MarketGroup{
ID: 765,
ParentID: 761,
Name: `Minmatar`,
Description: `Minmatar dreadnought designs.`,
},
766: &MarketGroup{
ID: 766,
ParentID: 1381,
Name: `Freighters`,
Description: `Capital ships, able to transport a world and a half and then some.`,
},
767: &MarketGroup{
ID: 767,
ParentID: 766,
Name: `Amarr`,
Description: `Amarr freighter designs.`,
},
768: &MarketGroup{
ID: 768,
ParentID: 766,
Name: `Caldari`,
Description: `Caldari freighter designs.`,
},
769: &MarketGroup{
ID: 769,
ParentID: 766,
Name: `Gallente`,
Description: `Gallente freighter designs.`,
},
770: &MarketGroup{
ID: 770,
ParentID: 766,
Name: `Minmatar`,
Description: `Minmatar freighter designs.`,
},
771: &MarketGroup{
ID: 771,
ParentID: 556,
Name: `Extra Large`,
Description: `Capital ship blasters, for use on dreadnoughts and titans.`,
},
772: &MarketGroup{
ID: 772,
ParentID: 555,
Name: `Extra Large`,
Description: `Capital ship railguns, for use on dreadnoughts and titans.`,
},
773: &MarketGroup{
ID: 773,
ParentID: 557,
Name: `Extra Large`,
Description: `Capital ship beam lasers, for use on dreadnoughts and titans.`,
},
774: &MarketGroup{
ID: 774,
ParentID: 558,
Name: `Extra Large`,
Description: `Capital ship pulse lasers, for use on dreadnoughts and titans.`,
},
775: &MarketGroup{
ID: 775,
ParentID: 560,
Name: `Extra Large`,
Description: `Capital ship artillery cannons, for use on dreadnoughts and titans.`,
},
776: &MarketGroup{
ID: 776,
ParentID: 559,
Name: `Extra Large`,
Description: `Capital ship autocannons, for use on dreadnoughts and titans.`,
},
777: &MarketGroup{
ID: 777,
ParentID: 140,
Name: `XL Launchers`,
Description: `For the launching of XL torpedoes. Can only be fitted on dreadnoughts and titans.`,
},
778: &MarketGroup{
ID: 778,
ParentID: 552,
Name: `Capital`,
Description: `Shield boosters for capital ships`,
},
779: &MarketGroup{
ID: 779,
ParentID: 9,
Name: `Fleet Assistance Modules`,
Description: `Modules used to assist fleet members`,
},
781: &MarketGroup{
ID: 781,
ParentID: 1035,
Name: `Standard Capital Ship Components`,
Description: `Modular components used in the manufacture of capital ships.`,
},
782: &MarketGroup{
ID: 782,
ParentID: 204,
Name: `Dreadnoughts`,
Description: `Blueprints of dreadnought-class vessels.`,
},
783: &MarketGroup{
ID: 783,
ParentID: 782,
Name: `Amarr`,
Description: `Blueprints of Amarr dreadnought designs.`,
},
784: &MarketGroup{
ID: 784,
ParentID: 782,
Name: `Caldari`,
Description: `Blueprints of Caldari dreadnought designs.`,
},
785: &MarketGroup{
ID: 785,
ParentID: 782,
Name: `Gallente`,
Description: `Blueprints of Gallente dreadnought designs.`,
},
786: &MarketGroup{
ID: 786,
ParentID: 782,
Name: `Minmatar`,
Description: `Blueprints of Minmatar dreadnought designs.`,
},
787: &MarketGroup{
ID: 787,
ParentID: 204,
Name: `Freighters`,
Description: `Blueprints of freighter-class vessels.`,
},
788: &MarketGroup{
ID: 788,
ParentID: 787,
Name: `Amarr`,
Description: `Blueprints of Amarr freighter designs.`,
},
789: &MarketGroup{
ID: 789,
ParentID: 787,
Name: `Caldari`,
Description: `Blueprints of Caldari freighter designs.`,
},
790: &MarketGroup{
ID: 790,
ParentID: 787,
Name: `Gallente`,
Description: `Blueprints of Gallente freighter designs.`,
},
791: &MarketGroup{
ID: 791,
ParentID: 787,
Name: `Minmatar`,
Description: `Blueprints of Minmatar freighter designs.`,
},
792: &MarketGroup{
ID: 792,
ParentID: 286,
Name: `Extra Large`,
Description: `Blueprints of capital-sized hybrid turrets.`,
},
793: &MarketGroup{
ID: 793,
ParentID: 287,
Name: `Extra Large`,
Description: `Blueprints of capital-sized projectile turrets.`,
},
794: &MarketGroup{
ID: 794,
ParentID: 288,
Name: `Extra Large`,
Description: `Blueprints of capital-sized laser turrets.`,
},
796: &MarketGroup{
ID: 796,
ParentID: 800,
Name: `Standard Capital Ship Components`,
Description: `Blueprints of Capital Ship Components.`,
},
798: &MarketGroup{
ID: 798,
ParentID: 800,
Name: `Outpost Components`,
Description: `Blueprints of Outpost Construction Platforms.`,
},
799: &MarketGroup{
ID: 799,
ParentID: 209,
Name: `Fleet Assistance Modules`,
Description: `Blueprints of Fleet Assistance Modules.`,
},
800: &MarketGroup{
ID: 800,
ParentID: 1041,
Name: `Components`,
Description: `Blueprints for intermediary items used in the production of more advanced items.`,
},
801: &MarketGroup{
ID: 801,
ParentID: 143,
Name: `Siege Modules`,
Description: `Modules designed to augment and enhance a capital ship's siege warfare abilities.`,
},
802: &MarketGroup{
ID: 802,
ParentID: 65,
Name: `Amarr`,
Description: ``,
},
803: &MarketGroup{
ID: 803,
ParentID: 65,
Name: `Caldari`,
Description: ``,
},
812: &MarketGroup{
ID: 812,
ParentID: 1381,
Name: `Titans`,
Description: `Fathers of the fleet, the biggest of the big.`,
},
813: &MarketGroup{
ID: 813,
ParentID: 812,
Name: `Amarr`,
Description: `Amarr titan designs.`,
},
814: &MarketGroup{
ID: 814,
ParentID: 812,
Name: `Caldari`,
Description: `Caldari titan designs.`,
},
815: &MarketGroup{
ID: 815,
ParentID: 812,
Name: `Gallente`,
Description: `Gallente titan designs.`,
},
816: &MarketGroup{
ID: 816,
ParentID: 812,
Name: `Minmatar`,
Description: `Minmatar titan designs.`,
},
817: &MarketGroup{
ID: 817,
ParentID: 1381,
Name: `Carriers`,
Description: `Capital warships, able to transport and deploy hordes of smaller vessels into the field.`,
},
818: &MarketGroup{
ID: 818,
ParentID: 817,
Name: `Amarr`,
Description: `Amarr carrier designs.`,
},
819: &MarketGroup{
ID: 819,
ParentID: 817,
Name: `Caldari`,
Description: `Caldari carrier designs.`,
},
820: &MarketGroup{
ID: 820,
ParentID: 817,
Name: `Gallente`,
Description: `Gallente carrier designs.`,
},
821: &MarketGroup{
ID: 821,
ParentID: 817,
Name: `Minmatar`,
Description: `Minmatar carrier designs.`,
},
822: &MarketGroup{
ID: 822,
ParentID: 1375,
Name: `Command Ships`,
Description: `Battlecruiser-class vessels, designed to aid their allies on the battlefield.`,
},
823: &MarketGroup{
ID: 823,
ParentID: 1373,
Name: `Interdictors`,
Description: `Destroyer-class vessels designed to pull other starships out of warp.`,
},
824: &MarketGroup{
ID: 824,
ParentID: 1368,
Name: `Recon Ships`,
Description: `Cruisers specialized in reconaissance operations and electronic warfare.`,
},
825: &MarketGroup{
ID: 825,
ParentID: 822,
Name: `Amarr`,
Description: `Amarr command ship designs.`,
},
826: &MarketGroup{
ID: 826,
ParentID: 823,
Name: `Amarr`,
Description: `Amarr interdictor designs.`,
},
827: &MarketGroup{
ID: 827,
ParentID: 824,
Name: `Amarr`,
Description: `Amarr recon ship designs.`,
},
828: &MarketGroup{
ID: 828,
ParentID: 822,
Name: `Caldari`,
Description: `Caldari command ship designs.`,
},
829: &MarketGroup{
ID: 829,
ParentID: 823,
Name: `Caldari`,
Description: `Caldari interdictor designs.`,
},
830: &MarketGroup{
ID: 830,
ParentID: 824,
Name: `Caldari`,
Description: `Caldari recon ship designs.`,
},
831: &MarketGroup{
ID: 831,
ParentID: 822,
Name: `Gallente`,
Description: `Gallente command ship designs.`,
},
832: &MarketGroup{
ID: 832,
ParentID: 823,
Name: `Gallente`,
Description: `Gallente interdictor designs.`,
},
833: &MarketGroup{
ID: 833,
ParentID: 824,
Name: `Gallente`,
Description: `Gallente recon ship designs.`,
},
834: &MarketGroup{
ID: 834,
ParentID: 822,
Name: `Minmatar`,
Description: `Minmatar command ship designs.`,
},
835: &MarketGroup{
ID: 835,
ParentID: 823,
Name: `Minmatar`,
Description: `Minmatar interdictor designs.`,
},
836: &MarketGroup{
ID: 836,
ParentID: 824,
Name: `Minmatar`,
Description: `Minmatar recon ship designs.`,
},
837: &MarketGroup{
ID: 837,
ParentID: 159,
Name: `Light Scout Drones`,
Description: `Light Scout Drones`,
},
838: &MarketGroup{
ID: 838,
ParentID: 159,
Name: `Medium Scout Drones`,
Description: `Medium Scout Drones`,
},
839: &MarketGroup{
ID: 839,
ParentID: 159,
Name: `Heavy Attack Drones`,
Description: `Heavy Attack Drones`,
},
840: &MarketGroup{
ID: 840,
ParentID: 2410,
Name: `Fighters`,
Description: `Single-pilot combat vessels, deployable from Carriers and Supercarriers.`,
},
841: &MarketGroup{
ID: 841,
ParentID: 157,
Name: `Electronic Warfare Drones`,
Description: `Drones that affect the tracking and sensor capabilities of ships`,
},
842: &MarketGroup{
ID: 842,
ParentID: 157,
Name: `Logistic Drones`,
Description: `Drones that provide support to friendly ships`,
},
843: &MarketGroup{
ID: 843,
ParentID: 157,
Name: `Combat Utility Drones`,
Description: `Drones that interfere with the performance of enemy ships`,
},
845: &MarketGroup{
ID: 845,
ParentID: 99,
Name: `Standard Ammo`,
Description: `Standard projectile rounds.`,
},
846: &MarketGroup{
ID: 846,
ParentID: 99,
Name: `Advanced Artillery Ammo`,
Description: `Ultra-heavy shells incorporating cutting-edge technology.`,
},
847: &MarketGroup{
ID: 847,
ParentID: 99,
Name: `Advanced Autocannon Ammo`,
Description: `Sophisticated autocannon munitions incorporating cutting-edge technology.`,
},
848: &MarketGroup{
ID: 848,
ParentID: 100,
Name: `Standard Charges`,
Description: `Standard hybrid slugs.`,
},
849: &MarketGroup{
ID: 849,
ParentID: 100,
Name: `Advanced Blaster Charges`,
Description: `Technologically advanced blaster ammunition.`,
},
850: &MarketGroup{
ID: 850,
ParentID: 100,
Name: `Advanced Railgun Charges`,
Description: `Technologically advanced railgun ammunition.`,
},
851: &MarketGroup{
ID: 851,
ParentID: 101,
Name: `Standard Crystals`,
Description: `The standard in energy beam frequenzy modulation.`,
},
852: &MarketGroup{
ID: 852,
ParentID: 101,
Name: `Advanced Beam Laser Crystals`,
Description: `Technologically advanced beam laser crystals.`,
},
853: &MarketGroup{
ID: 853,
ParentID: 101,
Name: `Advanced Pulse Laser Crystals`,
Description: `Technologically advanced pulse laser crystals.`,
},
854: &MarketGroup{
ID: 854,
ParentID: 846,
Name: `Large`,
Description: `Large ADAR ammunition, fired by battleship-sized guns.`,
},
855: &MarketGroup{
ID: 855,
ParentID: 846,
Name: `Medium`,
Description: `Medium ADAR ammunition, fired by cruiser-sized guns.`,
},
856: &MarketGroup{
ID: 856,
ParentID: 846,
Name: `Small`,
Description: `Small ADAR ammunition, fired by frigate-sized guns.`,
},
857: &MarketGroup{
ID: 857,
ParentID: 847,
Name: `Large`,
Description: `Large ADAC ammunition, fired by battleship-sized guns.`,
},
858: &MarketGroup{
ID: 858,
ParentID: 847,
Name: `Medium`,
Description: `Medium ADAC ammunition, fired by cruiser-sized guns.`,
},
859: &MarketGroup{
ID: 859,
ParentID: 847,
Name: `Small`,
Description: `Small ADAC ammunition, fired by frigate-sized guns.`,
},
860: &MarketGroup{
ID: 860,
ParentID: 849,
Name: `Large`,
Description: `Large advanced hybrid shells, fired by battleship-sized guns.`,
},
861: &MarketGroup{
ID: 861,
ParentID: 849,
Name: `Medium`,
Description: `Medium advanced hybrid shells, fired by cruiser-sized guns.`,
},
862: &MarketGroup{
ID: 862,
ParentID: 849,
Name: `Small`,
Description: `Small advanced hybrid shells, fired by frigate-sized guns.`,
},
863: &MarketGroup{
ID: 863,
ParentID: 850,
Name: `Large`,
Description: `Large advanced railgun ammunition, fired by battleship-sized guns.`,
},
864: &MarketGroup{
ID: 864,
ParentID: 850,
Name: `Medium`,
Description: `Medium advanced railgun ammunition, fired by cruiser-sized guns.`,
},
865: &MarketGroup{
ID: 865,
ParentID: 850,
Name: `Small`,
Description: `Small advanced railgun ammunition, fired by frigate-sized guns.`,
},
866: &MarketGroup{
ID: 866,
ParentID: 852,
Name: `Large`,
Description: `Large advanced beam laser crystals, for use with battleship-sized lasers.`,
},
867: &MarketGroup{
ID: 867,
ParentID: 852,
Name: `Medium`,
Description: `Medium advanced beam laser crystals, for use with cruiser-sized lasers.`,
},
868: &MarketGroup{
ID: 868,
ParentID: 852,
Name: `Small`,
Description: `Small advanced beam laser crystals, for use with frigate-sized lasers.`,
},
869: &MarketGroup{
ID: 869,
ParentID: 853,
Name: `Large`,
Description: `Large advanced pulse laser crystals, for use with battleship-sized lasers.`,
},
870: &MarketGroup{
ID: 870,
ParentID: 853,
Name: `Medium`,
Description: `Medium advanced pulse laser crystals, for use with cruiser-sized lasers.`,
},
871: &MarketGroup{
ID: 871,
ParentID: 853,
Name: `Small`,
Description: `Small advanced pulse laser crystals, for use with frigate-sized lasers.`,
},
872: &MarketGroup{
ID: 872,
ParentID: 656,
Name: `Tractor Beams`,
Description: `Tractor Beams`,
},
874: &MarketGroup{
ID: 874,
ParentID: 1384,
Name: `Exhumers`,
Description: `Elite Mining Barges`,
},
878: &MarketGroup{
ID: 878,
ParentID: 204,
Name: `Titans`,
Description: `Blueprints of titan-class vessels.`,
},
879: &MarketGroup{
ID: 879,
ParentID: 204,
Name: `Carriers`,
Description: `Blueprints of carrier- and mothership-class vessels.`,
},
880: &MarketGroup{
ID: 880,
ParentID: 204,
Name: `Interdictors`,
Description: `Blueprints of interdictor-class vessels.`,
},
881: &MarketGroup{
ID: 881,
ParentID: 204,
Name: `Command Ships`,
Description: `Blueprints of command ship-class vessels.`,
},
882: &MarketGroup{
ID: 882,
ParentID: 204,
Name: `Recon Ships`,
Description: `Blueprints of recon ship-class designs.`,
},
883: &MarketGroup{
ID: 883,
ParentID: 204,
Name: `Exhumers`,
Description: `Blueprints of Exhumers.`,
},
884: &MarketGroup{
ID: 884,
ParentID: 878,
Name: `Amarr`,
Description: `Blueprints of Amarr titan designs.`,
},
885: &MarketGroup{
ID: 885,
ParentID: 878,
Name: `Caldari`,
Description: `Blueprints of Caldari titan designs.`,
},
886: &MarketGroup{
ID: 886,
ParentID: 878,
Name: `Gallente`,
Description: `Blueprints of Gallente titan designs.`,
},
887: &MarketGroup{
ID: 887,
ParentID: 878,
Name: `Minmatar`,
Description: `Blueprints of Minmatar titan designs.`,
},
888: &MarketGroup{
ID: 888,
ParentID: 879,
Name: `Amarr`,
Description: `Blueprints of Amarr carrier and mothership designs.`,
},
889: &MarketGroup{
ID: 889,
ParentID: 879,
Name: `Caldari`,
Description: `Blueprints of Caldari carrier and mothership designs.`,
},
890: &MarketGroup{
ID: 890,
ParentID: 879,
Name: `Gallente`,
Description: `Blueprints of Gallente carrier and mothership designs.`,
},
891: &MarketGroup{
ID: 891,
ParentID: 879,
Name: `Minmatar`,
Description: `Blueprints of Minmatar carrier and mothership designs.`,
},
892: &MarketGroup{
ID: 892,
ParentID: 880,
Name: `Amarr`,
Description: `Blueprints of Amarr interdictor designs.`,
},
893: &MarketGroup{
ID: 893,
ParentID: 880,
Name: `Caldari`,
Description: `Blueprints of Caldari interdictor designs.`,
},
894: &MarketGroup{
ID: 894,
ParentID: 880,
Name: `Gallente`,
Description: `Blueprints of Gallente interdictor designs.`,
},
895: &MarketGroup{
ID: 895,
ParentID: 880,
Name: `Minmatar`,
Description: `Blueprints of Minmatar interdictor designs.`,
},
896: &MarketGroup{
ID: 896,
ParentID: 881,
Name: `Amarr`,
Description: `Blueprints of Amarr command ship designs.`,
},
897: &MarketGroup{
ID: 897,
ParentID: 881,
Name: `Caldari`,
Description: `Blueprints of Caldari command ship designs.`,
},
898: &MarketGroup{
ID: 898,
ParentID: 881,
Name: `Gallente`,
Description: `Blueprints of Gallente command ship designs.`,
},
899: &MarketGroup{
ID: 899,
ParentID: 881,
Name: `Minmatar`,
Description: `Blueprints of Minmatar command ship designs.`,
},
900: &MarketGroup{
ID: 900,
ParentID: 882,
Name: `Amarr`,
Description: `Blueprints of Amarr recon ship designs.`,
},
901: &MarketGroup{
ID: 901,
ParentID: 882,
Name: `Caldari`,
Description: `Blueprints of Caldari recon ship designs.`,
},
902: &MarketGroup{
ID: 902,
ParentID: 882,
Name: `Gallente`,
Description: `Blueprints of Gallente recon ship designs.`,
},
903: &MarketGroup{
ID: 903,
ParentID: 882,
Name: `Minmatar`,
Description: `Blueprints of Minmatar recon ship designs.`,
},
904: &MarketGroup{
ID: 904,
ParentID: 883,
Name: `ORE`,
Description: `Blueprints of ORE exhumer designs.`,
},
905: &MarketGroup{
ID: 905,
ParentID: 1578,
Name: `Tractor Beams`,
Description: `Blueprints of tractor beams.`,
},
910: &MarketGroup{
ID: 910,
ParentID: 663,
Name: `Capital`,
Description: `Capital ship-sized energy transfer arrays.`,
},
911: &MarketGroup{
ID: 911,
ParentID: 159,
Name: `Sentry Drones`,
Description: `Deployable watchdogs.`,
},
912: &MarketGroup{
ID: 912,
ParentID: 10,
Name: `Superweapons`,
Description: `The titan's fist.`,
},
913: &MarketGroup{
ID: 913,
ParentID: 210,
Name: `Superweapons`,
Description: `Doomsday Device and other Superweapon blueprints`,
},
914: &MarketGroup{
ID: 914,
ParentID: 115,
Name: `Standard Auto-Targeting`,
Description: `Standard Auto-Targeting`,
},
917: &MarketGroup{
ID: 917,
ParentID: 117,
Name: `Advanced High Precision Light Missiles`,
Description: `Advanced High Precision Light Missiles`,
},
918: &MarketGroup{
ID: 918,
ParentID: 580,
Name: `Advanced High Precision Cruise Missiles`,
Description: `Advanced High Precision Cruise Missiles`,
},
919: &MarketGroup{
ID: 919,
ParentID: 581,
Name: `Advanced High Precision Heavy Missiles`,
Description: `Advanced High Precision Heavy Missiles`,
},
920: &MarketGroup{
ID: 920,
ParentID: 117,
Name: `Standard Light Missiles`,
Description: `Standard Light Missiles`,
},
921: &MarketGroup{
ID: 921,
ParentID: 580,
Name: `Standard Cruise Missiles`,
Description: `Standard Cruise Missiles`,
},
922: &MarketGroup{
ID: 922,
ParentID: 118,
Name: `Standard Rockets`,
Description: `Standard Rockets`,
},
923: &MarketGroup{
ID: 923,
ParentID: 387,
Name: `Standard Torpedoes`,
Description: `Standard Torpedoes`,
},
924: &MarketGroup{
ID: 924,
ParentID: 581,
Name: `Standard Heavy Missiles`,
Description: `Standard Heavy Missiles`,
},
925: &MarketGroup{
ID: 925,
ParentID: 580,
Name: `Advanced High Damage Cruise Missiles`,
Description: `Advanced High Damage Cruise Missiles`,
},
926: &MarketGroup{
ID: 926,
ParentID: 581,
Name: `Advanced High Damage Heavy Missiles`,
Description: `Advanced High Damage Heavy Missiles`,
},
927: &MarketGroup{
ID: 927,
ParentID: 117,
Name: `Advanced High Damage Light Missiles`,
Description: `Advanced High Damage Light Missiles`,
},
928: &MarketGroup{
ID: 928,
ParentID: 118,
Name: `Advanced Long Range Rockets`,
Description: `Advanced Long Range Rockets`,
},
929: &MarketGroup{
ID: 929,
ParentID: 387,
Name: `Advanced Long Range Torpedoes`,
Description: `Advanced Long Range Torpedoes`,
},
930: &MarketGroup{
ID: 930,
ParentID: 118,
Name: `Advanced Anti-Ship Rockets`,
Description: `Advanced Anti-Ship Rockets`,
},
931: &MarketGroup{
ID: 931,
ParentID: 387,
Name: `Advanced Anti-Ship Torpedoes`,
Description: `Advanced Anti-Ship Torpedoes`,
},
932: &MarketGroup{
ID: 932,
ParentID: 1285,
Name: `Assembly Arrays`,
Description: `Structures which house manufacturing facilities.`,
},
933: &MarketGroup{
ID: 933,
ParentID: 1285,
Name: `Laboratory`,
Description: `Structures with research facilities.`,
},
935: &MarketGroup{
ID: 935,
ParentID: 1713,
Name: `Mining Upgrades`,
Description: `Subsystems designed to improve mining laser efficiency.`,
},
937: &MarketGroup{
ID: 937,
ParentID: 1711,
Name: `Mining Upgrades`,
Description: `Blueprints of mining upgrades.`,
},
938: &MarketGroup{
ID: 938,
ParentID: 9,
Name: `Drone Upgrades`,
Description: `Modules that increase drone efficiency`,
},
939: &MarketGroup{
ID: 939,
ParentID: 209,
Name: `Drone Upgrades`,
Description: `Blueprints of drone upgrades.`,
},
940: &MarketGroup{
ID: 940,
ParentID: 19,
Name: `Starbase Charters`,
Description: `The required licenses to operate starbases within the borders of the empires`,
},
943: &MarketGroup{
ID: 943,
ParentID: 2,
Name: `Ship Modifications`,
Description: `Blueprints of rigs used to modify ship performance`,
},
944: &MarketGroup{
ID: 944,
ParentID: 943,
Name: `Armor Rigs`,
Description: `Permanent modification of a ship's armor facilities.`,
},
945: &MarketGroup{
ID: 945,
ParentID: 943,
Name: `Astronautic Rigs`,
Description: `Permanent modification of a ship's astronautic facilities.`,
},
946: &MarketGroup{
ID: 946,
ParentID: 943,
Name: `Drone Rigs`,
Description: `Permanent modification of a ship's drone facilities.`,
},
948: &MarketGroup{
ID: 948,
ParentID: 943,
Name: `Electronics Superiority Rigs`,
Description: `Permanent modification of a ship's electronics superiority facilities.`,
},
949: &MarketGroup{
ID: 949,
ParentID: 943,
Name: `Engineering Rigs`,
Description: `Permanent modification of a ship's energy grid facilities.`,
},
950: &MarketGroup{
ID: 950,
ParentID: 943,
Name: `Energy Weapon Rigs`,
Description: `Permanent modification of a ship's energy weapon facilities.`,
},
951: &MarketGroup{
ID: 951,
ParentID: 943,
Name: `Hybrid Weapon Rigs`,
Description: `Permanent modification of a ship's hybrid weapon facilities.`,
},
952: &MarketGroup{
ID: 952,
ParentID: 943,
Name: `Missile Launcher Rigs`,
Description: `Permanent modification of a ship's missile launcher facilities.`,
},
953: &MarketGroup{
ID: 953,
ParentID: 943,
Name: `Projectile Weapon Rigs`,
Description: `Permanent modification of a ship's projectile weapon facilities.`,
},
954: &MarketGroup{
ID: 954,
ParentID: 943,
Name: `Shield Rigs`,
Description: `Permanent modification of a ship's shield facilities.`,
},
955: &MarketGroup{
ID: 955,
ParentID: 0,
Name: `Ship and Module Modifications`,
Description: `Rigs and subsystems that alter a starship's attributes in such a way as to make it more effective in its chosen role
Mutaplasmids that permanently alter a module's attributes`,
},
956: &MarketGroup{
ID: 956,
ParentID: 1111,
Name: `Armor Rigs`,
Description: `Permanent modification of a ship's armor facilities.`,
},
957: &MarketGroup{
ID: 957,
ParentID: 1111,
Name: `Astronautic Rigs`,
Description: `Permanent modification of a ship's astronautic facilities.`,
},
958: &MarketGroup{
ID: 958,
ParentID: 1111,
Name: `Drone Rigs`,
Description: `Permanent modification of a ship's drone facilities.`,
},
960: &MarketGroup{
ID: 960,
ParentID: 1111,
Name: `Electronics Superiority Rigs`,
Description: `Permanent modification of a ship's electronics superiority facilities.`,
},
961: &MarketGroup{
ID: 961,
ParentID: 1111,
Name: `Engineering Rigs`,
Description: `Permanent modification of a ship's energy grid facilities.`,
},
962: &MarketGroup{
ID: 962,
ParentID: 1111,
Name: `Energy Weapon Rigs`,
Description: `Permanent modification of a ship's energy weapon facilities.`,
},
963: &MarketGroup{
ID: 963,
ParentID: 1111,
Name: `Hybrid Weapon Rigs`,
Description: `Permanent modification of a ship's hybrid weapon facilities.`,
},
964: &MarketGroup{
ID: 964,
ParentID: 1111,
Name: `Missile Launcher Rigs`,
Description: `Permanent modification of a ship's missile launcher facilities.`,
},
965: &MarketGroup{
ID: 965,
ParentID: 1111,
Name: `Shield Rigs`,
Description: `Permanent modification of a ship's shield facilities.`,
},
967: &MarketGroup{
ID: 967,
ParentID: 677,
Name: `Signal Distortion Amplifier`,
Description: `Ship systems that amplify the distortion signal generated by electronic counter measure systems.`,
},
968: &MarketGroup{
ID: 968,
ParentID: 114,
Name: `Heavy Assault Missiles`,
Description: `Jet-propelled projectiles for heavy assault launchers.`,
},
971: &MarketGroup{
ID: 971,
ParentID: 968,
Name: `Standard Heavy Assault Missiles`,
Description: `Standard heavy assault missiles.`,
},
972: &MarketGroup{
ID: 972,
ParentID: 968,
Name: `Advanced Long Range Heavy Assault Missiles`,
Description: `Advanced long range heavy assault missiles.`,
},
973: &MarketGroup{
ID: 973,
ParentID: 968,
Name: `Advanced Anti-Ship Heavy Assault Missile`,
Description: `Advanced anti-ship heavy assault missiles.`,
},
974: &MarketGroup{
ID: 974,
ParentID: 140,
Name: `Heavy Assault Launchers`,
Description: `For the launching of assault missiles.`,
},
975: &MarketGroup{
ID: 975,
ParentID: 314,
Name: `Heavy Assault Missiles`,
Description: `Blueprints of heavy assault missiles.`,
},
977: &MarketGroup{
ID: 977,
ParentID: 24,
Name: `Booster`,
Description: `Illegal substances that give temporary effects, but with adverse side-effects as well`,
},
979: &MarketGroup{
ID: 979,
ParentID: 1111,
Name: `Projectile Weapon Rigs`,
Description: `Permanent modification of a ship's projectile weapon facilities.`,
},
983: &MarketGroup{
ID: 983,
ParentID: 1032,
Name: `Booster Gas Clouds`,
Description: `Voluminous clouds of various gases that can be found in space.`,
},
986: &MarketGroup{
ID: 986,
ParentID: 99,
Name: `Faction Ammo`,
Description: `Enhanced ammunition designed and manufactured by the factions of New Eden`,
},
987: &MarketGroup{
ID: 987,
ParentID: 986,
Name: `Large`,
Description: `Large faction ammunition, fired by battleship-sized guns.`,
},
988: &MarketGroup{
ID: 988,
ParentID: 986,
Name: `Medium`,
Description: `Medium faction ammunition, fired by cruiser-sized guns`,
},
989: &MarketGroup{
ID: 989,
ParentID: 986,
Name: `Small`,
Description: `Small faction ammunition, fired by frigate-sized guns`,
},
990: &MarketGroup{
ID: 990,
ParentID: 100,
Name: `Faction Charges`,
Description: `Enhanced ammunition designed and manufactured by the factions of New Eden`,
},
991: &MarketGroup{
ID: 991,
ParentID: 990,
Name: `Large`,
Description: `Large faction issue hybrid shells, fired by battleship-sized guns.`,
},
992: &MarketGroup{
ID: 992,
ParentID: 990,
Name: `Medium`,
Description: `Medium faction issue hybrid shells, fired by cruiser-sized guns.`,
},
993: &MarketGroup{
ID: 993,
ParentID: 990,
Name: `Small`,
Description: `Small faction issue hybrid shells, fired by frigate-sized guns.`,
},
994: &MarketGroup{
ID: 994,
ParentID: 101,
Name: `Faction Crystals`,
Description: `Enhanced frequenzy crystals designed and manufactured by the factions of New Eden`,
},
995: &MarketGroup{
ID: 995,
ParentID: 994,
Name: `Large`,
Description: `Large faction issue frequenzy crystals, for use with battleship-size lasers.`,
},
996: &MarketGroup{
ID: 996,
ParentID: 994,
Name: `Medium`,
Description: `Medium faction issue frequenzy crystals, for use with cruiser-size lasers.`,
},
997: &MarketGroup{
ID: 997,
ParentID: 994,
Name: `Small`,
Description: `Small faction issue frequenzy crystals, for use with frigate-size lasers.`,
},
998: &MarketGroup{
ID: 998,
ParentID: 117,
Name: `Faction Light Missiles`,
Description: `Faction issue light missiles.`,
},
999: &MarketGroup{
ID: 999,
ParentID: 118,
Name: `Faction Rockets`,
Description: `Faction issue rockets.`,
},
1000: &MarketGroup{
ID: 1000,
ParentID: 387,
Name: `Faction Torpedoes`,
Description: `Faction issue torpedoes.`,
},
1001: &MarketGroup{
ID: 1001,
ParentID: 580,
Name: `Faction Cruise Missiles`,
Description: `Faction issue cruise missiles.`,
},
1002: &MarketGroup{
ID: 1002,
ParentID: 581,
Name: `Faction Heavy Missiles`,
Description: `Faction issue heavy missiles.`,
},
1003: &MarketGroup{
ID: 1003,
ParentID: 968,
Name: `Faction Heavy Assault Missiles`,
Description: `Faction issue heavy assault missiles.`,
},
1004: &MarketGroup{
ID: 1004,
ParentID: 990,
Name: `Extra Large`,
Description: `Fired by dreadnaught-sized guns and stationary defense systems.`,
},
1006: &MarketGroup{
ID: 1006,
ParentID: 986,
Name: `Extra Large`,
Description: `Fired by dreadnaught-sized guns and stationary defense systems.`,
},
1007: &MarketGroup{
ID: 1007,
ParentID: 994,
Name: `Extra Large`,
Description: `Faction issue frequenzy crystals for use with dreadnought-sized lasers and stationary defense systems.`,
},
1008: &MarketGroup{
ID: 1008,
ParentID: 406,
Name: `Cargo Containers`,
Description: `Blueprints of Cargo Containers.`,
},
1009: &MarketGroup{
ID: 1009,
ParentID: 480,
Name: `Energy Neutralization Batteries`,
Description: `A starbase array designed to neutralize the capacitor of hostile ships in the area.`,
},
1010: &MarketGroup{
ID: 1010,
ParentID: 1285,
Name: `System Scanning Array`,
Description: `A system-wide intelligence gathering array.`,
},
1011: &MarketGroup{
ID: 1011,
ParentID: 1285,
Name: `Jump Bridge `,
Description: `A bridge of light, spanning the cold and empty void between the stars.`,
},
1012: &MarketGroup{
ID: 1012,
ParentID: 1285,
Name: `Cynosural System Jammer`,
Description: `The lock and key to a system's highway of cynosural passage.`,
},
1013: &MarketGroup{
ID: 1013,
ParentID: 1285,
Name: `Cynosural Generator Array`,
Description: `A lighthouse in the sea of nothing, casting its cynosural light out to far-off ships.`,
},
1014: &MarketGroup{
ID: 1014,
ParentID: 10,
Name: `Bomb Launchers`,
Description: `Large launchers, designed for stealth bombers for bomb deployment.`,
},
1015: &MarketGroup{
ID: 1015,
ParentID: 11,
Name: `Bombs`,
Description: `Large, area of effect weapons, used by stealth bombers`,
},
1016: &MarketGroup{
ID: 1016,
ParentID: 211,
Name: `Bombs`,
Description: `Blueprints of bombs.`,
},
1018: &MarketGroup{
ID: 1018,
ParentID: 14,
Name: `Remote Hull Repairers`,
Description: `Repair the hulls of your allies at range, always handy after those close call situations.`,
},
1019: &MarketGroup{
ID: 1019,
ParentID: 210,
Name: `Bomb Launchers`,
Description: `Blueprints of bomb launchers.`,
},
1021: &MarketGroup{
ID: 1021,
ParentID: 1035,
Name: `Outpost Components`,
Description: `Components to those glorious centers of community, outposts.`,
},
1022: &MarketGroup{
ID: 1022,
ParentID: 1021,
Name: `Improvement Platforms`,
Description: ``,
},
1027: &MarketGroup{
ID: 1027,
ParentID: 1021,
Name: `Outpost Upgrade Platforms`,
Description: `Upgrade platforms for outposts, intended as a base for further improvements.`,
},
1028: &MarketGroup{
ID: 1028,
ParentID: 2237,
Name: `Light Fighters`,
Description: `Blueprints of light fighter designs.`,
},
1029: &MarketGroup{
ID: 1029,
ParentID: 357,
Name: `Electronic Warfare Drones`,
Description: `Blueprints of electronic warfare drone designs.`,
},
1030: &MarketGroup{
ID: 1030,
ParentID: 357,
Name: `Logistic Drones`,
Description: `Blueprints of logistics drone designs.`,
},
1031: &MarketGroup{
ID: 1031,
ParentID: 533,
Name: `Raw Materials`,
Description: `The building blocks of New Eden.`,
},
1032: &MarketGroup{
ID: 1032,
ParentID: 533,
Name: `Gas Clouds Materials`,
Description: `Voluminous clouds of various gases, found in space, that can be extracted and used in the manufacturing of biochemical boosters.`,
},
1033: &MarketGroup{
ID: 1033,
ParentID: 533,
Name: `Ice Products`,
Description: `Ice isotopes harvested from ice asteroids.`,
},
1034: &MarketGroup{
ID: 1034,
ParentID: 533,
Name: `Reaction Materials`,
Description: `The various types of raw materials harvested from the moons of New Eden.`,
},
1035: &MarketGroup{
ID: 1035,
ParentID: 475,
Name: `Components`,
Description: `Components are manufactured items used in various industry activities`,
},
1037: &MarketGroup{
ID: 1037,
ParentID: 1713,
Name: `Gas Cloud Harvesters`,
Description: `Gas cloud harvester designs.`,
},
1038: &MarketGroup{
ID: 1038,
ParentID: 1713,
Name: `Ice Harvesters`,
Description: `Ice harvester designs.`,
},
1039: &MarketGroup{
ID: 1039,
ParentID: 1713,
Name: `Mining Lasers`,
Description: `Mining laser designs.`,
},
1040: &MarketGroup{
ID: 1040,
ParentID: 1713,
Name: `Strip Miners`,
Description: `Strip miner designs.`,
},
1041: &MarketGroup{
ID: 1041,
ParentID: 2,
Name: `Manufacture & Research`,
Description: `Blueprints of items used for manufacturing and research processes`,
},
1045: &MarketGroup{
ID: 1045,
ParentID: 204,
Name: `Capital Industrial Ships`,
Description: `Blueprints of Capital Industrial Ships.`,
},
1046: &MarketGroup{
ID: 1046,
ParentID: 1045,
Name: `ORE`,
Description: `Blueprints of ORE Capital Industrial Ship designs.`,
},
1047: &MarketGroup{
ID: 1047,
ParentID: 1381,
Name: `Capital Industrial Ships`,
Description: `Capital ships designed to be the industrial backbone of any fleet.`,
},
1048: &MarketGroup{
ID: 1048,
ParentID: 1047,
Name: `ORE`,
Description: `ORE capital industrial ship designs.`,
},
1049: &MarketGroup{
ID: 1049,
ParentID: 134,
Name: `Small `,
Description: `Armor repair designs, intended for frigate-class vessels.`,
},
1050: &MarketGroup{
ID: 1050,
ParentID: 134,
Name: `Medium`,
Description: `Armor repair designs, intended for cruiser-class vessels.`,
},
1051: &MarketGroup{
ID: 1051,
ParentID: 134,
Name: `Large`,
Description: `Armor repair designs, intended for battleship-class vessels.`,
},
1052: &MarketGroup{
ID: 1052,
ParentID: 134,
Name: `Capital`,
Description: `Armor repair designs, intended for capital-class vessels.`,
},
1053: &MarketGroup{
ID: 1053,
ParentID: 538,
Name: `Small`,
Description: `Hull repair system designs, intended for frigate-class vessels.`,
},
1054: &MarketGroup{
ID: 1054,
ParentID: 538,
Name: `Medium`,
Description: `Hull repair system designs, intended for cruiser-class vessels.`,
},
1055: &MarketGroup{
ID: 1055,
ParentID: 538,
Name: `Large`,
Description: `Hull repair system designs, intended for battleship-class vessels.`,
},
1056: &MarketGroup{
ID: 1056,
ParentID: 537,
Name: `Capital`,
Description: `Remote armor repair system designs, intended for capital-class vessels.`,
},
1057: &MarketGroup{
ID: 1057,
ParentID: 537,
Name: `Large`,
Description: `Remote armor repair system designs, intended for battleship-class vessels.`,
},
1058: &MarketGroup{
ID: 1058,
ParentID: 537,
Name: `Medium`,
Description: `Remote armor repair system designs, intended for cruiser-class vessels.`,
},
1059: &MarketGroup{
ID: 1059,
ParentID: 537,
Name: `Small`,
Description: `Remote armor repair system designs, intended for frigate-class vessels.`,
},
1060: &MarketGroup{
ID: 1060,
ParentID: 1018,
Name: `Small`,
Description: `Remote hull repair system designs, intended for use on frigate-class vessels.`,
},
1061: &MarketGroup{
ID: 1061,
ParentID: 1018,
Name: `Medium`,
Description: `Remote hull repair system designs, intended for use on cruiser-class vessels.`,
},
1062: &MarketGroup{
ID: 1062,
ParentID: 1018,
Name: `Large`,
Description: `Remote hull repair system designs, intended for use on battleship-class vessels.`,
},
1063: &MarketGroup{
ID: 1063,
ParentID: 1018,
Name: `Capital`,
Description: `Remote hull repair system designs, intended for use on capital-class vessels.`,
},
1065: &MarketGroup{
ID: 1065,
ParentID: 1364,
Name: `Electronic Attack Frigates`,
Description: `Frigates specialized in electronic warfare.`,
},
1066: &MarketGroup{
ID: 1066,
ParentID: 1065,
Name: `Amarr`,
Description: `Amarr electronic attack frigate designs.`,
},
1067: &MarketGroup{
ID: 1067,
ParentID: 1065,
Name: `Caldari`,
Description: `Caldari electronic attack frigate designs.`,
},
1068: &MarketGroup{
ID: 1068,
ParentID: 1065,
Name: `Gallente`,
Description: `Gallente electronic attack frigate designs.`,
},
1069: &MarketGroup{
ID: 1069,
ParentID: 1065,
Name: `Minmatar`,
Description: `Minmatar electronic attack frigate designs.`,
},
1070: &MarketGroup{
ID: 1070,
ParentID: 1368,
Name: `Heavy Interdiction Cruisers`,
Description: `Cruiser-class vessels, designed to pull other vessels out of warp.`,
},
1071: &MarketGroup{
ID: 1071,
ParentID: 1070,
Name: `Amarr`,
Description: `Amarr heavy interdiction cruiser designs.`,
},
1072: &MarketGroup{
ID: 1072,
ParentID: 1070,
Name: `Caldari`,
Description: `Caldari heavy interdiction cruiser designs.`,
},
1073: &MarketGroup{
ID: 1073,
ParentID: 1070,
Name: `Gallente`,
Description: `Gallente heavy interdiction cruiser designs.`,
},
1074: &MarketGroup{
ID: 1074,
ParentID: 1070,
Name: `Minmatar`,
Description: `Minmatar heavy interdiction cruiser designs.`,
},
1075: &MarketGroup{
ID: 1075,
ParentID: 1377,
Name: `Black Ops`,
Description: `Battleship-Class vessels, designed to operate behind enemy lines.`,
},
1076: &MarketGroup{
ID: 1076,
ParentID: 1075,
Name: `Amarr`,
Description: `Amarr black ops designs.`,
},
1077: &MarketGroup{
ID: 1077,
ParentID: 1075,
Name: `Caldari`,
Description: `Caldari black ops designs.`,
},
1078: &MarketGroup{
ID: 1078,
ParentID: 1075,
Name: `Gallente`,
Description: `Gallente black ops designs.`,
},
1079: &MarketGroup{
ID: 1079,
ParentID: 1075,
Name: `Minmatar`,
Description: `Minmatar black ops designs.`,
},
1080: &MarketGroup{
ID: 1080,
ParentID: 1377,
Name: `Marauders`,
Description: `Battleship-Class vessels, intended for prolonged frontline deployment.`,
},
1081: &MarketGroup{
ID: 1081,
ParentID: 1080,
Name: `Amarr`,
Description: `Amarr marauder designs.`,
},
1082: &MarketGroup{
ID: 1082,
ParentID: 1080,
Name: `Caldari`,
Description: `Caldari marauder designs`,
},
1083: &MarketGroup{
ID: 1083,
ParentID: 1080,
Name: `Gallente`,
Description: `Gallente marauder designs.`,
},
1084: &MarketGroup{
ID: 1084,
ParentID: 1080,
Name: `Minmatar`,
Description: `Minmatar marauder designs.`,
},
1085: &MarketGroup{
ID: 1085,
ParentID: 657,
Name: `Warp Disruption Field Generators`,
Description: `A ship system that generates a local warp disruption field, preventing warp travel.`,
},
1086: &MarketGroup{
ID: 1086,
ParentID: 132,
Name: `Inertial Stabilizers`,
Description: `Ship systems that stabilize the ship and allow for more graceful maneuvers.`,
},
1087: &MarketGroup{
ID: 1087,
ParentID: 132,
Name: `Overdrives`,
Description: `Ship systems that add more raw power to the ship's engines.`,
},
1088: &MarketGroup{
ID: 1088,
ParentID: 132,
Name: `Warp Core Stabilizers`,
Description: `Ship systems that help to keep the warp drive operational, despite interference.`,
},
1089: &MarketGroup{
ID: 1089,
ParentID: 1381,
Name: `Jump Freighters`,
Description: `Capital cargo ships with jump drive capabilities, bringing trade and commerce to distant stars.`,
},
1090: &MarketGroup{
ID: 1090,
ParentID: 1089,
Name: `Amarr`,
Description: `Amarr jump freighter designs.`,
},
1091: &MarketGroup{
ID: 1091,
ParentID: 1089,
Name: `Caldari`,
Description: `Caldari jump freighter designs.`,
},
1092: &MarketGroup{
ID: 1092,
ParentID: 1089,
Name: `Gallente`,
Description: `Gallente jump freighter designs.`,
},
1093: &MarketGroup{
ID: 1093,
ParentID: 1089,
Name: `Minmatar`,
Description: `Minmatar jump freighter designs.`,
},
1094: &MarketGroup{
ID: 1094,
ParentID: 11,
Name: `Scripts`,
Description: `Scripts that allow for tuning the effects of certain modules`,
},
1097: &MarketGroup{
ID: 1097,
ParentID: 800,
Name: `Advanced Capital Ship Components`,
Description: `Blueprints of Advanced Capital Ship Components.`,
},
1103: &MarketGroup{
ID: 1103,
ParentID: 11,
Name: `Nanite Repair Paste`,
Description: `A nanite packed-fluid, used for emergency repairs of ship modules`,
},
1105: &MarketGroup{
ID: 1105,
ParentID: 211,
Name: `Scripts`,
Description: `Blueprints of scripts.`,
},
1109: &MarketGroup{
ID: 1109,
ParentID: 19,
Name: `Sleeper Components`,
Description: `Mysterious pieces of technology of Sleeper origin, coveted by some empire corporations`,
},
1110: &MarketGroup{
ID: 1110,
ParentID: 150,
Name: `Scanning`,
Description: `Skills pertaining to signature recognition and identification procedures`,
},
1111: &MarketGroup{
ID: 1111,
ParentID: 955,
Name: `Rigs`,
Description: `Rigs modify the performance of ships in which they're installed. Destroyed if removed from a ship`,
},
1112: &MarketGroup{
ID: 1112,
ParentID: 955,
Name: `Subsystems`,
Description: `Components used to piece together advanced modular spaceships`,
},
1122: &MarketGroup{
ID: 1122,
ParentID: 1610,
Name: `Amarr Core Subsystems`,
Description: `Amarr core subsystems.`,
},
1123: &MarketGroup{
ID: 1123,
ParentID: 1625,
Name: `Caldari Core Subsystems`,
Description: `Caldari core subsystems.`,
},
1124: &MarketGroup{
ID: 1124,
ParentID: 1627,
Name: `Gallente Core Subsystems`,
Description: `Gallente core subsystems.`,
},
1125: &MarketGroup{
ID: 1125,
ParentID: 1626,
Name: `Minmatar Core Subsystems`,
Description: `Minmatar core subsystems.`,
},
1126: &MarketGroup{
ID: 1126,
ParentID: 1610,
Name: `Amarr Defensive Subsystems`,
Description: `Amarr defensive subsystems.`,
},
1127: &MarketGroup{
ID: 1127,
ParentID: 1625,
Name: `Caldari Defensive Subsystems`,
Description: `Caldari defensive subsystems.`,
},
1128: &MarketGroup{
ID: 1128,
ParentID: 1626,
Name: `Minmatar Defensive Subsystems`,
Description: `Minmatar defensive subsystems.`,
},
1129: &MarketGroup{
ID: 1129,
ParentID: 1627,
Name: `Gallente Defensive Subsystems`,
Description: `Gallente defensive subsystems.`,
},
1130: &MarketGroup{
ID: 1130,
ParentID: 1610,
Name: `Amarr Offensive Subsystems`,
Description: `Amarr offensive subsystems.`,
},
1131: &MarketGroup{
ID: 1131,
ParentID: 1625,
Name: `Caldari Offensive Subsystems`,
Description: `Caldari offensive subsystems.`,
},
1132: &MarketGroup{
ID: 1132,
ParentID: 1627,
Name: `Gallente Offensive Subsystems`,
Description: `Gallente offensive subsystems.`,
},
1133: &MarketGroup{
ID: 1133,
ParentID: 1626,
Name: `Minmatar Offensive Subsystems`,
Description: `Minmatar offensive subsystems.`,
},
1134: &MarketGroup{
ID: 1134,
ParentID: 1610,
Name: `Amarr Propulsion Subsystems`,
Description: `Amarr propulsion subsystems.`,
},
1135: &MarketGroup{
ID: 1135,
ParentID: 1625,
Name: `Caldari Propulsion Subsystems`,
Description: `Caldari propulsion subsystems.`,
},
1136: &MarketGroup{
ID: 1136,
ParentID: 1627,
Name: `Gallente Propulsion Subsystems`,
Description: `Gallente propulsion subsystems.`,
},
1137: &MarketGroup{
ID: 1137,
ParentID: 1626,
Name: `Minmatar Propulsion Subsystems`,
Description: `Minmatar propulsion subsystems.`,
},
1138: &MarketGroup{
ID: 1138,
ParentID: 1368,
Name: `Strategic Cruisers`,
Description: `Highly advanced cruisers that use modular components to achieve maximum versatility.`,
},
1139: &MarketGroup{
ID: 1139,
ParentID: 1138,
Name: `Amarr`,
Description: `Amarr strategic cruiser designs.`,
},
1140: &MarketGroup{
ID: 1140,
ParentID: 1138,
Name: `Caldari`,
Description: `Caldari strategic cruiser designs.`,
},
1141: &MarketGroup{
ID: 1141,
ParentID: 1138,
Name: `Gallente`,
Description: `Gallente strategic cruiser designs.`,
},
1142: &MarketGroup{
ID: 1142,
ParentID: 1138,
Name: `Minmatar`,
Description: `Minmatar strategic cruiser designs.`,
},
1147: &MarketGroup{
ID: 1147,
ParentID: 1035,
Name: `Subsystem Components`,
Description: `The building blocks of advanced subsystems.`,
},
1191: &MarketGroup{
ID: 1191,
ParentID: 800,
Name: `Subsystem Components`,
Description: `Blueprints of Subsystem Components.`,
},
1192: &MarketGroup{
ID: 1192,
ParentID: 115,
Name: `Faction Auto-Targeting`,
Description: `Faction auto-targeting designs.`,
},
1193: &MarketGroup{
ID: 1193,
ParentID: 505,
Name: `Standard XL Torpedoes`,
Description: `Standard XL torpedo designs.`,
},
1194: &MarketGroup{
ID: 1194,
ParentID: 505,
Name: `Faction XL Torpedoes`,
Description: `Faction XL torpedo designs.`,
},
1195: &MarketGroup{
ID: 1195,
ParentID: 135,
Name: `Reinforced Bulkheads`,
Description: `Struts and braces that help improve a starship's structural integrity.`,
},
1196: &MarketGroup{
ID: 1196,
ParentID: 135,
Name: `Nanofiber Internal Structures`,
Description: `Lightweight materials that provide a boost in speed by weakening a starship's internal structure.`,
},
1197: &MarketGroup{
ID: 1197,
ParentID: 135,
Name: `Expanded Cargoholds`,
Description: `The extra bulk this expansion adds to a starship reduces its speed, but gives it more room to store cargo.`,
},
1198: &MarketGroup{
ID: 1198,
ParentID: 1710,
Name: `Scan Probe Launchers`,
Description: `Blueprints of Scan Probe Launchers.`,
},
1199: &MarketGroup{
ID: 1199,
ParentID: 120,
Name: `Scan Probes`,
Description: `Scan probe designs.`,
},
1200: &MarketGroup{
ID: 1200,
ParentID: 120,
Name: `Survey Probes`,
Description: `Survey probe designs.`,
},
1201: &MarketGroup{
ID: 1201,
ParentID: 120,
Name: `Interdiction Probes`,
Description: `Interdiction probe designs.`,
},
1202: &MarketGroup{
ID: 1202,
ParentID: 944,
Name: `Large Armor Rigs`,
Description: `Blueprints of Large Armor Rigs.`,
},
1203: &MarketGroup{
ID: 1203,
ParentID: 944,
Name: `Small Armor Rigs`,
Description: `Blueprints of Small Armor Rigs.`,
},
1204: &MarketGroup{
ID: 1204,
ParentID: 944,
Name: `Medium Armor Rigs`,
Description: `Blueprints of Medium Armor Rigs.`,
},
1206: &MarketGroup{
ID: 1206,
ParentID: 956,
Name: `Small Armor Rigs`,
Description: `Modifications that affect a small starship's armor.`,
},
1207: &MarketGroup{
ID: 1207,
ParentID: 956,
Name: `Medium Armor Rigs`,
Description: `Modifications that affect a medium starship's armor.`,
},
1208: &MarketGroup{
ID: 1208,
ParentID: 956,
Name: `Large Armor Rigs`,
Description: `Modifications that affect a large starship's armor.`,
},
1210: &MarketGroup{
ID: 1210,
ParentID: 957,
Name: `Small Astronautic Rigs`,
Description: `Modifications that affect a small starship's astronautics.`,
},
1211: &MarketGroup{
ID: 1211,
ParentID: 957,
Name: `Medium Astronautic Rigs`,
Description: `Modifications that affect a medium starship's astronautics.`,
},
1212: &MarketGroup{
ID: 1212,
ParentID: 957,
Name: `Large Astronautic Rigs`,
Description: `Modifications that affect a large starship's astronautics.`,
},
1213: &MarketGroup{
ID: 1213,
ParentID: 958,
Name: `Small Drone Rigs`,
Description: `Modifications that affect a small starship's drones.`,
},
1214: &MarketGroup{
ID: 1214,
ParentID: 958,
Name: `Medium Drone Rigs`,
Description: `Modifications that affect a medium starship's drones.`,
},
1215: &MarketGroup{
ID: 1215,
ParentID: 958,
Name: `Large Drone Rigs`,
Description: `Modifications that affect a large starship's drones.`,
},
1219: &MarketGroup{
ID: 1219,
ParentID: 960,
Name: `Small Electronics Superiority Rigs`,
Description: `Modifications that affect a small starship's electronic warfare capabilities.`,
},
1220: &MarketGroup{
ID: 1220,
ParentID: 960,
Name: `Medium Electronics Superiority Rigs`,
Description: `Modifications that affect a medium starship's electronic warfare capabilities.`,
},
1221: &MarketGroup{
ID: 1221,
ParentID: 960,
Name: `Large Electronics Superiority Rigs`,
Description: `Modifications that affect a large starship's electronic warfare capabilities.`,
},
1222: &MarketGroup{
ID: 1222,
ParentID: 961,
Name: `Small Engineering Rigs`,
Description: `Modifications that affect a small starship's energy grid.`,
},
1223: &MarketGroup{
ID: 1223,
ParentID: 961,
Name: `Medium Engineering Rigs`,
Description: `Modifications that affect a medium starship's energy grid.`,
},
1224: &MarketGroup{
ID: 1224,
ParentID: 961,
Name: `Large Engineering Rigs`,
Description: `Modifications that affect a large starship's energy grid.`,
},
1225: &MarketGroup{
ID: 1225,
ParentID: 962,
Name: `Small Energy Weapon Rigs`,
Description: `Modifications that affect a small starship's energy weapons.`,
},
1226: &MarketGroup{
ID: 1226,
ParentID: 962,
Name: `Medium Energy Weapon Rigs`,
Description: `Modifications that affect a medium starship's energy weapons.`,
},
1227: &MarketGroup{
ID: 1227,
ParentID: 962,
Name: `Large Energy Weapon Rigs`,
Description: `Modifications that affect a large starship's energy weapons.`,
},
1228: &MarketGroup{
ID: 1228,
ParentID: 963,
Name: `Small Hybrid Weapon Rigs`,
Description: `Modifications that affect a small starship's hybrid weapons.`,
},
1229: &MarketGroup{
ID: 1229,
ParentID: 963,
Name: `Medium Hybrid Weapon Rigs`,
Description: `Modifications that affect a medium starship's hybrid weapons.`,
},
1230: &MarketGroup{
ID: 1230,
ParentID: 963,
Name: `Large Hybrid Weapon Rigs`,
Description: `Modifications that affect a large starship's hybrid weapons.`,
},
1231: &MarketGroup{
ID: 1231,
ParentID: 964,
Name: `Small Missile Launcher Rigs`,
Description: `Modifications that affect a small starship's missile launchers.`,
},
1232: &MarketGroup{
ID: 1232,
ParentID: 964,
Name: `Medium Missile Launcher Rigs`,
Description: `Modifications that affect a medium starship's missile launchers.`,
},
1233: &MarketGroup{
ID: 1233,
ParentID: 964,
Name: `Large Missile Launcher Rigs`,
Description: `Modifications that affect a large starship's missile launchers.`,
},
1234: &MarketGroup{
ID: 1234,
ParentID: 965,
Name: `Small Shield Rigs`,
Description: `Modifications that affect a small starship's shields.`,
},
1235: &MarketGroup{
ID: 1235,
ParentID: 965,
Name: `Medium Shield Rigs`,
Description: `Modifications that affect a medium starship's shields.`,
},
1236: &MarketGroup{
ID: 1236,
ParentID: 965,
Name: `Large Shield Rigs`,
Description: `Modifications that affect a large starship's shields.`,
},
1237: &MarketGroup{
ID: 1237,
ParentID: 979,
Name: `Small Projectile Weapon Rigs`,
Description: `Modifications that affect a small starship's projectile weapons.`,
},
1238: &MarketGroup{
ID: 1238,
ParentID: 979,
Name: `Medium Projectile Weapon Rigs`,
Description: `Modifications that affect a medium starship's projectile weapons.`,
},
1239: &MarketGroup{
ID: 1239,
ParentID: 979,
Name: `Large Projectile Weapon Rigs`,
Description: `Modifications that affect a large starship's projectile weapons.`,
},
1240: &MarketGroup{
ID: 1240,
ParentID: 945,
Name: `Small Astronautic Rigs`,
Description: `Blueprints of Small Astronautic Rigs.`,
},
1241: &MarketGroup{
ID: 1241,
ParentID: 945,
Name: `Medium Astronautic Rigs`,
Description: `Blueprints of Medium Astronautic Rigs.`,
},
1242: &MarketGroup{
ID: 1242,
ParentID: 945,
Name: `Large Astronautic Rigs`,
Description: `Blueprints of Large Astronautic Rigs.`,
},
1243: &MarketGroup{
ID: 1243,
ParentID: 946,
Name: `Small Drone Rigs`,
Description: `Blueprints of Small Drone Rigs.`,
},
1244: &MarketGroup{
ID: 1244,
ParentID: 946,
Name: `Medium Drone Rigs`,
Description: `Blueprints of Medium Drone Rigs.`,
},
1245: &MarketGroup{
ID: 1245,
ParentID: 946,
Name: `Large Drone Rigs`,
Description: `Blueprints of Large Drone Rigs.`,
},
1249: &MarketGroup{
ID: 1249,
ParentID: 948,
Name: `Small Electronics Superiority Rigs`,
Description: `Blueprints of Small Electronics Superiority Rigs.`,
},
1250: &MarketGroup{
ID: 1250,
ParentID: 948,
Name: `Medium Electronics Superiority Rigs`,
Description: `Blueprints of Medium Electronics Superiority Rigs.`,
},
1251: &MarketGroup{
ID: 1251,
ParentID: 948,
Name: `Large Electronics Superiority Rigs`,
Description: `Blueprints of Large Electronics Superiority Rigs.`,
},
1252: &MarketGroup{
ID: 1252,
ParentID: 949,
Name: `Small Engineering Rigs`,
Description: `Blueprints of Small Energy Grid Rigs.`,
},
1253: &MarketGroup{
ID: 1253,
ParentID: 949,
Name: `Medium Engineering Rigs`,
Description: `Blueprints of Medium Energy Grid Rigs.`,
},
1254: &MarketGroup{
ID: 1254,
ParentID: 949,
Name: `Large Engineering Rigs`,
Description: `Blueprints of Large Energy Grid Rigs.`,
},
1255: &MarketGroup{
ID: 1255,
ParentID: 950,
Name: `Small Energy Weapon Rigs`,
Description: `Blueprints of Small Energy Weapon Rigs.`,
},
1256: &MarketGroup{
ID: 1256,
ParentID: 950,
Name: `Medium Energy Weapon Rigs`,
Description: `Blueprints of Medium Energy Weapon Rigs.`,
},
1257: &MarketGroup{
ID: 1257,
ParentID: 950,
Name: `Large Energy Weapon Rigs`,
Description: `Blueprints of Large Energy Weapon Rigs.`,
},
1258: &MarketGroup{
ID: 1258,
ParentID: 951,
Name: `Small Hybrid Weapon Rigs`,
Description: `Blueprints of Small Hybrid Weapon Rigs.`,
},
1259: &MarketGroup{
ID: 1259,
ParentID: 951,
Name: `Medium Hybrid Weapon Rigs`,
Description: `Blueprints of Medium Hybrid Weapon Rigs.`,
},
1260: &MarketGroup{
ID: 1260,
ParentID: 951,
Name: `Large Hybrid Weapon Rigs`,
Description: `Blueprints of Large Hybrid Weapon Rigs.`,
},
1261: &MarketGroup{
ID: 1261,
ParentID: 952,
Name: `Small Missile Launcher Rigs`,
Description: `Blueprints of Small Missile Launcher Rigs.`,
},
1262: &MarketGroup{
ID: 1262,
ParentID: 952,
Name: `Medium Missile Launcher Rigs`,
Description: `Blueprints of Medium Missile Launcher Rigs.`,
},
1263: &MarketGroup{
ID: 1263,
ParentID: 952,
Name: `Large Missile Launcher Rigs`,
Description: `Blueprints of Large Missile Launcher Rigs.`,
},
1264: &MarketGroup{
ID: 1264,
ParentID: 953,
Name: `Small Projectile Weapon Rigs`,
Description: `Blueprints of Small Projectile Weapon Rigs.`,
},
1265: &MarketGroup{
ID: 1265,
ParentID: 953,
Name: `Medium Projectile Weapon Rigs`,
Description: `Blueprints of Medium Projectile Weapon Rigs.`,
},
1266: &MarketGroup{
ID: 1266,
ParentID: 953,
Name: `Large Projectile Weapon Rigs`,
Description: `Blueprints of Large Projectile Weapon Rigs.`,
},
1267: &MarketGroup{
ID: 1267,
ParentID: 954,
Name: `Small Shield Rigs`,
Description: `Blueprints of Small Shield Rigs.`,
},
1268: &MarketGroup{
ID: 1268,
ParentID: 954,
Name: `Medium Shield Rigs`,
Description: `Blueprints of Medium Shield Rigs.`,
},
1269: &MarketGroup{
ID: 1269,
ParentID: 954,
Name: `Large Shield Rigs`,
Description: `Blueprints of Large Shield Rigs.`,
},
1272: &MarketGroup{
ID: 1272,
ParentID: 477,
Name: `Sovereignty Structures`,
Description: `Structures pertaining to the control and exploitation of solar systems`,
},
1273: &MarketGroup{
ID: 1273,
ParentID: 1272,
Name: `Territorial Claim Units`,
Description: `Starbase units for claiming territory.`,
},
1274: &MarketGroup{
ID: 1274,
ParentID: 1272,
Name: `Sovereignty Blockade Units`,
Description: `Starbase units for blockading sovereignty.`,
},
1275: &MarketGroup{
ID: 1275,
ParentID: 1272,
Name: `Infrastructure Hubs`,
Description: `Starbase units for building infrastructure.`,
},
1276: &MarketGroup{
ID: 1276,
ParentID: 1272,
Name: `Infrastructure Upgrades`,
Description: `Upgrades which are installed into the Infrastructure Hub.`,
},
1282: &MarketGroup{
ID: 1282,
ParentID: 1276,
Name: `Strategic Upgrades`,
Description: `Strategic upgrades to sovereignty structures.`,
},
1283: &MarketGroup{
ID: 1283,
ParentID: 1276,
Name: `Industrial Upgrades`,
Description: `Industrial upgrades to sovereignty structures.`,
},
1284: &MarketGroup{
ID: 1284,
ParentID: 1276,
Name: `Military Upgrades`,
Description: `Military upgrades to sovereignty structures.`,
},
1285: &MarketGroup{
ID: 1285,
ParentID: 477,
Name: `Starbase Structures`,
Description: `Structures used in constructing capsuleer-run starbases`,
},
1286: &MarketGroup{
ID: 1286,
ParentID: 314,
Name: `XL Cruise Missiles`,
Description: `Blueprints of XL Cruise Missiles.`,
},
1287: &MarketGroup{
ID: 1287,
ParentID: 1316,
Name: `Standard XL Cruise Missiles`,
Description: `Standard XL cruise missile designs.`,
},
1310: &MarketGroup{
ID: 1310,
ParentID: 2410,
Name: `Heavy Fighters`,
Description: `Piloted combat vessels, deployable from Carriers and Supercarriers.`,
},
1313: &MarketGroup{
ID: 1313,
ParentID: 2237,
Name: `Heavy Fighters`,
Description: `Blueprints of heavy fighter designs.`,
},
1316: &MarketGroup{
ID: 1316,
ParentID: 114,
Name: `XL Cruise Missiles`,
Description: `XL cruise missile designs.`,
},
1317: &MarketGroup{
ID: 1317,
ParentID: 1316,
Name: `Faction XL Cruise Missiles`,
Description: `Faction XL cruise missile designs.`,
},
1320: &MarketGroup{
ID: 1320,
ParentID: 0,
Name: `Planetary Infrastructure`,
Description: `Capsuleers can exploit the planetary resources of New Eden by establishing command centers on the ground and orbital infrastructure in space`,
},
1322: &MarketGroup{
ID: 1322,
ParentID: 1320,
Name: `Command Centers`,
Description: `A Command Center is required to extract resources from a planet`,
},
1323: &MarketGroup{
ID: 1323,
ParentID: 150,
Name: `Resource Processing`,
Description: `Skills pertaining to efficiently extracting raw materials and refining them`,
},
1332: &MarketGroup{
ID: 1332,
ParentID: 533,
Name: `Planetary Materials`,
Description: `Materials sourced from planets`,
},
1333: &MarketGroup{
ID: 1333,
ParentID: 1332,
Name: `Raw Planetary Materials`,
Description: `The first tier of planetary materials.`,
},
1334: &MarketGroup{
ID: 1334,
ParentID: 1332,
Name: `Processed Planetary Materials`,
Description: `The second tier of planetary materials.`,
},
1335: &MarketGroup{
ID: 1335,
ParentID: 1332,
Name: `Refined Planetary Materials`,
Description: `The third tier of planetary materials.`,
},
1336: &MarketGroup{
ID: 1336,
ParentID: 1332,
Name: `Specialized Planetary Materials`,
Description: `The fourth tier of planetary materials.`,
},
1337: &MarketGroup{
ID: 1337,
ParentID: 1332,
Name: `Advanced Planetary Materials`,
Description: `The firth and final tier of planetary materials.`,
},
1338: &MarketGroup{
ID: 1338,
ParentID: 2,
Name: `Structures`,
Description: `Blueprints of structures in space`,
},
1339: &MarketGroup{
ID: 1339,
ParentID: 1841,
Name: `Control Towers`,
Description: `Blueprints of Control Towers.`,
},
1340: &MarketGroup{
ID: 1340,
ParentID: 1841,
Name: `Assembly Arrays`,
Description: `Blueprints of Assembly Arrays.`,
},
1341: &MarketGroup{
ID: 1341,
ParentID: 1841,
Name: `Corporate Hangar Arrays`,
Description: `Blueprints of Corporate Hangar Arrays.`,
},
1342: &MarketGroup{
ID: 1342,
ParentID: 1841,
Name: `Cynosural Generator Arrays`,
Description: `Blueprints of Cynosural Generator Arrays.`,
},
1343: &MarketGroup{
ID: 1343,
ParentID: 1841,
Name: `Cynosural System Jammers`,
Description: `Blueprints of Cynosural System Jammers.`,
},
1344: &MarketGroup{
ID: 1344,
ParentID: 1534,
Name: `Electronic Warfare Batteries`,
Description: `Blueprints of Electronic Warfare Batteries.`,
},
1345: &MarketGroup{
ID: 1345,
ParentID: 1534,
Name: `Energy Neutralization Batteries`,
Description: `Blueprints of Energy Neutralization Batteries.`,
},
1346: &MarketGroup{
ID: 1346,
ParentID: 1841,
Name: `Jump Bridge`,
Description: `Blueprints of Jump Bridge.`,
},
1347: &MarketGroup{
ID: 1347,
ParentID: 1534,
Name: `Hybrid Batteries`,
Description: `Blueprints of Hybrid Batteries.`,
},
1348: &MarketGroup{
ID: 1348,
ParentID: 1534,
Name: `Laser Batteries`,
Description: `Blueprints of Laser Batteries.`,
},
1349: &MarketGroup{
ID: 1349,
ParentID: 1534,
Name: `Missile Batteries`,
Description: `Blueprints of Missile Batteries.`,
},
1350: &MarketGroup{
ID: 1350,
ParentID: 1534,
Name: `Projectile Batteries`,
Description: `Blueprints of Projectile Batteries.`,
},
1351: &MarketGroup{
ID: 1351,
ParentID: 1841,
Name: `Reactors`,
Description: `Blueprints of Reactors.`,
},
1352: &MarketGroup{
ID: 1352,
ParentID: 1841,
Name: `Moon Harvesting Arrays`,
Description: `Blueprints of Moon Harvesting Arrays.`,
},
1353: &MarketGroup{
ID: 1353,
ParentID: 1841,
Name: `Reprocessing Arrays`,
Description: `Blueprints of Reprocessing Arrays.`,
},
1354: &MarketGroup{
ID: 1354,
ParentID: 1841,
Name: `Shield Hardening Arrays`,
Description: `Blueprints of Shield Hardening Arrays.`,
},
1355: &MarketGroup{
ID: 1355,
ParentID: 1841,
Name: `Silos`,
Description: `Blueprints of Silos.`,
},
1356: &MarketGroup{
ID: 1356,
ParentID: 2015,
Name: `Sovereignty Structures`,
Description: `Blueprints of Sovereignty Structures.`,
},
1358: &MarketGroup{
ID: 1358,
ParentID: 211,
Name: `Nanite Repair Paste`,
Description: `Blueprints of Nanite Repair Paste.`,
},
1359: &MarketGroup{
ID: 1359,
ParentID: 1841,
Name: `Laboratories`,
Description: `Blueprints of Laboratories.`,
},
1361: &MarketGroup{
ID: 1361,
ParentID: 4,
Name: `Frigates`,
Description: `Small spaceships that excel in any role involving speed and agility`,
},
1362: &MarketGroup{
ID: 1362,
ParentID: 1361,
Name: `Faction Frigates`,
Description: `Frigates designed by specific factions.`,
},
1364: &MarketGroup{
ID: 1364,
ParentID: 1361,
Name: `Advanced Frigates`,
Description: `Frigate variants intended for specialized purposes.`,
},
1365: &MarketGroup{
ID: 1365,
ParentID: 1362,
Name: `Pirate Faction`,
Description: `Non-Empire faction frigate designs.`,
},
1366: &MarketGroup{
ID: 1366,
ParentID: 1362,
Name: `Navy Faction`,
Description: `Navy faction frigate designs.`,
},
1367: &MarketGroup{
ID: 1367,
ParentID: 4,
Name: `Cruisers`,
Description: `Medium-sized spaceships intended for a variety of roles`,
},
1368: &MarketGroup{
ID: 1368,
ParentID: 1367,
Name: `Advanced Cruisers`,
Description: `Cruiser variants intended for specialized purposes.`,
},
1369: &MarketGroup{
ID: 1369,
ParentID: 1367,
Name: `Faction Cruisers`,
Description: `Cruisers designed by specific factions.`,
},
1370: &MarketGroup{
ID: 1370,
ParentID: 1369,
Name: `Navy Faction`,
Description: `Navy faction cruiser designs.`,
},
1371: &MarketGroup{
ID: 1371,
ParentID: 1369,
Name: `Pirate Faction`,
Description: `Pirate faction cruiser designs.`,
},
1372: &MarketGroup{
ID: 1372,
ParentID: 4,
Name: `Destroyers`,
Description: `Heavily armed spaceships that sacrifice durability for extra firepower`,
},
1373: &MarketGroup{
ID: 1373,
ParentID: 1372,
Name: `Advanced Destroyers`,
Description: `These destroyers have additional attributes to help them specialize in specific roles.`,
},
1374: &MarketGroup{
ID: 1374,
ParentID: 4,
Name: `Battlecruisers`,
Description: `Medium-sized spaceships that are more powerful than normal cruisers`,
},
1375: &MarketGroup{
ID: 1375,
ParentID: 1374,
Name: `Advanced Battlecruisers`,
Description: `Advanced battlecruisers make use of advanced technology for maximum effectiveness in a certain role.`,
},
1376: &MarketGroup{
ID: 1376,
ParentID: 4,
Name: `Battleships`,
Description: `Large-sized spaceships with enough offensive and defensive capabilities to tackle all but the biggest problems`,
},
1377: &MarketGroup{
ID: 1377,
ParentID: 1376,
Name: `Advanced Battleships`,
Description: `By focusing the design down to a single purpose, these battleships excel at their designated tasks like no other starship can.`,
},
1378: &MarketGroup{
ID: 1378,
ParentID: 1376,
Name: `Faction Battleships`,
Description: `Battleships designed by specific factions.`,
},
1379: &MarketGroup{
ID: 1379,
ParentID: 1378,
Name: `Navy Faction`,
Description: `Navy faction battleship designs.`,
},
1380: &MarketGroup{
ID: 1380,
ParentID: 1378,
Name: `Pirate Faction`,
Description: `Pirate faction battleship designs.`,
},
1381: &MarketGroup{
ID: 1381,
ParentID: 4,
Name: `Capital Ships`,
Description: `Huge spaceships capable of dominating battlefields or industry sectors`,
},
1382: &MarketGroup{
ID: 1382,
ParentID: 4,
Name: `Industrial Ships`,
Description: `Large transport ships that form the backbone of the economy`,
},
1384: &MarketGroup{
ID: 1384,
ParentID: 4,
Name: `Mining Barges`,
Description: `Ships specially designed to harvest resources in space`,
},
1385: &MarketGroup{
ID: 1385,
ParentID: 1382,
Name: `Advanced Industrial Ships`,
Description: `Advanced industrial ships are specially modified for specific purposes.`,
},
1389: &MarketGroup{
ID: 1389,
ParentID: 208,
Name: `ORE`,
Description: `Blueprints of ORE industrial designs.`,
},
1390: &MarketGroup{
ID: 1390,
ParentID: 8,
Name: `ORE`,
Description: `ORE industrial designs.`,
},
1392: &MarketGroup{
ID: 1392,
ParentID: 817,
Name: `Faction Carrier`,
Description: `Faction carrier designs.`,
},
1396: &MarketGroup{
ID: 1396,
ParentID: 0,
Name: `Apparel`,
Description: `Capsuleers can choose from a variety of exclusive clothing and accessories`,
},
1397: &MarketGroup{
ID: 1397,
ParentID: 1396,
Name: `Men's Clothing`,
Description: `Clothing for a masculine physique`,
},
1398: &MarketGroup{
ID: 1398,
ParentID: 1397,
Name: `Tops`,
Description: `Clothing worn on the torso.`,
},
1399: &MarketGroup{
ID: 1399,
ParentID: 1397,
Name: `Outerwear`,
Description: `Clothing worn on the outside of other clothing.`,
},
1400: &MarketGroup{
ID: 1400,
ParentID: 1397,
Name: `Footwear`,
Description: `Garments worn on the feet, for fashion, protection against the environment, and adornment.`,
},
1401: &MarketGroup{
ID: 1401,
ParentID: 1397,
Name: `Bottoms`,
Description: `Any of various garments worn below the waist. `,
},
1402: &MarketGroup{
ID: 1402,
ParentID: 1396,
Name: `Women's Clothing`,
Description: `Clothing for a feminine physique`,
},
1403: &MarketGroup{
ID: 1403,
ParentID: 1402,
Name: `Bottoms`,
Description: `Any of various garments worn below the waist.`,
},
1404: &MarketGroup{
ID: 1404,
ParentID: 1402,
Name: `Footwear`,
Description: `Garments worn on the feet, for fashion, protection against the environment, and adornment.`,
},
1405: &MarketGroup{
ID: 1405,
ParentID: 1402,
Name: `Outerwear`,
Description: `Clothing worn on the outside of other clothing.`,
},
1406: &MarketGroup{
ID: 1406,
ParentID: 1402,
Name: `Tops`,
Description: `Clothing worn on the torso.`,
},
1407: &MarketGroup{
ID: 1407,
ParentID: 1396,
Name: `Accessories`,
Description: `Apparel items that supplement an outfit by adding style and class`,
},
1408: &MarketGroup{
ID: 1408,
ParentID: 1407,
Name: `Eyewear`,
Description: `Eyewear is almost exclusively used for vanity purposes given advances in modern cyberware that renders externally mounted hardware obsolete. `,
},
1410: &MarketGroup{
ID: 1410,
ParentID: 1320,
Name: `Orbital Infrastructure`,
Description: `Gives capsuleers stronger control over a planet's resources`,
},
1416: &MarketGroup{
ID: 1416,
ParentID: 535,
Name: `Reactive Armor Hardeners`,
Description: `Armor Hardeners that shift resistance based on damaged received.`,
},
1426: &MarketGroup{
ID: 1426,
ParentID: 657,
Name: `Target Breaker`,
Description: `Clears all locks on you and all your locks.`,
},
1427: &MarketGroup{
ID: 1427,
ParentID: 19,
Name: `Aurum Tokens`,
Description: `An Aurum Token is a physical chit that can be redeemed for a predefined amount of AUR currency`,
},
1469: &MarketGroup{
ID: 1469,
ParentID: 531,
Name: `Armor Implants`,
Description: `Armor Implants`,
},
1470: &MarketGroup{
ID: 1470,
ParentID: 531,
Name: `Electronic Systems Implants`,
Description: `Electronic Systems Implants`,
},
1471: &MarketGroup{
ID: 1471,
ParentID: 531,
Name: `Engineering Implants`,
Description: `Engineering Implants`,
},
1472: &MarketGroup{
ID: 1472,
ParentID: 531,
Name: `Faction Omega Implants`,
Description: `Faction Omega Implants`,
},
1473: &MarketGroup{
ID: 1473,
ParentID: 531,
Name: `Gunnery Implants`,
Description: `Gunnery Implants`,
},
1474: &MarketGroup{
ID: 1474,
ParentID: 531,
Name: `Industry Implants`,
Description: `Industry Implants`,
},
1475: &MarketGroup{
ID: 1475,
ParentID: 531,
Name: `Fleet Support Implants`,
Description: `Fleet Support Implants`,
},
1476: &MarketGroup{
ID: 1476,
ParentID: 531,
Name: `Missile Implants`,
Description: `Missile Implants`,
},
1477: &MarketGroup{
ID: 1477,
ParentID: 531,
Name: `Navigation Implants`,
Description: `Navigation Implants`,
},
1478: &MarketGroup{
ID: 1478,
ParentID: 531,
Name: `Science Implants`,
Description: `Science Implants`,
},
1479: &MarketGroup{
ID: 1479,
ParentID: 531,
Name: `Shield Implants`,
Description: `Shield Implants`,
},
1480: &MarketGroup{
ID: 1480,
ParentID: 1479,
Name: `Implant Slot 06`,
Description: `Implant Slot 06`,
},
1481: &MarketGroup{
ID: 1481,
ParentID: 1479,
Name: `Implant Slot 07`,
Description: `Implant Slot 07`,
},
1482: &MarketGroup{
ID: 1482,
ParentID: 1479,
Name: `Implant Slot 08`,
Description: `Implant Slot 08`,
},
1483: &MarketGroup{
ID: 1483,
ParentID: 1479,
Name: `Implant Slot 09`,
Description: `Implant Slot 09`,
},
1484: &MarketGroup{
ID: 1484,
ParentID: 1478,
Name: `Implant Slot 06`,
Description: `Implant Slot 06`,
},
1485: &MarketGroup{
ID: 1485,
ParentID: 1478,
Name: `Implant Slot 07`,
Description: `Implant Slot 07`,
},
1486: &MarketGroup{
ID: 1486,
ParentID: 1478,
Name: `Implant Slot 08`,
Description: `Implant Slot 08`,
},
1489: &MarketGroup{
ID: 1489,
ParentID: 1477,
Name: `Implant Slot 06`,
Description: `Implant Slot 06`,
},
1490: &MarketGroup{
ID: 1490,
ParentID: 1477,
Name: `Implant Slot 07`,
Description: `Implant Slot 07`,
},
1491: &MarketGroup{
ID: 1491,
ParentID: 1477,
Name: `Implant Slot 08`,
Description: `Implant Slot 08`,
},
1492: &MarketGroup{
ID: 1492,
ParentID: 1477,
Name: `Implant Slot 09`,
Description: `Implant Slot 09`,
},
1493: &MarketGroup{
ID: 1493,
ParentID: 1476,
Name: `Implant Slot 06`,
Description: `Implant Slot 06`,
},
1494: &MarketGroup{
ID: 1494,
ParentID: 1476,
Name: `Implant Slot 07`,
Description: `Implant Slot 07`,
},
1495: &MarketGroup{
ID: 1495,
ParentID: 1476,
Name: `Implant Slot 08`,
Description: `Implant Slot 08`,
},
1496: &MarketGroup{
ID: 1496,
ParentID: 1476,
Name: `Implant Slot 09`,
Description: `Implant Slot 09`,
},
1497: &MarketGroup{
ID: 1497,
ParentID: 1476,
Name: `Implant Slot 10`,
Description: `Implant Slot 10`,
},
1498: &MarketGroup{
ID: 1498,
ParentID: 1473,
Name: `Implant Slot 06`,
Description: `Implant Slot 06`,
},
1499: &MarketGroup{
ID: 1499,
ParentID: 1473,
Name: `Implant Slot 07`,
Description: `Implant Slot 07`,
},
1500: &MarketGroup{
ID: 1500,
ParentID: 1473,
Name: `Implant Slot 08`,
Description: `Implant Slot 08`,
},
1501: &MarketGroup{
ID: 1501,
ParentID: 1473,
Name: `Implant Slot 09`,
Description: `Implant Slot 09`,
},
1502: &MarketGroup{
ID: 1502,
ParentID: 1473,
Name: `Implant Slot 10`,
Description: `Implant Slot 10`,
},
1504: &MarketGroup{
ID: 1504,
ParentID: 1474,
Name: `Implant Slot 08`,
Description: `Implant Slot 08`,
},
1505: &MarketGroup{
ID: 1505,
ParentID: 1475,
Name: `Implant Slot 10`,
Description: `Implant Slot 10`,
},
1506: &MarketGroup{
ID: 1506,
ParentID: 1472,
Name: `Implant Slot 06`,
Description: `Implant Slot 06`,
},
1507: &MarketGroup{
ID: 1507,
ParentID: 1471,
Name: `Implant Slot 06`,
Description: `Implant Slot 06`,
},
1508: &MarketGroup{
ID: 1508,
ParentID: 1471,
Name: `Implant Slot 07`,
Description: `Implant Slot 07`,
},
1509: &MarketGroup{
ID: 1509,
ParentID: 1471,
Name: `Implant Slot 08`,
Description: `Implant Slot 08`,
},
1512: &MarketGroup{
ID: 1512,
ParentID: 1470,
Name: `Implant Slot 08`,
Description: `Implant Slot 08`,
},
1513: &MarketGroup{
ID: 1513,
ParentID: 1470,
Name: `Implant Slot 09`,
Description: `Implant Slot 09`,
},
1514: &MarketGroup{
ID: 1514,
ParentID: 1469,
Name: `Implant Slot 06`,
Description: `Implant Slot 06`,
},
1515: &MarketGroup{
ID: 1515,
ParentID: 1469,
Name: `Implant Slot 07`,
Description: `Implant Slot 07`,
},
1516: &MarketGroup{
ID: 1516,
ParentID: 1469,
Name: `Implant Slot 08`,
Description: `Implant Slot 08`,
},
1517: &MarketGroup{
ID: 1517,
ParentID: 1469,
Name: `Implant Slot 09`,
Description: `Implant Slot 09`,
},
1518: &MarketGroup{
ID: 1518,
ParentID: 1469,
Name: `Implant Slot 10`,
Description: `Implant Slot 10`,
},
1520: &MarketGroup{
ID: 1520,
ParentID: 211,
Name: `Probes`,
Description: `Probes`,
},
1521: &MarketGroup{
ID: 1521,
ParentID: 1520,
Name: `Survey Probes`,
Description: `Survey Probes`,
},
1522: &MarketGroup{
ID: 1522,
ParentID: 1041,
Name: `Materials`,
Description: `Materials`,
},
1525: &MarketGroup{
ID: 1525,
ParentID: 252,
Name: `Afterburners`,
Description: `Blueprints of afterburners.`,
},
1526: &MarketGroup{
ID: 1526,
ParentID: 314,
Name: `Cruise Missiles`,
Description: `Cruise Missiles`,
},
1527: &MarketGroup{
ID: 1527,
ParentID: 314,
Name: `Heavy Missiles`,
Description: `Heavy Missiles`,
},
1528: &MarketGroup{
ID: 1528,
ParentID: 314,
Name: `Light Missiles`,
Description: ``,
},
1529: &MarketGroup{
ID: 1529,
ParentID: 1520,
Name: `Interdiction Probes`,
Description: `Interdiction Probes`,
},
1530: &MarketGroup{
ID: 1530,
ParentID: 357,
Name: `Combat Drones`,
Description: `Combat Drones`,
},
1531: &MarketGroup{
ID: 1531,
ParentID: 1530,
Name: `Light Scout Drones`,
Description: `Blueprints of light scout drone designs.`,
},
1532: &MarketGroup{
ID: 1532,
ParentID: 1530,
Name: `Medium Scout Drones`,
Description: `Blueprints of medium scout drone designs.`,
},
1533: &MarketGroup{
ID: 1533,
ParentID: 1530,
Name: `Sentry Drones`,
Description: `Blueprints of sentry drone designs.`,
},
1534: &MarketGroup{
ID: 1534,
ParentID: 1841,
Name: `Weapon Batteries`,
Description: `Weapon Batteries`,
},
1535: &MarketGroup{
ID: 1535,
ParentID: 1841,
Name: `Ship Maintenance Arrays`,
Description: `Ship Maintenance Arrays`,
},
1536: &MarketGroup{
ID: 1536,
ParentID: 214,
Name: `Armor Repairers`,
Description: `Blueprints for Armor Repairers`,
},
1537: &MarketGroup{
ID: 1537,
ParentID: 214,
Name: `Hull Repairers`,
Description: `Blueprints for Hull Repairers`,
},
1538: &MarketGroup{
ID: 1538,
ParentID: 214,
Name: `Remote Hull Repairers`,
Description: `Blueprints for Remote Hull Repairers`,
},
1539: &MarketGroup{
ID: 1539,
ParentID: 214,
Name: `Remote Armor Repairers`,
Description: `Blueprints for Remote Armor Repairers`,
},
1540: &MarketGroup{
ID: 1540,
ParentID: 214,
Name: `Armor Hardeners`,
Description: `Blueprints for Armor Hardeners`,
},
1541: &MarketGroup{
ID: 1541,
ParentID: 214,
Name: `Armor Plates`,
Description: `Blueprints for Armor Plates`,
},
1542: &MarketGroup{
ID: 1542,
ParentID: 214,
Name: `Damage Controls`,
Description: `Blueprints for Damage Controls`,
},
1543: &MarketGroup{
ID: 1543,
ParentID: 214,
Name: `Energized Armor Membranes`,
Description: `Blueprints for Energized Armor Membranes`,
},
1544: &MarketGroup{
ID: 1544,
ParentID: 214,
Name: `Armor Coatings`,
Description: `Blueprints for Armor Coatings`,
},
1545: &MarketGroup{
ID: 1545,
ParentID: 209,
Name: `Shield`,
Description: `Shield`,
},
1546: &MarketGroup{
ID: 1546,
ParentID: 209,
Name: `Engineering Equipment`,
Description: `Engineering Equipment`,
},
1547: &MarketGroup{
ID: 1547,
ParentID: 1545,
Name: `Shield Flux Coils`,
Description: `Shield Flux Coils`,
},
1548: &MarketGroup{
ID: 1548,
ParentID: 1545,
Name: `Shield Hardeners`,
Description: `Shield Hardeners`,
},
1549: &MarketGroup{
ID: 1549,
ParentID: 1545,
Name: `Shield Extenders`,
Description: `Shield Extenders`,
},
1550: &MarketGroup{
ID: 1550,
ParentID: 1545,
Name: `Shield Power Relays`,
Description: `Shield Power Relays`,
},
1551: &MarketGroup{
ID: 1551,
ParentID: 1545,
Name: `Shield Rechargers`,
Description: `Shield Rechargers`,
},
1552: &MarketGroup{
ID: 1552,
ParentID: 1545,
Name: `Shield Boosters`,
Description: `Shield Boosters`,
},
1553: &MarketGroup{
ID: 1553,
ParentID: 1545,
Name: `Remote Shield Boosters`,
Description: `Remote Shield Boosters`,
},
1554: &MarketGroup{
ID: 1554,
ParentID: 1545,
Name: `Shield Resistance Amplifiers`,
Description: `Shield Resistance Amplifiers`,
},
1555: &MarketGroup{
ID: 1555,
ParentID: 1546,
Name: `Capacitor Rechargers`,
Description: `Capacitor Rechargers`,
},
1556: &MarketGroup{
ID: 1556,
ParentID: 1546,
Name: `Capacitor Flux Coils`,
Description: `Capacitor Flux Coils`,
},
1557: &MarketGroup{
ID: 1557,
ParentID: 1546,
Name: `Capacitor Power Relays`,
Description: `Capacitor Power Relays`,
},
1558: &MarketGroup{
ID: 1558,
ParentID: 1546,
Name: `Capacitor Batteries`,
Description: `Capacitor Batteries`,
},
1559: &MarketGroup{
ID: 1559,
ParentID: 1546,
Name: `Auxiliary Power Controls`,
Description: `Auxiliary Power Controls`,
},
1560: &MarketGroup{
ID: 1560,
ParentID: 1546,
Name: `Power Diagnostic Systems`,
Description: `Power Diagnostic Systems`,
},
1561: &MarketGroup{
ID: 1561,
ParentID: 1546,
Name: `Reactor Control Units`,
Description: `Reactor Control Units`,
},
1562: &MarketGroup{
ID: 1562,
ParentID: 1546,
Name: `Remote Capacitor Transmitters`,
Description: `Remote Capacitor Transmitters`,
},
1563: &MarketGroup{
ID: 1563,
ParentID: 1546,
Name: `Capacitor Boosters`,
Description: `Capacitor Boosters`,
},
1564: &MarketGroup{
ID: 1564,
ParentID: 1546,
Name: `Energy Nosferatu`,
Description: `Energy Nosferatu`,
},
1565: &MarketGroup{
ID: 1565,
ParentID: 1546,
Name: `Energy Neutralizers`,
Description: `Energy Neutralizers`,
},
1566: &MarketGroup{
ID: 1566,
ParentID: 209,
Name: `Electronic Warfare`,
Description: `Electronic Warfare`,
},
1567: &MarketGroup{
ID: 1567,
ParentID: 1566,
Name: `Electronic Counter Measures`,
Description: `Electronic Counter Measures`,
},
1568: &MarketGroup{
ID: 1568,
ParentID: 1566,
Name: `ECM Burst`,
Description: `ECM Burst`,
},
1570: &MarketGroup{
ID: 1570,
ParentID: 1566,
Name: `Stasis Webifiers`,
Description: `Stasis Webifiers`,
},
1571: &MarketGroup{
ID: 1571,
ParentID: 1566,
Name: `Target Painters`,
Description: `Target Painters`,
},
1572: &MarketGroup{
ID: 1572,
ParentID: 1566,
Name: `Warp Disruption Field Generators`,
Description: `Warp Disruption Field Generators`,
},
1574: &MarketGroup{
ID: 1574,
ParentID: 1566,
Name: `Weapon Disruptors`,
Description: `Weapon Disruptors`,
},
1575: &MarketGroup{
ID: 1575,
ParentID: 1566,
Name: `ECCM`,
Description: `ECCM`,
},
1576: &MarketGroup{
ID: 1576,
ParentID: 1566,
Name: `Remote Sensor Dampeners`,
Description: `Remote Sensor Dampeners`,
},
1577: &MarketGroup{
ID: 1577,
ParentID: 1566,
Name: `Projected ECCM`,
Description: `Projected ECCM`,
},
1578: &MarketGroup{
ID: 1578,
ParentID: 209,
Name: `Electronics and Sensor Upgrades`,
Description: `Electronics and Sensor Upgrades`,
},
1579: &MarketGroup{
ID: 1579,
ParentID: 1578,
Name: `Automated Targeting Systems`,
Description: `Automated Targeting Systems`,
},
1580: &MarketGroup{
ID: 1580,
ParentID: 1578,
Name: `Remote Sensor Boosters`,
Description: `Remote Sensor Boosters`,
},
1581: &MarketGroup{
ID: 1581,
ParentID: 1578,
Name: `Sensor Boosters`,
Description: `Sensor Boosters`,
},
1582: &MarketGroup{
ID: 1582,
ParentID: 1578,
Name: `Passive Targeting Systems`,
Description: `Passive Targeting Systems`,
},
1583: &MarketGroup{
ID: 1583,
ParentID: 1566,
Name: `Sensor Backup Arrays`,
Description: `Sensor Backup Arrays`,
},
1584: &MarketGroup{
ID: 1584,
ParentID: 1578,
Name: `CPU Upgrades`,
Description: `CPU Upgrades`,
},
1585: &MarketGroup{
ID: 1585,
ParentID: 1578,
Name: `Signal Amplifiers`,
Description: `Signal Amplifiers`,
},
1586: &MarketGroup{
ID: 1586,
ParentID: 357,
Name: `Combat Utility Drones`,
Description: `Combat Utility Drones`,
},
1587: &MarketGroup{
ID: 1587,
ParentID: 1097,
Name: `Amarr`,
Description: `Amarr`,
},
1588: &MarketGroup{
ID: 1588,
ParentID: 1097,
Name: `Caldari`,
Description: `Caldari`,
},
1589: &MarketGroup{
ID: 1589,
ParentID: 1097,
Name: `Gallente`,
Description: `Gallente`,
},
1590: &MarketGroup{
ID: 1590,
ParentID: 1097,
Name: `Minmatar`,
Description: `Minmatar`,
},
1591: &MarketGroup{
ID: 1591,
ParentID: 800,
Name: `<NAME>`,
Description: `Advanced Components`,
},
1592: &MarketGroup{
ID: 1592,
ParentID: 1591,
Name: `Amarr`,
Description: `Amarr`,
},
1593: &MarketGroup{
ID: 1593,
ParentID: 1591,
Name: `Caldari`,
Description: `Caldari`,
},
1594: &MarketGroup{
ID: 1594,
ParentID: 1591,
Name: `Gallente`,
Description: `Gallente`,
},
1595: &MarketGroup{
ID: 1595,
ParentID: 1591,
Name: `Minmatar`,
Description: `Minmatar`,
},
1598: &MarketGroup{
ID: 1598,
ParentID: 99,
Name: `Orbital Strike`,
Description: ``,
},
1599: &MarketGroup{
ID: 1599,
ParentID: 101,
Name: `Orbital Strike`,
Description: ``,
},
1600: &MarketGroup{
ID: 1600,
ParentID: 100,
Name: `Orbital Strike`,
Description: ``,
},
1601: &MarketGroup{
ID: 1601,
ParentID: 300,
Name: `Orbital Strike`,
Description: ``,
},
1602: &MarketGroup{
ID: 1602,
ParentID: 301,
Name: `Orbital Strike`,
Description: ``,
},
1603: &MarketGroup{
ID: 1603,
ParentID: 299,
Name: `Orbital Strike`,
Description: ``,
},
1610: &MarketGroup{
ID: 1610,
ParentID: 1112,
Name: `Amarr Subsystems`,
Description: `Amarr Subsystems`,
},
1612: &MarketGroup{
ID: 1612,
ParentID: 4,
Name: `Special Edition Ships`,
Description: `Rare and visually distinct ships of interest to collectors`,
},
1614: &MarketGroup{
ID: 1614,
ParentID: 1612,
Name: `Special Edition Industrial Ships`,
Description: `Industrial ships which have been offered to capsuleers on occasion for limited periods.`,
},
1616: &MarketGroup{
ID: 1616,
ParentID: 5,
Name: `ORE`,
Description: `ORE frigate designs.`,
},
1617: &MarketGroup{
ID: 1617,
ParentID: 205,
Name: `ORE`,
Description: `Blueprints of ORE frigate designs.`,
},
1618: &MarketGroup{
ID: 1618,
ParentID: 1612,
Name: `Special Edition Shuttles`,
Description: `Shuttles which have been offered to capsuleers on occasion for limited periods.`,
},
1619: &MarketGroup{
ID: 1619,
ParentID: 1612,
Name: `Special Edition Frigates`,
Description: `Frigates which have been offered to capsuleers on occasion for limited periods.`,
},
1620: &MarketGroup{
ID: 1620,
ParentID: 1612,
Name: `Special Edition Battleships`,
Description: `Battleships which have been offered to capsuleers on occasion for limited periods.`,
},
1621: &MarketGroup{
ID: 1621,
ParentID: 1612,
Name: `Special Edition Heavy Assault Cruisers`,
Description: `Heavy Assault Cruisers which have been offered to capsuleers on occasion for limited periods.`,
},
1623: &MarketGroup{
ID: 1623,
ParentID: 1612,
Name: `Special Edition Assault Frigates`,
Description: `Assault Frigates which have been offered to capsuleers on occasion for limited periods.`,
},
1624: &MarketGroup{
ID: 1624,
ParentID: 1612,
Name: `Special Edition Logistics`,
Description: `Logistics Ships which have been offered to capsuleers on occasion for limited periods.`,
},
1625: &MarketGroup{
ID: 1625,
ParentID: 1112,
Name: `Caldari Subsystems`,
Description: `Caldari Subsystems`,
},
1626: &MarketGroup{
ID: 1626,
ParentID: 1112,
Name: `Minmatar Subsystems`,
Description: `Minmatar Subsystems`,
},
1627: &MarketGroup{
ID: 1627,
ParentID: 1112,
Name: `Gallente Subsystems`,
Description: `Gallente Subsystems`,
},
1631: &MarketGroup{
ID: 1631,
ParentID: 391,
Name: `Faction Shuttles`,
Description: `Faction Shuttles`,
},
1633: &MarketGroup{
ID: 1633,
ParentID: 779,
Name: `Command Bursts`,
Description: `Command Bursts`,
},
1639: &MarketGroup{
ID: 1639,
ParentID: 779,
Name: `Command Processors`,
Description: `Command Processors`,
},
1640: &MarketGroup{
ID: 1640,
ParentID: 779,
Name: `Jump Portal Generators`,
Description: `Jump Portal Generators`,
},
1641: &MarketGroup{
ID: 1641,
ParentID: 779,
Name: `Cynosural Field Generators`,
Description: `Cynosural Field Generators`,
},
1642: &MarketGroup{
ID: 1642,
ParentID: 779,
Name: `Clone Vat Bays`,
Description: `Clone Vat Bays`,
},
1643: &MarketGroup{
ID: 1643,
ParentID: 357,
Name: `Salvage Drones`,
Description: `Blueprints of salvage drone designs.`,
},
1646: &MarketGroup{
ID: 1646,
ParentID: 157,
Name: `Salvage Drones`,
Description: `Drones capable of salvaging wrecks for valuables`,
},
1650: &MarketGroup{
ID: 1650,
ParentID: 52,
Name: `Micro Jump Drives`,
Description: `Advanced propulsion technology that allows for a jump over a very short distance.`,
},
1651: &MarketGroup{
ID: 1651,
ParentID: 379,
Name: `Secure Containers`,
Description: `Secure Containers`,
},
1652: &MarketGroup{
ID: 1652,
ParentID: 379,
Name: `Audit Log Containers`,
Description: `Audit Log Containers`,
},
1653: &MarketGroup{
ID: 1653,
ParentID: 379,
Name: `Freight Containers`,
Description: `Freight Containers`,
},
1657: &MarketGroup{
ID: 1657,
ParentID: 379,
Name: `Standard Containers`,
Description: `Standard Containers`,
},
1658: &MarketGroup{
ID: 1658,
ParentID: 379,
Name: `Station Containers`,
Description: `Station Containers`,
},
1659: &MarketGroup{
ID: 1659,
ParentID: 0,
Name: `Special Edition Assets`,
Description: `Special Edition Assets are typically limited run items associated with past events in New Eden, such as the Alliance Tournament`,
},
1660: &MarketGroup{
ID: 1660,
ParentID: 1659,
Name: `Special Edition Tournament Cards`,
Description: `Special Edition Tournament Cards`,
},
1661: &MarketGroup{
ID: 1661,
ParentID: 1659,
Name: `Special Edition Commodities`,
Description: `Special Edition Commodities`,
},
1662: &MarketGroup{
ID: 1662,
ParentID: 1659,
Name: `Special Edition Apparel`,
Description: `Special Edition Apparel`,
},
1663: &MarketGroup{
ID: 1663,
ParentID: 1659,
Name: `Special Edition Festival Assets`,
Description: `Special Edition Festival Assets`,
},
1664: &MarketGroup{
ID: 1664,
ParentID: 1659,
Name: `Special Edition Blueprints`,
Description: `Special Edition Blueprints`,
},
1665: &MarketGroup{
ID: 1665,
ParentID: 540,
Name: `Thermal Coatings`,
Description: `Thermal Coatings`,
},
1666: &MarketGroup{
ID: 1666,
ParentID: 540,
Name: `Kinetic Coatings`,
Description: `Kinetic Coatings`,
},
1667: &MarketGroup{
ID: 1667,
ParentID: 540,
Name: `Explosive Coatings`,
Description: `Explosive Coatings`,
},
1668: &MarketGroup{
ID: 1668,
ParentID: 540,
Name: `EM Coatings`,
Description: `EM Coatings`,
},
1669: &MarketGroup{
ID: 1669,
ParentID: 14,
Name: `Layered Armor Coatings`,
Description: `Layered Armor Coatings`,
},
1670: &MarketGroup{
ID: 1670,
ParentID: 540,
Name: `Multispectrum Coatings`,
Description: `Multispectrum Coatings`,
},
1672: &MarketGroup{
ID: 1672,
ParentID: 133,
Name: `100mm Armor Plate`,
Description: `100mm Armor Plate`,
},
1673: &MarketGroup{
ID: 1673,
ParentID: 133,
Name: `200mm Armor Plate`,
Description: `200mm Armor Plate`,
},
1674: &MarketGroup{
ID: 1674,
ParentID: 133,
Name: `400mm Armor Plate`,
Description: `400mm Armor Plate`,
},
1675: &MarketGroup{
ID: 1675,
ParentID: 133,
Name: `800mm Armor Plate`,
Description: `800mm Armor Plate`,
},
1676: &MarketGroup{
ID: 1676,
ParentID: 133,
Name: `1600mm Armor Plate`,
Description: `1600mm Armor Plate`,
},
1678: &MarketGroup{
ID: 1678,
ParentID: 535,
Name: `Thermal Armor Hardeners`,
Description: `Thermal Armor Hardeners`,
},
1679: &MarketGroup{
ID: 1679,
ParentID: 535,
Name: `Kinetic Armor Hardeners`,
Description: ``,
},
1680: &MarketGroup{
ID: 1680,
ParentID: 535,
Name: `Explosive Armor Hardeners`,
Description: `Explosive Armor Hardeners`,
},
1681: &MarketGroup{
ID: 1681,
ParentID: 535,
Name: `EM Armor Hardeners`,
Description: `EM Armor Hardeners`,
},
1682: &MarketGroup{
ID: 1682,
ParentID: 541,
Name: `Explosive Energized Membranes`,
Description: `Explosive Energized Membranes`,
},
1683: &MarketGroup{
ID: 1683,
ParentID: 541,
Name: `Thermal Energized Membranes`,
Description: `Thermal Energized Membranes`,
},
1684: &MarketGroup{
ID: 1684,
ParentID: 541,
Name: `EM Energized Membranes`,
Description: `EM Energized Membranes`,
},
1685: &MarketGroup{
ID: 1685,
ParentID: 541,
Name: `Kinetic Energized Membranes`,
Description: `Kinetic Energized Membranes`,
},
1686: &MarketGroup{
ID: 1686,
ParentID: 541,
Name: `Multispectrum Energized Membranes`,
Description: `Multispectrum Energized Membranes`,
},
1687: &MarketGroup{
ID: 1687,
ParentID: 14,
Name: `Layered Energized Armor Membranes`,
Description: `Layered Energized Armor Membranes`,
},
1688: &MarketGroup{
ID: 1688,
ParentID: 550,
Name: `Thermal Shield Amplifiers`,
Description: `Thermal Shield Amplifiers`,
},
1689: &MarketGroup{
ID: 1689,
ParentID: 550,
Name: `Kinetic Shield Amplifiers`,
Description: `Kinetic Shield Amplifiers`,
},
1690: &MarketGroup{
ID: 1690,
ParentID: 550,
Name: `Explosive Shield Amplifiers`,
Description: `Explosive Shield Amplifiers`,
},
1691: &MarketGroup{
ID: 1691,
ParentID: 550,
Name: `EM Shield Amplifiers`,
Description: `EM Shield Amplifiers`,
},
1692: &MarketGroup{
ID: 1692,
ParentID: 553,
Name: `Thermal Shield Hardeners`,
Description: `Thermal Shield Hardeners`,
},
1693: &MarketGroup{
ID: 1693,
ParentID: 553,
Name: `Kinetic Shield Hardeners`,
Description: `Kinetic Shield Hardeners`,
},
1694: &MarketGroup{
ID: 1694,
ParentID: 553,
Name: `Explosive Shield Hardeners`,
Description: `Explosive Shield Hardeners`,
},
1695: &MarketGroup{
ID: 1695,
ParentID: 553,
Name: `EM Shield Hardeners`,
Description: `EM Shield Hardeners`,
},
1696: &MarketGroup{
ID: 1696,
ParentID: 553,
Name: `Multispectrum Shield Hardeners`,
Description: `Multispectrum Shield Hardeners`,
},
1697: &MarketGroup{
ID: 1697,
ParentID: 252,
Name: `Micro Jump Drives`,
Description: `Blueprints of Micro Jump Drives.`,
},
1698: &MarketGroup{
ID: 1698,
ParentID: 1612,
Name: `Special Edition Battlecruisers`,
Description: `Battlecruisers which have been offered to capsuleers on occasion for limited periods.`,
},
1699: &MarketGroup{
ID: 1699,
ParentID: 1612,
Name: `Special Edition Cruisers`,
Description: `Cruisers which have been offered to capsuleers on occasion for limited periods.`,
},
1700: &MarketGroup{
ID: 1700,
ParentID: 19,
Name: `Security Tags`,
Description: `These tags can be turned in, in low-security space, for a boost to security rating`,
},
1701: &MarketGroup{
ID: 1701,
ParentID: 1841,
Name: `Personal Hangar Arrays`,
Description: `Blueprints of Personal Hangar Arrays.`,
},
1702: &MarketGroup{
ID: 1702,
ParentID: 1285,
Name: `Personal Hangar Arrays`,
Description: `A large hangar structure, for easy storage of materials and modules.`,
},
1703: &MarketGroup{
ID: 1703,
ParentID: 1374,
Name: `Faction Battlecruisers`,
Description: `Battlecruisers designed by specific factions.`,
},
1704: &MarketGroup{
ID: 1704,
ParentID: 1703,
Name: `Navy Faction`,
Description: `Navy faction battlecruiser designs.`,
},
1707: &MarketGroup{
ID: 1707,
ParentID: 1710,
Name: `Scanning Upgrades`,
Description: `Blueprints of scanning upgrade modules`,
},
1708: &MarketGroup{
ID: 1708,
ParentID: 9,
Name: `Scanning Equipment`,
Description: `Modules that give or improve scanning capabilities of spaceships`,
},
1709: &MarketGroup{
ID: 1709,
ParentID: 1708,
Name: `Scanning Upgrades`,
Description: `Modules that modify scanning`,
},
1710: &MarketGroup{
ID: 1710,
ParentID: 209,
Name: `Scanning Equipment`,
Description: `Scanning equipment`,
},
1711: &MarketGroup{
ID: 1711,
ParentID: 209,
Name: `Harvest Equipment`,
Description: `Harvesting equipment`,
},
1712: &MarketGroup{
ID: 1712,
ParentID: 1711,
Name: `Salvagers`,
Description: `Blueprints of salvager modules.`,
},
1713: &MarketGroup{
ID: 1713,
ParentID: 9,
Name: `Harvest Equipment`,
Description: `Modules that give or improve harvesting capabilities of spaceships`,
},
1715: &MarketGroup{
ID: 1715,
ParentID: 1713,
Name: `Salvagers`,
Description: ``,
},
1716: &MarketGroup{
ID: 1716,
ParentID: 1710,
Name: `Survey Probe Launchers`,
Description: `Blueprints for building probes for surveying moons.`,
},
1717: &MarketGroup{
ID: 1717,
ParentID: 1708,
Name: `Survey Probe Launchers`,
Description: `Probes for surveying moons.`,
},
1718: &MarketGroup{
ID: 1718,
ParentID: 1708,
Name: `Analyzers`,
Description: `Modules that analyze old or recent computer systems in exploration sites.`,
},
1719: &MarketGroup{
ID: 1719,
ParentID: 944,
Name: `Capital Armor Rigs`,
Description: `Blueprints of Capital Armor Rigs.`,
},
1720: &MarketGroup{
ID: 1720,
ParentID: 945,
Name: `Capital Astronautic Rigs`,
Description: `Blueprints of Capital Astronautic Rigs.`,
},
1721: &MarketGroup{
ID: 1721,
ParentID: 946,
Name: `Capital Drone Rigs`,
Description: `Blueprints of Capital Drone Rigs.`,
},
1723: &MarketGroup{
ID: 1723,
ParentID: 948,
Name: `Capital Electronics Superiority Rigs`,
Description: `Blueprints of Capital Electronics Superiority Rigs.`,
},
1724: &MarketGroup{
ID: 1724,
ParentID: 949,
Name: `Capital Engineering Rigs`,
Description: `Blueprints of Capital Energy Grid Rigs.`,
},
1725: &MarketGroup{
ID: 1725,
ParentID: 950,
Name: `Capital Energy Weapon Rigs`,
Description: `Blueprints of Capital Energy Weapon Rigs.`,
},
1726: &MarketGroup{
ID: 1726,
ParentID: 951,
Name: `Capital Hybrid Weapon Rigs`,
Description: `Blueprints of Capital Hybrid Weapon Rigs.`,
},
1727: &MarketGroup{
ID: 1727,
ParentID: 952,
Name: `Capital Missile Launcher Rigs`,
Description: `Blueprints of Capital Missile Launcher Rigs.`,
},
1728: &MarketGroup{
ID: 1728,
ParentID: 953,
Name: `Capital Projectile Weapon Rigs`,
Description: `Blueprints of Capital Projectile Weapon Rigs.`,
},
1729: &MarketGroup{
ID: 1729,
ParentID: 954,
Name: `Capital Shield Rigs`,
Description: `Blueprints of Capital Shield Rigs.`,
},
1730: &MarketGroup{
ID: 1730,
ParentID: 956,
Name: `Capital Armor Rigs`,
Description: `Modifications that affect a capital starship's armor.`,
},
1731: &MarketGroup{
ID: 1731,
ParentID: 979,
Name: `Capital Projectile Weapon Rigs`,
Description: `Modifications that affect a capital starship's projectile weapons.`,
},
1732: &MarketGroup{
ID: 1732,
ParentID: 965,
Name: `Capital Shield Rigs`,
Description: `Modifications that affect a capital starship's shields.`,
},
1733: &MarketGroup{
ID: 1733,
ParentID: 964,
Name: `Capital Missile Launcher Rigs`,
Description: `Modifications that affect a capital starship's missile launchers.`,
},
1734: &MarketGroup{
ID: 1734,
ParentID: 963,
Name: `Capital Hybrid Weapon Rigs`,
Description: `Modifications that affect a capital starship's hybrid weapons.`,
},
1735: &MarketGroup{
ID: 1735,
ParentID: 962,
Name: `Capital Energy Weapon Rigs`,
Description: `Modifications that affect a capital starship's energy weapons.`,
},
1736: &MarketGroup{
ID: 1736,
ParentID: 961,
Name: `Capital Engineering Rigs`,
Description: `Modifications that affect a capital starship's energy grid.`,
},
1737: &MarketGroup{
ID: 1737,
ParentID: 960,
Name: `Capital Electronics Superiority Rigs`,
Description: `Modifications that affect a capital starship's electronic warfare capabilities.`,
},
1739: &MarketGroup{
ID: 1739,
ParentID: 958,
Name: `Capital Drone Rigs`,
Description: `Modifications that affect a capital starship's drones.`,
},
1740: &MarketGroup{
ID: 1740,
ParentID: 957,
Name: `Capital Astronautic Rigs`,
Description: `Modifications that affect a capital starship's astronautics.`,
},
1745: &MarketGroup{
ID: 1745,
ParentID: 150,
Name: `Armor`,
Description: `Skills pertaining to efficiently protecting the structural integrity of spaceships`,
},
1746: &MarketGroup{
ID: 1746,
ParentID: 150,
Name: `Neural Enhancement`,
Description: `Skills pertaining to managing boosters, implants and clone operations`,
},
1747: &MarketGroup{
ID: 1747,
ParentID: 150,
Name: `Shields`,
Description: `Skills pertaining to management of a spaceship's energy barriers`,
},
1748: &MarketGroup{
ID: 1748,
ParentID: 150,
Name: `Targeting`,
Description: `Skills pertaining to management of a spaceship's sensor and tracking systems`,
},
1761: &MarketGroup{
ID: 1761,
ParentID: 531,
Name: `Targeting Implants`,
Description: `Targeting Implants`,
},
1762: &MarketGroup{
ID: 1762,
ParentID: 531,
Name: `Resource Processing Implants`,
Description: `Resource Processing Implants`,
},
1763: &MarketGroup{
ID: 1763,
ParentID: 531,
Name: `Scanning Implants`,
Description: `Scanning Implants`,
},
1764: &MarketGroup{
ID: 1764,
ParentID: 531,
Name: `Neural Enhancement Implants`,
Description: `Biology Implants`,
},
1765: &MarketGroup{
ID: 1765,
ParentID: 1761,
Name: `Implant Slot 07`,
Description: `Implant Slot 07`,
},
1766: &MarketGroup{
ID: 1766,
ParentID: 1761,
Name: `Implant Slot 08`,
Description: `Implant Slot 08`,
},
1767: &MarketGroup{
ID: 1767,
ParentID: 1762,
Name: `Implant Slot 10`,
Description: `Implant Slot 10`,
},
1768: &MarketGroup{
ID: 1768,
ParentID: 1762,
Name: `Implant Slot 08`,
Description: `Implant Slot 08`,
},
1769: &MarketGroup{
ID: 1769,
ParentID: 1762,
Name: `Implant Slot 07`,
Description: `Implant Slot 07`,
},
1770: &MarketGroup{
ID: 1770,
ParentID: 1763,
Name: `Implant Slot 06`,
Description: `Implant Slot 06`,
},
1771: &MarketGroup{
ID: 1771,
ParentID: 1763,
Name: `Implant Slot 07`,
Description: `Implant Slot 07`,
},
1772: &MarketGroup{
ID: 1772,
ParentID: 1763,
Name: `Implant Slot 08`,
Description: `Implant Slot 08`,
},
1773: &MarketGroup{
ID: 1773,
ParentID: 1763,
Name: `Implant Slot 09`,
Description: `Implant Slot 09`,
},
1774: &MarketGroup{
ID: 1774,
ParentID: 1763,
Name: `Implant Slot 10`,
Description: `Implant Slot 10`,
},
1775: &MarketGroup{
ID: 1775,
ParentID: 1764,
Name: `Implant Slot 08`,
Description: `Implant Slot 08`,
},
1776: &MarketGroup{
ID: 1776,
ParentID: 1764,
Name: `Implant Slot 09`,
Description: `Implant Slot 09`,
},
1777: &MarketGroup{
ID: 1777,
ParentID: 1764,
Name: `Implant Slot 10`,
Description: `Implant Slot 10`,
},
1779: &MarketGroup{
ID: 1779,
ParentID: 1111,
Name: `Resource Processing Rigs`,
Description: `Permanent modification of a ship's resource processing facilities.`,
},
1780: &MarketGroup{
ID: 1780,
ParentID: 1111,
Name: `Scanning Rigs`,
Description: `Permanent modification of a ship's scanning facilities.`,
},
1781: &MarketGroup{
ID: 1781,
ParentID: 1111,
Name: `Targeting Rigs`,
Description: `Permanent modification of a ship's targeting facilities.`,
},
1782: &MarketGroup{
ID: 1782,
ParentID: 1779,
Name: `Small Resource Processing Rigs`,
Description: `Modifications that affect a small starship's resource processing.`,
},
1783: &MarketGroup{
ID: 1783,
ParentID: 1779,
Name: `Medium Resource Processing Rigs`,
Description: `Modifications that affect a medium starship's resource processing.`,
},
1784: &MarketGroup{
ID: 1784,
ParentID: 1779,
Name: `Large Resource Processing Rigs`,
Description: `Modifications that affect a large starship's resource processing.`,
},
1785: &MarketGroup{
ID: 1785,
ParentID: 1779,
Name: `Capital Resource Processing Rigs`,
Description: `Modifications that affect a capital starship's resource processing.`,
},
1786: &MarketGroup{
ID: 1786,
ParentID: 1780,
Name: `Small Scanning Rigs`,
Description: `Modifications that affect a small starship's scanning.`,
},
1787: &MarketGroup{
ID: 1787,
ParentID: 1780,
Name: `Medium Scanning Rigs`,
Description: `Modifications that affect a medium starship's scanning.`,
},
1788: &MarketGroup{
ID: 1788,
ParentID: 1780,
Name: `Large Scanning Rigs`,
Description: `Modifications that affect a large starship's scanning.`,
},
1789: &MarketGroup{
ID: 1789,
ParentID: 1780,
Name: `Capital Scanning Rigs`,
Description: `Modifications that affect a capital starship's scanning.`,
},
1790: &MarketGroup{
ID: 1790,
ParentID: 1781,
Name: `Small Targeting Rigs`,
Description: `Modifications that affect a small starship's targeting.`,
},
1791: &MarketGroup{
ID: 1791,
ParentID: 1781,
Name: `Medium Targeting Rigs`,
Description: `Modifications that affect a medium starship's targeting.`,
},
1792: &MarketGroup{
ID: 1792,
ParentID: 1781,
Name: `Large Targeting Rigs`,
Description: `Modifications that affect a large starship's targeting.`,
},
1793: &MarketGroup{
ID: 1793,
ParentID: 1781,
Name: `Capital Targeting Rigs`,
Description: `Modifications that affect a capital starship's targeting.`,
},
1794: &MarketGroup{
ID: 1794,
ParentID: 943,
Name: `Resource Processing Rigs`,
Description: `Permanent modification of a ship's resource processing.`,
},
1795: &MarketGroup{
ID: 1795,
ParentID: 943,
Name: `Scanning Rigs`,
Description: `Permanent modification of a ship's scanning facilities.`,
},
1796: &MarketGroup{
ID: 1796,
ParentID: 943,
Name: `Targeting Rigs`,
Description: `Permanent modification of a ship's targeting facilities.`,
},
1797: &MarketGroup{
ID: 1797,
ParentID: 1794,
Name: `Capital Resource Processing Rigs`,
Description: `Blueprints of Capital Resource Processing Rigs.`,
},
1798: &MarketGroup{
ID: 1798,
ParentID: 1794,
Name: `Large Resource Processing Rigs`,
Description: `Blueprints of Large Resource Processing Rigs.`,
},
1799: &MarketGroup{
ID: 1799,
ParentID: 1794,
Name: `Medium Resource Processing Rigs`,
Description: `Blueprints of Medium Resource Processing Rigs.`,
},
1800: &MarketGroup{
ID: 1800,
ParentID: 1794,
Name: `Small Resource Processing Rigs`,
Description: `Blueprints of Small Resource Processing Rigs.`,
},
1801: &MarketGroup{
ID: 1801,
ParentID: 1795,
Name: `Small Scanning Rigs`,
Description: `Blueprints of Small Scanning Rigs.`,
},
1802: &MarketGroup{
ID: 1802,
ParentID: 1795,
Name: `Medium Scanning Rigs`,
Description: `Blueprints of Medium Scanning Rigs.`,
},
1803: &MarketGroup{
ID: 1803,
ParentID: 1795,
Name: `Large Scanning Rigs`,
Description: `Blueprints of Large Scanning Rigs.`,
},
1804: &MarketGroup{
ID: 1804,
ParentID: 1795,
Name: `Capital Scanning Rigs`,
Description: `Blueprints of Capital Scanning Rigs.`,
},
1805: &MarketGroup{
ID: 1805,
ParentID: 1796,
Name: `Small Targeting Rigs`,
Description: `Blueprints of Small Targeting Rigs.`,
},
1806: &MarketGroup{
ID: 1806,
ParentID: 1796,
Name: `Medium Targeting Rigs`,
Description: `Blueprints of Medium Targeting Rigs.`,
},
1807: &MarketGroup{
ID: 1807,
ParentID: 1796,
Name: `Large Targeting Rigs`,
Description: `Blueprints of Large Targeting Rigs.`,
},
1808: &MarketGroup{
ID: 1808,
ParentID: 1796,
Name: `Capital Targeting Rigs`,
Description: `Blueprints of Capital Targeting Rigs.`,
},
1809: &MarketGroup{
ID: 1809,
ParentID: 1710,
Name: `Analyzers`,
Description: `Blueprints of analyzers.`,
},
1810: &MarketGroup{
ID: 1810,
ParentID: 1660,
Name: `Alliance Tournament Cards`,
Description: `Alliance Tournament Cards`,
},
1811: &MarketGroup{
ID: 1811,
ParentID: 1660,
Name: `New Eden Open Cards`,
Description: `New Eden Open Cards`,
},
1812: &MarketGroup{
ID: 1812,
ParentID: 1810,
Name: `Alliance Tournament All Star Teams`,
Description: `Alliance Tournament All Star Teams`,
},
1813: &MarketGroup{
ID: 1813,
ParentID: 1811,
Name: `NEO YC 114 Team Cards`,
Description: `NEO YC 114 Team Cards`,
},
1814: &MarketGroup{
ID: 1814,
ParentID: 1659,
Name: `Special Edition Implants`,
Description: `Special Edition Implants`,
},
1815: &MarketGroup{
ID: 1815,
ParentID: 4,
Name: `Corvettes`,
Description: `Small entry-level and expendable ships`,
},
1816: &MarketGroup{
ID: 1816,
ParentID: 1815,
Name: `Amarr`,
Description: `Amarr rookie ship designs.`,
},
1817: &MarketGroup{
ID: 1817,
ParentID: 1815,
Name: `Caldari`,
Description: `Caldari rookie ship designs.`,
},
1818: &MarketGroup{
ID: 1818,
ParentID: 1815,
Name: `Gallente`,
Description: `Gallente rookie ship designs.`,
},
1819: &MarketGroup{
ID: 1819,
ParentID: 1815,
Name: `Minmatar`,
Description: `Minmatar rookie ship designs.`,
},
1822: &MarketGroup{
ID: 1822,
ParentID: 1407,
Name: `Tattoos`,
Description: `Tattoos for characters`,
},
1823: &MarketGroup{
ID: 1823,
ParentID: 150,
Name: `Planet Management`,
Description: `Skills required for the control and remote operation of planetary colonies`,
},
1824: &MarketGroup{
ID: 1824,
ParentID: 150,
Name: `Subsystems`,
Description: `Skills pertaining to the use and control of ship subsystems`,
},
1827: &MarketGroup{
ID: 1827,
ParentID: 140,
Name: `Rapid Heavy Missile Launchers`,
Description: ``,
},
1828: &MarketGroup{
ID: 1828,
ParentID: 406,
Name: `Mobile Depots`,
Description: `Blueprints for Mobile Depots`,
},
1829: &MarketGroup{
ID: 1829,
ParentID: 406,
Name: `Mobile Cynosural Inhibitors`,
Description: `Blueprints for Mobile Cynosural Inhibitors`,
},
1830: &MarketGroup{
ID: 1830,
ParentID: 406,
Name: `Mobile Tractor Units`,
Description: `Blueprints of Mobile Tractor Units`,
},
1831: &MarketGroup{
ID: 1831,
ParentID: 404,
Name: `Mobile Depots`,
Description: ``,
},
1832: &MarketGroup{
ID: 1832,
ParentID: 404,
Name: `Mobile Cynosural Inhibitors`,
Description: ``,
},
1833: &MarketGroup{
ID: 1833,
ParentID: 404,
Name: `Mobile Tractor Units`,
Description: ``,
},
1834: &MarketGroup{
ID: 1834,
ParentID: 406,
Name: `Mobile Siphon Units`,
Description: `Blueprints of Mobile Siphon Units.`,
},
1835: &MarketGroup{
ID: 1835,
ParentID: 404,
Name: `Mobile Siphon Units`,
Description: `Steals resources from Player Owned Structures.`,
},
1836: &MarketGroup{
ID: 1836,
ParentID: 1407,
Name: `Augmentations`,
Description: `a market group for various prosthetics`,
},
1837: &MarketGroup{
ID: 1837,
ParentID: 1612,
Name: `Special Edition Recon Ships`,
Description: ``,
},
1838: &MarketGroup{
ID: 1838,
ParentID: 1612,
Name: `Special Edition Covert Ops`,
Description: ``,
},
1840: &MarketGroup{
ID: 1840,
ParentID: 19,
Name: `Covert Research Tools`,
Description: `These covert operation items are in demand by various factions`,
},
1841: &MarketGroup{
ID: 1841,
ParentID: 1338,
Name: `Starbase Structures`,
Description: `Blueprints of deployable structures.`,
},
1842: &MarketGroup{
ID: 1842,
ParentID: 406,
Name: `Mobile Micro Jump Units`,
Description: ``,
},
1843: &MarketGroup{
ID: 1843,
ParentID: 406,
Name: `Mobile Scan Inhibitors`,
Description: ``,
},
1844: &MarketGroup{
ID: 1844,
ParentID: 404,
Name: `Mobile Micro Jump Units`,
Description: ``,
},
1845: &MarketGroup{
ID: 1845,
ParentID: 404,
Name: `Mobile Scan Inhibitors`,
Description: ``,
},
1846: &MarketGroup{
ID: 1846,
ParentID: 19,
Name: `Bounty Encrypted Bonds`,
Description: `These tags can be turned in, at empire navy stations, in return for ISK`,
},
1847: &MarketGroup{
ID: 1847,
ParentID: 404,
Name: `Encounter Surveillance Systems`,
Description: `Deployables that monitor bounty payout activity in the system`,
},
1849: &MarketGroup{
ID: 1849,
ParentID: 2,
Name: `Reaction Formulas`,
Description: `Chemical reactions used in Refineries to create many useful materials, as well as various booster drugs`,
},
1850: &MarketGroup{
ID: 1850,
ParentID: 1849,
Name: `Simple Reactions`,
Description: `Chemical processes through which raw materials are made into compounds`,
},
1851: &MarketGroup{
ID: 1851,
ParentID: 1849,
Name: `Complex Reactions`,
Description: `Chemical processes through which compounds are combined into complex materials`,
},
1852: &MarketGroup{
ID: 1852,
ParentID: 1849,
Name: `Simple Biochemical Reactions`,
Description: `Biochemical processes through which raw materials are made into compounds`,
},
1853: &MarketGroup{
ID: 1853,
ParentID: 1849,
Name: `Complex Biochemical Reactions`,
Description: `Biochemical processes through which compounds are combined into complex materials`,
},
1854: &MarketGroup{
ID: 1854,
ParentID: 1849,
Name: `Polymer Reactions`,
Description: `Chemical processes through which raw materials are made into hybrid polymers`,
},
1855: &MarketGroup{
ID: 1855,
ParentID: 1031,
Name: `Ice Ores`,
Description: `Materials gathered from ice asteroids.`,
},
1856: &MarketGroup{
ID: 1856,
ParentID: 1031,
Name: `Alloys & Compounds`,
Description: `Various compounds composed of asteroid ores.`,
},
1857: &MarketGroup{
ID: 1857,
ParentID: 533,
Name: `Minerals`,
Description: `Minerals harvested from refined asteroid ore.`,
},
1858: &MarketGroup{
ID: 1858,
ParentID: 1034,
Name: `Booster Materials`,
Description: `Various types of biochemical boosters, most often used by pod pilots.`,
},
1859: &MarketGroup{
ID: 1859,
ParentID: 1032,
Name: `Fullerenes`,
Description: `This rare form of gas can only be harvested in wormhole space.`,
},
1860: &MarketGroup{
ID: 1860,
ParentID: 1034,
Name: `Polymer Materials`,
Description: `Material made from combining fullerenes.`,
},
1861: &MarketGroup{
ID: 1861,
ParentID: 533,
Name: `Salvage Materials`,
Description: ``,
},
1862: &MarketGroup{
ID: 1862,
ParentID: 1861,
Name: `Ancient Salvaged Materials`,
Description: `Despite being very old, these materials hold unimaginable technological secrets.`,
},
1863: &MarketGroup{
ID: 1863,
ParentID: 1861,
Name: `Salvaged Materials`,
Description: `Materials salvaged from destroyed starships.`,
},
1864: &MarketGroup{
ID: 1864,
ParentID: 1021,
Name: `Construction Platforms`,
Description: `Outpost Construction Platforms`,
},
1865: &MarketGroup{
ID: 1865,
ParentID: 1035,
Name: `Structure Components`,
Description: `Modular items used in the manufacture of Space Stations.`,
},
1866: &MarketGroup{
ID: 1866,
ParentID: 1022,
Name: `Amarr Improvement Platforms`,
Description: `Improvements specifically designed for Amarrian outposts.`,
},
1867: &MarketGroup{
ID: 1867,
ParentID: 1022,
Name: `Caldari Improvement Platforms`,
Description: `Improvements specifically designed for Caldari outposts.`,
},
1868: &MarketGroup{
ID: 1868,
ParentID: 1022,
Name: `Gallente Improvement Platforms`,
Description: `Improvements specifically designed for Gallentean outposts.`,
},
1869: &MarketGroup{
ID: 1869,
ParentID: 1022,
Name: `Minmatar Improvement Platforms`,
Description: `Improvements specifically designed for Minmatar outposts.`,
},
1870: &MarketGroup{
ID: 1870,
ParentID: 1035,
Name: `Fuel Blocks`,
Description: `Assembled blocks of fuel for starbases and other structures.`,
},
1872: &MarketGroup{
ID: 1872,
ParentID: 475,
Name: `Research Equipment`,
Description: `Various items used in research and invention`,
},
1873: &MarketGroup{
ID: 1873,
ParentID: 1872,
Name: `Decryptors`,
Description: ``,
},
1880: &MarketGroup{
ID: 1880,
ParentID: 1872,
Name: `Datacores`,
Description: `Research Data`,
},
1883: &MarketGroup{
ID: 1883,
ParentID: 1035,
Name: `Advanced Capital Components`,
Description: `Components used in the manufacture of Tech II capital ships.`,
},
1884: &MarketGroup{
ID: 1884,
ParentID: 1883,
Name: `Amarr`,
Description: ``,
},
1885: &MarketGroup{
ID: 1885,
ParentID: 1883,
Name: `Caldari`,
Description: ``,
},
1886: &MarketGroup{
ID: 1886,
ParentID: 1883,
Name: `Gallente`,
Description: ``,
},
1887: &MarketGroup{
ID: 1887,
ParentID: 1883,
Name: `Minmatar`,
Description: ``,
},
1888: &MarketGroup{
ID: 1888,
ParentID: 65,
Name: `Gallente`,
Description: ``,
},
1889: &MarketGroup{
ID: 1889,
ParentID: 65,
Name: `Minmatar`,
Description: ``,
},
1897: &MarketGroup{
ID: 1897,
ParentID: 533,
Name: `Faction Materials`,
Description: ``,
},
1898: &MarketGroup{
ID: 1898,
ParentID: 1897,
Name: `<NAME>`,
Description: `Materials used in the construction of specific factional equipment.`,
},
1899: &MarketGroup{
ID: 1899,
ParentID: 1897,
Name: `Blood Raiders`,
Description: `Materials used in the construction of specific factional equipment.`,
},
1900: &MarketGroup{
ID: 1900,
ParentID: 1897,
Name: `Guristas`,
Description: `Materials used in the construction of specific factional equipment.`,
},
1901: &MarketGroup{
ID: 1901,
ParentID: 1897,
Name: `Serpentis`,
Description: `Materials used in the construction of specific factional equipment.`,
},
1902: &MarketGroup{
ID: 1902,
ParentID: 1897,
Name: `Sleeper`,
Description: `Artifacts of the Sleeper civilization.`,
},
1903: &MarketGroup{
ID: 1903,
ParentID: 1897,
Name: `Talocan`,
Description: `Artifacts of the Talocan civilization.`,
},
1904: &MarketGroup{
ID: 1904,
ParentID: 1897,
Name: `<NAME>`,
Description: `Artifacts of the Yan Jung civilization.`,
},
1905: &MarketGroup{
ID: 1905,
ParentID: 1897,
Name: `Takmahl`,
Description: `Artifacts of the Takmahl civilization.`,
},
1906: &MarketGroup{
ID: 1906,
ParentID: 1897,
Name: `<NAME>`,
Description: `Salvaged bits and pieces from destroyed Rogue Drones, can be integrated with current drone technology for improved performance.`,
},
1907: &MarketGroup{
ID: 1907,
ParentID: 1872,
Name: `R.Db`,
Description: `Databases used in Tech II research jobs.`,
},
1908: &MarketGroup{
ID: 1908,
ParentID: 1035,
Name: `R.A.M.`,
Description: `Construction tools used in Tech II manufacturing.`,
},
1909: &MarketGroup{
ID: 1909,
ParentID: 1872,
Name: `Ancient Relics`,
Description: ``,
},
1912: &MarketGroup{
ID: 1912,
ParentID: 798,
Name: `Construction Platforms`,
Description: ``,
},
1913: &MarketGroup{
ID: 1913,
ParentID: 800,
Name: `Structure Components`,
Description: ``,
},
1918: &MarketGroup{
ID: 1918,
ParentID: 800,
Name: `R.A.M.`,
Description: ``,
},
1919: &MarketGroup{
ID: 1919,
ParentID: 1522,
Name: `R.Db`,
Description: ``,
},
1920: &MarketGroup{
ID: 1920,
ParentID: 800,
Name: `Fuel Blocks`,
Description: ``,
},
1921: &MarketGroup{
ID: 1921,
ParentID: 1285,
Name: `Compression Array`,
Description: `Anchorable compression structures.`,
},
1922: &MarketGroup{
ID: 1922,
ParentID: 0,
Name: `Pilot's Services`,
Description: `Services available to capsuleers include PLEX, Multiple Pilot Training, Pilot's Body Resculpt Certificates, and Skill Trading items`,
},
1923: &MarketGroup{
ID: 1923,
ParentID: 1922,
Name: `PLEX`,
Description: `PLEX is an item that can be traded between players on the regional market. PLEX can also be used in the New Eden Store to upgrade your account to Omega Clone State, purchase virtual goods, and activate other account services.`,
},
1924: &MarketGroup{
ID: 1924,
ParentID: 1364,
Name: `Expedition Frigates`,
Description: ``,
},
1931: &MarketGroup{
ID: 1931,
ParentID: 132,
Name: `Warp Accelerators`,
Description: ``,
},
1932: &MarketGroup{
ID: 1932,
ParentID: 1612,
Name: `Special Edition Interceptors`,
Description: ``,
},
1935: &MarketGroup{
ID: 1935,
ParentID: 657,
Name: `Warp Disruptors`,
Description: ``,
},
1936: &MarketGroup{
ID: 1936,
ParentID: 657,
Name: `Warp Scramblers`,
Description: ``,
},
1937: &MarketGroup{
ID: 1937,
ParentID: 657,
Name: `Interdiction Sphere Launchers`,
Description: ``,
},
1938: &MarketGroup{
ID: 1938,
ParentID: 1566,
Name: `Warp Disruptors`,
Description: ``,
},
1939: &MarketGroup{
ID: 1939,
ParentID: 1566,
Name: `Warp Scramblers`,
Description: ``,
},
1940: &MarketGroup{
ID: 1940,
ParentID: 1566,
Name: `Interdiction Sphere Launchers`,
Description: ``,
},
1941: &MarketGroup{
ID: 1941,
ParentID: 132,
Name: `Jump Economizers`,
Description: ``,
},
1942: &MarketGroup{
ID: 1942,
ParentID: 1922,
Name: `Pilot's Services`,
Description: `Services available for various customization for capsuleers`,
},
1943: &MarketGroup{
ID: 1943,
ParentID: 1407,
Name: `Headwear`,
Description: ``,
},
1944: &MarketGroup{
ID: 1944,
ParentID: 1402,
Name: `Bottoms, extras`,
Description: ``,
},
1945: &MarketGroup{
ID: 1945,
ParentID: 1841,
Name: `Compression Array`,
Description: `Blueprint of the Compression Array.`,
},
1949: &MarketGroup{
ID: 1949,
ParentID: 787,
Name: `ORE`,
Description: `Designs for ORE freighters.`,
},
1950: &MarketGroup{
ID: 1950,
ParentID: 766,
Name: `ORE`,
Description: `Freighters designed by ORE.`,
},
1951: &MarketGroup{
ID: 1951,
ParentID: 1373,
Name: `Tactical Destroyers`,
Description: ``,
},
1952: &MarketGroup{
ID: 1952,
ParentID: 1951,
Name: `Amarr`,
Description: ``,
},
1953: &MarketGroup{
ID: 1953,
ParentID: 1951,
Name: `Minmatar`,
Description: ``,
},
1954: &MarketGroup{
ID: 1954,
ParentID: 0,
Name: `Ship SKINs`,
Description: `Super Kerr-Induced Nanocoatings that modify the visual look of a ship`,
},
1955: &MarketGroup{
ID: 1955,
ParentID: 1954,
Name: `Battlecruisers`,
Description: ``,
},
1956: &MarketGroup{
ID: 1956,
ParentID: 2101,
Name: `Amarr`,
Description: ``,
},
1957: &MarketGroup{
ID: 1957,
ParentID: 2101,
Name: `Caldari`,
Description: ``,
},
1958: &MarketGroup{
ID: 1958,
ParentID: 2101,
Name: `Gallente`,
Description: ``,
},
1959: &MarketGroup{
ID: 1959,
ParentID: 2101,
Name: `Minmatar`,
Description: ``,
},
1960: &MarketGroup{
ID: 1960,
ParentID: 1954,
Name: `Battleships`,
Description: ``,
},
1961: &MarketGroup{
ID: 1961,
ParentID: 1960,
Name: `Faction Battleships`,
Description: ``,
},
1962: &MarketGroup{
ID: 1962,
ParentID: 1960,
Name: `Standard Battleships`,
Description: ``,
},
1963: &MarketGroup{
ID: 1963,
ParentID: 1961,
Name: `Pirate Faction`,
Description: ``,
},
1964: &MarketGroup{
ID: 1964,
ParentID: 1962,
Name: `Amarr`,
Description: ``,
},
1965: &MarketGroup{
ID: 1965,
ParentID: 1962,
Name: `Caldari`,
Description: ``,
},
1966: &MarketGroup{
ID: 1966,
ParentID: 1962,
Name: `Gallente`,
Description: ``,
},
1967: &MarketGroup{
ID: 1967,
ParentID: 1962,
Name: `Minmatar`,
Description: ``,
},
1968: &MarketGroup{
ID: 1968,
ParentID: 1954,
Name: `Capital Ships`,
Description: ``,
},
1969: &MarketGroup{
ID: 1969,
ParentID: 2114,
Name: `ORE`,
Description: ``,
},
1970: &MarketGroup{
ID: 1970,
ParentID: 1968,
Name: `Carriers`,
Description: ``,
},
1971: &MarketGroup{
ID: 1971,
ParentID: 1968,
Name: `Dreadnoughts`,
Description: ``,
},
1972: &MarketGroup{
ID: 1972,
ParentID: 1968,
Name: `Freighters`,
Description: ``,
},
1973: &MarketGroup{
ID: 1973,
ParentID: 1968,
Name: `Titans`,
Description: ``,
},
1974: &MarketGroup{
ID: 1974,
ParentID: 2374,
Name: `Amarr`,
Description: ``,
},
1975: &MarketGroup{
ID: 1975,
ParentID: 2374,
Name: `Caldari`,
Description: ``,
},
1976: &MarketGroup{
ID: 1976,
ParentID: 2374,
Name: `Gallente`,
Description: ``,
},
1977: &MarketGroup{
ID: 1977,
ParentID: 2374,
Name: `Minmatar`,
Description: ``,
},
1978: &MarketGroup{
ID: 1978,
ParentID: 2381,
Name: `Amarr`,
Description: ``,
},
1979: &MarketGroup{
ID: 1979,
ParentID: 2381,
Name: `Gallente`,
Description: ``,
},
1980: &MarketGroup{
ID: 1980,
ParentID: 2377,
Name: `Amarr`,
Description: ``,
},
1981: &MarketGroup{
ID: 1981,
ParentID: 2377,
Name: `Caldari`,
Description: ``,
},
1982: &MarketGroup{
ID: 1982,
ParentID: 2377,
Name: `Gallente`,
Description: ``,
},
1983: &MarketGroup{
ID: 1983,
ParentID: 2377,
Name: `Minmatar`,
Description: ``,
},
1984: &MarketGroup{
ID: 1984,
ParentID: 1972,
Name: `Amarr`,
Description: ``,
},
1985: &MarketGroup{
ID: 1985,
ParentID: 1972,
Name: `Caldari`,
Description: ``,
},
1986: &MarketGroup{
ID: 1986,
ParentID: 1972,
Name: `Gallente`,
Description: ``,
},
1987: &MarketGroup{
ID: 1987,
ParentID: 1972,
Name: `Minmatar`,
Description: ``,
},
1988: &MarketGroup{
ID: 1988,
ParentID: 1954,
Name: `Cruisers`,
Description: ``,
},
1989: &MarketGroup{
ID: 1989,
ParentID: 1954,
Name: `Destroyers`,
Description: ``,
},
1990: &MarketGroup{
ID: 1990,
ParentID: 2028,
Name: `Amarr`,
Description: ``,
},
1991: &MarketGroup{
ID: 1991,
ParentID: 2028,
Name: `Caldari`,
Description: ``,
},
1992: &MarketGroup{
ID: 1992,
ParentID: 2028,
Name: `Gallente`,
Description: ``,
},
1993: &MarketGroup{
ID: 1993,
ParentID: 2028,
Name: `Minmatar`,
Description: ``,
},
1994: &MarketGroup{
ID: 1994,
ParentID: 2035,
Name: `Amarr`,
Description: ``,
},
1995: &MarketGroup{
ID: 1995,
ParentID: 2035,
Name: `Caldari`,
Description: ``,
},
1996: &MarketGroup{
ID: 1996,
ParentID: 2035,
Name: `Gallente`,
Description: ``,
},
1997: &MarketGroup{
ID: 1997,
ParentID: 2035,
Name: `Minmatar`,
Description: ``,
},
1998: &MarketGroup{
ID: 1998,
ParentID: 1954,
Name: `Frigates`,
Description: ``,
},
1999: &MarketGroup{
ID: 1999,
ParentID: 1998,
Name: `Faction Frigates`,
Description: ``,
},
2000: &MarketGroup{
ID: 2000,
ParentID: 1999,
Name: `<NAME>`,
Description: ``,
},
2001: &MarketGroup{
ID: 2001,
ParentID: 1998,
Name: `Standard Frigates`,
Description: ``,
},
2002: &MarketGroup{
ID: 2002,
ParentID: 2001,
Name: `Amarr`,
Description: ``,
},
2003: &MarketGroup{
ID: 2003,
ParentID: 2001,
Name: `Caldari`,
Description: ``,
},
2004: &MarketGroup{
ID: 2004,
ParentID: 2001,
Name: `Gallente`,
Description: ``,
},
2005: &MarketGroup{
ID: 2005,
ParentID: 2001,
Name: `Minmatar`,
Description: ``,
},
2006: &MarketGroup{
ID: 2006,
ParentID: 1954,
Name: `Industrial Ships`,
Description: ``,
},
2007: &MarketGroup{
ID: 2007,
ParentID: 2085,
Name: `Amarr`,
Description: ``,
},
2008: &MarketGroup{
ID: 2008,
ParentID: 2085,
Name: `Caldari`,
Description: ``,
},
2009: &MarketGroup{
ID: 2009,
ParentID: 2085,
Name: `Gallente`,
Description: ``,
},
2010: &MarketGroup{
ID: 2010,
ParentID: 2085,
Name: `Minmatar`,
Description: ``,
},
2011: &MarketGroup{
ID: 2011,
ParentID: 1954,
Name: `Mining Barges`,
Description: ``,
},
2012: &MarketGroup{
ID: 2012,
ParentID: 2011,
Name: `Exhumers`,
Description: ``,
},
2013: &MarketGroup{
ID: 2013,
ParentID: 19,
Name: `Unknown Components`,
Description: `Mysterious pieces of technology of unknown origin`,
},
2014: &MarketGroup{
ID: 2014,
ParentID: 2015,
Name: `Industrial Upgrades`,
Description: `Blueprints of Industrial Upgrades.`,
},
2015: &MarketGroup{
ID: 2015,
ParentID: 1338,
Name: `Sovereignty and Infrastructure`,
Description: ``,
},
2016: &MarketGroup{
ID: 2016,
ParentID: 2015,
Name: `Military Upgrades`,
Description: `Blueprints of Military Upgrades`,
},
2017: &MarketGroup{
ID: 2017,
ParentID: 2015,
Name: `Strategic Upgrades`,
Description: `Blueprints of Strategic Upgrades`,
},
2018: &MarketGroup{
ID: 2018,
ParentID: 1708,
Name: `Entosis Links`,
Description: ``,
},
2020: &MarketGroup{
ID: 2020,
ParentID: 1710,
Name: `Entosis Links`,
Description: `Entosis Link Blueprints`,
},
2021: &MarketGroup{
ID: 2021,
ParentID: 1951,
Name: `Caldari`,
Description: ``,
},
2022: &MarketGroup{
ID: 2022,
ParentID: 1960,
Name: `Advanced Battleships`,
Description: ``,
},
2023: &MarketGroup{
ID: 2023,
ParentID: 2022,
Name: `Marauders`,
Description: ``,
},
2024: &MarketGroup{
ID: 2024,
ParentID: 2023,
Name: `Amarr`,
Description: ``,
},
2025: &MarketGroup{
ID: 2025,
ParentID: 2023,
Name: `Caldari`,
Description: ``,
},
2026: &MarketGroup{
ID: 2026,
ParentID: 2023,
Name: `Gallente`,
Description: ``,
},
2027: &MarketGroup{
ID: 2027,
ParentID: 2023,
Name: `Minmatar`,
Description: ``,
},
2028: &MarketGroup{
ID: 2028,
ParentID: 1988,
Name: `Standard Cruisers`,
Description: ``,
},
2029: &MarketGroup{
ID: 2029,
ParentID: 1988,
Name: `Faction Cruisers`,
Description: ``,
},
2030: &MarketGroup{
ID: 2030,
ParentID: 2029,
Name: `Pirate Faction`,
Description: ``,
},
2031: &MarketGroup{
ID: 2031,
ParentID: 1999,
Name: `Pirate Faction`,
Description: ``,
},
2032: &MarketGroup{
ID: 2032,
ParentID: 143,
Name: `Missile Guidance Computers`,
Description: ``,
},
2033: &MarketGroup{
ID: 2033,
ParentID: 143,
Name: `Missile Guidance Enhancers`,
Description: ``,
},
2034: &MarketGroup{
ID: 2034,
ParentID: 1951,
Name: `Gallente`,
Description: ``,
},
2035: &MarketGroup{
ID: 2035,
ParentID: 1989,
Name: `Standard Destroyers`,
Description: ``,
},
2036: &MarketGroup{
ID: 2036,
ParentID: 1989,
Name: `Advanced Destroyers`,
Description: ``,
},
2037: &MarketGroup{
ID: 2037,
ParentID: 2036,
Name: `Interdictors`,
Description: ``,
},
2038: &MarketGroup{
ID: 2038,
ParentID: 2037,
Name: `Amarr`,
Description: ``,
},
2039: &MarketGroup{
ID: 2039,
ParentID: 2037,
Name: `Caldari`,
Description: ``,
},
2040: &MarketGroup{
ID: 2040,
ParentID: 2037,
Name: `Gallente`,
Description: ``,
},
2041: &MarketGroup{
ID: 2041,
ParentID: 2037,
Name: `Minmatar`,
Description: ``,
},
2042: &MarketGroup{
ID: 2042,
ParentID: 1998,
Name: `Advanced Frigates`,
Description: ``,
},
2043: &MarketGroup{
ID: 2043,
ParentID: 2042,
Name: `Assault Frigates`,
Description: ``,
},
2044: &MarketGroup{
ID: 2044,
ParentID: 2042,
Name: `Covert Ops`,
Description: ``,
},
2045: &MarketGroup{
ID: 2045,
ParentID: 2042,
Name: `Electronic Attack Frigates`,
Description: ``,
},
2046: &MarketGroup{
ID: 2046,
ParentID: 2042,
Name: `Interceptors`,
Description: ``,
},
2047: &MarketGroup{
ID: 2047,
ParentID: 2043,
Name: `Amarr`,
Description: ``,
},
2048: &MarketGroup{
ID: 2048,
ParentID: 2043,
Name: `Caldari`,
Description: ``,
},
2049: &MarketGroup{
ID: 2049,
ParentID: 2043,
Name: `Gallente`,
Description: ``,
},
2050: &MarketGroup{
ID: 2050,
ParentID: 2043,
Name: `Minmatar`,
Description: ``,
},
2051: &MarketGroup{
ID: 2051,
ParentID: 2044,
Name: `Amarr`,
Description: ``,
},
2052: &MarketGroup{
ID: 2052,
ParentID: 2044,
Name: `Caldari`,
Description: ``,
},
2053: &MarketGroup{
ID: 2053,
ParentID: 2044,
Name: `Gallente`,
Description: ``,
},
2054: &MarketGroup{
ID: 2054,
ParentID: 2044,
Name: `Minmatar`,
Description: ``,
},
2055: &MarketGroup{
ID: 2055,
ParentID: 2045,
Name: `Amarr`,
Description: ``,
},
2056: &MarketGroup{
ID: 2056,
ParentID: 2045,
Name: `Caldari`,
Description: ``,
},
2057: &MarketGroup{
ID: 2057,
ParentID: 2045,
Name: `Gallente`,
Description: ``,
},
2058: &MarketGroup{
ID: 2058,
ParentID: 2045,
Name: `Minmatar`,
Description: ``,
},
2059: &MarketGroup{
ID: 2059,
ParentID: 2046,
Name: `Amarr`,
Description: ``,
},
2060: &MarketGroup{
ID: 2060,
ParentID: 2046,
Name: `Caldari`,
Description: ``,
},
2061: &MarketGroup{
ID: 2061,
ParentID: 2046,
Name: `Gallente`,
Description: ``,
},
2062: &MarketGroup{
ID: 2062,
ParentID: 2046,
Name: `Minmatar`,
Description: ``,
},
2063: &MarketGroup{
ID: 2063,
ParentID: 2029,
Name: `Navy Faction`,
Description: ``,
},
2064: &MarketGroup{
ID: 2064,
ParentID: 1988,
Name: `Advanced Cruisers`,
Description: ``,
},
2065: &MarketGroup{
ID: 2065,
ParentID: 2064,
Name: `Heavy Assault Cruisers`,
Description: ``,
},
2066: &MarketGroup{
ID: 2066,
ParentID: 2064,
Name: `Heavy Interdiction Cruisers`,
Description: ``,
},
2067: &MarketGroup{
ID: 2067,
ParentID: 2064,
Name: `Logistics`,
Description: ``,
},
2068: &MarketGroup{
ID: 2068,
ParentID: 2064,
Name: `Recon Ships`,
Description: ``,
},
2069: &MarketGroup{
ID: 2069,
ParentID: 2065,
Name: `Amarr`,
Description: ``,
},
2070: &MarketGroup{
ID: 2070,
ParentID: 2065,
Name: `Caldari`,
Description: ``,
},
2071: &MarketGroup{
ID: 2071,
ParentID: 2065,
Name: `Gallente`,
Description: ``,
},
2072: &MarketGroup{
ID: 2072,
ParentID: 2065,
Name: `Minmatar`,
Description: ``,
},
2073: &MarketGroup{
ID: 2073,
ParentID: 2066,
Name: `Amarr`,
Description: ``,
},
2074: &MarketGroup{
ID: 2074,
ParentID: 2066,
Name: `Caldari`,
Description: ``,
},
2075: &MarketGroup{
ID: 2075,
ParentID: 2066,
Name: `Gallente`,
Description: ``,
},
2076: &MarketGroup{
ID: 2076,
ParentID: 2066,
Name: `Minmatar`,
Description: ``,
},
2077: &MarketGroup{
ID: 2077,
ParentID: 2067,
Name: `Amarr`,
Description: ``,
},
2078: &MarketGroup{
ID: 2078,
ParentID: 2067,
Name: `Caldari`,
Description: ``,
},
2079: &MarketGroup{
ID: 2079,
ParentID: 2067,
Name: `Gallente`,
Description: ``,
},
2080: &MarketGroup{
ID: 2080,
ParentID: 2067,
Name: `Minmatar`,
Description: ``,
},
2081: &MarketGroup{
ID: 2081,
ParentID: 2068,
Name: `Amarr`,
Description: ``,
},
2082: &MarketGroup{
ID: 2082,
ParentID: 2068,
Name: `Caldari`,
Description: ``,
},
2083: &MarketGroup{
ID: 2083,
ParentID: 2068,
Name: `Gallente`,
Description: ``,
},
2084: &MarketGroup{
ID: 2084,
ParentID: 2068,
Name: `Minmatar`,
Description: ``,
},
2085: &MarketGroup{
ID: 2085,
ParentID: 2006,
Name: `Standard Industrial Ships`,
Description: ``,
},
2086: &MarketGroup{
ID: 2086,
ParentID: 2006,
Name: `Advanced Industrial Ships`,
Description: ``,
},
2087: &MarketGroup{
ID: 2087,
ParentID: 2086,
Name: `Transport Ships`,
Description: ``,
},
2088: &MarketGroup{
ID: 2088,
ParentID: 2087,
Name: `Amarr`,
Description: ``,
},
2089: &MarketGroup{
ID: 2089,
ParentID: 2087,
Name: `Caldari`,
Description: ``,
},
2090: &MarketGroup{
ID: 2090,
ParentID: 2087,
Name: `Gallente`,
Description: ``,
},
2091: &MarketGroup{
ID: 2091,
ParentID: 2087,
Name: `Minmatar`,
Description: ``,
},
2092: &MarketGroup{
ID: 2092,
ParentID: 2381,
Name: `Caldari`,
Description: ``,
},
2093: &MarketGroup{
ID: 2093,
ParentID: 2381,
Name: `Minmatar`,
Description: ``,
},
2094: &MarketGroup{
ID: 2094,
ParentID: 1968,
Name: `Jump Freighters`,
Description: ``,
},
2095: &MarketGroup{
ID: 2095,
ParentID: 2094,
Name: `Amarr`,
Description: ``,
},
2096: &MarketGroup{
ID: 2096,
ParentID: 2094,
Name: `Caldari`,
Description: ``,
},
2097: &MarketGroup{
ID: 2097,
ParentID: 2094,
Name: `Gallente`,
Description: ``,
},
2098: &MarketGroup{
ID: 2098,
ParentID: 2094,
Name: `Minmatar`,
Description: ``,
},
2099: &MarketGroup{
ID: 2099,
ParentID: 1955,
Name: `Advanced Battlecruisers`,
Description: ``,
},
2100: &MarketGroup{
ID: 2100,
ParentID: 1955,
Name: `Faction Battlecruisers`,
Description: ``,
},
2101: &MarketGroup{
ID: 2101,
ParentID: 1955,
Name: `Standard Battlecruisers`,
Description: ``,
},
2102: &MarketGroup{
ID: 2102,
ParentID: 2099,
Name: `<NAME>`,
Description: ``,
},
2103: &MarketGroup{
ID: 2103,
ParentID: 2100,
Name: `<NAME>`,
Description: ``,
},
2104: &MarketGroup{
ID: 2104,
ParentID: 2102,
Name: `Amarr`,
Description: ``,
},
2105: &MarketGroup{
ID: 2105,
ParentID: 2102,
Name: `Caldari`,
Description: ``,
},
2106: &MarketGroup{
ID: 2106,
ParentID: 2102,
Name: `Gallente`,
Description: ``,
},
2107: &MarketGroup{
ID: 2107,
ParentID: 2102,
Name: `Minmatar`,
Description: ``,
},
2108: &MarketGroup{
ID: 2108,
ParentID: 1961,
Name: `<NAME>`,
Description: ``,
},
2109: &MarketGroup{
ID: 2109,
ParentID: 2022,
Name: `<NAME>`,
Description: ``,
},
2110: &MarketGroup{
ID: 2110,
ParentID: 2109,
Name: `Amarr`,
Description: ``,
},
2111: &MarketGroup{
ID: 2111,
ParentID: 2109,
Name: `Caldari`,
Description: ``,
},
2112: &MarketGroup{
ID: 2112,
ParentID: 2109,
Name: `Gallente`,
Description: ``,
},
2113: &MarketGroup{
ID: 2113,
ParentID: 2109,
Name: `Minmatar`,
Description: ``,
},
2114: &MarketGroup{
ID: 2114,
ParentID: 1968,
Name: `Capital Industrial Ships`,
Description: ``,
},
2115: &MarketGroup{
ID: 2115,
ParentID: 1612,
Name: `Special Edition Heavy Interdiction Cruisers`,
Description: `Heavy Interdiction Cruisers which have been offered to capsuleers on occasion for limited periods.`,
},
2119: &MarketGroup{
ID: 2119,
ParentID: 1954,
Name: `Multiple Hull SKINs`,
Description: ``,
},
2120: &MarketGroup{
ID: 2120,
ParentID: 2119,
Name: `Special Edition SKINs`,
Description: ``,
},
2125: &MarketGroup{
ID: 2125,
ParentID: 1373,
Name: `Command Destroyers`,
Description: ``,
},
2126: &MarketGroup{
ID: 2126,
ParentID: 2125,
Name: `Amarr`,
Description: ``,
},
2131: &MarketGroup{
ID: 2131,
ParentID: 2125,
Name: `Caldari`,
Description: ``,
},
2132: &MarketGroup{
ID: 2132,
ParentID: 2125,
Name: `Gallente`,
Description: ``,
},
2133: &MarketGroup{
ID: 2133,
ParentID: 2125,
Name: `Minmatar`,
Description: ``,
},
2134: &MarketGroup{
ID: 2134,
ParentID: 252,
Name: `Micro Jump Field Generators`,
Description: ``,
},
2135: &MarketGroup{
ID: 2135,
ParentID: 52,
Name: `Micro Jump Field Generators`,
Description: ``,
},
2136: &MarketGroup{
ID: 2136,
ParentID: 2042,
Name: `Logistics Frigates`,
Description: ``,
},
2137: &MarketGroup{
ID: 2137,
ParentID: 2136,
Name: `Amarr`,
Description: ``,
},
2138: &MarketGroup{
ID: 2138,
ParentID: 2136,
Name: `Caldari`,
Description: ``,
},
2139: &MarketGroup{
ID: 2139,
ParentID: 2136,
Name: `Gallente`,
Description: ``,
},
2140: &MarketGroup{
ID: 2140,
ParentID: 2136,
Name: `Minmatar`,
Description: ``,
},
2141: &MarketGroup{
ID: 2141,
ParentID: 2036,
Name: `Command Destroyers`,
Description: ``,
},
2142: &MarketGroup{
ID: 2142,
ParentID: 2141,
Name: `Amarr`,
Description: ``,
},
2143: &MarketGroup{
ID: 2143,
ParentID: 2141,
Name: `Caldari`,
Description: ``,
},
2144: &MarketGroup{
ID: 2144,
ParentID: 2141,
Name: `Gallente`,
Description: ``,
},
2145: &MarketGroup{
ID: 2145,
ParentID: 2141,
Name: `Minmatar`,
Description: ``,
},
2146: &MarketGroup{
ID: 2146,
ParentID: 1364,
Name: `<NAME>`,
Description: `Frigates specialized in repairing their allies`,
},
2147: &MarketGroup{
ID: 2147,
ParentID: 2146,
Name: `Amarr`,
Description: `Amarr Logistics Frigates`,
},
2148: &MarketGroup{
ID: 2148,
ParentID: 2146,
Name: `Caldari`,
Description: `Caldari Logistics Frigates`,
},
2149: &MarketGroup{
ID: 2149,
ParentID: 2146,
Name: `Gallente`,
Description: `Gallente Logistics Frigates`,
},
2150: &MarketGroup{
ID: 2150,
ParentID: 2146,
Name: `Minmatar`,
Description: `Minmatar Logistics Frigates`,
},
2151: &MarketGroup{
ID: 2151,
ParentID: 1713,
Name: `Ice Mining Lasers`,
Description: `Ice mining laser designs.`,
},
2152: &MarketGroup{
ID: 2152,
ParentID: 150,
Name: `Structure Management`,
Description: `Skills pertaining to the efficient use of player owned structures`,
},
2153: &MarketGroup{
ID: 2153,
ParentID: 1566,
Name: `Stasis Grapplers`,
Description: `Stasis Grappler Blueprints`,
},
2154: &MarketGroup{
ID: 2154,
ParentID: 657,
Name: `Stasis Grapplers`,
Description: `Stasis Grapplers`,
},
2155: &MarketGroup{
ID: 2155,
ParentID: 533,
Name: `Named Components`,
Description: ``,
},
2156: &MarketGroup{
ID: 2156,
ParentID: 1338,
Name: `Citadels`,
Description: `Blueprints of Citadel structures.`,
},
2157: &MarketGroup{
ID: 2157,
ParentID: 2,
Name: `Structure Modifications`,
Description: `Blueprints of structure modifications.`,
},
2158: &MarketGroup{
ID: 2158,
ParentID: 2,
Name: `Structure Equipment`,
Description: `Blueprints of Structure Modules.`,
},
2159: &MarketGroup{
ID: 2159,
ParentID: 2157,
Name: `Structure Combat Rigs`,
Description: `Blueprints for Structure Combat rigs.`,
},
2160: &MarketGroup{
ID: 2160,
ParentID: 2157,
Name: `Structure Resource Processing Rigs`,
Description: `Blueprints for structure reprocessing and reactions rigs.`,
},
2161: &MarketGroup{
ID: 2161,
ParentID: 2158,
Name: `Electronic Warfare`,
Description: ``,
},
2162: &MarketGroup{
ID: 2162,
ParentID: 2158,
Name: `Electronics and Sensor Upgrades`,
Description: ``,
},
2163: &MarketGroup{
ID: 2163,
ParentID: 2158,
Name: `Engineering Equipment`,
Description: ``,
},
2164: &MarketGroup{
ID: 2164,
ParentID: 2158,
Name: `Structure Weapons`,
Description: ``,
},
2165: &MarketGroup{
ID: 2165,
ParentID: 2158,
Name: `Fighter Upgrades`,
Description: ``,
},
2166: &MarketGroup{
ID: 2166,
ParentID: 2158,
Name: `Service Modules`,
Description: ``,
},
2167: &MarketGroup{
ID: 2167,
ParentID: 2161,
Name: `Electronic Counter Measures`,
Description: ``,
},
2168: &MarketGroup{
ID: 2168,
ParentID: 2161,
Name: `Remote Sensor Dampeners`,
Description: ``,
},
2169: &MarketGroup{
ID: 2169,
ParentID: 2161,
Name: `Stasis Webifiers`,
Description: ``,
},
2170: &MarketGroup{
ID: 2170,
ParentID: 2161,
Name: `Target Painters`,
Description: ``,
},
2171: &MarketGroup{
ID: 2171,
ParentID: 2161,
Name: `Weapon Disruptors`,
Description: `Weapon Disruptors`,
},
2172: &MarketGroup{
ID: 2172,
ParentID: 2161,
Name: `Warp Scramblers`,
Description: ``,
},
2173: &MarketGroup{
ID: 2173,
ParentID: 2162,
Name: `CPU Upgrades`,
Description: ``,
},
2174: &MarketGroup{
ID: 2174,
ParentID: 2162,
Name: `Signal Amplifiers`,
Description: ``,
},
2175: &MarketGroup{
ID: 2175,
ParentID: 2163,
Name: `Capacitor Batteries`,
Description: ``,
},
2176: &MarketGroup{
ID: 2176,
ParentID: 2163,
Name: `Energy Neutralizers`,
Description: ``,
},
2177: &MarketGroup{
ID: 2177,
ParentID: 2163,
Name: `Reactor Control Units`,
Description: ``,
},
2178: &MarketGroup{
ID: 2178,
ParentID: 2164,
Name: `Missile Launchers`,
Description: ``,
},
2179: &MarketGroup{
ID: 2179,
ParentID: 2164,
Name: `Weapon Upgrades`,
Description: ``,
},
2180: &MarketGroup{
ID: 2180,
ParentID: 2164,
Name: `Doomsday Devices`,
Description: ``,
},
2181: &MarketGroup{
ID: 2181,
ParentID: 2166,
Name: `Citadel Service Modules`,
Description: ``,
},
2182: &MarketGroup{
ID: 2182,
ParentID: 2166,
Name: `Resource Processing Service Modules`,
Description: ``,
},
2183: &MarketGroup{
ID: 2183,
ParentID: 2178,
Name: `Structure Anticapital Launcher`,
Description: ``,
},
2184: &MarketGroup{
ID: 2184,
ParentID: 2178,
Name: `Structure Antisubcapital Launcher`,
Description: ``,
},
2185: &MarketGroup{
ID: 2185,
ParentID: 2164,
Name: `Guided Bomb Launchers`,
Description: ``,
},
2186: &MarketGroup{
ID: 2186,
ParentID: 2163,
Name: `Point Defense Batteries`,
Description: ``,
},
2187: &MarketGroup{
ID: 2187,
ParentID: 2162,
Name: `Ship Tractor Beams`,
Description: ``,
},
2188: &MarketGroup{
ID: 2188,
ParentID: 2163,
Name: `Capacitor Power Relays`,
Description: ``,
},
2189: &MarketGroup{
ID: 2189,
ParentID: 2179,
Name: `Ballistic Control Systems`,
Description: ``,
},
2190: &MarketGroup{
ID: 2190,
ParentID: 2179,
Name: `Missile Guidance Enhancers`,
Description: ``,
},
2191: &MarketGroup{
ID: 2191,
ParentID: 314,
Name: `Structure Anticapital Missiles`,
Description: ``,
},
2192: &MarketGroup{
ID: 2192,
ParentID: 314,
Name: `Structure Antisubcapital Missiles`,
Description: ``,
},
2193: &MarketGroup{
ID: 2193,
ParentID: 211,
Name: `Structure Guided Bombs`,
Description: ``,
},
2196: &MarketGroup{
ID: 2196,
ParentID: 114,
Name: `Structure Antisubcapital Missiles`,
Description: ``,
},
2197: &MarketGroup{
ID: 2197,
ParentID: 114,
Name: `Structure Anticapital Missiles`,
Description: ``,
},
2198: &MarketGroup{
ID: 2198,
ParentID: 11,
Name: `Structure Guided Bombs`,
Description: ``,
},
2199: &MarketGroup{
ID: 2199,
ParentID: 477,
Name: `Citadels`,
Description: ``,
},
2200: &MarketGroup{
ID: 2200,
ParentID: 2199,
Name: `Faction Citadels`,
Description: ``,
},
2201: &MarketGroup{
ID: 2201,
ParentID: 2199,
Name: `Standard Citadels`,
Description: ``,
},
2202: &MarketGroup{
ID: 2202,
ParentID: 0,
Name: `Structure Equipment`,
Description: `Modules that provide functionality to Structures`,
},
2203: &MarketGroup{
ID: 2203,
ParentID: 0,
Name: `Structure Modifications`,
Description: `Rigs that improve functionality in Structures`,
},
2204: &MarketGroup{
ID: 2204,
ParentID: 2203,
Name: `Structure Resource Processing Rigs`,
Description: `Structure Reprocessing and Reaction Rigs`,
},
2205: &MarketGroup{
ID: 2205,
ParentID: 2203,
Name: `Structure Combat Rigs`,
Description: `Structure Combat Rigs`,
},
2206: &MarketGroup{
ID: 2206,
ParentID: 2202,
Name: `Electronic Warfare`,
Description: ``,
},
2207: &MarketGroup{
ID: 2207,
ParentID: 2202,
Name: `Electronics and Sensor Upgrades`,
Description: ``,
},
2208: &MarketGroup{
ID: 2208,
ParentID: 2202,
Name: `Engineering Equipment`,
Description: ``,
},
2209: &MarketGroup{
ID: 2209,
ParentID: 2202,
Name: `Structure Weapons`,
Description: ``,
},
2210: &MarketGroup{
ID: 2210,
ParentID: 2202,
Name: `Service Modules`,
Description: ``,
},
2211: &MarketGroup{
ID: 2211,
ParentID: 2206,
Name: `Electronic Counter Measures`,
Description: ``,
},
2212: &MarketGroup{
ID: 2212,
ParentID: 2206,
Name: `Remote Sensor Dampeners`,
Description: ``,
},
2213: &MarketGroup{
ID: 2213,
ParentID: 2206,
Name: `Stasis Webifiers`,
Description: ``,
},
2214: &MarketGroup{
ID: 2214,
ParentID: 2206,
Name: `Target Painters`,
Description: ``,
},
2215: &MarketGroup{
ID: 2215,
ParentID: 2206,
Name: `Weapon Disruptors`,
Description: `Weapon Disruptors`,
},
2216: &MarketGroup{
ID: 2216,
ParentID: 2206,
Name: `Warp Disruptors`,
Description: ``,
},
2218: &MarketGroup{
ID: 2218,
ParentID: 2207,
Name: `CPU Upgrades`,
Description: ``,
},
2219: &MarketGroup{
ID: 2219,
ParentID: 2207,
Name: `Signal Amplifiers`,
Description: ``,
},
2220: &MarketGroup{
ID: 2220,
ParentID: 2208,
Name: `Capacitor Power Relays`,
Description: ``,
},
2221: &MarketGroup{
ID: 2221,
ParentID: 2208,
Name: `Point Defense Batteries`,
Description: ``,
},
2222: &MarketGroup{
ID: 2222,
ParentID: 2208,
Name: `Reactor Control Units`,
Description: ``,
},
2223: &MarketGroup{
ID: 2223,
ParentID: 2208,
Name: `Energy Neutralizers`,
Description: ``,
},
2224: &MarketGroup{
ID: 2224,
ParentID: 2208,
Name: `Capacitor Batteries`,
Description: ``,
},
2226: &MarketGroup{
ID: 2226,
ParentID: 2209,
Name: `Missile Launchers`,
Description: ``,
},
2227: &MarketGroup{
ID: 2227,
ParentID: 2209,
Name: `Weapon Upgrades`,
Description: ``,
},
2228: &MarketGroup{
ID: 2228,
ParentID: 2209,
Name: `Doomsday Devices`,
Description: ``,
},
2229: &MarketGroup{
ID: 2229,
ParentID: 2209,
Name: `Guided Bomb Launchers`,
Description: ``,
},
2230: &MarketGroup{
ID: 2230,
ParentID: 2226,
Name: `Structure Anticapital Launcher`,
Description: ``,
},
2231: &MarketGroup{
ID: 2231,
ParentID: 2226,
Name: `Structure Antisubcapital Launcher`,
Description: ``,
},
2232: &MarketGroup{
ID: 2232,
ParentID: 2210,
Name: `Citadel Service Modules`,
Description: ``,
},
2233: &MarketGroup{
ID: 2233,
ParentID: 2210,
Name: `Resource Processing Service Modules`,
Description: ``,
},
2234: &MarketGroup{
ID: 2234,
ParentID: 2227,
Name: `Ballistic Control Systems`,
Description: ``,
},
2235: &MarketGroup{
ID: 2235,
ParentID: 2227,
Name: `Missile Guidance Enhancers`,
Description: ``,
},
2236: &MarketGroup{
ID: 2236,
ParentID: 157,
Name: `Fighters`,
Description: `Fighters are small combat craft launched in squadrons from Capital Ships and Structures`,
},
2237: &MarketGroup{
ID: 2237,
ParentID: 357,
Name: `Fighters`,
Description: ``,
},
2238: &MarketGroup{
ID: 2238,
ParentID: 2237,
Name: `Support Fighters`,
Description: `Blueprints of support fighter designs.`,
},
2239: &MarketGroup{
ID: 2239,
ParentID: 2410,
Name: `Support Fighters`,
Description: `Piloted combat vessels, deployable from Carriers and Supercarriers.`,
},
2240: &MarketGroup{
ID: 2240,
ParentID: 133,
Name: `25000mm Armor Plate`,
Description: `25000mm Armor Plate`,
},
2241: &MarketGroup{
ID: 2241,
ParentID: 551,
Name: `Capital`,
Description: `Capital-class shield extension systems.`,
},
2242: &MarketGroup{
ID: 2242,
ParentID: 664,
Name: `Capital`,
Description: `Capital-sized capacitor batteries.`,
},
2243: &MarketGroup{
ID: 2243,
ParentID: 668,
Name: `Capital`,
Description: `Capital-sized capacitor boosters.`,
},
2244: &MarketGroup{
ID: 2244,
ParentID: 538,
Name: `Capital`,
Description: `Hull repair system designs, intended for Capital vessels.`,
},
2245: &MarketGroup{
ID: 2245,
ParentID: 535,
Name: `Scriptable Armor Hardeners`,
Description: `Armor Hardeners that can be reconfigured on the fly using scripts.`,
},
2246: &MarketGroup{
ID: 2246,
ParentID: 553,
Name: `Scriptable Shield Hardeners`,
Description: `Shield Hardeners that can be reconfigured on the fly using scripts.`,
},
2247: &MarketGroup{
ID: 2247,
ParentID: 140,
Name: `Rapid Torpedo Launchers`,
Description: ``,
},
2248: &MarketGroup{
ID: 2248,
ParentID: 1566,
Name: `Burst Projectors`,
Description: ``,
},
2249: &MarketGroup{
ID: 2249,
ParentID: 657,
Name: `Burst Projectors`,
Description: ``,
},
2250: &MarketGroup{
ID: 2250,
ParentID: 661,
Name: `Capital`,
Description: `Capital-sized energy neutralizers.`,
},
2251: &MarketGroup{
ID: 2251,
ParentID: 662,
Name: `Capital`,
Description: `Capital-sized energy nosferatu.`,
},
2252: &MarketGroup{
ID: 2252,
ParentID: 846,
Name: `Extra Large`,
Description: `Fired by dreadnaught-sized guns and stationary defense systems.`,
},
2253: &MarketGroup{
ID: 2253,
ParentID: 847,
Name: `Extra Large`,
Description: `Fired by dreadnaught-sized guns and stationary defense systems.`,
},
2254: &MarketGroup{
ID: 2254,
ParentID: 849,
Name: `Extra Large`,
Description: `Fired by dreadnought-sized guns and stationary defense systems.`,
},
2255: &MarketGroup{
ID: 2255,
ParentID: 850,
Name: `Extra Large`,
Description: `Fired by dreadnought-sized guns and stationary defense systems.`,
},
2256: &MarketGroup{
ID: 2256,
ParentID: 853,
Name: `Extra Large`,
Description: `For use with dreadnought-sized lasers and stationary defense systems.`,
},
2257: &MarketGroup{
ID: 2257,
ParentID: 852,
Name: `Extra Large`,
Description: `For use with dreadnought-sized lasers and stationary defense systems.`,
},
2258: &MarketGroup{
ID: 2258,
ParentID: 505,
Name: `Advanced Anti-Ship XL Torpedoes`,
Description: `Advanced Anti-Ship Torpedoes`,
},
2259: &MarketGroup{
ID: 2259,
ParentID: 505,
Name: `Advanced Long Range XL Torpedoes`,
Description: `Advanced Long Range Torpedoes`,
},
2260: &MarketGroup{
ID: 2260,
ParentID: 1316,
Name: `Advanced High Precision XL Cruise Missiles`,
Description: `Advanced High Precision XL Cruise Missiles`,
},
2261: &MarketGroup{
ID: 2261,
ParentID: 1316,
Name: `Advanced High Damage XL Cruise Missiles`,
Description: `Advanced High Damage XL Cruise Missiles`,
},
2262: &MarketGroup{
ID: 2262,
ParentID: 204,
Name: `Force Auxiliaries`,
Description: `Blueprints of force auxiliary-class vessels.`,
},
2263: &MarketGroup{
ID: 2263,
ParentID: 2262,
Name: `Amarr`,
Description: `Blueprints of Amarr force auxiliary designs.`,
},
2264: &MarketGroup{
ID: 2264,
ParentID: 2262,
Name: `Caldari`,
Description: `Blueprints of Caldari force auxiliary designs.`,
},
2265: &MarketGroup{
ID: 2265,
ParentID: 2262,
Name: `Gallente`,
Description: `Blueprints of Gallente force auxiliary designs.`,
},
2266: &MarketGroup{
ID: 2266,
ParentID: 2262,
Name: `Minmatar`,
Description: `Blueprints of Minmatar force auxiliary designs.`,
},
2267: &MarketGroup{
ID: 2267,
ParentID: 912,
Name: `Amarr`,
Description: ``,
},
2268: &MarketGroup{
ID: 2268,
ParentID: 912,
Name: `Caldari`,
Description: ``,
},
2269: &MarketGroup{
ID: 2269,
ParentID: 912,
Name: `Gallente`,
Description: ``,
},
2270: &MarketGroup{
ID: 2270,
ParentID: 912,
Name: `Minmatar`,
Description: ``,
},
2271: &MarketGroup{
ID: 2271,
ParentID: 1381,
Name: `Force Auxiliaries`,
Description: `Capital warships, able to support fleets with their logistics power.`,
},
2272: &MarketGroup{
ID: 2272,
ParentID: 2271,
Name: `Amarr`,
Description: `Amarr force auxiliary designs.`,
},
2273: &MarketGroup{
ID: 2273,
ParentID: 2271,
Name: `Caldari`,
Description: `Caldari force auxiliary designs.`,
},
2274: &MarketGroup{
ID: 2274,
ParentID: 2271,
Name: `Gallente`,
Description: `Gallente force auxiliary designs.`,
},
2275: &MarketGroup{
ID: 2275,
ParentID: 2271,
Name: `Minmatar`,
Description: `Minmatar force auxiliary designs.`,
},
2276: &MarketGroup{
ID: 2276,
ParentID: 912,
Name: `Non-Racial`,
Description: ``,
},
2277: &MarketGroup{
ID: 2277,
ParentID: 1968,
Name: `Force Auxiliaries`,
Description: ``,
},
2278: &MarketGroup{
ID: 2278,
ParentID: 2277,
Name: `Amarr`,
Description: ``,
},
2279: &MarketGroup{
ID: 2279,
ParentID: 2277,
Name: `Caldari`,
Description: ``,
},
2280: &MarketGroup{
ID: 2280,
ParentID: 2277,
Name: `Gallente`,
Description: ``,
},
2281: &MarketGroup{
ID: 2281,
ParentID: 2277,
Name: `Minmatar`,
Description: ``,
},
2283: &MarketGroup{
ID: 2283,
ParentID: 1954,
Name: `Capsules`,
Description: ``,
},
2285: &MarketGroup{
ID: 2285,
ParentID: 2283,
Name: `Capsules`,
Description: ``,
},
2286: &MarketGroup{
ID: 2286,
ParentID: 2283,
Name: `Special Edition Capsules`,
Description: ``,
},
2287: &MarketGroup{
ID: 2287,
ParentID: 812,
Name: `Faction Titans`,
Description: `Faction Titan designs.`,
},
2288: &MarketGroup{
ID: 2288,
ParentID: 761,
Name: `Faction Dreadnoughts`,
Description: `Faction Dreadnought designs.`,
},
2290: &MarketGroup{
ID: 2290,
ParentID: 211,
Name: `Command Burst Charges`,
Description: `Command Burst Charges`,
},
2291: &MarketGroup{
ID: 2291,
ParentID: 2290,
Name: `Armor Command Burst Charges`,
Description: `Armor Command Burst Charges`,
},
2292: &MarketGroup{
ID: 2292,
ParentID: 2290,
Name: `Information Command Burst Charges`,
Description: `Information Command Burst Charges`,
},
2293: &MarketGroup{
ID: 2293,
ParentID: 2290,
Name: `Mining Foreman Burst Charges`,
Description: `Mining Foreman Burst Charges`,
},
2294: &MarketGroup{
ID: 2294,
ParentID: 2290,
Name: `Shield Command Burst Charges`,
Description: `Shield Command Burst Charges`,
},
2295: &MarketGroup{
ID: 2295,
ParentID: 2290,
Name: `Skirmish Command Burst Charges`,
Description: `Skirmish Command Burst Charges`,
},
2297: &MarketGroup{
ID: 2297,
ParentID: 11,
Name: `Command Burst Charges`,
Description: `Command Burst Charges`,
},
2298: &MarketGroup{
ID: 2298,
ParentID: 2297,
Name: `Armor Command Burst Charges`,
Description: `Armor Command Burst Charges`,
},
2299: &MarketGroup{
ID: 2299,
ParentID: 2297,
Name: `Information Command Burst Charges`,
Description: `Information Command Burst Charges`,
},
2300: &MarketGroup{
ID: 2300,
ParentID: 2297,
Name: `Mining Foreman Burst Charges`,
Description: `Mining Foreman Burst Charges`,
},
2301: &MarketGroup{
ID: 2301,
ParentID: 2297,
Name: `Shield Command Burst Charges`,
Description: `Shield Command Burst Charges`,
},
2302: &MarketGroup{
ID: 2302,
ParentID: 2297,
Name: `Skirmish Command Burst Charges`,
Description: `Skirmish Command Burst Charges`,
},
2306: &MarketGroup{
ID: 2306,
ParentID: 1954,
Name: `Corvettes`,
Description: ``,
},
2307: &MarketGroup{
ID: 2307,
ParentID: 2306,
Name: `Standard Corvettes`,
Description: ``,
},
2308: &MarketGroup{
ID: 2308,
ParentID: 2307,
Name: `Amarr`,
Description: ``,
},
2309: &MarketGroup{
ID: 2309,
ParentID: 2036,
Name: `Tactical Destroyers`,
Description: ``,
},
2310: &MarketGroup{
ID: 2310,
ParentID: 2309,
Name: `Amarr`,
Description: ``,
},
2311: &MarketGroup{
ID: 2311,
ParentID: 1955,
Name: `Special Battlecruisers`,
Description: ``,
},
2312: &MarketGroup{
ID: 2312,
ParentID: 2311,
Name: `Special Battlecruisers`,
Description: ``,
},
2313: &MarketGroup{
ID: 2313,
ParentID: 1989,
Name: `Special Destroyers`,
Description: ``,
},
2314: &MarketGroup{
ID: 2314,
ParentID: 2313,
Name: `Special Destroyers`,
Description: ``,
},
2315: &MarketGroup{
ID: 2315,
ParentID: 1954,
Name: `Shuttles`,
Description: ``,
},
2316: &MarketGroup{
ID: 2316,
ParentID: 2315,
Name: `Special Shuttles`,
Description: ``,
},
2317: &MarketGroup{
ID: 2317,
ParentID: 19,
Name: `Strong Boxes`,
Description: ``,
},
2318: &MarketGroup{
ID: 2318,
ParentID: 1972,
Name: `ORE`,
Description: ``,
},
2319: &MarketGroup{
ID: 2319,
ParentID: 2011,
Name: `Mining Barges`,
Description: ``,
},
2320: &MarketGroup{
ID: 2320,
ParentID: 2042,
Name: `Expedition Frigates`,
Description: ``,
},
2321: &MarketGroup{
ID: 2321,
ParentID: 2001,
Name: `ORE`,
Description: ``,
},
2322: &MarketGroup{
ID: 2322,
ParentID: 1338,
Name: `Engineering Complexes`,
Description: `Blueprints of Engineering Complex structures.`,
},
2323: &MarketGroup{
ID: 2323,
ParentID: 2166,
Name: `Engineering Service Modules`,
Description: ``,
},
2324: &MarketGroup{
ID: 2324,
ParentID: 477,
Name: `Engineering Complexes`,
Description: ``,
},
2325: &MarketGroup{
ID: 2325,
ParentID: 912,
Name: `ORE`,
Description: ``,
},
2327: &MarketGroup{
ID: 2327,
ParentID: 477,
Name: `Refineries`,
Description: ``,
},
2328: &MarketGroup{
ID: 2328,
ParentID: 2085,
Name: `ORE`,
Description: ``,
},
2330: &MarketGroup{
ID: 2330,
ParentID: 2006,
Name: `Special Edition Industrial Ships`,
Description: ``,
},
2331: &MarketGroup{
ID: 2331,
ParentID: 2330,
Name: `ORE`,
Description: ``,
},
2332: &MarketGroup{
ID: 2332,
ParentID: 2210,
Name: `Engineering Service Modules`,
Description: ``,
},
2333: &MarketGroup{
ID: 2333,
ParentID: 204,
Name: `Industrial Command Ships`,
Description: `Blueprints of industrial command ships`,
},
2334: &MarketGroup{
ID: 2334,
ParentID: 2333,
Name: `ORE`,
Description: `Blueprints of ORE industrial command ship designs.`,
},
2335: &MarketGroup{
ID: 2335,
ParentID: 1382,
Name: `Industrial Command Ships`,
Description: ``,
},
2336: &MarketGroup{
ID: 2336,
ParentID: 2335,
Name: `ORE`,
Description: ``,
},
2337: &MarketGroup{
ID: 2337,
ParentID: 2006,
Name: `Industrial Command Ships`,
Description: ``,
},
2338: &MarketGroup{
ID: 2338,
ParentID: 2337,
Name: `ORE`,
Description: ``,
},
2339: &MarketGroup{
ID: 2339,
ParentID: 2157,
Name: `Structure Engineering Rigs`,
Description: `Blueprints for Structure Combat rigs.`,
},
2340: &MarketGroup{
ID: 2340,
ParentID: 2203,
Name: `Structure Engineering Rigs`,
Description: `Structure Engineering Rigs`,
},
2341: &MarketGroup{
ID: 2341,
ParentID: 2204,
Name: `Medium Structure Resource Processing Rigs`,
Description: `Medium Structure Reprocessing and Reaction Rigs`,
},
2342: &MarketGroup{
ID: 2342,
ParentID: 2204,
Name: `Large Structure Resource Processing Rigs`,
Description: `Large Structure Reprocessing and Reaction Rigs`,
},
2343: &MarketGroup{
ID: 2343,
ParentID: 2204,
Name: `X-Large Structure Resource Processing Rigs`,
Description: `X-Large Structure Reprocessing and Reaction Rigs`,
},
2344: &MarketGroup{
ID: 2344,
ParentID: 2205,
Name: `Medium Structure Combat Rigs`,
Description: `Medium Structure Combat Rigs`,
},
2345: &MarketGroup{
ID: 2345,
ParentID: 2205,
Name: `Large Structure Combat Rigs`,
Description: `Large Structure Combat Rigs`,
},
2346: &MarketGroup{
ID: 2346,
ParentID: 2205,
Name: `X-Large Structure Combat Rigs`,
Description: `X-Large Structure Combat Rigs`,
},
2347: &MarketGroup{
ID: 2347,
ParentID: 2340,
Name: `Medium Structure Engineering Rigs`,
Description: `Medium Structure Engineering Rigs`,
},
2348: &MarketGroup{
ID: 2348,
ParentID: 2340,
Name: `Large Structure Engineering Rigs`,
Description: `Large Structure Engineering Rigs`,
},
2349: &MarketGroup{
ID: 2349,
ParentID: 2340,
Name: `X-Large Structure Engineering Rigs`,
Description: `X-Large Structure Engineering Rigs`,
},
2350: &MarketGroup{
ID: 2350,
ParentID: 1612,
Name: `Special Edition Destroyers`,
Description: `Destroyers which have been offered to capsuleers on occasion for limited periods.`,
},
2351: &MarketGroup{
ID: 2351,
ParentID: 140,
Name: `Defender Launchers`,
Description: `For launching defender missiles.`,
},
2353: &MarketGroup{
ID: 2353,
ParentID: 2307,
Name: `Caldari`,
Description: ``,
},
2354: &MarketGroup{
ID: 2354,
ParentID: 2307,
Name: `Gallente`,
Description: ``,
},
2355: &MarketGroup{
ID: 2355,
ParentID: 2307,
Name: `Minmatar`,
Description: ``,
},
2356: &MarketGroup{
ID: 2356,
ParentID: 2309,
Name: `Gallente`,
Description: ``,
},
2357: &MarketGroup{
ID: 2357,
ParentID: 2271,
Name: `Faction Force Auxiliaries`,
Description: `Faction force auxiliary designs.`,
},
2358: &MarketGroup{
ID: 2358,
ParentID: 1922,
Name: `Skill Trading`,
Description: `Items used to extract and inject skill points.`,
},
2359: &MarketGroup{
ID: 2359,
ParentID: 2306,
Name: `Faction Corvettes`,
Description: ``,
},
2360: &MarketGroup{
ID: 2360,
ParentID: 2359,
Name: `Pirate Faction`,
Description: ``,
},
2361: &MarketGroup{
ID: 2361,
ParentID: 2044,
Name: `Special Covert Ops`,
Description: ``,
},
2362: &MarketGroup{
ID: 2362,
ParentID: 2068,
Name: `Special Recon Ships`,
Description: ``,
},
2369: &MarketGroup{
ID: 2369,
ParentID: 2064,
Name: `Strategic Cruisers`,
Description: ``,
},
2370: &MarketGroup{
ID: 2370,
ParentID: 2369,
Name: `Caldari`,
Description: ``,
},
2371: &MarketGroup{
ID: 2371,
ParentID: 2369,
Name: `Amarr`,
Description: ``,
},
2372: &MarketGroup{
ID: 2372,
ParentID: 2369,
Name: `Gallente`,
Description: ``,
},
2373: &MarketGroup{
ID: 2373,
ParentID: 2369,
Name: `Minmatar`,
Description: ``,
},
2374: &MarketGroup{
ID: 2374,
ParentID: 1970,
Name: `Standard Carriers`,
Description: ``,
},
2375: &MarketGroup{
ID: 2375,
ParentID: 1970,
Name: `Faction Carriers`,
Description: ``,
},
2376: &MarketGroup{
ID: 2376,
ParentID: 2375,
Name: `Pirate Faction`,
Description: ``,
},
2377: &MarketGroup{
ID: 2377,
ParentID: 1971,
Name: `Standard Dreadnoughts`,
Description: ``,
},
2378: &MarketGroup{
ID: 2378,
ParentID: 1971,
Name: `Faction Dreadnoughts`,
Description: ``,
},
2380: &MarketGroup{
ID: 2380,
ParentID: 2378,
Name: `Pirate Faction`,
Description: ``,
},
2381: &MarketGroup{
ID: 2381,
ParentID: 1973,
Name: `Standard Titans`,
Description: ``,
},
2382: &MarketGroup{
ID: 2382,
ParentID: 1973,
Name: `Faction Titans`,
Description: ``,
},
2383: &MarketGroup{
ID: 2383,
ParentID: 2382,
Name: `Pirate Faction`,
Description: ``,
},
2387: &MarketGroup{
ID: 2387,
ParentID: 2315,
Name: `Amarr`,
Description: `Amarr Shuttle Skins`,
},
2388: &MarketGroup{
ID: 2388,
ParentID: 2315,
Name: `Caldari`,
Description: `Caldari Shuttle Skins`,
},
2389: &MarketGroup{
ID: 2389,
ParentID: 2315,
Name: `Gallente`,
Description: `Gallente Shuttle Skins`,
},
2390: &MarketGroup{
ID: 2390,
ParentID: 2315,
Name: `Minmatar`,
Description: `Minmatar Shuttle Skins`,
},
2391: &MarketGroup{
ID: 2391,
ParentID: 2309,
Name: `Caldari`,
Description: ``,
},
2392: &MarketGroup{
ID: 2392,
ParentID: 2309,
Name: `Minmatar`,
Description: ``,
},
2393: &MarketGroup{
ID: 2393,
ParentID: 1338,
Name: `Refineries`,
Description: `Blueprints of Refinery structures.`,
},
2395: &MarketGroup{
ID: 2395,
ParentID: 1031,
Name: `Moon Ores`,
Description: ``,
},
2396: &MarketGroup{
ID: 2396,
ParentID: 2395,
Name: `Ubiquitous Moon Ores`,
Description: `The most common forms of moon ore`,
},
2397: &MarketGroup{
ID: 2397,
ParentID: 2395,
Name: `Common Moon Ores`,
Description: ``,
},
2398: &MarketGroup{
ID: 2398,
ParentID: 2395,
Name: `Uncommon Moon Ores`,
Description: ``,
},
2400: &MarketGroup{
ID: 2400,
ParentID: 2395,
Name: `Rare Moon Ores`,
Description: ``,
},
2401: &MarketGroup{
ID: 2401,
ParentID: 2395,
Name: `Exceptional Moon Ores`,
Description: ``,
},
2402: &MarketGroup{
ID: 2402,
ParentID: 1849,
Name: `Biochemical Reaction Formulas`,
Description: `Reaction formulas that enable the creation of raw boosters in Refineries`,
},
2403: &MarketGroup{
ID: 2403,
ParentID: 1849,
Name: `Composite Reaction Formulas`,
Description: `Reaction formulas that enable the creation of Tech 2 construction materials in Refineries`,
},
2404: &MarketGroup{
ID: 2404,
ParentID: 1849,
Name: `Polymer Reaction Formulas`,
Description: `Reaction formulas that enable the creation of Tech 3 construction materials in Refineries`,
},
2406: &MarketGroup{
ID: 2406,
ParentID: 2109,
Name: `Special Black Ops`,
Description: ``,
},
2407: &MarketGroup{
ID: 2407,
ParentID: 2161,
Name: `Structure Burst Projectors`,
Description: ``,
},
2408: &MarketGroup{
ID: 2408,
ParentID: 2206,
Name: `Structure Burst Projectors`,
Description: ``,
},
2409: &MarketGroup{
ID: 2409,
ParentID: 2236,
Name: `Structure-based Fighters`,
Description: ``,
},
2410: &MarketGroup{
ID: 2410,
ParentID: 2236,
Name: `Carrier-based Fighters`,
Description: ``,
},
2411: &MarketGroup{
ID: 2411,
ParentID: 2409,
Name: `Standup Light Fighters`,
Description: ``,
},
2412: &MarketGroup{
ID: 2412,
ParentID: 2409,
Name: `Standup Heavy Fighters`,
Description: ``,
},
2413: &MarketGroup{
ID: 2413,
ParentID: 2409,
Name: `Standup Support Fighters`,
Description: ``,
},
2414: &MarketGroup{
ID: 2414,
ParentID: 2208,
Name: `Armor Reinforcers`,
Description: ``,
},
2415: &MarketGroup{
ID: 2415,
ParentID: 2163,
Name: `Armor Reinforcers`,
Description: ``,
},
2416: &MarketGroup{
ID: 2416,
ParentID: 1368,
Name: `Flag Cruisers`,
Description: ``,
},
2417: &MarketGroup{
ID: 2417,
ParentID: 2416,
Name: `CONCORD`,
Description: ``,
},
2418: &MarketGroup{
ID: 2418,
ParentID: 2064,
Name: `Flag Cruisers`,
Description: ``,
},
2419: &MarketGroup{
ID: 2419,
ParentID: 2418,
Name: `CONCORD`,
Description: ``,
},
2420: &MarketGroup{
ID: 2420,
ParentID: 1960,
Name: `Special Battleships`,
Description: ``,
},
2421: &MarketGroup{
ID: 2421,
ParentID: 2420,
Name: `Special Battleships`,
Description: ``,
},
2425: &MarketGroup{
ID: 2425,
ParentID: 1361,
Name: `Precursor Frigates`,
Description: `Precursor Frigates`,
},
2426: &MarketGroup{
ID: 2426,
ParentID: 2425,
Name: `Triglavian`,
Description: `Triglavian Frigates`,
},
2427: &MarketGroup{
ID: 2427,
ParentID: 1367,
Name: `Precursor Cruisers`,
Description: `Precursor Cruisers`,
},
2428: &MarketGroup{
ID: 2428,
ParentID: 2427,
Name: `Triglavian`,
Description: `Triglavian Cruisers`,
},
2429: &MarketGroup{
ID: 2429,
ParentID: 1376,
Name: `Precursor Battleships`,
Description: `Precursor Battleships`,
},
2430: &MarketGroup{
ID: 2430,
ParentID: 2429,
Name: `Triglavian`,
Description: `Triglavian Battleships`,
},
2431: &MarketGroup{
ID: 2431,
ParentID: 10,
Name: `Precursor Turrets`,
Description: `Precursor Turrets`,
},
2432: &MarketGroup{
ID: 2432,
ParentID: 2431,
Name: `Entropic Disintegrators`,
Description: `Entropic Disintegrators`,
},
2433: &MarketGroup{
ID: 2433,
ParentID: 2432,
Name: `Small`,
Description: `Small Entropic Disintegrators`,
},
2434: &MarketGroup{
ID: 2434,
ParentID: 2432,
Name: `Medium`,
Description: `Medium Entropic Disintegrators`,
},
2435: &MarketGroup{
ID: 2435,
ParentID: 2432,
Name: `Large`,
Description: `Large Entropic Disintegrators`,
},
2436: &MarketGroup{
ID: 2436,
ParentID: 955,
Name: `Mutaplasmids`,
Description: `Mutaplasmids permanently alter a module's attributes`,
},
2437: &MarketGroup{
ID: 2437,
ParentID: 2436,
Name: `Armor Mutaplasmids`,
Description: `Mutaplasmids that are used on Armor modules`,
},
2438: &MarketGroup{
ID: 2438,
ParentID: 2436,
Name: `Shield Mutaplasmids`,
Description: `Mutaplasmids that can be used on shield modules.`,
},
2439: &MarketGroup{
ID: 2439,
ParentID: 2436,
Name: `Astronautic Mutaplasmids`,
Description: `Mutaplasmids that can be used on Propulsion modules.`,
},
2440: &MarketGroup{
ID: 2440,
ParentID: 2436,
Name: `Engineering Mutaplasmids`,
Description: `Mutaplasmids that can be used on Engineering modules`,
},
2441: &MarketGroup{
ID: 2441,
ParentID: 2436,
Name: `Warp Disruption Mutaplasmids`,
Description: `Mutaplasmids that can be used on Warp Disruption modules.`,
},
2442: &MarketGroup{
ID: 2442,
ParentID: 2436,
Name: `Stasis Webifier Mutaplasmids`,
Description: `Mutaplasmids that can be used on Stasis Webifier modules.`,
},
2443: &MarketGroup{
ID: 2443,
ParentID: 2437,
Name: `Small Armor Mutaplasmids`,
Description: `Small Armor Mutaplasmids`,
},
2444: &MarketGroup{
ID: 2444,
ParentID: 2437,
Name: `Medium Armor Mutaplasmids`,
Description: `Medium Armor Mutaplasmids`,
},
2445: &MarketGroup{
ID: 2445,
ParentID: 2437,
Name: `Large Armor Mutaplasmids`,
Description: `Large Armor Mutaplasmids`,
},
2446: &MarketGroup{
ID: 2446,
ParentID: 2438,
Name: `Small Shield Mutaplasmids`,
Description: `Small Shield Mutaplasmids`,
},
2447: &MarketGroup{
ID: 2447,
ParentID: 2438,
Name: `Medium Shield Mutaplasmids`,
Description: `Medium Shield Mutaplasmids`,
},
2448: &MarketGroup{
ID: 2448,
ParentID: 2438,
Name: `Large Shield Mutaplasmids`,
Description: `Large Shield Mutaplasmids`,
},
2449: &MarketGroup{
ID: 2449,
ParentID: 2438,
Name: `X-Large Shield Mutaplasmids`,
Description: `X-Large Shield Mutaplasmids`,
},
2450: &MarketGroup{
ID: 2450,
ParentID: 2439,
Name: `Small Astronautic Mutaplasmids`,
Description: `Small Atronautic Mutaplasmids`,
},
2451: &MarketGroup{
ID: 2451,
ParentID: 2439,
Name: `Medium Astronautic Mutaplasmids`,
Description: `Medium Astronautic Mutaplasmids`,
},
2452: &MarketGroup{
ID: 2452,
ParentID: 2439,
Name: `Large Astronautic Mutaplasmids`,
Description: `Large Astronautic Mutaplasmids`,
},
2453: &MarketGroup{
ID: 2453,
ParentID: 2440,
Name: `Small Engineering Mutaplasmids`,
Description: `Small Engineering Mutaplasmids`,
},
2454: &MarketGroup{
ID: 2454,
ParentID: 2440,
Name: `Medium Engineering Mutaplasmids`,
Description: `Medium Engineering Mutaplasmids`,
},
2455: &MarketGroup{
ID: 2455,
ParentID: 2440,
Name: `Large Engineering Mutaplasmids`,
Description: `Large Engineering Mutaplasmids`,
},
2456: &MarketGroup{
ID: 2456,
ParentID: 19,
Name: `Filaments`,
Description: `Abyssal Filaments and Jump Filaments`,
},
2457: &MarketGroup{
ID: 2457,
ParentID: 2456,
Name: `Exotic Filaments`,
Description: `Filaments connected to Abyssal pockets with Exotic Particle Storms`,
},
2458: &MarketGroup{
ID: 2458,
ParentID: 2456,
Name: `Dark Filaments`,
Description: `Filaments that connect to Abyssal pockets with Dark Matter Fields`,
},
2459: &MarketGroup{
ID: 2459,
ParentID: 2456,
Name: `Firestorm Filaments`,
Description: `Filaments that connect to Abyssal pockets with Plasma Firestorms`,
},
2460: &MarketGroup{
ID: 2460,
ParentID: 2456,
Name: `Gamma Filaments`,
Description: `Filaments that connect to Abyssal pockets with Gamma-Ray Afterglow`,
},
2461: &MarketGroup{
ID: 2461,
ParentID: 2456,
Name: `Electrical Filaments`,
Description: `Filaments that connect to Abyssal pockets with Electrical Storms`,
},
2462: &MarketGroup{
ID: 2462,
ParentID: 11,
Name: `Exotic Plasma Charges`,
Description: `Exotic Plasma Charges are used in Entropic Disintegrators`,
},
2463: &MarketGroup{
ID: 2463,
ParentID: 2462,
Name: `Standard Exotic Plasma Charges`,
Description: `Standard Exotic Plasma Charges`,
},
2464: &MarketGroup{
ID: 2464,
ParentID: 2462,
Name: `Advanced Exotic Plasma Charges`,
Description: `Advanced Exotic Plasma Charges`,
},
2465: &MarketGroup{
ID: 2465,
ParentID: 2463,
Name: `Small`,
Description: `Small Exotic Plasma Charge`,
},
2466: &MarketGroup{
ID: 2466,
ParentID: 2463,
Name: `Medium`,
Description: `Medium Exotic Plasma Charges`,
},
2467: &MarketGroup{
ID: 2467,
ParentID: 2463,
Name: `Large`,
Description: `Large Exotic Plasma Charges`,
},
2468: &MarketGroup{
ID: 2468,
ParentID: 2464,
Name: `Small`,
Description: `Small Advanced Exotic Plasma Charges`,
},
2469: &MarketGroup{
ID: 2469,
ParentID: 2464,
Name: `Medium`,
Description: `Medium Advanced Exotic Plasma Charges`,
},
2470: &MarketGroup{
ID: 2470,
ParentID: 2464,
Name: `Large`,
Description: `Large Advanced Exotic Plasma Charges`,
},
2471: &MarketGroup{
ID: 2471,
ParentID: 143,
Name: `Entropic Radiation Sinks`,
Description: `Increases damage and rate of fire for Entropic Disintegrators`,
},
2473: &MarketGroup{
ID: 2473,
ParentID: 531,
Name: `Drone Implants`,
Description: `Drone Implants`,
},
2474: &MarketGroup{
ID: 2474,
ParentID: 2473,
Name: `Implant Slot 06`,
Description: `Implant Slot 06`,
},
2475: &MarketGroup{
ID: 2475,
ParentID: 2473,
Name: `Implant Slot 07`,
Description: `Implant Slot 07`,
},
2476: &MarketGroup{
ID: 2476,
ParentID: 2473,
Name: `Implant Slot 08`,
Description: `Implant Slot 08`,
},
2477: &MarketGroup{
ID: 2477,
ParentID: 2473,
Name: `Implant Slot 09`,
Description: `Implant Slot 09`,
},
2478: &MarketGroup{
ID: 2478,
ParentID: 2473,
Name: `Implant Slot 10`,
Description: `Implant Slot 10`,
},
2479: &MarketGroup{
ID: 2479,
ParentID: 1031,
Name: `Abyssal Materials`,
Description: `Materials found in Abyssal Deadspace`,
},
2480: &MarketGroup{
ID: 2480,
ParentID: 19,
Name: `Triglavian Data`,
Description: `Triglavian Data Storage Devices`,
},
2481: &MarketGroup{
ID: 2481,
ParentID: 1960,
Name: `Precursor Battleships`,
Description: `Precursor Battleships`,
},
2482: &MarketGroup{
ID: 2482,
ParentID: 2481,
Name: `Triglavian`,
Description: `Triglavian Battleships`,
},
2483: &MarketGroup{
ID: 2483,
ParentID: 1988,
Name: `Precursor Cruisers`,
Description: `Precursor Cruisers`,
},
2484: &MarketGroup{
ID: 2484,
ParentID: 1998,
Name: `Precursor Frigates`,
Description: `Precursor Frigates`,
},
2485: &MarketGroup{
ID: 2485,
ParentID: 2483,
Name: `Triglavian`,
Description: `Triglavian Cruisers`,
},
2486: &MarketGroup{
ID: 2486,
ParentID: 2484,
Name: `Triglavian`,
Description: `Triglavian Frigates`,
},
2487: &MarketGroup{
ID: 2487,
ParentID: 24,
Name: `Cerebral Accelerators`,
Description: `Cerebral Accelerators come in a variety of different forms, but in the end the effects are similar. These drugs, devices, and boosters that significantly increase a pilot's skill development.`,
},
2488: &MarketGroup{
ID: 2488,
ParentID: 977,
Name: `Booster Slot 01`,
Description: `Booster Slot 01`,
},
2489: &MarketGroup{
ID: 2489,
ParentID: 977,
Name: `Booster Slot 02`,
Description: `Booster Slot 02`,
},
2490: &MarketGroup{
ID: 2490,
ParentID: 977,
Name: `Booster Slot 03`,
Description: `Booster Slot 03`,
},
2491: &MarketGroup{
ID: 2491,
ParentID: 2488,
Name: `Blue Pill`,
Description: `Blue Pill Boosters`,
},
2492: &MarketGroup{
ID: 2492,
ParentID: 2488,
Name: `Exile`,
Description: `Exile Boosters`,
},
2493: &MarketGroup{
ID: 2493,
ParentID: 2488,
Name: `Mindflood`,
Description: `Mindflood Boosters`,
},
2494: &MarketGroup{
ID: 2494,
ParentID: 2488,
Name: `X-Instinct`,
Description: `X-Instinct Boosters`,
},
2495: &MarketGroup{
ID: 2495,
ParentID: 2488,
Name: `Antipharmakon`,
Description: `Antipharmakon Boosters`,
},
2496: &MarketGroup{
ID: 2496,
ParentID: 2489,
Name: `Drop`,
Description: `Drop Boosters`,
},
2497: &MarketGroup{
ID: 2497,
ParentID: 2489,
Name: `Frentix`,
Description: `Frentix Boosters`,
},
2498: &MarketGroup{
ID: 2498,
ParentID: 2489,
Name: `Sooth Sayer`,
Description: `Sooth Sayer Boosters`,
},
2499: &MarketGroup{
ID: 2499,
ParentID: 2489,
Name: `Antipharmakon`,
Description: `Antipharmakon Boosters`,
},
2500: &MarketGroup{
ID: 2500,
ParentID: 2490,
Name: `Crash`,
Description: `Crash Boosters`,
},
2501: &MarketGroup{
ID: 2501,
ParentID: 2490,
Name: `Antipharmakon`,
Description: `Antipharmakon Boosters`,
},
2502: &MarketGroup{
ID: 2502,
ParentID: 977,
Name: `Booster Slot 11`,
Description: `Booster Slot 11`,
},
2503: &MarketGroup{
ID: 2503,
ParentID: 2502,
Name: `Hardshell`,
Description: `Hardshell Boosters`,
},
2504: &MarketGroup{
ID: 2504,
ParentID: 2502,
Name: `Overclocker`,
Description: `Overclocker Boosters`,
},
2505: &MarketGroup{
ID: 2505,
ParentID: 2502,
Name: `Pyrolancea`,
Description: `Pyrolancea Boosters`,
},
2506: &MarketGroup{
ID: 2506,
ParentID: 2488,
Name: `Other`,
Description: `Other Boosters`,
},
2508: &MarketGroup{
ID: 2508,
ParentID: 214,
Name: `Mass Entangler`,
Description: `Blueprints for Mass Entanglers.`,
},
2509: &MarketGroup{
ID: 2509,
ParentID: 14,
Name: `Mass Entanglers`,
Description: `Mass Entanglers lower ship mass at the cost of ship velocity.`,
},
2510: &MarketGroup{
ID: 2510,
ParentID: 1338,
Name: `Navigation Structures`,
Description: `Blueprints of navigation structures.`,
},
2511: &MarketGroup{
ID: 2511,
ParentID: 477,
Name: `Navigation Structures`,
Description: ``,
},
2512: &MarketGroup{
ID: 2512,
ParentID: 2436,
Name: `Weapon Upgrade Mutaplasmids`,
Description: ``,
},
2513: &MarketGroup{
ID: 2513,
ParentID: 2512,
Name: `Magnetic Field Stabilizer Mutaplasmids`,
Description: ``,
},
2514: &MarketGroup{
ID: 2514,
ParentID: 2512,
Name: `Heat Sink Mutaplasmids`,
Description: ``,
},
2515: &MarketGroup{
ID: 2515,
ParentID: 2512,
Name: `Gyrostabilizer Mutaplasmids`,
Description: ``,
},
2516: &MarketGroup{
ID: 2516,
ParentID: 2512,
Name: `Entropic Radiation Sink Mutaplasmids`,
Description: ``,
},
2517: &MarketGroup{
ID: 2517,
ParentID: 2512,
Name: `Ballistic Control System Mutaplasmids`,
Description: ``,
},
2518: &MarketGroup{
ID: 2518,
ParentID: 1955,
Name: `Precursor Battlecruisers`,
Description: `Precursor Battlecruisers`,
},
2519: &MarketGroup{
ID: 2519,
ParentID: 2518,
Name: `Triglavian`,
Description: `Triglavian Battlecruisers`,
},
2520: &MarketGroup{
ID: 2520,
ParentID: 1989,
Name: `Precursor Destroyers`,
Description: `Precursor Destroyers`,
},
2521: &MarketGroup{
ID: 2521,
ParentID: 2520,
Name: `Triglavian`,
Description: `Triglavian Destroyers`,
},
2522: &MarketGroup{
ID: 2522,
ParentID: 1372,
Name: `Precursor Destroyers`,
Description: `Precursor Destroyers`,
},
2523: &MarketGroup{
ID: 2523,
ParentID: 2522,
Name: `Triglavian`,
Description: `Triglavian Destroyers`,
},
2524: &MarketGroup{
ID: 2524,
ParentID: 1374,
Name: `Precursor Battlecruisers`,
Description: `Precursor Battlecruisers.`,
},
2525: &MarketGroup{
ID: 2525,
ParentID: 2524,
Name: `Triglavian`,
Description: `Triglavian Battlecruisers.`,
},
2526: &MarketGroup{
ID: 2526,
ParentID: 437,
Name: `Triglavian`,
Description: `Triglavian Logistics Cruisers`,
},
2527: &MarketGroup{
ID: 2527,
ParentID: 14,
Name: `Mutadaptive Remote Armor Repairers`,
Description: ``,
},
2529: &MarketGroup{
ID: 2529,
ParentID: 2527,
Name: `Medium`,
Description: `Medium Mutadaptive Remote Armor Repairers`,
},
2530: &MarketGroup{
ID: 2530,
ParentID: 977,
Name: `Booster Slot 12`,
Description: ``,
},
2531: &MarketGroup{
ID: 2531,
ParentID: 977,
Name: `Booster Slot 14`,
Description: ``,
},
2532: &MarketGroup{
ID: 2532,
ParentID: 2436,
Name: `Damage Control Mutaplasmids`,
Description: `Damage Control and Assault Damage Control Mutaplasmids`,
},
2533: &MarketGroup{
ID: 2533,
ParentID: 2532,
Name: `Damage Control Mutaplasmids`,
Description: `Damage Control Mutaplasmids`,
},
2534: &MarketGroup{
ID: 2534,
ParentID: 2532,
Name: `Assault Damage Control Mutaplasmids`,
Description: `Assault Damage Control Mutaplasmids`,
},
2535: &MarketGroup{
ID: 2535,
ParentID: 448,
Name: `Triglavian`,
Description: `Triglavian Heavy Assault Cruisers`,
},
2536: &MarketGroup{
ID: 2536,
ParentID: 432,
Name: `Triglavian`,
Description: `Triglavian Assault Frigates`,
},
2537: &MarketGroup{
ID: 2537,
ParentID: 2125,
Name: `Triglavian`,
Description: `Triglavian Command Destroyers`,
},
2538: &MarketGroup{
ID: 2538,
ParentID: 54,
Name: `Bezdnacine`,
Description: `Bezdnacine`,
},
2539: &MarketGroup{
ID: 2539,
ParentID: 54,
Name: `Rakovene`,
Description: `Rakovene`,
},
2540: &MarketGroup{
ID: 2540,
ParentID: 54,
Name: `Talassonite`,
Description: `Talassonite`,
},
2658: &MarketGroup{
ID: 2658,
ParentID: 65,
Name: `Triglavian`,
Description: `Advanced Components of Triglavian origin.`,
},
2690: &MarketGroup{
ID: 2690,
ParentID: 761,
Name: `<NAME>`,
Description: `Triglavian Dreadnoughts`,
},
2691: &MarketGroup{
ID: 2691,
ParentID: 2432,
Name: `Extra Large`,
Description: `Capital ship entropic disintegrators, for use on dreadnoughts and titans.`,
},
2692: &MarketGroup{
ID: 2692,
ParentID: 2463,
Name: `Extra Large`,
Description: `For use with dreadnought-sized entropic disintegrators.`,
},
2693: &MarketGroup{
ID: 2693,
ParentID: 1883,
Name: `Triglavian`,
Description: ``,
},
2701: &MarketGroup{
ID: 2701,
ParentID: 1922,
Name: `HyperNet Relay`,
Description: `Items used in the HyperNet Relay`,
},
2702: &MarketGroup{
ID: 2702,
ParentID: 1612,
Name: `Special Edition Corvettes`,
Description: `Corvettes that have been offered to capsuleers on occasion for limited periods.`,
},
2703: &MarketGroup{
ID: 2703,
ParentID: 1971,
Name: `Precursor Dreadnoughts`,
Description: `Precursor Dreadnoughts`,
},
2704: &MarketGroup{
ID: 2704,
ParentID: 2703,
Name: `Triglavian`,
Description: `Triglavian Dreadnoughts`,
},
2706: &MarketGroup{
ID: 2706,
ParentID: 2456,
Name: `Jump Filaments`,
Description: `Filaments that jump fleets unpredictably to other locations in known space.`,
},
2728: &MarketGroup{
ID: 2728,
ParentID: 11,
Name: `Condenser Packs`,
Description: `Condenser Pack charges used in Vorton Projectors`,
},
2729: &MarketGroup{
ID: 2729,
ParentID: 2728,
Name: `Standard Condenser Packs`,
Description: `Standard Condenser Packs`,
},
2730: &MarketGroup{
ID: 2730,
ParentID: 2728,
Name: `Advanced Condenser Packs`,
Description: `Advanced Condenser Packs`,
},
2734: &MarketGroup{
ID: 2734,
ParentID: 2729,
Name: `Small`,
Description: ``,
},
2735: &MarketGroup{
ID: 2735,
ParentID: 2729,
Name: `Medium`,
Description: ``,
},
2736: &MarketGroup{
ID: 2736,
ParentID: 2729,
Name: `Large`,
Description: ``,
},
2737: &MarketGroup{
ID: 2737,
ParentID: 2730,
Name: `Small`,
Description: ``,
},
2738: &MarketGroup{
ID: 2738,
ParentID: 2730,
Name: `Medium`,
Description: ``,
},
2739: &MarketGroup{
ID: 2739,
ParentID: 2730,
Name: `Large`,
Description: ``,
},
2740: &MarketGroup{
ID: 2740,
ParentID: 143,
Name: `Vorton Tuning Systems`,
Description: ``,
},
2741: &MarketGroup{
ID: 2741,
ParentID: 10,
Name: `<NAME>`,
Description: `Vorton Projectors made by Upwell`,
},
2742: &MarketGroup{
ID: 2742,
ParentID: 2741,
Name: `Small`,
Description: ``,
},
2743: &MarketGroup{
ID: 2743,
ParentID: 2741,
Name: `Medium`,
Description: ``,
},
2744: &MarketGroup{
ID: 2744,
ParentID: 2741,
Name: `Large`,
Description: ``,
},
2747: &MarketGroup{
ID: 2747,
ParentID: 2456,
Name: `Proving Ground Filaments`,
Description: `Filaments that connect to the Abyssal Proving Grounds for limited time PVP events.`,
},
2749: &MarketGroup{
ID: 2749,
ParentID: 2202,
Name: `Quantum Cores`,
Description: `Quantum Cores are FTL communications facillities vital to the efficient operation of Upwell structures.`,
},
2750: &MarketGroup{
ID: 2750,
ParentID: 2439,
Name: `Capital Astronautic Mutaplasmids`,
Description: `Capital Astronautic Mutaplasmids`,
},
2751: &MarketGroup{
ID: 2751,
ParentID: 2437,
Name: `Capital Armor Mutaplasmids`,
Description: `Capital Armor Mutaplasmids`,
},
2752: &MarketGroup{
ID: 2752,
ParentID: 2438,
Name: `Capital Shield Mutaplasmids`,
Description: `Capital Shield Mutaplasmids`,
},
2753: &MarketGroup{
ID: 2753,
ParentID: 2440,
Name: `Capital Engineering Mutaplasmids`,
Description: `Capital Engineering Mutaplasmids`,
},
2754: &MarketGroup{
ID: 2754,
ParentID: 2512,
Name: `Siege Module Mutaplasmids`,
Description: `Siege Module Mutaplasmids`,
},
2756: &MarketGroup{
ID: 2756,
ParentID: 2456,
Name: `Triglavian Space Outbound`,
Description: `Filaments that jump users out of the Triglavian Region Pochven`,
},
2757: &MarketGroup{
ID: 2757,
ParentID: 2456,
Name: `Triglavian Space Inbound`,
Description: `Filaments that jump users into the Triglavian Region Pochven`,
},
2760: &MarketGroup{
ID: 2760,
ParentID: 1659,
Name: `Special Edition Deployable Structures`,
Description: `Special Edition Deployable Structures`,
},
} | sde/marketGroups.go | 0.568176 | 0.567817 | marketGroups.go | starcoder |
package sort
/*
InsertionSort sorts the index range [start, end) in place using insertion
sort. It never touches the data directly; element ordering and movement are
delegated entirely to the caller-supplied callbacks, so it can sort any
indexable collection. Nothing is returned.

Parameters:
===========
start: Start of sorting, this position is also included in sorted array
end: End of sorting, this position is not included in sorted array
compare: Reports whether the element at the first index must sort before
the element at the second index; choice of comparison controls ascending
or descending order
swap: Exchanges the elements at the two given indices

For plain slices prefer the InsertionSortInts, InsertionSortFloat64s and
InsertionSortString convenience wrappers.
*/
func InsertionSort(start, end int, compare func(int, int) bool, swap func(int, int)) {
	for cur := start + 1; cur < end; cur++ {
		// Sink the element at cur leftwards until it is in order with
		// the already-sorted prefix [start, cur).
		pos := cur
		for pos > start && compare(pos, pos-1) {
			swap(pos, pos-1)
			pos--
		}
	}
}
/*
InsertionSortInts sorts the sub-range [start, end) of an integer slice in
ascending order. Performs an in place sort with no return.

The requested range is clamped to the bounds of arr, so out-of-range values
for start or end cannot cause an index panic (the "sanity check" promised by
this family of helpers).

Parameters:
===========
start: Start of sorting, this position is also included in sorted array
end: End of sorting, this position is not included in sorted array
arr: Integer slice to sort in place
*/
func InsertionSortInts(start, end int, arr []int) {
	// Sanity check: clamp the range so callers cannot index outside arr.
	if start < 0 {
		start = 0
	}
	if end > len(arr) {
		end = len(arr)
	}
	InsertionSort(start, end,
		func(i int, j int) bool { return arr[i] < arr[j] }, func(i int, j int) { arr[i], arr[j] = arr[j], arr[i] })
}
/*
InsertionSortFloat64s sorts the sub-range [start, end) of a float64 slice in
ascending order. Performs an in place sort with no return.

The requested range is clamped to the bounds of arr, so out-of-range values
for start or end cannot cause an index panic (the "sanity check" promised by
this family of helpers).

Parameters:
===========
start: Start of sorting, this position is also included in sorted array
end: End of sorting, this position is not included in sorted array
arr: Float64 slice to sort in place
*/
func InsertionSortFloat64s(start, end int, arr []float64) {
	// Sanity check: clamp the range so callers cannot index outside arr.
	if start < 0 {
		start = 0
	}
	if end > len(arr) {
		end = len(arr)
	}
	InsertionSort(start, end,
		func(i int, j int) bool { return arr[i] < arr[j] }, func(i int, j int) { arr[i], arr[j] = arr[j], arr[i] })
}
/*
InsertionSortString performs sort on string array with given range.
Performs an in place sort with no return
Parameters:
===========
start: Start of sorting, this position is also included in sorted array
end: End of sorting, this position is not included in sorted array
*/
func InsertionSortString(start, end int, arr []string) {
InsertionSort(start, end,
func(i int, j int) bool { return arr[i] < arr[j] }, func(i int, j int) { arr[i], arr[j] = arr[j], arr[i] })
} | algorithms/sort/insertionSort.go | 0.741206 | 0.63409 | insertionSort.go | starcoder |
package interpreter
import (
"github.com/google/cel-go/common/overloads"
"github.com/google/cel-go/common/types"
"github.com/google/cel-go/common/types/ref"
"github.com/google/cel-go/common/types/traits"
)
// InterpretableDecorator is a functional interface for decorating or replacing
// Interpretable expression nodes at construction time.
//
// A decorator receives a planned Interpretable and returns either the same
// node, a replacement node, or an error.
type InterpretableDecorator func(Interpretable) (Interpretable, error)
// decObserveEval records evaluation state into an EvalState object.
//
// Nodes are wrapped in watch variants that report results to the observer.
// Already-watching nodes are returned untouched, and that check deliberately
// happens before the interface checks below so instrumented attribute and
// const nodes are not wrapped a second time.
func decObserveEval(observer EvalObserver) InterpretableDecorator {
	return func(i Interpretable) (Interpretable, error) {
		switch i.(type) {
		case *evalWatch, *evalWatchAttr, *evalWatchConst:
			// Already instrumented; nothing more to do.
			return i, nil
		}
		if attr, ok := i.(InterpretableAttribute); ok {
			return &evalWatchAttr{InterpretableAttribute: attr, observer: observer}, nil
		}
		if c, ok := i.(InterpretableConst); ok {
			return &evalWatchConst{InterpretableConst: c, observer: observer}, nil
		}
		return &evalWatch{Interpretable: i, observer: observer}, nil
	}
}
// decInterruptFolds creates an intepretable decorator which marks comprehensions as interruptable
// where the interrupt state is communicated via a hidden variable on the Activation.
func decInterruptFolds() InterpretableDecorator {
	return func(i Interpretable) (Interpretable, error) {
		// Only fold (comprehension) nodes are affected; everything else
		// passes through unchanged.
		if fold, ok := i.(*evalFold); ok {
			fold.interruptable = true
			return fold, nil
		}
		return i, nil
	}
}
// decDisableShortcircuits ensures that all branches of an expression will be evaluated, no short-circuiting.
//
// Logical or/and, folds, and conditional attributes are each replaced by an
// exhaustive variant; all other nodes pass through unchanged.
func decDisableShortcircuits() InterpretableDecorator {
	return func(i Interpretable) (Interpretable, error) {
		switch e := i.(type) {
		case *evalOr:
			return &evalExhaustiveOr{id: e.id, lhs: e.lhs, rhs: e.rhs}, nil
		case *evalAnd:
			return &evalExhaustiveAnd{id: e.id, lhs: e.lhs, rhs: e.rhs}, nil
		case *evalFold:
			// Folds are mutated in place rather than replaced.
			e.exhaustive = true
			return e, nil
		case InterpretableAttribute:
			// Only ternary (conditional) attributes need the exhaustive form.
			condAttr, isCond := e.Attr().(*conditionalAttribute)
			if !isCond {
				return i, nil
			}
			return &evalExhaustiveConditional{
				id:      condAttr.id,
				attr:    condAttr,
				adapter: e.Adapter(),
			}, nil
		}
		return i, nil
	}
}
// decOptimize optimizes the program plan by looking for common evaluation
// patterns and conditionally precomputing the result:
// - build list and map values with constant elements.
// - convert 'in' operations to set membership tests if possible.
// - fold type conversions applied to constants.
func decOptimize() InterpretableDecorator {
	return func(i Interpretable) (Interpretable, error) {
		switch inst := i.(type) {
		case *evalList:
			return maybeBuildListLiteral(i, inst)
		case *evalMap:
			return maybeBuildMapLiteral(i, inst)
		case InterpretableCall:
			// 'in' over a constant list may become a set-membership test.
			if inst.OverloadID() == overloads.InList {
				return maybeOptimizeSetMembership(i, inst)
			}
			// Type conversions of constants can be folded at plan time.
			if overloads.IsTypeConversionFunction(inst.Function()) {
				return maybeOptimizeConstUnary(i, inst)
			}
		}
		return i, nil
	}
}
// decRegexOptimizer compiles regex pattern string constants at plan time.
// Optimizations are indexed by overload ID (preferred, more specific) and by
// function name (fallback). When the call's pattern argument is a constant
// string, the matching optimization's Factory replaces the call with a
// variant that uses the pre-compiled expression.
func decRegexOptimizer(regexOptimizations ...*RegexOptimization) InterpretableDecorator {
	functionMatchMap := make(map[string]*RegexOptimization)
	overloadMatchMap := make(map[string]*RegexOptimization)
	for _, m := range regexOptimizations {
		functionMatchMap[m.Function] = m
		if m.OverloadID != "" {
			overloadMatchMap[m.OverloadID] = m
		}
	}
	return func(i Interpretable) (Interpretable, error) {
		call, ok := i.(InterpretableCall)
		if !ok {
			return i, nil
		}
		var matcher *RegexOptimization
		var found bool
		if call.OverloadID() != "" {
			matcher, found = overloadMatchMap[call.OverloadID()]
		}
		if !found {
			matcher, found = functionMatchMap[call.Function()]
		}
		// No optimization registered for this call, or the pattern argument
		// position is out of range for the actual argument list.
		if !found || matcher.RegexIndex >= len(call.Args()) {
			return i, nil
		}
		args := call.Args()
		regexArg := args[matcher.RegexIndex]
		regexStr, isConst := regexArg.(InterpretableConst)
		if !isConst {
			return i, nil
		}
		pattern, ok := regexStr.Value().(types.String)
		if !ok {
			return i, nil
		}
		return matcher.Factory(call, string(pattern))
	}
}
// maybeOptimizeConstUnary folds a unary call on a constant argument into a
// single constant node by evaluating it once against an empty activation.
// Non-unary or non-constant calls are returned unchanged; an evaluation
// error aborts planning.
func maybeOptimizeConstUnary(i Interpretable, call InterpretableCall) (Interpretable, error) {
	args := call.Args()
	if len(args) != 1 {
		return i, nil
	}
	if _, constant := args[0].(InterpretableConst); !constant {
		return i, nil
	}
	// The argument is constant, so the result cannot depend on the activation.
	result := call.Eval(EmptyActivation())
	if types.IsError(result) {
		return nil, result.(*types.Err)
	}
	return NewConstValue(call.ID(), result), nil
}
// maybeBuildListLiteral pre-computes a list literal whose elements are all
// compile-time constants, replacing it with a single constant value. If any
// element is non-constant the node is returned unchanged.
func maybeBuildListLiteral(i Interpretable, l *evalList) (Interpretable, error) {
	for _, e := range l.elems {
		if _, constant := e.(InterpretableConst); !constant {
			return i, nil
		}
	}
	// All elements are constant, so evaluation cannot depend on the activation.
	return NewConstValue(l.ID(), l.Eval(EmptyActivation())), nil
}
// maybeBuildMapLiteral pre-computes a map literal whose keys and values are
// all compile-time constants, replacing it with a single constant value;
// otherwise the node is returned unchanged.
func maybeBuildMapLiteral(i Interpretable, mp *evalMap) (Interpretable, error) {
	for idx := range mp.keys {
		if _, constant := mp.keys[idx].(InterpretableConst); !constant {
			return i, nil
		}
		if _, constant := mp.vals[idx].(InterpretableConst); !constant {
			return i, nil
		}
	}
	return NewConstValue(mp.ID(), mp.Eval(EmptyActivation())), nil
}
// maybeOptimizeSetMembership may convert an 'in' operation against a list to
// a map key membership test if the following conditions are true:
// - the list is a constant with homogeneous element types.
// - the elements are all of primitive type.
func maybeOptimizeSetMembership(i Interpretable, inlist InterpretableCall) (Interpretable, error) {
	args := inlist.Args()
	lhs := args[0]
	rhs := args[1]
	l, isConst := rhs.(InterpretableConst)
	if !isConst {
		return i, nil
	}
	// When the incoming binary call is flagged with the InList overload, the
	// value will always be convertible to a `traits.Lister` type.
	list := l.Value().(traits.Lister)
	// 'x in []' is false for every x; fold to a constant immediately.
	if list.Size() == types.IntZero {
		return NewConstValue(inlist.ID(), types.False), nil
	}
	it := list.Iterator()
	valueSet := make(map[ref.Val]ref.Val)
	for it.HasNext() == types.True {
		elem := it.Next()
		if !types.IsPrimitiveType(elem) {
			// Note, non-primitive types are not yet supported.
			return i, nil
		}
		valueSet[elem] = types.True
		// Numeric elements are also registered under their lossless
		// cross-type representations so that e.g. 1.0 in [1] holds.
		switch ev := elem.(type) {
		case types.Double:
			iv := ev.ConvertToType(types.IntType)
			// Ensure that only lossless conversions are added to the set
			if !types.IsError(iv) && iv.Equal(ev) == types.True {
				valueSet[iv] = types.True
			}
			// Ensure that only lossless conversions are added to the set
			uv := ev.ConvertToType(types.UintType)
			if !types.IsError(uv) && uv.Equal(ev) == types.True {
				valueSet[uv] = types.True
			}
		case types.Int:
			dv := ev.ConvertToType(types.DoubleType)
			if !types.IsError(dv) {
				valueSet[dv] = types.True
			}
			uv := ev.ConvertToType(types.UintType)
			if !types.IsError(uv) {
				valueSet[uv] = types.True
			}
		case types.Uint:
			dv := ev.ConvertToType(types.DoubleType)
			if !types.IsError(dv) {
				valueSet[dv] = types.True
			}
			iv := ev.ConvertToType(types.IntType)
			if !types.IsError(iv) {
				valueSet[iv] = types.True
			}
		}
	}
	return &evalSetMembership{
		inst:     inlist,
		arg:      lhs,
		valueSet: valueSet,
	}, nil
}
package runeset
//Signal is a zero-size placeholder used as the map value type for RuneSet,
//so set membership costs no per-entry storage.
type Signal struct{}

//yes is the Signal value stored for every member of a set.
var yes Signal

//RuneSet is a set of runes implemented as a map[rune]Signal, with the
//methods you would expect from a set type: Contains, Union, Intersection,
//and Difference. (The previous comment said map[rune]bool, which was wrong.)
type RuneSet map[rune]Signal
//Contains reports whether r is a member of the RuneSet.
func (rs RuneSet) Contains(r rune) bool {
	if _, found := rs[r]; found {
		return true
	}
	return false
}
//Intersection returns the runes that are present in rs and in every one of sets.
func (rs RuneSet) Intersection(sets ...RuneSet) (intersection RuneSet) {
	intersection = make(RuneSet)
nextRune:
	for r := range rs {
		for _, set := range sets {
			if !set.Contains(r) {
				continue nextRune
			}
		}
		intersection[r] = yes
	}
	return intersection
}
//Intersection is the package-level form of RuneSet.Intersection: it returns
//the runes common to set and every set in sets.
func Intersection(set RuneSet, sets ...RuneSet) RuneSet {
	return set.Intersection(sets...)
}
//Equal reports whether rs and other contain exactly the same runes.
func (rs RuneSet) Equal(other RuneSet) bool {
	if len(rs) != len(other) {
		return false
	}
	// Equal sizes plus full containment of rs in other implies equality.
	for r := range rs {
		if _, ok := other[r]; !ok {
			return false
		}
	}
	return true
}
//Union returns a new set holding every rune in rs or in any of sets.
func (rs RuneSet) Union(sets ...RuneSet) (union RuneSet) {
	all := make([]RuneSet, 0, len(sets)+1)
	all = append(all, sets...)
	all = append(all, rs)
	return Union(all...)
}
//Union returns a new set holding every rune present in at least one of sets.
func Union(sets ...RuneSet) RuneSet {
	result := make(RuneSet)
	for _, s := range sets {
		for member := range s {
			result[member] = yes
		}
	}
	return result
}
//Difference returns the runes in rs that appear in none of the other sets;
//e.g. with rs = {'a','b','c'}, rs.Difference({'b','c'}) = {'a'}.
func (rs RuneSet) Difference(sets ...RuneSet) (difference RuneSet) {
	difference = make(RuneSet)
	for r := range rs {
		keep := true
		for _, set := range sets {
			if set.Contains(r) {
				keep = false
				break
			}
		}
		if keep {
			difference[r] = yes
		}
	}
	return difference
}
//FromRunes builds a RuneSet containing each rune in runes.
func FromRunes(runes ...rune) RuneSet {
	set := make(RuneSet, len(runes))
	for _, member := range runes {
		set[member] = yes
	}
	return set
}
//FromString builds a RuneSet of every rune that occurs in s.
func FromString(s string) (set RuneSet) {
	return FromRunes([]rune(s)...)
}
//Copy returns a copy of the RuneSet.
func (rs RuneSet) Copy() RuneSet {
copy := make(RuneSet)
for k, v := range rs {
copy[k] = v
}
return copy
} | runeset/runeset.go | 0.775307 | 0.418519 | runeset.go | starcoder |
package gofilter
import (
"bytes"
"net"
"regexp"
"strings"
)
// node is the interface implemented by every filter-expression node; Apply
// reports whether the packet/field map p satisfies the node.
type node interface {
	Apply(map[string]interface{}) bool
}

// node2 extends node for leaf comparison nodes that test a single named
// field; applyOne tests one concrete value (see applyRange).
type node2 interface {
	Apply(map[string]interface{}) bool
	FieldName() string
	applyOne(v interface{}) bool
}
// AND
// nodeAnd is the logical conjunction of two filter nodes.
type nodeAnd struct {
	left  node
	right node
}

// Apply reports whether both children match p (short-circuits on the left).
func (n *nodeAnd) Apply(p map[string]interface{}) bool {
	if !n.left.Apply(p) {
		return false
	}
	return n.right.Apply(p)
}
// OR
// nodeOr is the logical disjunction of two filter nodes.
type nodeOr struct {
	left  node
	right node
}

// Apply reports whether either child matches p (short-circuits on the left).
func (n *nodeOr) Apply(p map[string]interface{}) bool {
	if n.left.Apply(p) {
		return true
	}
	return n.right.Apply(p)
}
// NOT
// nodeNot negates its child node.
type nodeNot struct {
	right node
}

// Apply reports whether the child node does not match p.
func (n *nodeNot) Apply(p map[string]interface{}) bool {
	if n.right.Apply(p) {
		return false
	}
	return true
}
// EXIST
// nodeExist matches when the named field is present in the packet map,
// regardless of its value.
type nodeExist struct {
	fieldName string
}

// Apply reports whether fieldName has any binding in p.
func (n *nodeExist) Apply(p map[string]interface{}) bool {
	if _, present := p[n.fieldName]; present {
		return true
	}
	return false
}
// EQUAL
// nodeEq matches when the field value equals the node's constant.
type nodeEq struct {
	fieldName string
	value     interface{}
}

// Apply evaluates the node against every value bound to the field name.
func (n *nodeEq) Apply(p map[string]interface{}) bool {
	return applyRange(p, n)
}

// applyOne compares a single value v against the constant. The outer switch
// dispatches on the constant's type; each case then checks that v has a
// compatible type. Any type mismatch yields false.
func (n *nodeEq) applyOne(v interface{}) bool {
	switch n.value.(type) {
	case []byte:
		// A []byte constant compares against []byte or string values.
		if x, ok := v.([]byte); ok {
			return bytes.Equal(x, n.value.([]byte))
		} else if x, ok := v.(string); ok {
			return bytes.Equal([]byte(x), n.value.([]byte))
		}
	case net.IP:
		if x, ok := v.(net.IP); ok {
			return x.Equal(n.value.(net.IP))
		}
	case *net.IPNet:
		// An IP value "equals" a network constant when it lies inside it.
		if x, ok := v.(net.IP); ok {
			return n.value.(*net.IPNet).Contains(x)
		}
	case net.HardwareAddr:
		if x, ok := v.(net.HardwareAddr); ok {
			return bytes.Equal(n.value.([]byte), x)
		}
	case uint:
		if x, ok := v.(uint); ok {
			return x == n.value.(uint)
		}
	case uint8:
		if x, ok := v.(uint8); ok {
			return x == n.value.(uint8)
		}
	case uint16:
		if x, ok := v.(uint16); ok {
			return x == n.value.(uint16)
		}
	case uint32:
		if x, ok := v.(uint32); ok {
			return x == n.value.(uint32)
		}
	case uint64:
		if x, ok := v.(uint64); ok {
			return x == n.value.(uint64)
		}
	case int:
		if x, ok := v.(int); ok {
			return x == n.value.(int)
		}
	case int8:
		if x, ok := v.(int8); ok {
			return x == n.value.(int8)
		}
	case int16:
		if x, ok := v.(int16); ok {
			return x == n.value.(int16)
		}
	case int32:
		if x, ok := v.(int32); ok {
			return x == n.value.(int32)
		}
	case int64:
		if x, ok := v.(int64); ok {
			return x == n.value.(int64)
		}
	case float32:
		if x, ok := v.(float32); ok {
			return x == n.value.(float32)
		}
	case float64:
		if x, ok := v.(float64); ok {
			return x == n.value.(float64)
		}
	case string:
		if x, ok := v.(string); ok {
			return x == n.value.(string)
		}
	}
	return false // was: v == n.value (direct interface comparison)
}

// FieldName returns the packet field this node tests.
func (n *nodeEq) FieldName() string {
	return n.fieldName
}
// TEST_GT
// nodeGt matches when the field value is strictly greater than the constant.
type nodeGt struct {
	fieldName string
	value     interface{}
}

// Apply evaluates the node against every value bound to the field name.
func (n *nodeGt) Apply(p map[string]interface{}) bool {
	return applyRange(p, n)
}

// applyOne compares a single value v against the constant. Byte-like values
// compare lexicographically (bytes.Compare returns +1 for "greater"), IP
// values are normalized to 16-byte form first, and any type mismatch yields
// false.
func (n *nodeGt) applyOne(v interface{}) bool {
	switch n.value.(type) {
	case []byte:
		if x, ok := v.([]byte); ok {
			return bytes.Compare(x, n.value.([]byte)) == 1
		} else if x, ok := v.(string); ok {
			return bytes.Compare([]byte(x), n.value.([]byte)) == 1
		}
	case net.IP:
		if x, ok := v.(net.IP); ok {
			return bytes.Compare([]byte(x.To16()), []byte(n.value.(net.IP))) == 1
		}
	case *net.IPNet:
		if x, ok := v.(net.IP); ok {
			return bytes.Compare([]byte(x.To16()), []byte(n.value.(*net.IPNet).IP)) == 1
		}
	case net.HardwareAddr:
		if x, ok := v.(net.HardwareAddr); ok {
			return bytes.Compare([]byte(x), []byte(n.value.(net.HardwareAddr))) == 1
		}
	case uint:
		if x, ok := v.(uint); ok {
			return x > n.value.(uint)
		}
	case uint8:
		if x, ok := v.(uint8); ok {
			return x > n.value.(uint8)
		}
	case uint16:
		if x, ok := v.(uint16); ok {
			return x > n.value.(uint16)
		}
	case uint32:
		if x, ok := v.(uint32); ok {
			return x > n.value.(uint32)
		}
	case uint64:
		if x, ok := v.(uint64); ok {
			return x > n.value.(uint64)
		}
	case int:
		if x, ok := v.(int); ok {
			return x > n.value.(int)
		}
	case int8:
		if x, ok := v.(int8); ok {
			return x > n.value.(int8)
		}
	case int16:
		if x, ok := v.(int16); ok {
			return x > n.value.(int16)
		}
	case int32:
		if x, ok := v.(int32); ok {
			return x > n.value.(int32)
		}
	case int64:
		if x, ok := v.(int64); ok {
			return x > n.value.(int64)
		}
	case float32:
		if x, ok := v.(float32); ok {
			return x > n.value.(float32)
		}
	case float64:
		if x, ok := v.(float64); ok {
			return x > n.value.(float64)
		}
	case string:
		if x, ok := v.(string); ok {
			return x > n.value.(string)
		}
	}
	return false
}

// FieldName returns the packet field this node tests.
func (n *nodeGt) FieldName() string {
	return n.fieldName
}
// TEST_GE
// nodeGe matches when the field value is greater than or equal to the
// constant.
type nodeGe struct {
	fieldName string
	value     interface{}
}

// Apply evaluates the node against every value bound to the field name.
func (n *nodeGe) Apply(p map[string]interface{}) bool {
	return applyRange(p, n)
}

// applyOne compares a single value v against the constant. Byte-like values
// compare lexicographically, IP values are normalized to 16-byte form first,
// and any type mismatch yields false.
func (n *nodeGe) applyOne(v interface{}) bool {
	switch n.value.(type) {
	case []byte:
		if x, ok := v.([]byte); ok {
			return bytes.Compare(x, n.value.([]byte)) >= 0
		} else if x, ok := v.(string); ok {
			return bytes.Compare([]byte(x), n.value.([]byte)) >= 0
		}
	case net.IP:
		if x, ok := v.(net.IP); ok {
			return bytes.Compare([]byte(x.To16()), []byte(n.value.(net.IP))) >= 0
		}
	case *net.IPNet:
		if x, ok := v.(net.IP); ok {
			return bytes.Compare([]byte(x.To16()), []byte(n.value.(*net.IPNet).IP)) >= 0
		}
	case net.HardwareAddr:
		if x, ok := v.(net.HardwareAddr); ok {
			return bytes.Compare([]byte(x), []byte(n.value.(net.HardwareAddr))) >= 0
		}
	case int:
		if x, ok := v.(int); ok {
			return x >= n.value.(int)
		}
	case int8:
		if x, ok := v.(int8); ok {
			return x >= n.value.(int8)
		}
	case int16:
		if x, ok := v.(int16); ok {
			return x >= n.value.(int16)
		}
	case int32:
		if x, ok := v.(int32); ok {
			return x >= n.value.(int32)
		}
	case int64:
		if x, ok := v.(int64); ok {
			return x >= n.value.(int64)
		}
	case uint:
		if x, ok := v.(uint); ok {
			return x >= n.value.(uint)
		}
	case uint8:
		if x, ok := v.(uint8); ok {
			return x >= n.value.(uint8)
		}
	case uint16:
		if x, ok := v.(uint16); ok {
			return x >= n.value.(uint16)
		}
	case uint32:
		if x, ok := v.(uint32); ok {
			return x >= n.value.(uint32)
		}
	case uint64:
		if x, ok := v.(uint64); ok {
			return x >= n.value.(uint64)
		}
	case float32:
		if x, ok := v.(float32); ok {
			return x >= n.value.(float32)
		}
	case float64:
		if x, ok := v.(float64); ok {
			return x >= n.value.(float64)
		}
	case string:
		if x, ok := v.(string); ok {
			return x >= n.value.(string)
		}
	}
	return false
}

// FieldName returns the packet field this node tests.
func (n *nodeGe) FieldName() string {
	return n.fieldName
}
// TEST_LT
// nodeLt matches when the field value is strictly less than the constant.
type nodeLt struct {
	fieldName string
	value     interface{}
}

// Apply evaluates the node against every value bound to the field name.
func (n *nodeLt) Apply(p map[string]interface{}) bool {
	return applyRange(p, n)
}

// applyOne compares a single value v against the constant. Byte-like values
// compare lexicographically (bytes.Compare returns a negative result for
// "less"), IP values are normalized to 16-byte form first, and any type
// mismatch yields false.
func (n *nodeLt) applyOne(v interface{}) bool {
	switch n.value.(type) {
	case []byte:
		if x, ok := v.([]byte); ok {
			return bytes.Compare(x, n.value.([]byte)) < 0
		} else if x, ok := v.(string); ok {
			return bytes.Compare([]byte(x), n.value.([]byte)) < 0
		}
	case net.IP:
		if x, ok := v.(net.IP); ok {
			return bytes.Compare([]byte(x.To16()), []byte(n.value.(net.IP))) < 0
		}
	case *net.IPNet:
		if x, ok := v.(net.IP); ok {
			return bytes.Compare([]byte(x.To16()), []byte(n.value.(*net.IPNet).IP)) < 0
		}
	case net.HardwareAddr:
		if x, ok := v.(net.HardwareAddr); ok {
			return bytes.Compare([]byte(x), []byte(n.value.(net.HardwareAddr))) < 0
		}
	case uint:
		if x, ok := v.(uint); ok {
			return x < n.value.(uint)
		}
	case uint8:
		if x, ok := v.(uint8); ok {
			return x < n.value.(uint8)
		}
	case uint16:
		if x, ok := v.(uint16); ok {
			return x < n.value.(uint16)
		}
	case uint32:
		if x, ok := v.(uint32); ok {
			return x < n.value.(uint32)
		}
	case uint64:
		if x, ok := v.(uint64); ok {
			return x < n.value.(uint64)
		}
	case int:
		if x, ok := v.(int); ok {
			return x < n.value.(int)
		}
	case int8:
		if x, ok := v.(int8); ok {
			return x < n.value.(int8)
		}
	case int16:
		if x, ok := v.(int16); ok {
			return x < n.value.(int16)
		}
	case int32:
		if x, ok := v.(int32); ok {
			return x < n.value.(int32)
		}
	case int64:
		if x, ok := v.(int64); ok {
			return x < n.value.(int64)
		}
	case float32:
		if x, ok := v.(float32); ok {
			return x < n.value.(float32)
		}
	case float64:
		if x, ok := v.(float64); ok {
			return x < n.value.(float64)
		}
	case string:
		if x, ok := v.(string); ok {
			return x < n.value.(string)
		}
	}
	return false
}

// FieldName returns the packet field this node tests.
func (n *nodeLt) FieldName() string {
	return n.fieldName
}
// TEST_LE
// nodeLe matches when the field value is less than or equal to the constant.
type nodeLe struct {
	fieldName string
	value     interface{}
}

// Apply evaluates the node against every value bound to the field name.
func (n *nodeLe) Apply(p map[string]interface{}) bool {
	return applyRange(p, n)
}

// applyOne compares a single value v against the constant; any type mismatch
// yields false.
//
// Bug fix: the net.IP and *net.IPNet cases now normalize the incoming IP
// with To16() before the byte comparison, matching nodeGt/nodeGe/nodeLt.
// Without the normalization a 4-byte IPv4 value was compared byte-wise
// against a 16-byte constant, so the result was meaningless.
func (n *nodeLe) applyOne(v interface{}) bool {
	switch n.value.(type) {
	case []byte:
		if x, ok := v.([]byte); ok {
			return bytes.Compare(x, n.value.([]byte)) <= 0
		} else if x, ok := v.(string); ok {
			return bytes.Compare([]byte(x), n.value.([]byte)) <= 0
		}
	case net.IP:
		if x, ok := v.(net.IP); ok {
			return bytes.Compare([]byte(x.To16()), []byte(n.value.(net.IP))) <= 0
		}
	case *net.IPNet:
		if x, ok := v.(net.IP); ok {
			return bytes.Compare([]byte(x.To16()), []byte(n.value.(*net.IPNet).IP)) <= 0
		}
	case net.HardwareAddr:
		if x, ok := v.(net.HardwareAddr); ok {
			return bytes.Compare([]byte(x), []byte(n.value.(net.HardwareAddr))) <= 0
		}
	case uint:
		if x, ok := v.(uint); ok {
			return x <= n.value.(uint)
		}
	case uint8:
		if x, ok := v.(uint8); ok {
			return x <= n.value.(uint8)
		}
	case uint16:
		if x, ok := v.(uint16); ok {
			return x <= n.value.(uint16)
		}
	case uint32:
		if x, ok := v.(uint32); ok {
			return x <= n.value.(uint32)
		}
	case uint64:
		if x, ok := v.(uint64); ok {
			return x <= n.value.(uint64)
		}
	case int:
		if x, ok := v.(int); ok {
			return x <= n.value.(int)
		}
	case int8:
		if x, ok := v.(int8); ok {
			return x <= n.value.(int8)
		}
	case int16:
		if x, ok := v.(int16); ok {
			return x <= n.value.(int16)
		}
	case int32:
		if x, ok := v.(int32); ok {
			return x <= n.value.(int32)
		}
	case int64:
		if x, ok := v.(int64); ok {
			return x <= n.value.(int64)
		}
	case float32:
		if x, ok := v.(float32); ok {
			return x <= n.value.(float32)
		}
	case float64:
		if x, ok := v.(float64); ok {
			return x <= n.value.(float64)
		}
	case string:
		if x, ok := v.(string); ok {
			return x <= n.value.(string)
		}
	}
	return false
}

// FieldName returns the packet field this node tests.
func (n *nodeLe) FieldName() string {
	return n.fieldName
}
// TEST_CONTAINS
// nodeContains matches when the field value contains the constant as a
// substring (string constants) or sub-slice (byte constants).
type nodeContains struct {
	fieldName string
	value     interface{}
}

// Apply evaluates the node against every value bound to the field name.
func (n *nodeContains) Apply(p map[string]interface{}) bool {
	return applyRange(p, n)
}

// applyOne reports whether the single value v contains the node's constant;
// any unsupported type combination yields false.
func (n *nodeContains) applyOne(v interface{}) bool {
	switch n.value.(type) {
	case string:
		if x, ok := v.(string); ok {
			return strings.Contains(x, n.value.(string))
		}
	case []byte:
		node_value := n.value.([]byte)
		switch v.(type) {
		case []byte:
			return bytes.Contains(v.([]byte), node_value)
		case string:
			return bytes.Contains([]byte(v.(string)), node_value)
		// NOTE(review): IP/IPNet containment was left disabled by the
		// author — confirm intent before enabling:
		/*case net.IP:
			return bytes.Contains([]byte(v.(net.IP)), node_value)
		case net.IPNet:
			return bytes.Contains([]byte(v.(net.IPNet).IP), node_value)*/
		case net.HardwareAddr:
			return bytes.Contains([]byte(v.(net.HardwareAddr)), node_value)
		}
	}
	return false
}

// FieldName returns the packet field this node tests.
func (n *nodeContains) FieldName() string {
	return n.fieldName
}
// TEST_MATCHES
// nodeMatch matches when the field's string value matches a pre-compiled
// regular expression.
type nodeMatch struct {
	fieldName string
	reg_expr  *regexp.Regexp
}

// Apply evaluates the node against every value bound to the field name.
func (n *nodeMatch) Apply(p map[string]interface{}) bool {
	return applyRange(p, n)
}

// applyOne reports whether v is a string matching the expression; non-string
// values never match.
func (n *nodeMatch) applyOne(v interface{}) bool {
	if s, ok := v.(string); ok {
		return n.reg_expr.MatchString(s)
	}
	return false
}

// FieldName returns the packet field this node tests.
func (n *nodeMatch) FieldName() string {
	return n.fieldName
}
// applyRange looks up the node's field in p and applies the node's applyOne
// test. When the bound value is a slice of a supported element type the node
// matches if ANY element matches; a missing field never matches; scalar
// (non-slice) values fall through to the default case and are tested
// directly.
func applyRange(p map[string]interface{}, n node2) bool {
	v, ok := p[n.FieldName()]
	if !ok {
		return false
	}
	switch v.(type) {
	case []bool:
		for _, x := range v.([]bool) {
			if n.applyOne(x) {
				return true
			}
		}
		return false
	case []uint:
		for _, x := range v.([]uint) {
			if n.applyOne(x) {
				return true
			}
		}
		return false
	case []uint8:
		for _, x := range v.([]uint8) {
			if n.applyOne(x) {
				return true
			}
		}
		return false
	case []uint16:
		for _, x := range v.([]uint16) {
			if n.applyOne(x) {
				return true
			}
		}
		return false
	case []uint32:
		for _, x := range v.([]uint32) {
			if n.applyOne(x) {
				return true
			}
		}
		return false
	case []uint64:
		for _, x := range v.([]uint64) {
			if n.applyOne(x) {
				return true
			}
		}
		return false
	case []int:
		for _, x := range v.([]int) {
			if n.applyOne(x) {
				return true
			}
		}
		return false
	case []int8:
		for _, x := range v.([]int8) {
			if n.applyOne(x) {
				return true
			}
		}
		return false
	case []int16:
		for _, x := range v.([]int16) {
			if n.applyOne(x) {
				return true
			}
		}
		return false
	case []int32:
		for _, x := range v.([]int32) {
			if n.applyOne(x) {
				return true
			}
		}
		return false
	case []int64:
		for _, x := range v.([]int64) {
			if n.applyOne(x) {
				return true
			}
		}
		return false
	case []float32:
		for _, x := range v.([]float32) {
			if n.applyOne(x) {
				return true
			}
		}
		return false
	case []float64:
		for _, x := range v.([]float64) {
			if n.applyOne(x) {
				return true
			}
		}
		return false
	case []string:
		for _, x := range v.([]string) {
			if n.applyOne(x) {
				return true
			}
		}
		return false
	case []net.IP:
		for _, x := range v.([]net.IP) {
			if n.applyOne(x) {
				return true
			}
		}
		return false
	case []net.HardwareAddr:
		for _, x := range v.([]net.HardwareAddr) {
			if n.applyOne(x) {
				return true
			}
		}
		return false
	default:
		return n.applyOne(v)
	}
	// Unreachable: every case above (including default) returns.
	return false
}
package evaluator
import (
"fmt"
"strings"
"github.com/vita-dounai/Firework/ast"
"github.com/vita-dounai/Firework/object"
)
// Shared singleton objects. Booleans, null and the loop-control markers are
// compared by pointer identity elsewhere in the evaluator (see isTruthy and
// evalInfixExpression), so exactly one instance of each must ever exist.
var (
	NULL     = &object.Null{}
	TRUE     = &object.Boolean{Value: true}
	FALSE    = &object.Boolean{Value: false}
	BREAK    = &object.LoopControl{ControlType: object.BREAK}
	CONTINUE = &object.LoopControl{ControlType: object.CONTINUE}
)
// newError builds an *object.Error carrying a fmt.Sprintf-formatted message.
func newError(format string, a ...interface{}) *object.Error {
	return &object.Error{Message: fmt.Sprintf(format, a...)}
}
// evalProgram evaluates top-level statements in order. A return statement
// unwraps to its inner value and halts execution; an error halts execution
// and is returned as-is; otherwise the value of the last statement is the
// program's value.
func evalProgram(program *ast.Program, env *object.Environment) object.Object {
	var result object.Object
	for _, stmt := range program.Statements {
		result = Eval(stmt, env)
		if ret, ok := result.(*object.ReturnValue); ok {
			return ret.Value
		}
		if errObj, ok := result.(*object.Error); ok {
			return errObj
		}
	}
	return result
}
// nativeBoolToBooleanObject maps a Go bool onto the shared TRUE/FALSE
// singletons so results compare by identity.
func nativeBoolToBooleanObject(value bool) object.Object {
	if value {
		return TRUE
	}
	return FALSE
}
// evalExclamationOperatorExpression implements logical NOT: '!' of a
// non-zero integer or TRUE is FALSE, '!' of zero or FALSE is TRUE, and any
// other operand (including NULL) evaluates to FALSE.
func evalExclamationOperatorExpression(right object.Object) object.Object {
	if intObj, ok := right.(*object.Integer); ok {
		return nativeBoolToBooleanObject(intObj.Value == 0)
	}
	if boolObj, ok := right.(*object.Boolean); ok {
		return nativeBoolToBooleanObject(!boolObj.Value)
	}
	return FALSE
}
// evalMinusPrefixOperatorExpression implements unary minus; only integer
// operands are supported, anything else is an error object.
func evalMinusPrefixOperatorExpression(right object.Object) object.Object {
	intObj, ok := right.(*object.Integer)
	if !ok {
		return newError("Unknown operator: -%s", right.Type())
	}
	return &object.Integer{Value: -intObj.Value}
}
// evalPrefixExpression dispatches a prefix operator to its implementation;
// unrecognized operators produce an error object.
func evalPrefixExpression(operator string, right object.Object) object.Object {
	if operator == "!" {
		return evalExclamationOperatorExpression(right)
	}
	if operator == "-" {
		return evalMinusPrefixOperatorExpression(right)
	}
	return newError("Unknown operator: %s%s", operator, right.Type())
}
// evalIntegerInfixExpression implements arithmetic and comparison operators
// for two integer operands.
//
// Robustness fix: '/' and '%' previously triggered Go's runtime
// divide-by-zero panic, crashing the whole interpreter; a zero divisor now
// yields an evaluator error object instead.
func evalIntegerInfixExpression(operator string, left, right object.Object) object.Object {
	leftValue := left.(*object.Integer).Value
	rightValue := right.(*object.Integer).Value
	switch operator {
	case "+":
		return &object.Integer{Value: leftValue + rightValue}
	case "-":
		return &object.Integer{Value: leftValue - rightValue}
	case "*":
		return &object.Integer{Value: leftValue * rightValue}
	case "/":
		if rightValue == 0 {
			return newError("Division by zero: %d / %d", leftValue, rightValue)
		}
		return &object.Integer{Value: leftValue / rightValue}
	case "**":
		// Exponentiation by squaring. A negative exponent never enters the
		// loop, so n ** -m evaluates to 1 (pre-existing behavior, kept).
		result := int64(1)
		for i := rightValue; i > 0; i >>= 1 {
			if i&1 != 0 {
				result *= leftValue
			}
			leftValue *= leftValue
		}
		return &object.Integer{Value: result}
	case "%":
		if rightValue == 0 {
			return newError("Division by zero: %d %% %d", leftValue, rightValue)
		}
		return &object.Integer{Value: leftValue % rightValue}
	case ">":
		return nativeBoolToBooleanObject(leftValue > rightValue)
	case ">=":
		return nativeBoolToBooleanObject(leftValue >= rightValue)
	case "<":
		return nativeBoolToBooleanObject(leftValue < rightValue)
	case "<=":
		return nativeBoolToBooleanObject(leftValue <= rightValue)
	case "==":
		return nativeBoolToBooleanObject(leftValue == rightValue)
	case "!=":
		return nativeBoolToBooleanObject(leftValue != rightValue)
	default:
		return newError("Unknown operator: %s %s %s", left.Type(), operator, right.Type())
	}
}
// evalStringInfixExpression implements '+' (concatenation) and the ordering
// and equality operators for two string operands; anything else is an error.
func evalStringInfixExpression(operator string, left, right object.Object) object.Object {
	leftValue := left.(*object.String).Value
	rightValue := right.(*object.String).Value
	if operator == "+" {
		return &object.String{Value: leftValue + rightValue}
	}
	// All remaining supported operators derive from a single comparison.
	cmp := strings.Compare(leftValue, rightValue)
	switch operator {
	case "<":
		return nativeBoolToBooleanObject(cmp < 0)
	case ">":
		return nativeBoolToBooleanObject(cmp > 0)
	case "==":
		return nativeBoolToBooleanObject(cmp == 0)
	case "!=":
		return nativeBoolToBooleanObject(cmp != 0)
	default:
		return newError("Unknown operator: %s %s %s", left.Type(), operator, right.Type())
	}
}
// evalInfixExpression dispatches a binary operator based on operand types.
// Integer and string pairs get type-specific handlers; for every other pair,
// == and != compare by pointer identity, which is correct for the shared
// TRUE/FALSE/NULL singletons. Mismatched operand types are reported
// distinctly from unsupported operators.
func evalInfixExpression(operator string, left, right object.Object) object.Object {
	switch {
	case left.Type() == object.INTEGER_OBJ && right.Type() == object.INTEGER_OBJ:
		return evalIntegerInfixExpression(operator, left, right)
	case left.Type() == object.STRING_OBJ && right.Type() == object.STRING_OBJ:
		return evalStringInfixExpression(operator, left, right)
	case operator == "==":
		return nativeBoolToBooleanObject(left == right)
	case operator == "!=":
		return nativeBoolToBooleanObject(left != right)
	default:
		switch {
		case left.Type() != right.Type():
			return newError("Type mismatch: %s %s %s", left.Type(), operator, right.Type())
		default:
			return newError("Unknown operator: %s %s %s", left.Type(), operator, right.Type())
		}
	}
}
// isTruthy reports whether obj counts as true in a condition: only the FALSE
// and NULL singletons are falsy; every other object (including integer 0)
// is truthy.
func isTruthy(obj object.Object) bool {
	return obj != FALSE && obj != NULL
}
// evalIfExpression evaluates the condition, then the consequence when truthy
// or the alternative when present; with no alternative the expression's
// value is NULL. Condition errors propagate unchanged.
func evalIfExpression(ie *ast.IfExpression, env *object.Environment) object.Object {
	cond := Eval(ie.Condition, env)
	switch {
	case isError(cond):
		return cond
	case isTruthy(cond):
		return Eval(ie.Consequence, env)
	case ie.Alternative != nil:
		return Eval(ie.Alternative, env)
	default:
		return NULL
	}
}
// evalBlockStatement evaluates the statements of a block in a fresh child
// scope of env and returns the last statement's value. Return values,
// errors and loop-control objects bubble up unwrapped so the enclosing
// construct (function call or loop) can handle them.
func evalBlockStatement(block *ast.BlockStatement, env *object.Environment) object.Object {
	extendedEnv := object.ExtendEnvironment(env)
	var result object.Object
	for _, statement := range block.Statements {
		result = Eval(statement, extendedEnv)
		if result != nil {
			// Idiom fix: one multi-value case replaces the previous
			// fallthrough chain; behavior is identical.
			switch result.Type() {
			case object.RETURN_VALUE_OBJ, object.ERROR_OBJ, object.BREAK, object.CONTINUE:
				return result
			}
		}
	}
	return result
}
// isError reports whether obj is a runtime error object.
func isError(obj object.Object) bool {
	return obj != nil && obj.Type() == object.ERROR_OBJ
}
// isReturn reports whether obj is a wrapped return value.
func isReturn(obj object.Object) bool {
	return obj != nil && obj.Type() == object.RETURN_VALUE_OBJ
}
// isBreak reports whether obj is the break loop-control marker.
func isBreak(obj object.Object) bool {
	return obj != nil && obj.Type() == object.BREAK
}
// evalIdentifier resolves a name: environment bindings are consulted first
// (so user definitions shadow builtins), then the builtin table; an unknown
// name is an error object.
func evalIdentifier(node *ast.Identifier, env *object.Environment) object.Object {
	value, bound := env.Get(node.Value)
	if bound {
		return value
	}
	builtin, known := builtins[node.Value]
	if known {
		return builtin
	}
	return newError("Identifier not found: " + node.Value)
}
// evalExpressions evaluates each expression left to right. On the first
// error it aborts and returns a one-element slice holding only that error
// (callers detect this with len == 1 && isError).
func evalExpressions(exps []ast.Expression, env *object.Environment) []object.Object {
	var result []object.Object
	for _, e := range exps {
		value := Eval(e, env)
		if isError(value) {
			return []object.Object{value}
		}
		result = append(result, value)
	}
	return result
}
// extendFunctionEnv builds the function's call scope: a child of the
// closure environment with each parameter bound to its argument.
func extendFunctionEnv(fn *object.Function, args []object.Object) *object.Environment {
	callEnv := object.ExtendEnvironment(fn.Env)
	for i, param := range fn.Parameters {
		callEnv.Set(param.Value, args[i])
	}
	return callEnv
}
// unwrapReturnValue strips a ReturnValue wrapper so a 'return' inside a
// function body does not propagate past the call site.
func unwrapReturnValue(obj object.Object) object.Object {
	ret, wrapped := obj.(*object.ReturnValue)
	if !wrapped {
		return obj
	}
	return ret.Value
}
// applyFunction invokes a user-defined function (evaluating its body in a
// fresh scope bound to its closure, then unwrapping any return value) or a
// builtin; any other callee is an error object.
func applyFunction(fn object.Object, args []object.Object) object.Object {
	if function, ok := fn.(*object.Function); ok {
		callEnv := extendFunctionEnv(function, args)
		return unwrapReturnValue(Eval(function.Body, callEnv))
	}
	if builtin, ok := fn.(*object.Builtin); ok {
		return builtin.Fn(args...)
	}
	return newError("Not a function: %s", fn.Type())
}
// evalIndexExpression evaluates left[index] for arrays and maps.
// Arrays require an integer index; out-of-range access yields NULL.
// Maps require a hashable key; a missing key yields NULL.
// Any other container type yields NULL.
func evalIndexExpression(leftObject, indexObject object.Object) object.Object {
	switch left := leftObject.(type) {
	case *object.Array:
		index, ok := indexObject.(*object.Integer)
		if !ok {
			// Bug fix: report the actual index object's type. Previously this
			// called index.Type() on the zero-value *object.Integer left by
			// the failed type assertion, so the message could never describe
			// the offending value (and risked a nil dereference).
			return newError("Subscript not support: %s", indexObject.Type())
		}
		subscript := index.Value
		if subscript < 0 || subscript >= int64(len(left.Elements)) {
			return NULL
		}
		return left.Elements[subscript]
	case *object.Map:
		key, ok := indexObject.(object.Hashable)
		if !ok {
			return newError("unusable as map key: %s", indexObject.Type())
		}
		pair, found := left.Pairs[key.Hash()]
		if !found {
			return NULL
		}
		return pair.Value
	}
	return NULL
}
// Eval walks the AST and evaluates each node against env. Errors are
// returned as *object.Error values rather than Go errors; statement nodes
// with no value (assignment, while) and unrecognized node types yield nil.
func Eval(node ast.Node, env *object.Environment) object.Object {
	switch node := node.(type) {
	case *ast.Program:
		return evalProgram(node, env)
	case *ast.ExpressionStatement:
		return Eval(node.Expression, env)
	case *ast.IntegerLiteral:
		return &object.Integer{Value: node.Value}
	case *ast.StringLiteral:
		return &object.String{Value: node.Value}
	case *ast.Boolean:
		// Reuse the shared singletons so booleans compare by identity.
		if node.Value {
			return TRUE
		}
		return FALSE
	case *ast.PrefixExpression:
		right := Eval(node.Right, env)
		if isError(right) {
			return right
		}
		return evalPrefixExpression(node.Operator, right)
	case *ast.InfixExpression:
		left := Eval(node.Left, env)
		if isError(left) {
			return left
		}
		right := Eval(node.Right, env)
		if isError(right) {
			return right
		}
		return evalInfixExpression(node.Operator, left, right)
	case *ast.BlockStatement:
		return evalBlockStatement(node, env)
	case *ast.IfExpression:
		return evalIfExpression(node, env)
	case *ast.ReturnStatement:
		returnValue := Eval(node.ReturnValue, env)
		if isError(returnValue) {
			return returnValue
		}
		return &object.ReturnValue{Value: returnValue}
	case *ast.AssignStatement:
		// Assignment is a statement: it binds the name and yields nil.
		value := Eval(node.Value, env)
		if isError(value) {
			return value
		}
		env.Set(node.Name.Value, value)
	case *ast.Identifier:
		return evalIdentifier(node, env)
	case *ast.FunctionLiteral:
		// Functions close over the environment they were defined in.
		parameters := node.Parameters
		body := node.Body
		return &object.Function{Parameters: parameters, Body: body, Env: env}
	case *ast.CallExpression:
		// quote(...) is special-cased: its argument is not evaluated here.
		if name, ok := node.Function.(*ast.Identifier); ok && name.Value == "quote" {
			return quote(node.Arguments[0], env)
		}
		function := Eval(node.Function, env)
		if isError(function) {
			return function
		}
		args := evalExpressions(node.Arguments, env)
		if len(args) == 1 && isError(args[0]) {
			return args[0]
		}
		return applyFunction(function, args)
	case *ast.WhileStatement:
		// NOTE(review): idiomatic Go would use a bare 'for' here.
		for true {
			condition := Eval(node.Condition, env)
			if isError(condition) {
				return condition
			}
			if !isTruthy(condition) {
				break
			}
			body := Eval(node.Body, env)
			if isError(body) || isReturn(body) {
				return body
			}
			// A CONTINUE object simply falls through to the next iteration.
			if isBreak(body) {
				break
			}
		}
	case *ast.BreakStatement:
		return BREAK
	case *ast.ContinueStatement:
		return CONTINUE
	case *ast.ArrayLiteral:
		elements := evalExpressions(node.Elements, env)
		if len(elements) == 1 && isError(elements[0]) {
			return elements[0]
		}
		return &object.Array{Elements: elements}
	case *ast.IndexExpression:
		left := Eval(node.Left, env)
		if isError(left) {
			return left
		}
		if left.Type() != object.ARRAY_OBJ && left.Type() != object.MAP_OBJ {
			return newError("Index operator not support: %s", left.Type())
		}
		index := Eval(node.Index, env)
		if isError(index) {
			return index
		}
		return evalIndexExpression(left, index)
	case *ast.MapLiteral:
		pairs := make(map[object.HashKey]object.MapPair)
		for keyNode, valueNode := range node.Pairs {
			key := Eval(keyNode, env)
			if isError(key) {
				return key
			}
			hashableKeyObject, ok := key.(object.Hashable)
			if !ok {
				return newError("unusable as map key: %s", key.Type())
			}
			value := Eval(valueNode, env)
			if isError(value) {
				return value
			}
			hashKey := hashableKeyObject.Hash()
			pairs[hashKey] = object.MapPair{Key: key, Value: value}
		}
		return &object.Map{Pairs: pairs}
	}
	return nil
}
package knn
import (
"math"
"sort"
)
// Sample is one labeled data point: a feature vector plus its class label.
type Sample struct {
	Attributes []float64
	Class      string
}

// Distance returns the squared Euclidean distance between s and other.
// The square root is omitted: squared distance is monotonic in the true
// Euclidean distance, which is all neighbor ranking requires. The loop runs
// over other's attributes, so both samples are assumed to have the same
// dimensionality.
func (s *Sample) Distance(other *Sample) float64 {
	var total float64
	for i := range other.Attributes {
		total += math.Pow(s.Attributes[i]-other.Attributes[i], 2)
	}
	return total
}

// NewSample builds a Sample from a feature vector and a class label.
func NewSample(attributes []float64, class string) *Sample {
	return &Sample{Attributes: attributes, Class: class}
}
// KNNClassifier is a k-nearest-neighbours classifier: it memorises the
// training set and classifies by majority vote among the K closest samples.
type KNNClassifier struct {
	K int // number of neighbours consulted per prediction
	TrainingData []*Sample // memorised training samples
}

// NewKNNClassifier returns a classifier that votes among the k nearest
// training samples.
func NewKNNClassifier(k int) *KNNClassifier {
	return &KNNClassifier{K: k}
}

// Fit memorises the training set. KNN is a lazy learner, so no model is
// built here; all the work happens in Predict.
func (knn *KNNClassifier) Fit(samples []*Sample) {
	knn.TrainingData = samples
}
// SampleWithDist pairs a training Sample with its computed distance to a
// query point.
type SampleWithDist struct {
	*Sample
	Distance float64
}

// By is the type of a "less" function that defines the ordering of its
// SampleWithDist arguments.
type By func(p1, p2 *SampleWithDist) bool

// Sort sorts distances in place, ordered by the receiver closure.
func (by By) Sort(distances []SampleWithDist) {
	sort.Sort(&sampleSorter{items: distances, cmp: by})
}

// sampleSorter adapts a slice plus a comparison closure to sort.Interface.
type sampleSorter struct {
	items []SampleWithDist
	cmp   func(p1, p2 *SampleWithDist) bool
}

// Len is part of sort.Interface.
func (s *sampleSorter) Len() int {
	return len(s.items)
}

// Swap is part of sort.Interface.
func (s *sampleSorter) Swap(i, j int) {
	s.items[i], s.items[j] = s.items[j], s.items[i]
}

// Less is part of sort.Interface; it defers to the comparison closure.
func (s *sampleSorter) Less(i, j int) bool {
	return s.cmp(&s.items[i], &s.items[j])
}
// Predict is where the work happens
func (knn *KNNClassifier) Predict(sample *Sample) string {
//1. Get the distance from sample to every other sample in knn.TrainingData using Distance func.
distances := []SampleWithDist{}
for _, sampleGroup := range knn.TrainingData {
distances = append(distances, SampleWithDist{Sample: sampleGroup,
Distance: sample.Distance(sampleGroup)})
}
//2. Sort by closest samples based on calculated distances
distance := func(p1, p2 *SampleWithDist) bool {
return p1.Distance < p2.Distance
}
By(distance).Sort(distances)
//fmt.Println("By distance:", distances)
//3. Get the list of classes from the top K closest samples
topK := distances[:knn.K]
//4. Count the classes and return the one that occurs most.
results := make(map[string]int64)
var highest int64 = 1
var prediction string
for _, cls := range topK {
if _, ok := results[cls.Class]; ok {
results[cls.Class]++
} else {
results[cls.Class] = 1
}
}
for key, value := range results {
if value > highest {
highest = value
prediction = key
}
}
return prediction
} | sdd/knn/knn.go | 0.829871 | 0.403479 | knn.go | starcoder |
package entities
// APP_DESC_SCHEMA holds the JSON Schema (draft-07) used to validate Nalej application descriptors.
const APP_DESC_SCHEMA = `
{
"definitions": {
"labels": {
"$id": "#/definitions/labels",
"type": "object",
"title": "The Labels Schema",
"additionalProperties": {
"type": "string",
"minItems": 1,
"minLength": 1,
"maxLength": 63
}
},
"host_port": {
"$id": "#/definitions/host_port",
"type": "integer",
"title": "Host port",
"minimum": 1,
"maximum": 65535
},
"security_rule": {
"$id": "#/definitions/security_rule",
"type": "object",
"title": "Security connectivity rules",
"required": [
"name",
"target_service_group_name",
"target_service_name",
"target_port",
"access"
],
"properties": {
"name": {
"title": "Rule name",
"type": "string",
"minLength": 1,
"maxLength": 63
},
"target_service_group_name": {
"title": "Name of the target service group",
"type": "string",
"minLength": 1,
"maxLength": 63
},
"target_service_name": {
"title": "Name of the target service contained by the service group",
"type": "string",
"minLength": 1,
"maxLength": 63
},
"target_port": {
"title": "Access port",
"$ref": "#/definitions/host_port"
},
"access": {
"title": "Port this rule refers to",
"type": "integer",
"$comment": "ALL_APP_SERVICES,APP_SERVICES,PUBLIC,DEVICE_GROUP",
"enum": [0, 1, 2, 3]
},
"auth_service_group_name": {
"title": "Name of the group with permission granted to access the target_service_name",
"type": "string",
"minLength": 1,
"maxLength": 63
},
"auth_services": {
"type": "array",
"title": "List of services authenticated to access",
"minLength": 1,
"items": {
"type": "string",
"minLength": 1,
"maxLength": 63
}
},
"device_group_names": {
"type": "array",
"minLength": 1,
"title": "List of device group names with access granted",
"items": {
"type": "string",
"minLength": 1,
"maxLength": 63
}
}
}
},
"service_group_deployment_specs": {
"$id": "#/definitions/service_group_deployment_specs",
"title": "Definition of deployment specs for a service group",
"properties": {
"multi_cluster_replica": {
"title": "Set the multiple cluster replication policy",
"type": "boolean"
},
"replicas": {
"title": "Number of replicas for the service group",
"type": "integer",
"minimum": 0
},
"deployment_selectors": {
"title": "Set of labels to be matched by target application clusters",
"$ref": "#/definitions/labels"
}
},
"oneOf": [
{
"required": ["multi_cluster_replica"],
"not": {"required": ["replicas"]}
},
{
"required": ["replicas"],
"not": {"required": ["multi_cluster_replica"]}
},
{
"oneOf": [
{},
{
"required": ["multi_cluster_replica"]
},
{
"required": ["replicas"]
}
]
}
]
},
"image_credentials": {
"$id": "#/definitions/image_credentials",
"title": "Credentials for an image",
"properties": {
"username": {
"type": "string"
},
"password": {
"type": "string"
},
"email": {
"type": "string",
"pattern": "^([a-zA-Z0-9_\\-\\.]+)@([a-zA-Z0-9_\\-\\.]+)\\.([a-zA-Z]{2,5})$"
},
"docker_repository":{
"type": "string"
}
}
},
"deploy_specs": {
"$id": "#/definitions/deploy_specs",
"title": "Deployment specifications for a service",
"properties": {
"cpu": {
"type": "number",
"title": "Ratio of reserved cpu",
"minimum": 0.1
},
"memory": {
"type": "integer",
"title": "Amount of memory required",
"minimum": 16
},
"replicas": {
"type": "integer",
"title": "Number of replicas of this service",
"minimum": 0,
"maximum": 255
}
}
},
"storage": {
"$id": "#/definitions/storage",
"type": "object",
"title": "Storage service definition",
"required": [
"size",
"mount_path"
],
"properties": {
"size": {
"title": "Size of the storage volume",
"type": "integer",
"minimum": 100
},
"mount_path": {
"title": "Path to mount the volume in the service instance",
"type": "string"
}
}
},
"port": {
"$id": "#/definitions/port",
"type": "object",
"title": "Definition of an exposed port",
"required": [
"name",
"internal_port",
"exposed_port"
],
"properties": {
"name": {
"type": "string",
"title": "Name of the port",
"minLength": 2,
"maxLength": 63
},
"internal_port": {
"$ref": "#/definitions/host_port",
"title": "Internal image port"
},
"exposed_port": {
"$ref": "#/definitions/host_port",
"title": "Exposed image port"
},
"endpoints": {
"type": "array",
"title": "List of endpoints for the service",
"minLength": 1,
"items": {
"$ref": "#/definitions/endpoint"
}
},
"environment_variables": {
"$ref": "#/definitions/labels",
"title": "Map of environment variables for the service"
},
"configs": {
"$ref": "#/definitions/labels",
"title": "Map of configuration options for the application"
},
"labels": {
"$ref": "#/definitions/labels",
"title": "Labels for this service"
},
"deploy_after": {
"type": "array",
"title": "Name of services that have to be deployed before this",
"minLength": 1,
"items": {
"type": "string"
}
},
"run_arguments": {
"type": "array",
"title": "List of running arguments for the service",
"minLength": 1,
"items": {
"type": "string"
}
}
}
},
"endpoint": {
"$id": "#/definitions/endpoint",
"type": "object",
"title": "Endpoint definition",
"required": [
"path",
"type"
],
"properties": {
"path": {
"type": "string",
"minLength": 1
},
"type": {
"type": "integer",
"$comment": "IS_ALIVE=0; REST=1; WEB=2; PROMETHEUS=3; INGESTION=4;",
"enum": [0,1,2,3,4]
}
}
},
"service": {
"$id": "#/definitions/service",
"title": "Definition of service",
"required": [
"name",
"image"
],
"properties": {
"name": {
"type": "string",
"title": "Name of a service"
},
"image": {
"type": "string",
"title": "Name of the image to download"
},
"image_credentials": {
"title": "Definition of credentials to download the image",
"$ref": "#/definitions/image_credentials"
},
"specs": {
"title": "Service deployment specs",
"$ref": "#/definitions/deploy_specs"
},
"storage": {
"type": "array",
"title": "Storage definition for this service",
"minLength": 1,
"items": {
"$ref": "#/definitions/storage"
}
},
"exposed_ports": {
"type": "array",
"title": "List of exposed ports",
"minLength": 1,
"items": {
"$ref": "#/definitions/port"
}
}
}
},
"service_group": {
"$id": "#/definitions/service_group",
"type": "object",
"title": "Group of services to be allocated together",
"required": [
"name",
"services"
],
"properties": {
"name": {
"type": "string",
"title": "Name of the service group",
"minLength": 4,
"maxLength": 63
},
"specs": {
"title": "Deployment specifications for this service group",
"$ref": "#/definitions/service_group_deployment_specs"
},
"services": {
"title": "Array of defined services",
"type": "array",
"minLength": 1,
"items": {
"$ref": "#/definitions/service"
}
}
}
}
},
"$schema": "http://json-schema.org/draft-07/schema#",
"$id": "http://nalej.com/app_descriptor.json",
"type": "object",
"title": "Nalej application descriptor",
"required": [
"name",
"groups"
],
"properties": {
"name": {
"$id": "#/properties/name",
"type": "string",
"minLength": 4,
"maxLength": 63,
"title": "Name of the application descriptor",
"pattern": "^(.*)$"
},
"labels": {
"$id": "#/properties/labels",
"title": "Labels for this app",
"$ref": "#/definitions/labels"
},
"rules": {
"$id": "#/properties/rules",
"title": "Connectivity rules",
"type": "array",
"items": {
"$ref": "#/definitions/security_rule",
"minLength": 1
}
},
"groups": {
"$id": "#/properties/groups",
"type": "array",
"minLength": 1,
"items": {
"$ref": "#/definitions/service_group"
}
}
}
}
` | internal/pkg/entities/app_desc_schema.go | 0.742982 | 0.532668 | app_desc_schema.go | starcoder |
package tree
// Iterator defines a bidirectional tree iterator positioned on a node.
type Iterator interface {
	// Next iterates to the next node in the tree and returns the iterator, or nil if there is no next node
	Next() Iterator
	// Previous iterates to the previous node in the tree and returns the iterator, or nil if there is no previous node
	Previous() Iterator
	// Get returns the value of the iterator's current node
	Get() interface{}
}

// Tree defines a sorted tree container.
// Trees consist of nodes which are not exposed to the user; only the value of each node is exposed.
// Trees are sorted by a compare function which also identifies nodes in the tree. This compare function operates on node values.
type Tree interface {
	// Clear resets the tree to zero nodes and resets the tree's meta data
	Clear()
	// Len returns the current node count
	Len() int
	// Empty returns true if the current node count is zero
	Empty() bool
	// Chan returns a channel which iterates from the front to the back of the tree.
	// NOTE(review): n is presumably the channel buffer size — confirm with the implementation.
	Chan(n int) <-chan interface{}
	// ChanBack returns a channel which iterates from the back to the front of the tree.
	// NOTE(review): n is presumably the channel buffer size — confirm with the implementation.
	ChanBack(n int) <-chan interface{}
	// Iter returns an iterator which starts at the front of the tree, or nil if there are no nodes in the tree
	Iter() Iterator
	// IterBack returns an iterator which starts at the back of the tree, or nil if there are no nodes in the tree
	IterBack() Iterator
	// First returns the first value of the tree and true, or false if there is no value
	First() (interface{}, bool)
	// Last returns the last value of the tree and true, or false if there is no value
	Last() (interface{}, bool)
	// Get returns the value of the node identified by the given id value and true, or false if there is no such node
	Get(id interface{}) (interface{}, bool)
	// GetFunc returns the value of the first node selected by the given function and true, or false if there is no such node
	GetFunc(m func(v interface{}) bool) (interface{}, bool)
	// Set sets the value of the node identified by the given id value and returns true, or false if there is no such node
	Set(id interface{}, v interface{}) bool
	// SetFunc sets the value of the first node selected by the given function and returns true, or false if there is no such node
	SetFunc(m func(v interface{}) bool, v interface{}) bool
	// Contains returns true if a node identified by the given id value exists in the tree, or false if it does not
	Contains(id interface{}) bool
	// Copy returns an exact copy of the tree
	Copy() Tree
	// Slice returns a copy of the tree as a slice
	Slice() []interface{}
	// Insert inserts a new node into the tree with the given value
	Insert(v interface{})
	// Remove removes the node identified by the given id value and returns its value and true, or false if there is no such node
	Remove(id interface{}) (interface{}, bool)
	// Pop removes the last node and returns its value and true, or false if there is no such node
	Pop() (interface{}, bool)
	// Shift removes the first node and returns its value and true, or false if there is no such node
	Shift() (interface{}, bool)
}
package pure
import (
"context"
"fmt"
"time"
"github.com/benthosdev/benthos/v4/internal/bundle"
"github.com/benthosdev/benthos/v4/internal/component"
"github.com/benthosdev/benthos/v4/internal/component/output"
"github.com/benthosdev/benthos/v4/internal/component/output/processors"
"github.com/benthosdev/benthos/v4/internal/docs"
"github.com/benthosdev/benthos/v4/internal/log"
"github.com/benthosdev/benthos/v4/internal/message"
)
func init() {
	// Register the "resource" output with the global output bundle. The
	// constructor only verifies that the named resource exists up front;
	// resolution happens on every write (see resourceOutput.loop).
	err := bundle.AllOutputs.Add(processors.WrapConstructor(func(c output.Config, nm bundle.NewManagement) (output.Streamed, error) {
		if !nm.ProbeOutput(c.Resource) {
			return nil, fmt.Errorf("output resource '%v' was not found", c.Resource)
		}
		// ctx/done give the output a way to signal and observe shutdown.
		ctx, done := context.WithCancel(context.Background())
		return &resourceOutput{
			mgr: nm,
			name: c.Resource,
			log: nm.Logger(),
			ctx: ctx,
			done: done,
		}, nil
	}), docs.ComponentSpec{
		Name: "resource",
		Summary: `
Resource is an output type that runs a resource output by its name.`,
		Description: `
This output allows you to reference the same configured output resource in multiple places, and can also tidy up large nested configs. For example, the config:

` + "```yaml" + `
output:
  broker:
    pattern: fan_out
    outputs:
    - kafka:
        addresses: [ TODO ]
        topic: foo
    - gcp_pubsub:
        project: bar
        topic: baz
` + "```" + `

Could also be expressed as:

` + "```yaml" + `
output:
  broker:
    pattern: fan_out
    outputs:
    - resource: foo
    - resource: bar

output_resources:
  - label: foo
    kafka:
      addresses: [ TODO ]
      topic: foo

  - label: bar
    gcp_pubsub:
      project: bar
      topic: baz
` + "```" + `

You can find out more about resources [in this document.](/docs/configuration/resources)`,
		Categories: []string{
			"Utility",
		},
		Config: docs.FieldString("", "").HasDefault(""),
	})
	if err != nil {
		// Registration happens at program start-up; failure is unrecoverable.
		panic(err)
	}
}
// resourceOutput proxies every transaction to an output registered as a
// named resource on the manager.
type resourceOutput struct {
	mgr bundle.NewManagement
	name string // name of the output resource resolved at write time
	log log.Modular

	transactions <-chan message.Transaction // set exactly once by Consume

	ctx context.Context
	done func() // cancels ctx, signalling shutdown
}

// loop pulls transactions and writes each one to the named resource,
// retrying a failed resource lookup/write after a one-second pause. It
// exits when the transaction channel closes or ctx is cancelled.
func (r *resourceOutput) loop() {
	// ts stays non-nil while a transaction awaits a successful write, so a
	// failed attempt is retried rather than dropped.
	var ts *message.Transaction
	for {
		if ts == nil {
			select {
			case t, open := <-r.transactions:
				if !open {
					// Upstream closed: signal shutdown and exit.
					r.done()
					return
				}
				ts = &t
			case <-r.ctx.Done():
				return
			}
		}
		var err error
		if oerr := r.mgr.AccessOutput(context.Background(), r.name, func(o output.Sync) {
			err = o.WriteTransaction(r.ctx, *ts)
		}); oerr != nil {
			err = oerr
		}
		if err != nil {
			r.log.Errorf("Failed to obtain output resource '%v': %v", r.name, err)
			// Keep ts and retry after a pause, or bail out on shutdown.
			select {
			case <-time.After(time.Second):
			case <-r.ctx.Done():
				return
			}
		} else {
			// Delivered; fetch the next transaction.
			ts = nil
		}
	}
}
// Consume starts reading transactions from ts and writing each one to the
// named output resource. It may be called at most once per instance; a
// second call reports ErrAlreadyStarted.
func (r *resourceOutput) Consume(ts <-chan message.Transaction) error {
	if r.transactions == nil {
		r.transactions = ts
		go r.loop()
		return nil
	}
	return component.ErrAlreadyStarted
}
// Connected reports whether the underlying output resource is currently
// connected. A failed resource lookup is logged and reported as false.
func (r *resourceOutput) Connected() (isConnected bool) {
	err := r.mgr.AccessOutput(context.Background(), r.name, func(o output.Sync) {
		isConnected = o.Connected()
	})
	if err != nil {
		r.log.Errorf("Failed to obtain output resource '%v': %v", r.name, err)
	}
	return
}
func (r *resourceOutput) CloseAsync() {
r.done()
}
func (r *resourceOutput) WaitForClose(timeout time.Duration) error {
select {
case <-r.ctx.Done():
case <-time.After(timeout):
return component.ErrTimeout
}
return nil
} | internal/impl/pure/output_resource.go | 0.502441 | 0.462959 | output_resource.go | starcoder |
package cutil
//#include <stdbool.h>
//#include <stdio.h>
import "C"
import (
"unsafe"
"github.com/dereklstinson/half"
)
//CScalar is used for scalar multiplications with cudnn. Implementations
//wrap C scalar types so that their address (CPtr) and size in bytes (SIB)
//can be handed to C APIs.
type CScalar interface {
	CPtr() unsafe.Pointer
	SIB() uint
}
//CScalartoFloat64 converts a CScalar to a float64 value so it can be read
//or used for debugging. It panics on any wrapper type not listed below
//(e.g. CBool, CSizet).
func CScalartoFloat64(x CScalar) float64 {
	switch v := x.(type) {
	case CDouble:
		return float64(v)
	case CFloat:
		return float64(v)
	case CInt:
		return float64(v)
	case CUInt:
		return float64(v)
	case CHalf:
		// Half-precision values go through half.Float16 for the conversion.
		return float64(half.Float16(v))
	case CChar:
		return float64(v)
	case CUChar:
		return float64(v)
	default:
		panic("Unsupported val for CScalartoFloat64")
	}
}
//CScalarConversion converts a native Go value to the matching CScalar
//wrapper. Go int and int32 both become CInt; uint and uint32 both become
//CUInt. A value that already implements CScalar is returned unchanged
//(the interface case is checked last, as in the original). Unsupported
//types yield nil.
//Current support: float64, float32, int, int32, int8, uint, uint32,
//uint8 (byte), half.Float16, bool, CScalar.
func CScalarConversion(gotype interface{}) CScalar {
	switch v := gotype.(type) {
	case float64:
		return CDouble(v)
	case float32:
		return CFloat(v)
	case int:
		return CInt(v)
	case int32:
		return CInt(v)
	case int8:
		return CChar(v)
	case uint8:
		return CUChar(v)
	case uint32:
		return CUInt(v)
	case uint:
		return CUInt(v)
	case half.Float16:
		return CHalf(v)
	case bool:
		return CBool(v)
	case CScalar:
		return v
	default:
		return nil
	}
}
//CHalf is a half precision float stored as a C ushort.
//
//Note on all CPtr methods below: the value receiver means the returned
//unsafe.Pointer addresses a copy of the receiver, not the caller's
//original variable; it is suitable for read-only use by C calls.
type CHalf C.ushort

//CPtr returns an unsafe pointer of the half
func (f CHalf) CPtr() unsafe.Pointer { return unsafe.Pointer(&f) }

//SIB returns the number of bytes the CScalar has
func (f CHalf) SIB() uint { return (2) }

//CFloat is a float in C
type CFloat C.float

//CPtr returns an unsafe pointer of the float
func (f CFloat) CPtr() unsafe.Pointer { return unsafe.Pointer(&f) }

//SIB returns the number of bytes the CScalar has
func (f CFloat) SIB() uint { return 4 }

//CDouble is a double in C
type CDouble C.double

//CPtr returns an unsafe pointer of the double
func (d CDouble) CPtr() unsafe.Pointer { return unsafe.Pointer(&d) }

//SIB returns the number of bytes the CScalar has
func (d CDouble) SIB() uint { return 8 }

//CInt is a int in C
type CInt C.int

//CPtr returns an unsafe pointer of the int
func (i CInt) CPtr() unsafe.Pointer { return unsafe.Pointer(&i) }

//SIB returns the number of bytes the CScalar has
func (i CInt) SIB() uint { return 4 }

//CUInt is an unsigned int in C
type CUInt C.uint

//CPtr returns an unsafe pointer of the unsigned int
func (i CUInt) CPtr() unsafe.Pointer { return unsafe.Pointer(&i) }

//SIB returns the number of bytes the CScalar has
func (i CUInt) SIB() uint { return 4 }

//CChar is a signed char
type CChar C.char

//CPtr returns an unsafe pointer for CChar
func (c CChar) CPtr() unsafe.Pointer { return unsafe.Pointer(&c) }

//SIB returns the number of bytes the CScalar has
func (c CChar) SIB() uint { return 1 }

//CUChar is a C.uchar
type CUChar C.uchar

//SIB returns the number of bytes the CScalar has
func (c CUChar) SIB() uint { return 1 }

//CPtr returns an unsafe pointer for CUChar
func (c CUChar) CPtr() unsafe.Pointer { return unsafe.Pointer(&c) }

//CBool is a wrapper for C.bool. It is in the stdbool.h header.
type CBool C.bool

//SIB returns the number of bytes the CScalar has
func (c CBool) SIB() uint { return (uint)(C.sizeof_bool) }

//CPtr returns an unsafe pointer for CBool
func (c CBool) CPtr() unsafe.Pointer { return (unsafe.Pointer)(&c) }

//CSizet is a wrapper for C.size_t
type CSizet C.size_t

//SIB returns the number of bytes the CScalar has
func (c CSizet) SIB() uint { return (uint)(C.sizeof_size_t) }

//CPtr returns an unsafe pointer for CSizet
func (c CSizet) CPtr() unsafe.Pointer { return (unsafe.Pointer)(&c) }
package operator
import (
"github.com/matrixorigin/matrixone/pkg/container/nulls"
"github.com/matrixorigin/matrixone/pkg/container/types"
"github.com/matrixorigin/matrixone/pkg/container/vector"
"github.com/matrixorigin/matrixone/pkg/vectorize/gt"
"github.com/matrixorigin/matrixone/pkg/vectorize/lt"
"github.com/matrixorigin/matrixone/pkg/vm/process"
)
// less reports whether d1 < d2 for any ordered scalar type. The scale
// arguments are unused here; they exist so every comparator shares the
// LtOpFunc signature (decimal comparisons need them).
func less[T OrderedValue](d1, d2 interface{}, aScale, bScale int32) bool {
	l, v := d1.(T), d2.(T)
	return l < v
}

// less_B orders booleans with false < true.
func less_B(d1, d2 interface{}, aScale, bScale int32) bool {
	l, v := d1.(bool), d2.(bool)
	return !l && v
}

// less_D compares two decimal128 values using their respective scales.
func less_D(d1, d2 interface{}, aScale, bScale int32) bool {
	l, v := d1.(types.Decimal128), d2.(types.Decimal128)
	return types.CompareDecimal128Decimal128(l, v, aScale, bScale) < 0
}

// LtOpFunc compares two scalar values, with decimal scales where relevant.
type LtOpFunc = func(d1, d2 interface{}, aScale, bScale int32) bool

// LtOpFuncMap is populated from LtOpFuncVec by InitLtOpFuncMap; keys are
// data-type ids.
var LtOpFuncMap = map[int]LtOpFunc{}

// LtOpFuncVec lists one "<" comparator per data type.
// NOTE(review): the order presumably matches GetDatatypeID's numbering —
// confirm before reordering.
var LtOpFuncVec = []LtOpFunc{
	less[int8], less[int16], less[int32], less[int64], less[uint8], less[uint16], less[uint32],
	less[uint64], less[float32], less[float64], less[string], less_B, less[types.Date],
	less[types.Datetime], less[types.Decimal64], less_D,
}
// InitLtOpFuncMap fills LtOpFuncMap from LtOpFuncVec.
func InitLtOpFuncMap() {
	for i, fn := range LtOpFuncVec {
		LtOpFuncMap[i] = fn
	}
}
// StrLtOpFuncMap is populated from StrLtOpFuncVec by InitStrLtOpFuncMap;
// keys are operand-kind codes (col/const combinations).
var StrLtOpFuncMap = map[int]StrCompOpFunc{}

// StrLtOpFuncVec lists the string "<" comparators in operand-kind order:
// col/col, col/const, const/col, const/const.
var StrLtOpFuncVec = []StrCompOpFunc{
	lessCol_Col, lessCol_Const, lessConst_Col, lessConst_Const,
}

// lessCol_Col evaluates column < column for strings. The vectorized
// kernel returns the row indices where the comparison holds; the loop
// below expands those indices into a dense bool mask.
// NOTE(review): the expansion assumes rs holds strictly ascending row
// indices — confirm against lt.StrLt.
func lessCol_Col(d1, d2 interface{}) []bool {
	lvs, rvs := d1.(*types.Bytes), d2.(*types.Bytes)
	rs := make([]int64, len(lvs.Lengths))
	rs = lt.StrLt(lvs, rvs, rs)
	col := make([]bool, len(lvs.Lengths))
	rsi := 0
	for i := 0; i < len(col); i++ {
		if rsi >= len(rs) {
			break
		}
		if int64(i) == rs[rsi] {
			col[i] = true
			rsi++
		} else {
			col[i] = false
		}
	}
	return col
}

// lessCol_Const evaluates column < constant by computing constant > column
// with the vectorized Gt kernel, then expanding indices into a bool mask
// (same assumption about ascending indices as lessCol_Col).
func lessCol_Const(d1, d2 interface{}) []bool {
	lvs, rvs := d1.(*types.Bytes), d2.(*types.Bytes)
	rs := make([]int64, len(lvs.Lengths))
	rs = gt.StrGtScalar(rvs.Data, lvs, rs)
	col := make([]bool, len(lvs.Lengths))
	rsi := 0
	for i := 0; i < len(col); i++ {
		if rsi >= len(rs) {
			break
		}
		if int64(i) == rs[rsi] {
			col[i] = true
			rsi++
		} else {
			col[i] = false
		}
	}
	return col
}

// lessConst_Col evaluates constant < column with the vectorized Lt kernel,
// then expands the returned indices into a bool mask.
func lessConst_Col(d1, d2 interface{}) []bool {
	lvs, rvs := d1.(*types.Bytes), d2.(*types.Bytes)
	rs := make([]int64, len(rvs.Lengths))
	rs = lt.StrLtScalar(lvs.Data, rvs, rs)
	col := make([]bool, len(rvs.Lengths))
	rsi := 0
	for i := 0; i < len(col); i++ {
		if rsi >= len(rs) {
			break
		}
		if int64(i) == rs[rsi] {
			col[i] = true
			rsi++
		} else {
			col[i] = false
		}
	}
	return col
}

// lessConst_Const compares two constant strings directly.
func lessConst_Const(d1, d2 interface{}) []bool {
	lvs, rvs := d1.(*types.Bytes), d2.(*types.Bytes)
	return []bool{string(lvs.Data) < string(rvs.Data)}
}
func InitStrLtOpFuncMap() {
for i := 0; i < len(StrLeOpFuncVec); i++ {
StrLtOpFuncMap[i] = StrLtOpFuncVec[i]
}
}
// ColLtCol evaluates column < column. The result's null set is the union
// of both operands' null sets.
func ColLtCol[T DataValue](lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error) {
	n := GetRetColLen[T](lv)
	// int64(n)*1: presumably n result elements at 1 byte per bool — confirm
	// AllocVector's size unit.
	vec, err := proc.AllocVector(proc.GetBoolTyp(lv.Typ), int64(n)*1)
	if err != nil {
		return nil, err
	}
	nulls.Or(lv.Nsp, rv.Nsp, vec.Nsp)
	vector.SetCol(vec, GetRetCol[T](lv, rv, col_col, LtOpFuncMap, StrLtOpFuncMap))
	return vec, nil
}

// ColLtConst evaluates column < constant.
func ColLtConst[T DataValue](lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error) {
	n := GetRetColLen[T](lv)
	vec, err := proc.AllocVector(proc.GetBoolTyp(lv.Typ), int64(n)*1)
	if err != nil {
		return nil, err
	}
	nulls.Or(lv.Nsp, rv.Nsp, vec.Nsp)
	vector.SetCol(vec, GetRetCol[T](lv, rv, col_const, LtOpFuncMap, StrLtOpFuncMap))
	return vec, nil
}

// ColLtNull: comparison against NULL yields a scalar NULL result.
func ColLtNull[T DataValue](lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error) {
	return proc.AllocScalarNullVector(proc.GetBoolTyp(lv.Typ)), nil
}

// ConstLtCol evaluates constant < column.
func ConstLtCol[T DataValue](lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error) {
	n := GetRetColLen[T](lv)
	vec, err := proc.AllocVector(proc.GetBoolTyp(lv.Typ), int64(n)*1)
	if err != nil {
		return nil, err
	}
	nulls.Or(lv.Nsp, rv.Nsp, vec.Nsp)
	vector.SetCol(vec, GetRetCol[T](lv, rv, const_col, LtOpFuncMap, StrLtOpFuncMap))
	return vec, nil
}

// ConstLtConst evaluates constant < constant, producing a scalar result.
func ConstLtConst[T DataValue](lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error) {
	vec := proc.AllocScalarVector(proc.GetBoolTyp(lv.Typ))
	vector.SetCol(vec, GetRetCol[T](lv, rv, const_const, LtOpFuncMap, StrLtOpFuncMap))
	return vec, nil
}

// The remaining combinations involve a NULL operand and therefore always
// produce a scalar NULL result.
func ConstLtNull[T DataValue](lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error) {
	return proc.AllocScalarNullVector(proc.GetBoolTyp(lv.Typ)), nil
}

func NullLtCol[T DataValue](lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error) {
	return proc.AllocScalarNullVector(proc.GetBoolTyp(lv.Typ)), nil
}

func NullLtConst[T DataValue](lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error) {
	return proc.AllocScalarNullVector(proc.GetBoolTyp(lv.Typ)), nil
}

func NullLtNull[T DataValue](lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error) {
	return proc.AllocScalarNullVector(proc.GetBoolTyp(lv.Typ)), nil
}
// LtFunc evaluates "<" for one operand-kind / data-type pairing.
type LtFunc = func(lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error)

// LtFuncMap is populated from LtFuncVec by InitLtFuncMap. It is indexed by
// (leftOperandKind*3+rightOperandKind)*dataTypeNum + dataTypeID, matching
// the lookup in LtDataValue.
var LtFuncMap = map[int]LtFunc{}

// LtFuncVec enumerates every operand-kind combination (col/const/null ×
// col/const/null), each with one instantiation per supported data type.
var LtFuncVec = []LtFunc{
	ColLtCol[int8], ColLtCol[int16], ColLtCol[int32], ColLtCol[int64], ColLtCol[uint8], ColLtCol[uint16],
	ColLtCol[uint32], ColLtCol[uint64], ColLtCol[float32], ColLtCol[float64], ColLtCol[string], ColLtCol[bool],
	ColLtCol[types.Date], ColLtCol[types.Datetime], ColLtCol[types.Decimal64], ColLtCol[types.Decimal128],
	ColLtConst[int8], ColLtConst[int16], ColLtConst[int32], ColLtConst[int64], ColLtConst[uint8], ColLtConst[uint16],
	ColLtConst[uint32], ColLtConst[uint64], ColLtConst[float32], ColLtConst[float64], ColLtConst[string], ColLtConst[bool],
	ColLtConst[types.Date], ColLtConst[types.Datetime], ColLtConst[types.Decimal64], ColLtConst[types.Decimal128],
	ColLtNull[int8], ColLtNull[int16], ColLtNull[int32], ColLtNull[int64], ColLtNull[uint8], ColLtNull[uint16],
	ColLtNull[uint32], ColLtNull[uint64], ColLtNull[float32], ColLtNull[float64], ColLtNull[string], ColLtNull[bool],
	ColLtNull[types.Date], ColLtNull[types.Datetime], ColLtNull[types.Decimal64], ColLtNull[types.Decimal128],
	ConstLtCol[int8], ConstLtCol[int16], ConstLtCol[int32], ConstLtCol[int64], ConstLtCol[uint8], ConstLtCol[uint16],
	ConstLtCol[uint32], ConstLtCol[uint64], ConstLtCol[float32], ConstLtCol[float64], ConstLtCol[string], ConstLtCol[bool],
	ConstLtCol[types.Date], ConstLtCol[types.Datetime], ConstLtCol[types.Decimal64], ConstLtCol[types.Decimal128],
	ConstLtConst[int8], ConstLtConst[int16], ConstLtConst[int32], ConstLtConst[int64], ConstLtConst[uint8], ConstLtConst[uint16],
	ConstLtConst[uint32], ConstLtConst[uint64], ConstLtConst[float32], ConstLtConst[float64], ConstLtConst[string], ConstLtConst[bool],
	ConstLtConst[types.Date], ConstLtConst[types.Datetime], ConstLtConst[types.Decimal64], ConstLtConst[types.Decimal128],
	ConstLtNull[int8], ConstLtNull[int16], ConstLtNull[int32], ConstLtNull[int64], ConstLtNull[uint8], ConstLtNull[uint16],
	ConstLtNull[uint32], ConstLtNull[uint64], ConstLtNull[float32], ConstLtNull[float64], ConstLtNull[string], ConstLtNull[bool],
	ConstLtNull[types.Date], ConstLtNull[types.Datetime], ConstLtNull[types.Decimal64], ConstLtNull[types.Decimal128],
	NullLtCol[int8], NullLtCol[int16], NullLtCol[int32], NullLtCol[int64], NullLtCol[uint8], NullLtCol[uint16],
	NullLtCol[uint32], NullLtCol[uint64], NullLtCol[float32], NullLtCol[float64], NullLtCol[string], NullLtCol[bool],
	NullLtCol[types.Date], NullLtCol[types.Datetime], NullLtCol[types.Decimal64], NullLtCol[types.Decimal128],
	NullLtConst[int8], NullLtConst[int16], NullLtConst[int32], NullLtConst[int64], NullLtConst[uint8], NullLtConst[uint16],
	NullLtConst[uint32], NullLtConst[uint64], NullLtConst[float32], NullLtConst[float64], NullLtConst[string], NullLtConst[bool],
	NullLtConst[types.Date], NullLtConst[types.Datetime], NullLtConst[types.Decimal64], NullLtConst[types.Decimal128],
	NullLtNull[int8], NullLtNull[int16], NullLtNull[int32], NullLtNull[int64], NullLtNull[uint8], NullLtNull[uint16],
	NullLtNull[uint32], NullLtNull[uint64], NullLtNull[float32], NullLtNull[float64], NullLtNull[string], NullLtNull[bool],
	NullLtNull[types.Date], NullLtNull[types.Datetime], NullLtNull[types.Decimal64], NullLtNull[types.Decimal128],
}
// InitLtFuncMap initialises the operand-level maps and then fills
// LtFuncMap from LtFuncVec.
func InitLtFuncMap() {
	InitLtOpFuncMap()
	InitStrLtOpFuncMap()
	for i, fn := range LtFuncVec {
		LtFuncMap[i] = fn
	}
}
func LtDataValue[T DataValue](vectors []*vector.Vector, proc *process.Process) (*vector.Vector, error) {
lv := vectors[0]
rv := vectors[1]
lt, rt := GetTypeID(lv), GetTypeID(rv)
dataID := GetDatatypeID[T]()
vec, err := LtFuncMap[(lt*3+rt)*dataTypeNum+dataID](lv, rv, proc)
if err != nil {
return nil, err
}
return vec, nil
} | pkg/sql/plan2/function/operator/lt.go | 0.605566 | 0.434641 | lt.go | starcoder |
2D Rendering Code
*/
//-----------------------------------------------------------------------------
package sdf
import (
"image"
"image/color"
"image/png"
"os"
"github.com/llgcode/draw2d/draw2dimg"
)
//-----------------------------------------------------------------------------
// PNG is a png image object.
type PNG struct {
	name string // output file path used by Save
	bb Box2 // world-space region covered by the image
	pixels V2i // image size in pixels
	m *Map2 // maps between world and pixel coordinates
	img *image.RGBA // render target
}
// NewPNG returns an empty PNG object covering the region bb at the given
// pixel resolution. It fails if the coordinate map cannot be built.
func NewPNG(name string, bb Box2, pixels V2i) (*PNG, error) {
	m, err := NewMap2(bb, pixels, true)
	if err != nil {
		return nil, err
	}
	return &PNG{
		name:   name,
		bb:     bb,
		pixels: pixels,
		m:      m,
		img:    image.NewRGBA(image.Rect(0, 0, pixels[0]-1, pixels[1]-1)),
	}, nil
}
// RenderSDF2 renders a 2d signed distance field as gray scale.
//
// Fixes: the min/max trackers were previously seeded with 0, which skewed
// the gray-scale normalization whenever the field was entirely positive or
// entirely negative; and a constant field caused a division by zero.
func (d *PNG) RenderSDF2(s SDF2) {
	// Sample the distance field, tracking the true min/max from the first
	// sample onward.
	distance := make([]float64, d.pixels[0]*d.pixels[1])
	var dmax, dmin float64
	first := true
	xofs := 0
	for x := 0; x < d.pixels[0]; x++ {
		for y := 0; y < d.pixels[1]; y++ {
			dd := s.Evaluate(d.m.ToV2(V2i{x, y}))
			if first {
				dmax, dmin = dd, dd
				first = false
			} else {
				dmax = Max(dmax, dd)
				dmin = Min(dmin, dd)
			}
			distance[xofs+y] = dd
		}
		xofs += d.pixels[1]
	}
	// Guard against a constant field (dmax == dmin).
	drange := dmax - dmin
	if drange == 0 {
		drange = 1
	}
	// Scale the samples to 0..255 gray and set the pixel values.
	xofs = 0
	for x := 0; x < d.pixels[0]; x++ {
		for y := 0; y < d.pixels[1]; y++ {
			val := 255.0 * ((distance[xofs+y] - dmin) / drange)
			d.img.Set(x, y, color.Gray{uint8(val)})
		}
		xofs += d.pixels[1]
	}
}
// Line adds a single red line segment from p0 to p1 to the png object.
// It delegates to Lines so the stroke setup lives in one place; the
// previous implementation duplicated that code verbatim.
func (d *PNG) Line(p0, p1 V2) {
	d.Lines([]V2{p0, p1})
}
// Lines adds a red, 1-pixel-wide polyline through the points of s to the
// png object.
func (d *PNG) Lines(s V2Set) {
	gc := draw2dimg.NewGraphicContext(d.img)
	gc.SetFillColor(color.RGBA{0xff, 0, 0, 0xff})
	gc.SetStrokeColor(color.RGBA{0xff, 0, 0, 0xff})
	gc.SetLineWidth(1)
	// Convert each world-space point to pixel coordinates before drawing.
	p := d.m.ToV2i(s[0])
	gc.MoveTo(float64(p[0]), float64(p[1]))
	for i := 1; i < len(s); i++ {
		p := d.m.ToV2i(s[i])
		gc.LineTo(float64(p[0]), float64(p[1]))
	}
	gc.Stroke()
}

// Triangle adds triangle t as a closed red outline to the png object.
func (d *PNG) Triangle(t Triangle2) {
	d.Lines([]V2{t[0], t[1], t[2], t[0]})
}
// Save writes the rendered image to the file named at construction time.
// The error from png.Encode is now propagated instead of being silently
// discarded.
func (d *PNG) Save() error {
	f, err := os.Create(d.name)
	if err != nil {
		return err
	}
	defer f.Close()
	return png.Encode(f, d.img)
}
//-----------------------------------------------------------------------------
package solver
import (
"github.com/mokiat/gomath/sprec"
"github.com/mokiat/lacking/game/physics"
)
var _ physics.DBConstraintSolver = (*HingedRod)(nil)
// NewHingedRod creates a new HingedRod constraint solution.
func NewHingedRod() *HingedRod {
result := &HingedRod{
length: 1.0,
}
result.DBJacobianConstraintSolver = physics.NewDBJacobianConstraintSolver(result.calculate)
return result
}
// HingedRod represents the solution for a constraint
// that keeps two bodies tied together with a hard link
// of specific length.
type HingedRod struct {
	*physics.DBJacobianConstraintSolver

	// Anchor offsets, expressed in each body's local space.
	primaryAnchor   sprec.Vec3
	secondaryAnchor sprec.Vec3
	// length is the fixed distance the rod maintains between the anchors.
	length float32
}

// PrimaryAnchor returns the attachment point of the link
// on the primary body.
func (r *HingedRod) PrimaryAnchor() sprec.Vec3 {
	return r.primaryAnchor
}

// SetPrimaryAnchor changes the attachment point of the link
// on the primary body. It returns r to allow call chaining.
func (r *HingedRod) SetPrimaryAnchor(anchor sprec.Vec3) *HingedRod {
	r.primaryAnchor = anchor
	return r
}

// SecondaryAnchor returns the attachment point of the link
// on the secondary body.
func (r *HingedRod) SecondaryAnchor() sprec.Vec3 {
	return r.secondaryAnchor
}

// SetSecondaryAnchor changes the attachment point of the link
// on the secondary body. It returns r to allow call chaining.
func (r *HingedRod) SetSecondaryAnchor(anchor sprec.Vec3) *HingedRod {
	r.secondaryAnchor = anchor
	return r
}

// Length returns the link length.
func (r *HingedRod) Length() float32 {
	return r.length
}

// SetLength changes the link length. It returns r to allow call chaining.
func (r *HingedRod) SetLength(length float32) *HingedRod {
	r.length = length
	return r
}
// calculate produces the pair jacobian and positional drift for the rod.
// It is invoked by the embedded DBJacobianConstraintSolver on each solver
// iteration.
func (r *HingedRod) calculate(ctx physics.DBSolverContext) (physics.PairJacobian, float32) {
	// Rotate the local anchor offsets into world space and compute the two
	// world-space attachment points.
	firstRadiusWS := sprec.QuatVec3Rotation(ctx.Primary.Orientation(), r.primaryAnchor)
	secondRadiusWS := sprec.QuatVec3Rotation(ctx.Secondary.Orientation(), r.secondaryAnchor)
	firstAnchorWS := sprec.Vec3Sum(ctx.Primary.Position(), firstRadiusWS)
	secondAnchorWS := sprec.Vec3Sum(ctx.Secondary.Position(), secondRadiusWS)
	deltaPosition := sprec.Vec3Diff(secondAnchorWS, firstAnchorWS)
	// When the anchors (nearly) coincide the direction is undefined, so fall
	// back to the X axis instead of normalizing a near-zero vector.
	normal := sprec.BasisXVec3()
	if deltaPosition.SqrLength() > sqrEpsilon {
		normal = sprec.UnitVec3(deltaPosition)
	}
	// Linear slopes are the constraint normal with opposite signs for the two
	// bodies. The angular slopes are the component-wise cross products
	// r x n of each anchor radius with the normal (negated for the primary).
	return physics.PairJacobian{
			Primary: physics.Jacobian{
				SlopeVelocity: sprec.NewVec3(
					-normal.X,
					-normal.Y,
					-normal.Z,
				),
				SlopeAngularVelocity: sprec.NewVec3(
					-(normal.Z*firstRadiusWS.Y - normal.Y*firstRadiusWS.Z),
					-(normal.X*firstRadiusWS.Z - normal.Z*firstRadiusWS.X),
					-(normal.Y*firstRadiusWS.X - normal.X*firstRadiusWS.Y),
				),
			},
			Secondary: physics.Jacobian{
				SlopeVelocity: sprec.NewVec3(
					normal.X,
					normal.Y,
					normal.Z,
				),
				SlopeAngularVelocity: sprec.NewVec3(
					normal.Z*secondRadiusWS.Y-normal.Y*secondRadiusWS.Z,
					normal.X*secondRadiusWS.Z-normal.Z*secondRadiusWS.X,
					normal.Y*secondRadiusWS.X-normal.X*secondRadiusWS.Y,
				),
			},
		},
		// Drift: how far the current anchor distance deviates from the
		// configured rod length.
		deltaPosition.Length() - r.length
}
package main
import (
"math"
"strings"
)
// Diff is the result of comparing two texts: an ordered list of blocks that
// are either shared (equal) or divergent between the two sides.
type Diff struct {
	Blocks []Block
}

// Block is common interface for EqualBlock and DiffBlock
type Block interface {
	// Returns true on EqualBlock, false on DiffBlock
	IsEqual() bool
}

// EqualBlock is a run of lines present in both inputs.
type EqualBlock struct {
	Equals []Equal
}

// IsEqual reports that this block is shared by both sides.
func (block EqualBlock) IsEqual() bool {
	return true
}

// DiffBlock holds the lines that differ between the two sides.
type DiffBlock struct {
	LeftChanges  []Change
	RightChanges []Change
}

// IsEqual reports that this block is a difference.
func (block DiffBlock) IsEqual() bool {
	return false
}

// Equal is a single line found in both inputs, with its 0-based position on
// each side.
type Equal struct {
	LeftLine  int
	RightLine int
	Value     string
}

// Change is a single line present on only one side.
type Change struct {
	Line  int
	Value string
}
// NewDiffLines compares two texts line by line and returns the resulting
// Diff. Matching lines are paired greedily: for each left line, the first
// matching right line wins unless an existing pairing is judged a better fit
// (see checkAndCleanLessFitItems).
//
// NOTE(review): the matching phase is O(left*right) line comparisons, which
// can be slow for large inputs — confirm acceptable for expected sizes.
func NewDiffLines(left, right string) Diff {
	var rawBlocks []Block
	leftLines := strings.Split(strings.TrimSuffix(left, "\n"), "\n")
	rightLines := strings.Split(strings.TrimSuffix(right, "\n"), "\n")
	for leftIndex, leftLine := range leftLines {
		for rightIndex, rightLine := range rightLines {
			if leftLine == rightLine {
				matchIsValid := checkAndCleanLessFitItems(&rawBlocks, leftIndex, rightIndex)
				if matchIsValid {
					addMatchToBlocks(&rawBlocks, leftIndex, rightIndex, leftLine)
					break
				}
			}
		}
	}
	// Remove empty equal blocks (pairs may have been deleted during matching).
	var blocks []Block
	for index, block := range rawBlocks {
		if block.IsEqual() {
			if len(block.(EqualBlock).Equals) > 0 {
				blocks = append(blocks, block)
			}
		} else {
			blocks = append(blocks, block)
		}
		rawBlocks[index] = nil
	}
	// Filling in diffs: each DiffBlock collects the unmatched lines between
	// the surrounding EqualBlocks on both sides.
	for index, block := range blocks {
		if !block.IsEqual() {
			var startLeft, startRight, endLeft, endRight int
			if index == 0 {
				startLeft = 0
				startRight = 0
			} else {
				// Start right after the last pair of the preceding equal block.
				startLeft = blocks[index-1].(EqualBlock).Equals[len(blocks[index-1].(EqualBlock).Equals)-1].LeftLine + 1
				startRight = blocks[index-1].(EqualBlock).Equals[len(blocks[index-1].(EqualBlock).Equals)-1].RightLine + 1
			}
			if index == len(blocks)-1 {
				// NOTE(review): trailing blocks stop at len-1, so the final
				// line of each input is never emitted as a change — confirm
				// this is intended.
				endLeft = len(leftLines) - 1
				endRight = len(rightLines) - 1
			} else {
				// Stop at the first pair of the following equal block.
				endLeft = blocks[index+1].(EqualBlock).Equals[0].LeftLine
				endRight = blocks[index+1].(EqualBlock).Equals[0].RightLine
			}
			for i := startLeft; i < endLeft; i++ {
				blocks[index] = DiffBlock{LeftChanges: append(blocks[index].(DiffBlock).LeftChanges, Change{
					Line:  i,
					Value: leftLines[i],
				}),
					RightChanges: []Change{}}
			}
			for i := startRight; i < endRight; i++ {
				blocks[index] = DiffBlock{LeftChanges: blocks[index].(DiffBlock).LeftChanges,
					RightChanges: append(blocks[index].(DiffBlock).RightChanges, Change{
						Line:  i,
						Value: rightLines[i],
					})}
			}
		}
	}
	return Diff{Blocks: blocks}
}
// removeItemFromEquals deletes the element at index from slice while keeping
// the order of the remaining elements, and returns the shortened slice.
func removeItemFromEquals(slice []Equal, index int) []Equal {
	// Shift the tail left over the removed element, clear the now-duplicated
	// last slot, and drop it.
	copy(slice[index:], slice[index+1:])
	last := len(slice) - 1
	slice[last] = Equal{}
	return slice[:last]
}
// checkAndCleanLessFitItems checks all possible collisions: cases where an
// existing match already points at matchedLine, or at a right line beyond it.
// When such matches are found, the function decides which pairing is the
// better fit (existing or current) by comparing the distance between the
// paired line numbers: it removes the less-fit existing pairs, or returns
// false to block adding the current match into the blocks.
//
// NOTE(review): the inner loop ranges over a snapshot of the Equals slice
// while removeItemFromEquals mutates the stored copy; removing more than one
// item from the same block in a single call may act on stale data — confirm.
func checkAndCleanLessFitItems(blocks *[]Block, curentLine, matchedLine int) bool {
	for i, block := range *blocks {
		if block.IsEqual() {
			for j, equal := range block.(EqualBlock).Equals {
				if equal.RightLine >= matchedLine {
					// A smaller |left-right| distance is considered a better fit.
					if math.Abs(float64(equal.LeftLine-equal.RightLine)) > math.Abs(float64(curentLine-matchedLine)) {
						(*blocks)[i] = EqualBlock{removeItemFromEquals(block.(EqualBlock).Equals, j)}
					} else {
						return false
					}
				}
			}
		}
	}
	return true
}
// addMatchToBlocks appends a new matched line pair to blocks. If the pair is
// not adjacent to the previous match (a gap of more than one line on either
// side), an empty DiffBlock is inserted before starting a new EqualBlock;
// otherwise the pair is appended to the current trailing EqualBlock.
func addMatchToBlocks(blocks *[]Block, left, right int, value string) {
	if len(*blocks) > 0 && (*blocks)[len(*blocks)-1].IsEqual() {
		// Gap check against the last pair of the trailing EqualBlock.
		if len(*blocks) > 0 && (left-(*blocks)[len(*blocks)-1].(EqualBlock).Equals[len((*blocks)[len(*blocks)-1].(EqualBlock).Equals)-1].LeftLine > 1 ||
			right-(*blocks)[len(*blocks)-1].(EqualBlock).Equals[len((*blocks)[len(*blocks)-1].(EqualBlock).Equals)-1].RightLine > 1) {
			// Gap found: placeholder DiffBlock, then a fresh EqualBlock.
			*blocks = append(*blocks, DiffBlock{}, EqualBlock{[]Equal{{
				LeftLine:  left,
				RightLine: right,
				Value:     value,
			}}})
		} else {
			// Adjacent match: extend the current EqualBlock.
			(*blocks)[len(*blocks)-1] = EqualBlock{
				Equals: append((*blocks)[len(*blocks)-1].(EqualBlock).Equals, Equal{
					LeftLine:  left,
					RightLine: right,
					Value:     value,
				}),
			}
		}
	} else {
		// First block overall, or the last block is a DiffBlock: start a new
		// EqualBlock.
		*blocks = append(*blocks, EqualBlock{[]Equal{{
			LeftLine:  left,
			RightLine: right,
			Value:     value,
		}}})
	}
}
package astar
import (
	"fmt"
	"strings"
)
/*** Helper functions ***/

// abs returns the absolute value of a.
func abs(a int) int {
	if a >= 0 {
		return a
	}
	return -a
}

// max returns the larger of a and b.
func max(a, b int) int {
	if b > a {
		return b
	}
	return a
}

// min returns the smaller of a and b.
func min(a, b int) int {
	if b < a {
		return b
	}
	return a
}
/*** MapData type and related consts ***/

// Tile information: LAND (1) is passable, WALL (2) is not.
const (
	LAND = 1 << iota
	WALL
)

// Tile movement costs, scaled by 1000 so integer math can be used.
// A diagonal step costs sqrt(2) times a straight one (1414 ~= 1000*sqrt(2)).
const (
	COST_STRAIGHT = 1000
	COST_DIAGONAL = 1414
)
// MapData is the tile grid, indexed as data[row][col].
type MapData [][]int

// NewMapData returns a rows x cols map with every tile zeroed.
func NewMapData(rows, cols int) MapData {
	m := make(MapData, rows)
	for i := range m {
		m[i] = make([]int, cols)
	}
	return m
}

// Clone returns a deep copy of the map data.
func (m MapData) Clone() MapData {
	dst := NewMapData(len(m), len(m[0]))
	for i, row := range m {
		copy(dst[i], row)
	}
	return dst
}
// str_map renders data as ASCII art: "." for land, "#" for walls, "?" for
// unknown tiles, and "o" for any cell present in nodes (e.g. a found path).
// Each row is terminated with a newline.
// Uses strings.Builder instead of repeated string concatenation, which was
// quadratic in the map size.
func str_map(data MapData, nodes []*Node) string {
	var b strings.Builder
	for i, row := range data {
		for j, cell := range row {
			added := false
			for _, node := range nodes {
				if node.X == i && node.Y == j {
					b.WriteByte('o')
					added = true
					break
				}
			}
			if added {
				continue
			}
			switch cell {
			case LAND:
				b.WriteByte('.')
			case WALL:
				b.WriteByte('#')
			default: // Unknown
				b.WriteByte('?')
			}
		}
		b.WriteByte('\n')
	}
	return b.String()
}
/*** Node type ***/

// Node is a single tile in the search graph. X and Y are coordinates; parent
// links back to the node we came from, which lets the final path be
// reconstructed. g is the cost from the start along the best known path, h is
// the heuristic estimate (air-line distance) to the goal, and f = g + h.
type Node struct {
	X, Y       int
	parent     *Node
	f, g, h    int
	heap_index int // only used and maintained by pqueue
}

// NewNode creates a new unattached Node at the given coordinates.
// parent, f, g and h all start at their zero values.
func NewNode(x, y int) *Node {
	return &Node{
		X: x,
		Y: y,
	}
}

// String returns a debug representation of the node.
// Fixed: the original printed &n with %d — the address of the local pointer
// variable, not of the node (and a verb go vet rejects). %p on n prints the
// node's own address.
func (n *Node) String() string {
	return fmt.Sprintf("<Node x:%d y:%d addr:%p>", n.X, n.Y, n)
}
/*** nodeList type ***/

// nodeList indexes nodes by their (x, y) position for O(1) membership tests.
type nodeList struct {
	nodes      map[int]*Node
	rows, cols int
}

func newNodeList(rows, cols int) *nodeList {
	return &nodeList{
		nodes: make(map[int]*Node, rows*cols),
		rows:  rows,
		cols:  cols,
	}
}

// key flattens 2D coordinates into the map key.
func (n *nodeList) key(x, y int) int {
	return x + y*n.rows
}

func (n *nodeList) addNode(node *Node) {
	n.nodes[n.key(node.X, node.Y)] = node
}

func (n *nodeList) getNode(x, y int) *Node {
	return n.nodes[n.key(x, y)]
}

func (n *nodeList) removeNode(node *Node) {
	delete(n.nodes, n.key(node.X, node.Y))
}

func (n *nodeList) hasNode(node *Node) bool {
	return n.getNode(node.X, node.Y) != nil
}
/*** Graph type ***/

// Graph is the search graph built lazily on top of the map data.
type Graph struct {
	nodes *nodeList // Used to avoid duplicated nodes!
	data  MapData
}

// NewGraph returns a Graph for the given map data. Nodes are created on
// demand via Graph.Node rather than up front.
func NewGraph(map_data MapData) *Graph {
	//var start, stop *Node
	return &Graph{
		nodes: newNodeList(len(map_data), len(map_data[0])),
		data:  map_data,
	}
}

// Node gets or creates a *Node based on x, y coordinates, avoiding duplicated
// nodes. It returns nil when the tile at (x, y) is a WALL.
func (g *Graph) Node(x, y int) *Node {
	// Check if node is already in the graph
	var node *Node
	node = g.nodes.getNode(x, y)
	if node == nil && (g.data[x][y] != WALL) {
		// Create a new node and add it to the graph
		node = NewNode(x, y)
		g.nodes.addNode(node)
	}
	return node
}
/* Astar func */

// retracePath walks parent links from the goal node back to the start and
// returns the nodes ordered start-to-goal.
func retracePath(current_node *Node) []*Node {
	var path []*Node
	for n := current_node; n != nil; n = n.parent {
		path = append(path, n)
	}
	// The walk produced goal-to-start order; reverse in place.
	for left, right := 0, len(path)-1; left < right; left, right = left+1, right-1 {
		path[left], path[right] = path[right], path[left]
	}
	return path
}
// Heuristic estimates the remaining cost from tile to stop.
// Diagonal/Chebyshev distance is used: move diagonally as far as possible
// (h_diag steps), then straight for the remaining Manhattan distance.
func Heuristic(tile, stop *Node) (h int) {
	h_diag := min(abs(tile.X-stop.X), abs(tile.Y-stop.Y))
	h_stra := abs(tile.X-stop.X) + abs(tile.Y-stop.Y)
	// Each diagonal step replaces two straight steps, hence the -2*h_diag.
	h = COST_DIAGONAL*h_diag + COST_STRAIGHT*(h_stra-2*h_diag)
	/* TODO: Breaking ties:
	dx1 := tile.X - stop.X
	dy1 := tile.Y - stop.Y
	dx2 := start.X - stop.X
	dy2 := start.Y - stop.Y
	cross := abs(dx1*dy2 - dx2*dy1)
	h += cross * COST_DIAGONAL/100
	*/
	return
}
// adjecentDirs8 lists the 8 neighbour offsets as {dx, dy, step cost};
// diagonal moves carry the higher COST_DIAGONAL.
var adjecentDirs8 = [][3]int{
	{-1, -1, COST_DIAGONAL}, {-1, 0, COST_STRAIGHT}, {-1, 1, COST_DIAGONAL},
	{0, -1, COST_STRAIGHT}, {0, 1, COST_STRAIGHT},
	{1, -1, COST_DIAGONAL}, {1, 0, COST_STRAIGHT}, {1, 1, COST_DIAGONAL},
}

// adjecentDirs4 lists the 4 orthogonal neighbour offsets and costs.
var adjecentDirs4 = [][3]int{
	{-1, 0, COST_STRAIGHT},
	{0, -1, COST_STRAIGHT}, {0, 1, COST_STRAIGHT},
	{1, 0, COST_STRAIGHT},
}
// Astar runs the A* search algorithm over map_data from (startx, starty) to
// (stopx, stopy) and returns the path as a start-to-goal slice of nodes, or
// nil when no path exists. dir8 selects 8-way movement (with diagonals)
// instead of 4-way. See http://en.wikipedia.org/wiki/A*_search_algorithm
func Astar(map_data MapData, startx, starty, stopx, stopy int, dir8 bool) []*Node {
	graph := NewGraph(map_data)
	rows, cols := len(graph.data), len(graph.data[0])
	// Create lists: closedSet holds fully expanded nodes, openSet the
	// discovered-but-unexpanded frontier, and pq orders the frontier by f.
	closedSet := newNodeList(rows, cols)
	openSet := newNodeList(rows, cols)
	pq := make(PriorityQueue, 0, rows*cols) // heap, used to find minF
	// Move in 8 or 4 directions?
	var adjecentDirs [][3]int
	if dir8 {
		adjecentDirs = adjecentDirs8
	} else {
		adjecentDirs = adjecentDirs4
	}
	// TODO: GUARD: startx... stopy inside array range?
	// Add start node to the task list
	start := NewNode(startx, starty)
	stop := NewNode(stopx, stopy)
	openSet.addNode(start)
	pq.PushNode(start)
	for len(openSet.nodes) != 0 {
		// Get the node with the min f
		//current := openSet.minF()
		current := pq.PopNode()
		openSet.removeNode(current)
		closedSet.addNode(current)
		if current.X == stop.X && current.Y == stop.Y {
			// Finished, return shortest path
			//fmt.Println(str_map(map_data, retracePath(current)))
			return retracePath(current)
		}
		for _, adir := range(adjecentDirs) {
			x, y := (current.X + adir[0]), (current.Y + adir[1])
			// Check if x, y is inside the map:
			if (x < 0) || (x >= rows) || (y < 0) || (y >= cols) {
				continue
			}
			neighbor := graph.Node(x, y)
			if neighbor == nil || closedSet.hasNode(neighbor) {
				// Wall, or already fully explored node
				continue
			}
			g_score := current.g + adir[2]
			if !openSet.hasNode(neighbor) {
				// Add new interesting node to the frontier.
				neighbor.parent = current
				neighbor.g = g_score
				neighbor.f = neighbor.g + Heuristic(neighbor, stop)
				openSet.addNode(neighbor)
				pq.PushNode(neighbor)
			} else if g_score < neighbor.g {
				// Found a cheaper route to an already-open node: re-parent it
				// and re-insert into the heap with the lower f.
				pq.RemoveNode(neighbor)
				neighbor.parent = current
				neighbor.g = g_score
				neighbor.f = neighbor.g + Heuristic(neighbor, stop)
				pq.PushNode(neighbor)
			}
		}
	}
	return nil
}
// This file contains the definitions of the non-math elementary
// (builtin) functions.
package lisp1_5
// evalInit wires up the table of elementary (builtin) functions and the
// T/F/NIL constant atoms. The table is only built on the first call.
func evalInit() {
	if elementary == nil {
		// Initialized here to avoid initialization loop.
		elementary = funcMap{
			tokAdd:   (*Context).addFunc,
			tokAnd:   (*Context).andFunc,
			tokApply: (*Context).applyFunc,
			tokAtom:  (*Context).atomFunc,
			tokCar:   (*Context).carFunc,
			tokCdr:   (*Context).cdrFunc,
			tokCons:  (*Context).consFunc,
			tokDefn:  (*Context).defnFunc,
			tokDiv:   (*Context).divFunc,
			tokEq:    (*Context).eqFunc,
			tokGe:    (*Context).geFunc,
			tokGt:    (*Context).gtFunc,
			tokLe:    (*Context).leFunc,
			tokList:  (*Context).listFunc,
			tokLt:    (*Context).ltFunc,
			tokMul:   (*Context).mulFunc,
			tokNe:    (*Context).neFunc,
			tokNull:  (*Context).nullFunc,
			tokOr:    (*Context).orFunc,
			tokRem:   (*Context).remFunc,
			tokSub:   (*Context).subFunc,
		}
	}
	constT = atomExpr(tokT)
	constF = atomExpr(tokF)
	constNIL = atomExpr(tokNil)
}
// applyFunc implements the builtin apply: the first element of expr is
// applied as a function to the remaining elements.
func (c *Context) applyFunc(name *token, expr *Expr) *Expr {
	return c.apply("applyFunc", Car(expr), Cdr(expr))
}

// defnFunc implements function definition. Each element of the argument list
// is a (name body) pair whose body gets bound to the name in the context.
// The result is the list of defined names, in definition order.
func (c *Context) defnFunc(name *token, expr *Expr) *Expr {
	var names []*Expr
	for expr = Car(expr); expr != nil; expr = Cdr(expr) {
		fn := Car(expr)
		if fn == nil {
			errorf("empty function in defn")
		}
		name := Car(fn)
		atom := name.getAtom()
		if atom == nil {
			errorf("malformed defn")
		}
		names = append(names, name)
		c.set(atom, Car(Cdr(fn)))
	}
	// Cons the names back-to-front so the result list is in original order.
	var result *Expr
	for i := len(names) - 1; i >= 0; i-- {
		result = Cons(names[i], result)
	}
	return result
}

// atomFunc implements (atom x): T when x is an atom, F otherwise.
func (c *Context) atomFunc(name *token, expr *Expr) *Expr {
	atom := Car(expr)
	return truthExpr(atom != nil && atom.atom != nil)
}

// carFunc implements (car x).
func (c *Context) carFunc(name *token, expr *Expr) *Expr {
	return Car(Car(expr))
}

// cdrFunc implements (cdr x).
func (c *Context) cdrFunc(name *token, expr *Expr) *Expr {
	return Cdr(Car(expr))
}

// cadrFunc implements the c[ad]+r family (cadr, cdar, caddr, ...): the
// letters between 'c' and 'r' are applied right-to-left as car/cdr steps.
// It returns nil if the builtin's name is not of that form.
func (c *Context) cadrFunc(name *token, expr *Expr) *Expr {
	str := name.text
	if !isCadR(str) {
		return nil
	}
	expr = Car(expr)
	for i := len(str) - 2; expr != nil && i > 0; i-- {
		if str[i] == 'a' {
			expr = Car(expr)
		} else {
			expr = Cdr(expr)
		}
	}
	return expr
}
// consFunc implements (cons a b).
func (c *Context) consFunc(name *token, expr *Expr) *Expr {
	return Cons(Car(expr), Car(Cdr(expr)))
}

// eqFunc implements (eq a b), returning the T or F constant atom.
func (c *Context) eqFunc(name *token, expr *Expr) *Expr {
	a := Car(expr)
	b := Car(Cdr((expr)))
	return truthExpr(eq(a, b))
}

// eq reports whether two expressions are equal atoms: both nil, or atoms of
// the same token type that compare numerically equal (numbers) or are the
// identical token (everything else).
func eq(a, b *Expr) bool {
	if a == nil || b == nil {
		return a == nil && b == nil
	}
	if a.atom == nil || b.atom == nil || a.atom.typ != b.atom.typ {
		return false
	}
	if a.atom.typ == tokenNumber {
		return a.atom.num.Cmp(b.atom.num) == 0
	}
	return a.atom == b.atom
}
// listFunc implements (list ...); an empty argument list yields nil.
func (c *Context) listFunc(name *token, expr *Expr) *Expr {
	if expr == nil {
		return nil
	}
	return Cons(Car(expr), Cdr(expr))
}

// nullFunc implements (null x): T when x is nil.
func (c *Context) nullFunc(name *token, expr *Expr) *Expr {
	return truthExpr(Car(expr) == nil)
}

// atomExpr wraps a token in an expression cell.
func atomExpr(tok *token) *Expr {
	return &Expr{
		atom: tok,
	}
}

// truthExpr converts the boolean argument to the constant atom T or F.
func truthExpr(t bool) *Expr {
	if t {
		return constT
	}
	return constF
}

// isNumber reports whether e is a numeric atom.
func (e *Expr) isNumber() bool {
	return e != nil && e.atom != nil && e.atom.typ == tokenNumber
}
package model
import (
"image"
"image/draw"
"math/rand"
"sync"
)
// Mutex to be used to synchronize model modifications
var Mutex sync.Mutex

// The model/data of the labyrinth, indexed as Lab[row][col].
var Lab [][]Block

// MovingObj is a struct describing a moving object.
type MovingObj struct {
	// The position in the labyrinth in pixel coordinates
	Pos struct {
		X, Y float64
	}

	// Direction where the object is facing toward
	Direction Dir

	// Target position the object is moving to
	TargetPos image.Point

	// Images for each direction, indexed by Direction; each has zero Min point
	Imgs []*image.RGBA
}
// DrawImg draws the image of the MovingObj to the LabImg.
func (m *MovingObj) DrawImg() {
	m.DrawWithImg(m.Imgs[m.Direction])
}

// EraseImg erases the image of the MovingObj from the LabImg by drawing the empty block over it.
func (m *MovingObj) EraseImg() {
	m.DrawWithImg(EmptyImg)
}

// DrawWithImg draws the specified image at the position of the moving object onto the LabImg.
func (m *MovingObj) DrawWithImg(img image.Image) {
	DrawImgAt(img, int(m.Pos.X), int(m.Pos.Y))
}

// DrawImgAt draws the specified image at the specified position which specifies the center of the area to draw.
// The size of the drawn area is one block (BlockSize x BlockSize).
func DrawImgAt(img image.Image, x, y int) {
	r := image.Rect(0, 0, BlockSize, BlockSize).Add(image.Point{x - BlockSize/2, y - BlockSize/2})
	draw.Draw(LabImg, r, img, image.Point{}, draw.Over)
}
// Gopher is our hero, the moving object the user can control.
var Gopher = new(MovingObj)

// Dead tells if Gopher died.
var Dead bool

// Won tells if we won.
var Won bool

// TargetPoss holds the queued target positions for Gopher; together they
// define the path Gopher will move along.
var TargetPoss = make([]image.Point, 0, 20)

// Bulldogs is the slice of Bulldogs, the ancient enemy of Gophers.
var Bulldogs []*MovingObj

// ExitPos is the exit position, in pixel coordinates.
var ExitPos = image.Point{}

// NewGameCh is the channel used to signal a new game.
var NewGameCh = make(chan int, 1)

// MouseBtnRight is the constant for the right Mouse button value in the Click struct.
// Button value for left and middle may not be the same for older browsers, but right button always has this value.
const MouseBtnRight = 2

// Click describes a mouse click.
type Click struct {
	// X, Y are the mouse coordinates in pixel, in the coordinate system of the Labyrinth
	X, Y int
	// Btn is the mouse button
	Btn int
}

// ClickCh is the channel to receive mouse clicks on (view package sends, ctrl package (engine) processes them).
var ClickCh = make(chan Click, 10)
// InitNew initializes a new game: allocates a fresh labyrinth image,
// regenerates the labyrinth, places Gopher and the Bulldogs, redraws the
// labyrinth image and sets the exit position.
func InitNew() {
	LabImg = image.NewRGBA(image.Rect(0, 0, LabWidth, LabHeight))
	// Bulldog count scales with the labyrinth area and the configured density.
	Bulldogs = make([]*MovingObj, int(float64(Rows*Cols)*BulldogDensity/1000))
	Dead = false
	Won = false
	initLab()
	initGopher()
	initBulldogs()
	initLabImg()
	// Exit is at the center of the bottom-right passable block.
	ExitPos.X, ExitPos.Y = (Cols-2)*BlockSize+BlockSize/2, (Rows-2)*BlockSize+BlockSize/2
}
// initLab initializes and generates a new Labyrinth.
func initLab() {
	Lab = make([][]Block, Rows)
	for i := range Lab {
		Lab[i] = make([]Block, Cols)
	}

	// Zero value of the labyrinth is full of empty blocks

	// generate labyrinth
	genLab()
}

// initGopher initializes Gopher.
func initGopher() {
	// Position Gopher to top left corner (center of block (1,1))
	Gopher.Pos.X = BlockSize + BlockSize/2
	Gopher.Pos.Y = Gopher.Pos.X
	Gopher.Direction = DirRight
	Gopher.TargetPos.X, Gopher.TargetPos.Y = int(Gopher.Pos.X), int(Gopher.Pos.Y)
	Gopher.Imgs = GopherImgs

	// Throw away queued targets
	TargetPoss = TargetPoss[0:0]
}
// initBulldogs creates and initializes the Bulldogs.
func initBulldogs() {
	for i := 0; i < len(Bulldogs); i++ {
		bd := new(MovingObj)
		Bulldogs[i] = bd

		// Place bulldog at a random position.
		var row, col = int(Gopher.Pos.Y) / BlockSize, int(Gopher.Pos.X) / BlockSize
		// Give some space to Gopher: do not generate Bulldogs too close.
		// The loop body is empty on purpose: starting from Gopher's block
		// (which always fails the distance check), it re-rolls row/col until
		// the candidate is more than 4 blocks away on at least one axis.
		for gr, gc := row, col; (row-gr)*(row-gr) <= 16 && (col-gc)*(col-gc) <= 16; row, col = rPassPos(0, Rows), rPassPos(0, Cols) {
		}
		bd.Pos.X = float64(col*BlockSize + BlockSize/2)
		bd.Pos.Y = float64(row*BlockSize + BlockSize/2)
		bd.TargetPos.X, bd.TargetPos.Y = int(bd.Pos.X), int(bd.Pos.Y)
		bd.Imgs = BulldogImgs
	}
}

// initLabImg initializes and draws the image of the Labyrinth.
func initLabImg() {
	// Clear the labyrinth image
	draw.Draw(LabImg, LabImg.Bounds(), EmptyImg, image.Pt(0, 0), draw.Over)

	// Draw walls
	zeroPt := image.Point{}
	for ri, row := range Lab {
		for ci, block := range row {
			if block == BlockWall {
				x, y := ci*BlockSize, ri*BlockSize
				rect := image.Rect(x, y, x+BlockSize, y+BlockSize)
				draw.Draw(LabImg, rect, WallImg, zeroPt, draw.Over)
			}
		}
	}
}
// genLab generates a random labyrinth.
func genLab() {
	// Create a "frame": walls all around the border.
	for ri := range Lab {
		Lab[ri][0] = BlockWall
		Lab[ri][Cols-1] = BlockWall
	}
	for ci := range Lab[0] {
		Lab[0][ci] = BlockWall
		Lab[Rows-1][ci] = BlockWall
	}

	genLabArea(0, 0, Rows-1, Cols-1)
}

// genLabArea generates a random labyrinth inside the specified area, borders exclusive.
// This is a recursive implementation (recursive division): each iteration
// splits the area into 2 parts with a wall that has a single hole in it.
func genLabArea(x1, y1, x2, y2 int) {
	dx, dy := x2-x1, y2-y1
	// Exit condition from the recursion: area too small to split further.
	if dx <= 2 || dy <= 2 {
		return
	}

	// Decide if we do a vertical or horizontal split: split across the longer
	// dimension to keep the sub-areas roughly square.
	var vert bool
	if dy > dx {
		vert = false
	} else if dx > dy {
		vert = true
	} else if rand.Intn(2) == 0 { // Area is square, choose randomly
		vert = true
	}

	if vert {
		// Add vertical split
		var x int
		if dx > 6 { // To avoid long straight paths, only use random in smaller areas
			x = midWallPos(x1, x2)
		} else {
			x = rWallPos(x1, x2)
		}
		// A hole in it:
		y := rPassPos(y1, y2)
		for i := y1; i <= y2; i++ {
			if i != y {
				Lab[i][x] = BlockWall
			}
		}
		genLabArea(x1, y1, x, y2)
		genLabArea(x, y1, x2, y2)
	} else {
		// Add horizontal split
		var y int
		if dy > 6 { // To avoid long straight paths, only use random in smaller areas
			y = midWallPos(y1, y2)
		} else {
			y = rWallPos(y1, y2)
		}
		// A hole in it:
		x := rPassPos(x1, x2)
		for i := x1; i <= x2; i++ {
			if i != x {
				Lab[y][i] = BlockWall
			}
		}
		genLabArea(x1, y1, x2, y)
		genLabArea(x1, y, x2, y2)
	}
}
// rWallPos returns a random wall position which is an even number between the specified min and max.
// Assumes min is even and max-min >= 4 — TODO confirm at call sites.
func rWallPos(min, max int) int {
	return min + (rand.Intn((max-min)/2-1)+1)*2
}
// midWallPos returns the wall position at the middle of the specified min
// and max, rounded down to an even number.
func midWallPos(min, max int) int {
	mid := (min + max) / 2
	// Clearing the lowest bit forces an even value (equivalent to
	// decrementing an odd mid).
	return mid &^ 0x01
}
// rPassPos returns a random passage position which is an odd number between the specified min and max.
// It reuses rWallPos with an extended range, shifted down by one to land on
// odd coordinates.
func rPassPos(min, max int) int {
	return rWallPos(min, max+2) - 1
}
package discount
// This file contains all business rules for calculating discounts.
import (
"math"
"github.com/nglogic/go-application-guide/internal/app/bikerental"
)
// newBikeWeightDiscount returns discount for individual customers based on
// reservation value and bike weight.
// Discount rules:
//  - individual customers only
//  - bike weight >= 15kg
//  - maximum discount is 20% of reservation value.
func newBikeWeightDiscount(resValue int, customer bikerental.Customer, bike bikerental.Bike) bikerental.Discount {
	if customer.Type != bikerental.CustomerTypeIndividual || bike.Weight < 15 {
		return bikerental.Discount{}
	}
	// One discount percent per kilogram above 15kg, capped at 20%.
	percent := bike.Weight - 15.0
	if percent > 20 {
		percent = 20
	}
	amount := math.Round((percent / 100.0) * float64(resValue))
	return bikerental.Discount{Amount: int(amount)}
}
// newTemperatureDiscount creates a discount based on weather.
// Discount rules:
//  - individual customers only
//  - low outside temperature (below 10 degrees): flat 5% of the reservation value.
func newTemperatureDiscount(resValue int, customer bikerental.Customer, weather *bikerental.Weather) bikerental.Discount {
	switch {
	case customer.Type != bikerental.CustomerTypeIndividual:
		return bikerental.Discount{}
	case weather == nil || weather.Temperature >= 10:
		return bikerental.Discount{}
	}
	return bikerental.Discount{
		Amount: int(math.Round(float64(resValue) * 0.05)),
	}
}
// newIncidentsDiscount creates a discount based on incidents in the neighborhood.
// Discount rules:
//  - individual customers only
//  - at least 3 incidents present: 5% for 3-4 incidents, 10% for 5 or more.
func newIncidentsDiscount(resValue int, customer bikerental.Customer, incidents *bikerental.BikeIncidentsInfo) bikerental.Discount {
	if customer.Type != bikerental.CustomerTypeIndividual {
		return bikerental.Discount{}
	}
	if incidents == nil || incidents.NumberOfIncidents < 3 {
		return bikerental.Discount{}
	}
	percent := 5.0
	if incidents.NumberOfIncidents >= 5 {
		percent = 10.0
	}
	return bikerental.Discount{
		Amount: int(math.Round(
			float64(resValue) * (percent / 100.0),
		)),
	}
}
// selectOptimalDiscount chooses one discount that should be applied.
// Rules:
// - select discount with greatest value.
func selectOptimalDiscount(discounts ...bikerental.Discount) bikerental.Discount {
maxAmount := -math.MaxInt64
var result bikerental.Discount
for _, d := range discounts {
if d.Amount > maxAmount {
result = d
maxAmount = d.Amount
}
}
return result
} | internal/app/bikerental/discount/rulesindividual.go | 0.798187 | 0.468122 | rulesindividual.go | starcoder |
package coerce
import (
"encoding/json"
"fmt"
"strings"
"github.com/project-flogo/core/data"
)
// init registers ToType as the attribute type converter for the data package.
func init() {
	data.SetAttributeTypeConverter(ToType)
}

// NewTypedValue coerces value to dataType and wraps the result in a
// data.TypedValue. It returns an error when the coercion fails.
func NewTypedValue(dataType data.Type, value interface{}) (data.TypedValue, error) {
	newVal, err := ToType(value, dataType)
	if err != nil {
		return nil, err
	}
	return data.NewTypedValue(dataType, newVal), nil
}
// ToType coerce a value to the specified type by dispatching to the matching
// To* helper. Values of TypeUnknown are passed through unchanged; a value
// whose dataType matches no case coerces to nil.
func ToType(value interface{}, dataType data.Type) (interface{}, error) {
	var coerced interface{}
	var err error
	switch dataType {
	case data.TypeAny:
		coerced, err = ToAny(value)
	case data.TypeString:
		coerced, err = ToString(value)
	case data.TypeInt:
		coerced, err = ToInt(value)
	case data.TypeInt32:
		coerced, err = ToInt32(value)
	case data.TypeInt64:
		coerced, err = ToInt64(value)
	case data.TypeFloat32:
		coerced, err = ToFloat32(value)
	case data.TypeFloat64:
		coerced, err = ToFloat64(value)
	case data.TypeBool:
		coerced, err = ToBool(value)
	case data.TypeBytes:
		coerced, err = ToBytes(value)
	case data.TypeParams:
		coerced, err = ToParams(value)
	case data.TypeObject:
		coerced, err = ToObject(value)
	case data.TypeArray:
		coerced, err = ToArrayIfNecessary(value)
	case data.TypeComplexObject:
		coerced, err = CoerceToComplexObject(value)
	case data.TypeUnknown:
		coerced = value
	}
	if err != nil {
		return nil, err
	}
	return coerced, nil
}
// ToAny coerces a value to a generic value. json.Number values are resolved
// to float64 or int64; everything else is returned unchanged.
func ToAny(val interface{}) (interface{}, error) {
	num, ok := val.(json.Number)
	if !ok {
		return val, nil
	}
	// json.Number keeps the raw literal; a decimal point marks a float.
	if strings.Contains(num.String(), ".") {
		return num.Float64()
	}
	return num.Int64()
}
//DEPRECATED
// CoerceToComplexObject coerce a value to a complex object.
// nil and "" coerce to an empty complex object ({}). Strings are parsed as
// JSON, generic maps are round-tripped through JSON onto the struct, and
// existing *data.ComplexObject values are passed through. Anything else is
// an error.
func CoerceToComplexObject(val interface{}) (*data.ComplexObject, error) {
	//If the val is nil then just return empty struct
	var emptyComplexObject = &data.ComplexObject{Value: "{}"}
	if val == nil {
		return emptyComplexObject, nil
	}
	switch t := val.(type) {
	case string:
		if val == "" {
			return emptyComplexObject, nil
		} else {
			complexObject := &data.ComplexObject{}
			err := json.Unmarshal([]byte(t), complexObject)
			if err != nil {
				return nil, err
			}
			return handleComplex(complexObject), nil
		}
	case map[string]interface{}:
		// Round-trip through JSON to map the generic map onto the struct.
		v, err := json.Marshal(val)
		if err != nil {
			return nil, err
		}
		complexObject := &data.ComplexObject{}
		err = json.Unmarshal(v, complexObject)
		if err != nil {
			return nil, err
		}
		return handleComplex(complexObject), nil
	case *data.ComplexObject:
		return handleComplex(val.(*data.ComplexObject)), nil
	default:
		return nil, fmt.Errorf("unable to coerce %#v to complex object", val)
	}
}

// handleComplex normalizes a complex object in place, defaulting an empty
// Value to "{}". A nil argument is returned unchanged.
func handleComplex(complex *data.ComplexObject) *data.ComplexObject {
	if complex != nil {
		if complex.Value == "" {
			complex.Value = "{}"
		}
	}
	return complex
}
package json
import (
"context"
"encoding/json"
"fmt"
"io"
"github.com/pbanos/botanic/feature"
"github.com/pbanos/botanic/tree"
)
/*
WriteJSONTree takes a context.Context, a pointer to a tree.Tree and an
io.Writer and serializes the given tree as JSON onto the io.Writer.
A tree is serialized as a JSON object with the following fields:
* "rootID": a string with the ID of the node at the root of the tree
* "classFeature": a string with the name of the feature the tree predicts
* "nodes": an array containing the nodes that can be traversed on the tree
serialized by MarshalJSONNode.
An error is returned if the tree cannot be traversed, serialized or written
onto the io.Writer.
*/
func WriteJSONTree(ctx context.Context, t *tree.Tree, w io.Writer) error {
	err := marshalJSONTreeHeader(ctx, t, w)
	if err != nil {
		return err
	}
	// Stream every node as an element of the "nodes" array; i tracks whether
	// a comma separator must precede the element.
	var i int
	err = t.Traverse(ctx, false, func(ctx context.Context, n *tree.Node) error {
		err := writeNode(ctx, i, n, w)
		i++
		return err
	})
	if err != nil {
		return err
	}
	return marshalJSONTreeFooter(ctx, t, w)
}
/*
ReadJSONTree takes a context.Context, a pointer to a tree.Tree, a slice of
features and an io.Reader and unmarshals the contents of the io.Reader onto
the given tree.
A tree is expected to be a JSON object with the following fields:
* "rootID": a string with the ID of the node at the root of the tree
* "classFeature": a string with the name of the feature the tree predicts
* "nodes": an array containing the nodes that can be traversed on the tree
unmarshalled by UnmarshalJSONNodeWithFeatures.
An error is returned if the JSON cannot be read from the io.Reader or
unmarshalled onto the tree.
*/
func ReadJSONTree(ctx context.Context, t *tree.Tree, features []feature.Feature, r io.Reader) error {
	dec := json.NewDecoder(r)
	// Decode the envelope; nodes are kept raw so that each one can later be
	// unmarshalled with knowledge of the feature set.
	jt := &struct {
		RootID       string             `json:"rootID"`
		ClassFeature string             `json:"classFeature"`
		Nodes        []*json.RawMessage `json:"nodes"`
	}{}
	err := dec.Decode(jt)
	if err != nil {
		return err
	}
	// Resolve the class feature by name against the provided features.
	var cf feature.Feature
	for _, f := range features {
		if f.Name() == jt.ClassFeature {
			cf = f
			break
		}
	}
	if cf == nil {
		return fmt.Errorf("no class feature defined")
	}
	if jt.RootID == "" {
		return fmt.Errorf("no root node id available")
	}
	t.ClassFeature = cf
	t.RootID = jt.RootID
	// Unmarshal each raw node and persist it in the tree's node store.
	for _, jn := range jt.Nodes {
		n := &tree.Node{}
		err = UnmarshalJSONNodeWithFeatures(n, *jn, features)
		if err != nil {
			return err
		}
		err = t.NodeStore.Store(ctx, n)
		if err != nil {
			return err
		}
	}
	return nil
}
// marshalJSONTreeHeader writes the opening of the tree's JSON object onto w:
// the rootID and classFeature fields plus the start of the "nodes" array.
func marshalJSONTreeHeader(ctx context.Context, t *tree.Tree, w io.Writer) error {
	jrootID, err := json.Marshal(t.RootID)
	if err != nil {
		return err
	}
	// Marshal the feature name too, so it is properly quoted and escaped.
	jFeatureName, err := json.Marshal(t.ClassFeature.Name())
	if err != nil {
		return err
	}
	header := fmt.Sprintf(`{"rootID":%s,"classFeature":%s,"nodes":[`, jrootID, jFeatureName)
	_, err = w.Write([]byte(header))
	return err
}
// writeNode serializes node n onto w, writing a comma separator first for
// every element after the initial one (i != 0) so the enclosing JSON array
// stays well-formed.
func writeNode(ctx context.Context, i int, n *tree.Node, w io.Writer) error {
	if i != 0 {
		if _, err := w.Write([]byte(",")); err != nil {
			return err
		}
	}
	jn, err := MarshalJSONNode(n)
	if err != nil {
		return err
	}
	_, err = w.Write(jn)
	return err
}
// marshalJSONTreeFooter closes the "nodes" array and the tree JSON object
// opened by marshalJSONTreeHeader.
func marshalJSONTreeFooter(ctx context.Context, t *tree.Tree, w io.Writer) error {
	_, err := w.Write([]byte(`]}`))
	return err
}
package financial
import (
"fmt"
)
// Order side identifiers.
const (
	SideSell = "SELL"
	SideBuy  = "BUY"
)

// Budget is the interface that manages the budget granted to a strategy.
// A strategy is granted an initial amount of a base coin and alt coin; with
// that the strategy starts working, and in the event of positive trades these
// amounts should increase over time. If the strategy is plain trash and loses
// money, at some point the budget is exhausted and the strategy stops.
type Budget interface {
	// Close prepares the position based on what the Budget determines so it
	// is executed (closed) further down the line.
	Close(o Position, c Candle) error
	// Open prepares the position based on what the Budget determines so it
	// is executed (opened) further down the line. It returns the quantity to
	// trade.
	Open(o Position, c Candle) (float64, error)
}

// Position is a simplified trading position. For the budget purpose, only
// status and side are relevant.
type Position struct {
	// Side is SideBuy or SideSell.
	Side string
	// Status is the position's lifecycle state (e.g. "closed").
	Status string
}
// BaseBudget holds the funds a strategy may spend and the trade size it
// uses, and is embedded by the concrete budget implementations below.
type BaseBudget struct {
	// Base is the amount of the base coin being traded (e.g. ETH, BTC).
	Base float64
	// Alt is the amount of the alt/quote coin being traded (e.g. USDT, BUSD).
	Alt float64
	// Lot is the amount of Base committed on every order.
	Lot float64
}

// FixBudget always trades a fixed amount: with Lot = 0.2, every buy order
// buys 0.2 of the base coin at close price and every sell order sells 0.2
// at close price.
type FixBudget struct {
	BaseBudget
}

// RatioBudget operates on a percentage of the current base coin. With
// Lot = 0.6 and Base = 2, opening a sell order sells 1.2 and opening a buy
// order buys 1.2; closing attempts to trade back what the order targeted.
type RatioBudget struct {
	BaseBudget
}

// empty reports whether the budget is effectively exhausted: either the
// base coin or the alt coin balance is at (or very close to) zero.
func (b BaseBudget) empty() bool {
	if b.Base <= 0.001 {
		return true
	}
	return b.Alt <= 1
}
// Open reserves funds for a new position and returns the quantity (Lot)
// the position should use to open an order.
//
// A sell order releases Lot of the base coin in exchange for alt coin at
// the candle's close price; a buy order does the opposite. If the trade
// would exhaust the budget (see BaseBudget.empty), an error is returned
// and the budget is rolled back so a rejected trade no longer corrupts the
// balances — previously the mutation was kept even on failure.
func (b *FixBudget) Open(p Position, c Candle) (float64, error) {
	prevBase, prevAlt := b.Base, b.Alt
	switch p.Side {
	case SideSell:
		b.Base -= b.Lot
		b.Alt += b.Lot * c.ClosePrice
	case SideBuy:
		b.Base += b.Lot
		b.Alt -= b.Lot * c.ClosePrice
	}
	if b.empty() {
		// Report the would-be balances (matching the original message),
		// then restore the pre-trade state.
		base, alt := b.Base, b.Alt
		b.Base, b.Alt = prevBase, prevAlt
		return 0.0, fmt.Errorf("not enough in fund to buy. In fund: B[%v] T[%v]", base, alt)
	}
	return b.Lot, nil
}
// Close settles an open position, reversing the balance movement made by
// Open: closing a sell buys Lot back, closing a buy sells Lot, both at the
// candle's close price. Closing an already-closed position is an error.
// If the settlement would exhaust the budget, an error is returned and the
// budget is rolled back so the failed close no longer corrupts the
// balances — previously the mutation was kept even on failure.
func (b *FixBudget) Close(p Position, c Candle) error {
	if p.Status == "closed" {
		return fmt.Errorf("can't close an order that is already closed")
	}
	prevBase, prevAlt := b.Base, b.Alt
	switch p.Side {
	case SideSell:
		b.Base += b.Lot
		b.Alt -= b.Lot * c.ClosePrice
	case SideBuy:
		b.Base -= b.Lot
		b.Alt += b.Lot * c.ClosePrice
	}
	if b.empty() {
		base, alt := b.Base, b.Alt
		b.Base, b.Alt = prevBase, prevAlt
		return fmt.Errorf("not enough in fund to buy. In fund: B[%v] T[%v]", base, alt)
	}
	return nil
}
// String implements fmt.Stringer, rendering the lot size and both balances.
func (b *FixBudget) String() string {
	out := fmt.Sprintf("[lot: %f, base: %f, alt: %f]", b.Lot, b.Base, b.Alt)
	return out
}
func (b *RatioBudget) Open(p *Position, c Candle) error {
openTether := c.ClosePrice * (b.Lot * b.Base)
openBase := b.Lot * b.Base
if p.Side == SideSell {
b.Base -= openBase
b.Alt += openTether
}
if p.Side == SideBuy {
b.Base += openBase
b.Alt -= openTether
}
if b.empty() {
return fmt.Errorf("not enough in fund to buy. In fund: B[%v] T[%v]", b.Base, b.Alt)
}
return nil
} | business/strategies/financial/budget.go | 0.617743 | 0.491517 | budget.go | starcoder |
package main
import (
"fmt"
"log"
"math"
"strconv"
"strings"
)
// Heading is the compass direction the avatar currently faces.
type Heading uint8

// Compass directions; the zero value is NORTH.
const (
	NORTH Heading = iota
	EAST
	WEST
	SOUTH
)

// Rotation is the turn direction (left or right) of a navigational step.
type Rotation uint8

// Turn directions; the zero value is LEFT.
const (
	LEFT Rotation = iota
	RIGHT
)

// Position is the x, y location of the avatar on the street grid.
type Position struct {
	x, y int
}

// parseInstruction decodes a single token such as "L20" or "R2" into the
// turn direction and the number of blocks to walk after turning.
// Malformed tokens abort the program via log.Fatal.
func parseInstruction(ins string) (Rotation, int) {
	var rot Rotation
	switch ins[0] {
	case 'R':
		rot = RIGHT
	case 'L':
		rot = LEFT
	default:
		log.Fatal("Unable to parse instruction")
	}
	length, err := strconv.Atoi(ins[1:])
	if err != nil {
		log.Fatal(err)
	}
	return rot, length
}
// update rotates the heading 90 degrees in the given direction, using
// table-driven transitions. An unknown heading is left unchanged, matching
// the original switch behaviour.
func (head *Heading) update(rot Rotation) {
	left := map[Heading]Heading{NORTH: WEST, WEST: SOUTH, SOUTH: EAST, EAST: NORTH}
	right := map[Heading]Heading{NORTH: EAST, EAST: SOUTH, SOUTH: WEST, WEST: NORTH}
	var table map[Heading]Heading
	switch rot {
	case LEFT:
		table = left
	case RIGHT:
		table = right
	default:
		return
	}
	if next, ok := table[*head]; ok {
		*head = next
	}
}
// update walks the avatar one block at a time for `distance` blocks in the
// given heading, dropping a breadcrumb into `visited` at every point
// passed. If the walk ever lands on a point already present in `visited`,
// it stops immediately and returns true (destination reached); otherwise
// it returns false after the full distance.
func (pos *Position) update(head Heading, distance int, visited map[Position]bool) bool {
	dx, dy := 0, 0
	switch head {
	case NORTH:
		dy = 1
	case SOUTH:
		dy = -1
	case EAST:
		dx = 1
	case WEST:
		dx = -1
	}
	for step := 0; step < distance; step++ {
		pos.x += dx
		pos.y += dy
		if visited[*pos] {
			return true
		}
		visited[*pos] = true
	}
	return false
}
// distance returns the Manhattan distance of the position from the origin.
func (pos Position) distance() int {
	dx := math.Abs(float64(pos.x))
	dy := math.Abs(float64(pos.y))
	return int(dx + dy)
}
// findDistance follows the comma-separated instruction string and returns
// a Manhattan distance from the origin. If the walk ever crosses a
// previously visited intersection, the distance of that first repeat is
// returned; otherwise the distance of the final position is returned.
// It implements the core algorithm of Advent of Code 2016 day 1.
func findDistance(path string) int {
	var (
		position Position
		heading  Heading
	)
	visited := map[Position]bool{position: true}
	for _, token := range strings.Split(path, ", ") {
		rotation, distance := parseInstruction(token)
		heading.update(rotation)
		if position.update(heading, distance, visited) {
			break
		}
	}
	return position.distance()
}
// main() includes pre-baked versions of the strings that were given as examples, as well as the real string.
func main() {
paths := [...]string{"R8, R4, R4, R8", "R2, L3", "R2, R2, R2", "R5, L5, R5, R3", "L5, R1, R4, L5, L4, R3, R1, L1, R4, R5, L1, L3, R4, L2, L4, R2, L4, L1, R3, R1, R1, L1, R1, L5, R5, R2, L5, R2, R1, L2, L4, L4, R191, R2, R5, R1, L1, L2, R5, L2, L3, R4, L1, L1, R1, R50, L1, R1, R76, R5, R4, R2, L5, L3, L5, R2, R1, L1, R2, L3, R4, R2, L1, L1, R4, L1, L1, R185, R1, L5, L4, L5, L3, R2, R3, R1, L5, R1, L3, L2, L2, R5, L1, L1, L3, R1, R4, L2, L1, L1, L3, L4, R5, L2, R3, R5, R1, L4, R5, L3, R3, R3, R1, R1, R5, R2, L2, R5, L5, L4, R4, R3, R5, R1, L3, R1, L2, L2, R3, R4, L1, R4, L1, R4, R3, L1, L4, L1, L5, L2, R2, L1, R1, L5, L3, R4, L1, R5, L5, L5, L1, L3, R1, R5, L2, L4, L5, L1, L1, L2, R5, R5, L4, R3, L2, L1, L3, L4, L5, L5, L2, R4, R3, L5, R4, R2, R1, L5"}
for i := range paths {
fmt.Printf("Path: '%v'\nDistance: %v\n\n", paths[i], findDistance(paths[i]))
}
} | day01/main.go | 0.678007 | 0.572842 | main.go | starcoder |
package flag
import (
"errors"
"flag"
"io"
"os"
"path/filepath"
"time"
)
// FlagSetEx wraps the standard flag.FlagSet and carries a prefix used when
// mapping flags onto environment variables (see ParseEnv).
type FlagSetEx struct {
	*flag.FlagSet
	envPrefix string
}

// DefaultConfigFlagName is the name of the flag whose value, when set,
// points at a config file that Parse will load.
var DefaultConfigFlagName = "config"

// ex is the package-level flag set all top-level helper functions
// delegate to. Parse errors are returned (flag.ContinueOnError) rather
// than terminating the program.
var ex = &FlagSetEx{
	flag.NewFlagSet(os.Args[0], flag.ContinueOnError),
	"",
}
// Bool defines a bool flag with the specified name and default value; the
// flag is registered with an empty usage string.
// The return value is the address of a bool variable that stores the value of the flag.
func Bool(name string, value bool) *bool {
	return ex.FlagSet.Bool(name, value, "")
}

// BoolVar defines a bool flag with specified name, default value, and usage string.
// The argument p points to a bool variable in which to store the value of the flag.
func BoolVar(p *bool, name string, value bool, usage string) {
	ex.FlagSet.BoolVar(p, name, value, usage)
}

// Int defines an int flag with the specified name and default value; the
// flag is registered with an empty usage string.
// The return value is the address of an int variable that stores the value of the flag.
func Int(name string, value int) *int {
	return ex.FlagSet.Int(name, value, "")
}

// IntVar defines an int flag with specified name, default value, and usage string.
// The argument p points to an int variable in which to store the value of the flag.
func IntVar(p *int, name string, value int, usage string) {
	ex.FlagSet.IntVar(p, name, value, usage)
}

// Int64 defines an int64 flag with the specified name and default value;
// the flag is registered with an empty usage string.
// The return value is the address of an int64 variable that stores the value of the flag.
func Int64(name string, value int64) *int64 {
	return ex.FlagSet.Int64(name, value, "")
}

// Int64Var defines an int64 flag with specified name, default value, and usage string.
// The argument p points to an int64 variable in which to store the value of the flag.
func Int64Var(p *int64, name string, value int64, usage string) {
	ex.FlagSet.Int64Var(p, name, value, usage)
}

// Uint defines a uint flag with the specified name and default value; the
// flag is registered with an empty usage string.
// The return value is the address of a uint variable that stores the value of the flag.
func Uint(name string, value uint) *uint {
	return ex.FlagSet.Uint(name, value, "")
}

// UintVar defines a uint flag with specified name, default value, and usage string.
// The argument p points to a uint variable in which to store the value of the flag.
func UintVar(p *uint, name string, value uint, usage string) {
	ex.FlagSet.UintVar(p, name, value, usage)
}

// Uint64 defines a uint64 flag with specified name, default value, and usage string.
// The return value is the address of a uint64 variable that stores the value of the flag.
func Uint64(name string, value uint64, usage string) *uint64 {
	return ex.FlagSet.Uint64(name, value, usage)
}

// Uint64Var defines a uint64 flag with specified name, default value, and usage string.
// The argument p points to a uint64 variable in which to store the value of the flag.
func Uint64Var(p *uint64, name string, value uint64, usage string) {
	ex.FlagSet.Uint64Var(p, name, value, usage)
}
// String defines a string flag with the specified name, default value, and
// usage string.
// The return value is the address of a string variable that stores the
// value of the flag.
func String(name string, value string, usage string) *string {
	// Pass the caller's usage text through; previously it was discarded
	// and replaced with "", which hid the flag's help description.
	return ex.FlagSet.String(name, value, usage)
}
// StringVar defines a string flag with specified name, default value, and usage string.
// The argument p points to a string variable in which to store the value of the flag.
func StringVar(p *string, name string, value string, usage string) {
	ex.FlagSet.StringVar(p, name, value, usage)
}

// Float64 defines a float64 flag with specified name, default value, and usage string.
// The return value is the address of a float64 variable that stores the value of the flag.
func Float64(name string, value float64, usage string) *float64 {
	return ex.FlagSet.Float64(name, value, usage)
}

// Float64Var defines a float64 flag with specified name, default value, and usage string.
// The argument p points to a float64 variable in which to store the value of the flag.
func Float64Var(p *float64, name string, value float64, usage string) {
	ex.FlagSet.Float64Var(p, name, value, usage)
}

// Duration defines a time.Duration flag with specified name, default value, and usage string.
// The return value is the address of a time.Duration variable that stores the value of the flag.
func Duration(name string, value time.Duration, usage string) *time.Duration {
	return ex.FlagSet.Duration(name, value, usage)
}

// DurationVar defines a time.Duration flag with specified name, default value, and usage string.
// The argument p points to a time.Duration variable in which to store the value of the flag.
func DurationVar(p *time.Duration, name string, value time.Duration, usage string) {
	ex.FlagSet.DurationVar(p, name, value, usage)
}

// Var defines a flag with the specified name and usage string. The type and
// value of the flag are represented by the first argument, of type Value, which
// typically holds a user-defined implementation of Value. For instance, the
// caller could create a flag that turns a comma-separated string into a slice
// of strings by giving the slice the methods of Value; in particular, Set would
// decompose the comma-separated string into the slice.
func Var(p flag.Value, name string, usage string) {
	ex.FlagSet.Var(p, name, usage)
}
// Parse parses the command line, then the environment variables, and
// finally — when the config flag is set — a config file, into the global
// flag set. It must be called after all flags have been defined and before
// they are accessed by the program. Only ".toml" config files are
// supported.
func Parse() error {
	if err := ex.FlagSet.Parse(os.Args[1:]); err != nil {
		return err
	}
	if err := ex.ParseEnv(os.Environ()); err != nil {
		return err
	}
	cf := ex.FlagSet.Lookup(DefaultConfigFlagName)
	if cf == nil {
		return nil
	}
	path := cf.Value.String()
	if len(path) == 0 {
		return nil
	}
	// The config file must exist and be a regular file.
	info, err := os.Stat(path)
	if err != nil || info.IsDir() {
		return errors.New("Invalid config file.")
	}
	switch filepath.Ext(path) {
	case ".toml":
		return ex.ParseTOML(path)
	default:
		return errors.New("Unsupported config file.")
	}
}
func SetOutput(output io.Writer) {
ex.FlagSet.SetOutput(output)
}
func Init(name string, errorHandling flag.ErrorHandling) {
ex.FlagSet.Init(name, errorHandling)
}
func Set(name, value string) error {
return ex.FlagSet.Set(name, value)
} | flag.go | 0.641647 | 0.438485 | flag.go | starcoder |
package moves
import (
"errors"
)
// ValidCounter is the contract used by the moves/count package and expected
// within groups in the move/groups package (for items like ParallelCount).
// currentCount is the counter's current value and length is the
// context-specific length of the important item, often the number of
// children in the parent group. A nil return means the count is valid and
// complete; otherwise a descriptive error is returned. Implementations are
// typically closures over configuration options.
type ValidCounter func(currentCount, length int) error

// anyFunc backs CountAny. There is no configuration to close over, so one
// shared function is returned every time, avoiding an allocation.
func anyFunc(currentCount, length int) error {
	if currentCount == 0 {
		return errors.New("Not enough count have occurred")
	}
	if currentCount == 1 {
		return nil
	}
	return errors.New("too many count have occurred")
}

// allFunc backs CountAll; shared for the same allocation-free reason.
func allFunc(currentCount, length int) error {
	switch {
	case currentCount < length:
		return errors.New("Not enough count have occurred")
	case currentCount == length:
		return nil
	default:
		return errors.New("too many count have occurred")
	}
}

// CountAny returns nil when currentCount is 1, denoting that any single
// item has matched. Equivalent to CountBetween(0,1).
func CountAny() ValidCounter {
	return anyFunc
}

// CountAll returns nil when currentCount is precisely length. Equivalent
// to CountBetween(0,-1). Not to be confused with CountInfinite; CountAll
// expects to see precisely all children matched.
func CountAll() ValidCounter {
	return allFunc
}

// infiniteFunc backs CountInfinite and accepts every count.
func infiniteFunc(currentCount, length int) error {
	return nil
}

// CountInfinite treats any count as legal. Not to be confused with
// CountAny (any single item matches) or CountAll (all children match).
func CountInfinite() ValidCounter {
	return infiniteFunc
}
// CountAtLeast returns a ValidCounter that accepts any count of min or
// greater.
func CountAtLeast(min int) ValidCounter {
	return func(currentCount, length int) error {
		if currentCount < min {
			return errors.New("currentCount not yet greater than min configuration")
		}
		return nil
	}
}
// CountAtMost returns a ValidCounter that accepts counts less than or
// equal to max. A max argument below 0 is interpreted to mean precisely
// the length parameter passed into the ValidCounter.
//
// NOTE(review): a negative max is overwritten with the length of the first
// call and that value sticks for subsequent calls — presumably length is
// constant per counter; confirm if counters are reused across lengths.
func CountAtMost(max int) ValidCounter {
	return func(currentCount, length int) error {
		if max < 0 {
			max = length
		}
		if currentCount > max {
			return errors.New("currentCount is greater than max configuration")
		}
		return nil
	}
}
// CountBetween returns a ValidCounter that accepts counts greater than or
// equal to min and less than or equal to max. A max argument below 0 is
// interpreted to mean precisely the length parameter passed into the
// ValidCounter (and, as with CountAtMost, the first call's length sticks).
func CountBetween(min, max int) ValidCounter {
	return func(currentCount, length int) error {
		if max < 0 {
			max = length
		}
		switch {
		case currentCount < min:
			return errors.New("Count below min")
		case currentCount > max:
			return errors.New("Count above max")
		default:
			return nil
		}
	}
}
//CountExactly returns nil if currentCount is precisely equaly to targetCount.
//Equivalent to CountBetween(targetCount,targetCount).
func CountExactly(targetCount int) ValidCounter {
return func(currentCount, length int) error {
if targetCount == currentCount {
return nil
}
if targetCount > currentCount {
return errors.New("currentCount is not yet targetCount")
}
return errors.New("currentCount has already passed targetCount")
}
} | moves/count.go | 0.822153 | 0.515315 | count.go | starcoder |
package main
import "fmt"
// area is the track grid, indexed as area[y][x].
type area [][]*tile

// newArea builds a maxX-by-maxY grid of tiles, each constructed with a
// reference back to the grid it belongs to.
func newArea(maxX, maxY int) area {
	grid := make(area, maxY)
	for y := range grid {
		grid[y] = make([]*tile, maxX)
		for x := range grid[y] {
			grid[y][x] = newTile(x, y, grid)
		}
	}
	return grid
}
// isOut reports whether (x, y) lies outside the grid. The row check runs
// before the column check, so a[0] is only read when at least one row
// exists.
func (a area) isOut(x, y int) bool {
	if x < 0 || y < 0 {
		return true
	}
	if y >= len(a) {
		return true
	}
	return x >= len(a[0])
}
// get returns the tile at (x, y), or nil when the coordinate is out of
// bounds.
func (a area) get(x, y int) *tile {
	if !a.isOut(x, y) {
		return a[y][x]
	}
	return nil
}
// parseRunes interprets one input rune for the tile at (x, y). Cart glyphs
// ('<', '>', '^', 'v') spawn a train heading that way and restore the
// track piece hidden underneath it ('-' or '|'); every other rune is
// stored as the tile's visual as-is.
func (a area) parseRunes(x, y int, r rune) {
	t := a.get(x, y)
	switch r {
	case '<', '>':
		t.visual = '-'
		if r == '<' {
			t.train = newTrain(west, t)
		} else {
			t.train = newTrain(east, t)
		}
	case '^', 'v':
		t.visual = '|'
		if r == '^' {
			t.train = newTrain(north, t)
		} else {
			t.train = newTrain(south, t)
		}
	default:
		t.visual = r
	}
}
// connectNeighbours wires each tile to the neighbours its track piece can
// lead to, based on the tile's visual: straights ('-', '|') connect along
// one axis, intersections ('+') connect all four ways, and curves
// ('/', '\') are disambiguated by inspecting which adjacent tiles carry a
// compatible track piece. A curve with no consistent orientation panics,
// since the input map would be malformed.
func (a area) connectNeighbours() {
	for _, row := range a {
		for _, tile := range row {
			switch tile.visual {
			case '-':
				tile.allowNeighbours(east, west)
			case '|':
				tile.allowNeighbours(north, south)
			case '+':
				tile.allowNeighbours(east, west, north, south)
			case '/':
				// Neighbour order appears to be north, west, south, east
				// (variable names) — determined by getNeighbours.
				n, w, s, e := tile.getNeighbours()
				// '/' either joins north+west or south+east, depending on
				// which pair carries vertical/horizontal track.
				if n != nil && w != nil && (n.visual == '|' || n.visual == '+') && (w.visual == '-' || w.visual == '+') {
					tile.allowNeighbours(north, west)
				} else if s != nil && e != nil && (s.visual == '|' || s.visual == '+') && (e.visual == '-' || e.visual == '+') {
					tile.allowNeighbours(south, east)
				} else {
					panic(fmt.Sprintf("imposible curve track '/' on (%d,%d)", tile.x, tile.y))
				}
			case '\\':
				n, w, s, e := tile.getNeighbours()
				// '\' either joins north+east or south+west.
				if n != nil && e != nil && (n.visual == '|' || n.visual == '+') && (e.visual == '-' || e.visual == '+') {
					tile.allowNeighbours(north, east)
				} else if s != nil && w != nil && (s.visual == '|' || s.visual == '+') && (w.visual == '-' || w.visual == '+') {
					tile.allowNeighbours(south, west)
				} else {
					panic(fmt.Sprintf("imposible curve track '\\' on (%d,%d)", tile.x, tile.y))
				}
			}
		}
	}
}
// moveTrains advances every active train one step. The train list is
// snapshotted first so each train moves at most once per tick, and trains
// crashed earlier in the same tick are skipped.
func (a area) moveTrains() {
	for _, tr := range a.getTrains() {
		if tr.isCrashed {
			continue
		}
		tr.move()
	}
}
// getTrains collects every non-crashed train currently on the grid,
// scanning row by row. The result is never nil (an empty slice when no
// trains remain).
func (a area) getTrains() []*train {
	found := []*train{}
	for _, row := range a {
		for _, t := range row {
			if t.train == nil || t.train.isCrashed {
				continue
			}
			found = append(found, t.train)
		}
	}
	return found
}
func (a area) print() {
for _, row := range a {
for _, tile := range row {
if tile.train == nil {
fmt.Print(string(tile.visual))
} else {
fmt.Print(tile.train)
}
}
fmt.Println()
}
} | day13b/area.go | 0.555918 | 0.40486 | area.go | starcoder |
package insts
// Reg is the representation of a register
type Reg struct {
	// RegType identifies the register within the RegType enumeration.
	RegType RegType
	// Name is the assembly-level register name, e.g. "v0" or "exec".
	Name string
	// ByteSize is the register's size in bytes (1, 4, or 8 in the Regs table).
	ByteSize int
	// IsBool marks the 1-bit status registers (EXECZ, VCCZ, SCC in the Regs table).
	IsBool bool
}
// VReg returns a vector register object given a certain index.
// No bounds check is performed; index is expected to be in [0, 255].
func VReg(index int) *Reg {
	return Regs[V0+RegType(index)]
}

// SReg returns a scalar register object given a certain index.
// No bounds check is performed; index is expected to be in [0, 101].
func SReg(index int) *Reg {
	return Regs[S0+RegType(index)]
}

// IsVReg checks if a register is a vector register (V0 through V255).
func (r *Reg) IsVReg() bool {
	return r.RegType >= V0 && r.RegType <= V255
}

// IsSReg checks if a register is a scalar register (S0 through S101).
func (r *Reg) IsSReg() bool {
	return r.RegType >= S0 && r.RegType <= S101
}

// RegIndex returns the index of the register in the s-series or the
// v-series. If the register is not an s or v register, -1 is returned.
func (r *Reg) RegIndex() int {
	if r.IsSReg() {
		return int(r.RegType - S0)
	} else if r.IsVReg() {
		return int(r.RegType - V0)
	}
	return -1
}
// RegType is the register type
type RegType int
// All the registers
const (
InvalidRegType = iota
PC
V0
V1
V2
V3
V4
V5
V6
V7
V8
V9
V10
V11
V12
V13
V14
V15
V16
V17
V18
V19
V20
V21
V22
V23
V24
V25
V26
V27
V28
V29
V30
V31
V32
V33
V34
V35
V36
V37
V38
V39
V40
V41
V42
V43
V44
V45
V46
V47
V48
V49
V50
V51
V52
V53
V54
V55
V56
V57
V58
V59
V60
V61
V62
V63
V64
V65
V66
V67
V68
V69
V70
V71
V72
V73
V74
V75
V76
V77
V78
V79
V80
V81
V82
V83
V84
V85
V86
V87
V88
V89
V90
V91
V92
V93
V94
V95
V96
V97
V98
V99
V100
V101
V102
V103
V104
V105
V106
V107
V108
V109
V110
V111
V112
V113
V114
V115
V116
V117
V118
V119
V120
V121
V122
V123
V124
V125
V126
V127
V128
V129
V130
V131
V132
V133
V134
V135
V136
V137
V138
V139
V140
V141
V142
V143
V144
V145
V146
V147
V148
V149
V150
V151
V152
V153
V154
V155
V156
V157
V158
V159
V160
V161
V162
V163
V164
V165
V166
V167
V168
V169
V170
V171
V172
V173
V174
V175
V176
V177
V178
V179
V180
V181
V182
V183
V184
V185
V186
V187
V188
V189
V190
V191
V192
V193
V194
V195
V196
V197
V198
V199
V200
V201
V202
V203
V204
V205
V206
V207
V208
V209
V210
V211
V212
V213
V214
V215
V216
V217
V218
V219
V220
V221
V222
V223
V224
V225
V226
V227
V228
V229
V230
V231
V232
V233
V234
V235
V236
V237
V238
V239
V240
V241
V242
V243
V244
V245
V246
V247
V248
V249
V250
V251
V252
V253
V254
V255
S0
S1
S2
S3
S4
S5
S6
S7
S8
S9
S10
S11
S12
S13
S14
S15
S16
S17
S18
S19
S20
S21
S22
S23
S24
S25
S26
S27
S28
S29
S30
S31
S32
S33
S34
S35
S36
S37
S38
S39
S40
S41
S42
S43
S44
S45
S46
S47
S48
S49
S50
S51
S52
S53
S54
S55
S56
S57
S58
S59
S60
S61
S62
S63
S64
S65
S66
S67
S68
S69
S70
S71
S72
S73
S74
S75
S76
S77
S78
S79
S80
S81
S82
S83
S84
S85
S86
S87
S88
S89
S90
S91
S92
S93
S94
S95
S96
S97
S98
S99
S100
S101
EXEC
EXECLO
EXECHI
EXECZ
VCC
VCCLO
VCCHI
VCCZ
SCC
FlatSratch
FlatSratchLo
FlatSratchHi
XnackMask
XnackMaskLo
XnackMaskHi
Status
Mode
M0
Trapsts
Tba
TbaLo
TbaHi
Tma
TmaLo
TmaHi
Timp0
Timp1
Timp2
Timp3
Timp4
Timp5
Timp6
Timp7
Timp8
Timp9
Timp10
Timp11
VMCNT
EXPCNT
LGKMCNT
)
// Regs are a list of all registers
var Regs = map[RegType]*Reg{
InvalidRegType: {InvalidRegType, "invalidregtype", 0, false},
PC: {PC, "pc", 8, false},
V0: {V0, "v0", 4, false},
V1: {V1, "v1", 4, false},
V2: {V2, "v2", 4, false},
V3: {V3, "v3", 4, false},
V4: {V4, "v4", 4, false},
V5: {V5, "v5", 4, false},
V6: {V6, "v6", 4, false},
V7: {V7, "v7", 4, false},
V8: {V8, "v8", 4, false},
V9: {V9, "v9", 4, false},
V10: {V10, "v10", 4, false},
V11: {V11, "v11", 4, false},
V12: {V12, "v12", 4, false},
V13: {V13, "v13", 4, false},
V14: {V14, "v14", 4, false},
V15: {V15, "v15", 4, false},
V16: {V16, "v16", 4, false},
V17: {V17, "v17", 4, false},
V18: {V18, "v18", 4, false},
V19: {V19, "v19", 4, false},
V20: {V20, "v20", 4, false},
V21: {V21, "v21", 4, false},
V22: {V22, "v22", 4, false},
V23: {V23, "v23", 4, false},
V24: {V24, "v24", 4, false},
V25: {V25, "v25", 4, false},
V26: {V26, "v26", 4, false},
V27: {V27, "v27", 4, false},
V28: {V28, "v28", 4, false},
V29: {V29, "v29", 4, false},
V30: {V30, "v30", 4, false},
V31: {V31, "v31", 4, false},
V32: {V32, "v32", 4, false},
V33: {V33, "v33", 4, false},
V34: {V34, "v34", 4, false},
V35: {V35, "v35", 4, false},
V36: {V36, "v36", 4, false},
V37: {V37, "v37", 4, false},
V38: {V38, "v38", 4, false},
V39: {V39, "v39", 4, false},
V40: {V40, "v40", 4, false},
V41: {V41, "v41", 4, false},
V42: {V42, "v42", 4, false},
V43: {V43, "v43", 4, false},
V44: {V44, "v44", 4, false},
V45: {V45, "v45", 4, false},
V46: {V46, "v46", 4, false},
V47: {V47, "v47", 4, false},
V48: {V48, "v48", 4, false},
V49: {V49, "v49", 4, false},
V50: {V50, "v50", 4, false},
V51: {V51, "v51", 4, false},
V52: {V52, "v52", 4, false},
V53: {V53, "v53", 4, false},
V54: {V54, "v54", 4, false},
V55: {V55, "v55", 4, false},
V56: {V56, "v56", 4, false},
V57: {V57, "v57", 4, false},
V58: {V58, "v58", 4, false},
V59: {V59, "v59", 4, false},
V60: {V60, "v60", 4, false},
V61: {V61, "v61", 4, false},
V62: {V62, "v62", 4, false},
V63: {V63, "v63", 4, false},
V64: {V64, "v64", 4, false},
V65: {V65, "v65", 4, false},
V66: {V66, "v66", 4, false},
V67: {V67, "v67", 4, false},
V68: {V68, "v68", 4, false},
V69: {V69, "v69", 4, false},
V70: {V70, "v70", 4, false},
V71: {V71, "v71", 4, false},
V72: {V72, "v72", 4, false},
V73: {V73, "v73", 4, false},
V74: {V74, "v74", 4, false},
V75: {V75, "v75", 4, false},
V76: {V76, "v76", 4, false},
V77: {V77, "v77", 4, false},
V78: {V78, "v78", 4, false},
V79: {V79, "v79", 4, false},
V80: {V80, "v80", 4, false},
V81: {V81, "v81", 4, false},
V82: {V82, "v82", 4, false},
V83: {V83, "v83", 4, false},
V84: {V84, "v84", 4, false},
V85: {V85, "v85", 4, false},
V86: {V86, "v86", 4, false},
V87: {V87, "v87", 4, false},
V88: {V88, "v88", 4, false},
V89: {V89, "v89", 4, false},
V90: {V90, "v90", 4, false},
V91: {V91, "v91", 4, false},
V92: {V92, "v92", 4, false},
V93: {V93, "v93", 4, false},
V94: {V94, "v94", 4, false},
V95: {V95, "v95", 4, false},
V96: {V96, "v96", 4, false},
V97: {V97, "v97", 4, false},
V98: {V98, "v98", 4, false},
V99: {V99, "v99", 4, false},
V100: {V100, "v100", 4, false},
V101: {V101, "v101", 4, false},
V102: {V102, "v102", 4, false},
V103: {V103, "v103", 4, false},
V104: {V104, "v104", 4, false},
V105: {V105, "v105", 4, false},
V106: {V106, "v106", 4, false},
V107: {V107, "v107", 4, false},
V108: {V108, "v108", 4, false},
V109: {V109, "v109", 4, false},
V110: {V110, "v110", 4, false},
V111: {V111, "v111", 4, false},
V112: {V112, "v112", 4, false},
V113: {V113, "v113", 4, false},
V114: {V114, "v114", 4, false},
V115: {V115, "v115", 4, false},
V116: {V116, "v116", 4, false},
V117: {V117, "v117", 4, false},
V118: {V118, "v118", 4, false},
V119: {V119, "v119", 4, false},
V120: {V120, "v120", 4, false},
V121: {V121, "v121", 4, false},
V122: {V122, "v122", 4, false},
V123: {V123, "v123", 4, false},
V124: {V124, "v124", 4, false},
V125: {V125, "v125", 4, false},
V126: {V126, "v126", 4, false},
V127: {V127, "v127", 4, false},
V128: {V128, "v128", 4, false},
V129: {V129, "v129", 4, false},
V130: {V130, "v130", 4, false},
V131: {V131, "v131", 4, false},
V132: {V132, "v132", 4, false},
V133: {V133, "v133", 4, false},
V134: {V134, "v134", 4, false},
V135: {V135, "v135", 4, false},
V136: {V136, "v136", 4, false},
V137: {V137, "v137", 4, false},
V138: {V138, "v138", 4, false},
V139: {V139, "v139", 4, false},
V140: {V140, "v140", 4, false},
V141: {V141, "v141", 4, false},
V142: {V142, "v142", 4, false},
V143: {V143, "v143", 4, false},
V144: {V144, "v144", 4, false},
V145: {V145, "v145", 4, false},
V146: {V146, "v146", 4, false},
V147: {V147, "v147", 4, false},
V148: {V148, "v148", 4, false},
V149: {V149, "v149", 4, false},
V150: {V150, "v150", 4, false},
V151: {V151, "v151", 4, false},
V152: {V152, "v152", 4, false},
V153: {V153, "v153", 4, false},
V154: {V154, "v154", 4, false},
V155: {V155, "v155", 4, false},
V156: {V156, "v156", 4, false},
V157: {V157, "v157", 4, false},
V158: {V158, "v158", 4, false},
V159: {V159, "v159", 4, false},
V160: {V160, "v160", 4, false},
V161: {V161, "v161", 4, false},
V162: {V162, "v162", 4, false},
V163: {V163, "v163", 4, false},
V164: {V164, "v164", 4, false},
V165: {V165, "v165", 4, false},
V166: {V166, "v166", 4, false},
V167: {V167, "v167", 4, false},
V168: {V168, "v168", 4, false},
V169: {V169, "v169", 4, false},
V170: {V170, "v170", 4, false},
V171: {V171, "v171", 4, false},
V172: {V172, "v172", 4, false},
V173: {V173, "v173", 4, false},
V174: {V174, "v174", 4, false},
V175: {V175, "v175", 4, false},
V176: {V176, "v176", 4, false},
V177: {V177, "v177", 4, false},
V178: {V178, "v178", 4, false},
V179: {V179, "v179", 4, false},
V180: {V180, "v180", 4, false},
V181: {V181, "v181", 4, false},
V182: {V182, "v182", 4, false},
V183: {V183, "v183", 4, false},
V184: {V184, "v184", 4, false},
V185: {V185, "v185", 4, false},
V186: {V186, "v186", 4, false},
V187: {V187, "v187", 4, false},
V188: {V188, "v188", 4, false},
V189: {V189, "v189", 4, false},
V190: {V190, "v190", 4, false},
V191: {V191, "v191", 4, false},
V192: {V192, "v192", 4, false},
V193: {V193, "v193", 4, false},
V194: {V194, "v194", 4, false},
V195: {V195, "v195", 4, false},
V196: {V196, "v196", 4, false},
V197: {V197, "v197", 4, false},
V198: {V198, "v198", 4, false},
V199: {V199, "v199", 4, false},
V200: {V200, "v200", 4, false},
V201: {V201, "v201", 4, false},
V202: {V202, "v202", 4, false},
V203: {V203, "v203", 4, false},
V204: {V204, "v204", 4, false},
V205: {V205, "v205", 4, false},
V206: {V206, "v206", 4, false},
V207: {V207, "v207", 4, false},
V208: {V208, "v208", 4, false},
V209: {V209, "v209", 4, false},
V210: {V210, "v210", 4, false},
V211: {V211, "v211", 4, false},
V212: {V212, "v212", 4, false},
V213: {V213, "v213", 4, false},
V214: {V214, "v214", 4, false},
V215: {V215, "v215", 4, false},
V216: {V216, "v216", 4, false},
V217: {V217, "v217", 4, false},
V218: {V218, "v218", 4, false},
V219: {V219, "v219", 4, false},
V220: {V220, "v220", 4, false},
V221: {V221, "v221", 4, false},
V222: {V222, "v222", 4, false},
V223: {V223, "v223", 4, false},
V224: {V224, "v224", 4, false},
V225: {V225, "v225", 4, false},
V226: {V226, "v226", 4, false},
V227: {V227, "v227", 4, false},
V228: {V228, "v228", 4, false},
V229: {V229, "v229", 4, false},
V230: {V230, "v230", 4, false},
V231: {V231, "v231", 4, false},
V232: {V232, "v232", 4, false},
V233: {V233, "v233", 4, false},
V234: {V234, "v234", 4, false},
V235: {V235, "v235", 4, false},
V236: {V236, "v236", 4, false},
V237: {V237, "v237", 4, false},
V238: {V238, "v238", 4, false},
V239: {V239, "v239", 4, false},
V240: {V240, "v240", 4, false},
V241: {V241, "v241", 4, false},
V242: {V242, "v242", 4, false},
V243: {V243, "v243", 4, false},
V244: {V244, "v244", 4, false},
V245: {V245, "v245", 4, false},
V246: {V246, "v246", 4, false},
V247: {V247, "v247", 4, false},
V248: {V248, "v248", 4, false},
V249: {V249, "v249", 4, false},
V250: {V250, "v250", 4, false},
V251: {V251, "v251", 4, false},
V252: {V252, "v252", 4, false},
V253: {V253, "v253", 4, false},
V254: {V254, "v254", 4, false},
V255: {V255, "v255", 4, false},
S0: {S0, "s0", 4, false},
S1: {S1, "s1", 4, false},
S2: {S2, "s2", 4, false},
S3: {S3, "s3", 4, false},
S4: {S4, "s4", 4, false},
S5: {S5, "s5", 4, false},
S6: {S6, "s6", 4, false},
S7: {S7, "s7", 4, false},
S8: {S8, "s8", 4, false},
S9: {S9, "s9", 4, false},
S10: {S10, "s10", 4, false},
S11: {S11, "s11", 4, false},
S12: {S12, "s12", 4, false},
S13: {S13, "s13", 4, false},
S14: {S14, "s14", 4, false},
S15: {S15, "s15", 4, false},
S16: {S16, "s16", 4, false},
S17: {S17, "s17", 4, false},
S18: {S18, "s18", 4, false},
S19: {S19, "s19", 4, false},
S20: {S20, "s20", 4, false},
S21: {S21, "s21", 4, false},
S22: {S22, "s22", 4, false},
S23: {S23, "s23", 4, false},
S24: {S24, "s24", 4, false},
S25: {S25, "s25", 4, false},
S26: {S26, "s26", 4, false},
S27: {S27, "s27", 4, false},
S28: {S28, "s28", 4, false},
S29: {S29, "s29", 4, false},
S30: {S30, "s30", 4, false},
S31: {S31, "s31", 4, false},
S32: {S32, "s32", 4, false},
S33: {S33, "s33", 4, false},
S34: {S34, "s34", 4, false},
S35: {S35, "s35", 4, false},
S36: {S36, "s36", 4, false},
S37: {S37, "s37", 4, false},
S38: {S38, "s38", 4, false},
S39: {S39, "s39", 4, false},
S40: {S40, "s40", 4, false},
S41: {S41, "s41", 4, false},
S42: {S42, "s42", 4, false},
S43: {S43, "s43", 4, false},
S44: {S44, "s44", 4, false},
S45: {S45, "s45", 4, false},
S46: {S46, "s46", 4, false},
S47: {S47, "s47", 4, false},
S48: {S48, "s48", 4, false},
S49: {S49, "s49", 4, false},
S50: {S50, "s50", 4, false},
S51: {S51, "s51", 4, false},
S52: {S52, "s52", 4, false},
S53: {S53, "s53", 4, false},
S54: {S54, "s54", 4, false},
S55: {S55, "s55", 4, false},
S56: {S56, "s56", 4, false},
S57: {S57, "s57", 4, false},
S58: {S58, "s58", 4, false},
S59: {S59, "s59", 4, false},
S60: {S60, "s60", 4, false},
S61: {S61, "s61", 4, false},
S62: {S62, "s62", 4, false},
S63: {S63, "s63", 4, false},
S64: {S64, "s64", 4, false},
S65: {S65, "s65", 4, false},
S66: {S66, "s66", 4, false},
S67: {S67, "s67", 4, false},
S68: {S68, "s68", 4, false},
S69: {S69, "s69", 4, false},
S70: {S70, "s70", 4, false},
S71: {S71, "s71", 4, false},
S72: {S72, "s72", 4, false},
S73: {S73, "s73", 4, false},
S74: {S74, "s74", 4, false},
S75: {S75, "s75", 4, false},
S76: {S76, "s76", 4, false},
S77: {S77, "s77", 4, false},
S78: {S78, "s78", 4, false},
S79: {S79, "s79", 4, false},
S80: {S80, "s80", 4, false},
S81: {S81, "s81", 4, false},
S82: {S82, "s82", 4, false},
S83: {S83, "s83", 4, false},
S84: {S84, "s84", 4, false},
S85: {S85, "s85", 4, false},
S86: {S86, "s86", 4, false},
S87: {S87, "s87", 4, false},
S88: {S88, "s88", 4, false},
S89: {S89, "s89", 4, false},
S90: {S90, "s90", 4, false},
S91: {S91, "s91", 4, false},
S92: {S92, "s92", 4, false},
S93: {S93, "s93", 4, false},
S94: {S94, "s94", 4, false},
S95: {S95, "s95", 4, false},
S96: {S96, "s96", 4, false},
S97: {S97, "s97", 4, false},
S98: {S98, "s98", 4, false},
S99: {S99, "s99", 4, false},
S100: {S100, "s100", 4, false},
S101: {S101, "s101", 4, false},
EXEC: {EXEC, "exec", 8, false},
EXECLO: {EXECLO, "execlo", 4, false},
EXECHI: {EXECHI, "exechi", 4, false},
EXECZ: {EXECZ, "execz", 1, true},
VCC: {VCC, "vcc", 8, false},
VCCLO: {VCCLO, "vcclo", 4, false},
VCCHI: {VCCHI, "vcchi", 4, false},
VCCZ: {VCCZ, "vccz", 1, true},
SCC: {SCC, "scc", 1, true},
FlatSratch: {FlatSratch, "flatsratch", 8, false},
FlatSratchLo: {FlatSratchLo, "flatsratchlo", 4, false},
FlatSratchHi: {FlatSratchHi, "flatsratchhi", 4, false},
XnackMask: {XnackMask, "xnackmask", 8, false},
XnackMaskLo: {XnackMaskLo, "xnackmasklo", 4, false},
XnackMaskHi: {XnackMaskHi, "xnackmaskhi", 4, false},
Status: {Status, "status", 4, false},
Mode: {Mode, "mode", 4, false},
M0: {M0, "m0", 4, false},
Trapsts: {Trapsts, "trapsts", 4, false},
Tba: {Tba, "tba", 8, false},
TbaLo: {TbaLo, "tbalo", 4, false},
TbaHi: {TbaHi, "tbahi", 4, false},
Tma: {Tma, "tma", 8, false},
TmaLo: {TmaLo, "tmalo", 4, false},
TmaHi: {TmaHi, "tmahi", 4, false},
Timp0: {Timp0, "timp0", 4, false},
Timp1: {Timp1, "timp1", 4, false},
Timp2: {Timp2, "timp2", 4, false},
Timp3: {Timp3, "timp3", 4, false},
Timp4: {Timp4, "timp4", 4, false},
Timp5: {Timp5, "timp5", 4, false},
Timp6: {Timp6, "timp6", 4, false},
Timp7: {Timp7, "timp7", 4, false},
Timp8: {Timp8, "timp8", 4, false},
Timp9: {Timp9, "timp9", 4, false},
Timp10: {Timp10, "timp10", 4, false},
Timp11: {Timp11, "timp11", 4, false},
VMCNT: {VMCNT, "vmcnt", 1, false},
EXPCNT: {EXPCNT, "expcnt", 1, false},
LGKMCNT: {LGKMCNT, "lgkmcnt", 1, false},
} | insts/reg.go | 0.675444 | 0.421254 | reg.go | starcoder |
package data
// Course represents a course resource as stored in mongo. Pointer
// fields are optional sub-documents; plain string/bool fields map
// directly to bson properties on the course document.
type Course struct {
	ApplicationProvider string             `bson:"application_provider,omitempty"`
	Country             *Country           `bson:"country"`
	DistanceLearning    *DistanceLearning  `bson:"distance_learning"`
	Foundation          string             `bson:"foundation_year_availability"` // enum
	Honours             bool               `bson:"honours_award_provision"`
	ID                  string             `bson:"_id"`
	Institution         *InstitutionObject `bson:"institution"`
	KISCourseID         string             `bson:"kis_course_id"`
	Length              *LengthObject      `bson:"length_of_course"`
	Links               *LinkList          `bson:"links"`
	Location            *Location          `bson:"location"`
	Mode                *Mode              `bson:"mode"` // enum - part time, full time, both
	NHSFunded           *NHSFunded         `bson:"nhs_funded,omitempty"`
	Qualification       *Qualification     `bson:"qualification"`
	SandwichYear        *Availability      `bson:"sandwich_year"`
	Statistics          *Statistics        `bson:"statistics,omitempty"`
	Title               *Language          `bson:"title"`
	UCASCode            string             `bson:"ucas_code_id,omitempty"`
	YearAbroad          *Availability      `bson:"year_abroad"`
}
// Availability represents an object referring to the availability
type Availability struct {
Code string `bson:"code"`
Label string `bson:"label"` // enum , 0-2
}
// Country represents a country object
type Country struct {
Code string `bson:"code"`
Name string `bson:"name"`
}
// DistanceLearning represents an object referring
// to the course available through distance learning
type DistanceLearning struct {
Code string `bson:"code"`
Label string `bson:"label"`
}
// InstitutionObject represents institution data related to course
type InstitutionObject struct {
PublicUKPRNName string `bson:"public_ukprn_name"`
PublicUKPRN string `bson:"public_ukprn"`
UKPRNName string `bson:"ukprn_name"`
UKPRN string `bson:"ukprn"`
}
// Language represents an object containing english or welsh strings
type Language struct {
English string `bson:"english,omitempty"`
Welsh string `bson:"welsh,omitempty"`
}
// LengthObject represents an object referring to the course length
type LengthObject struct {
Code string `bson:"code"`
Label string `bson:"label"`
}
// LinkList represents a list of links related to the resource.
// The trailing comments name the upstream source data fields each
// optional link is populated from.
type LinkList struct {
	Accommodation       *Language `bson:"accommodation,omitempty"`
	AssessmentMethod    *Language `bson:"assessment_method,omitempty"` // ASSURL
	CoursePage          *Language `bson:"course_page,omitempty"` // CRSEURL
	EmploymentDetails   *Language `bson:"employment_details,omitempty"` // EMPLOYURL
	FinancialSupport    *Language `bson:"financial_support_details,omitempty"` // SUPPORTURL
	Institution         string    `bson:"institution"`
	LearningAndTeaching *Language `bson:"learning_and_teaching_methods,omitempty"` // LTURL
	Self                string    `bson:"self"`
	StudentUnion        *Language `bson:"student_union,omitempty"`
}
// Location represents an object containing fields to enable one to locate institution
type Location struct {
Changes bool `bson:"changes"`
Latitude string `bson:"latitude"`
Longitude string `bson:"longitude"`
Name *Language `bson:"name"`
}
// Mode represents an object referring to the type of course
type Mode struct {
Code string `bson:"code"`
Label string `bson:"label"`
}
// NHSFunded represents an object referring to the course having any NHS funded students
type NHSFunded struct {
Code string `bson:"code,omitempty"`
Label string `bson:"label,omitempty"`
}
// Qualification represents an object referring to the qualification received from course
type Qualification struct {
Code string `bson:"code"`
Label string `bson:"label"`
Level string `bson:"level"`
Name string `bson:"name"`
}
// LocationIDObject represents a course location object
type LocationIDObject struct {
ID string `bson:"id"`
} | mongo/get-random-courses/vendor/github.com/ofs/alpha-scripts/mongo/load-data/course-builder/data/data.go | 0.702938 | 0.520192 | data.go | starcoder |
package main
import (
"fmt"
"math"
"github.com/hajimehoshi/oto"
)
// Beeper handles audio. Use NewBeeper to initialise.
type Beeper struct {
	memory *byte // chip8 sound timer register; playback while > 0
	player *oto.Player // nil when initialisation failed
	sampleRate int
	frequency float64
	volume float64
	amplitude float64 // volume scaled to the positive int16 sample range
	step float64 // phase increment per sample for the configured frequency
	time float64 // current phase, advanced as samples are generated
	// whether audio is able to be played
	IsInitialised bool
}
// NewBeeper returns a pointer to Beeper which handles audio.
// The soundTimer pointer represents the register to which the chip8 emulator will write to, indicating when sound is to be played.
// The sampleRate, frequency and volume args affect the audio accordingly.
// If the audio context cannot be created the error is printed,
// IsInitialised is left false and player stays nil, so callers must
// check IsInitialised before using playback.
func NewBeeper(soundTimer *byte, sampleRate int, frequency float64, volume float64) *Beeper {
	b := &Beeper{
		memory:     soundTimer,
		sampleRate: sampleRate,
		frequency:  frequency,
		volume:     volume,
	}
	// Mono (1 channel), 2 bytes per sample, buffer sized for roughly
	// six 60Hz frames of audio.
	c, err := oto.NewContext(b.sampleRate, 1, 2, int(b.sampleRate/60*6))
	if err != nil {
		fmt.Println(err)
		b.IsInitialised = false
		return b
	}
	b.player = c.NewPlayer()
	// Scale volume into the positive int16 range.
	b.amplitude = b.volume * 0x7FFF
	// Radians to advance per sample so one cycle spans sampleRate/frequency samples.
	b.step = b.frequency * 2 * math.Pi / float64(b.sampleRate)
	b.IsInitialised = true
	return b
}
// UpdateSound will read the chip8 emulator's soundTimer register. If it is greater than 0, samples will be generated and added to the queue to be played, else nothing will be played.
func (b *Beeper) UpdateSound() {
	if *b.memory > 0 {
		b.player.Write(b.generateSample())
	} else {
		// Feed one frame (1/60s, 2 bytes per sample) of silence so the
		// player buffer timing stays consistent while the timer is zero.
		b.player.Write(make([]byte, b.sampleRate/60*2))
	}
}
// generateSample creates enough 16bit single-channel samples for 60th of a second (the rate at which sound is played) and store them 8bit little endian.
func (b *Beeper) generateSample() []byte {
	n := b.sampleRate / 60
	bytes := make([]byte, n*2) // 2 bytes per 16-bit sample
	for i := 0; i < n; i++ {
		// Sample the (unipolar) curvy triangle wave at the current phase.
		s := int16(b.amplitude * curvyTriangle(b.time))
		// Little-endian: low byte first, then high byte.
		bytes[2*i] = byte(s)
		bytes[2*i+1] = byte(s >> 8)
		b.time += b.step
	}
	return bytes
}
// wave funcs: each maps a phase/time value t to a sample level.

// triangle is a bipolar triangle wave in [-1, 1] with period 2.
func triangle(t float64) float64 {
	phase := math.Abs(math.Mod(t, 2) - 1)
	return (phase - 0.5) * 2
}

// curvyTriangle is a unipolar, cubed triangle wave in [0, 1] with
// period 2, giving a softer timbre than the plain triangle.
func curvyTriangle(t float64) float64 {
	phase := math.Abs(math.Mod(t, 2) - 1)
	return math.Pow(phase, 3)
}

// square alternates between 1 and -1 with period 2.
func square(t float64) float64 {
	if math.Mod(t, 2) >= 1 {
		return -1
	}
	return 1
}

// sawtooth ramps from -1 up to 1 over each period of 2.
func sawtooth(t float64) float64 {
	return math.Mod(t, 2) - 1
}

// sine is a plain sine wave with period 2*pi.
func sine(t float64) float64 {
	return math.Sin(t)
}
package fileutils
/*
This utility will help find packed or encrypted files on a Linux system by calculating the entropy to see how
random they are. Packed or encrypted malware often appears to be a very random executable file and this utility
can help identify potential problems.
You can calculate entropy on all files, or limit the search just to Linux ELF executables that have an entropy of
your threshold.
Sandfly Security produces an agentless intrusion detection and incident response platform for Linux. You can
find out more about how it works at: https://www.sandflysecurity.com
MIT License
Copyright (c) 2019 Sandfly Security Ltd.
https://www.sandflysecurity.com
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of
the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
Version: 1.0
Date: 2019-11-23
Author: <NAME> @CraigHRowland @SandflySecurity
*/
import (
	"bytes"
	"crypto/md5"
	"crypto/sha1"
	"crypto/sha256"
	"crypto/sha512"
	"encoding/binary"
	"encoding/hex"
	"fmt"
	"hash"
	"io"
	"math"
	"os"
)
const (
// Max file size for entropy, etc. is 2GB
constMaxFileSize = 2147483648
// Chunk of data size to read in for entropy calc
constMaxEntropyChunk = 256000
// Need 4 bytes to determine basic ELF type
constMagicNumRead = 4
// Magic number for basic ELF type
constMagicNumElf = "7f454c46"
)
// Pass in a path and we'll see if the magic number is Linux ELF type.
func IsElfType(path string) (isElf bool, err error) {
var hexData [constMagicNumRead]byte
if path == "" {
return false, fmt.Errorf("must provide a path to file to get ELF type")
}
f, err := os.Open(path)
if err != nil {
return false, err
}
defer f.Close()
fStat, err := f.Stat()
if err != nil {
return false, err
}
// Not a regular file, so can't be ELF
if !fStat.Mode().IsRegular() {
return false, nil
}
// Too small to be ELF
if fStat.Size() < constMagicNumRead {
return false, nil
}
err = binary.Read(f, binary.LittleEndian, &hexData)
if err != nil {
return false, err
}
elfType, err := hex.DecodeString(constMagicNumElf)
if err != nil {
return false, err
}
if len(elfType) > constMagicNumRead {
return false, fmt.Errorf("elf magic number string is longer than magic number read bytes")
}
if bytes.Equal(hexData[:len(elfType)], elfType) {
return true, nil
}
return false, nil
}
// Entropy calculates the Shannon entropy of a file, in bits per byte
// (0..8). The file is read in constMaxEntropyChunk-sized chunks into a
// byte histogram; the result is rounded to two decimal places.
func Entropy(path string) (entropy float64, err error) {
	var size int64

	if path == "" {
		return entropy, fmt.Errorf("must provide a path to file to get entropy")
	}

	f, err := os.Open(path)
	if err != nil {
		return 0, fmt.Errorf("couldn't open path (%s) to get entropy: %v", path, err)
	}
	defer f.Close()

	fStat, err := f.Stat()
	if err != nil {
		return 0, err
	}

	if !fStat.Mode().IsRegular() {
		return 0, fmt.Errorf("file (%s) is not a regular file to calculate entropy", path)
	}

	size = fStat.Size()
	// Zero sized file is zero entropy.
	if size == 0 {
		return 0, nil
	}

	if size > int64(constMaxFileSize) {
		return 0, fmt.Errorf("file size (%d) is too large to calculate entropy (max allowed: %d)",
			size, int64(constMaxFileSize))
	}

	dataBytes := make([]byte, constMaxEntropyChunk)
	byteCounts := make([]int, 256)
	for {
		numBytesRead, err := f.Read(dataBytes)
		// Per the io.Reader contract, consume the bytes that were
		// returned before acting on the error: a reader may return
		// n > 0 together with io.EOF, and those bytes must be counted.
		for i := 0; i < numBytesRead; i++ {
			byteCounts[int(dataBytes[i])]++
		}
		if err == io.EOF {
			break
		}
		if err != nil {
			return 0, err
		}
	}

	for i := 0; i < 256; i++ {
		px := float64(byteCounts[i]) / float64(size)
		if px > 0 {
			entropy += -px * math.Log2(px)
		}
	}

	// Returns rounded to nearest two decimals.
	return math.Round(entropy*100) / 100, nil
}
// Generates MD5 hash of a file
func HashMD5(path string) (hash string, err error) {
if path == "" {
return hash, fmt.Errorf("must provide a path to file to hash")
}
f, err := os.Open(path)
if err != nil {
return hash, fmt.Errorf("couldn't open path (%s): %v", path, err)
}
defer f.Close()
fStat, err := f.Stat()
if err != nil {
return hash, err
}
if !fStat.Mode().IsRegular() {
return hash, fmt.Errorf("file (%s) is not a regular file to calculate hash", path)
}
// Zero sized file is no hash.
if fStat.Size() == 0 {
return hash, nil
}
if fStat.Size() > int64(constMaxFileSize) {
return hash, fmt.Errorf("file size (%d) is too large to calculate hash (max allowed: %d)",
fStat.Size(), int64(constMaxFileSize))
}
hashMD5 := md5.New()
_, err = io.Copy(hashMD5, f)
if err != nil {
return hash, fmt.Errorf("couldn't read path (%s) to get MD5 hash: %v", path, err)
}
hash = hex.EncodeToString(hashMD5.Sum(nil))
return hash, nil
}
// Generates SHA1 hash of a file
func HashSHA1(path string) (hash string, err error) {
if path == "" {
return hash, fmt.Errorf("must provide a path to file to hash")
}
f, err := os.Open(path)
if err != nil {
return hash, fmt.Errorf("couldn't open path (%s): %v", path, err)
}
defer f.Close()
fStat, err := f.Stat()
if err != nil {
return hash, err
}
if !fStat.Mode().IsRegular() {
return hash, fmt.Errorf("file (%s) is not a regular file to calculate hash", path)
}
// Zero sized file is no hash.
if fStat.Size() == 0 {
return hash, nil
}
if fStat.Size() > int64(constMaxFileSize) {
return hash, fmt.Errorf("file size (%d) is too large to calculate hash (max allowed: %d)",
fStat.Size(), int64(constMaxFileSize))
}
hashSHA1 := sha1.New()
_, err = io.Copy(hashSHA1, f)
if err != nil {
return hash, fmt.Errorf("couldn't read path (%s) to get SHA1 hash: %v", path, err)
}
hash = hex.EncodeToString(hashSHA1.Sum(nil))
return hash, nil
}
// Generates SHA256 hash of a file
func HashSHA256(path string) (hash string, err error) {
if path == "" {
return hash, fmt.Errorf("must provide a path to file to hash")
}
f, err := os.Open(path)
if err != nil {
return hash, fmt.Errorf("couldn't open path (%s): %v", path, err)
}
defer f.Close()
fStat, err := f.Stat()
if err != nil {
return hash, err
}
if !fStat.Mode().IsRegular() {
return hash, fmt.Errorf("file (%s) is not a regular file to calculate hash", path)
}
// Zero sized file is no hash.
if fStat.Size() == 0 {
return hash, nil
}
if fStat.Size() > int64(constMaxFileSize) {
return hash, fmt.Errorf("file size (%d) is too large to calculate hash (max allowed: %d)",
fStat.Size(), int64(constMaxFileSize))
}
hashSHA256 := sha256.New()
_, err = io.Copy(hashSHA256, f)
if err != nil {
return hash, fmt.Errorf("couldn't read path (%s) to get SHA256 hash: %v", path, err)
}
hash = hex.EncodeToString(hashSHA256.Sum(nil))
return hash, nil
}
// Generates SHA512 hash of a file
func HashSHA512(path string) (hash string, err error) {
if path == "" {
return hash, fmt.Errorf("must provide a path to file to hash")
}
f, err := os.Open(path)
if err != nil {
return hash, fmt.Errorf("couldn't open path (%s): %v", path, err)
}
defer f.Close()
fStat, err := f.Stat()
if err != nil {
return hash, err
}
if !fStat.Mode().IsRegular() {
return hash, fmt.Errorf("file (%s) is not a regular file to calculate hash", path)
}
// Zero sized file is no hash.
if fStat.Size() == 0 {
return hash, nil
}
if fStat.Size() > int64(constMaxFileSize) {
return hash, fmt.Errorf("file size (%d) is too large to calculate hash (max allowed: %d)",
fStat.Size(), int64(constMaxFileSize))
}
hashSHA512 := sha512.New()
_, err = io.Copy(hashSHA512, f)
if err != nil {
return hash, fmt.Errorf("couldn't read path (%s) to get SHA512 hash: %v", path, err)
}
hash = hex.EncodeToString(hashSHA512.Sum(nil))
return hash, nil
} | fileutils/fileutils.go | 0.775987 | 0.421611 | fileutils.go | starcoder |
package sqlset
import (
"context"
"fmt"
"math"
"github.com/pbanos/botanic/feature"
"github.com/pbanos/botanic/set"
)
/*
Set is a set.Set whose samples are stored in a SQL backend.
Write takes a batch of set.Sample, adds them to the set and returns the
number written, or an error. Read streams the set's samples along with
an error channel.
*/
type Set interface {
	set.Set
	Write(context.Context, []set.Sample) (int, error)
	Read(context.Context) (<-chan set.Sample, <-chan error)
}

// sqlSet implements Set on top of an Adapter. A sqlSet is defined by the
// adapter, the feature schema and an optional list of criteria that
// restrict which sample rows belong to the set.
type sqlSet struct {
	db Adapter
	features []feature.Feature
	criteria []*FeatureCriterion // row filters; empty means every sample
	featureNamesColumns map[string]string // feature name -> column name
	columnFeatures map[string]feature.Feature // column name -> feature
	discreteValues map[int]string // discrete value id -> string value
	inverseDiscreteValues map[string]int // string value -> discrete value id
	dfColumns []string // columns backing discrete features
	cfColumns []string // columns backing continuous features
	count *int // memoized Count result
	entropy *float64 // memoized Entropy result (see Entropy)
}
/*
Open takes an Adapter to a db backend and a slice of feature.Feature
and returns a Set backed by the given adapter or an error if no set is
available through the given adapter.
This function expects the adapter to have the samples and discrete value
tables already created, and the discrete value table initialized with all
the values of the discrete features in the features slice.
*/
func Open(ctx context.Context, dbAdapter Adapter, features []feature.Feature) (Set, error) {
ss := &sqlSet{db: dbAdapter, features: features}
err := ss.initFeatureColumns()
if err != nil {
return nil, err
}
err = ss.init(ctx)
if err != nil {
return nil, err
}
return ss, nil
}
/*
Create takes an Adapter and a slice of feature.Feature and returns a Set
backed by the given adapter or an error.
This function will ensure that the samples and discrete value tables are
created on the database, and that the discrete value table has all the
values for the discrete features on the features slice.
*/
func Create(ctx context.Context, dbAdapter Adapter, features []feature.Feature) (Set, error) {
ss := &sqlSet{db: dbAdapter, features: features}
err := ss.initFeatureColumns()
if err != nil {
return nil, err
}
err = ss.initDB(ctx)
if err != nil {
return nil, err
}
return ss, nil
}
// Count returns the number of samples matching this set's criteria,
// memoizing the result on success for subsequent calls.
func (ss *sqlSet) Count(ctx context.Context) (int, error) {
	if ss.count != nil {
		return *ss.count, nil
	}
	n, err := ss.db.CountSamples(ctx, ss.criteria)
	if err != nil {
		return n, err
	}
	ss.count = &n
	return n, nil
}
// Entropy returns the Shannon entropy (natural logarithm) of the
// distribution of feature f's values over the samples matching this
// set's criteria.
//
// NOTE(review): the result is memoized in ss.entropy without recording
// which feature it was computed for, so a later call with a different
// feature returns the cached value for the first one — confirm callers
// only ever request entropy for a single feature per set.
func (ss *sqlSet) Entropy(ctx context.Context, f feature.Feature) (float64, error) {
	if ss.entropy != nil {
		return *ss.entropy, nil
	}
	var result, count float64
	column, ok := ss.featureNamesColumns[f.Name()]
	if !ok {
		return 0.0, fmt.Errorf("unknown feature %s", f.Name())
	}
	if _, ok = f.(*feature.DiscreteFeature); ok {
		featureValueCounts, err := ss.db.CountSampleDiscreteFeatureValues(ctx, column, ss.criteria)
		if err != nil {
			return 0.0, err
		}
		// First pass: total sample count. Second pass: accumulate
		// -p*ln(p) per observed value.
		for _, c := range featureValueCounts {
			count += float64(c)
		}
		for _, c := range featureValueCounts {
			probValue := float64(c) / count
			result -= probValue * math.Log(probValue)
		}
	} else {
		featureValueCounts, err := ss.db.CountSampleContinuousFeatureValues(ctx, column, ss.criteria)
		if err != nil {
			return 0.0, err
		}
		for _, c := range featureValueCounts {
			count += float64(c)
		}
		for _, c := range featureValueCounts {
			probValue := float64(c) / count
			result -= probValue * math.Log(probValue)
		}
	}
	ss.entropy = &result
	return result, nil
}
func (ss *sqlSet) FeatureValues(ctx context.Context, f feature.Feature) ([]interface{}, error) {
var err error
var result []interface{}
column, ok := ss.featureNamesColumns[f.Name()]
if !ok {
return nil, fmt.Errorf("unknown feature %s", f.Name())
}
if _, ok = f.(*feature.DiscreteFeature); ok {
var values []int
values, err = ss.db.ListSampleDiscreteFeatureValues(ctx, column, ss.criteria)
if err != nil {
return nil, err
}
for _, v := range values {
result = append(result, v)
}
} else {
var values []float64
values, err = ss.db.ListSampleContinuousFeatureValues(ctx, column, ss.criteria)
if err != nil {
return nil, err
}
for _, v := range values {
result = append(result, v)
}
}
return result, nil
}
func (ss *sqlSet) Samples(ctx context.Context) ([]set.Sample, error) {
rawSamples, err := ss.db.ListSamples(ctx, ss.criteria, ss.dfColumns, ss.cfColumns)
if err != nil {
return nil, err
}
samples := make([]set.Sample, 0, len(rawSamples))
for _, s := range rawSamples {
samples = append(samples, &Sample{Values: s, DiscreteFeatureValues: ss.discreteValues, FeatureNamesColumns: ss.featureNamesColumns})
}
return samples, nil
}
func (ss *sqlSet) SubsetWith(ctx context.Context, fc feature.Criterion) (set.Set, error) {
rfc, err := NewFeatureCriteria(fc, ss.db.ColumnName, ss.inverseDiscreteValues)
if err != nil {
return nil, err
}
subsetCriteria := make([]*FeatureCriterion, 0, len(ss.criteria)+len(rfc))
subsetCriteria = append(subsetCriteria, ss.criteria...)
subsetCriteria = append(subsetCriteria, rfc...)
return &sqlSet{
db: ss.db,
features: ss.features,
criteria: subsetCriteria,
discreteValues: ss.discreteValues,
inverseDiscreteValues: ss.inverseDiscreteValues,
featureNamesColumns: ss.featureNamesColumns,
columnFeatures: ss.columnFeatures,
dfColumns: ss.dfColumns,
cfColumns: ss.cfColumns,
}, nil
}
func (ss *sqlSet) CountFeatureValues(ctx context.Context, f feature.Feature) (map[string]int, error) {
result := make(map[string]int)
column, ok := ss.featureNamesColumns[f.Name()]
if !ok {
return nil, fmt.Errorf("unknown feature %s", f.Name())
}
if _, ok = f.(*feature.DiscreteFeature); ok {
featureValueCounts, err := ss.db.CountSampleDiscreteFeatureValues(ctx, column, ss.criteria)
if err != nil {
return nil, err
}
for k, v := range featureValueCounts {
result[ss.discreteValues[k]] = v
}
} else {
featureValueCounts, err := ss.db.CountSampleContinuousFeatureValues(ctx, column, ss.criteria)
if err != nil {
return nil, err
}
for k, v := range featureValueCounts {
result[fmt.Sprintf("%f", k)] = v
}
}
return result, nil
}
// Write converts the given samples into their raw column/value form and
// inserts them through the adapter, returning the number added.
func (ss *sqlSet) Write(ctx context.Context, samples []set.Sample) (int, error) {
	if len(samples) == 0 {
		return 0, nil
	}
	raw := make([]map[string]interface{}, len(samples))
	for i, s := range samples {
		rs, err := ss.newRawSample(s)
		if err != nil {
			return 0, err
		}
		raw[i] = rs
	}
	return ss.db.AddSamples(ctx, raw, ss.dfColumns, ss.cfColumns)
}
// Read streams the samples matching this set's criteria. It returns a
// channel of samples and a channel yielding at most one error; both are
// closed when iteration finishes. Iteration stops early (without an
// error) when ctx is cancelled.
func (ss *sqlSet) Read(ctx context.Context) (<-chan set.Sample, <-chan error) {
	sampleStream := make(chan set.Sample)
	// Buffered so the final error can be delivered and the channel
	// closed without spawning a dedicated sender goroutine.
	errStream := make(chan error, 1)
	go func() {
		defer close(sampleStream)
		defer close(errStream)
		err := ss.db.IterateOnSamples(
			ctx,
			ss.criteria,
			ss.dfColumns,
			ss.cfColumns,
			func(n int, rs map[string]interface{}) (bool, error) {
				s := &Sample{
					Values:                rs,
					DiscreteFeatureValues: ss.discreteValues,
					FeatureNamesColumns:   ss.featureNamesColumns}
				select {
				case <-ctx.Done():
					// Stop iterating; cancellation is not reported as an error.
					return false, nil
				case sampleStream <- s:
				}
				return true, nil
			})
		if err != nil {
			errStream <- err
		}
	}()
	return sampleStream, errStream
}
// initDB creates the discrete-values and samples tables, inserts any
// discrete feature values not yet stored, and then loads the value maps
// via init.
func (ss *sqlSet) initDB(ctx context.Context) error {
	err := ss.db.CreateDiscreteValuesTable(ctx)
	if err != nil {
		return err
	}
	err = ss.db.CreateSampleTable(ctx, ss.dfColumns, ss.cfColumns)
	if err != nil {
		return err
	}
	// Load the existing values first so unavailableDiscreteValues can
	// diff against them; init below re-fetches so that ids assigned to
	// the newly added values are picked up.
	ss.discreteValues, err = ss.db.ListDiscreteValues(ctx)
	if err != nil {
		return err
	}
	newValues := ss.unavailableDiscreteValues()
	_, err = ss.db.AddDiscreteValues(ctx, newValues)
	if err != nil {
		return err
	}
	err = ss.init(ctx)
	if err != nil {
		return err
	}
	return nil
}
// unavailableDiscreteValues returns, without duplicates and in
// feature-declaration order, the discrete feature values declared by
// ss.features that are not yet present in ss.discreteValues.
func (ss *sqlSet) unavailableDiscreteValues() []string {
	// Track already-known values in a set instead of re-scanning both
	// the stored values and the result slice for every candidate
	// (the previous linear scans were quadratic).
	seen := make(map[string]struct{}, len(ss.discreteValues))
	for _, v := range ss.discreteValues {
		seen[v] = struct{}{}
	}
	var unavailableDiscreteValues []string
	for _, f := range ss.features {
		df, ok := f.(*feature.DiscreteFeature)
		if !ok {
			continue
		}
		for _, fv := range df.AvailableValues() {
			if _, present := seen[fv]; present {
				continue
			}
			seen[fv] = struct{}{}
			unavailableDiscreteValues = append(unavailableDiscreteValues, fv)
		}
	}
	return unavailableDiscreteValues
}
func (ss *sqlSet) init(ctx context.Context) error {
var err error
ss.discreteValues, err = ss.db.ListDiscreteValues(ctx)
if err != nil {
return err
}
ss.inverseDiscreteValues = make(map[string]int)
for k, v := range ss.discreteValues {
ss.inverseDiscreteValues[v] = k
}
return nil
}
func (ss *sqlSet) newRawSample(s set.Sample) (map[string]interface{}, error) {
rs := make(map[string]interface{})
for _, f := range ss.features {
v, err := s.ValueFor(f)
if err != nil {
return nil, err
}
if v != nil {
_, ok := f.(*feature.DiscreteFeature)
if ok {
vs, ok := v.(string)
if !ok {
return nil, fmt.Errorf("expected string value for discrete feature %s of sample, got %T", f.Name(), v)
}
v, ok = ss.inverseDiscreteValues[vs]
}
rs[f.Name()] = v
}
}
return rs, nil
}
func (ss *sqlSet) initFeatureColumns() error {
ss.columnFeatures = make(map[string]feature.Feature)
ss.featureNamesColumns = make(map[string]string)
for _, f := range ss.features {
column, err := ss.db.ColumnName(f.Name())
if err != nil {
return fmt.Errorf("invalid feature %s: %v", f.Name(), err)
}
of, ok := ss.columnFeatures[column]
if ok {
return fmt.Errorf("%s and %s feature names translate to the same column name %s", f.Name(), of.Name(), column)
}
ss.columnFeatures[column] = f
ss.featureNamesColumns[f.Name()] = column
}
for _, f := range ss.features {
if _, ok := f.(*feature.DiscreteFeature); ok {
ss.dfColumns = append(ss.dfColumns, ss.featureNamesColumns[f.Name()])
} else {
ss.cfColumns = append(ss.cfColumns, ss.featureNamesColumns[f.Name()])
}
}
return nil
} | set/sqlset/set.go | 0.59796 | 0.40072 | set.go | starcoder |
package gobang
/**
* | | | | | | | |B| |B|
* | | | | | | |B| |B| |
* | | | | | |B| |B| | |
* | | | | |B| |B| | | |
* | | | |B| |B| | | | |
* | | | | |B| | | | | |
* | | | |B| | | | | | |
* | | |B| | | | | | | |
* | |B| | | | | | | | |
* |B| | | | | | | | | |
*/
// NewTopRightDiagonalCellMatcher returns a matcher that scans the
// board's bottom-left to top-right diagonals (see diagram above) for
// runs of `count` stones of the given colour.
func NewTopRightDiagonalCellMatcher(stone Stone, count int) *TopRightDiagonalCellMatcher {
	return &TopRightDiagonalCellMatcher{count: count, stone: stone}
}

// TopRightDiagonalCellMatcher matches stone runs along top-right diagonals.
type TopRightDiagonalCellMatcher struct {
	count int   // run length required for a match
	stone Stone // stone colour to match
}
// Matches collects every top-right diagonal of the board and, using a
// ReachedSelector configured with this matcher's stone and count,
// gathers all reached results into a single MatchedResult.
func (s *TopRightDiagonalCellMatcher) Matches(board *Board) *MatchedResult {
	result := &MatchedResult{}
	groups := s.scanAllCellGroup(board)
	reachedSelector := ReachedSelector{
		stone:    s.stone,
		count:    s.count,
		board:    board,
		neighbor: NewTopRightNeighborDistance(),
	}
	for _, group := range groups {
		results := group.SelectReached(reachedSelector)
		if len(results) <= 0 {
			continue
		}
		result.results = append(result.results, results...)
	}
	return result
}
// scanAllCellGroup gathers every relevant top-right diagonal of the
// board, combining the diagonals anchored on the X axis with those
// anchored on the Y axis.
func (s *TopRightDiagonalCellMatcher) scanAllCellGroup(board *Board) []*CellGroup {
	xGroups := s.scanXAxisCellGroup(board)
	yGroups := s.scanYAxisCellGroup(board)
	return append(xGroups, yGroups...)
}
/**
 * | | | | |B|B|B|B|B|B|
 * | | | |B|B|B|B|B|B| |
 * | | |B|B|B|B|B|B| | |
 * | |B|B|B|B|B|B| | | |
 * |B|B|B|B|B|B| | | | |
 * |B|B|B|B|B| | | | | |
 * |B|B|B|B| | | | | | |
 * |B|B|B| | | | | | | |
 * |B|B| | | | | | | | |
 * |B| | | | | | | | | |
 */
// scanXAxisCellGroup collects the diagonals anchored on the X axis
// (y == 0), walking each up and to the left. Starting at x = count-1
// skips diagonals too short to ever hold a run of s.count stones.
func (s *TopRightDiagonalCellMatcher) scanXAxisCellGroup(board *Board) []*CellGroup {
	point := DefaultPoint()
	endX := board.Width() - 1
	groups := make([]*CellGroup, 0)
	for startX := s.count - 1; startX <= endX; startX++ {
		y := 0
		group := &CellGroup{}
		for x := startX; x >= 0; x-- {
			cell := board.SelectCell(point.SetTo(x, y))
			group.cells = append(group.cells, cell)
			y++
		}
		groups = append(groups, group)
		// NOTE: the dead reassignment of group that used to follow the
		// append (staticcheck SA4006) has been removed; a fresh group is
		// created at the top of each iteration.
	}
	return groups
}
/**
* | | | | | | | | | | |
* | | | | | | | | | |B|
* | | | | | | | | |B|B|
* | | | | | | | |B|B|B|
* | | | | | | |B|B|B|B|
* | | | | | |B|B|B|B|B|
* | | | | |B|B|B|B|B| |
* | | | |B|B|B|B|B| | |
* | | |B|B|B|B|B| | | |
* | |B|B|B|B|B| | | | |
*/
func (s *TopRightDiagonalCellMatcher) scanYAxisCellGroup(board *Board) []*CellGroup {
point := DefaultPoint()
maxY := board.Height() - 1
endY := board.Height() - s.count
groups := make([]*CellGroup, 0)
for startY := 1; startY <= endY; startY++ {
x := board.Width() - 1
group := &CellGroup{}
for y := startY; y <= maxY; y++ {
cell := board.SelectCell(point.SetTo(x, y))
group.cells = append(group.cells, cell)
x--
}
groups = append(groups, group)
group = &CellGroup{}
}
return groups
} | gobang/top_right_diagonal_cell_matcher.go | 0.832951 | 0.551815 | top_right_diagonal_cell_matcher.go | starcoder |
package compatibilityVars
var RubyVersionAgentSupportability = map[string][]string{
//the keys are the ruby version and the values are the agent versions that support that specific version
"2.7": []string{"6.9.0.363+"},
"2.6": []string{"5.7.0.350+"},
"2.5": []string{"4.8.0.341+"},
"2.4": []string{"3.18.0.329+"},
"2.3": []string{"3.9.9.275+"},
"2.2": []string{"3.9.9.275+"},
"2.1": []string{"3.9.9.275+"},
"2.0": []string{"3.9.6.257+"},
"1.9.3": []string{"3.9.6.257-3.18.1.330"},
"1.9.2": []string{"3.9.6.257-3.18.1.330"},
"1.8.7": []string{"3.9.6.257-3.18.1.330"},
}
var PythonVersionAgentSupportability = map[string][]string{
//the keys are the python version and the values are the agent versions that support that specific version
"3.8": []string{"5.2.3.131+"},
"3.7": []string{"3.4.0.95+"},
"3.6": []string{"172.16.58.3+"},
"3.5": []string{"2.78.0.57+"},
"3.4": []string{"192.168.3.11-4.20.0.120"},
"3.3": []string{"192.168.3.11-3.4.0.95"},
"2.7": []string{"2.42.0.35+"},
"2.6": []string{"2.42.0.35-3.4.0.95"},
}
/*
List of supported JRE distributions
The keys to this map are used verbatim to generate
a regular expression in `extractVendorFromJavaExecutable`
They should exactly match how they appear in the output
of `java -version.`
Any vendors not found in this map will be flagged as
unsupported. Known unsupported vendors can be called out
explicitly by using an empty slice of compatibility
requirements.
*/
// NOTE(review): Amazon's distribution prints "Corretto" (double r) in
// `java -version` output, so the "Coretto" key below may never match —
// verify against real output before renaming, since other code may
// look the key up by this spelling.
var SupportedJavaVersions = map[string][]string{
	// supported vendors
	"OpenJDK":    []string{"1.7-1.9.*", "7-15.*"},
	"HotSpot":    []string{"1.7-1.9.*", "7-15.*"},
	"JRockit":    []string{"1-1.6.0.50"},
	"Coretto":    []string{"1.8-1.9.*", "8-11.*"},
	"Zulu":       []string{"1.8-1.9.*", "8-12.*"},
	"IBM":        []string{"1.7-1.8.*", "7-8.*"},
	"Oracle":     []string{"1.5.*", "5.0.*"},
	"Zing":       []string{"1.8-1.9.*", "8-11.*"},
	"OpenJ9":     []string{"1.8-1.9.*", "8-13.*"},
	"Dragonwell": []string{"1.8-1.9.*", "8-11.*"},
}
//Supported only with Java agent 4.3.x:
var SupportedForJavaAgent4 = map[string][]string{
"Apple": []string{"1.6.*", "6.*"},
"IBM": []string{"1.6.*", "6.*"},
"HotSpot": []string{"1.6.*", "6.*"},
}
var NodeSupportedVersions = map[string][]string{
"12": []string{"6.0.0+"},
"10": []string{"4.6.0-7.*"},
}
//https://docs.newrelic.com/docs/agents/net-agent/getting-started/net-agent-compatibility-requirements-net-framework#net-version
// .NET framework as keys and .NET agent as values
var DotnetFrameworkSupportedVersions = map[string][]string{
"5.0": []string{"7.0.0+"},
"4.8": []string{"7.0.0+"},
"4.7": []string{"7.0.0+"},
"4.6": []string{"7.0.0+"}, //should be inclusive of version such as 4.6.1
"4.5": []string{"7.0.0+"},
}
var DotnetFrameworkOldVersions = map[string][]string{
//To instrument applications running on .NET Framework version 4.0 and lower, you must run a version of the New Relic .NET agent earlier than 7.0
"4.0": []string{"5.1.*-6.*"}, //5.0 and lower are EOL versions
"3.5": []string{"5.1.*-6.*"},
//Doc says .NET Framework 3.0 and 2.0 are no longer supported as September 2020:https://docs.newrelic.com/docs/agents/net-agent/getting-started/net-agent-compatibility-requirements-net-framework
}
//.NET Core 2.0 or higher is supported by the New Relic .NET agent version 6.19 or higher
var DotnetCoreSupportedVersions = map[string][]string{
"5.0": []string{"8.35.0+"},
"3.1": []string{"8.21.34.0+"},
"3.0": []string{"8.21.34.0+"},
"2.2": []string{"8.19.353.0+"},
"2.1": []string{"8.19.353.0+"},
"2.0": []string{"8.19.353.0+"},
}
//https://docs.newrelic.com/docs/agents/net-agent/getting-started/net-agent-compatibility-requirements-net-core#net-version | tasks/compatibilityVars/versions.go | 0.533154 | 0.432243 | versions.go | starcoder |
package inject
import (
"fmt"
"reflect"
)
// provider builds a value by invoking a constructor function whose
// arguments are resolved, via the stored pointers, from a Graph.
type provider struct {
	constructor interface{} // function with exactly one return value
	argPtrs     []interface{} // pointers resolved against the graph at Provide time
}
// NewProvider specifies how to construct a value given its constructor function and argument pointers.
// The constructor must be a function with exactly one return value and
// every argPtr must be a pointer whose element kind matches the
// corresponding parameter; violations panic here rather than later at
// Provide time. For variadic constructors the argPtr count may differ
// from NumIn.
func NewProvider(constructor interface{}, argPtrs ...interface{}) Provider {
	fnValue := reflect.ValueOf(constructor)
	if fnValue.Kind() != reflect.Func {
		panic(fmt.Sprintf("constructor (%v) is not a function, found %v", fnValue, fnValue.Kind()))
	}
	fnType := reflect.TypeOf(constructor)
	if fnType.NumOut() != 1 {
		panic(fmt.Sprintf("constructor must have exactly 1 return value, found %v", fnType.NumOut()))
	}
	argCount := fnType.NumIn()
	if !fnValue.Type().IsVariadic() && argCount != len(argPtrs) {
		panic(fmt.Sprintf("argPtrs (%d) must match constructor arguments (%d)", len(argPtrs), argCount))
	}
	var kind reflect.Kind
	for i, argPtr := range argPtrs {
		// isVariadic marks pointers destined for the variadic tail.
		// NOTE(review): this uses i >= fnType.NumIn() while Provide uses
		// i >= fnType.NumIn()-1 for the same test, so the pointer for the
		// variadic slot itself (i == NumIn()-1) is kind-checked against
		// the slice type here — confirm which boundary is intended.
		isVariadic := fnValue.Type().IsVariadic() && (fnType.NumIn() == 1 || i >= fnType.NumIn())
		if i < fnType.NumIn() {
			kind = fnType.In(i).Kind()
		} else {
			// Extra variadic args take the kind of the final (slice) parameter.
			kind = fnType.In(fnType.NumIn() - 1).Kind()
		}
		if reflect.TypeOf(argPtr).Kind() != reflect.Ptr {
			panic(fmt.Sprintf("argPtrs must all be pointers, found %v", reflect.TypeOf(argPtr)))
		}
		if !isVariadic && reflect.ValueOf(argPtr).Elem().Kind() != kind {
			panic("argPtrs must match constructor argument types")
		}
	}
	return provider{
		constructor: constructor,
		argPtrs:     argPtrs,
	}
}
// Provide returns the result of executing the constructor with argument values resolved from a dependency graph.
// Resolved arguments are converted to the constructor's parameter types
// where necessary; an argument that is neither assignable nor
// convertible causes a panic.
func (p provider) Provide(g Graph) reflect.Value {
	fnType := reflect.TypeOf(p.constructor)
	argCount := fnType.NumIn()
	if fnType.IsVariadic() {
		argCount = len(p.argPtrs)
	}
	args := make([]reflect.Value, argCount, argCount)
	var inType reflect.Type
	for i := 0; i < argCount; i++ {
		arg := g.Resolve(p.argPtrs[i])
		argType := arg.Type()
		// Arguments at or beyond the variadic slot take the variadic
		// element type. (Boundary here is NumIn()-1; NewProvider uses
		// NumIn() — see the note there.)
		if fnType.IsVariadic() && i >= fnType.NumIn()-1 {
			inType = fnType.In(fnType.NumIn() - 1).Elem()
		} else {
			inType = fnType.In(i)
		}
		if !argType.AssignableTo(inType) {
			if !argType.ConvertibleTo(inType) {
				panic(fmt.Sprintf(
					"arg %d of type %q cannot be assigned or converted to type %q for provider constructor (%s)",
					i, argType, inType, p.constructor,
				))
			}
			arg = arg.Convert(inType)
		}
		args[i] = arg
	}
	return reflect.ValueOf(p.constructor).Call(args)[0]
}
// ReturnType returns the type of value to expect from Provide.
func (p provider) ReturnType() reflect.Type {
	return reflect.TypeOf(p.constructor).Out(0)
}
// String returns a multiline string representation of the provider
func (p provider) String() string {
return fmt.Sprintf("&provider{\n%s,\n%s\n}",
indent(fmt.Sprintf("constructor: %s", reflect.TypeOf(p.constructor)), 1),
indent(fmt.Sprintf("argPtrs: %s", p.fmtArgPtrs()), 1),
)
}
func (p provider) fmtArgPtrs() string {
b := make([]string, len(p.argPtrs), len(p.argPtrs))
for i, argPtr := range p.argPtrs {
b[i] = ptrString(argPtr)
}
return arrayString(b)
} | provider.go | 0.612773 | 0.449393 | provider.go | starcoder |
package internal
import (
"fmt"
"math"
)
// Epsilon is the tolerance used by all float comparisons in this package.
const Epsilon = 1e-7

// Equal reports whether a and b are within Epsilon of each other.
// Tolerance-based equality compensates for float imprecision; without it we
// would end up shaving off absurdly thin triangles on nearly horizontal
// segments.
func Equal(a, b float64) bool {
	return math.Abs(a-b) < Epsilon
}

// GreaterThan reports whether a exceeds b by more than Epsilon.
func GreaterThan(a, b float64) bool {
	return a-b > Epsilon
}

// LessThan reports whether b exceeds a by more than Epsilon.
func LessThan(a, b float64) bool {
	return b-a > Epsilon
}
// A common convention in our geometry is that if two points have the same Y
// value, the one with the smaller X value is "lower". This simulates a slightly
// rotated coordinate system, allowing us to assume Y values are never equal.
func (p *Point) Below(otherPoint *Point) bool {
	if !Equal(p.Y, otherPoint.Y) {
		return p.Y < otherPoint.Y
	}
	return p.X < otherPoint.X
}

// Above is the negation of Below, under the same tie-breaking convention.
func (p *Point) Above(otherPoint *Point) bool {
	return !p.Below(otherPoint)
}
// PointingAt creates a directional point anchored at p and aimed toward other.
func (p *Point) PointingAt(other *Point) DirectionalPoint {
	delta := Vector{X: other.X - p.X, Y: other.Y - p.Y}
	return DirectionalPoint{Point: p, Direction: delta.Normalize()}
}

// PointingRight is a convenience for callers that do not care about the
// direction: the point faces straight along the positive X axis.
func (p *Point) PointingRight() DirectionalPoint {
	return DirectionalPoint{Point: p, Direction: Vector{X: 1, Y: 0}}
}

// DefaultDirectionalPoint builds a right-facing directional point at (x, y);
// same convention as PointingRight.
func DefaultDirectionalPoint(x, y float64) DirectionalPoint {
	return DirectionalPoint{
		Point:     &Point{X: x, Y: y},
		Direction: Vector{X: 1, Y: 0},
	}
}
// CircularIndex maps i into [0, n), treating an array of length n as a
// circular buffer. Unlike the raw modulo operator, it never yields a negative
// value for negative i.
func CircularIndex(i, n int) int {
	return (i%n + n) % n
}
// Push appends p to the top of the stack.
func (s *PointStack) Push(p *Point) {
	*s = append(*s, p)
}

// Pop removes and returns the top of the stack, or nil when the stack is empty.
func (s *PointStack) Pop() *Point {
	n := len(*s)
	if n == 0 {
		return nil
	}
	top := (*s)[n-1]
	*s = (*s)[:n-1]
	return top
}

// Peek returns the top of the stack without removing it, or nil when empty.
func (s *PointStack) Peek() *Point {
	n := len(*s)
	if n == 0 {
		return nil
	}
	return (*s)[n-1]
}

// Empty reports whether the stack holds no points.
func (s *PointStack) Empty() bool {
	return len(*s) == 0
}
// Several properties can be derived from any structure that can compute its
// signed area.
type HasSignedArea interface {
	// Enclosed area of the structure, positive if counterclockwise, negative if clockwise.
	SignedArea() float64
}

// SignedArea computes the triangle's area with the shoelace formula over the
// vertex pairs (A,B), (B,C), (C,A); the sign encodes winding direction.
func (t *Triangle) SignedArea() float64 {
	return ((t.A.X*t.B.Y - t.B.X*t.A.Y) +
		(t.B.X*t.C.Y - t.C.X*t.B.Y) +
		(t.C.X*t.A.Y - t.A.X*t.C.Y)) / 2
}

// SignedArea computes the polygon's area with the shoelace formula over
// consecutive vertices (wrapping from the last back to the first); the sign
// encodes winding direction.
func (poly *Polygon) SignedArea() float64 {
	area := 0.0
	n := len(poly.Points)
	for i := 0; i < n; i++ {
		nextI := (i + 1) % n
		area += poly.Points[i].X*poly.Points[nextI].Y - poly.Points[nextI].X*poly.Points[i].Y
	}
	return area / 2
}
// Area returns the absolute (unsigned) enclosed area of s.
func Area(s HasSignedArea) float64 {
	return math.Abs(s.SignedArea())
}

// IsCCW reports whether s winds counterclockwise (positive signed area).
func IsCCW(s HasSignedArea) bool {
	return s.SignedArea() > 0
}

// IsCW reports whether s winds clockwise (negative signed area).
func IsCW(s HasSignedArea) bool {
	return s.SignedArea() < 0
}
// Contains reports whether p is a member of the set.
// Note: membership is by pointer identity, not coordinate value.
func (ps PointSet) Contains(p *Point) bool {
	_, found := ps[p]
	return found
}

// Add inserts p into the set.
func (ps PointSet) Add(p *Point) {
	ps[p] = struct{}{}
}

// Equals reports whether both sets contain exactly the same points.
func (ps PointSet) Equals(otherSet PointSet) bool {
	if len(ps) != len(otherSet) {
		return false
	}
	for p := range ps {
		if !otherSet.Contains(p) {
			return false
		}
	}
	return true
}
// String functions

// String renders the point as "{X, Y}" with two decimal places.
func (p *Point) String() string {
	return fmt.Sprintf("{%0.2f, %0.2f}", p.X, p.Y)
}
// A segment points down if its start point is above its endpoint
func (s *Segment) PointsDown() bool {
	return s.End.Below(s.Start)
}

// Is the line segment left of p. This assumes that P is vertically between the start and end of the segment
// A nil segment is treated as infinitely far left (returns true). A segment
// sharing an endpoint (by pointer identity) with p is neither left nor right.
// NOTE(review): the horizontal case compares Bottom().X here but Top().X in
// IsRightOf — presumably intentional given the rotated-coordinate convention;
// confirm against callers.
func (s *Segment) IsLeftOf(p *Point) bool {
	if s == nil {
		return true
	}
	// Handle horizontal case
	if Equal(s.Start.Y, s.End.Y) {
		return LessThan(s.Bottom().X, p.X)
	}
	if s.Start == p || s.End == p {
		return false
	}
	// Solve the segment's supporting line at p's height and compare X.
	x := s.SolveForX(p.Y)
	return LessThan(x, p.X)
}

// IsRightOf mirrors IsLeftOf: true when the segment lies right of p (or the
// segment is nil), false when p is one of the segment's endpoints.
func (s *Segment) IsRightOf(p *Point) bool {
	if s == nil {
		return true
	}
	// Handle horizontal case
	if Equal(s.Start.Y, s.End.Y) {
		return GreaterThan(s.Top().X, p.X)
	}
	if s.Start == p || s.End == p {
		return false
	}
	x := s.SolveForX(p.Y)
	return GreaterThan(x, p.X)
}
// XDirection reports which way the segment leans when read from top to bottom:
/*
	  o        o
	 /  Left    \  Right
	o            o
*/
func (s *Segment) XDirection() XDirection {
	if s.Top().X > s.Bottom().X {
		return Left
	}
	return Right
}

// Top returns the upper endpoint (nil for a nil segment), using the package's
// tie-breaking convention for equal Y values.
func (s *Segment) Top() *Point {
	if s == nil {
		return nil
	}
	if s.PointsDown() {
		return s.Start
	}
	return s.End
}

// Bottom returns the lower endpoint (nil for a nil segment).
func (s *Segment) Bottom() *Point {
	if s == nil {
		return nil
	}
	if s.PointsDown() {
		return s.End
	}
	return s.Start
}

// Opposite flips a horizontal direction (Left <-> Right).
func (dir XDirection) Opposite() XDirection {
	return dir ^ 1
}

// Opposite flips a vertical direction.
func (dir YDirection) Opposite() YDirection {
	return dir ^ 1
}

// Opposite flips both components of a compound direction.
func (dir Direction) Opposite() Direction {
	return Direction{dir.X.Opposite(), dir.Y.Opposite()}
}

// IsHorizontal reports whether both endpoints share (tolerantly) the same Y.
func (s *Segment) IsHorizontal() bool {
	return Equal(s.Start.Y, s.End.Y)
}

// IsVertical reports whether both endpoints share (tolerantly) the same X.
func (s *Segment) IsVertical() bool {
	return Equal(s.Start.X, s.End.X)
}
// Solve the line (ignoring the bounds) for the given y value
// Horizontal segments have no unique answer and abort via fatalf; vertical
// segments short-circuit to the shared X since their slope is undefined.
func (s *Segment) SolveForX(y float64) float64 {
	if s.IsHorizontal() {
		fatalf("cannot solve for X on a horizontal segment")
	}
	if s.IsVertical() { // Special case; no slope
		return s.Start.X
	}
	// y = m*x + b, solved for x.
	m := (s.End.Y - s.Start.Y) / (s.End.X - s.Start.X)
	b := s.Start.Y - m*s.Start.X
	return (y - b) / m
}
// Normalize returns the unit vector pointing in the same direction as v.
// The length is computed once and reused for both components (the original
// called Length(), and therefore math.Sqrt, once per component).
// NOTE(review): a zero vector yields NaN components — callers appear to pass
// non-degenerate direction vectors; confirm before hardening.
func (v Vector) Normalize() Vector {
	length := v.Length()
	return Vector{
		X: v.X / length,
		Y: v.Y / length,
	}
}

// Length returns the Euclidean length of v.
func (v Vector) Length() float64 {
	return math.Sqrt(v.X*v.X + v.Y*v.Y)
}
// Helper mostly used in tests. Converts the triangles into generic polygons.
func (triangles TriangleList) ToPolygonList() PolygonList {
polyList := make(PolygonList, len(triangles))
for i, tri := range triangles {
polyList[i] = Polygon{Points: []*Point{tri.A, tri.B, tri.C}}
}
return polyList
} | internal/util.go | 0.870831 | 0.639427 | util.go | starcoder |
package techan
import "github.com/sdcoffey/big"
// Rule is an interface describing an algorithm by which a set of criteria may
// be satisfied.
type Rule interface {
	IsSatisfied(index int, record *TradingRecord) bool
}

// And returns a rule satisfied only when BOTH of the passed-in rules are
// satisfied.
func And(r1, r2 Rule) Rule {
	return andRule{r1: r1, r2: r2}
}

// Or returns a rule satisfied when AT LEAST ONE of the passed-in rules is
// satisfied.
func Or(r1, r2 Rule) Rule {
	return orRule{r1: r1, r2: r2}
}
// andRule is the conjunction of two rules.
type andRule struct {
	r1 Rule
	r2 Rule
}

// IsSatisfied returns true only when both underlying rules are satisfied;
// r2 is not evaluated when r1 fails (short-circuit, as before).
func (ar andRule) IsSatisfied(index int, record *TradingRecord) bool {
	if !ar.r1.IsSatisfied(index, record) {
		return false
	}
	return ar.r2.IsSatisfied(index, record)
}

// orRule is the disjunction of two rules.
type orRule struct {
	r1 Rule
	r2 Rule
}

// IsSatisfied returns true when either underlying rule is satisfied;
// r2 is not evaluated when r1 succeeds (short-circuit, as before).
func (or orRule) IsSatisfied(index int, record *TradingRecord) bool {
	if or.r1.IsSatisfied(index, record) {
		return true
	}
	return or.r2.IsSatisfied(index, record)
}
// OverIndicatorRule is a rule satisfied when the First indicator is strictly
// greater than the Second indicator.
type OverIndicatorRule struct {
	First  Indicator
	Second Indicator
}

// IsSatisfied returns true when First's value at index exceeds Second's.
func (oir OverIndicatorRule) IsSatisfied(index int, record *TradingRecord) bool {
	first := oir.First.Calculate(index)
	second := oir.Second.Calculate(index)
	return first.GT(second)
}

// UnderIndicatorRule is a rule satisfied when the First indicator is strictly
// less than the Second indicator.
type UnderIndicatorRule struct {
	First  Indicator
	Second Indicator
}

// IsSatisfied returns true when First's value at index is below Second's.
func (uir UnderIndicatorRule) IsSatisfied(index int, record *TradingRecord) bool {
	first := uir.First.Calculate(index)
	second := uir.Second.Calculate(index)
	return first.LT(second)
}
type percentChangeRule struct {
indicator Indicator
percent big.Decimal
}
func (pgr percentChangeRule) IsSatisfied(index int, record *TradingRecord) bool {
return pgr.indicator.Calculate(index).Abs().GT(pgr.percent.Abs())
}
// NewPercentChangeRule returns a rule whereby the given Indicator must have changed by a given percentage to be satisfied.
// You should specify percent as a float value between -1 and 1
func NewPercentChangeRule(indicator Indicator, percent float64) Rule {
return percentChangeRule{
indicator: NewPercentChangeIndicator(indicator),
percent: big.NewDecimal(percent),
}
} | rule.go | 0.810066 | 0.510069 | rule.go | starcoder |
package main
import (
"fmt"
"github.com/theatlasroom/advent-of-code/go/2019/utils"
"math"
"strconv"
)
/**
--- Day 1: The Tyranny of the Rocket Equation ---
Santa has become stranded at the edge of the Solar System while delivering presents to other planets!
To accurately calculate his position in space, safely align his warp drive,
and return to Earth in time to save Christmas, he needs you to bring him measurements from fifty stars.
Collect stars by solving puzzles. Two puzzles will be made available on each day in the Advent calendar;
the second puzzle is unlocked when you complete the first. Each puzzle grants one star. Good luck!
The Elves quickly load you into a spacecraft and prepare to launch.
At the first Go / No Go poll, every Elf is Go until the Fuel Counter-Upper.
They haven't determined the amount of fuel required yet.
Fuel required to launch a given module is based on its mass. Specifically,
to find the fuel required for a module, take its mass, divide by three, round down, and subtract 2.
For example:
For a mass of 12, divide by 3 and round down to get 4, then subtract 2 to get 2.
For a mass of 14, dividing by 3 and rounding down still yields 4, so the fuel required is also 2.
For a mass of 1969, the fuel required is 654.
For a mass of 100756, the fuel required is 33583.
The Fuel Counter-Upper needs to know the total fuel requirement. To find it,
individually calculate the fuel needed for the mass of each module (your puzzle input),
then add together all the fuel values.
What is the sum of the fuel requirements for all of the modules on your spacecraft?
--- Part Two ---
During the second Go / No Go poll, the Elf in charge of the Rocket Equation Double-Checker stops the launch sequence. Apparently, you forgot to include additional fuel for the fuel you just added.
Fuel itself requires fuel just like a module - take its mass, divide by three, round down, and subtract 2. However, that fuel also requires fuel, and that fuel requires fuel, and so on. Any mass that would require negative fuel should instead be treated as if it requires zero fuel; the remaining mass, if any, is instead handled by wishing really hard, which has no mass and is outside the scope of this calculation.
So, for each module mass, calculate its fuel and add it to the total. Then, treat the fuel amount you just calculated as the input mass and repeat the process, continuing until a fuel requirement is zero or negative. For example:
A module of mass 14 requires 2 fuel. This fuel requires no further fuel (2 divided by 3 and rounded down is 0, which would call for a negative fuel), so the total fuel required is still just 2.
At first, a module of mass 1969 requires 654 fuel. Then, this fuel requires 216 more fuel (654 / 3 - 2). 216 then requires 70 more fuel, which requires 21 fuel, which requires 5 fuel, which requires no further fuel. So, the total fuel required for a module of mass 1969 is 654 + 216 + 70 + 21 + 5 = 966.
The fuel required by a module of mass 100756 and its fuel is: 33583 + 11192 + 3728 + 1240 + 411 + 135 + 43 + 12 + 2 = 50346.
What is the sum of the fuel requirements for all of the modules on your spacecraft when also taking into account the mass of the added fuel?
(Calculate the fuel requirements for each module separately, then add them all up at the end.)
*/
// divisor and offset implement the puzzle's fuel equation:
// fuel = floor(mass / 3) - 2.
const divisor = 3
const offset = 2

// hasFuel reports whether mass is large enough to require any fuel at all.
func hasFuel(mass float64) bool {
	return (mass - divisor - offset) > 0
}

// calculateFuel returns the fuel needed to launch the given mass:
// floor(mass/3) - 2, clamped to zero for masses too small to need fuel.
func calculateFuel(mass float64) float64 {
	if !hasFuel(mass) {
		return 0
	}
	return math.Floor(mass/divisor) - offset
}

// sumOfFuel returns the total fuel for a mass, also accounting for the fuel
// required to carry the fuel itself (part two of the puzzle): each fuel amount
// is fed back through calculateFuel until it bottoms out at zero.
func sumOfFuel(mass float64) float64 {
	total := 0.0
	for fuel := calculateFuel(mass); fuel > 0; fuel = calculateFuel(fuel) {
		total += fuel
	}
	return total
}
func main() {
var fuel, fuelSum float64
data := utils.LoadData("1.txt")
utils.Banner(1)
for _, str := range data {
mass, _ := strconv.ParseFloat(str, 64)
fuel += calculateFuel(mass)
fuelSum += sumOfFuel(mass)
}
fmt.Println("Fuel required", int(fuel))
fmt.Println("Sum of fuel required", int(fuelSum))
} | go/2019/2019_1.go | 0.661704 | 0.749866 | 2019_1.go | starcoder |
package composite
import (
"github.com/jrife/flock/storage/kv"
"github.com/jrife/flock/storage/kv/keys"
composite_keys "github.com/jrife/flock/storage/kv/keys/composite"
)
// NamespaceMapReader returns a map reader whose keys are all prefixed with ns.
func NamespaceMapReader(mr MapReader, ns [][]byte) MapReader {
	return &namespacedMap{ns: ns, reader: mr}
}

// NamespaceMapUpdater returns a map updater whose keys are all prefixed with ns.
func NamespaceMapUpdater(mu MapUpdater, ns [][]byte) MapUpdater {
	return &namespacedMap{ns: ns, updater: mu}
}

// NamespaceMap returns a map whose keys are all prefixed with ns.
func NamespaceMap(m Map, ns [][]byte) Map {
	return &namespacedMap{ns: ns, reader: m, updater: m}
}

// Namespace ensures that all keys referenced within a transaction
// are prefixed with ns.
func Namespace(txn Transaction, ns [][]byte) Transaction {
	return &namespacedTxn{Map: NamespaceMap(txn, ns), txn: txn}
}
// namespacedMap prefixes every key it touches with a fixed namespace before
// delegating to the wrapped reader/updater.
type namespacedMap struct {
	ns      composite_keys.Key
	reader  MapReader
	updater MapUpdater
}

// key prepends the namespace elements to k.
func (nsMap *namespacedMap) key(k composite_keys.Key) composite_keys.Key {
	full := make(composite_keys.Key, 0, len(nsMap.ns)+len(k))
	full = append(full, nsMap.ns...)
	return append(full, k...)
}

func (nsMap *namespacedMap) Put(key composite_keys.Key, value []byte) error {
	return nsMap.updater.Put(nsMap.key(key), value)
}

func (nsMap *namespacedMap) Delete(key composite_keys.Key) error {
	return nsMap.updater.Delete(nsMap.key(key))
}

func (nsMap *namespacedMap) Get(key composite_keys.Key) ([]byte, error) {
	return nsMap.reader.Get(nsMap.key(key))
}

// Keys lists keys matching keyRange within this namespace. Unrecognized sort
// orders fall back to ascending, as before.
func (nsMap *namespacedMap) Keys(keyRange composite_keys.Range, order kv.SortOrder) (Iterator, error) {
	if order != kv.SortOrderAsc && order != kv.SortOrderDesc {
		order = kv.SortOrderAsc
	}
	// Constrain each namespace element to an exact match, then append the
	// caller's range so it applies to the key's suffix.
	nsKeyRange := composite_keys.Range{}
	for _, e := range nsMap.ns {
		nsKeyRange = append(nsKeyRange, keys.All().Eq(e))
	}
	nsKeyRange = append(nsKeyRange, keyRange...)

	iter, err := nsMap.reader.Keys(nsKeyRange, order)
	if err != nil {
		return nil, err
	}
	return &namespacedIterator{iterator: iter, ns: nsMap.ns}, nil
}
// namespacedIterator strips the namespace prefix from keys produced by the
// wrapped iterator.
type namespacedIterator struct {
	iterator Iterator
	key      composite_keys.Key
	ns       composite_keys.Key
}

// Next advances the iterator, caching the current key with the namespace
// prefix removed. It returns false (and clears the key) once exhausted.
func (nsIter *namespacedIterator) Next() bool {
	if nsIter.iterator.Next() {
		// Strip the namespace prefix from the underlying key.
		nsIter.key = nsIter.iterator.Key()[len(nsIter.ns):]
		return true
	}
	nsIter.key = nil
	return false
}

// Key returns the current key relative to the namespace.
func (nsIter *namespacedIterator) Key() composite_keys.Key {
	return nsIter.key
}

// Value returns the current value from the wrapped iterator.
func (nsIter *namespacedIterator) Value() []byte {
	return nsIter.iterator.Value()
}

// Error returns any error from the wrapped iterator.
func (nsIter *namespacedIterator) Error() error {
	return nsIter.iterator.Error()
}
type namespacedTxn struct {
Map
txn Transaction
}
func (nsTxn *namespacedTxn) Metadata() ([]byte, error) {
return nsTxn.txn.Metadata()
}
func (nsTxn *namespacedTxn) SetMetadata(metadata []byte) error {
return nsTxn.txn.SetMetadata(metadata)
}
func (nsTxn *namespacedTxn) Commit() error {
return nsTxn.txn.Commit()
}
func (nsTxn *namespacedTxn) Rollback() error {
return nsTxn.txn.Rollback()
} | storage/kv/composite/namespace.go | 0.831006 | 0.506286 | namespace.go | starcoder |
package bitset
import "strings"
const bitsPerWord uint64 = 6 // log2 of the word size; shift amount for word indexing
const numOfBits uint64 = 64  // bits per uint64 word

// Bitset represents a fixed size sequence of N bits.
type Bitset struct {
	set       []uint64 // backing words, 64 bits each, bit p lives in word p>>6
	size      uint64   // total number of bits; always a multiple of 64
	trueCount uint64   // number of bits currently set to 1
}

// New given the desired number of bits, returns a new instance of a bitset.
// The capacity is rounded up to the next multiple of 64.
func New(bits uint64) *Bitset {
	n := (bits >> bitsPerWord) + 1
	return &Bitset{set: make([]uint64, n), size: n * numOfBits}
}

// wordIndex given a position calculates the word index in the bitset.
func (b *Bitset) wordIndex(p uint64) uint64 {
	return (p >> bitsPerWord)
}

// posIndex given a position and a word index, locates the target bit in that
// word.
func (b *Bitset) posIndex(p, n uint64) uint64 {
	return (p - (n * numOfBits))
}

// checkBounds panics when position n lies outside the bitset. Valid positions
// are 0..size-1. BUG FIX: the original tested `n > b.size`, which let
// n == size through and caused a raw out-of-range slice panic instead of the
// intended message.
func (b *Bitset) checkBounds(n uint64) {
	if n >= b.size {
		panic("index out of bounds")
	}
}

// Size returns the number of bits that the bitset can hold. The size is
// a multiple of 64.
func (b *Bitset) Size() uint64 {
	return b.size
}

// Set sets the bit at a given position to 1 (true). Setting an already-set
// bit is a no-op. BUG FIX: the original incremented trueCount on every call,
// double-counting repeated Sets of the same bit. Panics if p is out of bounds.
func (b *Bitset) Set(p uint64) {
	b.checkBounds(p)
	if b.Test(p) {
		return
	}
	n := b.wordIndex(p)
	b.set[n] |= (1 << b.posIndex(p, n))
	b.trueCount++
}

// Reset sets the bit at a given position to 0 (false). Resetting an already
// clear bit is a no-op. Panics if p is out of bounds.
func (b *Bitset) Reset(p uint64) {
	b.checkBounds(p)
	n := b.wordIndex(p)
	if b.Test(p) {
		b.set[n] &= ^(1 << b.posIndex(p, n))
		b.trueCount--
	}
}

// Test returns the value of the bit at a given position: true if it is 1,
// false if it is 0. Panics if p is out of bounds.
func (b *Bitset) Test(p uint64) bool {
	b.checkBounds(p)
	n := b.wordIndex(p)
	return b.set[n]&(1<<b.posIndex(p, n)) != 0
}

// Flip inverts the value of the bit at a given position and keeps trueCount
// in sync. Panics if p is out of bounds.
func (b *Bitset) Flip(p uint64) {
	b.checkBounds(p)
	n := b.wordIndex(p)
	b.set[n] ^= (1 << b.posIndex(p, n))
	if b.Test(p) {
		b.trueCount++
	} else {
		b.trueCount--
	}
}

// All tests if all the bits in the bitset are set to true.
// O(1): trueCount is maintained by Set/Reset/Flip.
func (b *Bitset) All() bool {
	return b.trueCount == b.size
}

// Any tests if any of the bits in the bitset are set to true. O(1).
func (b *Bitset) Any() bool {
	return b.trueCount > 0
}

// None tests if none of the bits in the bitset are set to true. O(1).
func (b *Bitset) None() bool {
	return b.trueCount == 0
}

// TrueCount returns the number of bits in the bitset set to 1 (true).
func (b *Bitset) TrueCount() uint64 {
	return b.trueCount
}

// Copy makes this bitset an exact independent copy of the argument bitset.
func (b *Bitset) Copy(a *Bitset) {
	b.set = make([]uint64, len(a.set))
	b.size = a.size
	b.trueCount = a.trueCount
	copy(b.set, a.set)
}

// String returns a string representation of the bitset: one '0' or '1' per
// bit, least-significant bit of each word first, so character p is bit p.
// The length of the string equals Size(). Matches the Stringer interface.
// BUG FIX: the original allocated the slice with make([]string, n*64) and
// then appended, leaving n*64 leading empty strings that only disappeared
// because Join used an empty separator; a Builder sized up front avoids that.
func (b *Bitset) String() string {
	var sb strings.Builder
	sb.Grow(int(b.size))
	for _, x := range b.set {
		for j := uint64(0); j < numOfBits; j++ {
			if x&(1<<j) != 0 {
				sb.WriteByte('1')
			} else {
				sb.WriteByte('0')
			}
		}
	}
	return sb.String()
}
package iso20022
// Security that is a sub-set of an investment fund, and is governed by the same investment fund policy, eg, dividend option or valuation currency.
type SecurityIdentification1 struct {
// Identification of a security by an ISIN.
Identification *SecurityIdentification7 `xml:"Id"`
// Name of the financial instrument in free format text.
Name *Max350Text `xml:"Nm"`
// Features of units offered by a fund. For example, a unit may have a specific load structure, eg, front end or back end, an income policy, eg, pay out or accumulate, or a trailer policy, eg, with or without. Fund classes are typically denoted by a single character, eg, 'Class A', 'Class 2'.
ClassType *Max35Text `xml:"ClssTp,omitempty"`
// Name of the umbrella fund in which financial instrument is contained.
UmbrellaName *Max35Text `xml:"UmbrllNm,omitempty"`
// Currency of the investment fund class.
BaseCurrency *ActiveCurrencyCode `xml:"BaseCcy"`
// Country where the fund has legal domicile as reflected in the ISIN classification.
CountryOfDomicile *CountryCode `xml:"CtryOfDmcl"`
// Countries where the fund is registered for distribution.
RegisteredDistributionCountry []*CountryCode `xml:"RegdDstrbtnCtry"`
}
func (s *SecurityIdentification1) AddIdentification() *SecurityIdentification7 {
s.Identification = new(SecurityIdentification7)
return s.Identification
}
func (s *SecurityIdentification1) SetName(value string) {
s.Name = (*Max350Text)(&value)
}
func (s *SecurityIdentification1) SetClassType(value string) {
s.ClassType = (*Max35Text)(&value)
}
func (s *SecurityIdentification1) SetUmbrellaName(value string) {
s.UmbrellaName = (*Max35Text)(&value)
}
func (s *SecurityIdentification1) SetBaseCurrency(value string) {
s.BaseCurrency = (*ActiveCurrencyCode)(&value)
}
func (s *SecurityIdentification1) SetCountryOfDomicile(value string) {
s.CountryOfDomicile = (*CountryCode)(&value)
}
func (s *SecurityIdentification1) AddRegisteredDistributionCountry(value string) {
s.RegisteredDistributionCountry = append(s.RegisteredDistributionCountry, (*CountryCode)(&value))
} | SecurityIdentification1.go | 0.810816 | 0.400984 | SecurityIdentification1.go | starcoder |
package ityped
import (
"github.com/mg/i"
"github.com/mg/i/itk"
)
// Typed adapter for a Forward iterator. Contains methods to acccess the
// value typed as any of the basic Go types.
type ForwardItr struct {
itk.WForward
}
// Wrap a Forward iterator in a structure that has typed methods to access
// the value of the iterator.
func Forward(itr i.Forward) *ForwardItr {
f := ForwardItr{}
f.WForward = *(itk.WrapForward(itr))
return &f
}
// Typed adapter for a Bounded At start iterator. Contains methods to acccess the
// value typed as any of the basic Go types.
type BoundedAtStartItr struct {
itk.WBoundedAtStart
}
// Wrap a Bounded At Start iterator in a structure that has typed methods to access
// the value of the iterator.
func BoundedAtStart(itr i.BoundedAtStart) *BoundedAtStartItr {
b := BoundedAtStartItr{}
b.WBoundedAtStart = *(itk.WrapBoundedAtStart(itr))
return &b
}
// Typed adapter for a BiDirecitonal iterator. Contains methods to acccess the
// value typed as any of the basic Go types.
type BiDirectionalItr struct {
itk.WBiDirectional
}
// Wrap a BiDirectional iterator in a structure that has typed methods to access
// the value of the iterator.
func BiDirectional(itr i.BiDirectional) *BiDirectionalItr {
b := BiDirectionalItr{}
b.WBiDirectional = *(itk.WrapBiDirectional(itr))
return &b
}
// Typed adapter for a Bounded iterator. Contains methods to acccess the
// value typed as any of the basic Go types.
type BoundedItr struct {
itk.WBounded
}
// Wrap a Bounded iterator in a structure that has typed methods to access
// the value of the iterator.
func Bounded(itr i.Bounded) *BoundedItr {
b := BoundedItr{}
b.WBounded = *(itk.WrapBounded(itr))
return &b
}
// Typed adapter for a Random Access iterator. Contains methods to acccess the
// value typed as any of the basic Go types.
type RandomAccessItr struct {
itk.WRandomAccess
}
// Wrap a Random Access iterator in a structure that has typed methods to access
// the value of the iterator.
func RandomAccess(itr i.RandomAccess) *RandomAccessItr {
r := RandomAccessItr{}
r.WRandomAccess = *(itk.WrapRandomAccess(itr))
return &r
} | ityped/adapt.go | 0.642432 | 0.475971 | adapt.go | starcoder |
package unityai
import (
"unsafe"
)
// NavMeshNodeFlags records a search node's membership state (new/open/closed).
type NavMeshNodeFlags uint8

const (
	kNew    NavMeshNodeFlags = 0x00 // node not yet visited
	kOpen   NavMeshNodeFlags = 0x01 // node in the open set
	kClosed NavMeshNodeFlags = 0x02 // node fully expanded
)

// NavMeshNode is one search node; nodes are stored contiguously in
// NavMeshNodePool and reference their parent by index rather than pointer.
type NavMeshNode struct {
	pos   Vector3f         // Position of the node.
	cost  float32          // Cost from previous node to current node.
	total float32          // Cost up to the node.
	pidx  uint32           // Index to parent node.
	flags NavMeshNodeFlags // NavMeshNode flags new/open/closed.
	id    NavMeshPolyRef   // Polygon ref the node corresponds to.
}

// NavMeshNodeIndex identifies a node within a pool's hash-bucket chains.
type NavMeshNodeIndex uint16

const (
	kNavMeshNodeNullIndex NavMeshNodeIndex = 0xffff // sentinel: end of a bucket chain
)

// NavMeshNodePool stores search nodes in a flat slice and locates them via a
// hash table: m_First holds the head node index per bucket and m_Next chains
// nodes that share a bucket.
type NavMeshNodePool struct {
	m_MaxNavMeshNodes int32
	m_HashSize        int32
	m_NavMeshNodeCount int32
	m_NavMeshNodes    []NavMeshNode
	m_First           []NavMeshNodeIndex
	m_Next            []NavMeshNodeIndex
}
// navMeshNodeSize is the byte size of one NavMeshNode, used for pointer
// arithmetic between elements of the pool's backing slice.
var navMeshNodeSize = unsafe.Sizeof(NavMeshNode{})

// GetNavMeshNodeIndex returns the element distance from start to end, where
// both must point into the same backing slice of NavMeshNode (byte offset
// divided by the node size).
func GetNavMeshNodeIndex(start, end *NavMeshNode) int32 {
	nstart := uintptr(unsafe.Pointer(start))
	nend := uintptr(unsafe.Pointer(end))
	return int32((nend - nstart) / navMeshNodeSize)
}

// GetNodeIdx converts a node pointer into its 1-based pool index; 0 encodes
// the nil node.
func (this *NavMeshNodePool) GetNodeIdx(node *NavMeshNode) uint32 {
	if node == nil {
		return 0
	}
	// +1 because index 0 is reserved for "no node".
	idx := GetNavMeshNodeIndex(&this.m_NavMeshNodes[0], node) + 1
	Assert(idx <= this.m_MaxNavMeshNodes)
	return uint32(idx)
}

// GetNodeAtIdx is the inverse of GetNodeIdx: it resolves a 1-based index to a
// node pointer, with 0 mapping to nil.
func (this *NavMeshNodePool) GetNodeAtIdx(idx uint32) *NavMeshNode {
	Assert(idx <= uint32(this.m_MaxNavMeshNodes))
	if idx == 0 {
		return nil
	}
	return &this.m_NavMeshNodes[idx-1]
}

// GetHashSize returns the number of hash buckets.
func (this *NavMeshNodePool) GetHashSize() int32 {
	return this.m_HashSize
}

// GetFirst returns the head node index of the given hash bucket.
func (this *NavMeshNodePool) GetFirst(bucket int32) NavMeshNodeIndex {
	return this.m_First[bucket]
}

// GetNext returns the next node index in the same bucket chain as node i.
func (this *NavMeshNodePool) GetNext(i int32) NavMeshNodeIndex {
	return this.m_Next[i]
}
type NavMeshNodeQueue struct {
m_Heap []*NavMeshNode
m_Size int32
}
func (this *NavMeshNodeQueue) empty() bool {
return this.m_Size == 0
}
func (this *NavMeshNodeQueue) Clear() {
this.m_Size = 0
}
func (this *NavMeshNodeQueue) Pop() *NavMeshNode {
result := this.m_Heap[0]
this.m_Size--
this.TrickleDown(0, this.m_Heap[this.m_Size])
return result
}
func (this *NavMeshNodeQueue) Push(node *NavMeshNode) {
this.m_Size++
this.BubbleUp(this.m_Size-1, node)
}
func (this *NavMeshNodeQueue) Modify(node *NavMeshNode) {
for i := int32(0); i < this.m_Size; i++ {
if this.m_Heap[i] == node {
this.BubbleUp(i, node)
return
}
}
} | nav_mesh_node.go | 0.604749 | 0.427397 | nav_mesh_node.go | starcoder |
package data
// A FrameType string, when present in a frame's metadata, asserts that the
// frame's structure conforms to the FrameType's specification.
// This property is currently optional, so FrameType may be FrameTypeUnknown even if the properties of
// the Frame correspond to a defined FrameType.
type FrameType string
const (
// FrameTypeUnknown indicates that we do not know the field type
FrameTypeUnknown FrameType = ""
// FrameTypeTimeSeriesWide has at least two fields:
// field[0]:
// * type time
// * unique ascending values
// field[1..n]:
// * distinct labels may be attached to each field
// * numeric & boolean fields can be drawn as lines on a graph
// See https://grafana.com/docs/grafana/latest/developers/plugins/data-frames/#wide-format
FrameTypeTimeSeriesWide = "timeseries-wide"
// FrameTypeTimeSeriesLong uses string fields to define dimensions. I has at least two fields:
// field[0]:
// * type time
// * ascending values
// * duplicate times exist for multiple dimensions
// field[1..n]:
// * string fields define series dimensions
// * non-string fields define the series progression
// See https://grafana.com/docs/grafana/latest/developers/plugins/data-frames/#long-format
FrameTypeTimeSeriesLong = "timeseries-long"
// FrameTypeTimeSeriesMany is the same as "Wide" with exactly one numeric value field
// field[0]:
// * type time
// * ascending values
// field[1]:
// * number field
// * labels represent the series dimensions
// This structure is typically part of a list of frames with the same structure
FrameTypeTimeSeriesMany = "timeseries-many"
// Soon?
// "timeseries-wide-ohlc" -- known fields for open/high/low/close
// "histogram" -- BucketMin, BucketMax, values...
// "trace" -- ??
// "node-graph-nodes"
// "node-graph-edges"
// FrameTypeDirectoryListing represents the items in a directory
// field[0]:
// * name
// * new paths can be constructed from the parent path + separator + name
// field[1]:
// * media-type
// * when "directory" it can be nested
FrameTypeDirectoryListing = "directory-listing"
// FrameTypeTable represents an arbitrary table structure with no constraints
FrameTypeTable = "table"
// FrameTypeTableFooter may exist next to FrameTypeTable and represent data that
// should show up in the footer section. It must have the same width, but not
// necessarily same field types as the sibling table data
FrameTypeTableFooter = "table-footer"
)
// IsKnownType checks if the value is a known structure
func (p FrameType) IsKnownType() bool {
switch p {
case
FrameTypeTimeSeriesWide,
FrameTypeTimeSeriesLong,
FrameTypeTimeSeriesMany:
return true
}
return false
}
// FrameTypes returns a slice of all known frame types
func FrameTypes() []FrameType {
return []FrameType{
FrameTypeTimeSeriesWide,
FrameTypeTimeSeriesLong,
FrameTypeTimeSeriesMany,
}
}
// IsTimeSeries checks if the type represents a timeseries
func (p FrameType) IsTimeSeries() bool {
switch p {
case
FrameTypeTimeSeriesWide,
FrameTypeTimeSeriesLong,
FrameTypeTimeSeriesMany:
return true
}
return false
} | data/frame_type.go | 0.780704 | 0.537223 | frame_type.go | starcoder |
package v1
import (
"context"
"reflect"
"github.com/pkg/errors"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
// Creates and returns a new ReferenceImage resource. The `bounding_poly` field is optional. If `bounding_poly` is not specified, the system will try to detect regions of interest in the image that are compatible with the product_category on the parent product. If it is specified, detection is ALWAYS skipped. The system converts polygons into non-rotated rectangles. Note that the pipeline will resize the image if the image resolution is too large to process (above 50MP). Possible errors: * Returns INVALID_ARGUMENT if the image_uri is missing or longer than 4096 characters. * Returns INVALID_ARGUMENT if the product does not exist. * Returns INVALID_ARGUMENT if bounding_poly is not provided, and nothing compatible with the parent product's product_category is detected. * Returns INVALID_ARGUMENT if bounding_poly contains more than 10 polygons.
type ReferenceImage struct {
	// CustomResourceState carries the Pulumi-managed resource identity/state.
	pulumi.CustomResourceState

	// Optional. Bounding polygons around the areas of interest in the reference image. If this field is empty, the system will try to detect regions of interest. At most 10 bounding polygons will be used. The provided shape is converted into a non-rotated rectangle. Once converted, the small edge of the rectangle must be greater than or equal to 300 pixels. The aspect ratio must be 1:4 or less (i.e. 1:3 is ok; 1:5 is not).
	BoundingPolys BoundingPolyResponseArrayOutput `pulumi:"boundingPolys"`
	// The resource name of the reference image. Format is: `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID/referenceImages/IMAGE_ID`. This field is ignored when creating a reference image.
	Name pulumi.StringOutput `pulumi:"name"`
	// The Google Cloud Storage URI of the reference image. The URI must start with `gs://`.
	Uri pulumi.StringOutput `pulumi:"uri"`
}
// NewReferenceImage registers a new resource with the given unique name,
// arguments, and options. ProductId and Uri are required arguments.
func NewReferenceImage(ctx *pulumi.Context,
	name string, args *ReferenceImageArgs, opts ...pulumi.ResourceOption) (*ReferenceImage, error) {
	if args == nil {
		return nil, errors.New("missing one or more required arguments")
	}
	switch {
	case args.ProductId == nil:
		return nil, errors.New("invalid value for required argument 'ProductId'")
	case args.Uri == nil:
		return nil, errors.New("invalid value for required argument 'Uri'")
	}
	var resource ReferenceImage
	if err := ctx.RegisterResource("google-native:vision/v1:ReferenceImage", name, args, &resource, opts...); err != nil {
		return nil, err
	}
	return &resource, nil
}
// GetReferenceImage gets an existing ReferenceImage resource's state with the given name, ID, and optional
// state properties that are used to uniquely qualify the lookup (nil if not required).
func GetReferenceImage(ctx *pulumi.Context,
	name string, id pulumi.IDInput, state *ReferenceImageState, opts ...pulumi.ResourceOption) (*ReferenceImage, error) {
	resource := &ReferenceImage{}
	if err := ctx.ReadResource("google-native:vision/v1:ReferenceImage", name, id, state, resource, opts...); err != nil {
		return nil, err
	}
	return resource, nil
}
// Input properties used for looking up and filtering ReferenceImage resources.
// referenceImageState is the weakly-typed shape; this resource currently
// exposes no lookup state fields.
type referenceImageState struct {
}

// ReferenceImageState is the strongly-typed (currently empty) state used when
// looking up an existing ReferenceImage.
type ReferenceImageState struct {
}

// ElementType returns the reflected element type used by the Pulumi runtime.
func (ReferenceImageState) ElementType() reflect.Type {
	return reflect.TypeOf((*referenceImageState)(nil)).Elem()
}
// referenceImageArgs is the weakly-typed argument shape mirrored by
// ReferenceImageArgs below.
type referenceImageArgs struct {
	// Optional. Bounding polygons around the areas of interest in the reference image. If this field is empty, the system will try to detect regions of interest. At most 10 bounding polygons will be used. The provided shape is converted into a non-rotated rectangle. Once converted, the small edge of the rectangle must be greater than or equal to 300 pixels. The aspect ratio must be 1:4 or less (i.e. 1:3 is ok; 1:5 is not).
	BoundingPolys []BoundingPoly `pulumi:"boundingPolys"`
	Location      *string        `pulumi:"location"`
	// The resource name of the reference image. Format is: `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID/referenceImages/IMAGE_ID`. This field is ignored when creating a reference image.
	Name             *string `pulumi:"name"`
	ProductId        string  `pulumi:"productId"`
	Project          *string `pulumi:"project"`
	ReferenceImageId *string `pulumi:"referenceImageId"`
	// The Google Cloud Storage URI of the reference image. The URI must start with `gs://`.
	Uri string `pulumi:"uri"`
}

// The set of arguments for constructing a ReferenceImage resource.
type ReferenceImageArgs struct {
	// Optional. Bounding polygons around the areas of interest in the reference image. If this field is empty, the system will try to detect regions of interest. At most 10 bounding polygons will be used. The provided shape is converted into a non-rotated rectangle. Once converted, the small edge of the rectangle must be greater than or equal to 300 pixels. The aspect ratio must be 1:4 or less (i.e. 1:3 is ok; 1:5 is not).
	BoundingPolys BoundingPolyArrayInput
	Location      pulumi.StringPtrInput
	// The resource name of the reference image. Format is: `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID/referenceImages/IMAGE_ID`. This field is ignored when creating a reference image.
	Name             pulumi.StringPtrInput
	ProductId        pulumi.StringInput
	Project          pulumi.StringPtrInput
	ReferenceImageId pulumi.StringPtrInput
	// The Google Cloud Storage URI of the reference image. The URI must start with `gs://`.
	Uri pulumi.StringInput
}

// ElementType returns the reflected element type used by the Pulumi runtime.
func (ReferenceImageArgs) ElementType() reflect.Type {
	return reflect.TypeOf((*referenceImageArgs)(nil)).Elem()
}
// ReferenceImageInput is satisfied by types usable where a ReferenceImage
// value is expected.
type ReferenceImageInput interface {
	pulumi.Input

	ToReferenceImageOutput() ReferenceImageOutput
	ToReferenceImageOutputWithContext(ctx context.Context) ReferenceImageOutput
}

// ElementType returns the reflected type (*ReferenceImage) for the Pulumi
// type system.
func (*ReferenceImage) ElementType() reflect.Type {
	return reflect.TypeOf((**ReferenceImage)(nil)).Elem()
}

// ToReferenceImageOutput converts the resource to an output using the
// background context.
func (i *ReferenceImage) ToReferenceImageOutput() ReferenceImageOutput {
	return i.ToReferenceImageOutputWithContext(context.Background())
}

// ToReferenceImageOutputWithContext converts the resource to an output with
// the given context.
func (i *ReferenceImage) ToReferenceImageOutputWithContext(ctx context.Context) ReferenceImageOutput {
	return pulumi.ToOutputWithContext(ctx, i).(ReferenceImageOutput)
}
// ReferenceImageOutput is the Pulumi output type for ReferenceImage values.
type ReferenceImageOutput struct{ *pulumi.OutputState }

// ElementType returns the reflected type (*ReferenceImage).
func (ReferenceImageOutput) ElementType() reflect.Type {
	return reflect.TypeOf((**ReferenceImage)(nil)).Elem()
}

// ToReferenceImageOutput returns the output itself.
func (o ReferenceImageOutput) ToReferenceImageOutput() ReferenceImageOutput {
	return o
}

// ToReferenceImageOutputWithContext returns the output itself; no context is
// needed for an already-typed output.
func (o ReferenceImageOutput) ToReferenceImageOutputWithContext(ctx context.Context) ReferenceImageOutput {
	return o
}
func init() {
pulumi.RegisterInputType(reflect.TypeOf((*ReferenceImageInput)(nil)).Elem(), &ReferenceImage{})
pulumi.RegisterOutputType(ReferenceImageOutput{})
} | sdk/go/google/vision/v1/referenceImage.go | 0.849113 | 0.41834 | referenceImage.go | starcoder |
package colors
import "bytes"
// Bold returns in wrapped in the ANSI start/stop codes for bold text.
func Bold(in string) string {
	return "\x1b[1m" + in + "\x1b[22m"
}

// Italic returns in wrapped in the ANSI start/stop codes for italic text.
func Italic(in string) string {
	return "\x1b[3m" + in + "\x1b[23m"
}

// Underline returns in wrapped in the ANSI start/stop codes for underlined text.
func Underline(in string) string {
	return "\x1b[4m" + in + "\x1b[24m"
}

// Inverse returns in wrapped in the ANSI codes that swap the foreground and
// background colors.
func Inverse(in string) string {
	return "\x1b[7m" + in + "\x1b[27m"
}

// Strikethrough returns in wrapped in the ANSI start/stop codes for
// struck-through text.
func Strikethrough(in string) string {
	return "\x1b[9m" + in + "\x1b[29m"
}
// Black returns in wrapped in the ANSI codes for a black foreground.
func Black(in string) string {
	return "\x1b[30m" + in + "\x1b[39m"
}

// Red returns in wrapped in the ANSI codes for a red foreground.
func Red(in string) string {
	return "\x1b[31m" + in + "\x1b[39m"
}

// Green returns in wrapped in the ANSI codes for a green foreground.
func Green(in string) string {
	return "\x1b[32m" + in + "\x1b[39m"
}

// Yellow returns in wrapped in the ANSI codes for a yellow foreground.
func Yellow(in string) string {
	return "\x1b[33m" + in + "\x1b[39m"
}

// Blue returns in wrapped in the ANSI codes for a blue foreground.
func Blue(in string) string {
	return "\x1b[34m" + in + "\x1b[39m"
}

// Magenta returns in wrapped in the ANSI codes for a magenta foreground.
func Magenta(in string) string {
	return "\x1b[35m" + in + "\x1b[39m"
}

// Cyan returns in wrapped in the ANSI codes for a cyan foreground.
func Cyan(in string) string {
	return "\x1b[36m" + in + "\x1b[39m"
}

// White returns in wrapped in the ANSI codes for a white foreground.
func White(in string) string {
	return "\x1b[37m" + in + "\x1b[39m"
}
// BgBlack returns in wrapped in the ANSI codes for a black background.
func BgBlack(in string) string {
	return "\x1b[40m" + in + "\x1b[49m"
}

// BgRed returns in wrapped in the ANSI codes for a red background.
func BgRed(in string) string {
	return "\x1b[41m" + in + "\x1b[49m"
}

// BgGreen returns in wrapped in the ANSI codes for a green background.
func BgGreen(in string) string {
	return "\x1b[42m" + in + "\x1b[49m"
}

// BgYellow returns in wrapped in the ANSI codes for a yellow background.
func BgYellow(in string) string {
	return "\x1b[43m" + in + "\x1b[49m"
}

// BgBlue returns in wrapped in the ANSI codes for a blue background.
func BgBlue(in string) string {
	return "\x1b[44m" + in + "\x1b[49m"
}

// BgMagenta returns in wrapped in the ANSI codes for a magenta background.
func BgMagenta(in string) string {
	return "\x1b[45m" + in + "\x1b[49m"
}

// BgCyan returns in wrapped in the ANSI codes for a cyan background.
func BgCyan(in string) string {
	return "\x1b[46m" + in + "\x1b[49m"
}

// BgWhite returns in wrapped in the ANSI codes for a white background.
func BgWhite(in string) string {
	return "\x1b[47m" + in + "\x1b[49m"
}
// stylize wraps text in the provided ANSI start/stop codes.
// The buffer is pre-sized to avoid intermediate growth allocations; the
// original's closing line also carried stray dataset residue, removed here.
func stylize(style string, text string, closing string) string {
	var buffer bytes.Buffer
	buffer.Grow(len(style) + len(text) + len(closing))
	buffer.WriteString(style)
	buffer.WriteString(text)
	buffer.WriteString(closing)
	return buffer.String()
}
package bigint
import (
"math/big"
)
// zero is the shared sentinel compared against in IsZero; it must never be
// mutated.
var zero = big.NewInt(0)

// Comparison results returned by Cmp, mirroring big.Int.Cmp, plus a sentinel
// for nil operands.
const (
	cmpLt  int = -1  // x < y
	cmpEq  int = 0   // x == y
	cmpGt  int = 1   // x > y
	cmpNil int = -42 // returned when either operand is nil
)
// Add returns the result of adding the values of params x and y.
// Notes:
//   - If x == nil && y == nil, returns nil.
//   - If x != nil && y == nil, returns x (no copy is made).
//   - If x == nil && y != nil, returns y (no copy is made).
func Add(x, y *big.Int) *big.Int {
	switch {
	case x == nil && y == nil:
		return nil
	case x == nil:
		return y
	case y == nil:
		return x
	}
	return new(big.Int).Add(x, y)
}
// Sub returns the result of subtracting the value of param y from the value of param x.
// Notes:
// - If x == nil, returns nil.
// - If x != nil && y == nil, returns x.
func Sub(x, y *big.Int) *big.Int {
if x == nil {
return nil
} else if x != nil && y == nil {
return x
}
return newBigInt().Sub(x, y)
}
// Mul returns the result of multiplying the values of params x and y.
// Notes:
// - If x == nil, returns nil.
// - If x != nil && y == nil, returns x.
func Mul(x, y *big.Int) *big.Int {
if x == nil {
return nil
} else if x != nil && y == nil {
return x
}
return newBigInt().Mul(x, y)
}
// Div returns the result of dividing the value of x by the value of y.
// Notes:
// - If x == nil, returns nil.
// - If x != nil && y == nil, returns x.
func Div(x, y *big.Int) *big.Int {
if x == nil {
return nil
} else if x != nil && y == nil {
return x
}
return newBigInt().Quo(x, y)
}
// Cmp wraps the Int.Cmp() function, returning the same values as the function it wraps:
//   - -1 if x < y
//   - 0 if x == y
//   - 1 if x > y
//
// Note: returns cmpNil (-42) if either x or y == nil. (The previous comment
// claimed -2, which did not match the code.)
func Cmp(x, y *big.Int) int {
	if x == nil || y == nil {
		return cmpNil
	}
	return x.Cmp(y)
}
// Eq returns true if x == y.
// Returns false if either x or y == nil.
func Eq(x, y *big.Int) bool {
	return Cmp(x, y) == cmpEq
}

// Lt returns true if x < y.
// Returns false if either x or y == nil.
func Lt(x, y *big.Int) bool {
	return Cmp(x, y) == cmpLt
}

// Lte returns true if x <= y.
// Returns false if either x or y == nil.
func Lte(x, y *big.Int) bool {
	return Lt(x, y) || Eq(x, y)
}

// Gt returns true if x > y.
// Returns false if either x or y == nil.
func Gt(x, y *big.Int) bool {
	return Cmp(x, y) == cmpGt
}

// Gte returns true if x >= y.
// Returns false if either x or y == nil.
func Gte(x, y *big.Int) bool {
	return Gt(x, y) || Eq(x, y)
}

// IsZero returns true if n == 0.
// Returns false if n == nil. (The previous comment said "returns nil", which
// is impossible for a bool result.)
func IsZero(n *big.Int) bool {
	return Eq(zero, n)
}
// FromInt64 returns a new *big.Int with its value set to n.
func FromInt64(n int64) *big.Int {
	return big.NewInt(n)
}

// FromUint64 returns a new *big.Int with its value set to n.
func FromUint64(n uint64) *big.Int {
	return new(big.Int).SetUint64(n)
}

// newBigInt returns a freshly allocated zero-valued big.Int.
func newBigInt() *big.Int {
	return new(big.Int)
}
package axes
// label.go contains code that calculates the positions of labels on the axes.
import (
"fmt"
"image"
"github.com/mum4k/termdash/align"
"github.com/mum4k/termdash/private/alignfor"
)
// Label is one text label on an axis.
type Label struct {
	// Text is the label content.
	Text string

	// Pos is the position of the label within the canvas.
	Pos image.Point
}
// yLabels returns labels that should be placed next to the Y axis.
// labelWidth is the width of the area from the left-most side of the canvas
// up to (excluding) the Y axis, where the labels are placed and aligned.
// Labels are returned with Y coordinates in ascending order; Y grows down.
func yLabels(graphHeight, labelWidth int, stringLabels []string) ([]*Label, error) {
	const minHeight = 2
	if graphHeight < minHeight {
		return nil, fmt.Errorf("cannot place labels on a canvas with height %d, minimum is %d", graphHeight, minHeight)
	}
	const minWidth = 0
	if labelWidth < minWidth {
		return nil, fmt.Errorf("cannot place labels in label area width %d, minimum is %d", labelWidth, minWidth)
	}
	var labels []*Label
	for row, text := range stringLabels {
		l, err := rowLabel(row, text, labelWidth)
		if err != nil {
			return nil, err
		}
		labels = append(labels, l)
	}
	return labels, nil
}
// rowLabel returns the label for the specified row (its Y coordinate,
// growing down), right-aligned within the label area of width labelWidth.
func rowLabel(row int, label string, labelWidth int) (*Label, error) {
	// The rectangle available for this single row of label text.
	area := image.Rect(0, row, labelWidth, row+1)
	pos, err := alignfor.Text(area, label, align.HorizontalRight, align.VerticalMiddle)
	if err != nil {
		return nil, fmt.Errorf("unable to align the label value: %v", err)
	}
	return &Label{Text: label, Pos: pos}, nil
}
// xLabels returns labels that should be placed under the X axis.
// Labels are returned with X coordinates in ascending order; X grows right.
// yEnd is the bottom end point of the Y axis; labels start just right of it.
// LongestString and paddedLabelLength (declared elsewhere in this package)
// determine the uniform padded width of each rendered label.
func xLabels(yEnd image.Point, graphWidth int, stringLabels []string, cellWidth int) ([]*Label, error) {
	var ret []*Label
	// length is the padded on-canvas width for each label; index is the
	// position in stringLabels of the first label to render.
	length, index := paddedLabelLength(graphWidth, LongestString(stringLabels), cellWidth)
	for x := yEnd.X + 1; x <= graphWidth && index < len(stringLabels); x += length {
		// Center the label inside its padded slot.
		ar := image.Rect(x, yEnd.Y, x+length, yEnd.Y+1)
		pos, err := alignfor.Text(ar, stringLabels[index], align.HorizontalCenter, align.VerticalMiddle)
		if err != nil {
			return nil, fmt.Errorf("unable to align the label value: %v", err)
		}
		l := &Label{
			Text: stringLabels[index],
			Pos:  pos,
		}
		// Skip ahead by the number of data columns covered by one slot.
		index += length / cellWidth
		ret = append(ret, l)
	}
	return ret, nil
}
// paddedLabelLength calculates the length of the padded label and the column
// index corresponding to the label.
// For example, if the longest label ("12:34") has length 5 and the cell width
// is 3, a label is shown every three cells: it belongs to the middle column
// of the three, and the padded length is 3*3 = 9.
// Returns (0, 0) when no suitable padding fits the graph width.
func paddedLabelLength(graphWidth, longest, cellWidth int) (l, index int) {
	for i := longest/cellWidth + 1; i < graphWidth/cellWidth; i++ {
		length := i * cellWidth
		// The label must center exactly, i.e. the slack must be even.
		if (length-longest)%2 != 0 {
			continue
		}
		return length, i / 2
	}
	return 0, 0
}
package main
import (
"math"
"sort"
"strconv"
"strings"
)
// Sum returns the total of all values in p (0 for an empty slice).
func Sum(p []int64) (a int64) {
	for _, v := range p {
		a += v
	}
	return a
}

// Mean returns the arithmetic mean of p, or 0 when p is empty.
func Mean(p []int64) (a float64) {
	n := len(p)
	if n > 0 {
		a = float64(Sum(p)) / float64(n)
	}
	return a
}
// Roundf formats f rounded to two decimal places, trimming trailing zeros
// (strconv precision -1).
func Roundf(f float64) string {
	rounded := RoundFloat(f, 2)
	return strconv.FormatFloat(rounded, 'f', -1, 64)
}
// Median returns the middle value of p (the mean of the two middle values
// for even lengths), or 0 when p is empty.
// NOTE(review): this indexes the middle element(s) directly, so it assumes p
// is already sorted — confirm with callers.
func Median(p []int64) float64 {
	n := len(p)
	switch {
	case n == 0:
		return 0
	case n%2 == 1:
		return float64(p[n/2])
	default:
		mid := n / 2
		return float64(p[mid-1]+p[mid]) / 2
	}
}
// First returns the first element of p, or 0 when p is empty.
func First(p []int64) int64 {
	if len(p) > 0 {
		return p[0]
	}
	return 0
}

// Last returns the final element of p, or 0 when p is empty.
func Last(p []int64) int64 {
	if n := len(p); n > 0 {
		return p[n-1]
	}
	return 0
}
// PrettyRound formats i as a short human-readable magnitude string with one
// decimal digit, e.g. 1500 -> "1.5K", 2500000 -> "2.5M", 1500000000 -> "1.5B".
// Values below 1000 are returned as the plain number; the previous
// implementation rendered every such value as "0".
func PrettyRound(i float64) string {
	var suffix string
	var x float64
	switch {
	case i >= 1000000000:
		suffix = "B"
		x = i / 100000000
	case i >= 1000000:
		suffix = "M"
		x = i / 100000
	case i >= 1000:
		suffix = "K"
		x = i / 100
	default:
		// Small values: show as-is instead of collapsing to "0".
		return strconv.FormatFloat(i, 'f', -1, 64)
	}
	// math.Round matches RoundFloat(x, 0) for the positive values reachable
	// here (halves round up / away from zero).
	return strconv.FormatFloat(math.Round(x)/10, 'f', -1, 64) + suffix
}
// Itof converts an int64 to float64 (precision may be lost for magnitudes
// above 2^53).
func Itof(i int64) float64 {
	return float64(i)
}
// NiceNum finds a "nice" number approximately equal to x: the mantissa is
// snapped to 1, 2, 5 or 10. With round true the nearest nice value is chosen;
// otherwise the ceiling is taken. Described in Graphics Gems, p. 63.
func NiceNum(x float64, round bool) float64 {
	exp := int(math.Floor(math.Log10(x)))
	frac := x / math.Pow10(exp)
	var nice float64
	if round {
		switch {
		case frac < 1.5:
			nice = 1
		case frac < 3:
			nice = 2
		case frac < 7:
			nice = 5
		default:
			nice = 10
		}
	} else {
		switch {
		case frac <= 1:
			nice = 1
		case frac <= 2:
			nice = 2
		case frac <= 5:
			nice = 5
		default:
			nice = 10
		}
	}
	return nice * math.Pow10(exp)
}
// RoundFloat rounds x to prec decimal places. Halves round toward positive
// infinity in terms of the fractional test used (2.5 -> 3, -2.5 -> -2).
func RoundFloat(x float64, prec int) float64 {
	pow := math.Pow(10, float64(prec))
	scaled := x * pow
	_, frac := math.Modf(scaled)
	limit := 0.5
	if frac < 0.0 {
		limit = -0.5
	}
	if frac >= limit {
		return math.Ceil(scaled) / pow
	}
	return math.Floor(scaled) / pow
}

// RoundInt rounds n to the nearest integer using RoundFloat semantics.
func RoundInt(n float64) int64 {
	return int64(RoundFloat(n, 0))
}

// BarHeight returns n expressed as a rounded percentage of max.
func BarHeight(max int64, n int64) int {
	return int(RoundInt(100 / float64(max) * float64(n)))
}

// labelHeight is the vertical space (in percent) reserved for a bar label.
const labelHeight = 20

// BarMarginPadding returns the padding (in percent) above a bar of height n,
// leaving room for the label when the bar is too short to contain it.
func BarMarginPadding(n int) int {
	switch {
	case n > 99:
		return 0
	case n < 20:
		return 100 - n - labelHeight
	default:
		return 100 - n
	}
}

// BarMarginHeight returns the extra margin height used for short bars whose
// label does not fit inside the bar itself.
func BarMarginHeight(n int) int {
	if n < 20 {
		return labelHeight
	}
	return 0
}

// BarMarginLabel reports whether a bar of height n can carry its label
// inside the bar (true) rather than in the margin (false).
func BarMarginLabel(n int) bool {
	return n >= 20
}
// ValueBucket describes one histogram bucket: its display name and its
// inclusive lower bound.
type ValueBucket struct {
	Name string
	Min  int64
}

// ValueBuckets is an ordered list of buckets.
// NOTE(review): Aggregate's bucket selection appears to assume ascending Min
// values — confirm with callers.
type ValueBuckets []ValueBucket
// Aggregate counts how many of the given values fall into each bucket and
// returns the per-bucket counts together with the largest single count.
// A value is credited to the last bucket whose Min it reaches (assuming
// ascending Min values). BucketedInts / BucketedInt are declared elsewhere
// in this file.
func (buckets ValueBuckets) Aggregate(values []int64) BucketedInts {
	// agg maps bucket name -> number of values assigned to it.
	agg := make(map[string]int64)
	incr := func(s string) {
		if _, ok := agg[s]; !ok {
			agg[s] = 0
		}
		agg[s] += 1
	}
	for _, x := range values {
		for i, b := range buckets {
			if x >= b.Min {
				// Skip this bucket when the value also qualifies for the next one.
				if i < len(buckets)-1 && x >= buckets[i+1].Min {
					continue
				}
				incr(b.Name)
			}
		}
	}
	// Build the result in bucket declaration order (map iteration order is
	// random), tracking the maximum count seen.
	res := BucketedInts{Buckets: make([]BucketedInt, 0, len(agg))}
	for _, b := range buckets {
		if c, ok := agg[b.Name]; ok {
			if c > res.Max {
				res.Max = c
			}
			res.Buckets = append(res.Buckets, BucketedInt{b.Name, c})
		}
	}
	return res
}
// Buckets parses human-readable bucket names such as "<1k", "1k - 2k",
// "500", ">1m" into ValueBuckets with numeric lower bounds. A trailing 'k'
// or 'm' (also directly before a " - " range separator) scales by 1e3 / 1e6;
// "<..." maps to Min 0 and ">N" maps to Min N.
// NOTE(review): only the text before " - " is parsed; the upper bound is
// implied by the next bucket's Min. Empty names would panic on n[0] — assumed
// never passed.
func Buckets(names ...string) ValueBuckets {
	b := make([]ValueBucket, len(names))
	for i, n := range names {
		// x is the magnitude multiplier, min the parsed lower bound.
		var x, min float64 = 1, 0
		j := strings.Index(n, " - ")
		if j != -1 && n[j-1] == 'k' {
			// Multiplier suffix sits just before the range separator.
			j -= 1
			x = 1000
		} else if j != -1 && n[j-1] == 'm' {
			j -= 1
			x = 1000000
		} else if n[len(n)-1] == 'k' {
			n = n[:len(n)-1]
			x = 1000
		} else if n[len(n)-1] == 'm' {
			n = n[:len(n)-1]
			x = 1000000
		}
		if j != -1 {
			// Keep only the lower-bound part of "lo - hi".
			n = n[:j]
		}
		if n[0] == '<' {
			// "Everything below": lower bound is zero.
			b[i] = ValueBucket{names[i], 0}
			continue
		} else if n[0] == '>' {
			min, _ = strconv.ParseFloat(n[1:], 64)
			b[i] = ValueBucket{names[i], int64(min * x)}
			continue
		}
		min, _ = strconv.ParseFloat(n, 64)
		b[i] = ValueBucket{names[i], int64(min * x)}
	}
	return b
}
// IntSlice attaches sort.Interface methods to []int64, sorting ascending.
type IntSlice []int64

// Len reports the number of elements.
func (p IntSlice) Len() int { return len(p) }

// Less orders elements ascending.
func (p IntSlice) Less(i, j int) bool {
	return p[i] < p[j]
}

// Swap exchanges two elements.
func (p IntSlice) Swap(i, j int) {
	p[i], p[j] = p[j], p[i]
}

// Sort sorts the slice in place, ascending.
func (p IntSlice) Sort() {
	sort.Sort(p)
}
// This example is a Go replica of https://github.com/google/or-tools/blob/master/ortools/linear_solver/samples/linear_programming_example.py
//Linear optimization example
package main
import (
"fmt"
"os"
"github.com/baobabsoluciones/ortoolslp"
)
func main() {
//Entry point of the program
//Instantiate a solver, naming it LinearExample.
// solver := ortoolslp.NewSolver("LinearExample", ortoolslp.SolverCBC_MIXED_INTEGER_PROGRAMMING)
solver := ortoolslp.NewSolver("LinearExample", ortoolslp.SolverCLP_LINEAR_PROGRAMMING)
// solver := ortoolslp.NewSolver("LinearExample", ortoolslp.SolverGLOP_LINEAR_PROGRAMMING)
// solver := ortoolslp.NewSolver("LinearExample", ortoolslp.SolverGLPK_LINEAR_PROGRAMMING)
// solver := ortoolslp.NewSolver("LinearExample", ortoolslp.SolverSCIP_MIXED_INTEGER_PROGRAMMING)
// solver := ortoolslp.NewSolver("LinearExample", ortoolslp.SolverCPLEX_LINEAR_PROGRAMMING)
// solver := ortoolslp.NewSolver("LinearExample", ortoolslp.SolverGUROBI_LINEAR_PROGRAMMING)
//Create the two variables and let them take on any value.
x := solver.NumVar(0, ortoolslp.SolverInfinity(), "x")
y := solver.NumVar(0, ortoolslp.SolverInfinity(), "y")
//Objective function: Maximize 3x + 4y.
objective := solver.Objective()
objective.SetCoefficient(x, 3)
objective.SetCoefficient(y, 4)
objective.SetMaximization()
//Constraint 0: x + 2y <= 14.
constraint0 := solver.Constraint(-ortoolslp.SolverInfinity(), float64(14))
constraint0.SetCoefficient(x, 1)
constraint0.SetCoefficient(y, 2)
//Constraint 1: 3x - y >= 0.
constraint1 := solver.Constraint(float64(0), ortoolslp.SolverInfinity())
constraint1.SetCoefficient(x, 3)
constraint1.SetCoefficient(y, -1)
//Constraint 2: x - y <= 2.
constraint2 := solver.Constraint(-ortoolslp.SolverInfinity(), float64(2))
constraint2.SetCoefficient(x, 1)
constraint2.SetCoefficient(y, -1)
fmt.Println("Number of variables =", solver.NumVariables())
fmt.Println("Number of constraints =", solver.NumConstraints())
//Solve the system.
status := solver.Solve()
//Check that the problem has an optimal solution.
if status != ortoolslp.SolverOPTIMAL {
fmt.Println("The problem does not have an optimal solution!")
os.Exit(1)
}
fmt.Println("Solution:")
fmt.Println("x =", x.Solution_value())
fmt.Println("y =", y.Solution_value())
fmt.Println("Optimal objective value =", objective.Value())
fmt.Println("")
fmt.Println("Advanced usage:")
fmt.Println("Problem solved in ", solver.Wall_time(), " milliseconds")
fmt.Println("Problem solved in ", solver.Iterations(), " iterations")
fmt.Println("x: reduced cost =", x.Reduced_cost())
fmt.Println("y: reduced cost =", y.Reduced_cost())
activities := solver.ComputeConstraintActivities()
fmt.Println("constraint0: dual value =",
constraint0.Dual_value())
fmt.Println(" activities =",
activities.Get(constraint0.Index()))
fmt.Println("constraint1: dual value =",
constraint1.Dual_value())
fmt.Println(" activities =",
activities.Get(constraint1.Index()))
fmt.Println("constraint2: dual value =",
constraint2.Dual_value())
fmt.Println(" activities =",
activities.Get(constraint2.Index()))
} | examples/LP/linear_programming.go | 0.854703 | 0.434161 | linear_programming.go | starcoder |
package gohome
import (
"github.com/PucklaMotzer09/mathgl/mgl32"
)
// TransformableObject3D stores everything needed to build a 3D transform
// matrix (translation, rotation, scale), including parent linkage for
// hierarchical transforms.
type TransformableObject3D struct {
	// The position in the world
	Position mgl32.Vec3
	// The scale that multiplies all vertices
	Scale mgl32.Vec3
	// The rotation represented as a Quaternion
	Rotation mgl32.Quat

	// Cached inputs from the previous matrix calculation, used by
	// valuesChanged to skip recomputation when nothing changed.
	oldPosition mgl32.Vec3
	oldScale    mgl32.Vec3
	oldRotation mgl32.Quat

	// transformMatrix is the composed world transform; camNotRelativeMatrix
	// is the same matrix with a "not relative" camera's inverse view applied.
	transformMatrix      mgl32.Mat4
	camNotRelativeMatrix mgl32.Mat4

	// Parent linkage and the channels used to order parallel matrix
	// calculation between parent and children.
	parent        ParentObject3D
	parentChannel chan bool
	childChannels map[*TransformableObject3D]chan bool

	// IgnoreParentRotation / IgnoreParentScale drop the corresponding parent
	// contribution when composing with the parent transform.
	IgnoreParentRotation bool
	IgnoreParentScale    bool

	oldParentTransform      mgl32.Mat4
	oldIgnoreParentRotation bool
	oldIgnoreParentScale    bool
}
// valuesChanged reports whether any input to the transform matrix changed
// since the last calculation (including the parent's transform).
func (tobj *TransformableObject3D) valuesChanged() bool {
	if tobj.Position != tobj.oldPosition || tobj.Scale != tobj.oldScale || tobj.Rotation != tobj.oldRotation {
		return true
	}
	if tobj.IgnoreParentRotation != tobj.oldIgnoreParentRotation || tobj.IgnoreParentScale != tobj.oldIgnoreParentScale {
		return true
	}
	return tobj.getParentTransform() != tobj.oldParentTransform
}
// CalculateTransformMatrix recomputes the cached transform matrix from
// Position / Rotation / Scale and the parent chain, then derives the
// camera-relative matrix for the camera at index notRelativeToCamera
// (-1 means no camera adjustment).
// When parallel matrix calculation is enabled, the method waits for its
// parent via parentChannel and signals its children afterwards.
func (tobj *TransformableObject3D) CalculateTransformMatrix(rmgr *RenderManager, notRelativeToCamera int) {
	// Wait until the parent has published its matrix (parallel mode only).
	if tobj.parent != nil && RenderMgr.calculatingTransformMatricesParallel {
		<-tobj.parentChannel
	}
	var cam3d *Camera3D = nil
	if rmgr != nil {
		if notRelativeToCamera != -1 && len(rmgr.camera3Ds) > notRelativeToCamera {
			cam3d = rmgr.camera3Ds[notRelativeToCamera]
		}
		if cam3d != nil {
			cam3d.CalculateViewMatrix()
		}
	}
	// Only rebuild the matrix when an input value actually changed.
	if tobj.valuesChanged() {
		// Compose translation * rotation * scale (T QR S).
		T := mgl32.Translate3D(tobj.Position[0], tobj.Position[1], tobj.Position[2])
		QR := tobj.Rotation.Mat4()
		S := mgl32.Scale3D(tobj.Scale[0], tobj.Scale[1], tobj.Scale[2])
		tobj.transformMatrix = T.Mul4(QR).Mul4(S)
		// Apply the parent transform on the left.
		ptransform := tobj.getParentTransform()
		tobj.transformMatrix = ptransform.Mul4(tobj.transformMatrix)
		// Remember the inputs so the next call can skip recomputation.
		tobj.oldPosition = tobj.Position
		tobj.oldScale = tobj.Scale
		tobj.oldRotation = tobj.Rotation
		tobj.oldParentTransform = ptransform
	}
	if cam3d != nil {
		tobj.camNotRelativeMatrix = cam3d.GetInverseViewMatrix().Mul4(tobj.transformMatrix)
	} else {
		tobj.camNotRelativeMatrix = tobj.transformMatrix
	}
	// Signal all children waiting on this transform (parallel mode only).
	if RenderMgr.calculatingTransformMatricesParallel {
		if tobj.childChannels != nil {
			for _, ch := range tobj.childChannels {
				if ch != nil {
					ch <- true
				}
			}
		}
	}
}
// GetTransformMatrix returns the most recently calculated transform matrix
// (camera-relative when a "not relative" camera was applied).
func (tobj *TransformableObject3D) GetTransformMatrix() mgl32.Mat4 {
	return tobj.camNotRelativeMatrix
}

// SetTransformMatrix pushes this object's transform matrix into the render
// manager as the current 3D transform.
func (tobj *TransformableObject3D) SetTransformMatrix(rmgr *RenderManager) {
	rmgr.setTransformMatrix3D(tobj.GetTransformMatrix())
}
// getParentTransform returns the parent's transform matrix, honouring the
// IgnoreParentRotation / IgnoreParentScale flags; it returns the identity
// matrix when there is no usable parent transform.
func (tobj *TransformableObject3D) getParentTransform() mgl32.Mat4 {
	if tobj.parent != nil {
		if ptobj, ok := tobj.parent.(ParentObject3D); ok {
			if transform := ptobj.GetTransform3D(); transform != nil {
				if !tobj.IgnoreParentRotation && !tobj.IgnoreParentScale {
					// Fast path: use the parent's full matrix as-is.
					var nrc int = -1
					if ent, ok := tobj.parent.(*Entity3D); ok {
						nrc = ent.NotRelativeCamera()
					}
					// In sequential mode the parent matrix may be stale, so
					// recalculate it first.
					if !RenderMgr.calculatingTransformMatricesParallel {
						transform.CalculateTransformMatrix(&RenderMgr, nrc)
					}
					return transform.GetTransformMatrix()
				} else {
					// Rebuild the parent matrix (T QR S) while selectively
					// dropping its rotation and/or scale contribution.
					T := mgl32.Translate3D(transform.Position[0], transform.Position[1], transform.Position[2])
					var QR, S mgl32.Mat4
					if tobj.IgnoreParentRotation {
						QR = mgl32.Ident4()
					} else {
						QR = transform.Rotation.Mat4()
					}
					if tobj.IgnoreParentScale {
						S = mgl32.Ident4()
					} else {
						S = mgl32.Scale3D(transform.Scale[0], transform.Scale[1], transform.Scale[2])
					}
					// Recurse so grandparents are also applied.
					pmat := transform.getParentTransform().Mul4(T.Mul4(QR).Mul4(S))
					return pmat
				}
			}
		}
	}
	return mgl32.Ident4()
}
// GetPosition returns the world position, i.e. Position transformed by the
// parent chain.
func (tobj *TransformableObject3D) GetPosition() mgl32.Vec3 {
	parentTransform := tobj.getParentTransform()
	return parentTransform.Mul4x1(tobj.Position.Vec4(1.0)).Vec3()
}
// GetTransform3D returns itself; it satisfies the transform accessor used
// for parent/child composition.
func (tobj *TransformableObject3D) GetTransform3D() *TransformableObject3D {
	return tobj
}

// GetParent returns the parent of this transform, or nil when detached.
func (tobj *TransformableObject3D) GetParent() ParentObject3D {
	return tobj.parent
}
// SetParent sets the parent this transform is relative to; passing nil
// detaches the transform from its current parent.
func (tobj *TransformableObject3D) SetParent(parent ParentObject3D) {
	// Detach from any previous parent and tear down its signalling channel.
	if tobj.parent != nil {
		close(tobj.parentChannel)
		tobj.parent.SetChildChannel(nil, tobj)
		tobj.parent = nil
	}
	if parent == nil {
		return
	}
	// Attach to the new parent and register a fresh ordering channel.
	tobj.parent = parent
	tobj.parentChannel = make(chan bool)
	tobj.parent.SetChildChannel(tobj.parentChannel, tobj)
}
// SetChildChannel records (or clears, when channel is nil) the channel used
// to signal the given child during parallel transform calculation.
func (tobj *TransformableObject3D) SetChildChannel(channel chan bool, child *TransformableObject3D) {
	if tobj.childChannels == nil {
		tobj.childChannels = map[*TransformableObject3D]chan bool{}
	}
	tobj.childChannels[child] = channel
}
// Returns a transformable object with an identity matrix
func DefaultTransformableObject3D() *TransformableObject3D {
transform := TransformableObject3D{
Scale: [3]float32{1.0, 1.0, 1.0},
}
return &transform
} | src/gohome/transformableobject3d.go | 0.709019 | 0.554772 | transformableobject3d.go | starcoder |
package effuncert
import (
"fmt"
"math"
"github.com/fako1024/numerics"
"github.com/fako1024/numerics/root"
)
const (
	// epsilon is the numerical tolerance below which probabilities are
	// treated as exactly 0 or 1.
	epsilon = 1e-9
	// maxQuadraticRootFindingN is the trial count above which Quantile
	// falls back to bisection instead of quadratic root finding.
	maxQuadraticRootFindingN = 1000
)

const (
	// OneSigma denotes a one sigma standard deviation equivalent
	OneSigma = 0.6826895475
	// TwoSigma denotes a two sigma standard deviation equivalent
	TwoSigma = 0.9544997215
	// ThreeSigma denotes a three sigma standard deviation equivalent
	ThreeSigma = 0.9973001480
	// FourSigma denotes a four sigma standard deviation equivalent
	FourSigma = 0.99993669986724854
	// FiveSigma denotes a five sigma standard deviation equivalent
	FiveSigma = 0.99999940395355225
)
// Estimator denotes a numeric estimator instance for a Bernoulli experiment and
// its uncertainty based on a binomial probability distribution.
type Estimator struct {
	NSuccess, NTrial float64 // Number of successes & trials
	Mode             float64 // Mode / expectation value of the estimator
	Integral         float64 // Integral of the PDF
	Variance         float64 // Variance / classical uncertainty of the estimator

	confidence float64 // Confidence interval for the uncertainty estimation

	lowInterval, highInterval float64 // Values holding the low / high relative uncertainty intervals
	isEstimated               bool    // Indicator if interval has been estimated
}
// New instantiates a new estimator based on a set of trials / successes
// and functional options (if any).
// NOTE(review): nTrial == 0 yields NaN Mode and Variance; Quantile and
// Interval guard against that case.
func New(nSuccess, nTrial uint64, options ...func(*Estimator)) *Estimator {
	obj := &Estimator{
		NSuccess:   float64(nSuccess), // Number of successful trials
		NTrial:     float64(nTrial),   // Total number of trials
		confidence: OneSigma,          // Sigma confidence
	}

	// Determine mode and classical variance
	obj.Mode = obj.NSuccess / obj.NTrial                             // Mode / Classical result
	obj.Variance = math.Sqrt(obj.Mode * (1 - obj.Mode) / obj.NTrial) // Variance / Classical uncertainty

	// Execute functional options (if any), see options.go for implementation
	for _, option := range options {
		option(obj)
	}

	return obj
}
// String returns a human-readable string representing the estimator result,
// e.g. "(0.500 -0.120 +0.120)".
func (e *Estimator) String() string {
	// Calculate / get low and high intervals
	lowInterval, highInterval := e.IntervalRelative()

	// Derive the digit count from the smaller interval so both bounds print
	// with matching precision ("%.[4]*[1]f" takes the precision from the 4th
	// argument and the value from the 1st).
	precisionDigits := len(fmt.Sprintf("%0.3f", math.Min(lowInterval, highInterval)))

	return fmt.Sprintf("(%.[4]*[1]f -%.[4]*[2]f +%.[4]*[3]f)", e.Mode, lowInterval, highInterval, precisionDigits)
}
// Quantile returns the quantile of the estimator's distribution at the given
// cumulative probability (confidence in [0, 1]) by numerically inverting the
// regularized incomplete Beta function. NaN is returned for probabilities
// outside [0, 1].
func (e *Estimator) Quantile(confidence float64) float64 {
	// Handle numerically impossible cases
	if confidence < 0. || confidence > 1. {
		return math.NaN()
	}

	// Handle special, numerically unstable cases
	if e.NTrial == 0 || confidence < epsilon {
		return 0.
	}
	if (1. - confidence) < epsilon {
		return 1.
	}

	// Determine the initial result seed based on the mode of the distribution and
	// stabilize edge cases (nudge away from the exact 0 / 1 boundaries).
	initialEstimate := e.Mode
	if initialEstimate < epsilon {
		initialEstimate = math.Min(0.1, 1./float64(e.NTrial))
	} else if initialEstimate == 1 {
		initialEstimate = math.Max(0.9, 1.-(1./float64(e.NTrial)))
	}

	// For large values use a linear root finding method (as it is more stable)
	if e.NTrial > maxQuadraticRootFindingN {
		return root.Bisect(func(x float64) float64 {
			return numerics.BetaIncompleteRegular(x, 1.+float64(e.NSuccess), 1.-float64(e.NSuccess)+float64(e.NTrial)) - confidence
		}, 0., 1.)
	}

	// For smaller values use a quadratic root finding method (as it is faster and more precise)
	return root.Find(func(x float64) float64 {
		return numerics.BetaIncompleteRegular(x, 1.+float64(e.NSuccess), 1.-float64(e.NSuccess)+float64(e.NTrial)) - confidence
	}, func(x float64) float64 {
		return numerics.Binomial(x, float64(e.NSuccess), float64(e.NTrial)) / numerics.Beta(1.+float64(e.NSuccess), 1.-float64(e.NSuccess)+float64(e.NTrial))
	}, initialEstimate, root.WithLimits(0., 1.), root.WithHeuristics())
}
// Interval returns the absolute lower and upper quantiles for the uncertainty
// estimation at the configured confidence level. NaN results propagate from
// an invalid estimator (e.g. zero trials).
func (e *Estimator) Interval() (lowQuantile float64, highQuantile float64) {
	// If the result is not valid, return accordingly
	if math.IsNaN(e.Mode) || math.IsNaN(e.Integral) || math.IsNaN(e.Variance) {
		return math.NaN(), math.NaN()
	}

	switch {
	// Special case: no successes — the lower bound is exactly 0 and all the
	// confidence mass sits on the upper side.
	case e.NSuccess == 0:
		lowQuantile = 0.
		highQuantile = e.Quantile(e.confidence)
	// Special case: all trials succeeded — mirror image of the above.
	case e.NSuccess == e.NTrial:
		lowQuantile = e.Quantile(1. - e.confidence)
		highQuantile = 1.
	// Default case: split the (1 - confidence) tail mass evenly.
	default:
		lowQuantile = e.Quantile(0.5 * (1. - e.confidence))
		highQuantile = e.Quantile(1.0 - 0.5*(1.-e.confidence))
	}
	return
}
// IntervalRelative returns the relative lower and upper quantiles for the uncertainty estimation
func (e *Estimator) IntervalRelative() (lowQuantile float64, highQuantile float64) {
// Check if estimation has to be performed
if !e.isEstimated {
// Determine the absolute interval quantiles
tempLow, tempHigh := e.Interval()
//fmt.Println("Interval", e.Mode, tempLow, tempHigh)
e.lowInterval, e.highInterval = e.Mode-tempLow, tempHigh-e.Mode
}
return e.lowInterval, e.highInterval
} | uncert.go | 0.861669 | 0.729737 | uncert.go | starcoder |
package unit
import (
"fmt"
"math"
)
// Value is a value with a unit.
type Value struct {
    V float32 // magnitude
    U Unit    // unit of measure for V
}

// Unit represents a unit for a Value.
type Unit uint8

// Metric converts Values to device-dependent pixels, px. The zero
// value represents a 1-to-1 scale from dp, sp to pixels.
type Metric struct {
    // PxPerDp is the device-dependent pixels per dp.
    PxPerDp float32
    // PxPerSp is the device-dependent pixels per sp.
    PxPerSp float32
}

const (
    // UnitPx represent device pixels in the resolution of
    // the underlying display.
    UnitPx Unit = iota
    // UnitDp represents device independent pixels. 1 dp will
    // have the same apparent size across platforms and
    // display resolutions.
    UnitDp
    // UnitSp is like UnitDp but for font sizes.
    UnitSp
)
// Px returns the Value for v device pixels.
func Px(v float32) Value {
    val := Value{U: UnitPx}
    val.V = v
    return val
}

// Dp returns the Value for v device independent
// pixels.
func Dp(v float32) Value {
    val := Value{U: UnitDp}
    val.V = v
    return val
}

// Sp returns the Value for v scaled dps.
func Sp(v float32) Value {
    val := Value{U: UnitSp}
    val.V = v
    return val
}
// Scale returns the value scaled by s; the unit is preserved.
func (v Value) Scale(s float32) Value {
    return Value{V: v.V * s, U: v.U}
}

// String formats the value as its magnitude immediately followed by its
// unit abbreviation, e.g. "1.5dp".
func (v Value) String() string {
    return fmt.Sprintf("%g", v.V) + v.U.String()
}
// String returns the unit's abbreviation ("px", "dp" or "sp") and panics on
// any value outside the defined Unit constants.
func (u Unit) String() string {
    names := [...]string{UnitPx: "px", UnitDp: "dp", UnitSp: "sp"}
    if int(u) < len(names) {
        return names[u]
    }
    panic("unknown unit")
}
// Add a list of Values, converting mixed units to a common one as needed.
func Add(c Metric, values ...Value) Value {
    var total Value
    for _, val := range values {
        total, val = compatible(c, total, val)
        total.V += val.V
    }
    return total
}

// Max returns the maximum of a list of Values, converting mixed units to a
// common one as needed.
func Max(c Metric, values ...Value) Value {
    var best Value
    for _, val := range values {
        best, val = compatible(c, best, val)
        if best.V < val.V {
            best.V = val.V
        }
    }
    return best
}
// Px converts a Value to whole device pixels under this metric, rounding to
// the nearest integer. A zero conversion factor is treated as 1 (the zero
// Metric maps dp and sp 1-to-1 to px). Panics on an unknown unit.
func (c Metric) Px(v Value) int {
    var perUnit float32
    switch v.U {
    case UnitPx:
        perUnit = 1
    case UnitDp:
        perUnit = c.PxPerDp
    case UnitSp:
        perUnit = c.PxPerSp
    default:
        panic("unknown unit")
    }
    if perUnit == 0 {
        perUnit = 1
    }
    return int(math.Round(float64(perUnit * v.V)))
}
func compatible(c Metric, v1, v2 Value) (Value, Value) {
if v1.U == v2.U {
return v1, v2
}
if v1.V == 0 {
v1.U = v2.U
return v1, v2
}
if v2.V == 0 {
v2.U = v1.U
return v1, v2
}
return Px(float32(c.Px(v1))), Px(float32(c.Px(v2)))
} | unit/unit.go | 0.847621 | 0.614018 | unit.go | starcoder |
package rt
import (
"emacs/lisp"
)
// Slice - Go slice semantics emulated on top of an Emacs Lisp vector.
// data is the backing vector; offset is the index of the slice's first
// element within data; len and cap mirror Go slice length and capacity.
type Slice struct {
    data   lisp.Object
    offset int
    len    int
    cap    int
}

// SliceLen returns the slice length (number of accessible elements).
func SliceLen(slice *Slice) int { return slice.len }

// SliceCap returns the slice capacity.
func SliceCap(slice *Slice) int { return slice.cap }

// MakeSlice creates a new slice with cap=len.
// All values initialized to specified zero value.
func MakeSlice(length int, zv lisp.Object) *Slice {
    return &Slice{
        data: lisp.Call("make-vector", length, zv),
        len:  length,
        cap:  length,
    }
}

// MakeSliceCap creates a new slice.
// Each value within length bounds is initialized to specified zero value;
// the remaining capacity is filled with nil. Callers are expected to pass
// length <= capacity — there is no explicit guard here.
func MakeSliceCap(length, capacity int, zv lisp.Object) *Slice {
    if length == capacity {
        return MakeSlice(length, zv)
    }
    data := lisp.Call("make-vector", capacity, lisp.Intern("nil"))
    for i := 0; i < length; i++ {
        lisp.Aset(data, i, zv)
    }
    return &Slice{data: data, len: length, cap: capacity}
}
// ArrayToSlice constructs a new slice from given data vector.
// Vector is not copied: the slice aliases it, with offset 0 and cap == len.
func ArrayToSlice(data lisp.Object) *Slice {
    length := lisp.Length(data)
    return &Slice{data: data, len: length, cap: length}
}

// SliceGet extract slice value using specified index.
// No Go-side bounds check is performed; the access is delegated to the
// underlying vector accessor (aref) at offset+index.
func SliceGet(slice *Slice, index int) lisp.Object {
    return aref(slice.data, slice.offset+index)
}

// SliceSet sets slice value at specified index (offset-adjusted).
func SliceSet(slice *Slice, index int, val lisp.Object) {
    lisp.Aset(slice.data, index+slice.offset, val)
}

// SlicePush = "append(slice, val)".
// When spare capacity remains, the value is stored in place and the same
// slice is returned; otherwise the backing vector is extended by
// memExtendPush elements and a fresh *Slice with offset 0 is returned —
// mirroring Go's append semantics, so callers must use the return value.
func SlicePush(slice *Slice, val lisp.Object) *Slice {
    pos := slice.len
    if pos == slice.cap {
        // Need to extend slice storage.
        // Create a new vector with 1st element set to "val"
        // then re-set slice data with "oldData+newData".
        newData := lisp.Call("make-vector", memExtendPush, lisp.Intern("nil"))
        lisp.Aset(newData, 0, val)
        // For slices with offset a sub-vector should
        // be taken to avoid memory leaks.
        if slice.offset == 0 {
            newData = vconcat2(slice.data, newData)
        } else {
            newData = vconcat2(
                substringFrom(slice.data, slice.offset),
                newData,
            )
        }
        return &Slice{
            data:   newData,
            len:    pos + 1,
            cap:    slice.cap + memExtendPush,
            offset: 0,
        }
    }
    // Insert new value directly.
    slice.len = pos + 1
    SliceSet(slice, pos, val)
    return slice
}
// sliceLenBound raises a Lisp error unless 0 <= index <= slice.len.
func sliceLenBound(slice *Slice, index int) {
    if 0 <= index && index <= slice.len {
        return
    }
    lisp.Error("slice bounds out of range")
}

// sliceCapBound raises a Lisp error unless 0 <= index <= slice.cap.
func sliceCapBound(slice *Slice, index int) {
    if 0 <= index && index <= slice.cap {
        return
    }
    lisp.Error("slice bounds out of range")
}
// SliceCopyFast is SliceCopy specialization that is appliable if both
// `dst' and `src' have zero offset: it indexes the backing vectors directly.
func SliceCopyFast(dst, src *Slice) {
    dstData := dst.data
    srcData := src.data
    count := lisp.MinInt(dst.len, src.len)
    for i := 0; i < count; i++ {
        lisp.Aset(dstData, i, aref(srcData, i))
    }
}

// SliceCopy copies one slice contents to another.
// Up to "min(len(dst), len(src))" elements are copied.
func SliceCopy(dst, src *Slice) {
    if dst.offset == 0 && src.offset == 0 {
        SliceCopyFast(dst, src)
        return
    }
    count := lisp.MinInt(dst.len, src.len)
    for i := 0; i < count; i++ {
        SliceSet(dst, i, SliceGet(src, i))
    }
}

// SliceSlice2 = "slice[low:high]".
// Shares the backing vector; low is checked against len, high against cap,
// matching Go's slicing rules.
func SliceSlice2(slice *Slice, low, high int) *Slice {
    sliceLenBound(slice, low)
    sliceCapBound(slice, high)
    return &Slice{
        data:   slice.data,
        offset: slice.offset + low,
        len:    high - low,
        cap:    slice.cap - low,
    }
}

// SliceSliceLow = "slice[low:]". Shares the backing vector.
func SliceSliceLow(slice *Slice, low int) *Slice {
    sliceLenBound(slice, low)
    return &Slice{
        data:   slice.data,
        offset: slice.offset + low,
        len:    slice.len - low,
        cap:    slice.cap - low,
    }
}

// SliceSliceHigh = "slice[:high]".
// Only the length changes; offset and capacity are preserved, as in Go.
func SliceSliceHigh(slice *Slice, high int) *Slice {
    sliceCapBound(slice, high)
    return &Slice{
        data:   slice.data,
        offset: slice.offset,
        len:    high,
        cap:    slice.cap,
    }
}

// ArraySlice2 slices an array: "arr[low:high]". The vector is aliased.
func ArraySlice2(arr lisp.Object, low, high int) *Slice {
    return &Slice{
        data:   arr,
        offset: low,
        len:    high - low,
        cap:    lisp.Length(arr) - low,
    }
}

// ArraySliceLow slices an array: "arr[low:]". The vector is aliased.
func ArraySliceLow(arr lisp.Object, low int) *Slice {
    length := lisp.Length(arr)
    return &Slice{
        data:   arr,
        offset: low,
        len:    length - low,
        cap:    length - low,
    }
}

// ArraySliceHigh slices an array: "arr[:high]". The vector is aliased.
func ArraySliceHigh(arr lisp.Object, high int) *Slice {
    return &Slice{
        data:   arr,
        offset: 0,
        len:    high,
        cap:    lisp.Length(arr),
    }
}
package matchers
import (
"os"
"regexp"
"strings"
"time"
"github.com/kbrgl/fuzzy"
)
// FileMatcher is an interface providing a Match method that checks whether a
// file satisfies some constraint.
type FileMatcher interface {
    Match(os.FileInfo) bool
}

// FuzzyMatcher fuzzy-matches the filename.
type FuzzyMatcher struct {
    pattern string // pattern to fuzzy-match the filename against
}

// NewFuzzyMatcher returns a new FuzzyMatcher.
func NewFuzzyMatcher(pattern string) *FuzzyMatcher {
    return &FuzzyMatcher{pattern: pattern}
}

// Match fuzzy-matches the filename (case-folded, per fuzzy.MatchFold).
func (f FuzzyMatcher) Match(fi os.FileInfo) bool {
    return fuzzy.MatchFold(fi.Name(), f.pattern)
}
// ExactMatcher checks the filename for exact equality with the expected value.
type ExactMatcher struct {
    expected string // filename that must match exactly
}

// NewExactMatcher returns a new ExactMatcher.
func NewExactMatcher(expected string) *ExactMatcher {
    m := ExactMatcher{}
    m.expected = expected
    return &m
}

// Match reports whether the filename equals the expected value exactly.
func (e ExactMatcher) Match(fi os.FileInfo) bool {
    name := fi.Name()
    return e.expected == name
}
// SuffixMatcher checks whether the filename has some suffix.
type SuffixMatcher struct {
    suffix string // required filename suffix
}

// NewSuffixMatcher returns a SuffixMatcher that checks a string for the provided
// suffix.
func NewSuffixMatcher(suffix string) *SuffixMatcher {
    m := SuffixMatcher{}
    m.suffix = suffix
    return &m
}

// Match reports whether fi.Name() ends with the configured suffix.
func (s SuffixMatcher) Match(fi os.FileInfo) bool {
    name := fi.Name()
    n := len(name) - len(s.suffix)
    return n >= 0 && name[n:] == s.suffix
}

// PrefixMatcher checks whether the filename has some prefix.
type PrefixMatcher struct {
    prefix string // required filename prefix
}

// NewPrefixMatcher returns a PrefixMatcher that checks a string for the provided
// prefix.
func NewPrefixMatcher(prefix string) *PrefixMatcher {
    m := PrefixMatcher{}
    m.prefix = prefix
    return &m
}

// Match reports whether fi.Name() starts with the configured prefix.
func (p PrefixMatcher) Match(fi os.FileInfo) bool {
    name := fi.Name()
    return len(name) >= len(p.prefix) && name[:len(p.prefix)] == p.prefix
}
// REMatcher checks whether the filename matches a regexp pattern.
type REMatcher struct {
    pattern *regexp.Regexp // compiled filename pattern
}

// NewREMatcher returns a new REMatcher that performs a check against the
// provided pattern. An invalid pattern yields the compile error.
func NewREMatcher(pattern string) (*REMatcher, error) {
    compiled, err := regexp.Compile(pattern)
    if err != nil {
        return nil, err
    }
    m := REMatcher{pattern: compiled}
    return &m, nil
}

// Match reports whether fi.Name() matches the compiled pattern.
func (r REMatcher) Match(fi os.FileInfo) bool {
    return r.pattern.MatchString(fi.Name())
}
// PermMatcher checks against the provided permissions.
type PermMatcher struct {
    perm os.FileMode // permission bits to test for overlap
}

// NewPermMatcher returns a new PermMatcher that performs a check against the
// provided FileMode.
func NewPermMatcher(perm os.FileMode) *PermMatcher {
    m := PermMatcher{}
    m.perm = perm
    return &m
}

// Match reports whether any of the configured permission bits are set on fi.
func (p PermMatcher) Match(fi os.FileInfo) bool {
    overlap := fi.Mode().Perm() & p.perm
    return overlap != 0
}

// DirMatcher allows only dirs.
type DirMatcher struct {
}

// NewDirMatcher returns a new DirMatcher.
func NewDirMatcher() *DirMatcher {
    return new(DirMatcher)
}

// Match reports whether fi is a directory.
func (d DirMatcher) Match(fi os.FileInfo) bool {
    return fi.IsDir()
}
// AgeOlderMatcher checks that a file is older than a given duration.
type AgeOlderMatcher struct {
    age time.Duration // minimum age for a match
}

// NewAgeOlderMatcher returns a new AgeOlderMatcher.
func NewAgeOlderMatcher(age time.Duration) *AgeOlderMatcher {
    m := AgeOlderMatcher{}
    m.age = age
    return &m
}

// Match reports whether fi was last modified more than o.age ago.
func (o AgeOlderMatcher) Match(fi os.FileInfo) bool {
    return time.Since(fi.ModTime()) > o.age
}

// AgeYoungerMatcher checks that a file is younger than a given duration.
type AgeYoungerMatcher struct {
    age time.Duration // maximum age for a match
}

// NewAgeYoungerMatcher returns a new AgeYoungerMatcher.
func NewAgeYoungerMatcher(age time.Duration) *AgeYoungerMatcher {
    m := AgeYoungerMatcher{}
    m.age = age
    return &m
}

// Match reports whether fi was last modified less than y.age ago.
func (y AgeYoungerMatcher) Match(fi os.FileInfo) bool {
    return time.Since(fi.ModTime()) < y.age
}
// AllMatcher allows everything.
type AllMatcher struct {
}

// NewAllMatcher returns a new AllMatcher.
func NewAllMatcher() *AllMatcher {
    return new(AllMatcher)
}

// Match always returns true; the FileInfo is never inspected.
func (a AllMatcher) Match(_ os.FileInfo) bool {
    return true
}

// SubstringMatcher checks the provided string for a substring.
type SubstringMatcher struct {
    // substring to look for
    substring string
}

// NewSubstringMatcher returns a new SubstringMatcher.
func NewSubstringMatcher(substring string) *SubstringMatcher {
    m := SubstringMatcher{}
    m.substring = substring
    return &m
}

// Match reports whether the filename contains the configured substring.
func (s SubstringMatcher) Match(fi os.FileInfo) bool {
    return strings.Index(fi.Name(), s.substring) >= 0
}
package openapi
import (
"encoding/json"
)
// RateDetails struct for RateDetails
//
// NOTE(review): this type and its accessors follow the OpenAPI-generator
// pattern (nil-receiver-safe getters, Ok/Has variants); if the file is in
// fact generated, hand edits may be overwritten on regeneration — confirm.
type RateDetails struct {
    AccrualPeriod string `json:"accrual_period"`
    // Rate in basis points. E.g. 5 represents 0.05%
    Rate int32 `json:"rate"`
    // Rate effective start date. Inclusive.
    ValidFrom string `json:"valid_from"`
    // Rate effective end date. Exclusive. Optional; nil means open-ended.
    ValidTo *string `json:"valid_to,omitempty"`
}

// NewRateDetails instantiates a new RateDetails object
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed
func NewRateDetails(accrualPeriod string, rate int32, validFrom string) *RateDetails {
    this := RateDetails{}
    this.AccrualPeriod = accrualPeriod
    this.Rate = rate
    this.ValidFrom = validFrom
    return &this
}

// NewRateDetailsWithDefaults instantiates a new RateDetails object
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set
func NewRateDetailsWithDefaults() *RateDetails {
    this := RateDetails{}
    return &this
}

// GetAccrualPeriod returns the AccrualPeriod field value.
// Safe on a nil receiver: returns the zero value.
func (o *RateDetails) GetAccrualPeriod() string {
    if o == nil {
        var ret string
        return ret
    }
    return o.AccrualPeriod
}

// GetAccrualPeriodOk returns a tuple with the AccrualPeriod field value
// and a boolean to check if the value has been set.
func (o *RateDetails) GetAccrualPeriodOk() (*string, bool) {
    if o == nil {
        return nil, false
    }
    return &o.AccrualPeriod, true
}

// SetAccrualPeriod sets field value
func (o *RateDetails) SetAccrualPeriod(v string) {
    o.AccrualPeriod = v
}

// GetRate returns the Rate field value.
// Safe on a nil receiver: returns the zero value.
func (o *RateDetails) GetRate() int32 {
    if o == nil {
        var ret int32
        return ret
    }
    return o.Rate
}

// GetRateOk returns a tuple with the Rate field value
// and a boolean to check if the value has been set.
func (o *RateDetails) GetRateOk() (*int32, bool) {
    if o == nil {
        return nil, false
    }
    return &o.Rate, true
}

// SetRate sets field value
func (o *RateDetails) SetRate(v int32) {
    o.Rate = v
}

// GetValidFrom returns the ValidFrom field value.
// Safe on a nil receiver: returns the zero value.
func (o *RateDetails) GetValidFrom() string {
    if o == nil {
        var ret string
        return ret
    }
    return o.ValidFrom
}

// GetValidFromOk returns a tuple with the ValidFrom field value
// and a boolean to check if the value has been set.
func (o *RateDetails) GetValidFromOk() (*string, bool) {
    if o == nil {
        return nil, false
    }
    return &o.ValidFrom, true
}

// SetValidFrom sets field value
func (o *RateDetails) SetValidFrom(v string) {
    o.ValidFrom = v
}

// GetValidTo returns the ValidTo field value if set, zero value otherwise.
func (o *RateDetails) GetValidTo() string {
    if o == nil || o.ValidTo == nil {
        var ret string
        return ret
    }
    return *o.ValidTo
}

// GetValidToOk returns a tuple with the ValidTo field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *RateDetails) GetValidToOk() (*string, bool) {
    if o == nil || o.ValidTo == nil {
        return nil, false
    }
    return o.ValidTo, true
}

// HasValidTo returns a boolean if a field has been set.
func (o *RateDetails) HasValidTo() bool {
    if o != nil && o.ValidTo != nil {
        return true
    }
    return false
}

// SetValidTo gets a reference to the given string and assigns it to the ValidTo field.
func (o *RateDetails) SetValidTo(v string) {
    o.ValidTo = &v
}
// MarshalJSON serializes the required fields unconditionally and valid_to
// only when it has been set.
func (o RateDetails) MarshalJSON() ([]byte, error) {
    out := map[string]interface{}{
        "accrual_period": o.AccrualPeriod,
        "rate":           o.Rate,
        "valid_from":     o.ValidFrom,
    }
    if o.ValidTo != nil {
        out["valid_to"] = o.ValidTo
    }
    return json.Marshal(out)
}
// NullableRateDetails wraps a *RateDetails with an explicit "set" flag so
// that JSON null can be distinguished from an absent value.
type NullableRateDetails struct {
    value *RateDetails
    isSet bool
}

// Get returns the wrapped value (may be nil).
func (v NullableRateDetails) Get() *RateDetails {
    return v.value
}

// Set stores val and marks the wrapper as set.
func (v *NullableRateDetails) Set(val *RateDetails) {
    v.value = val
    v.isSet = true
}

// IsSet reports whether a value (possibly nil) has been assigned.
func (v NullableRateDetails) IsSet() bool {
    return v.isSet
}

// Unset clears the value and the set flag.
func (v *NullableRateDetails) Unset() {
    v.value = nil
    v.isSet = false
}

// NewNullableRateDetails returns a wrapper already marked as set.
func NewNullableRateDetails(val *RateDetails) *NullableRateDetails {
    return &NullableRateDetails{value: val, isSet: true}
}

// MarshalJSON encodes the wrapped value (nil encodes as JSON null).
func (v NullableRateDetails) MarshalJSON() ([]byte, error) {
    return json.Marshal(v.value)
}

// UnmarshalJSON decodes into the wrapped value and marks the wrapper as set.
func (v *NullableRateDetails) UnmarshalJSON(src []byte) error {
    v.isSet = true
    return json.Unmarshal(src, &v.value)
}
package store
// InMemoryStore represents a data store that is in memory and thus transient.
type InMemoryStore struct {
    data map[string][]byte
}

// NewInMemoryStore creates a new instance of a memory store with seed data.
//
// Fix: a nil initialData is replaced with an empty map; previously a nil
// seed produced a store whose Set panicked on assignment to a nil map.
func NewInMemoryStore(initialData map[string][]byte) KVStore {
    if initialData == nil {
        initialData = make(map[string][]byte)
    }
    return &InMemoryStore{data: initialData}
}

// NewEmptyInMemoryStore creates a new instance of an in memory store without seed data.
func NewEmptyInMemoryStore() KVStore {
    return &InMemoryStore{data: make(map[string][]byte)}
}
// Get retrieve the []byte value from the store for the specified key.
// A missing key yields a nil slice with a nil error, so callers cannot
// distinguish "absent" from "stored as nil" through Get alone.
func (i InMemoryStore) Get(key string) ([]byte, error) {
    return i.data[key], nil
}

// Set sets the specified key to the specified value in the store.
func (i InMemoryStore) Set(key string, value []byte) error {
    i.data[key] = value
    return nil
}

// Delete deletes the specified key from the storage (no-op if absent).
func (i InMemoryStore) Delete(key string) error {
    delete(i.data, key)
    return nil
}

// Size gets the number of items contained in this store.
func (i InMemoryStore) Size() (int, error) {
    return len(i.data), nil
}

// Keys retrieves all the keys currently in the store.
// NOTE(review): empty-string keys are skipped here, yet they are still
// visited by ForEachKey/ForEach and counted by Size — confirm this
// asymmetry is intentional.
func (i InMemoryStore) Keys() ([]string, error) {
    var keys []string
    for k := range i.data {
        if k != "" {
            keys = append(keys, k)
        }
    }
    return keys, nil
}

// ForEachKey invokes the specified function for each key in the store
// (map iteration order, i.e. unspecified).
func (i InMemoryStore) ForEachKey(iterator func(string)) error {
    for k := range i.data {
        iterator(k)
    }
    return nil
}

// ForEachValue invokes the specified function for each value in the store.
func (i InMemoryStore) ForEachValue(iterator func([]byte)) error {
    for _, v := range i.data {
        iterator(v)
    }
    return nil
}

// ForEach invokes the specified function for each Key/Value pair in the store.
func (i InMemoryStore) ForEach(iterator func(*KVData)) error {
    for k, v := range i.data {
        iterator(&KVData{Key: k, Value: v})
    }
    return nil
}

// Find finds the first item in the store that matches the predicate.
// Returns (nil, nil) when nothing matches; since map iteration order is
// unspecified, "first" is non-deterministic when several items match.
func (i InMemoryStore) Find(predicate func(*KVData) bool) (*KVData, error) {
    for k, v := range i.data {
        kv := &KVData{Key: k, Value: v}
        if predicate(kv) {
            return kv, nil
        }
    }
    return nil, nil
}
// Contains returns true if the key exists in the store
func (i InMemoryStore) Contains(key string) (bool, error) {
return len(i.data[key]) > 0, nil
}
// Start starts this store
func (i InMemoryStore) Start() error {
return nil
}
// Stop stops this store
func (i InMemoryStore) Stop() error {
return nil
} | store/in_memory_store.go | 0.852813 | 0.569912 | in_memory_store.go | starcoder |
package valuation
// yearsOfHighGrowth is the length (in years) of each forecast stage:
// the high-growth stage and the converging (low-growth) stage.
const yearsOfHighGrowth = 5

// OutputYear defines the cash flow calculation of one particular year in the future
type OutputYear struct {
    RevenueGrowthRate float64 // year-over-year revenue growth rate
    Revenue           float64
    EBITMargin        float64 // EBIT / Revenue
    EBIT              float64
    TaxRate           float64
    AfterTaxEBIT      float64
    Reinvestment      float64 // capital reinvested to support the revenue change
    FCFF              float64 // free cash flow to the firm: AfterTaxEBIT - Reinvestment
    CostOfCapital     float64
    DiscountFactor    float64 // cumulative discount factor applied to this year's cash flow
    PresentValueOfCashFlow float64
}

// Output defines the valuation result
type Output struct {
    Market          *Market
    Input           *Input
    BaseYear        *OutputYear
    HighGrowthYears []OutputYear // forecast years 1..5
    LowGrowthYears  []OutputYear // forecast years 6..10 (converging to terminal values)
    TerminalYear    *OutputYear
    TerminalCashFlow            float64
    TerminalValue               float64 // perpetuity value of the terminal cash flow
    PresentValueOfTerminalValue float64
    PresentValueOfCashFlow      float64 // sum of discounted forecast-year cash flows
    PresentValue                float64
    EquityValue                 float64 // PresentValue - debt + cash
    ValuePerShare               float64
}
// NewOutput returns a new valuation output object with both growth stages
// pre-allocated and empty base/terminal years. The error is always nil and
// is kept for interface stability.
func NewOutput(market *Market, input *Input) (*Output, error) {
    out := &Output{
        Market:          market,
        Input:           input,
        BaseYear:        &OutputYear{},
        TerminalYear:    &OutputYear{},
        HighGrowthYears: make([]OutputYear, yearsOfHighGrowth),
        LowGrowthYears:  make([]OutputYear, yearsOfHighGrowth),
    }
    return out, nil
}
// Compute calculates the valuation output using a two-stage discounted cash
// flow model plus a terminal year: five high-growth years at the input
// growth rate, five transition years that linearly converge growth rate,
// EBIT margin, tax rate and cost of capital towards their terminal values,
// and a terminal year treated as a growing perpetuity.
func (output *Output) Compute() error {
    baseYear, err := output.computeBaseYear()
    if err != nil {
        return err
    }
    output.BaseYear = baseYear
    market := output.Market
    input := output.Input
    prevYear := baseYear
    // Growth converges from the input rate to the terminal risk-free rate.
    startingRevenueGrowthRate := input.RevenueGrowthRate
    terminalRevenueGrowthRate := market.TerminalRiskFreeRate
    revenueGrowthRate := startingRevenueGrowthRate
    ebitMargin := input.StartingEBITMargin
    startingEBITMargin := input.StartingEBITMargin
    terminalEBITMargin := input.TerminalEBITMargin
    taxRate := input.EffectiveTaxRate
    costOfCapital := input.CostOfCapital
    startingCostOfCapital := input.CostOfCapital
    terminalCostOfCapital := input.TerminalCostOfCapital
    // Discount factor for year 1; compounded once more after each year.
    discountFactor := 1 / (1 + costOfCapital)
    for i := 0; i < yearsOfHighGrowth; i++ {
        if i != 0 {
            // EBIT margin interpolates linearly over the full 10-year
            // horizon (2*years - 1 = 9 steps); year 1 keeps the start margin.
            ebitMargin = terminalEBITMargin - ((terminalEBITMargin-startingEBITMargin)/float64((yearsOfHighGrowth*2-1)))*float64((yearsOfHighGrowth*2-i-1))
        }
        year, err := output.computeYearInGrowth(prevYear, revenueGrowthRate, ebitMargin, taxRate, costOfCapital, discountFactor)
        if err != nil {
            return err
        }
        output.HighGrowthYears[i] = *year
        prevYear = year
        discountFactor = discountFactor * (1 / (1 + costOfCapital))
    }
    for i := 0; i < yearsOfHighGrowth; i++ {
        // Transition stage: growth, margin, tax rate and cost of capital all
        // step linearly towards their terminal values over 5 years.
        revenueGrowthRate = startingRevenueGrowthRate - ((startingRevenueGrowthRate-terminalRevenueGrowthRate)/float64(yearsOfHighGrowth))*float64(i+1)
        ebitMargin = terminalEBITMargin - ((terminalEBITMargin-startingEBITMargin)/float64((yearsOfHighGrowth*2-1)))*float64((yearsOfHighGrowth-i-1))
        taxRate = input.EffectiveTaxRate + ((market.MarginalTaxRate-input.EffectiveTaxRate)/float64(yearsOfHighGrowth))*float64(i+1)
        costOfCapital = startingCostOfCapital - (startingCostOfCapital-terminalCostOfCapital)/float64(yearsOfHighGrowth)*float64(i+1)
        year, err := output.computeYearInGrowth(prevYear, revenueGrowthRate, ebitMargin, taxRate, costOfCapital, discountFactor)
        if err != nil {
            return err
        }
        output.LowGrowthYears[i] = *year
        prevYear = year
        discountFactor = discountFactor * (1 / (1 + costOfCapital))
    }
    terminalYear, err := output.computeTerminalYear()
    if err != nil {
        return err
    }
    output.TerminalYear = terminalYear
    output.TerminalCashFlow = terminalYear.FCFF
    // Growing perpetuity: TV = FCFF / (r - g).
    output.TerminalValue = terminalYear.FCFF / (terminalYear.CostOfCapital - terminalYear.RevenueGrowthRate)
    output.PresentValueOfTerminalValue = output.TerminalValue * terminalYear.DiscountFactor
    for _, year := range output.HighGrowthYears {
        output.PresentValueOfCashFlow += year.PresentValueOfCashFlow
    }
    for _, year := range output.LowGrowthYears {
        output.PresentValueOfCashFlow += year.PresentValueOfCashFlow
    }
    output.PresentValue = output.PresentValueOfTerminalValue + output.PresentValueOfCashFlow
    // Equity value = enterprise value - debt + cash.
    output.EquityValue = output.PresentValue - input.TotalDebt + input.TotalCash
    return nil
}
// computeBaseYear builds the year-0 snapshot from the raw inputs. The error
// is always nil and is kept for signature symmetry with the other helpers.
func (output *Output) computeBaseYear() (*OutputYear, error) {
    in := output.Input
    base := &OutputYear{
        Revenue: in.Revenue,
        EBIT:    in.EBIT,
        TaxRate: in.EffectiveTaxRate,
    }
    base.EBITMargin = base.EBIT / base.Revenue
    // Taxes only apply to a profitable base year; losses pass through untaxed.
    if base.EBIT > 0 {
        base.AfterTaxEBIT = base.EBIT * (1 - base.TaxRate)
    } else {
        base.AfterTaxEBIT = base.EBIT
    }
    return base, nil
}

// computeYearInGrowth projects one forecast year from the previous one using
// the supplied growth rate, margin, tax rate, cost of capital and cumulative
// discount factor. The error is always nil.
func (output *Output) computeYearInGrowth(previousYear *OutputYear, revenueGrowthRate float64, ebitMargin float64, taxRate float64, costOfCapital float64, discountFactor float64) (*OutputYear, error) {
    year := &OutputYear{
        RevenueGrowthRate: revenueGrowthRate,
        EBITMargin:        ebitMargin,
        TaxRate:           taxRate,
        CostOfCapital:     costOfCapital,
        DiscountFactor:    discountFactor,
    }
    year.Revenue = previousYear.Revenue * (1 + revenueGrowthRate)
    year.EBIT = year.Revenue * ebitMargin
    year.AfterTaxEBIT = year.EBIT * (1 - taxRate)
    // Reinvestment needed to support the revenue increase.
    year.Reinvestment = (year.Revenue - previousYear.Revenue) / output.Input.SalesToCapital
    year.FCFF = year.AfterTaxEBIT - year.Reinvestment
    year.PresentValueOfCashFlow = year.FCFF * discountFactor
    return year, nil
}
func (output *Output) computeTerminalYear() (*OutputYear, error) {
result := OutputYear{}
market := output.Market
input := output.Input
previousYear := output.HighGrowthYears[yearsOfHighGrowth-1]
result.RevenueGrowthRate = market.TerminalRiskFreeRate
result.Revenue = previousYear.Revenue * (1 + result.RevenueGrowthRate)
result.EBITMargin = input.TerminalEBITMargin
result.EBIT = result.Revenue * result.EBITMargin
result.TaxRate = market.MarginalTaxRate
result.AfterTaxEBIT = result.EBIT * (1 - result.TaxRate)
if result.RevenueGrowthRate > 0 {
result.Reinvestment = result.AfterTaxEBIT * (result.RevenueGrowthRate / input.TerminalCostOfCapital)
} else {
result.Reinvestment = 0
}
result.FCFF = result.AfterTaxEBIT - result.Reinvestment
result.CostOfCapital = input.TerminalCostOfCapital
result.DiscountFactor = previousYear.DiscountFactor
return &result, nil
} | pkg/valuation/output.go | 0.809878 | 0.698538 | output.go | starcoder |
package scale
import (
"fmt"
"sort"
"github.com/gvallee/collective_profiler/tools/internal/pkg/unit"
)
// mapFloat64sScaleDown moves all values one unit-scale step down
// (multiplying magnitudes by 1000) when the resulting scale is valid for
// the unit type; otherwise the input is returned unchanged.
func mapFloat64sScaleDown(unitType int, unitScale int, values map[int]float64) (int, int, map[int]float64) {
    if unitScale == -1 {
        // Unit not recognized, nothing we can do
        return unitType, unitScale, values
    }
    target := unitScale - 1
    if !unit.IsValidScale(unitType, target) {
        // Already at the smallest valid scale
        return unitType, unitScale, values
    }
    return unitType, target, mapFloat64sCompute(DOWN, values)
}

// mapFloat64sScaleUp moves all values one unit-scale step up (dividing
// magnitudes by 1000) when the resulting scale is valid for the unit type;
// otherwise the input is returned unchanged.
func mapFloat64sScaleUp(unitType int, unitScale int, values map[int]float64) (int, int, map[int]float64) {
    if unitScale == -1 {
        // Unit not recognized, nothing we can do
        return unitType, unitScale, values
    }
    target := unitScale + 1
    if !unit.IsValidScale(unitType, target) {
        // Already at the largest valid scale
        return unitType, unitScale, values
    }
    return unitType, target, mapFloat64sCompute(UP, values)
}

// mapFloat64sCompute returns a new map with every value multiplied (DOWN)
// or divided (UP) by 1000. An unrecognized op yields an empty map, matching
// the previous behavior.
func mapFloat64sCompute(op int, values map[int]float64) map[int]float64 {
    result := make(map[int]float64, len(values))
    switch op {
    case DOWN:
        for key, val := range values {
            result[key] = val * 1000
        }
    case UP:
        for key, val := range values {
            result[key] = val / 1000
        }
    }
    return result
}
// MapFloat64s scales a map of float64 values to a more readable unit,
// recursively moving the unit up (all values >= 1000) or down (all values
// within [0, 1]) until the values fall in a comfortable range or the unit
// cannot be scaled further. It returns the (possibly new) unit identifier
// and the rescaled map; an empty input map is an error.
func MapFloat64s(unitID string, values map[int]float64) (string, map[int]float64, error) {
    var sortedValues []float64
    if len(values) == 0 {
        return "", nil, fmt.Errorf("map is empty")
    }
    // Copy and sort the values to figure out what can be done
    for _, v := range values {
        sortedValues = append(sortedValues, v)
    }
    sort.Float64s(sortedValues)
    // If all values are 0 nothing can be done
    if allZerosFloat64s(sortedValues) {
        return unitID, values, nil
    }
    // NOTE(review): the >= 2 guard means a single-element map in (0, 1] is
    // never scaled down — confirm this is intentional.
    if len(sortedValues) >= 2 && sortedValues[0] >= 0 && sortedValues[len(values)-1] <= 1 {
        // We scale down all the values if possible
        // Translate the human reading unit into something we can inteprete
        unitType, unitScale := unit.FromString(unitID)
        unitType, unitScale, newValues := mapFloat64sScaleDown(unitType, unitScale, values)
        newUnitID := unit.ToString(unitType, unitScale)
        if newUnitID != unitID {
            if unit.IsMin(unitType, unitScale) {
                return newUnitID, newValues, nil
            }
            // Recurse: more than one scale step may be needed.
            return MapFloat64s(newUnitID, newValues)
        }
        // Nothing could be down returning...
        return newUnitID, newValues, nil
    }
    if len(sortedValues) > 0 && sortedValues[0] >= 1000 {
        // We scale up the value if possible
        // Translate the human reading unit into something we can inteprete
        unitType, unitScale := unit.FromString(unitID)
        unitType, unitScale, newValues := mapFloat64sScaleUp(unitType, unitScale, values)
        newUnitID := unit.ToString(unitType, unitScale)
        if unit.IsMax(unitType, unitScale) {
            return newUnitID, newValues, nil
        }
        // Recurse: more than one scale step may be needed.
        return MapFloat64s(newUnitID, newValues)
    }
    // Nothing to do, just return the same
    return unitID, values, nil
}
package entropy
import (
"errors"
"fmt"
kanzi "github.com/flanglet/kanzi-go"
)
// Code based on Order 0 range coder by <NAME> itself derived from the algorithm
// described by <NAME> in his seminal article in 1979.
// [<NAME> on the Data Recording Conference, Southampton, 1979]
const (
    // 60-bit coding interval top; _RANGE_MASK/_BOTTOM_RANGE drive renormalization.
    _TOP_RANGE    = uint64(0x0FFFFFFFFFFFFFFF)
    _BOTTOM_RANGE = uint64(0x000000000000FFFF)
    _RANGE_MASK   = uint64(0x0FFFFFFF00000000)
    _DEFAULT_RANGE_CHUNK_SIZE = uint(1 << 15) // 32 KB by default
    _DEFAULT_RANGE_LOG_RANGE  = uint(12)
    _RANGE_MAX_CHUNK_SIZE     = 1 << 30
)

// RangeEncoder a Order 0 Range Entropy Encoder
type RangeEncoder struct {
    low       uint64        // low end of the current coding interval
    rng       uint64        // width of the current coding interval
    alphabet  [256]int      // symbols present in the current chunk
    freqs     [256]int      // per-symbol frequencies for the current chunk
    cumFreqs  [257]uint64   // cumulative frequencies; cumFreqs[s+1]-cumFreqs[s] = freq(s)
    bitstream kanzi.OutputBitStream
    chunkSize uint // number of input bytes encoded per chunk
    logRange  uint // requested frequency-scaling precision in bits
    shift     uint // log range actually used for the current chunk
}

// NewRangeEncoder creates a new instance of RangeEncoder
// The given arguments are either empty or containing a chunk size and
// a log range (to specify the precision of the encoding).
// EG: call NewRangeEncoder(bs) or NewRangeEncoder(bs, 16384, 14)
// The default chunk size is 32768 bytes (1 << 15).
// NOTE(review): a single extra argument (len(args) == 1) is silently
// ignored rather than rejected — confirm this is intended.
func NewRangeEncoder(bs kanzi.OutputBitStream, args ...uint) (*RangeEncoder, error) {
    if bs == nil {
        return nil, errors.New("Range codec: Invalid null bitstream parameter")
    }
    if len(args) > 2 {
        return nil, errors.New("Range codec: At most one chunk size and one log range can be provided")
    }
    chkSize := _DEFAULT_RANGE_CHUNK_SIZE
    logRange := _DEFAULT_RANGE_LOG_RANGE
    if len(args) == 2 {
        chkSize = args[0]
        logRange = args[1]
    }
    // Validate the chunk size and precision bounds.
    if chkSize < 1024 {
        return nil, errors.New("Range codec: The chunk size must be at least 1024")
    }
    if chkSize > _RANGE_MAX_CHUNK_SIZE {
        return nil, fmt.Errorf("Range codec: The chunk size must be at most %d", _RANGE_MAX_CHUNK_SIZE)
    }
    if logRange < 8 || logRange > 16 {
        return nil, fmt.Errorf("Range codec: Invalid range parameter: %v (must be in [8..16])", logRange)
    }
    this := new(RangeEncoder)
    this.bitstream = bs
    this.alphabet = [256]int{}
    this.freqs = [256]int{}
    this.cumFreqs = [257]uint64{}
    this.logRange = logRange
    this.chunkSize = chkSize
    return this, nil
}
// updateFrequencies normalizes the chunk's symbol frequencies to 1<<lr,
// rebuilds the cumulative frequency table and writes the chunk header.
// Returns the alphabet size (number of distinct symbols) and any error.
func (this *RangeEncoder) updateFrequencies(frequencies []int, size int, lr uint) (int, error) {
    if frequencies == nil || len(frequencies) != 256 {
        return 0, errors.New("Range codec: Invalid frequencies parameter")
    }
    alphabetSize, err := NormalizeFrequencies(frequencies, this.alphabet[:], size, 1<<lr)
    if err != nil {
        return alphabetSize, err
    }
    if alphabetSize > 0 {
        this.cumFreqs[0] = 0
        // Create histogram of frequencies scaled to 'range'
        for i := range frequencies {
            this.cumFreqs[i+1] = this.cumFreqs[i] + uint64(frequencies[i])
        }
    }
    err = this.encodeHeader(alphabetSize, this.alphabet[:], frequencies, lr)
    return alphabetSize, err
}

// encodeHeader writes the chunk header to the bitstream: the alphabet,
// the log range, then all normalized frequencies except the first, grouped
// in chunks that share a common bit width. The decoder must mirror this
// layout exactly.
func (this *RangeEncoder) encodeHeader(alphabetSize int, alphabet []int, frequencies []int, lr uint) error {
    if _, err := EncodeAlphabet(this.bitstream, alphabet[0:alphabetSize]); err != nil {
        return err
    }
    if alphabetSize == 0 {
        return nil
    }
    this.bitstream.WriteBits(uint64(lr-8), 3) // logRange
    // Frequencies are grouped in chunks of 8 (or 6 for small alphabets).
    chkSize := 8
    if alphabetSize < 64 {
        chkSize = 6
    }
    // llr = number of bits needed to encode a per-chunk bit width.
    llr := uint(3)
    for 1<<llr <= lr {
        llr++
    }
    // Encode all frequencies (but the first one) by chunks
    for i := 1; i < alphabetSize; i += chkSize {
        max := frequencies[alphabet[i]] - 1
        logMax := uint(0)
        endj := i + chkSize
        if endj > alphabetSize {
            endj = alphabetSize
        }
        // Search for max frequency log size in next chunk
        for j := i + 1; j < endj; j++ {
            if frequencies[alphabet[j]]-1 > max {
                max = frequencies[alphabet[j]] - 1
            }
        }
        for 1<<logMax <= max {
            logMax++
        }
        this.bitstream.WriteBits(uint64(logMax), llr)
        if logMax == 0 {
            // all frequencies equal one in this chunk
            continue
        }
        // Write frequencies
        for j := i; j < endj; j++ {
            this.bitstream.WriteBits(uint64(frequencies[alphabet[j]]-1), logMax)
        }
    }
    return nil
}
// Write encodes the data provided into the bitstream. Returns the number of
// bytes written to the bitstream. Splits the input into chunks and encodes
// chunks sequentially based on local statistics.
func (this *RangeEncoder) Write(block []byte) (int, error) {
    if block == nil {
        return 0, errors.New("Range codec: Invalid null block parameter")
    }
    if len(block) == 0 {
        return 0, nil
    }
    sizeChunk := int(this.chunkSize)
    startChunk := 0
    end := len(block)
    for startChunk < end {
        // Reset the coder interval for each chunk.
        this.rng = _TOP_RANGE
        this.low = 0
        lr := this.logRange
        endChunk := startChunk + sizeChunk
        if endChunk > end {
            endChunk = end
        }
        // Lower log range if the size of the data block is small, but never
        // below 8 (the decoder's minimum: it reads logRange as 8 + 3 bits).
        for lr > 8 && 1<<lr > endChunk-startChunk {
            lr--
        }
        this.shift = lr
        buf := block[startChunk:endChunk]
        // Gather chunk statistics and emit the chunk header.
        alphabetSize, err := this.rebuildStatistics(buf, lr)
        if err != nil {
            return startChunk, err
        }
        if alphabetSize <= 1 {
            // Skip chunk if only one symbol; the decoder reconstructs such
            // chunks from the header alone (see RangeDecoder.Read).
            startChunk = endChunk
            continue
        }
        for i := range buf {
            this.encodeByte(buf[i])
        }
        // Flush 'low': the decoder primes its code register with exactly
        // 60 bits per chunk.
        this.bitstream.WriteBits(this.low, 60)
        startChunk = endChunk
    }
    return len(block), nil
}
// rebuildStatistics computes the chunk's symbol histogram into this.freqs,
// then delegates to updateFrequencies, which (per the comment above) updates
// the cumulative frequencies and encodes the chunk header. Returns the
// chunk's alphabet size.
func (this *RangeEncoder) rebuildStatistics(block []byte, lr uint) (int, error) {
    kanzi.ComputeHistogram(block, this.freqs[:], true, false)
    return this.updateFrequencies(this.freqs[:], len(block), lr)
}
// encodeByte encodes one symbol: the current interval [low, low+rng) is
// narrowed to the sub-interval the cumulative frequency table assigns to the
// symbol, then renormalized by emitting settled top bits.
func (this *RangeEncoder) encodeByte(b byte) {
    // Compute next low and range
    symbol := int(b)
    cumFreq := this.cumFreqs[symbol]
    this.rng >>= this.shift
    this.low += (cumFreq * this.rng)
    this.rng *= (this.cumFreqs[symbol+1] - cumFreq)
    // If the left-most digits are the same throughout the range, write bits to bitstream
    for {
        if (this.low^(this.low+this.rng))&_RANGE_MASK != 0 {
            // Top bits still undecided; only force renormalization when
            // the range has become too small to make progress.
            if this.rng > _BOTTOM_RANGE {
                break
            }
            // Normalize
            this.rng = -this.low & _BOTTOM_RANGE
        }
        // Emit 28 settled bits (bits 32..59 of low); mirrors the 28-bit
        // ReadBits in RangeDecoder.decodeByte.
        this.bitstream.WriteBits(this.low>>32, 28)
        this.rng <<= 28
        this.low <<= 28
    }
}
// BitStream returns the underlying bitstream
func (this *RangeEncoder) BitStream() kanzi.OutputBitStream {
return this.bitstream
}
// Dispose this implementation does nothing
func (this *RangeEncoder) Dispose() {
}
// RangeDecoder Order 0 Range Entropy Decoder.
// It mirrors RangeEncoder: input is decoded chunk by chunk, each chunk
// carrying its own statistics in a header.
type RangeDecoder struct {
    code      uint64 // bits pulled from the stream, tracked against [low, low+rng)
    low       uint64 // lower bound of the current interval
    rng       uint64 // width of the current interval
    alphabet  [256]int
    freqs     [256]int
    cumFreqs  [257]uint64 // cumulative frequencies; cumFreqs[i+1]-cumFreqs[i] is freq of symbol i
    f2s       []uint16    // mapping frequency -> symbol
    bitstream kanzi.InputBitStream
    chunkSize uint // bytes decoded per header refresh
    shift     uint // log range of the current chunk
}
// NewRangeDecoder creates a new instance of RangeDecoder.
// The variadic argument is either empty or holds a single chunk size, e.g.
// NewRangeDecoder(bs) or NewRangeDecoder(bs, 16384). The default chunk size
// is 65536 bytes; valid sizes lie in [1024, _RANGE_MAX_CHUNK_SIZE].
func NewRangeDecoder(bs kanzi.InputBitStream, args ...uint) (*RangeDecoder, error) {
    if bs == nil {
        return nil, errors.New("Range codec: Invalid null bitstream parameter")
    }
    if len(args) > 1 {
        return nil, errors.New("Range codec: At most one chunk size can be provided")
    }
    size := _DEFAULT_RANGE_CHUNK_SIZE
    if len(args) == 1 {
        size = args[0]
    }
    switch {
    case size < 1024:
        return nil, errors.New("Range codec: The chunk size must be at least 1024")
    case size > _RANGE_MAX_CHUNK_SIZE:
        return nil, fmt.Errorf("Range codec: The chunk size must be at most %d", _RANGE_MAX_CHUNK_SIZE)
    }
    // The fixed-size arrays start at their zero value; f2s starts empty and
    // is grown on demand by decodeHeader.
    return &RangeDecoder{
        bitstream: bs,
        f2s:       make([]uint16, 0),
        chunkSize: size,
    }, nil
}
// decodeHeader reads a chunk header written by RangeEncoder.encodeHeader:
// the alphabet, the log range (3 bits, biased by 8), then the per-group
// frequencies, each group prefixed by its bit width. The first symbol's
// frequency is inferred as scale minus the sum of the others. It also
// rebuilds the cumulative frequency table and the frequency->symbol reverse
// mapping (f2s) used by decodeByte.
// Returns the alphabet size, and an error on malformed input.
func (this *RangeDecoder) decodeHeader(frequencies []int) (int, error) {
    alphabetSize, err := DecodeAlphabet(this.bitstream, this.alphabet[:])
    if err != nil || alphabetSize == 0 {
        // Bug fix: propagate the DecodeAlphabet error. Previously this
        // returned nil, so Read() would keep decoding corrupt input.
        return alphabetSize, err
    }
    if alphabetSize != 256 {
        // Clear stale frequencies from a previous chunk; only symbols in
        // the current alphabet are reassigned below.
        for i := range frequencies {
            frequencies[i] = 0
        }
    }
    // Decode frequencies
    logRange := uint(8 + this.bitstream.ReadBits(3))
    scale := 1 << logRange
    this.shift = logRange
    sum := 0
    // Group size must match the encoder's choice for this alphabet size.
    chkSize := 8
    if alphabetSize < 64 {
        chkSize = 6
    }
    // llr = number of bits used for each group's bit-width prefix.
    llr := uint(3)
    for 1<<llr <= logRange {
        llr++
    }
    // Decode all frequencies (but the first one)
    for i := 1; i < alphabetSize; i += chkSize {
        logMax := uint(this.bitstream.ReadBits(llr))
        if 1<<logMax > scale {
            err := fmt.Errorf("Invalid bitstream: incorrect frequency size %v in range decoder", logMax)
            return alphabetSize, err
        }
        endj := i + chkSize
        if endj > alphabetSize {
            endj = alphabetSize
        }
        // Read frequencies
        for j := i; j < endj; j++ {
            // logMax == 0 means every frequency in the group is 1.
            freq := 1
            if logMax > 0 {
                // Frequencies are stored biased by -1 by the encoder.
                freq = int(1 + this.bitstream.ReadBits(logMax))
                if freq <= 0 || freq >= scale {
                    err := fmt.Errorf("Invalid bitstream: incorrect frequency %v for symbol '%v' in range decoder", freq, this.alphabet[j])
                    return alphabetSize, err
                }
            }
            frequencies[this.alphabet[j]] = freq
            sum += freq
        }
    }
    // Infer first frequency
    if scale <= sum {
        err := fmt.Errorf("Invalid bitstream: incorrect frequency %v for symbol '%v' in range decoder", frequencies[this.alphabet[0]], this.alphabet[0])
        return alphabetSize, err
    }
    frequencies[this.alphabet[0]] = scale - sum
    this.cumFreqs[0] = 0
    if len(this.f2s) < scale {
        this.f2s = make([]uint16, scale)
    }
    // Create reverse mapping: every cumulative-frequency slot records which
    // symbol owns it; the frequencies sum to scale, so all slots are filled.
    for i := range frequencies {
        this.cumFreqs[i+1] = this.cumFreqs[i] + uint64(frequencies[i])
        base := int(this.cumFreqs[i])
        for j := frequencies[i] - 1; j >= 0; j-- {
            this.f2s[base+j] = uint16(i)
        }
    }
    return alphabetSize, nil
}
// Read decodes data from the bitstream and returns it in the provided buffer.
// Decodes the data chunk by chunk sequentially.
// Returns the number of bytes read from the bitstream.
func (this *RangeDecoder) Read(block []byte) (int, error) {
    if block == nil {
        return 0, errors.New("Range codec: Invalid null block parameter")
    }
    end := len(block)
    startChunk := 0
    sizeChunk := int(this.chunkSize)
    for startChunk < end {
        endChunk := startChunk + sizeChunk
        if endChunk > end {
            endChunk = end
        }
        // Each chunk carries its own statistics in a header.
        alphabetSize, err := this.decodeHeader(this.freqs[:])
        if err != nil || alphabetSize == 0 {
            return startChunk, err
        }
        if alphabetSize == 1 {
            // Shortcut for chunks with only one symbol: the encoder wrote
            // no payload for such chunks (see RangeEncoder.Write).
            for i := startChunk; i < endChunk; i++ {
                block[i] = byte(this.alphabet[0])
            }
            startChunk = endChunk
            continue
        }
        this.rng = _TOP_RANGE
        this.low = 0
        // Prime the code register with 60 bits, matching the encoder's
        // 60-bit flush at the end of each chunk.
        this.code = this.bitstream.ReadBits(60)
        buf := block[startChunk:endChunk]
        for i := range buf {
            buf[i] = this.decodeByte()
        }
        startChunk = endChunk
    }
    return len(block), nil
}
// decodeByte mirrors RangeEncoder.encodeByte: it locates the symbol whose
// cumulative-frequency slot contains the current code, narrows the interval
// accordingly, and renormalizes by pulling 28 bits at a time from the stream.
func (this *RangeDecoder) decodeByte() byte {
    // Compute next low and range
    this.rng >>= this.shift
    // Cumulative-frequency slot addressed by the code within the interval;
    // f2s maps it back to the symbol.
    count := int((this.code - this.low) / this.rng)
    symbol := this.f2s[count]
    cumFreq := this.cumFreqs[symbol]
    this.low += (cumFreq * this.rng)
    this.rng *= (this.cumFreqs[symbol+1] - cumFreq)
    // If the left-most digits are the same throughout the range, read bits from bitstream
    for {
        if (this.low^(this.low+this.rng))&_RANGE_MASK != 0 {
            // Top bits still undecided; only force renormalization when
            // the range has become too small to make progress.
            if this.rng > _BOTTOM_RANGE {
                break
            }
            // Normalize
            this.rng = -this.low & _BOTTOM_RANGE
        }
        // Shift in the next 28 bits, matching the encoder's 28-bit writes.
        this.code = (this.code << 28) | this.bitstream.ReadBits(28)
        this.rng <<= 28
        this.low <<= 28
    }
    return byte(symbol)
}
// BitStream returns the underlying bitstream
func (this *RangeDecoder) BitStream() kanzi.InputBitStream {
return this.bitstream
}
// Dispose this implementation does nothing
func (this *RangeDecoder) Dispose() {
} | entropy/RangeCodec.go | 0.636127 | 0.405154 | RangeCodec.go | starcoder |
package netpbm
import (
"bufio"
"errors"
"fmt"
"image"
"image/color"
"io"
"strings"
"github.com/spakin/netpbm/npcolor"
)
// An RGBM is an in-memory image whose At method returns npcolor.RGBM values.
type RGBM struct {
// Pix holds the image's pixels, in R, G, B (no M) order. The pixel at
// (x, y) starts at Pix[(y-Rect.Min.Y)*Stride + (x-Rect.Min.X)*3].
Pix []uint8
// Stride is the Pix stride (in bytes) between vertically adjacent
// pixels.
Stride int
// Rect is the image's bounds.
Rect image.Rectangle
// Model is the image's color model.
Model npcolor.RGBMModel
}
// ColorModel returns the RGBM image's color model.
func (p *RGBM) ColorModel() color.Model { return p.Model }
// Bounds returns the domain for which At can return non-zero color. The
// bounds do not necessarily contain the point (0, 0).
func (p *RGBM) Bounds() image.Rectangle { return p.Rect }
// At returns the color of the pixel at (x, y) as a color.Color.
// At(Bounds().Min.X, Bounds().Min.Y) returns the upper-left pixel of the grid.
// At(Bounds().Max.X-1, Bounds().Max.Y-1) returns the lower-right one.
func (p *RGBM) At(x, y int) color.Color {
return p.RGBMAt(x, y)
}
// RGBMAt returns the color of the pixel at (x, y) as an npcolor.RGBM.
// Coordinates outside the image bounds yield the zero color.
func (p *RGBM) RGBMAt(x, y int) npcolor.RGBM {
    if !(image.Point{x, y}.In(p.Rect)) {
        return npcolor.RGBM{}
    }
    px := p.Pix[p.PixOffset(x, y):]
    return npcolor.RGBM{R: px[0], G: px[1], B: px[2], M: p.Model.M}
}
// PixOffset returns the index of the first element of Pix that corresponds to
// the pixel at (x, y).
func (p *RGBM) PixOffset(x, y int) int {
return (y-p.Rect.Min.Y)*p.Stride + (x-p.Rect.Min.X)*3
}
// Set sets the pixel at (x, y) to a given color, expressed as a color.Color.
// Points outside the image bounds are silently ignored.
func (p *RGBM) Set(x, y int, c color.Color) {
    if !(image.Point{x, y}.In(p.Rect)) {
        return
    }
    converted := p.Model.Convert(c).(npcolor.RGBM)
    i := p.PixOffset(x, y)
    p.Pix[i], p.Pix[i+1], p.Pix[i+2] = converted.R, converted.G, converted.B
}
// SetRGBM sets the pixel at (x, y) to a given color, expressed as an
// npcolor.RGBM. A color whose maximum channel value matches the image's is
// stored directly; otherwise it is routed through Set for model conversion.
func (p *RGBM) SetRGBM(x, y int, c npcolor.RGBM) {
    if !(image.Point{x, y}.In(p.Rect)) {
        return
    }
    if c.M != p.Model.M {
        p.Set(x, y, c)
        return
    }
    i := p.PixOffset(x, y)
    p.Pix[i], p.Pix[i+1], p.Pix[i+2] = c.R, c.G, c.B
}
// SubImage returns an image representing the portion of the image p visible
// through r. The returned value shares pixels with the original image.
func (p *RGBM) SubImage(r image.Rectangle) image.Image {
    r = r.Intersect(p.Rect)
    // If r1 and r2 are Rectangles, r1.Intersect(r2) is not guaranteed to
    // be inside either r1 or r2 if the intersection is empty. Without
    // explicitly checking for this, the Pix[i:] expression below can
    // panic.
    if r.Empty() {
        return &RGBM{Model: p.Model}
    }
    i := p.PixOffset(r.Min.X, r.Min.Y)
    return &RGBM{
        Pix:    p.Pix[i:],
        Stride: p.Stride,
        Rect:   r,
        // Bug fix: propagate the color model. Without it, At/RGBMAt on the
        // sub-image reported M == 0 and MaxValue() returned 0.
        Model: p.Model,
    }
}
// Opaque scans the entire image and reports whether it is fully opaque.
func (p *RGBM) Opaque() bool {
return true
}
// MaxValue returns the maximum value allowed on any color channel.
func (p *RGBM) MaxValue() uint16 {
return uint16(p.Model.M)
}
// Format identifies the image as a PPM image.
func (p *RGBM) Format() Format {
return PPM
}
// HasAlpha indicates that there is no alpha channel.
func (p *RGBM) HasAlpha() bool {
return false
}
// NewRGBM returns a new RGBM with the given bounds and maximum channel value.
func NewRGBM(r image.Rectangle, m uint8) *RGBM {
    w, h := r.Dx(), r.Dy()
    return &RGBM{
        Pix:    make([]uint8, 3*w*h),
        Stride: 3 * w,
        Rect:   r,
        Model:  npcolor.RGBMModel{M: m},
    }
}
// An RGBM64 is an in-memory image whose At method returns npcolor.RGBM64
// values.
type RGBM64 struct {
// Pix holds the image's pixels, in R, G, B, M order and big-endian
// format. The pixel at (x, y) starts at Pix[(y-Rect.Min.Y)*Stride +
// (x-Rect.Min.X)*8].
Pix []uint8
// Stride is the Pix stride (in bytes) between vertically adjacent
// pixels.
Stride int
// Rect is the image's bounds.
Rect image.Rectangle
// Model is the image's color model.
Model npcolor.RGBM64Model
}
// ColorModel returns the RGBM64 image's color model.
func (p *RGBM64) ColorModel() color.Model { return p.Model }
// Bounds returns the domain for which At can return non-zero color. The
// bounds do not necessarily contain the point (0, 0).
func (p *RGBM64) Bounds() image.Rectangle { return p.Rect }
// At returns the color of the pixel at (x, y) as a color.Color.
// At(Bounds().Min.X, Bounds().Min.Y) returns the upper-left pixel of the grid.
// At(Bounds().Max.X-1, Bounds().Max.Y-1) returns the lower-right one.
func (p *RGBM64) At(x, y int) color.Color {
return p.RGBM64At(x, y)
}
// RGBM64At returns the color of the pixel at (x, y) as an npcolor.RGBM64.
func (p *RGBM64) RGBM64At(x, y int) npcolor.RGBM64 {
if !(image.Point{x, y}.In(p.Rect)) {
return npcolor.RGBM64{}
}
i := p.PixOffset(x, y)
return npcolor.RGBM64{
R: uint16(p.Pix[i+0])<<8 | uint16(p.Pix[i+1]),
G: uint16(p.Pix[i+2])<<8 | uint16(p.Pix[i+3]),
B: uint16(p.Pix[i+4])<<8 | uint16(p.Pix[i+5]),
M: p.Model.M,
}
}
// PixOffset returns the index of the first element of Pix that corresponds to
// the pixel at (x, y).
func (p *RGBM64) PixOffset(x, y int) int {
return (y-p.Rect.Min.Y)*p.Stride + (x-p.Rect.Min.X)*6
}
// Set sets the pixel at (x, y) to a given color, expressed as a color.Color.
func (p *RGBM64) Set(x, y int, c color.Color) {
if !(image.Point{x, y}.In(p.Rect)) {
return
}
i := p.PixOffset(x, y)
c1 := p.Model.Convert(c).(npcolor.RGBM64)
p.Pix[i+0] = uint8(c1.R >> 8)
p.Pix[i+1] = uint8(c1.R)
p.Pix[i+2] = uint8(c1.G >> 8)
p.Pix[i+3] = uint8(c1.G)
p.Pix[i+4] = uint8(c1.B >> 8)
p.Pix[i+5] = uint8(c1.B)
}
// SetRGBM64 sets the pixel at (x, y) to a given color, expressed as an
// npcolor.RGBM.
func (p *RGBM64) SetRGBM64(x, y int, c npcolor.RGBM64) {
if !(image.Point{x, y}.In(p.Rect)) {
return
}
i := p.PixOffset(x, y)
if c.M == p.Model.M {
p.Pix[i+0] = uint8(c.R >> 8)
p.Pix[i+1] = uint8(c.R)
p.Pix[i+2] = uint8(c.G >> 8)
p.Pix[i+3] = uint8(c.G)
p.Pix[i+4] = uint8(c.B >> 8)
p.Pix[i+5] = uint8(c.B)
} else {
p.Set(x, y, c)
}
}
// SubImage returns an image representing the portion of the image p visible
// through r. The returned value shares pixels with the original image.
func (p *RGBM64) SubImage(r image.Rectangle) image.Image {
    r = r.Intersect(p.Rect)
    // If r1 and r2 are Rectangles, r1.Intersect(r2) is not guaranteed to
    // be inside either r1 or r2 if the intersection is empty. Without
    // explicitly checking for this, the Pix[i:] expression below can
    // panic.
    if r.Empty() {
        return &RGBM64{Model: p.Model}
    }
    i := p.PixOffset(r.Min.X, r.Min.Y)
    return &RGBM64{
        Pix:    p.Pix[i:],
        Stride: p.Stride,
        Rect:   r,
        // Bug fix: propagate the color model. Without it, At/RGBM64At on
        // the sub-image reported M == 0 and MaxValue() returned 0.
        Model: p.Model,
    }
}
// Opaque scans the entire image and reports whether it is fully opaque.
func (p *RGBM64) Opaque() bool {
return true
}
// MaxValue returns the maximum value allowed on any color channel.
func (p *RGBM64) MaxValue() uint16 {
return uint16(p.Model.M)
}
// Format identifies the image as a PPM image.
func (p *RGBM64) Format() Format {
return PPM
}
// HasAlpha indicates that there is no alpha channel.
func (p *RGBM64) HasAlpha() bool {
return false
}
// NewRGBM64 returns a new RGBM64 with the given bounds and maximum channel
// value.
func NewRGBM64(r image.Rectangle, m uint16) *RGBM64 {
    w, h := r.Dx(), r.Dy()
    return &RGBM64{
        Pix:    make([]uint8, 6*w*h),
        Stride: 6 * w,
        Rect:   r,
        Model:  npcolor.RGBM64Model{M: m},
    }
}
// decodeConfigPPMWithComments reads and parses a PPM header, either "raw"
// (binary) or "plain" (ASCII). Unlike decodeConfigPPM, it also returns any
// comments appearing in the file.
func decodeConfigPPMWithComments(r io.Reader) (image.Config, []string, error) {
    // We really want a bufio.Reader. If we were given one, use it. If
    // not, create a new one.
    br, ok := r.(*bufio.Reader)
    if !ok {
        br = bufio.NewReader(r)
    }
    nr := newNetpbmReader(br)
    // Parse the PPM header.
    header, ok := nr.GetNetpbmHeader()
    if !ok {
        err := nr.Err()
        if err == nil {
            // No underlying I/O error, so the header itself was malformed.
            err = errors.New("Invalid PPM header")
        }
        return image.Config{}, nil, err
    }
    // Store and return the image configuration.
    var cfg image.Config
    cfg.Width = header.Width
    cfg.Height = header.Height
    // Maxval selects between the 8-bit (< 256) and 16-bit color models.
    if header.Maxval < 256 {
        cfg.ColorModel = npcolor.RGBMModel{M: uint8(header.Maxval)}
    } else {
        cfg.ColorModel = npcolor.RGBM64Model{M: uint16(header.Maxval)}
    }
    return cfg, header.Comments, nil
}
// decodeConfigPPM reads and parses a PPM header, either "raw" (binary) or
// "plain" (ASCII), discarding any comments found in the file.
func decodeConfigPPM(r io.Reader) (image.Config, error) {
    cfg, _, err := decodeConfigPPMWithComments(r)
    return cfg, err
}
// decodePPMWithComments reads a complete "raw" (binary) PPM image. Unlike
// decodePPM, it also returns any comments appearing in the file.
func decodePPMWithComments(r io.Reader) (image.Image, []string, error) {
    // Read the image header, and use it to prepare a color image.
    br := bufio.NewReader(r)
    config, comments, err := decodeConfigPPMWithComments(br)
    if err != nil {
        return nil, nil, err
    }
    // Create either a Color or a Color64 image.
    var img image.Image // Image to return
    var data []uint8    // RGB (no M) image data
    var maxVal uint     // 100% white value
    switch model := config.ColorModel.(type) {
    case npcolor.RGBMModel:
        maxVal = uint(model.M)
        rgb := NewRGBM(image.Rect(0, 0, config.Width, config.Height), uint8(maxVal))
        data = rgb.Pix
        img = rgb
    case npcolor.RGBM64Model:
        maxVal = uint(model.M)
        rgb := NewRGBM64(image.Rect(0, 0, config.Width, config.Height), uint16(maxVal))
        data = rgb.Pix
        img = rgb
    default:
        panic("Unexpected color model")
    }
    // Raw PPM images are nice because we can read directly into the image
    // data. Loop because Read may return fewer bytes than requested; EOF is
    // tolerated as long as each call makes progress, and a zero-byte read
    // means the file was truncated.
    for len(data) > 0 {
        nRead, err := br.Read(data)
        if err != nil && err != io.EOF {
            return img, nil, err
        }
        if nRead == 0 {
            return img, nil, errors.New("Failed to read binary PPM data")
        }
        data = data[nRead:]
    }
    return img, comments, nil
}
// decodePPM reads a complete "raw" (binary) PPM image, discarding any
// comments found in the file.
func decodePPM(r io.Reader) (image.Image, error) {
    decoded, _, err := decodePPMWithComments(r)
    return decoded, err
}
// decodePPMPlainWithComments reads a complete "plain" (ASCII) PPM image.
// Unlike decodePPMPlain, it also returns any comments appearing in the file.
func decodePPMPlainWithComments(r io.Reader) (image.Image, []string, error) {
    // Read the image header, and use it to prepare a color image.
    br := bufio.NewReader(r)
    config, comments, err := decodeConfigPPMWithComments(br)
    if err != nil {
        return nil, nil, err
    }
    var img image.Image // Image to return
    // Define a simple error handler.
    nr := newNetpbmReader(br)
    badness := func() (image.Image, []string, error) {
        // Something went wrong. Either we have an error code to
        // explain what or we make up a generic error message.
        err := nr.Err()
        if err == nil {
            err = errors.New("Failed to parse ASCII PPM data")
        }
        return img, nil, err
    }
    // Create either a Color or a Color64 image, selected by the header's
    // maximum channel value.
    var data []uint8 // Image data
    var maxVal int   // 100% white value
    switch model := config.ColorModel.(type) {
    case npcolor.RGBMModel:
        maxVal = int(model.M)
        rgb := NewRGBM(image.Rect(0, 0, config.Width, config.Height), uint8(maxVal))
        data = rgb.Pix
        img = rgb
    case npcolor.RGBM64Model:
        maxVal = int(model.M)
        rgb := NewRGBM64(image.Rect(0, 0, config.Width, config.Height), uint16(maxVal))
        data = rgb.Pix
        img = rgb
    default:
        panic("Unexpected color model")
    }
    // Read ASCII base-10 integers until no more remain.
    if !nr.GetASCIIData(maxVal, data) {
        return badness()
    }
    return img, comments, nil
}
// decodePPMPlain reads a complete "plain" (ASCII) PPM image, discarding any
// comments found in the file.
func decodePPMPlain(r io.Reader) (image.Image, error) {
    decoded, _, err := decodePPMPlainWithComments(r)
    return decoded, err
}
// Indicate that we can decode both raw ("P6") and plain ("P3") PPM files by
// registering their magic numbers with the standard image package, so
// image.Decode recognizes them automatically.
func init() {
    image.RegisterFormat("ppm", "P6", decodePPM, decodeConfigPPM)
    image.RegisterFormat("ppm", "P3", decodePPMPlain, decodeConfigPPM)
}
// encodePPM writes an arbitrary image in PPM format: magic number, optional
// comments, dimensions, maximum channel value, then the pixel data.
func encodePPM(w io.Writer, img image.Image, opts *EncodeOptions) error {
    // The magic number selects raw/binary ("P6") or plain/ASCII ("P3") PPM.
    magic := "P6"
    if opts.Plain {
        magic = "P3"
    }
    fmt.Fprintln(w, magic)
    // PPM comments cannot span lines, so flatten embedded line breaks.
    sanitize := strings.NewReplacer("\n", " ", "\r", " ")
    for _, cmt := range opts.Comments {
        fmt.Fprintf(w, "# %s\n", sanitize.Replace(cmt))
    }
    rect := img.Bounds()
    fmt.Fprintf(w, "%d %d\n", rect.Dx(), rect.Dy())
    fmt.Fprintf(w, "%d\n", opts.MaxValue)
    // Write the PPM data; values above 255 require 16-bit samples.
    if opts.MaxValue < 256 {
        return encodeRGBData(w, img, opts)
    }
    return encodeRGB64Data(w, img, opts)
}
// encodeRGBData writes image data as 8-bit samples. A producer goroutine
// converts pixels and streams individual color samples over a channel; the
// caller's goroutine drains the channel and writes them out.
func encodeRGBData(w io.Writer, img image.Image, opts *EncodeOptions) error {
    // In the background, write each 8-bit color sample into a channel.
    rect := img.Bounds()
    width := rect.Max.X - rect.Min.X
    // Buffer one row's worth of samples (3 per pixel) to decouple the
    // producer from the writer.
    samples := make(chan uint16, width*3)
    go func() {
        cm := npcolor.RGBMModel{M: uint8(opts.MaxValue)}
        for y := rect.Min.Y; y < rect.Max.Y; y++ {
            for x := rect.Min.X; x < rect.Max.X; x++ {
                c := cm.Convert(img.At(x, y)).(npcolor.RGBM)
                samples <- uint16(c.R)
                samples <- uint16(c.G)
                samples <- uint16(c.B)
            }
        }
        // Closing the channel terminates the consumer's loop, so the
        // goroutine cannot leak.
        close(samples)
    }()
    // In the foreground, consume color samples and write them to the image
    // file. The final argument is presumably the byte width per sample --
    // TODO confirm against writeRawData.
    if opts.Plain {
        return writePlainData(w, samples)
    }
    return writeRawData(w, samples, 1)
}
// encodeRGB64Data writes image data as 16-bit samples.
func encodeRGB64Data(w io.Writer, img image.Image, opts *EncodeOptions) error {
// In the background, write each 16-bit color sample into a channel.
rect := img.Bounds()
width := rect.Max.X - rect.Min.X
samples := make(chan uint16, width*3)
go func() {
cm := npcolor.RGBM64Model{M: opts.MaxValue}
for y := rect.Min.Y; y < rect.Max.Y; y++ {
for x := rect.Min.X; x < rect.Max.X; x++ {
c := cm.Convert(img.At(x, y)).(npcolor.RGBM64)
samples <- c.R
samples <- c.G
samples <- c.B
}
}
close(samples)
}()
// In the foreground, consume color samples and write them to the image
// file.
if opts.Plain {
return writePlainData(w, samples)
}
return writeRawData(w, samples, 2)
} | ppm.go | 0.840848 | 0.547283 | ppm.go | starcoder |
package cp
// This is a user defined function that gets passed in to the Marching process
// the user establishes a PolyLineSet, passes a pointer to their function, and they
// populate it. In most cases you want to use PolyLineCollectSegment instead of defining your own
type MarchSegmentFunc func(v0 Vector, v1 Vector, segmentData *PolyLineSet)
// This is a user defined function that gets passed every single point from the bounding
// box the user passes into the March process - you can use this to sample an image and
// check for alpha values or really any 2d matrix you define like a tile map.
// NOTE: I could not determine a use case for the sample_data pointer from the original code
// so I removed it here - open to adding it back in if there is a reason.
type MarchSampleFunc func(point Vector) float64
type MarchCellFunc func(t, a, b, c, d, x0, x1, y0, y1 float64, marchSegment MarchSegmentFunc, segmentData *PolyLineSet)
// MarchCells runs the marching-squares traversal shared by MarchSoft and
// MarchHard (the looping and sample caching code). The bounding box is
// sampled on an xSamples x ySamples grid; "buffer" caches the previous row
// of samples so every grid point is sampled exactly once. For each cell, the
// corner samples (a = bottom-left, b = bottom-right, c = top-left,
// d = top-right) are handed to marchCell, which appends contour segments to
// the returned PolyLineSet.
func MarchCells(bb BB, xSamples int64, ySamples int64, t float64, marchSegment MarchSegmentFunc, marchSample MarchSampleFunc, marchCell MarchCellFunc) *PolyLineSet {
    var x_denom, y_denom float64
    x_denom = 1.0 / float64(xSamples-1)
    y_denom = 1.0 / float64(ySamples-1)
    // Seed the row cache with samples along the bottom edge (y = bb.B).
    buffer := make([]float64, xSamples)
    var i, j int64
    for i = 0; i < xSamples; i++ {
        buffer[i] = marchSample(Vector{Lerp(bb.L, bb.R, float64(i)*x_denom), bb.B})
    }
    segmentData := &PolyLineSet{}
    for j = 0; j < ySamples-1; j++ {
        y0 := Lerp(bb.B, bb.T, float64(j+0)*y_denom)
        y1 := Lerp(bb.B, bb.T, float64(j+1)*y_denom)
        // a/b slide along the cached bottom row; c/d are sampled on the
        // top row and written back into the cache for the next row pass.
        a := buffer[0]
        b := buffer[0]
        c := marchSample(Vector{bb.L, y1})
        d := c
        buffer[0] = d
        for i = 0; i < xSamples-1; i++ {
            x0 := Lerp(bb.L, bb.R, float64(i+0)*x_denom)
            x1 := Lerp(bb.L, bb.R, float64(i+1)*x_denom)
            a = b
            b = buffer[i+1]
            c = d
            d = marchSample(Vector{x1, y1})
            buffer[i+1] = d
            marchCell(t, a, b, c, d, x0, x1, y0, y1, marchSegment, segmentData)
        }
    }
    return segmentData
}
// seg forwards a single candidate contour segment, skipping degenerate
// zero-length segments. Note that the endpoints are handed to marchSegment
// in reversed order (v1 first).
func seg(v0 Vector, v1 Vector, marchSegment MarchSegmentFunc, segmentData *PolyLineSet) {
    if v0.Equal(v1) {
        return
    }
    marchSegment(v1, v0, segmentData)
}
// midlerp locates the fraction of the way from sample s0 to sample s1 at
// which the threshold t falls, and maps that fraction onto the coordinate
// span [x0, x1].
func midlerp(x0, x1, s0, s1, t float64) float64 {
    frac := (t - s0) / (s1 - s0)
    return Lerp(x0, x1, frac)
}
// MarchCellSoft emits anti-aliased contour segments for one cell: crossing
// points are placed by inverse-interpolating the threshold t between corner
// samples via midlerp. The 4-bit case index packs the threshold tests of the
// corners (bit0 = a bottom-left, bit1 = b bottom-right, bit2 = c top-left,
// bit3 = d top-right); cases 0x0 and 0xF (all corners on one side) emit no
// segments and are absent from the switch.
func MarchCellSoft(t, a, b, c, d, x0, x1, y0, y1 float64, marchSegment MarchSegmentFunc, segmentData *PolyLineSet) {
    at := 0
    bt := 0
    ct := 0
    dt := 0
    if a > t {
        at = 1
    }
    if b > t {
        bt = 1
    }
    if c > t {
        ct = 1
    }
    if d > t {
        dt = 1
    }
    switch (at)<<0 | (bt)<<1 | (ct)<<2 | (dt)<<3 {
    case 0x1:
        seg(Vector{x0, midlerp(y0, y1, a, c, t)}, Vector{midlerp(x0, x1, a, b, t), y0}, marchSegment, segmentData)
    case 0x2:
        seg(Vector{midlerp(x0, x1, a, b, t), y0}, Vector{x1, midlerp(y0, y1, b, d, t)}, marchSegment, segmentData)
    case 0x3:
        seg(Vector{x0, midlerp(y0, y1, a, c, t)}, Vector{x1, midlerp(y0, y1, b, d, t)}, marchSegment, segmentData)
    case 0x4:
        seg(Vector{midlerp(x0, x1, c, d, t), y1}, Vector{x0, midlerp(y0, y1, a, c, t)}, marchSegment, segmentData)
    case 0x5:
        seg(Vector{midlerp(x0, x1, c, d, t), y1}, Vector{midlerp(x0, x1, a, b, t), y0}, marchSegment, segmentData)
    case 0x6:
        // Ambiguous saddle (b and c above threshold): two segments.
        seg(Vector{midlerp(x0, x1, a, b, t), y0}, Vector{x1, midlerp(y0, y1, b, d, t)}, marchSegment, segmentData)
        seg(Vector{midlerp(x0, x1, c, d, t), y1}, Vector{x0, midlerp(y0, y1, a, c, t)}, marchSegment, segmentData)
    case 0x7:
        seg(Vector{midlerp(x0, x1, c, d, t), y1}, Vector{x1, midlerp(y0, y1, b, d, t)}, marchSegment, segmentData)
    case 0x8:
        seg(Vector{x1, midlerp(y0, y1, b, d, t)}, Vector{midlerp(x0, x1, c, d, t), y1}, marchSegment, segmentData)
    case 0x9:
        // Ambiguous saddle (a and d above threshold): two segments.
        seg(Vector{x0, midlerp(y0, y1, a, c, t)}, Vector{midlerp(x0, x1, a, b, t), y0}, marchSegment, segmentData)
        seg(Vector{x1, midlerp(y0, y1, b, d, t)}, Vector{midlerp(x0, x1, c, d, t), y1}, marchSegment, segmentData)
    case 0xA:
        seg(Vector{midlerp(x0, x1, a, b, t), y0}, Vector{midlerp(x0, x1, c, d, t), y1}, marchSegment, segmentData)
    case 0xB:
        seg(Vector{x0, midlerp(y0, y1, a, c, t)}, Vector{midlerp(x0, x1, c, d, t), y1}, marchSegment, segmentData)
    case 0xC:
        seg(Vector{x1, midlerp(y0, y1, b, d, t)}, Vector{x0, midlerp(y0, y1, a, c, t)}, marchSegment, segmentData)
    case 0xD:
        seg(Vector{x1, midlerp(y0, y1, b, d, t)}, Vector{midlerp(x0, x1, a, b, t), y0}, marchSegment, segmentData)
    case 0xE:
        seg(Vector{midlerp(x0, x1, a, b, t), y0}, Vector{x0, midlerp(y0, y1, a, c, t)}, marchSegment, segmentData)
    }
}
// MarchSoft traces an anti-aliased contour of an image along a particular
// threshold. The given number of samples will be taken and spread across the
// bounding box area using the sampling function. The segment function will
// be called for each segment detected that lies along the density contour
// for threshold t. Delegates to MarchCells with the interpolating cell
// handler MarchCellSoft.
func MarchSoft(bb BB, xSamples, ySamples int64, t float64, marchSegment MarchSegmentFunc, marchSample MarchSampleFunc) *PolyLineSet {
    return MarchCells(bb, xSamples, ySamples, t, marchSegment, marchSample, MarchCellSoft)
}
// segs emits the two-piece polyline a-b-c through seg: the b-c piece first,
// then a-b (seg itself reverses each piece's endpoint order).
func segs(a, b, c Vector, marchSegment MarchSegmentFunc, segmentData *PolyLineSet) {
    seg(b, c, marchSegment, segmentData)
    seg(a, b, marchSegment, segmentData)
}
// MarchCellHard emits aliased contour segments for one cell: instead of
// interpolating the crossing point, segments snap to the cell-edge midpoints
// (xm, ym). The 4-bit case index packs the threshold tests of the corners
// (bit0 = a bottom-left, bit1 = b bottom-right, bit2 = c top-left,
// bit3 = d top-right); cases 0x0 and 0xF emit no segments and are absent
// from the switch.
func MarchCellHard(t, a, b, c, d, x0, x1, y0, y1 float64, marchSegment MarchSegmentFunc, segmentData *PolyLineSet) {
    // Midpoints of the cell in x and y; all segment endpoints snap here.
    xm := Lerp(x0, x1, 0.5)
    ym := Lerp(y0, y1, 0.5)
    at := 0
    bt := 0
    ct := 0
    dt := 0
    if a > t {
        at = 1
    }
    if b > t {
        bt = 1
    }
    if c > t {
        ct = 1
    }
    if d > t {
        dt = 1
    }
    switch (at)<<0 | (bt)<<1 | (ct)<<2 | (dt)<<3 {
    case 0x1:
        segs(Vector{x0, ym}, Vector{xm, ym}, Vector{xm, y0}, marchSegment, segmentData)
    case 0x2:
        segs(Vector{xm, y0}, Vector{xm, ym}, Vector{x1, ym}, marchSegment, segmentData)
    case 0x3:
        seg(Vector{x0, ym}, Vector{x1, ym}, marchSegment, segmentData)
    case 0x4:
        segs(Vector{xm, y1}, Vector{xm, ym}, Vector{x0, ym}, marchSegment, segmentData)
    case 0x5:
        seg(Vector{xm, y1}, Vector{xm, y0}, marchSegment, segmentData)
    case 0x6:
        // Ambiguous saddle (b and c above threshold): two polylines.
        segs(Vector{xm, y0}, Vector{xm, ym}, Vector{x0, ym}, marchSegment, segmentData)
        segs(Vector{xm, y1}, Vector{xm, ym}, Vector{x1, ym}, marchSegment, segmentData)
    case 0x7:
        segs(Vector{xm, y1}, Vector{xm, ym}, Vector{x1, ym}, marchSegment, segmentData)
    case 0x8:
        segs(Vector{x1, ym}, Vector{xm, ym}, Vector{xm, y1}, marchSegment, segmentData)
    case 0x9:
        // Ambiguous saddle (a and d above threshold): two polylines.
        segs(Vector{x1, ym}, Vector{xm, ym}, Vector{xm, y0}, marchSegment, segmentData)
        segs(Vector{x0, ym}, Vector{xm, ym}, Vector{xm, y1}, marchSegment, segmentData)
    case 0xA:
        seg(Vector{xm, y0}, Vector{xm, y1}, marchSegment, segmentData)
    case 0xB:
        segs(Vector{x0, ym}, Vector{xm, ym}, Vector{xm, y1}, marchSegment, segmentData)
    case 0xC:
        seg(Vector{x1, ym}, Vector{x0, ym}, marchSegment, segmentData)
    case 0xD:
        segs(Vector{x1, ym}, Vector{xm, ym}, Vector{xm, y0}, marchSegment, segmentData)
    case 0xE:
        segs(Vector{xm, y0}, Vector{xm, ym}, Vector{x0, ym}, marchSegment, segmentData)
    }
}
/// Trace an aliased curve of an image along a particular threshold.
/// The given number of samples will be taken and spread across the bounding box area using the sampling function and context.
/// The segment function will be called for each segment detected that lies along the density contour for @c threshold.
func MarchHard(bb BB, xSamples, ySamples int64, t float64, marchSegment MarchSegmentFunc, marchSample MarchSampleFunc) *PolyLineSet {
return MarchCells(bb, xSamples, ySamples, t, marchSegment, marchSample, MarchCellHard)
} | march.go | 0.626467 | 0.700264 | march.go | starcoder |
package graph
import (
"errors"
"fmt"
)
var alreadyConnected = errors.New("graph: edge already fully connected")
// Edge is an edge in a graph, joining a tail node u to a head node v
// (Tail returns u, Head returns v). The unexported methods are internal
// bookkeeping hooks used by the graph implementation.
type Edge interface {
    ID() int
    Weight() float64
    Nodes() (u, v Node)
    Head() Node
    Tail() Node
    index() int     // position in the graph's compact edge list
    setIndex(int)   // update that cached position
    setID(int)      // renumber the edge
    join(u, v Node) // set both endpoints at once
    disconnect()    // detach the edge from both endpoint nodes
    reconnect(dst, src Node)
}
var _ Edge = (*edge)(nil)
// EdgeFilter is a function type used for assessment of edges during graph traversal.
type EdgeFilter func(Edge) bool
// An edge is an edge in a graph.
type edge struct {
id int
i int
u, v Node
}
// NewEdge returns a new Edge.
func NewEdge() Edge {
return &edge{}
}
// newEdge returns a new edge.
func newEdge(id, i int, u, v Node) Edge {
return &edge{id: id, i: i, u: u, v: v}
}
// ID returns the id of the edge.
func (e *edge) ID() int {
return e.id
}
func (e *edge) setID(id int) {
e.id = id
}
// Index returns the index of the edge in the compact edge list of the graph. The value returned
// cannot be reliably used after an edge deletion.
func (e *edge) index() int {
return e.i
}
func (e *edge) setIndex(i int) {
e.i = i
}
// Nodes returns the two nodes, u and v, that are joined by the edge.
func (e *edge) Nodes() (u, v Node) {
    return e.u, e.v
}

// Head returns the head of the edge: v, the second node returned by Nodes.
func (e *edge) Head() Node {
    return e.v
}

// Tail returns the tail of the edge: u, the first node returned by Nodes.
func (e *edge) Tail() Node {
    return e.u
}
// Weight returns the weight of the edge. The default weight is 1.
func (e *edge) Weight() float64 {
return 1
}
// reconnect swaps one endpoint for another: whichever of the edge's two
// endpoints equals the first argument is replaced by the second. If neither
// endpoint matches, the edge is left unchanged.
// NOTE(review): the Edge interface names these parameters (dst, src) --
// confirm the intended argument order against callers.
func (e *edge) reconnect(u, v Node) {
    switch u {
    case e.u:
        e.u = v
    case e.v:
        e.v = v
    }
}
// disconnect detaches the edge from both of its endpoint nodes and nils out
// both endpoint fields. A self-loop (u == v) is dropped from its shared node
// only once.
func (e *edge) disconnect() {
    e.u.drop(e)
    if e.u != e.v {
        e.v.drop(e)
    }
    // Bug fix: always nil both endpoints. Previously a self-loop kept e.v
    // pointing at the dropped node, so connect() would treat the stale v as
    // still occupied and the edge could never be fully rewired.
    e.v = nil
    e.u = nil
}
// connect attaches node n to the first free endpoint of the edge (u before
// v), registering the edge with that node. It returns alreadyConnected when
// both endpoints are already occupied.
func (e *edge) connect(n Node) (err error) {
    // Switch on the nil interface value: the first case whose endpoint is
    // still nil is the one taken.
    switch Node(nil) {
    case e.u:
        e.u = n
        e.u.add(e)
    case e.v:
        e.v = n
        e.v.add(e)
    default:
        err = alreadyConnected
    }
    return
}
func (e *edge) join(u, v Node) {
e.u, e.v = u, v
}
func (e *edge) String() string {
return fmt.Sprintf("%d--%d", e.u.ID(), e.v.ID())
}
// Edges is a collection of edges used for internal representation of edge lists in a graph.
type Edges []Edge
// delFromGraph removes the edge at position i with a swap-with-last delete,
// keeping the swapped edge's cached index in sync and tagging the removed
// edge's index as -1 (no longer in the graph). The shortened slice is
// returned.
func (e Edges) delFromGraph(i int) Edges {
    e[i], e[len(e)-1] = e[len(e)-1], e[i]
    e[i].setIndex(i)
    e[len(e)-1].setIndex(-1)
    return e[:len(e)-1]
}
func (e Edges) delFromNode(i int) Edges {
e[i], e[len(e)-1] = e[len(e)-1], e[i]
return e[:len(e)-1]
} | edge.go | 0.741393 | 0.417509 | edge.go | starcoder |
package apmcharts
import (
"io"
"sort"
"time"
"github.com/pkg/errors"
"github.com/wcharczuk/go-chart/v2"
)
// RenderResponseTime renders response time chart, aka. service sublayer
func RenderResponseTime(series, timestamps [][]float64, w io.Writer, options Options) error {
if len(series) != len(timestamps) {
return errors.New("RenderResponseTime: amount of series and timestamps should be equal")
}
convertedIndexed := map[float64]float64{}
timeSeries := make([]chart.Series, 0, len(series))
dataWithLegend := DataWithLegend{
series: series,
legend: options.Legend,
timestamps: timestamps,
}
sort.Sort(dataWithLegend)
for index, values := range dataWithLegend.series {
times := make([]time.Time, 0, len(dataWithLegend.timestamps[index]))
converted := make([]float64, 0, len(values))
for subIndex, value := range values {
t := dataWithLegend.timestamps[index][subIndex]
if v, ok := convertedIndexed[t]; ok {
convertedIndexed[t] = value + v
converted = append(converted, value+v)
} else {
convertedIndexed[t] = value
converted = append(converted, value)
}
times = append(times, time.Unix(int64(dataWithLegend.timestamps[index][subIndex]/1000), 0))
}
timeSeries = append(
[]chart.Series{
chart.TimeSeries{
Name: getLegend(dataWithLegend.legend, index),
Style: chart.Style{
StrokeColor: colorSchema[index],
FillColor: colorSchema[index],
StrokeWidth: 1,
},
XValues: times,
YValues: converted,
},
},
timeSeries...,
)
}
graph := chart.Chart{
Height: options.GetHeight(),
Width: options.GetWidth(),
ColorPalette: options.GetColorPalette(),
Title: options.GetTitle(),
TitleStyle: options.GetTitleStyle(),
XAxis: chart.XAxis{
ValueFormatter: options.GetTimeFormatter(),
},
YAxis: chart.YAxis{},
Series: timeSeries,
}
if len(options.Legend) > 0 {
graph.Elements = []chart.Renderable{
chart.LegendLeft(&graph),
}
}
return graph.Render(chart.PNG, w)
} | response_time.go | 0.589835 | 0.41117 | response_time.go | starcoder |
package gost
import (
"errors"
"math"
)
// Node is the building block of a singly linked list: a payload plus a
// pointer to the next node (nil at the end of the list).
type Node struct {
	Data interface{}
	Next *Node
}

/*
NodeList is an implementation of a singly linked list. It takes any interface{} and
allows:

- Retrieving: obtaining the value contained at any given index within the list.

- Appending: adding a new value at the last position of the list.

- Adding: adding a new value, specifying the index to be inserted at.

- Removing: deleting a value from the list, obtaining it if needed.

Negative indices are accepted everywhere and count from the end of the list
(-1 is the last element).

Note that the implementation is NOT thread-safe.
*/
type NodeList struct {
	Head *Node // first node, nil when the list is empty
	Tail *Node // last node, kept so Append runs in O(1)
	size int   // number of elements currently stored
}

// getNode walks the list and returns the node at position index.
// The caller must guarantee 0 <= index < size; no bounds are checked here.
func (list *NodeList) getNode(index int) *Node {
	node := list.Head
	for i := 0; i < index; i++ {
		node = node.Next
	}
	return node
}

// reverseIndex translates a negative index into its positive equivalent
// (size - |index|). The boolean reports whether a translation happened; when
// it is false the second return value is meaningless.
func (list *NodeList) reverseIndex(index int) (bool, int) {
	if index < 0 {
		return true, list.size - int(math.Abs(float64(index)))
	}
	return false, 0
}

// Retrieve obtains the data stored at position index (negative indices count
// from the end). Returns an error if the index is out of bounds.
func (list *NodeList) Retrieve(index int) (interface{}, error) {
	if isReverse, value := list.reverseIndex(index); isReverse {
		index = value
	}
	if index >= list.size || index < 0 {
		return nil, errors.New("cannot Retrieve() index out of bounds")
	}
	return list.getNode(index).Data, nil
}

// Append adds the data passed as parameter to the end of the list in O(1).
func (list *NodeList) Append(data interface{}) {
	node := &Node{Data: data}
	if list.size == 0 {
		list.Head = node
	} else {
		// Whether the list holds one element or many, the new node always
		// hangs off the current tail (the original code special-cased
		// size == 1 needlessly: then Head == Tail, so this is identical).
		list.Tail.Next = node
	}
	list.Tail = node
	list.size++
}

// Add inserts data at the position designated by index (index == Size()
// appends). Returns an error if out of bounds.
func (list *NodeList) Add(index int, data interface{}) error {
	if isReverse, value := list.reverseIndex(index); isReverse {
		index = value
	}
	if index > list.size || index < 0 {
		return errors.New("cannot Add() index out of bounds")
	}
	if index == list.size {
		list.Append(data)
		return nil
	}
	node := &Node{Data: data}
	if index == 0 {
		node.Next = list.Head
		list.Head = node
	} else {
		prev := list.getNode(index - 1)
		node.Next = prev.Next
		prev.Next = node
	}
	list.size++
	return nil
}

// Remove deletes the item stored at position index and returns its data, or
// an error if out of bounds.
func (list *NodeList) Remove(index int) (interface{}, error) {
	if isReverse, value := list.reverseIndex(index); isReverse {
		index = value
	}
	if index >= list.size || index < 0 {
		return nil, errors.New("cannot Remove() index out of bounds")
	}
	var data interface{}
	switch {
	case list.size == 1:
		// BUG FIX: removing the only element used to leave both Head and
		// Tail pointing at the removed node (getNode(size-2) with size == 1
		// returned the node being removed), retaining it in memory even
		// though the list reported size 0.
		data = list.Head.Data
		list.Head = nil
		list.Tail = nil
	case index == list.size-1:
		data = list.Tail.Data
		prev := list.getNode(list.size - 2)
		prev.Next = nil
		list.Tail = prev
	case index == 0:
		data = list.Head.Data
		next := list.Head.Next
		list.Head.Next = nil
		list.Head = next
	default:
		prev := list.getNode(index - 1)
		node := prev.Next
		data = node.Data
		prev.Next = node.Next
		node.Next = nil
	}
	list.size--
	return data, nil
}

// Size returns the number of elements currently stored in the list.
func (list *NodeList) Size() int {
	return list.size
}
package main
import (
"time"
"github.com/marcusolsson/grafana-ynab-datasource/pkg/ynab"
)
type Range struct {
Start time.Time
End time.Time
}
func (r Range) Contains(t time.Time) bool {
return r.Start.UnixNano() <= t.UnixNano() && t.UnixNano() < r.End.UnixNano()
}
// Bucket is one histogram cell: a time interval plus every measurement whose
// timestamp falls inside that interval.
type Bucket struct {
	Range        Range
	Measurements []Measurement
}
// Histogram groups a time series into consecutive, equally-spaced time
// buckets (created by Fill, populated by Add).
type Histogram struct {
	buckets []Bucket
	// series is stored by NewHistogram but never read by any method in this
	// file — NOTE(review): possibly dead weight; confirm before removing.
	series TimeSeries
}
// NewHistogram creates a Histogram over the given series with no buckets
// yet; call Fill to create them before adding measurements.
func NewHistogram(series TimeSeries) *Histogram {
	return &Histogram{
		series:  series,
		buckets: []Bucket{},
	}
}
// Period selects the bucket width used when filling a histogram (see Fill).
type Period int

const (
	PeriodDaily   Period = iota // one bucket per calendar day, midnight UTC
	PeriodWeekly                // one bucket per week, starting Monday
	PeriodMonthly               // one bucket per calendar month
)
// fillBetween appends one empty bucket per step over the half-open interval
// [first, last). The step is expressed as an AddDate offset (years, months,
// days); first and last must already be aligned on that step. Iterating with
// Before (instead of the original `!=` comparison) removes the infinite-loop
// hazard if the endpoints ever became misaligned.
func (g *Histogram) fillBetween(first, last time.Time, years, months, days int) {
	for b := first; b.Before(last); b = b.AddDate(years, months, days) {
		g.buckets = append(g.buckets, Bucket{
			Range: Range{
				Start: b,
				End:   b.AddDate(years, months, days),
			},
			Measurements: []Measurement{},
		})
	}
}

// Fill creates empty buckets covering [start, end] for the given period.
// Daily buckets start at midnight UTC, weekly buckets start on Mondays, and
// monthly buckets start on the first day of the month; the range is widened
// outwards so both endpoints are covered. The original three copy-pasted
// bucket loops are collapsed into fillBetween.
func (g *Histogram) Fill(start, end time.Time, p Period) {
	switch p {
	case PeriodDaily:
		first := time.Date(start.Year(), start.Month(), start.Day(), 0, 0, 0, 0, time.UTC)
		last := time.Date(end.Year(), end.Month(), end.Day(), 0, 0, 0, 0, time.UTC).AddDate(0, 0, 1)
		g.fillBetween(first, last, 0, 0, 1)
	case PeriodWeekly:
		// Walk first back to the preceding Monday and last forward to the
		// following Monday so that 7-day steps line up.
		first := time.Date(start.Year(), start.Month(), start.Day(), 0, 0, 0, 0, time.UTC)
		for first.Weekday() != time.Monday {
			first = first.AddDate(0, 0, -1)
		}
		last := time.Date(end.Year(), end.Month(), end.Day(), 0, 0, 0, 0, time.UTC).AddDate(0, 0, 1)
		for last.Weekday() != time.Monday {
			last = last.AddDate(0, 0, 1)
		}
		g.fillBetween(first, last, 0, 0, 7)
	case PeriodMonthly:
		first := time.Date(start.Year(), start.Month(), 1, 0, 0, 0, 0, time.UTC)
		last := time.Date(end.Year(), end.Month(), 1, 0, 0, 0, 0, time.UTC).AddDate(0, 1, 0)
		g.fillBetween(first, last, 0, 1, 0)
	}
}
// Add appends m to every bucket whose range contains m's timestamp. Buckets
// produced by Fill are contiguous and half-open, so at most one matches.
func (g *Histogram) Add(m Measurement) {
	for i := range g.buckets {
		if g.buckets[i].Range.Contains(m.Time) {
			g.buckets[i].Measurements = append(g.buckets[i].Measurements, m)
		}
	}
}
// EachBucket reduces every non-empty bucket with reducer and passes the
// result to fn. Empty buckets are skipped unless gapFill is "last", in which
// case the previously reduced measurement is repeated at the empty bucket's
// start time (value 0 with nil labels while no bucket has produced anything
// yet — prev is then still the zero Measurement, which is exactly what the
// original's special first-iteration branch emitted).
func (g *Histogram) EachBucket(reducer func(r Range, ms []Measurement) Measurement, fn func(m Measurement), gapFill string) {
	var prev Measurement
	for _, b := range g.buckets {
		if len(b.Measurements) > 0 {
			prev = reducer(b.Range, b.Measurements)
			fn(prev)
			continue
		}
		if gapFill != "last" {
			continue
		}
		fn(Measurement{
			Time:   b.Range.Start,
			Value:  prev.Value,
			Labels: prev.Labels,
		})
	}
}
// Measurement is one sample of a time series: a timestamp, a value, and a
// set of string labels describing where the sample came from.
type Measurement struct {
	Time   time.Time
	Value  float64
	Labels map[string]string
}
// TimeSeries is a read-only, index-addressable series of timestamped,
// labelled float64 samples.
type TimeSeries interface {
	Time(i int) time.Time
	Value(i int) float64
	Len() int
	Labels(i int) map[string]string
}
// TimeSeriesTransactions adapts a slice of YNAB transactions to the
// TimeSeries interface.
type TimeSeriesTransactions []ynab.Transaction

// Len returns the number of transactions in the series.
func (ts TimeSeriesTransactions) Len() int {
	return len(ts)
}

// Time parses the i-th transaction's date ("YYYY-MM-DD"). It panics on a
// malformed date — NOTE(review): consider surfacing this as an error.
func (ts TimeSeriesTransactions) Time(i int) time.Time {
	parsed, err := time.Parse("2006-01-02", ts[i].Date)
	if err != nil {
		panic(err)
	}
	return parsed
}

// Value returns the i-th transaction's amount as a float64.
func (ts TimeSeriesTransactions) Value(i int) float64 {
	return float64(ts[i].Amount)
}

// Labels exposes the identifying fields of the i-th transaction as labels.
func (ts TimeSeriesTransactions) Labels(i int) map[string]string {
	return map[string]string{
		"account_id":    ts[i].AccountID,
		"account_name":  ts[i].AccountName,
		"payee_id":      ts[i].PayeeID,
		"payee_name":    ts[i].PayeeName,
		"category_id":   ts[i].CategoryID,
		"category_name": ts[i].CategoryName,
	}
}

var _ TimeSeries = TimeSeriesTransactions{}
// TimeSeriesBalance adapts a slice of YNAB account balances to the
// TimeSeries interface.
type TimeSeriesBalance []ynab.Balance

// Len returns the number of balance samples in the series.
func (ts TimeSeriesBalance) Len() int {
	return len(ts)
}

// Time parses the i-th balance's date ("YYYY-MM-DD"). It panics on a
// malformed date — NOTE(review): consider surfacing this as an error.
func (ts TimeSeriesBalance) Time(i int) time.Time {
	parsed, err := time.Parse("2006-01-02", ts[i].Date)
	if err != nil {
		panic(err)
	}
	return parsed
}

// Value returns the i-th balance amount as a float64.
func (ts TimeSeriesBalance) Value(i int) float64 {
	return float64(ts[i].Amount)
}

// Labels exposes the identifying fields of the i-th balance as labels.
func (ts TimeSeriesBalance) Labels(i int) map[string]string {
	return map[string]string{
		"account_id":   ts[i].AccountID,
		"account_name": ts[i].AccountName,
	}
}

var _ TimeSeries = TimeSeriesBalance{}
// Regularize resamples an irregular time series into one measurement per
// period bucket: aligner reduces each non-empty bucket, and gapFill ("last"
// or anything else) controls whether empty buckets are reported or skipped.
// The series is assumed to be sorted by time — TODO confirm with callers.
func Regularize(series TimeSeries, p Period, aligner func(r Range, ms []Measurement) Measurement, gapFill string) ([]Measurement, error) {
	// BUG FIX: the original guard was `series.Len() < 0`, which can never be
	// true, so an empty series fell through and panicked on series.Time(-1)
	// in the Fill call below.
	if series.Len() == 0 {
		return []Measurement{}, nil
	}
	hist := NewHistogram(series)
	hist.Fill(series.Time(0), series.Time(series.Len()-1), p)
	for i := 0; i < series.Len(); i++ {
		hist.Add(Measurement{
			Time:   series.Time(i),
			Value:  series.Value(i),
			Labels: series.Labels(i),
		})
	}
	var res []Measurement
	hist.EachBucket(
		aligner,
		func(m Measurement) {
			res = append(res, m)
		},
		gapFill,
	)
	return res, nil
}
// alignLast reduces a bucket to its most recent measurement, re-stamped at
// the bucket's start time. Panics on an empty slice; EachBucket only invokes
// reducers with non-empty buckets.
func alignLast(r Range, ms []Measurement) Measurement {
	m := ms[len(ms)-1]
	m.Time = r.Start
	return m
}
func alignTotal(r Range, ms []Measurement) Measurement {
var total float64
for _, m := range ms {
total += m.Value
}
return Measurement{
Time: r.Start,
Value: total,
Labels: ms[0].Labels,
}
} | pkg/aligner.go | 0.686895 | 0.584745 | aligner.go | starcoder |
package stack
import (
"eslang/core"
"fmt"
"math"
)
// ======================
// ARITHMETIC OPERATIONS
// ======================
// AddValues adds two stack values of the same type: arithmetic addition for
// ints and floats, concatenation for strings. Mixed or unsupported types
// produce an error.
func AddValues(lhs StackValue, rhs StackValue) (StackValue, error) {
	if lhs.Type() != rhs.Type() {
		return nil, fmt.Errorf("can not add values of different types")
	}
	switch lhs.Type() {
	case core.Int:
		sum := lhs.Value().(int64) + rhs.Value().(int64)
		return NewStackValueInt(sum), nil
	case core.Float:
		sum := lhs.Value().(float64) + rhs.Value().(float64)
		return NewStackValueFloat(sum), nil
	case core.String:
		joined := lhs.Value().(string) + rhs.Value().(string)
		return NewStackValueString(joined), nil
	default:
		return nil, fmt.Errorf("can not add values of type %s", lhs.Type())
	}
}
// SubtractValues subtracts rhs from lhs. Only int and float values can be
// subtracted; mixed or unsupported types produce an error.
func SubtractValues(lhs StackValue, rhs StackValue) (StackValue, error) {
	if lhs.Type() != rhs.Type() {
		return nil, fmt.Errorf("can not subtract values of different types")
	}
	switch lhs.Type() {
	case core.Int:
		diff := lhs.Value().(int64) - rhs.Value().(int64)
		return NewStackValueInt(diff), nil
	case core.Float:
		diff := lhs.Value().(float64) - rhs.Value().(float64)
		return NewStackValueFloat(diff), nil
	default:
		return nil, fmt.Errorf("can not subtract values of type %s", lhs.Type())
	}
}
// MultiplyValues multiplies two stack values. Only int and float values can
// be multiplied; mixed or unsupported types produce an error.
func MultiplyValues(lhs StackValue, rhs StackValue) (StackValue, error) {
	if lhs.Type() != rhs.Type() {
		return nil, fmt.Errorf("can not multiply values of different types")
	}
	switch lhs.Type() {
	case core.Int:
		product := lhs.Value().(int64) * rhs.Value().(int64)
		return NewStackValueInt(product), nil
	case core.Float:
		product := lhs.Value().(float64) * rhs.Value().(float64)
		return NewStackValueFloat(product), nil
	default:
		return nil, fmt.Errorf("can not multiply values of type %s", lhs.Type())
	}
}
// DivideValues divides lhs by rhs. Only int and float values can be divided;
// mixed or unsupported types produce an error. Integer division by zero is
// reported as an error instead of crashing the interpreter.
func DivideValues(lhs StackValue, rhs StackValue) (StackValue, error) {
	if lhs.Type() != rhs.Type() {
		return nil, fmt.Errorf("can not divide values of different types")
	}
	switch lhs.Type() {
	case core.Int:
		divisor := rhs.Value().(int64)
		if divisor == 0 {
			// BUG FIX: previously this panicked with a runtime
			// "integer divide by zero" instead of using the error return.
			return nil, fmt.Errorf("can not divide by zero")
		}
		return NewStackValueInt(lhs.Value().(int64) / divisor), nil
	case core.Float:
		// Float division by zero is well-defined in IEEE-754 (±Inf / NaN),
		// so it is intentionally not guarded.
		return NewStackValueFloat(lhs.Value().(float64) / rhs.Value().(float64)), nil
	}
	return nil, fmt.Errorf("can not divide values of type %s", lhs.Type())
}
// ModuloValues returns the remainder of dividing lhs by rhs. Integer modulo
// by zero is reported as an error instead of crashing the interpreter; float
// modulo uses math.Mod (modulo by zero yields NaN, per IEEE-754).
func ModuloValues(lhs StackValue, rhs StackValue) (StackValue, error) {
	if lhs.Type() != rhs.Type() {
		return nil, fmt.Errorf("can not modulo values of different types")
	}
	switch lhs.Type() {
	case core.Int:
		divisor := rhs.Value().(int64)
		if divisor == 0 {
			// BUG FIX: previously this panicked with a runtime
			// "integer divide by zero" instead of using the error return.
			return nil, fmt.Errorf("can not modulo by zero")
		}
		return NewStackValueInt(lhs.Value().(int64) % divisor), nil
	case core.Float:
		return NewStackValueFloat(
			math.Mod(
				lhs.Value().(float64),
				rhs.Value().(float64),
			),
		), nil
	}
	return nil, fmt.Errorf("can not modulo values of type %s", lhs.Type())
}
// ======================
// RELATIONAL OPERATIONS
// ======================
// EqualValues reports, as a bool stack value, whether two same-typed values
// are equal.
func EqualValues(lhs StackValue, rhs StackValue) (StackValue, error) {
	if lhs.Type() != rhs.Type() {
		return nil, fmt.Errorf("can not compare values of different types")
	}
	return NewStackValueBool(lhs.Value() == rhs.Value()), nil
}
// NotEqualValues reports, as a bool stack value, whether two same-typed
// values differ.
func NotEqualValues(lhs StackValue, rhs StackValue) (StackValue, error) {
	if lhs.Type() != rhs.Type() {
		return nil, fmt.Errorf("can not compare values of different types")
	}
	return NewStackValueBool(lhs.Value() != rhs.Value()), nil
}
// LessThanValues reports, as a bool stack value, whether lhs orders strictly
// before rhs. Ints and floats compare numerically, strings lexicographically.
func LessThanValues(lhs StackValue, rhs StackValue) (StackValue, error) {
	if lhs.Type() != rhs.Type() {
		return nil, fmt.Errorf("can not compare values of different types")
	}
	switch lhs.Type() {
	case core.Int:
		return NewStackValueBool(lhs.Value().(int64) < rhs.Value().(int64)), nil
	case core.Float:
		return NewStackValueBool(lhs.Value().(float64) < rhs.Value().(float64)), nil
	case core.String:
		return NewStackValueBool(lhs.Value().(string) < rhs.Value().(string)), nil
	default:
		return nil, fmt.Errorf("can not compare values of type %s", lhs.Type())
	}
}
// GreaterThanValues reports, as a bool stack value, whether lhs orders
// strictly after rhs. Ints and floats compare numerically, strings
// lexicographically.
func GreaterThanValues(lhs StackValue, rhs StackValue) (StackValue, error) {
	if lhs.Type() != rhs.Type() {
		return nil, fmt.Errorf("can not compare values of different types")
	}
	switch lhs.Type() {
	case core.Int:
		return NewStackValueBool(lhs.Value().(int64) > rhs.Value().(int64)), nil
	case core.Float:
		return NewStackValueBool(lhs.Value().(float64) > rhs.Value().(float64)), nil
	case core.String:
		return NewStackValueBool(lhs.Value().(string) > rhs.Value().(string)), nil
	default:
		return nil, fmt.Errorf("can not compare values of type %s", lhs.Type())
	}
}
// LessThanOrEqualValues reports, as a bool stack value, whether lhs orders
// before or equal to rhs. The per-type <= operators are applied directly so
// float NaN semantics are preserved exactly.
func LessThanOrEqualValues(lhs StackValue, rhs StackValue) (StackValue, error) {
	if lhs.Type() != rhs.Type() {
		return nil, fmt.Errorf("can not compare values of different types")
	}
	switch lhs.Type() {
	case core.Int:
		return NewStackValueBool(lhs.Value().(int64) <= rhs.Value().(int64)), nil
	case core.Float:
		return NewStackValueBool(lhs.Value().(float64) <= rhs.Value().(float64)), nil
	case core.String:
		return NewStackValueBool(lhs.Value().(string) <= rhs.Value().(string)), nil
	default:
		return nil, fmt.Errorf("can not compare values of type %s", lhs.Type())
	}
}
// GreaterThanOrEqualValues reports, as a bool stack value, whether lhs
// orders after or equal to rhs. The per-type >= operators are applied
// directly so float NaN semantics are preserved exactly.
func GreaterThanOrEqualValues(lhs StackValue, rhs StackValue) (StackValue, error) {
	if lhs.Type() != rhs.Type() {
		return nil, fmt.Errorf("can not compare values of different types")
	}
	switch lhs.Type() {
	case core.Int:
		return NewStackValueBool(lhs.Value().(int64) >= rhs.Value().(int64)), nil
	case core.Float:
		return NewStackValueBool(lhs.Value().(float64) >= rhs.Value().(float64)), nil
	case core.String:
		return NewStackValueBool(lhs.Value().(string) >= rhs.Value().(string)), nil
	default:
		return nil, fmt.Errorf("can not compare values of type %s", lhs.Type())
	}
}
// ======================
// LOGICAL OPERATIONS
// ======================
// LogicalAndOperator returns the logical AND of the two values' truthiness.
// Both operands are always evaluated (no short-circuiting), so a truthiness
// error on either side is reported.
func LogicalAndOperator(lhs StackValue, rhs StackValue) (StackValue, error) {
	left, err := lhs.TestTruthy()
	if err != nil {
		return nil, err
	}
	right, err := rhs.TestTruthy()
	if err != nil {
		return nil, err
	}
	return NewStackValueBool(left && right), nil
}
// LogicalNotOperator returns the logical negation of the value's truthiness.
func LogicalNotOperator(value StackValue) (StackValue, error) {
	truthy, err := value.TestTruthy()
	if err != nil {
		return nil, err
	}
	return NewStackValueBool(!truthy), nil
}
// LogicalOrOperator function returns the logical OR of two values
func LogicalOrOperator(lhs StackValue, rhs StackValue) (StackValue, error) {
lhsTruthty, err := lhs.TestTruthy()
if err != nil {
return nil, err
}
rhsTruthty, err := rhs.TestTruthy()
if err != nil {
return nil, err
}
truthty := lhsTruthty || rhsTruthty
return NewStackValueBool(truthty), nil
} | interpreter/stack/operations.go | 0.739328 | 0.454775 | operations.go | starcoder |
package main
import (
"flag"
"fmt"
"io"
"os"
// nginx
_ "github.com/Konstantin8105/ss/nginx"
// htop
_ "github.com/Konstantin8105/ss/htop"
// vim
_ "github.com/Konstantin8105/ss/vim"
// mc
_ "github.com/Konstantin8105/ss/mc"
// nano
_ "github.com/Konstantin8105/ss/nano"
// ssh
_ "github.com/Konstantin8105/ss/ssh"
// backup
// systemd
// git server
// git web
// localhost
// router settings
// system update
// base `starter` package
"github.com/Konstantin8105/ss/starter"
)
// Command-line flags: -l lists the available starter modules, -i installs
// their settings, and -prefix prepends a command prefix (e.g. "sudo") to
// every executed command. With no flag set, usage is printed.
var (
	// helpFlag is declared but never read — run() falls through to
	// flag.Usage() by default. NOTE(review): confirm whether -h is meant to
	// be handled explicitly.
	helpFlag    = flag.Bool("h", false, "give this help list")
	listFlag    = flag.Bool("l", false, "show list of modules")
	installFlag = flag.Bool("i", false, "install settings")
	prefixFlag  = flag.String("prefix", "", "prefix before each command."+
		" Typically used :\"sudo\" or \"ssh tom@localhost sudo\" or ...")
)
/*
Notes:
* https://blog.golang.org/docker
* https://stackoverflow.com/questions/26411594/executing-docker-command-using-golang-exec-fails
* https://github.com/betweenbrain/ubuntu-web-server-build-script
* https://medium.com/statuscode/golang-docker-for-development-and-production-ce3ad4e69673
*/
// TODO: add logs checking /var/log/
// TODO: add database
/*
# Minimal configuration of commands:
```minimal command
$ iw dev
Interface wlan0 <-- remember interface
$ ip link show wlan0 <-- checking
$ sudo ip link set wlan0 up <-- only if interface is not open
$ ip link show wlan0 <-- checking
$ iw wlan0 link <-- checking connection
$ sudo iw wlan0 scan
$ sudo -s
$ wpa_passphrase <PASSWORD> >> /etc/wpa_supplicant.conf
Enter WIFI_PASSWORD
$ exit
$ sudo vim /etc/network/interfaces
```
Inside `interfaces`:
```
auto wls1
#iface wls1 inet dhcp
iface wls1 inet static
address 192.168.0.55
netmask 255.255.255.0
gateway 192.168.0.1
wpa-conf /etc/wpa_supplicant.conf
```
Install video driver:
```
sudo apt install ubuntu-drivers-common
sudo apt-get install intel-microcode
sudo ubuntu-drivers devices
sudo ubuntu-drivers autoinstall
```
```
$ sudo ufw allow 222
$ sudo ufw enable
```
## How to connect to a WPA/WPA2 WiFi network using Linux command line
1. Find out the wireless device name.
```
$ /sbin/iw dev
phy#0
Interface wlan0
ifindex 3
type managed
```
The above output showed that the system has 1 physical WiFi card, designated as phy#0. The device name is wlan0. The type specifies the operation mode of the wireless device. managed means the device is a WiFi station or client that connects to an access point.
2. Check that the wireless device is up.
```
$ ip link show wlan0
3: wlan0: (BROADCAST,MULTICAST) mtu 1500 qdisc noop state DOWN mode DEFAULT qlen 1000
link/ether 74:e5:43:a1:ce:65 brd ff:ff:ff:ff:ff:ff
```
Look for the word **"UP"** inside the brackets in the first line of the output.
In the above example, wlan0 is not UP. Execute the following command to bring it up:
```
$ sudo ip link set wlan0 up
[sudo] password for <PASSWORD>:
Note: you need root privilege for the above operation.
```
If you run the show link command again, you can tell that wlan0 is now UP.
```
$ ip link show wlan0
3: wlan0: (NO-CARRIER,BROADCAST,MULTICAST,UP) mtu 1500 qdisc mq state DOWN mode DEFAULT qlen 1000
link/ether 74:e5:43:a1:ce:65 brd ff:ff:ff:ff:ff:ff
```
3. Check the connection status.
```
$ /sbin/iw wlan0 link
Not connected.
```
The above output shows that you are not connected to any network.
4. Scan to find out what WiFi network(s) are detected
```
$ sudo /sbin/iw wlan0 scan
BSS 00:14:d1:9c:1f:c8 (on wlan0)
... sniped ...
freq: 2412
SSID: gorilla
RSN: * Version: 1
* Group cipher: CCMP
* Pairwise ciphers: CCMP
* Authentication suites: PSK
* Capabilities: (0x0000)
... sniped ...
```
The 2 important pieces of information from the above are the SSID and the
security protocol (WPA/WPA2 vs WEP). The SSID from the above example is
gorilla. The security protocol is RSN, also commonly referred to as WPA2.
The security protocol is important because it determines what tool you use
to connect to the network.
5. Connect to WPA/WPA2 WiFi network.
This is a 2 step process. First, you generate a configuration file for wpa_supplicant that contains the pre-shared key ("passphrase") for the WiFi network.
```
$ sudo -s
[sudo] password for peter:
$ wpa_passphrase gorilla >> /etc/wpa_supplicant.conf
...type in the passphrase and hit enter...
wpa_passphrase takes the SSID as the single argument. You must type in the passphrase for the WiFi network gorilla after you run the command. Using that information, wpa_passphrase will output the necessary configuration statements to the standard output. Those statements are appended to the wpa_supplicant configuration file located at /etc/wpa_supplicant.conf.
```
Note: you need root privilege to write to /etc/wpa_supplicant.conf.
```
$ cat /etc/wpa_supplicant.conf
# reading passphrase from stdin
network={
ssid="gorilla"
#psk="testtest"
psk=4dfe1c985520d26a13e932bf0acb1d4580461dd854ed79ad1a88ec221a802061
}
The second step is to run wpa_supplicant with the new configuration file.
$ sudo wpa_supplicant -B -D wext -i wlan0 -c /etc/wpa_supplicant.conf
```
-B means run wpa_supplicant in the background.
-D specifies the wireless driver. wext is the generic driver.
-c specifies the path for the configuration file.
Use the iw command to verify that you are indeed connected to the SSID.
```
$ /sbin/iw wlan0 link
Connected to 00:14:d1:9c:1f:c8 (on wlan0)
SSID: gorilla
freq: 2412
RX: 63825 bytes (471 packets)
TX: 1344 bytes (12 packets)
signal: -27 dBm
tx bitrate: 6.5 MBit/s MCS 0
bss flags: short-slot-time
dtim period: 0
beacon int: 100
```
6. Obtain IP address by DHCP
```
$ sudo dhclient wlan0
Use the ip command to verify the IP address assigned by DHCP. The IP address is 192.168.1.113 from below.
$ ip addr show wlan0
3: wlan0: mtu 1500 qdisc mq state UP qlen 1000
link/ether 74:e5:43:a1:ce:65 brd ff:ff:ff:ff:ff:ff
inet 192.168.1.113/24 brd 192.168.1.255 scope global wlan0
inet6 fe80::76e5:43ff:fea1:ce65/64 scope link
valid_lft forever preferred_lft forever
```
7. Add default routing rule.
The last configuration step is to make sure that you have the proper routing rules.
```
$ ip route show
192.168.1.0/24 dev wlan0 proto kernel scope link src 192.168.1.113
The above routing table contains only 1 rule which redirects all traffic
destined for the local subnet (192.168.1.x) to the wlan0 interface.
You may want to add a default routing rule to pass all other traffic
through wlan0 as well.
$ sudo ip route add default via 192.168.1.254 dev wlan0
$ ip route show
default via 192.168.1.254 dev wlan0
192.168.1.0/24 dev wlan0 proto kernel scope link src 192.168.1.113
```
8. Ping external ip address to test connectivity
```
$ ping 8.8.8.8
PING 8.8.8.8 (8.8.8.8) 56(84) bytes of data.
64 bytes from 8.8.8.8: icmp_req=1 ttl=48 time=135 ms
64 bytes from 8.8.8.8: icmp_req=2 ttl=48 time=135 ms
64 bytes from 8.8.8.8: icmp_req=3 ttl=48 time=134 ms
^C
--- 8.8.8.8 ping statistics ---
3 packets transmitted, 3 received, 0% packet loss, time 2000ms
rtt min/avg/max/mdev = 134.575/134.972/135.241/0.414 ms
```
## Create internet connection
Add in file `/etc/network/interfaces`:
```
# WiFi connection
auto wls1
iface wls1 inet dhcp
wpa-conf /etc/wpa_supplicant.conf
```
## How to install NVidia driver
Unfortunately, I am going to try installing MATE first instead.
```
sudo apt install ubuntu-drivers-common
sudo apt-get install intel-microcode
sudo ubuntu-drivers devices
sudo ubuntu-drivers autoinstall
```
Screen Blanks/Monitor Turns Off
Using a laptop with a GeForce Go card, or connecting the sole display via DVI on a dual-head system sometimes results in the screen not receiving a picture. This is caused by the driver outputting video to the VGA port on the graphics card, instead of DVI.
The usual hint that you have this problem is when you hear the startup sound but nothing appears on the screen. If you do not hear any sound, you are more than likely experiencing unrelated problems.
This is a bug about displays on digital outputs being blank when using NVIDIA driver, and can be resolved by editing your /etc/X11/xorg.conf file:
1. Switch to the console by using ctrl+alt+F1, or reboot and select recovery mode from the GRUB menu.
```
mount -o rw,remount /
```
2. Open and edit xorg.conf like this:
```
sudo nano /etc/X11/xorg.conf.
```
3. Find the line that says: `Section "Screen"`
4. Insert a new line that says `Option "UseDisplayDevice" "DFP"`. in intel and nvidia
5. Save the file. If you had to restart into recovery mode, type reboot, otherwise restart your display using `sudo /etc/init.d/gdm restart`.
Change Inactive and Device 0 in the file /etc/X11/xorg.conf
```
$ sudo ufw allow 222
$ sudo ufw enable
```
To disable entering the sleep mode I had to edit the /etc/systemd/logind.conf file and modify the line:
`#HandleLidSwitch=suspend` to `HandleLidSwitch=ignore` . Then do
```
sudo service systemd-logind restart
```
*/
// main parses the command-line flags, delegates all work to run, and exits
// with status 1 on failure.
func main() {
	flag.Parse()
	if err := run(); err != nil {
		fmt.Printf("Error = %v", err)
		os.Exit(1)
	}
}
// output is the destination for all listing/progress text; kept as a
// package variable (presumably so it can be redirected in tests — confirm).
var output io.Writer = os.Stdout
func run() (err error) {
if len(*prefixFlag) != 0 {
starter.SetCommandPrefix(*prefixFlag)
}
switch {
case *listFlag:
// list of modules
fmt.Fprintf(output, "List of starters :\n")
list := starter.List()
for inx := range list {
fmt.Fprintf(output, "%2d%20s\n", inx+1, list[inx].Name)
}
fmt.Fprintf(output, "Amount of starters : %2d\n", len(list))
case *installFlag:
// set settings
list := starter.List()
for inx := range list {
fmt.Fprintf(output, "%2d%20s\n", inx+1, list[inx].Name)
err = list[inx].S.Run()
if err != nil {
return err
}
}
default:
// help flag
flag.Usage()
}
return nil
} | main.go | 0.765769 | 0.425844 | main.go | starcoder |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.