code
stringlengths
114
1.05M
path
stringlengths
3
312
quality_prob
float64
0.5
0.99
learning_prob
float64
0.2
1
filename
stringlengths
3
168
kind
stringclasses
1 value
// Zero-knowledge proof of knowledge of the discrete logarithm over safe prime product // A proof of knowledge of the discrete log of an element h2 = hx1 with respect to h1. // In our protocol, we will run two of these in parallel to prove that two elements h1,h2 generate the same group modN. package dlnproof import ( "fmt" "math/big" "github.com/manson1983-jdai/tss-lib/common" cmts "github.com/manson1983-jdai/tss-lib/crypto/commitments" ) const Iterations = 128 type ( Proof struct { Alpha, T [Iterations]*big.Int } ) func NewDLNProof(h1, h2, x, p, q, N *big.Int) *Proof { pMulQ := new(big.Int).Mul(p, q) modN, modPQ := common.ModInt(N), common.ModInt(pMulQ) a := make([]*big.Int, Iterations) alpha := [Iterations]*big.Int{} for i := range alpha { a[i] = common.GetRandomPositiveInt(pMulQ) alpha[i] = modN.Exp(h1, a[i]) } msg := append([]*big.Int{h1, h2, N}, alpha[:]...) c := common.SHA512_256i(msg...) t := [Iterations]*big.Int{} cIBI := new(big.Int) for i := range t { cI := c.Bit(i) cIBI = cIBI.SetInt64(int64(cI)) t[i] = modPQ.Add(a[i], modPQ.Mul(cIBI, x)) } return &Proof{alpha, t} } func (p *Proof) Verify(h1, h2, N *big.Int) bool { if p == nil { return false } modN := common.ModInt(N) msg := append([]*big.Int{h1, h2, N}, p.Alpha[:]...) c := common.SHA512_256i(msg...) 
cIBI := new(big.Int) for i := 0; i < Iterations; i++ { if p.Alpha[i] == nil || p.T[i] == nil { return false } cI := c.Bit(i) cIBI = cIBI.SetInt64(int64(cI)) h1ExpTi := modN.Exp(h1, p.T[i]) h2ExpCi := modN.Exp(h2, cIBI) alphaIMulH2ExpCi := modN.Mul(p.Alpha[i], h2ExpCi) if h1ExpTi.Cmp(alphaIMulH2ExpCi) != 0 { return false } } return true } func (p *Proof) Serialize() ([][]byte, error) { cb := cmts.NewBuilder() cb = cb.AddPart(p.Alpha[:]) cb = cb.AddPart(p.T[:]) ints, err := cb.Secrets() if err != nil { return nil, err } bzs := make([][]byte, len(ints)) for i, part := range ints { if part == nil { bzs[i] = []byte{} continue } bzs[i] = part.Bytes() } return bzs, nil } func UnmarshalDLNProof(bzs [][]byte) (*Proof, error) { bis := make([]*big.Int, len(bzs)) for i := range bis { bis[i] = new(big.Int).SetBytes(bzs[i]) } parsed, err := cmts.ParseSecrets(bis) if err != nil { return nil, err } if len(parsed) != 2 { return nil, fmt.Errorf("UnmarshalDLNProof expected %d parts but got %d", 2, len(parsed)) } pf := new(Proof) if len1 := copy(pf.Alpha[:], parsed[0]); len1 != Iterations { return nil, fmt.Errorf("UnmarshalDLNProof expected %d but copied %d", Iterations, len1) } if len2 := copy(pf.T[:], parsed[1]); len2 != Iterations { return nil, fmt.Errorf("UnmarshalDLNProof expected %d but copied %d", Iterations, len2) } return pf, nil }
crypto/dlnproof/proof.go
0.554712
0.562477
proof.go
starcoder
package content

import (
	"bytes"
	"fmt"
	"math/rand"
	"strings"

	"github.com/nboughton/go-roll"
	"github.com/nboughton/swnt/content/culture"
	"github.com/nboughton/swnt/content/format"
	"github.com/nboughton/swnt/content/name"
)

// TagsTable represents the collection of Tags
type TagsTable []Tag

// Roll selects a random Tag and returns its full formatted text.
func (t TagsTable) Roll() string {
	return fmt.Sprint(t[rand.Intn(len(t))])
}

// Random selects a random tag (used in Adventure seed generation).
// NOTE(review): this reads the package-level Tags table rather than the
// receiver t — confirm that is intentional before using it on a custom table.
func (t TagsTable) Random() string {
	return Tags[rand.Intn(len(Tags))].Name
}

// Find returns the tag specified. The search is case insensitive for convenience
func (t TagsTable) Find(name string) (Tag, error) {
	for _, tag := range t {
		// EqualFold replaces the ToLower==ToLower anti-pattern (staticcheck SA6005):
		// same case-insensitive match without allocating two lowered copies.
		if strings.EqualFold(tag.Name, name) {
			return tag, nil
		}
	}
	return Tag{}, fmt.Errorf("no tag with name \"%s\"", name)
}

// selectTags picks two distinct random tags, skipping any whose name appears
// in exclude. NOTE(review): it assumes at least two tags survive the
// exclusion — with one candidate the de-dup loop never terminates and with
// zero rand.Intn panics; confirm callers guarantee this.
func selectTags(exclude []string) (Tag, Tag) {
	var t TagsTable
	for _, tag := range Tags {
		if !tag.match(exclude) {
			t = append(t, tag)
		}
	}
	t1Idx, t2Idx := rand.Intn(len(t)), rand.Intn(len(t))
	for t1Idx == t2Idx { // Ensure the same tag isn't selected twice
		t2Idx = rand.Intn(len(t))
	}
	return t[t1Idx], t[t2Idx]
}

// match reports whether the tag's name equals (case-insensitively) any entry of s.
func (t Tag) match(s []string) bool {
	for _, x := range s {
		if strings.EqualFold(t.Name, x) {
			return true
		}
	}
	return false
}

// Tag represents a complete World Tag structure as extracted from the Stars Without Number core book.
type Tag struct { Name string Desc string Enemies roll.List Friends roll.List Complications roll.List Things roll.List Places roll.List } // Format tag as type o func (t Tag) Format(o format.OutputType) string { return format.Table(o, []string{t.Name, ""}, [][]string{ {"Description", t.Desc}, {"Enemies", t.Enemies.String()}, {"Friends", t.Friends.String()}, {"Complications", t.Complications.String()}, {"Things", t.Things.String()}, {"Places", t.Places.String()}, }) } func (t Tag) String() string { return t.Format(format.TEXT) } // World represents a generated world type World struct { Primary bool FullTags bool Name string Culture culture.Culture Tags [2]Tag Atmosphere string Temperature string Population string Biosphere string TechLevel string Origin string Relationship string Contact string } // NewWorld creates a new world. Set culture to culture.Any for a random culture and primary to false // to include relationship information. If tagNamesOnly is true then format output will not include full // tag text func NewWorld(primary bool, c culture.Culture, fullTags bool, excludeTags []string) World { t1, t2 := selectTags(excludeTags) w := World{ Primary: primary, FullTags: fullTags, Name: name.Table.ByCulture(c).Place.Roll(), Culture: c, Tags: [2]Tag{t1, t2}, Atmosphere: worldTable.atmosphere.Roll(), Temperature: worldTable.temperature.Roll(), Population: worldTable.population.Roll(), Biosphere: worldTable.biosphere.Roll(), TechLevel: worldTable.techLevel.Roll(), } if !w.Primary { w.Origin = otherWorldTable.origin.Roll() w.Relationship = otherWorldTable.relationship.Roll() w.Contact = otherWorldTable.contact.Roll() } return w } // Format returns the content of World w in format t func (w World) Format(t format.OutputType) string { var buf = new(bytes.Buffer) fmt.Fprintf(buf, format.Table(t, []string{w.Name, ""}, [][]string{ {"Atmosphere", w.Atmosphere}, {"Temperature", w.Temperature}, {"Biosphere", w.Biosphere}, {"Population", w.Population}, {"Culture", 
w.Culture.String()}, {"Tech Level", w.TechLevel}, })) if !w.FullTags { fmt.Fprintf(buf, format.Table(t, []string{}, [][]string{ {"Tags", fmt.Sprintf("%s, %s", w.Tags[0].Name, w.Tags[1].Name)}, })) } else { fmt.Fprintf(buf, format.Table(t, []string{}, [][]string{ {"Tags", ""}, {w.Tags[0].Name, w.Tags[0].Desc}, {w.Tags[1].Name, w.Tags[1].Desc}, })) } if !w.Primary { fmt.Fprintf(buf, format.Table(t, []string{}, [][]string{ {"Origins", ""}, {otherWorldTable.origin.Name, w.Origin}, {otherWorldTable.relationship.Name, w.Relationship}, {otherWorldTable.contact.Name, w.Contact}, })) } return buf.String() } func (w World) String() string { return w.Format(format.TEXT) } // Other represents origins of secondary population centers in a System var otherWorldTable = struct { origin roll.List relationship roll.List contact roll.List }{ roll.List{ Name: "Origin of the World", Items: []string{ "Recent colony from the primary world", "Refuge for exiles from primary", "Founded ages ago by a different group", "Founded long before the primary world", "Lost ancient colony of the primary", "Colony recently torn free of the primary", "Long-standing cooperative colony world", "Recent interstellar colony from elsewhere", }, }, roll.List{ Name: "Current Relationship", Items: []string{ "Confirmed hatred of each other", "Active cold war between them", "Old grudges or resentments", "Cultural disgust and avoidance", "Polite interchange and trade", "Cultural admiration for primary", "Long-standing friendship", "Unflinching mutual loyalty", }, }, roll.List{ Name: "Contact Point", Items: []string{ "Trade in vital goods", "Shared religion", "Mutual language", "Entertainment content", "Shared research", "Threat to both of them", "Shared elite families", "Exploiting shared resource", }, }, } var worldTable = struct { atmosphere roll.Table biosphere roll.Table temperature roll.Table techLevel roll.Table population roll.Table }{ // Atmosphere List roll.Table{ Dice: roll.Dice{N: 2, Die: roll.D6}, Items: 
[]roll.TableItem{ {Match: []int{2}, Text: "Corrosive, damaging to foreign objects"}, {Match: []int{3}, Text: "Inert gas, useless for respiration"}, {Match: []int{4}, Text: "Airless or thin to the point of suffocation"}, {Match: []int{5, 6, 7, 8, 9}, Text: "Breathable mix"}, {Match: []int{10}, Text: "Thick, but breathable with a pressure mask"}, {Match: []int{11}, Text: "Invasive, penetrating suit seals"}, {Match: []int{12}, Text: "Both corrosive and invasive in its effect"}, }, }, // Biosphere List roll.Table{ Dice: roll.Dice{N: 2, Die: roll.D6}, Items: []roll.TableItem{ {Match: []int{2}, Text: "Remnant biosphere"}, {Match: []int{3}, Text: "Microbial life forms exist"}, {Match: []int{4, 5}, Text: "No native biosphere"}, {Match: []int{6, 7, 8}, Text: "Human-miscible biosphere"}, {Match: []int{9, 10}, Text: "Immiscible biosphere"}, {Match: []int{11}, Text: "Hybrid biosphere"}, {Match: []int{12}, Text: "Engineered biosphere"}, }, }, // Temperature List roll.Table{ Dice: roll.Dice{N: 2, Die: roll.D6}, Items: []roll.TableItem{ {Match: []int{2}, Text: "Frozen, locked in perpetual ice"}, {Match: []int{3}, Text: "Cold, dominated by glaciers and tundra"}, {Match: []int{4, 5}, Text: "Variable cold with temperate places"}, {Match: []int{6, 7, 8}, Text: "Temperate, Earthlike in its ranges"}, {Match: []int{9, 10}, Text: "Variable warm, with temperate places"}, {Match: []int{11}, Text: "Warm, tropical and hotter in places"}, {Match: []int{12}, Text: "Burning, intolerably hot on its surface"}, }, }, // TechLevel List roll.Table{ Dice: roll.Dice{N: 2, Die: roll.D6}, Items: []roll.TableItem{ {Match: []int{2}, Text: "TL0, neolithic-level technology"}, {Match: []int{3}, Text: "TL1, medieval technology"}, {Match: []int{4, 5}, Text: "TL2, early Industrial Age tech"}, {Match: []int{6, 7, 8}, Text: "TL4, modern postech"}, {Match: []int{9, 10}, Text: "TL3, tech like that of present-day Earth"}, {Match: []int{11}, Text: "TL4+, postech with specialties"}, {Match: []int{12}, Text: "TL5, 
pretech with surviving infrastructure"}, }, }, // Population List roll.Table{ Dice: roll.Dice{N: 2, Die: roll.D6}, Items: []roll.TableItem{ {Match: []int{2}, Text: "Failed colony"}, {Match: []int{3}, Text: "Outpost"}, {Match: []int{4, 5}, Text: "Fewer than a million inhabitants"}, {Match: []int{6, 7, 8}, Text: "Several million inhabitants"}, {Match: []int{9, 10}, Text: "Hundreds of millions of inhabitants"}, {Match: []int{11}, Text: "Billions of inhabitants"}, {Match: []int{12}, Text: "Alien inhabitants"}, }, }, } // Tags List var Tags = TagsTable{ { Name: "Abandoned Colony", Desc: "The world once hosted a colony, whether human or otherwise, until some crisis or natural disaster drove the inhabitants away or killed them off. The colony might have been mercantile in nature, an expedition to extract valuable local resources, or it might have been a reclusive cabal of zealots. The remains of the colony are usually in ruins, and might still be dangerous from the aftermath of whatever destroyed it in the first place.", Enemies: roll.List{ Items: []string{ "Crazed survivors", "Ruthless plunderers of the ruins", "Automated defense system", }, }, Friends: roll.List{ Items: []string{ "Inquisitive stellar archaeologist", "Heir to the colony’s property", "Local wanting the place cleaned out and made safe", }, }, Complications: roll.List{ Items: []string{ "The local government wants the ruins to remain a secret", "The locals claim ownership of it", "The colony is crumbling and dangerous to navigate", }, }, Things: roll.List{ Items: []string{ "Long-lost property deeds", "Relic stolen by the colonists when they left", "Historical record of the colonization attempt", }, }, Places: roll.List{ Items: []string{ "Decaying habitation block", "Vine-covered town square", "Structure buried by an ancient landslide", }, }, }, { Name: "<NAME>", Desc: "The world has significant alien ruins present. 
The locals may or may not permit others to investigate the ruins, and may make it difficult to remove any objects of value without substantial payment. Any surviving ruins with worthwhile salvage almost certainly have some defense or hazard to explain their unplundered state.", Enemies: roll.List{ Items: []string{ "Customs inspector", "Worshipper of the ruins", "Hidden alien survivor", }, }, Friends: roll.List{ Items: []string{ "Curious scholar", "Avaricious local resident", "Interstellar smuggler", }, }, Complications: roll.List{ Items: []string{ "Traps in the ruins", "Remote location", "Paranoid customs officials", }, }, Things: roll.List{ Items: []string{ "Precious alien artifacts", "Objects left with the remains of a prior unsuccessful expedition", "Untranslated alien texts", "Untouched hidden ruins", }, }, Places: roll.List{ Items: []string{ "Undersea ruin", "Orbital ruin", "Perfectly preserved", }, }, }, { Name: "Altered Humanity", Desc: "The humans on this world are visibly and drastically different from normal humanity. They may have additional limbs, new sensory organs, or other significant changes. 
Were these from ancestral eugenic manipulation, strange stellar mutations, or from an environmental toxin unique to this world?", Enemies: roll.List{ Items: []string{ "Biochauvinist local", "Local experimenter", "Mentally unstable mutant", }, }, Friends: roll.List{ Items: []string{ "Local seeking a “cure”", "Curious xenophiliac", "Anthropological researcher", }, }, Complications: roll.List{ Items: []string{ "Alteration is contagious", "Alteration is necessary for long-term survival", "Locals fear and mistrust non-local humans", }, }, Things: roll.List{ Items: []string{ "Original pretech mutagenic equipment", "Valuable biological byproduct from the mutants", "“Cure” for the altered genes", "Record of the original colonial genotypes", }, }, Places: roll.List{ Items: []string{ "Abandoned eugenics laboratory", "An environment requiring the mutation for survival", "A sacred site where the first local was transformed", }, }, }, { Name: "Anarchists", Desc: "Rather than being an incidental anarchy of struggling tribes and warring factions, this world actually has a functional society with no centralized authority. Authority might be hyper-localized to extended families, specific religious parishes, or voluntary associations. 
Some force is preventing an outside group or internal malcontents from coalescing into a power capable of imposing its rule on the locals; this force might be an ancient pretech defense system, a benevolent military AI, or the sheer obscurity and isolation of the culture.", Enemies: roll.List{ Items: []string{ "Offworlder imperialist", "Reformer seeking to impose “good government”", "Exploiter taking advantage of the lack of centralized resistance", }, }, Friends: roll.List{ Items: []string{ "Proud missionary for anarchy", "Casual local free spirit", "Curious offworlder political scientist", }, }, Complications: roll.List{ Items: []string{ "The anarchistic structure is compelled by an external power", "The anarchy is enabled by currently abundant resources", "The protecting force that shelters the anarchy is waning", }, }, Things: roll.List{ Items: []string{ "A macguffin that would let the possessor enforce their rule on others", "A vital resource needed to preserve general liberty", "Tech forbidden as disruptive to the social order", }, }, Places: roll.List{ Items: []string{ "Community of similar-sized homes", "Isolated clan homestead", "Automated mining site", }, }, }, { Name: "Anthropomorphs", Desc: "The locals were originally human, but at some point became anthropomorphic, either as an ancient furry colony, a group of animal-worshiping sectarians, or gengineers who just happened to find animal elements most convenient for surviving on the world. 
Depending on the skill of the original gengineers, their feral forms may or may not work as well as their original human bodies, or may come with drawbacks inherited from their animal elements.", Enemies: roll.List{ Items: []string{ "Anthro-supremacist local", "Native driven by feral urges", "Outside exploiter who sees the locals as subhuman creatures", }, }, Friends: roll.List{ Items: []string{ "Fascinated genetic researcher", "Diplomat trained to deal with normals", "Local needing outside help", }, }, Complications: roll.List{ Items: []string{ "The locals consider their shapes a curse from their foolish ancestors", "Society is ordered according to animal forms", "The locals view normal humans as repulsive or inferior", }, }, Things: roll.List{ Items: []string{ "Pretech gengineering tech", "A “cure” that may not be wanted", "Sacred feral totem", }, }, Places: roll.List{ Items: []string{ "Shrine to a feral deity", "Nature preserve suited to an animal type", "Living site built to take advantage of animal traits", }, }, }, { Name: "Area 51", Desc: "The world’s government is fully aware of their local stellar neighbors, but the common populace has no idea about it- and the government means to keep it that way. 
Trade with government officials in remote locations is possible, but any attempt to clue the commoners in on the truth will be met with lethal reprisals.", Enemies: roll.List{ Items: []string{ "Suspicious government minder", "Free merchant who likes his local monopoly", "Local who wants a specimen for dissection", }, }, Friends: roll.List{ Items: []string{ "Crusading offworld investigator", "Conspiracy-theorist local", "Idealistic government reformer", }, }, Complications: roll.List{ Items: []string{ "The government has a good reason to keep the truth concealed", "The government ruthlessly oppresses the natives", "The government is actually composed of offworlders", }, }, Things: roll.List{ Items: []string{ "Elaborate spy devices", "Memory erasure tech", "Possessions of the last offworlder who decided to spread the truth", }, }, Places: roll.List{ Items: []string{ "Desert airfield", "Deep subterranean bunker", "Hidden mountain valley", }, }, }, { Name: "<NAME>", Desc: "Whatever the original climate and atmosphere type, something horrible happened to this world. Biological, chemical, or nanotechnical weaponry has reduced it to a wretched hellscape. 
Some local life might still be able to survive on its blasted surface, usually at some dire cost in health or humanity.", Enemies: roll.List{ Items: []string{ "Mutated badlands fauna", "Desperate local", "Badlands raider chief", }, }, Friends: roll.List{ Items: []string{ "Native desperately wishing to escape the world", "Scientist researching ecological repair methods", "Ruin scavenger", }, }, Complications: roll.List{ Items: []string{ "Radioactivity", "Bioweapon traces", "Broken terrain", "Sudden local plague", }, }, Things: roll.List{ Items: []string{ "Maltech research core", "Functional pretech Maltech research core", "Functional pretech weaponry", "An uncontaminated well", }, }, Places: roll.List{ Items: []string{ "Untouched oasis", "Ruined city", "Salt flat", }, }, }, { Name: "Battleground", Desc: "The world is a battleground for two or more outside powers. They may be interstellar rivals, or groups operating out of orbitals or other system bodies. Something about the planet is valuable enough for them to fight over, but the natives are too weak to be anything but animate obstacles to the fight.", Enemies: roll.List{ Items: []string{ "Ruthless military commander", "Looter pack chieftain", "Traitorous collaborator", }, }, Friends: roll.List{ Items: []string{ "Native desperately seeking protection", "Pragmatic military officer", "Hapless war orphan", }, }, Complications: roll.List{ Items: []string{ "The war just ended as both sides are leaving", "The natives somehow brought this on themselves", "A small group of natives profit tremendously from the fighting", }, }, Things: roll.List{ Items: []string{ "A cache of the resource the invaders seek", "Abandoned prototype military gear", "Precious spy intelligence lost by someone", }, }, Places: roll.List{ Items: []string{ "Artillery-pocked wasteland", "Reeking refugee camp", "Burnt-out shell of a city", }, }, }, { Name: "Beastmasters", Desc: "The natives have extremely close bonds with the local fauna, possibly having 
special means of communication and control through tech or gengineering. Local animal life plays a major role in their society, industry, or warfare, and new kinds of beasts may be bred to suit their purposes.", Enemies: roll.List{ Items: []string{ "Half-feral warlord of a beast swarm", "Coldly inhuman scientist", "Altered beast with human intellect and furious malice", }, }, Friends: roll.List{ Items: []string{ "Native bonded with an adorable animal", "Herder of very useful beasts", "Animal-revering mystic", }, }, Complications: roll.List{ Items: []string{ "The “animals” are very heavily gengineered humans", "The animals actually run the society", "The animals have the same rights as humans", }, }, Things: roll.List{ Items: []string{ "Tech used to alter animal life", "A plague vial that could wipe out the animals", "A pretech device that can perform a wonder if operated by a beast", }, }, Places: roll.List{ Items: []string{ "Park designed as a comfortable home for beasts", "Public plaza designed to accommodate animal companions", "Factory full of animal workers", }, }, }, { Name: "<NAME>", Desc: "Whether due to a lack of atmosphere or an uninhabitable climate, the world’s cities exist within domes or pressurized buildings. 
In such sealed environments, techniques of surveillance and control can grow baroque and extreme.", Enemies: roll.List{ Items: []string{ "Native dreading outsider contamination", "Saboteur from another bubble city", "Local official hostile to outsider ignorance of laws", }, }, Friends: roll.List{ Items: []string{ "Local rebel against the city officials", "Maintenance chief in need of help", "Surveyor seeking new building sites", }, }, Complications: roll.List{ Items: []string{ "Bubble rupture", "Failing atmosphere reprocessor", "Native revolt against officials", "All-seeing surveillance cameras", }, }, Things: roll.List{ Items: []string{ "Pretech habitat technology", "Valuable industrial products", "Master key codes to a city’s security system", }, }, Places: roll.List{ Items: []string{ "City power core", "Surface of the bubble", "Hydroponics complex", "Warren-like hab block", }, }, }, { Name: "Cheap Life", Desc: "Human life is near-worthless on this world. Ubiquitous cloning, local conditions that ensure early death, a culture that reveres murder, or a social structure that utterly discounts the value of most human lives ensures that death is the likely outcome for any action that irritates someone consequential. 
", Enemies: roll.List{ Items: []string{ "Master assassin", "Bloody-handed judge", "Overseer of disposable clones", }, }, Friends: roll.List{ Items: []string{ "Endearing local whose life the PCs accidentally bought", "Escapee from death seeking outside help", "Reformer trying to change local mores", }, }, Complications: roll.List{ Items: []string{ "Radiation or local diseases ensure all locals die before twenty-five years of age", "Tech ensures that death is just an annoyance", "Locals are totally convinced of a blissful afterlife", }, }, Things: roll.List{ Items: []string{ "Device that revives or re-embodies the dead", "Maltech engine fueled by human life", "Priceless treasure held by a now-dead owner", }, }, Places: roll.List{ Items: []string{ "Thronging execution ground", "extremely cursory cemetery", "Factory full of lethal dangers that could be corrected easily", }, }, }, { Name: "Civil War", Desc: "The world is currently torn between at least two opposing factions, all of which claim legitimacy. 
The war may be the result of a successful rebel uprising against tyranny, or it might just be the result of schemers who plan to be the new masters once the revolution is complete.", Enemies: roll.List{ Items: []string{ "Faction commissar", "Angry native", "Conspiracy theorist who blames offworlders for the war", "Deserter looking out for himself", "Guerrilla bandit chieftain", }, }, Friends: roll.List{ Items: []string{ "Faction loyalist seeking aid", "Native caught in the crossfire", "Offworlder seeking passage off the planet", }, }, Complications: roll.List{ Items: []string{ "The front rolls over the group", "Famine strikes", "Bandit infestations are in the way", }, }, Things: roll.List{ Items: []string{ "Ammo dump", "Military cache", "Treasure buried for after the war", "Secret war plans", }, }, Places: roll.List{ Items: []string{ "Battle front", "Bombed-out town", "Rear-area red light zone", "Propaganda broadcast tower", }, }, }, { Name: "<NAME>", Desc: "Two or more great powers control the planet, and they have a hostility to each other that’s just barely less than open warfare. 
The hostility might be ideological in nature, or it might revolve around control of some local resource.", Enemies: roll.List{ Items: []string{ "Suspicious chief of intelligence", "Native who thinks the outworlders are with the other side", "Femme fatale", }, }, Friends: roll.List{ Items: []string{ "Apolitical information broker", "Spy for the other side", "Unjustly accused innocent", "“He’s a bastard", "but he’s our bastard” official", }, }, Complications: roll.List{ Items: []string{ "Police sweep", "Low-level skirmishing", "“Red scare”", }, }, Things: roll.List{ Items: []string{ "List of traitors in government", "secret military plans", "Huge cache of weapons built up in preparation for war", }, }, Places: roll.List{ Items: []string{ "Seedy bar in a neutral area", "Political rally", "Isolated area where fighting is underway", }, }, }, { Name: "<NAME>", Desc: "A neighboring world has successfully colonized this less-advanced or less-organized planet, and the natives aren’t happy about it. 
A puppet government may exist, but all real decisions are made by the local viceroy.", Enemies: roll.List{ Items: []string{ "Suspicious security personnel", "Offworlder-hating natives", "Local crime boss preying on rich offworlders", }, }, Friends: roll.List{ Items: []string{ "Native resistance leader", "Colonial official seeking help", "Native caught between the two sides", }, }, Complications: roll.List{ Items: []string{ "Natives won’t talk to offworlders", "Colonial repression", "Misunderstood local customs", }, }, Things: roll.List{ Items: []string{ "Relic of the resistance movement", "List of collaborators", "Precious substance extracted by colonial labor", }, }, Places: roll.List{ Items: []string{ "Deep wilderness resistance camp", "City district off-limits to natives", "Colonial labor site", }, }, }, { Name: "Cultural Power", Desc: "The world is a considerable cultural power in the sector, producing music, art, philosophy, or some similar intangible that their neighbors find irresistibly attractive. 
Other worlds might have a profound degree of cultural cachet as the inheritor of some venerable artistic tradition.", Enemies: roll.List{ Items: []string{ "Murderously eccentric artist", "Crazed fan", "Failed artist with an obsessive grudge", "Critic with a crusade to enact", }, }, Friends: roll.List{ Items: []string{ "Struggling young artist", "Pupil of the artistic tradition", "Scholar of the art", "Offworlder hating the source of corrupting alien ways", }, }, Complications: roll.List{ Items: []string{ "The art is slowly lethal to its masters", "The art is mentally or physically addictive", "The art is a fragment of ancient technical or military science", }, }, Things: roll.List{ Items: []string{ "The instrument of a legendary master", "The only copy of a dead master’s opus", "Proof of intellectual property ownership", }, }, Places: roll.List{ Items: []string{ "Recording or performance studio", "Public festival choked with tourists", "Monument to a dead master of the art", }, }, }, { Name: "Cybercommunists", Desc: "On this world communism actually works, thanks to pretech computing devices and greater or lesser amounts of psychic precognition. Central planning nodes direct all production and employment on the world. Citizens in good standing have access to ample amounts of material goods for all needs and many wants. Instead of strife over wealth, conflicts erupt over political controls, cultural precepts, or control over the planning nodes. 
Many cybercommunist worlds show a considerable bias toward the private interests of those who run the planning nodes.", Enemies: roll.List{ Items: []string{ "Embittered rebel against perceived unfairness", "Offworlder saboteur", "Aspiring Stalin-figure", }, }, Friends: roll.List{ Items: []string{ "Idealistic planning node tech", "Cynical anti-corruption cop", "Precognitive economist", }, }, Complications: roll.List{ Items: []string{ "The pretech planning computers are breaking down", "The planning only works because the locals have been mentally or physically altered", "The planning computers can’t handle the increasing population within the system", }, }, Things: roll.List{ Items: []string{ "Planning node computer", "Wildly destabilizing commodity that can’t be factored into plans", "A tremendous store of valuables made by accident", }, }, Places: roll.List{ Items: []string{ "Humming factory", "Apartment block of perfectly equal flats", "Mass demonstration of unity", }, }, }, { Name: "Cyborgs", Desc: "The planet’s population makes heavy use of cybernetics, with many of the inhabitants possessing at least a cosmetic amount of chrome. 
This may be the result of a strong local cyber tech base, a religious injunction, or simply a necessary measure to survive the local conditions.", Enemies: roll.List{ Items: []string{ "Ambitious hacker of cyber implants", "Cybertech oligarch", "Researcher craving fresh offworlders", "Cybered-up gang boss", }, }, Friends: roll.List{ Items: []string{ "Charity-working implant physician", "Idealistic young cyber researcher", "Avant-garde activist", }, }, Complications: roll.List{ Items: []string{ "The powerful and dangerous come here often for cutting-edge implants", "The cyber has some universal negative side-effect", "Cyber and those implanted with it are forbidden to leave the planet as a tech security measure", }, }, Things: roll.List{ Items: []string{ "Unique prototype cyber implant", "Secret research files", "A virus that debilitates cyborgs", "A cache of critically-needed therapeutic cyber", }, }, Places: roll.List{ Items: []string{ "Grimy slum chop-shop", "Bloody lair of implant rippers", "Stark plaza where everyone is seeing things through their augmented-reality cyber", }, }, }, { Name: "<NAME>", Desc: "The world regularly suffers some apocalyptic catastrophe that wipes out organized civilization on it. 
The local culture is aware of this cycle and has traditions to ensure a fragment of civilization survives into the next era, but these traditions don’t always work properly, and sometimes dangerous fragments of the past emerge.", Enemies: roll.List{ Items: []string{ "Offworlder seeking to trigger the apocalypse early for profit", "Local recklessly taking advantage of preparation stores", "Demagogue claiming the cycle is merely a myth of the authorities", }, }, Friends: roll.List{ Items: []string{ "Harried official working to prepare", "Offworlder studying the cycles", "Local threatened by perils of the cycle’s initial stages", }, }, Complications: roll.List{ Items: []string{ "The cycles really are a myth of the authorities", "The cycles are controlled by alien constructs", "An outside power is interfering with preparation", }, }, Things: roll.List{ Items: []string{ "A lost cache of ancient treasures", "Tech or archives that will pinpoint the cycle’s timing", "Keycodes to bypass an ancient vault’s security", }, }, Places: roll.List{ Items: []string{ "Lethally-defended vault of forgotten secrets", "Starport crowded with panicked refugees", "Town existing in the shadow of some monstrous monument to a former upheaval", }, }, }, { Name: "<NAME>", Desc: "The world may have a breathable atmosphere and a human-tolerable temperature range, but it is an arid, stony waste outside of a few places made habitable by human effort. 
The deep wastes are largely unexplored and inhabited by outcasts and worse.", Enemies: roll.List{ Items: []string{ "Raider chieftain", "Crazed hermit", "Angry isolationists", "Paranoid mineral prospector", "Strange desert beast", }, }, Friends: roll.List{ Items: []string{ "Native guide", "Research biologist", "Aspiring terraformer", }, }, Complications: roll.List{ Items: []string{ "Sandstorms", "Water supply failure", "Native warfare over water rights", }, }, Things: roll.List{ Items: []string{ "Enormous water reservoir", "Map of hidden wells", "Pretech rainmaking equipment", }, }, Places: roll.List{ Items: []string{ "Oasis", "“The Empty Quarter” of the desert", "Hidden underground cistern", }, }, }, { Name: "<NAME>", Desc: "The world is doomed, and the locals may or may not know it. Some cosmic catastrophe looms before them, and the locals have no realistic way to get everyone to safety. To the extent that the public is aware, society is disintegrating into a combination of religious fervor, abject hedonism, and savage violence.", Enemies: roll.List{ Items: []string{ "Crazed prophet of a false salvation", "Ruthless leader seeking to flee with their treasures", "Cynical ship captain selling a one-way trip into hard vacuum as escape to another world", }, }, Friends: roll.List{ Items: []string{ "Appealing waif or family head seeking escape", "Offworld relief coordinator", "Harried law officer", }, }, Complications: roll.List{ Items: []string{ "The doom is false or won’t actually kill everyone", "The doom was intentionally triggered by someone", "Mass escape is possible if warring groups can somehow be brought to cooperate", }, }, Things: roll.List{ Items: []string{ "Clearance for a ship to leave the planet", "A cache of priceless cultural artifacts", "The life savings of someone trying to buy passage out", "Data that would prove to the public the end is nigh", }, }, Places: roll.List{ Items: []string{ "Open square beneath a sky angry with a foretaste of th impending 
ruin", "Orgiastic celebration involving sex and murder in equal parts", "Holy site full of desperate petitioners to the divine", }, }, }, { Name: "Dying Race", Desc: "The inhabitants of this world are dying out, and they know it. Through environmental toxins, hostile bio-weapons, or sheer societal despair, the culture cannot replenish its numbers. Members seek meaning in their own strange goals or peculiar faiths, though a few might struggle to find some way to reverse their slow yet inevitable doom.", Enemies: roll.List{ Items: []string{ "Hostile outsider who wants the locals dead", "Offworlder seeking to take advantage of their weakened state", "Invaders eager to push the locals out of their former lands", }, }, Friends: roll.List{ Items: []string{ "One of the few youth among the population", "Determined and hopeful reformer", "Researcher seeking a new method of reproduction", }, }, Complications: roll.List{ Items: []string{ "The dying culture’s values were monstrous", "The race’s death is somehow necessary to prevent some grand catastrophe", "The race is somehow convinced they deserve this fate", }, }, Things: roll.List{ Items: []string{ "Extremely valuable reproductive tech", "Treasured artifacts of the former age", "Bioweapon used on the race", }, }, Places: roll.List{ Items: []string{ "City streets devoid of pedestrians", "Mighty edifice now crumbling with disrepair", "Small dwelling full of people in a town now otherwise empty", }, }, }, { Name: "<NAME>", Desc: "Even in the days before the Silence, major improvement of the human genome always seemed to come with unacceptable side-effects. 
Some worlds host secret cults that perpetuate these improvements regardless of the cost, and a few planets have been taken over entirely by the cults.", Enemies: roll.List{ Items: []string{ "Eugenic superiority fanatic", "Mentally unstable homo superior", "Mad eugenic scientist", }, }, Friends: roll.List{ Items: []string{ "Eugenic propagandist", "Biotechnical investigator", "Local seeking revenge on cult", }, }, Complications: roll.List{ Items: []string{ "The altered cultists look human", "The locals are terrified of any unusual physical appearance", "The genetic modifications- and drawbacks- are contagious with long exposure", }, }, Things: roll.List{ Items: []string{ "Serum that induces the alteration", "Elixir that reverses the alteration", "Pretech biotechnical databanks", "List of secret cult sympathizers", }, }, Places: roll.List{ Items: []string{ "Eugenic breeding pit", "Isolated settlement of altered humans", "Public place infiltrated by cult sympathizers", }, }, }, { Name: "Exchange Consulate", Desc: "The Exchange of Light once served as the largest, most trusted banking and diplomatic service in human space. 
Even after the Silence, some worlds retain a functioning Exchange Consulate where banking services and arbitration can be arranged.", Enemies: roll.List{ Items: []string{ "Corrupt Exchange official", "Indebted native who thinks the players are Exchange agents", "Exchange official dunning the players for debts incurred", }, }, Friends: roll.List{ Items: []string{ "Consul in need of offworld help", "Local banker seeking to hurt his competition", "Exchange diplomat", }, }, Complications: roll.List{ Items: []string{ "The local Consulate has been corrupted", "the Consulate is cut off from its funds", "A powerful debtor refuses to pay", }, }, Things: roll.List{ Items: []string{ "Exchange vault codes", "Wealth hidden to conceal it from a bankruptcy judgment", "Location of forgotten vault", }, }, Places: roll.List{ Items: []string{ "Consulate meeting chamber", "Meeting site between fractious disputants", "Exchange vault", }, }, }, { Name: "<NAME>", Desc: "At some point in the past, this world was a hegemonic power over some or all of the sector, thanks to superior tech, expert diplomacy, the weakness of their neighbors, or inherited Mandate legitimacy. 
Some kind of crash or revolt broke their power, however, and now the world is littered with the wreckage of former glory.", Enemies: roll.List{ Items: []string{ "Bitter pretender to a meaningless throne", "Resentful official dreaming of empire", "Vengeful offworlder seeking to punish their old rulers", }, }, Friends: roll.List{ Items: []string{ "Realistic local leader trying to hold things together", "Scholar of past glories", "Refugee from an overthrown colonial enclave", }, }, Complications: roll.List{ Items: []string{ "The hegemon’s rule was enlightened and fair", "It collapsed due to its own internal strife rather than external resistance", "It pretends that nothing has happened to its power", "It’s been counter-colonized by vengeful outsiders", }, }, Things: roll.List{ Items: []string{ "Precious insignia of former rule", "Relic tech important to its power", "Plundered colonial artifact", }, }, Places: roll.List{ Items: []string{ "Palace far too grand for its current occupant", "Oversized spaceport now in disrepair", "Boulevard lined with monuments to past glories", }, }, }, { Name: "<NAME>", Desc: "In the long, isolated night of the Silence, some worlds have experienced total moral and cultural collapse. Whatever remains has been twisted beyond recognition into assorted death cults, xenophobic fanaticism, horrific cultural practices, or other behavior unacceptable on more enlightened worlds. 
These worlds are almost invariably quarantined by other planets.", Enemies: roll.List{ Items: []string{ "Decadent noble", "Mad cultist", "Xenophobic local", "Cannibal chief", "Maltech researcher", }, }, Friends: roll.List{ Items: []string{ "Trapped outworlder", "Aspiring reformer", "Native wanting to avoid traditional flensing", }, }, Complications: roll.List{ Items: []string{ "Horrific local “celebration”", "Inexplicable and repugnant social rules", "Taboo zones and people", }, }, Things: roll.List{ Items: []string{ "Terribly misused piece of pretech", "Wealth accumulated through brutal evildoing", "Valuable possession owned by luckless outworlder victim", }, }, Places: roll.List{ Items: []string{ "Atrocity amphitheater", "Traditional torture parlor", "Ordinary location twisted into something terrible.", }, }, }, { Name: "Flying Cities", Desc: "Perhaps the world is a gas giant, or plagued with unendurable storms at lower levels of the atmosphere. For whatever reason, the cities of this world fly above the surface of the planet. 
Perhaps they remain stationary, or perhaps they move from point to point in search of resources.", Enemies: roll.List{ Items: []string{ "Rival city pilot", "Tech thief attempting to steal outworld gear", "Saboteur or scavenger plundering the city’s tech", }, }, Friends: roll.List{ Items: []string{ "Maintenance tech in need of help", "City defense force pilot", "Meteorological researcher", }, }, Complications: roll.List{ Items: []string{ "Sudden storms", "Drastic altitude loss", "Rival city attacks", "Vital machinery breaks down", }, }, Things: roll.List{ Items: []string{ "Precious refined atmospheric gases", "Pretech grav engine plans", "Meteorological codex predicting future storms", }, }, Places: roll.List{ Items: []string{ "Underside of the city", "The one calm place on the planet’s surface", "Catwalks stretching over unimaginable gulfs below.", }, }, }, { Name: "Forbidden Tech", Desc: "Some group on this planet fabricates or uses maltech. Unbraked AIs doomed to metastasize into insanity, nation-destroying nanowarfare particles, slow-burn DNA corruptives, genetically engineered slaves, or something worse still. 
The planet’s larger population may or may not be aware of the danger in their midst.", Enemies: roll.List{ Items: []string{ "Mad scientist", "Maltech buyer from offworld", "Security enforcer", }, }, Friends: roll.List{ Items: []string{ "Victim of maltech", "Perimeter agent", "Investigative reporter", "Conventional arms merchant", }, }, Complications: roll.List{ Items: []string{ "The maltech is being fabricated by an unbraked AI", "The government depends on revenue from maltech sales to offworlders", "Citizens insist that it’s not really maltech", }, }, Things: roll.List{ Items: []string{ "Maltech research data", "The maltech itself", "Precious pretech equipment used to create it", }, }, Places: roll.List{ Items: []string{ "Horrific laboratory", "Hellscape sculpted by the maltech’s use", "Government building meeting room", }, }, }, { Name: "<NAME>", Desc: "The locals of this world were once famed for their martial prowess. They may have simply had a very militaristic culture, or were genetically engineered for combat, or developed high-tech weaponry, or had brilliant leadership. 
Those days are past, however, either due to crushing defeat, external restrictions, or a cultural turn toward peace.", Enemies: roll.List{ Items: []string{ "Unreformed warlord leader", "Bitter mercenary chief", "Victim of their warfare seeking revenge", }, }, Friends: roll.List{ Items: []string{ "Partisan of the new peaceful ways", "Outsider desperate for military aid", "Martial genius repressed by the new dispensation", }, }, Complications: roll.List{ Items: []string{ "Neighboring worlds want them pacified or dead", "They only ever used their arts in self-defense", "The source of their gifts has been “turned off” in a reversible way", }, }, Things: roll.List{ Items: []string{ "War trophy taken from a defeated foe", "Key to re-activating their martial ways", "Secret cache of high-tech military gear", }, }, Places: roll.List{ Items: []string{ "Cemetery of dead heroes", "Memorial hall now left to dust and silence", "Monument plaza dedicated to the new culture", }, }, }, { Name: "<NAME>", Desc: "The geology or geography of this world is simply freakish. Perhaps it’s composed entirely of enormous mountain ranges, or regular bands of land and sea, or the mineral structures all fragment into perfect cubes. 
The locals have learned to deal with it and their culture will be shaped by its requirements.", Enemies: roll.List{ Items: []string{ "Crank xenogeologist", "Cultist who believes it the work of aliens", }, }, Friends: roll.List{ Items: []string{ "Research scientist", "Prospector", "Artist", }, }, Complications: roll.List{ Items: []string{ "Local conditions that no one remembers to tell outworlders about", "Lethal weather", "Seismic activity", }, }, Things: roll.List{ Items: []string{ "Unique crystal formations", "Hidden veins of a major precious mineral strike", "Deed to a location of great natural beauty", }, }, Places: roll.List{ Items: []string{ "Atop a bizarre geological formation", "Tourist resort catering to offworlders", }, }, }, { Name: "<NAME>", Desc: "The planet is plagued with some sort of bizarre or hazardous weather pattern. Perhaps city-flattening storms regularly scourge the surface, or the world’s sun never pierces its thick banks of clouds.", Enemies: roll.List{ Items: []string{ "Criminal using the weather as a cover", "Weather cultists convinced the offworlders are responsible for some disaster", "Native predators dependent on the weather", }, }, Friends: roll.List{ Items: []string{ "Meteorological researcher", "Holodoc crew wanting shots of the weather", }, }, Complications: roll.List{ Items: []string{ "The weather itself", "Malfunctioning pretech terraforming engines that cause the weather", }, }, Things: roll.List{ Items: []string{ "Wind-scoured deposits of precious minerals", "Holorecords of a spectacularly and rare weather pattern", "Naturally-sculpted objects of intricate beauty", }, }, Places: roll.List{ Items: []string{ "Eye of the storm", "The one sunlit place", "Terraforming control room", }, }, }, { Name: "Friendly Foe", Desc: "Some hostile alien race or malevolent cabal has a branch or sect on this world that is actually quite friendly toward outsiders. 
For whatever internal reason, they are willing to negotiate and deal honestly with strangers, and appear to lack the worst impulses of their fellows.", Enemies: roll.List{ Items: []string{ "Driven hater of all their kind", "Internal malcontent bent on creating conflict", "Secret master who seeks to lure trust", }, }, Friends: roll.List{ Items: []string{ "Well-meaning bug-eyed monster", "Principled eugenics cultist", "Suspicious investigator", }, }, Complications: roll.List{ Items: []string{ "The group actually is as harmless and benevolent as they seem", "The group offers a vital service at the cost of moral compromise", "The group still feels bonds of affiliation with their hostile brethren", }, }, Things: roll.List{ Items: []string{ "Forbidden xenotech", "Eugenic biotech template", "Evidence to convince others of their kind that they are right", }, }, Places: roll.List{ Items: []string{ "Repurposed maltech laboratory", "Alien conclave building", "Widely-feared starship interior", }, }, }, { Name: "<NAME>", Desc: "Gold, silver, and other conventional precious minerals are common and cheap now that asteroid mining is practical for most worlds. But some minerals and compounds remain precious and rare, and this world has recently been discovered to have a supply of them. 
People from across the sector have come to strike it rich.", Enemies: roll.List{ Items: []string{ "Paranoid prospector", "Aspiring mining tycoon", "Rapacious merchant", }, }, Friends: roll.List{ Items: []string{ "Claim-jumped miner", "Native alien", "Curious tourist", }, }, Complications: roll.List{ Items: []string{ "The strike is a hoax", "The strike is of a dangerous toxic substance", "Export of the mineral is prohibited by the planetary government", "The native aliens live around the strike’s location", }, }, Things: roll.List{ Items: []string{ "Cases of the refined element", "Pretech mining equipment", "A dead prospector’s claim deed", }, }, Places: roll.List{ Items: []string{ "Secret mine", "Native alien village", "Processing plant", "Boom town", }, }, }, { Name: "<NAME>", Desc: "The locals are obsessed with completing a massive project, one that has consumed them for generations. It might be the completion of a functioning spaceyard, a massive solar power array, a network of terraforming engines, or the universal conversion of their neighbors to their own faith. 
The purpose of their entire civilization is to progress and some day complete the work.", Enemies: roll.List{ Items: []string{ "Local planning to sacrifice the PCs for the work", "Local who thinks the PCs threaten the work", "Obsessive zealot ready to destroy someone or something important to the PCs for the sake of the work", }, }, Friends: roll.List{ Items: []string{ "Outsider studying the work", "Local with a more temperate attitude", "Supplier of work materials", }, }, Complications: roll.List{ Items: []string{ "The work is totally hopeless", "Different factions disagree on what the work is", "An outside power is determined to thwart the work", }, }, Things: roll.List{ Items: []string{ "Vital supplies for the work", "Plans that have been lost", "Tech that greatly speeds the work", }, }, Places: roll.List{ Items: []string{ "A bustling work site", "Ancestral worker housing", "Local community made only semi-livable by the demands of the work", }, }, }, { Name: "Hatred", Desc: "For whatever reason, this world’s populace has a burning hatred for the inhabitants of a neighboring system. Perhaps this world was colonized by exiles, or there was a recent interstellar war, or ideas of racial or religious superiority have fanned the hatred. 
Regardless of the cause, the locals view their neighbor and any sympathizers with loathing.", Enemies: roll.List{ Items: []string{ "Native convinced that the offworlders are agents of Them", "Cynical politician in need of scapegoats", }, }, Friends: roll.List{ Items: []string{ "Intelligence agent needing catspaws", "Holodoc producers needing “an inside look”", "Unlucky offworlder from the hated system", }, }, Complications: roll.List{ Items: []string{ "The characters are wearing or using items from the hated world", "The characters are known to have done business there", "The characters “look like” the hated others", }, }, Things: roll.List{ Items: []string{ "Proof of Their evildoing", "Reward for turning in enemy agents", "Relic stolen by Them years ago", }, }, Places: roll.List{ Items: []string{ "War crimes museum", "Atrocity site", "Captured and decommissioned spaceship kept as a trophy", }, }, }, { Name: "Heavy Industry", Desc: "With interstellar transport so limited in the bulk it can move, worlds have to be largely self-sufficient in industry. Some worlds are more sufficient than others, however, and this planet has a thriving manufacturing sector capable of producing large amounts of goods appropriate to its tech level. 
The locals may enjoy a correspondingly higher lifestyle, or the products might be devoted towards vast projects for the aggrandizement of the rulers.", Enemies: roll.List{ Items: []string{ "Tycoon monopolist", "Industrial spy", "Malcontent revolutionary", }, }, Friends: roll.List{ Items: []string{ "Aspiring entrepreneur", "Worker union leader", "Ambitious inventor", }, }, Complications: roll.List{ Items: []string{ "The factories are toxic", "The resources extractable at their tech level are running out", "The masses require the factory output for survival", "The industries’ major output is being obsoleted by offworld tech", }, }, Things: roll.List{ Items: []string{ "Confidential industrial data", "Secret union membership lists", "Ownership shares in an industrial complex", }, }, Places: roll.List{ Items: []string{ "Factory floor", "Union meeting hall", "Toxic waste dump", "R&D complex", }, }, }, { Name: "<NAME>", Desc: "This world has large stocks of valuable minerals, usually necessary for local industry, life support, or refinement into loads small enough to export offworld. Major mining efforts are necessary to extract the minerals, and many natives work in the industry.", Enemies: roll.List{ Items: []string{ "Mine boss", "Tunnel saboteur", "Subterranean predators", }, }, Friends: roll.List{ Items: []string{ "Hermit prospector", "Offworld investor", "Miner’s union representative", }, }, Complications: roll.List{ Items: []string{ "The refinery equipment breaks down", "Tunnel collapse", "Silicate life forms growing in the miners’ lungs", }, }, Things: roll.List{ Items: []string{ "The mother lode", "Smuggled case of refined mineral", "Faked crystalline mineral samples", }, }, Places: roll.List{ Items: []string{ "Vertical mine face", "Tailing piles", "Roaring smelting complex", }, }, }, { Name: "Hivemind", Desc: "Natives of this world exist in a kind of mental gestalt, sharing thoughts and partaking of a single identity. 
Powerful pretech, exotic psionics, alien influence, or some other cause has left the world sharing one identity. Individual members may have greater or lesser degrees of effective coordination with the whole.", Enemies: roll.List{ Items: []string{ "A hivemind that wants to assimilate outsiders", "A hivemind that has no respect for unjoined life", "A hivemind that fears and hates unjoined life", }, }, Friends: roll.List{ Items: []string{ "A scholar studying the hivemind", "A person severed from the gestalt", "A relative of someone who has been assimilated", }, }, Complications: roll.List{ Items: []string{ "The hivemind only functions on this world", "The hivemind has strict range limits", "The hivemind has different personality factions", "The hivemind only happens at particular times", "The world is made of semi-sentient drones and a single AI", }, }, Things: roll.List{ Items: []string{ "Vital tech for maintaining the mind", "Precious treasure held by now-assimilated outsider", "Tech that “blinds” the hivemind to the tech’s users", }, }, Places: roll.List{ Items: []string{ "Barely tolerable living cells for individuals", "Workside where individuals casually die in their labors", "Community with absolutely no social or group-gathering facilities", }, }, }, { Name: "<NAME>", Desc: "A savage holy war is raging on this world, either between factions of locals or as a united effort against the pagans of some neighboring world. 
This war might involve a conventional religion, or it might be the result of a branding campaign, political ideology, artistic movement, or any other cause that people use as a substitute for traditional religion.", Enemies: roll.List{ Items: []string{ "Blood-mad pontiff", "Coldly cynical secular leader", "Totalitarian political demagogue", }, }, Friends: roll.List{ Items: []string{ "Desperate peacemaker", "Hard-pressed refugee of the fighting", "Peaceful religious leader who lost the internal debate", }, }, Complications: roll.List{ Items: []string{ "The targets of the war really are doing something diabolically horrible", "The holy war is just a mask for a very traditional casus belli", "The leaders don’t want the war won but only prolonged", "Both this world and the target of the war are religion-obsessed", }, }, Things: roll.List{ Items: []string{ "Sacred relic of the faith", "A captured blasphemer under a death sentence", "Plunder seized in battle", }, }, Places: roll.List{ Items: []string{ "Massive holy structure", "Razed community of infidels", "Vast shrine to the martyrs dead in war", }, }, }, { Name: "<NAME>", Desc: "The world is teeming with life, and it hates humans. Perhaps the life is xenoallergenic, forcing filter masks and tailored antiallergens for survival. 
It could be the native predators are huge and fearless, or the toxic flora ruthlessly outcompetes earth crops.", Enemies: roll.List{ Items: []string{ "Local fauna", "Nature cultist", "Native aliens", "Callous labor overseer", }, }, Friends: roll.List{ Items: []string{ "Xenobiologist", "Tourist on safari", "Grizzled local guide", }, }, Complications: roll.List{ Items: []string{ "Filter masks fail", "Parasitic alien infestation", "Crop greenhouses lose bio-integrity", }, }, Things: roll.List{ Items: []string{ "Valuable native biological extract", "Abandoned colony vault", "Remains of an unsuccessful expedition", }, }, Places: roll.List{ Items: []string{ "Deceptively peaceful glade", "Steaming polychrome jungle", "Nightfall when surrounded by Things", }, }, }, { Name: "Hostile Space", Desc: "The system in which the world exists is a dangerous neighborhood. Something about the system is perilous to inhabitants, either through meteor swarms, stellar radiation, hostile aliens in the asteroid belt, or periodic comet clouds.", Enemies: roll.List{ Items: []string{ "Alien raid leader", "Meteor-launching terrorists", "Paranoid local leader", }, }, Friends: roll.List{ Items: []string{ "Astronomic researcher", "Local defense commander", "Early warning monitor agent", }, }, Complications: roll.List{ Items: []string{ "The natives believe the danger is divine chastisement", "The natives blame outworlders for the danger", "The native elite profit from the danger in some way", }, }, Things: roll.List{ Items: []string{ "Early warning of a raid or impact", "Abandoned riches in a disaster zone", "Key to a secure bunker", }, }, Places: roll.List{ Items: []string{ "City watching an approaching asteroid", "Village burnt in an alien raid", "Massive ancient crater", }, }, }, { Name: "Immortals", Desc: "Natives of this world are effectively immortal. 
They may have been gengineered for tremendous lifespans, or have found some local anagathic, or be cyborg life forms, or be so totally convinced of reincarnation that death is a cultural irrelevance. Any immortality technique is likely applicable only to them, or else it’s apt to be a massive draw to outside imperialists.", Enemies: roll.List{ Items: []string{ "Outsider determined to steal immortality", "Smug local convinced of their immortal wisdom to rule all", "Offworlder seeking the world’s ruin before it becomes a threat to all", }, }, Friends: roll.List{ Items: []string{ "Curious longevity researcher", "Thrill-seeking local", }, }, Complications: roll.List{ Items: []string{ "Immortality requires doing something that outsiders can’t or won’t willingly do", "The immortality ends if they leave the world", "Death is the punishment for even minor crimes", "Immortals must die or go offworld after a certain span", "Immortality has brutal side-effects", }, }, Things: roll.List{ Items: []string{ "Immortality drug", "Masterwork of an ageless artisan", "Toxin that only affects immortals", }, }, Places: roll.List{ Items: []string{ "Community with no visible children", "Unchanging structure of obvious ancient age", "Cultural performance relying on a century of in-jokes", }, }, }, { Name: "<NAME>", Desc: "The world may be sophisticated or barely capable of steam engines, but either way it produces something rare and precious to the wider galaxy. 
It might be some pharmaceutical extract produced by a secret recipe, a remarkably popular cultural product, or even gengineered humans uniquely suited for certain work.", Enemies: roll.List{ Items: []string{ "Monopolist", "Offworlder seeking prohibition of the specialty", "Native who views the specialty as sacred", }, }, Friends: roll.List{ Items: []string{ "Spy searching for the source", "Artisan seeking protection", "Exporter with problems", }, }, Complications: roll.List{ Items: []string{ "The specialty is repugnant in nature", "The crafters refuse to sell to offworlders", "The specialty is made in a remote", "dangerous place", "The crafters don’t want to make the specialty any more", }, }, Things: roll.List{ Items: []string{ "The specialty itself", "The secret recipe", "Sample of a new improved variety", }, }, Places: roll.List{ Items: []string{ "Secret manufactory", "Hidden cache", "Artistic competition for best artisan", }, }, }, { Name: "Local Tech", Desc: "The locals can create a particular example of extremely high tech, possibly even something that exceeds pretech standards. 
They may use unique local resources to do so, or have stumbled on a narrow scientific breakthrough, or still have a functional experimental manufactory.", Enemies: roll.List{ Items: []string{ "Keeper of the tech", "Offworld industrialist", "Automated defenses that suddenly come alive", "Native alien mentors", }, }, Friends: roll.List{ Items: []string{ "Curious offworld scientist", "Eager tech buyer", "Native in need of technical help", }, }, Complications: roll.List{ Items: []string{ "The tech is unreliable", "The tech only works on this world", "The tech has poorly-understood side effects", "The tech is alien in nature.", }, }, Things: roll.List{ Items: []string{ "The tech itself", "An unclaimed payment for a large shipment", "The secret blueprints for its construction", "An ancient alien R&D database", }, }, Places: roll.List{ Items: []string{ "Alien factory", "Lethal R&D center", "Tech brokerage vault", }, }, }, { Name: "Major Spaceyard", Desc: "Most worlds of tech level 4 or greater have the necessary tech and orbital facilities to build spike drives and starships. This world is blessed with a major spaceyard facility, either inherited from before the Silence or painstakingly constructed in more recent decades. 
It can build even capital-class hulls, and do so more quickly and cheaply than its neighbors.", Enemies: roll.List{ Items: []string{ "Enemy saboteur", "Industrial spy", "Scheming construction tycoon", "Aspiring ship hijacker", }, }, Friends: roll.List{ Items: []string{ "Captain stuck in drydock", "Maintenance chief", "Mad innovator", }, }, Complications: roll.List{ Items: []string{ "The spaceyard is an alien relic", "The spaceyard is burning out from overuse", "The spaceyard is alive", "The spaceyard relies on maltech to function", }, }, Things: roll.List{ Items: []string{ "Intellectual property-locked pretech blueprints", "Override keys for activating old pretech facilities", "A purchased but unclaimed spaceship.", }, }, Places: roll.List{ Items: []string{ "Hidden shipyard bay", "Surface of a partially-completed ship", "Ship scrap graveyard", }, }, }, { Name: "Mandarinate", Desc: "The planet is ruled by an intellectual elite chosen via ostensibly neutral examinations or tests. The values this system selects for may or may not have anything to do with actual practical leadership skills, and the examinations may be more or less corruptible.", Enemies: roll.List{ Items: []string{ "Corrupt test administrator", "Incompetent but highly-rated graduate", "Ruthless leader of a clan of high-testing relations", }, }, Friends: roll.List{ Items: []string{ "Crusader for test reform", "Talented but poorly-connected graduate", "Genius who tests badly", }, }, Complications: roll.List{ Items: []string{ "The test is totally unrelated to necessary governing skills", "The test was very pertinent in the past but tech or culture has changed", "The test is for a skill that is vital to maintaining society but irrelevant to day-to-day governance", "The test is a sham and passage is based on wealth or influence", }, }, Things: roll.List{ Items: []string{ "Answer key to the next test", "Lost essay of incredible merit", "Proof of cheating", }, }, Places: roll.List{ Items: []string{ "Massive 
structure full of test-taking cubicles", "School filled with desperate students", "Ornate government building decorated with scholarly quotes and academic images", }, }, }, { Name: "<NAME>", Desc: "The Terran Mandate retained its control over this world for much longer than usual, and the world may still consider itself a true inheritor of Mandate legitimacy. Most of these worlds have or had superior technology, but they may still labor under the burden of ancient restrictive tech or monitoring systems designed to prevent them from rebelling.", Enemies: roll.List{ Items: []string{ "Deranged Mandate monitoring AI", "Aspiring sector ruler", "Demagogue preaching local superiority over “traitorous rebel worlds”.", }, }, Friends: roll.List{ Items: []string{ "Idealistic do-gooder local", "Missionary for advanced Mandate tech", "Outsider seeking lost data from Mandate records", }, }, Complications: roll.List{ Items: []string{ "The monitoring system forces the locals to behave in aggressive ways toward “rebel” worlds", "The monitoring system severely hinders offworld use of their tech", "The original colonists are all dead and have been replaced by outsiders who don’t understand all the details", }, }, Things: roll.List{ Items: []string{ "Ultra-advanced pretech", "Mandate military gear", "Databank containing precious tech schematics", }, }, Places: roll.List{ Items: []string{ "Faded Mandate offices still in use", "Vault containing ancient pretech", "Carefully-maintained monument to Mandate glory", }, }, }, { Name: "Maneaters", Desc: "The locals are cannibals, either out of necessity or out of cultural preference. Some worlds may actually eat human flesh, while others simply require the rendering of humans into important chemicals or pharmaceutical compounds, perhaps to prolong the lives of ghoul overlords. 
This cannibalism plays a major role in their society.", Enemies: roll.List{ Items: []string{ "Ruthless ghoul leader", "Chieftain of a ravenous tribe", "Sophisticated degenerate preaching the splendid authenticity of cannibalism", }, }, Friends: roll.List{ Items: []string{ "Sympathetic local fleeing the fork", "Escapee from a pharmaceutical rendering plant", "Outsider chosen for dinner", "Reformer seeking to break the custom or its necessity", }, }, Complications: roll.List{ Items: []string{ "Local food or environmental conditions make human consumption grimly necessary", "The locals farm human beings", "Outsiders are expected to join in the custom", "The custom is totally unnecessary but jealously maintained by the people", }, }, Things: roll.List{ Items: []string{ "Belongings of a recent meal", "An offworlder VIP due for the menu", "A toxin that makes human flesh lethal to consumers", }, }, Places: roll.List{ Items: []string{ "Hideous human abattoir", "Extremely civilized restaurant", "Funeral-home-cum-kitchen", }, }, }, { Name: "Megacorps", Desc: "The world is dominated by classic cyberpunk-esque megacorporations, each one far more important than the vestigial national remnants that encompass them. These megacorps are usually locked in a cold war, trading and dealing with each other even as they try to strike in deniable ways. 
An over-council of corporations usually acts to bring into line any that get excessively overt in their activities.", Enemies: roll.List{ Items: []string{ "Megalomaniacal executive", "Underling looking to use the PCs as catspaws", "Ruthless mercenary who wants what the PCs have", }, }, Friends: roll.List{ Items: []string{ "Victim of megacorp scheming", "Offworlder merchant in far over their head", "Local reformer struggling to cope with megacorp indifference", }, }, Complications: roll.List{ Items: []string{ "The megacorps are the only source of something vital to life on this world", "An autonomous Mandate system acts to punish excessively overt violence", "The megacorps are struggling against much more horrible national governments", }, }, Things: roll.List{ Items: []string{ "Blackmail on a megacorp exec", "Keycodes to critical corp secrets", "Proof of corp responsibility for a heinously unacceptable public atrocity", "Data on a vital new product line coming out soon", }, }, Places: roll.List{ Items: []string{ "A place plastered in megacorp ads", "A public plaza discreetly branded", "Private corp military base", }, }, }, { Name: "Mercenaries", Desc: "The world is either famous for its mercenary bands or it is plagued by countless groups of condottieri in service to whatever magnate can afford to pay or bribe them adequately.", Enemies: roll.List{ Items: []string{ "Amoral mercenary leader", "Rich offworlder trying to buy rule of the world", "Mercenary press gang chief forcing locals into service", }, }, Friends: roll.List{ Items: []string{ "Young and idealistic mercenary chief", "Harried leader of enfeebled national army", "Offworlder trying to hire help for a noble cause", }, }, Complications: roll.List{ Items: []string{ "The mercenaries are all that stand between the locals and a hungry imperial power", "The mercenaries are remnants of a former official army", "The mercenaries hardly ever actually fight as compared to taking bribes to walk away", }, }, Things: 
roll.List{ Items: []string{ "Lost mercenary payroll shipment", "Forbidden military tech", "Proof of a band’s impending treachery against their employers", }, }, Places: roll.List{ Items: []string{ "Shabby camp of undisciplined mercs", "Burnt-out village occupied by mercenaries", "Luxurious and exceedingly well-defended merc leader villa", }, }, }, { Name: "Minimal Contact", Desc: "The locals refuse most contact with offworlders. Only a small, quarantined treaty port is provided for offworld trade, and ships can expect an exhaustive search for contraband. Local governments may be trying to keep the very existence of interstellar trade a secret from their populations, or they may simply consider offworlders too dangerous or repugnant to be allowed among the population.", Enemies: roll.List{ Items: []string{ "Customs official", "Xenophobic natives", "Existing merchant who doesn’t like competition", }, }, Friends: roll.List{ Items: []string{ "Aspiring tourist", "Anthropological researcher", "Offworld thief", "Religious missionary", }, }, Complications: roll.List{ Items: []string{ "The locals carry a disease harmless to them and lethal to outsiders", "The locals hide dark purposes from offworlders", "The locals have something desperately needed but won’t bring it into the treaty port", }, }, Things: roll.List{ Items: []string{ "Contraband trade goods", "Security perimeter codes", "Black market local products", }, }, Places: roll.List{ Items: []string{ "Treaty port bar", "Black market zone", "Secret smuggler landing site", }, }, }, { Name: "Misandry/Misogyny", Desc: "The culture on this world holds a particular gender in contempt. Members of that gender are not permitted positions of formal power, and may be restricted in their movements and activities. 
Some worlds may go so far as to scorn both traditional genders, using gengineering techniques to hybridize or alter conventional human biology.", Enemies: roll.List{ Items: []string{ "Cultural fundamentalist", "Cultural missionary to outworlders", "Local rebel driven to pointless and meaningless violence", }, }, Friends: roll.List{ Items: []string{ "Oppressed native", "Research scientist", "Offworld emancipationist", "Local reformer", }, }, Complications: roll.List{ Items: []string{ "The oppressed gender is restive against the customs", "The oppressed gender largely supports the customs", "The customs relate to some physical quality of the world", "The oppressed gender has had maltech gengineering done to “tame” them.", }, }, Things: roll.List{ Items: []string{ "Aerosol reversion formula for undoing gengineered docility", "Hidden history of the world", "Pretech gengineering equipment", }, }, Places: roll.List{ Items: []string{ "Shrine to the virtues of the favored gender", "Security center for controlling the oppressed", "Gengineering lab", }, }, }, { Name: "Night World", Desc: "The world is plunged into eternal darkness. The only life on this planet derives its energy from other sources, such as geothermal heat, extremely volatile chemical reactions in the planet’s soil, or light in a non-visible spectrum. 
Most flora and fauna is voraciously eager to consume other life.", Enemies: roll.List{ Items: []string{ "Monstrous thing from the night", "Offworlder finding the obscurity of the world convenient for dark purposes", "Mad scientist experimenting with local life", }, }, Friends: roll.List{ Items: []string{ "Curious offworlder researcher", "Hard-pressed colony leader", "High priest of a sect that finds religious significance in the night", }, }, Complications: roll.List{ Items: []string{ "Daylight comes as a cataclysmic event at very long intervals", "Light causes very dangerous reactions in native life or chemicals here", "The locals have been gengineered to exist without sight", }, }, Things: roll.List{ Items: []string{ "Rare chemicals created in the darkness", "Light source usable on this world", "Smuggler cache hidden here in ages pastP Formlessly pitch-black wilderness", "Sea without a sun", "Location defined by sounds or smells", }, }, Places: roll.List{ Items: []string{ "Formlessly pitch-black wilderness", "Sea without a sun", "Location defined by sounds or smells", }, }, }, { Name: "Nomads", Desc: "Most of the natives of this world are nomadic, usually following a traditional cycle of movement through the lands they possess. Promises of rich plunder or local environmental perils can force these groups to strike out against neighbors. 
Other groups are forced to move constantly due to unpredictable dangers that crop up on the planet.", Enemies: roll.List{ Items: []string{ "Desperate tribal leader who needs what the PCs have", "Ruthless raider chieftain", "Leader seeking to weld the nomads into an army", }, }, Friends: roll.List{ Items: []string{ "Free-spirited young nomad", "Dreamer imagining a stable life", "Offworlder enamored of the life", }, }, Complications: roll.List{ Items: []string{ "An irresistibly lethal swarm of native life forces locals to move regularly", "Ancient defense systems destroy too-long-stationary communities", "Local chemical patches require careful balancing of exposure times to avoid side effects", }, }, Things: roll.List{ Items: []string{ "Cache of rare and precious resource", "Plunder seized by a tribal raid", "Tech that makes a place safe for long-term inhabitation", }, }, Places: roll.List{ Items: []string{ "Temporary nomad camp", "Oasis or resource reserve", "Trackless waste that kills the unprepared", }, }, }, { Name: "Oceanic World", Desc: "The world is entirely or almost entirely covered with liquid water. Habitations might be floating cities, or might cling precariously to the few rocky atolls jutting up from the waves, or are planted as bubbles on promontories deep beneath the stormy surface. Survival depends on aquaculture. 
Planets with inedible alien life rely on gengineered Terran sea crops.", Enemies: roll.List{ Items: []string{ "Pirate raider", "Violent “salvager” gang", "Tentacled sea monster", }, }, Friends: roll.List{ Items: []string{ "<NAME>", "Sea hermit", "Sapient native life", }, }, Complications: roll.List{ Items: []string{ "The liquid flux confuses grav engines too badly for them to function on this world", "Sea is corrosive or toxic", "The seas are wracked by regular storms", }, }, Things: roll.List{ Items: []string{ "Buried pirate treasure", "Location of enormous schools of fish", "Pretech water purification equipment", }, }, Places: roll.List{ Items: []string{ "The only island on the planet", "Floating spaceport", "Deck of a storm-swept ship", "Undersea bubble city", }, }, }, { Name: "Out of Contact", Desc: "The natives have been entirely out of contact with the greater galaxy for centuries or longer. Perhaps the original colonists were seeking to hide from the rest of the universe, or the Silence destroyed any means of communication. It may have been so long that human origins on other worlds have regressed into a topic for legends. 
The players might be on the first offworld ship to land since the First Wave of colonization a thousand years ago.", Enemies: roll.List{ Items: []string{ "Fearful local ruler", "Zealous native cleric", "Sinister power that has kept the world isolated", }, }, Friends: roll.List{ Items: []string{ "Scheming native noble", "Heretical theologian", "UFO cultist native", }, }, Complications: roll.List{ Items: []string{ "Automatic defenses fire on ships that try to take off", "The natives want to stay out of contact", "The natives are highly vulnerable to offworld diseases", "The native language is completely unlike any known to the group", }, }, Things: roll.List{ Items: []string{ "Ancient pretech equipment", "Terran relic brought from Earth", "Logs of the original colonists", }, }, Places: roll.List{ Items: []string{ "Long-lost colonial landing site", "Court of the local ruler", "Ancient defense battery controls", }, }, }, { Name: "Outpost World", Desc: "The world is only a tiny outpost of human habitation planted by an offworld corporation or government. Perhaps the staff is there to serve as a refueling and repair stop for passing ships, or to oversee an automated mining and refinery complex. They might be there to study ancient ruins, or simply serve as a listening and monitoring post for traffic through the system. 
The outpost is likely well-equipped with defenses against casual piracy.", Enemies: roll.List{ Items: []string{ "Space-mad outpost staffer", "Outpost commander who wants it to stay undiscovered", "Undercover saboteur", }, }, Friends: roll.List{ Items: []string{ "Lonely staffer", "Fixated researcher", "Overtaxed maintenance chief", }, }, Complications: roll.List{ Items: []string{ "The alien ruin defense systems are waking up", "Atmospheric disturbances trap the group inside the outpost for a month", "Pirates raid the outpost", "The crew have become converts to a strange set of beliefs", }, }, Things: roll.List{ Items: []string{ "Alien relics", "Vital scientific data", "Secret corporate exploitation plans", }, }, Places: roll.List{ Items: []string{ "Grimy recreation room", "Refueling station", "The only building on the planet", "A “starport” of swept bare rock.", }, }, }, { Name: "Perimeter Agency", Desc: "Before the Silence, the Perimeter was a Terran-sponsored organization charged with rooting out use of maltech, technology banned in human space as too dangerous for use or experimentation. Unbraked AIs, gengineered slave species, nanotech replicators, weapons of planetary destruction… the Perimeter hunted down experimenters with a great indifference to planetary laws. 
Most Perimeter Agencies collapsed during the Silence, but a few managed to hold on to their mission, though modern Perimeter agents often find more work as conventional spies.", Enemies: roll.List{ Items: []string{ "Renegade Agency Director", "Maltech researcher", "Paranoid intelligence chief", }, }, Friends: roll.List{ Items: []string{ "Agent in need of help", "Support staffer", "“Unjustly” targeted researcher", }, }, Complications: roll.List{ Items: []string{ "The local Agency has gone rogue and now uses maltech", "The Agency archives have been compromised", "The Agency has been targeted by a maltech-using organization", "The Agency’s existence is unknown to the locals", }, }, Things: roll.List{ Items: []string{ "Agency maltech research archives", "Agency pretech spec-ops gear", "File of blackmail on local politicians", }, }, Places: roll.List{ Items: []string{ "Interrogation room", "Smoky bar", "Maltech laboratory", "Secret Agency base", }, }, }, { Name: "<NAME>", Desc: "The world is noted for an important spiritual or historical location, and might be the sector headquarters for a widespread religion or political movement. The site attracts wealthy pilgrims from throughout nearby space, and those with the money necessary to manage interstellar travel can be quite generous to the site and its keepers. 
The locals tend to be fiercely protective of the place and its reputation, and some places may forbid the entrance of those not suitably pious or devout.", Enemies: roll.List{ Items: []string{ "Saboteur devoted to a rival belief", "Bitter reformer who resents the current leadership", "Swindler conning the pilgrims", }, }, Friends: roll.List{ Items: []string{ "Protector of the holy site", "Naive offworlder pilgrim", "Outsider wanting to learn the sanctum’s inner secrets", }, }, Complications: roll.List{ Items: []string{ "The site is actually a fake", "The site is run by corrupt and venal keepers", "A natural disaster threatens the site", }, }, Things: roll.List{ Items: []string{ "Ancient relic guarded at the site", "Proof of the site’s inauthenticity", "Precious offering from a pilgrim", }, }, Places: roll.List{ Items: []string{ "Incense-scented sanctum", "Teeming crowd of pilgrims", "Imposing holy structure", }, }, }, { Name: "Pleasure World", Desc: "This world provides delights either rare or impermissible elsewhere. Matchless local beauty, stunningly gengineered natives, a wide variety of local drugs, carnal pleasures unacceptable on other worlds, or some other rare delight is readily available here. 
Most worlds are fully aware of the value of their offerings, and the prices they demand can be in credits or in less tangible recompense.", Enemies: roll.List{ Items: []string{ "Purveyor of evil delights", "Local seeking to control others with addictions", "Offworlder exploiter of native resources", }, }, Friends: roll.List{ Items: []string{ "Tourist who’s in too deep", "Native seeking a more meaningful life elsewhere", "Offworld entertainer looking for training here", }, }, Complications: roll.List{ Items: []string{ "A deeply repugnant pleasure is offered here by a culture that sees nothing wrong with it", "Certain pleasures here are dangerously addictive", "The prices here can involve enslavement or death", "The world has been seized and exploited by an imperial power", }, }, Things: roll.List{ Items: []string{ "Forbidden drug", "A contract for some unspeakable payment", "Powerful tech repurposed for hedonistic ends", }, }, Places: roll.List{ Items: []string{ "Breathtaking natural feature", "Artful but decadent salon", "Grimy den of desperate vice", }, }, }, { Name: "Police State", Desc: "The world is a totalitarian police state. Any sign of disloyalty to the planet’s rulers is punished severely, and suspicion riddles society. Some worlds might operate by Soviet-style informers and indoctrination, while more technically sophisticated worlds might rely on omnipresent cameras or braked AI “guardian angels”. 
Outworlders are apt to be treated as a necessary evil at best, and “disappeared” if they become troublesome.", Enemies: roll.List{ Items: []string{ "Secret police chief", "Scapegoating official", "Treacherous native informer", }, }, Friends: roll.List{ Items: []string{ "Rebel leader", "Offworld agitator", "Imprisoned victim", "Crime boss", }, }, Complications: roll.List{ Items: []string{ "The natives largely believe in the righteousness of the state", "The police state is automated and its “rulers” can’t shut it off", "The leaders foment a pogrom against “offworlder spies”.", }, }, Things: roll.List{ Items: []string{ "List of police informers", "Wealth taken from “enemies of the state”", "Dear Leader’s private stash", }, }, Places: roll.List{ Items: []string{ "Military parade", "Gulag", "Gray concrete housing block", "Surveillance center", }, }, }, { Name: "Post-Scarcity", Desc: "The locals have maintained sufficient Mandate-era tech to be effectively post-scarcity in their economic structure. Everyone has all the necessities and most of the desires they can imagine. Conflict now exists over the apportionment of services and terrestrial space, since anything else can be had in abundance. 
Military goods and items of mass destruction may still be restricted, and there is probably some reason that the locals do not export their vast wealth.", Enemies: roll.List{ Items: []string{ "Frenzied ideologue fighting over an idea", "Paranoid local fearing offworlder influence", "Grim reformer seeking the destruction of the “enfeebling” productive tech", }, }, Friends: roll.List{ Items: []string{ "Offworlder seeking something available only here", "Local struggling to maintain the production tech", "Native missionary seeking to bring abundance to other worlds", }, }, Complications: roll.List{ Items: []string{ "The tech causes serious side-effects on those who take advantage of it", "The tech is breaking down", "The population is growing too large", "The tech produces only certain things in abundance", }, }, Things: roll.List{ Items: []string{ "A cornucopia device", "A rare commodity that cannot be duplicated", "Contract for services", }, }, Places: roll.List{ Items: []string{ "Tiny but richly-appointed private quarters", "Market for services", "Hushed non-duped art salon", }, }, }, { Name: "Preceptor Archive", Desc: "The Preceptors of the Great Archive were a pre-Silence organization devoted to ensuring the dissemination of human culture, history, and basic technology to frontier worlds that risked losing this information during the human expansion. Most frontier planets had an Archive where natives could learn useful technical skills in addition to human history and art. 
Those Archives that managed to survive the Silence now strive to send their missionaries of knowledge to new worlds in need of their lore.", Enemies: roll.List{ Items: []string{ "Luddite native", "Offworld merchant who wants the natives kept ignorant", "Religious zealot", "Corrupted First Speaker who wants to keep a monopoly on learning", }, }, Friends: roll.List{ Items: []string{ "Preceptor Adept missionary", "Offworld scholar", "Reluctant student", "Roving Preceptor Adept", }, }, Complications: roll.List{ Items: []string{ "The local Archive has taken a very religious and mystical attitude toward their teaching", "The Archive has maintained some replicable pretech science", "The Archive has been corrupted and their teaching is incorrect", }, }, Things: roll.List{ Items: []string{ "Lost Archive database", "Ancient pretech teaching equipment", "Hidden cache of unacceptable tech", }, }, Places: roll.List{ Items: []string{ "Archive lecture hall", "Experimental laboratory", "Student-local riot", }, }, }, { Name: "<NAME>", Desc: "The capacities of human science before the Silence vastly outmatch the technology available since the Scream. The Jump Gates alone were capable of crossing hundreds of light years in a moment, and they were just one example of the results won by blending psychic artifice with pretech science. Some worlds outright worship the artifacts of their ancestors, seeing in them the work of more enlightened and perfect humanity. 
These cultists may or may not understand the operation or replication of these devices, but they seek and guard them jealously.", Enemies: roll.List{ Items: []string{ "Cult leader", "Artifact supplier", "Pretech smuggler", }, }, Friends: roll.List{ Items: []string{ "Offworld scientist", "Robbed collector", "Cult heretic", }, }, Complications: roll.List{ Items: []string{ "The cultists can actually replicate certain forms of pretech", "The cultists abhor use of the devices as “presumption on the holy”", "The cultists mistake the party’s belongings for pretech", }, }, Things: roll.List{ Items: []string{ "Pretech artifacts both functional and broken", "Religious-jargon laced pretech replication techniques", "Waylaid payment for pretech artifacts", }, }, Places: roll.List{ Items: []string{ "Shrine to nonfunctional pretech", "Smuggler’s den", "Public procession showing a prized artifact", }, }, }, { Name: "Primitive Aliens", Desc: "The world is populated by a large number of sapient aliens that have yet to develop advanced technology. The human colonists may have a friendly or hostile relationship with the aliens, but a certain intrinsic tension is likely. 
Small human colonies might have been enslaved or otherwise subjugated.", Enemies: roll.List{ Items: []string{ "Hostile alien chief", "Human firebrand", "Dangerous local predator", "Alien religious zealot", }, }, Friends: roll.List{ Items: []string{ "Colonist leader", "Peace-faction alien chief", "Planetary frontiersman", "Xenoresearcher", }, }, Complications: roll.List{ Items: []string{ "The alien numbers are huge and can overwhelm the humans whenever they so choose", "One group is trying to use the other to kill their political opponents", "The aliens are incomprehensibly strange", "One side commits an atrocity", }, }, Things: roll.List{ Items: []string{ "Alien religious icon", "Ancient alien-human treaty", "Alien technology", }, }, Places: roll.List{ Items: []string{ "Alien village", "Fortified human settlement", "Massacre site", }, }, }, { Name: "Prison Planet", Desc: "This planet is or was intended as a prison. Some such prisons were meant for specific malefactors of the Terran Mandate, while others were to contain entire “dangerous” ethnic groups or alien races. 
Some may still have warden AIs or automatic systems to prevent any unauthorized person from leaving, and any authorization permits have long since expired.", Enemies: roll.List{ Items: []string{ "Crazed warden AI", "Brutal heir to gang leadership", "Offworlder who’s somehow acquired warden powers and exploits the locals", }, }, Friends: roll.List{ Items: []string{ "Innocent local born here", "Native technician forced to maintain the very tech that imprisons them", "Offworlder trapped here by accident", }, }, Complications: roll.List{ Items: []string{ "Departure permits are a precious currency", "The prison industry still makes valuable pretech devices", "Gangs have metamorphosed into governments", "The local nobility descended from the prison staff", }, }, Things: roll.List{ Items: []string{ "A pass to get offworld", "A key to bypass ancient security devices", "Contraband forbidden by the security scanners", }, }, Places: roll.List{ Items: []string{ "Mandate-era prison block converted to government building", "Industrial facility manned by mandatory numbers of prisoners", "Makeshift shop where contraband is assembled", }, }, }, { Name: "Psionics Academy", Desc: "This world is one of the few that have managed to redevelop the basics of psychic training. Without this education, a potential psychic is doomed to either madness or death unless they refrain from using their abilities. Psionic academies are rare enough that offworlders are often sent there to study by wealthy patrons. The secrets of psychic mentorship, the protocols and techniques that allow a psychic to successfully train another, are carefully guarded at these academies. 
Most are closely affiliated with the planetary government.", Enemies: roll.List{ Items: []string{ "Corrupt psychic instructor", "Renegade student", "Mad psychic researcher", "Resentful townie", }, }, Friends: roll.List{ Items: []string{ "Offworld researcher", "Aspiring student", "Wealthy tourist", }, }, Complications: roll.List{ Items: []string{ "The academy curriculum kills a significant percentage of students", "The faculty use students as research subjects", "The students are indoctrinated as sleeper agents", "The local natives hate the academy", "The academy is part of a religion.", }, }, Things: roll.List{ Items: []string{ "Secretly developed psitech", "A runaway psychic mentor", "Psychic research prize", }, }, Places: roll.List{ Items: []string{ "Training grounds", "Experimental laboratory", "School library", "Campus hangout", }, }, }, { Name: "<NAME>", Desc: "The locals are terrified of psychics. Perhaps their history is studded with feral psychics who went on murderous rampages, or perhaps they simply nurse an unreasoning terror of those “mutant freaks”. Psychics demonstrate their powers at risk of their lives.", Enemies: roll.List{ Items: []string{ "Mental purity investigator", "Suspicious zealot", "Witch-finder", }, }, Friends: roll.List{ Items: []string{ "Hidden psychic", "Offworlder psychic trapped here", "Offworld educator", }, }, Complications: roll.List{ Items: []string{ "Psychic potential is much more common here", "Some tech is mistaken as psitech", "Natives believe certain rituals and customs can protect them from psychic powers", }, }, Things: roll.List{ Items: []string{ "Hidden psitech cache", "Possessions of convicted psychics", "Reward for turning in a psychic", }, }, Places: roll.List{ Items: []string{ "Inquisitorial chamber", "Lynching site", "Museum of psychic atrocities", }, }, }, { Name: "<NAME>", Desc: "These natives view psionic powers as a visible gift of god or sign of superiority. 
If the world has a functional psychic training academy, psychics occupy almost all major positions of power and are considered the natural and proper rulers of the world. If the world lacks training facilities, it is likely a hodgepodge of demented cults, with each one dedicated to a marginally-coherent feral prophet and their psychopathic ravings.", Enemies: roll.List{ Items: []string{ "Psychic inquisitor", "Haughty mind-noble", "Psychic slaver", "Feral prophet", }, }, Friends: roll.List{ Items: []string{ "Offworlder psychic researcher", "Native rebel", "Offworld employer seeking psychics", }, }, Complications: roll.List{ Items: []string{ "The psychic training is imperfect", "and the psychics all show significant mental illness", "The psychics have developed a unique discipline", "The will of a psychic is law", "Psychics in the party are forcibly kidnapped for “enlightening”.", }, }, Things: roll.List{ Items: []string{ "Ancient psitech", "Valuable psychic research records", "Permission for psychic training", }, }, Places: roll.List{ Items: []string{ "Psitech-imbued council chamber", "Temple to the mind", "Sanitarium-prison for feral psychics", }, }, }, { Name: "Quarantined World", Desc: "The world is under a quarantine, and space travel to and from it is strictly forbidden. 
This may be enforced by massive ground batteries that burn any interlopers from the planet’s sky, or it may be that a neighboring world runs a persistent blockade.", Enemies: roll.List{ Items: []string{ "Defense installation commander", "Suspicious patrol leader", "Crazed asteroid hermit", }, }, Friends: roll.List{ Items: []string{ "Relative of a person trapped on the world", "Humanitarian relief official", "Treasure hunter", }, }, Complications: roll.List{ Items: []string{ "The natives want to remain isolated", "The quarantine is enforced by an ancient alien installation", "The world is rife with maltech abominations", "The blockade is meant to starve everyone on the barren world.", }, }, Things: roll.List{ Items: []string{ "Defense grid key", "Bribe for getting someone out", "Abandoned alien tech", }, }, Places: roll.List{ Items: []string{ "Bridge of a blockading ship", "Defense installation control room", "Refugee camp", }, }, }, { Name: "Radioactive World", Desc: "Whether due to a legacy of atomic warfare unhindered by nuke snuffers or a simple profusion of radioactive elements, this world glows in the dark. 
Even heavy vacc suits can filter only so much of the radiation, and most natives suffer a wide variety of cancers, mutations and other illnesses without the protection of advanced medical treatments.", Enemies: roll.List{ Items: []string{ "Bitter mutant", "Relic warlord", "Desperate wouldbe escapee", }, }, Friends: roll.List{ Items: []string{ "Reckless prospector", "Offworld scavenger", "Biogenetic variety seeker", }, }, Complications: roll.List{ Items: []string{ "The radioactivity is steadily growing worse", "The planet’s medical resources break down", "The radioactivity has inexplicable effects on living creatures", "The radioactivity is the product of a malfunctioning pretech manufactory.", }, }, Things: roll.List{ Items: []string{ "Ancient atomic weaponry", "Pretech anti-radioactivity drugs", "Untainted water supply", }, }, Places: roll.List{ Items: []string{ "Mutant-infested ruins", "Scorched glass plain", "Wilderness of bizarre native life", "Glowing barrens", }, }, }, { Name: "Refugees", Desc: "The world teems with refugees, either exiles from another planet who managed to get here, or the human detritus of some local conflict that have fled to the remaining stable states. 
The natives usually regard the refugees with hostility, an attitude returned by many among their unwilling guests.", Enemies: roll.List{ Items: []string{ "Xenophobic native leader", "Refugee chief aspiring to seize the host nation", "Politician seeking to use the refugees as a weapon", }, }, Friends: roll.List{ Items: []string{ "Sympathetic refugee waif", "Local hard-pressed by refugee gangs", "Clergy seeking peace", }, }, Complications: roll.List{ Items: []string{ "The xenophobes are right that the refugees are taking over", "The refugees are right that the xenophobes want them out or dead", "Both are right", "Outside powers are using the refugees to destabilize an enemy government", "Refugee and local cultures are extremely incompatible", }, }, Things: roll.List{ Items: []string{ "Treasures brought out by fleeing refugees", "Citizenship papers", "Cache of vital refugee supplies", "Hidden arms for terrorists", }, }, Places: roll.List{ Items: []string{ "Hopeless refugee camp", "City swarming with confused strangers", "Festival full of angry locals", }, }, }, { Name: "<NAME>", Desc: "This world has the technological sophistication, natural resources, and determined polity necessary to be a regional hegemon for the sector. Nearby worlds are likely either directly subservient to it or tack carefully to avoid its anger. 
It may even be the capital of a small stellar empire.", Enemies: roll.List{ Items: []string{ "Ambitious general", "Colonial official", "Contemptuous noble", }, }, Friends: roll.List{ Items: []string{ "Diplomat", "Offworld ambassador", "Foreign spy", }, }, Complications: roll.List{ Items: []string{ "The hegemon’s influence is all that’s keeping a murderous war from breaking out on nearby worlds", "The hegemon is decaying and losing its control", "The government is riddled with spies", "The hegemon is genuinely benign", }, }, Things: roll.List{ Items: []string{ "Diplomatic carte blanche", "Deed to an offworld estate", "Foreign aid grant", }, }, Places: roll.List{ Items: []string{ "Palace or seat of government", "Salon teeming with spies", "Protest rally", "Military base", }, }, }, { Name: "Restrictive Laws", Desc: "A myriad of laws, customs, and rules constrain the inhabitants of this world, and even acts that are completely permissible elsewhere are punished severely here. The locals may provide lists of these laws to offworlders, but few non-natives can hope to master all the important intricacies.", Enemies: roll.List{ Items: []string{ "Law enforcement officer", "Outraged native", "Native lawyer specializing in peeling offworlders", "Paid snitch", }, }, Friends: roll.List{ Items: []string{ "Frustrated offworlder", "Repressed native", "Reforming crusader", }, }, Complications: roll.List{ Items: []string{ "The laws change regularly in patterns only natives understand", "The laws forbid some action vital to the party", "The laws forbid the simple existence of some party members", "The laws are secret to offworlders", }, }, Things: roll.List{ Items: []string{ "Complete legal codex", "Writ of diplomatic immunity", "Fine collection vault contents", }, }, Places: roll.List{ Items: []string{ "Courtroom", "Mob scene of outraged locals", "Legislative chamber", "Police station", }, }, }, { Name: "Revanchists", Desc: "The locals formerly owned another world, or a major nation 
on the planet formerly owned an additional region of land. Something happened to take away this control or drive out the former rulers, and they’ve never forgotten it. The locals are obsessed with reclaiming their lost lands, and will allow no questions of practicality to interfere with their cause.", Enemies: roll.List{ Items: []string{ "Demagogue whipping the locals on to a hopeless war", "Politician seeking to use the resentment for their own ends", "Local convinced the PCs are agents of the “thieving” power", "Refugee from the land bitterly demanding it be reclaimed", }, }, Friends: roll.List{ Items: []string{ "Realist local clergy seeking peace", "Politician trying to calm the public", "Third-party diplomat trying to stamp out the fire", }, }, Complications: roll.List{ Items: []string{ "The revanchists’ claim is completely just and reasonable", "The land is now occupied entirely by heirs of the conquerors", "Both sides have seized lands the other thinks are theirs", }, }, Things: roll.List{ Items: []string{ "Stock of vital resource produced by the taken land", "Relic carried out of it", "Proof that the land claim is justified or unjustified", }, }, Places: roll.List{ Items: []string{ "Memorial monument to the loss", "Cemetery of those who died in the conquest", "Public ceremony commemorating the disaster", }, }, }, { Name: "Revolutionaries", Desc: "The world is convulsed by one or more bands of revolutionaries, with some nations perhaps in the grip of a current revolution. 
Most of these upheavals can be expected only to change the general flavor of problems in the polity, but the process of getting there usually produces a tremendous amount of suffering.", Enemies: roll.List{ Items: []string{ "Blood-drenched revolutionary leader", "Blooddrenched secret police chief", "Hostile foreign agent seeking further turmoil", }, }, Friends: roll.List{ Items: []string{ "Sympathetic victim accused of revolutionary sympathies or government collaboration", "Revolutionary or state agent who now repents", "Agent of a neutral power that wants peace", }, }, Complications: roll.List{ Items: []string{ "The revolutionaries actually do seem likely to put in better rulers", "The revolutionaries are client groups that got out of hand", "The revolutionaries are clearly much worse than the government", "The revolutionaries have no real ideals beyond power and merely pretend to ideology", }, }, Things: roll.List{ Items: []string{ "List of secret revolutionary sympathizers", "Proof of rebel hypocrisy", "Confiscated wealth", }, }, Places: roll.List{ Items: []string{ "Festival that explodes into violence", "Heavily-fortified police station", "Revolutionary base hidden in the wilderness", }, }, }, { Name: "Rigid Culture", Desc: "The local culture is extremely rigid. Certain forms of behavior and belief are absolutely mandated, and any deviation from these principles is punished, or else society may be strongly stratified by birth with limited prospects for change. 
Anything which threatens the existing social order is feared and shunned.", Enemies: roll.List{ Items: []string{ "Rigid reactionary", "Wary ruler", "Regime ideologue", "Offended potentate", }, }, Friends: roll.List{ Items: []string{ "Revolutionary agitator", "Ambitious peasant", "Frustrated merchant", }, }, Complications: roll.List{ Items: []string{ "The cultural patterns are enforced by technological aids", "The culture is run by a secret cabal of manipulators", "The culture has explicit religious sanction", "The culture evolved due to important necessities that have since been forgotten", }, }, Things: roll.List{ Items: []string{ "Precious traditional regalia", "Peasant tribute", "Opulent treasures of the ruling class", }, }, Places: roll.List{ Items: []string{ "Time-worn palace", "Low-caste slums", "Bandit den", "Reformist temple", }, }, }, { Name: "<NAME>", Desc: "This world is not yet a dominant power in the sector, but it’s well on its way there. Whether through newly-blossoming economic, military, or cultural power, they’re extending their influence over their neighbors and forging new arrangements between their government and the rulers of nearby worlds.", Enemies: roll.List{ Items: []string{ "Jingoistic supremacist", "Official bent on glorious success", "Foreign agent saboteur", }, }, Friends: roll.List{ Items: []string{ "Friendly emissary to the benighted", "Hardscrabble local turned great success", "Foreign visitor seeking contacts or knowledge", }, }, Complications: roll.List{ Items: []string{ "They’re only strong because their neighbors have been weakened", "Their success is based on a fluke resource or pretech find", "They bitterly resent their neighbors as former oppressors", }, }, Things: roll.List{ Items: []string{ "Tribute shipment", "Factory or barracks emblematic of their power source", "Tech or data that will deal a blow to their rise", }, }, Places: roll.List{ Items: []string{ "Rustic town being hurled into prosperity", "Government building 
being expanded", "Starport struggling under the flow of new ships", }, }, }, { Name: "Ritual Combat", Desc: "The locals favor some form of stylized combat to resolve disputes, provide entertainment, or settle religious differences. This combat is probably not normally lethal unless it’s reserved for a specific disposable class of slaves or professionals. Some combat may involve mastery of esoteric weapons and complex arenas, while other forms might require nothing more than a declaration in the street and a drawn gun.", Enemies: roll.List{ Items: []string{ "Bloodthirsty local champion", "Ambitious gladiator stable owner", "Xenophobic master fighter", }, }, Friends: roll.List{ Items: []string{ "Peace-minded foreign missionary", "Temperate defender of the weak", "Local eager to learn of offworld fighting styles", }, }, Complications: roll.List{ Items: []string{ "The required weapons are strange pretech artifacts", "Certain classes are forbidden from fighting and require champions", "Loss doesn’t mean death but it does mean ritual scarring or property loss", }, }, Things: roll.List{ Items: []string{ "Magnificent weapon", "Secret book of martial techniques", "Token signifying immunity to ritual combat challenges", "Prize won in bloody battle", }, }, Places: roll.List{ Items: []string{ "Area full of cheering spectators", "Dusty street outside a saloon", "Memorial for fallen warriors", }, }, }, { Name: "Robots", Desc: "The world has a great many robots on it. Most bots are going to be non-sentient expert systems, though an AI with enough computing resources can control many bots at once, and some worlds may have developed VIs to a degree that individual bots can seem (or be) sentient. 
Some worlds might even be ruled by metal overlords, ones which do not need to be sentient so long as they have overwhelming force.", Enemies: roll.List{ Items: []string{ "Hostile robot master", "Robot greedy to seize offworld tech", "Robot fallen in love with the PC’s ship", "Oligarch whose factories build robots", }, }, Friends: roll.List{ Items: []string{ "Data-seeking robot", "Plucky young robot tech", "Local being pushed out of a job by robots", }, }, Complications: roll.List{ Items: []string{ "The robots are only partially controlled", "The robots are salvaged and originally meant for a much darker use", "The robots require a rare material that the locals fight over", "The robots require the planet’s specific infrastructure so cannot be exported", }, }, Things: roll.List{ Items: []string{ "Prototype robot", "Secret robot override codes", "Vast cache of robot-made goods", "Robot-destroying pretech weapon", }, }, Places: roll.List{ Items: []string{ "Humming robotic factory", "Stark robotic “barracks”", "House crowded with robot servants and only one human owner", }, }, }, { Name: "Seagoing Cities", Desc: "Either the world is entirely water or else the land is simply too dangerous for most humans. Human settlement on this world consists of a number of floating cities that follow the currents and the fish. 
These city-ships might have been purpose-built for their task, or they could be jury-rigged conglomerations of ships and structures thrown together when the need for seagoing life become apparent to the locals.", Enemies: roll.List{ Items: []string{ "Pirate city lord", "Mer-human raider chieftain", "Hostile landsman noble", "Enemy city saboteur", }, }, Friends: roll.List{ Items: []string{ "City navigator", "Scout captain", "Curious mer-human", "Hard-pressed ship-city engineer", }, }, Complications: roll.List{ Items: []string{ "The seas are not water", "The fish schools have vanished and the city faces starvation", "Terrible storms drive the city into the glacial regions", "Suicide ships ram the city’s hull", }, }, Things: roll.List{ Items: []string{ "Giant pearls with mysterious chemical properties", "Buried treasure", "Vital repair materials", }, }, Places: roll.List{ Items: []string{ "Bridge of the city", "Storm-tossed sea", "A bridge fashioned of many small boats.", }, }, }, { Name: "<NAME>", Desc: "Something on this planet has the potential to create enormous havoc for the inhabitants if it is not kept safely contained by its keepers. 
Whether a massive seismic fault line suppressed by pretech terraforming technology, a disease that has to be quarantined within hours of discovery, or an ancient alien relic that requires regular upkeep in order to prevent planetary catastrophe, the menace is a constant shadow on the fearful populace.", Enemies: roll.List{ Items: []string{ "Hostile outsider bent on freeing the menace", "Misguided fool who thinks he can use it", "Reckless researcher who thinks he can fix it", }, }, Friends: roll.List{ Items: []string{ "Keeper of the menace", "Student of its nature", "Victim of the menace", }, }, Complications: roll.List{ Items: []string{ "The menace would bring great wealth along with destruction", "The menace is intelligent", "The natives don’t all believe in the menace", }, }, Things: roll.List{ Items: []string{ "A key to unlock the menace", "A precious byproduct of the menace", "The secret of the menace’s true nature", }, }, Places: roll.List{ Items: []string{ "Guarded fortress containing the menace", "Monitoring station", "Scene of a prior outbreak of the menace", }, }, }, { Name: "<NAME>", Desc: "The world is actually run by a hidden cabal, acting through their catspaws in the visible government. 
For one reason or another, this group finds it imperative that they not be identified by outsiders, and in some cases even the planet’s own government may not realize that they’re actually being manipulated by hidden masters.", Enemies: roll.List{ Items: []string{ "An agent of the cabal", "Government official who wants no questions asked", "Willfully blinded local", }, }, Friends: roll.List{ Items: []string{ "Paranoid conspiracy theorist", "Machiavellian gamesman within the cabal", "Interstellar investigator", }, }, Complications: roll.List{ Items: []string{ "The secret masters have a benign reason for wanting secrecy", "The cabal fights openly amongst itself", "The cabal is recruiting new members", }, }, Things: roll.List{ Items: []string{ "A dossier of secrets on a government official", "A briefcase of unmarked credit notes", "The identity of a cabal member", }, }, Places: roll.List{ Items: []string{ "Smoke-filled room", "Shadowy alleyway", "Secret underground bunker", }, }, }, { Name: "Sectarians", Desc: "The world is torn by violent disagreement between sectarians of a particular faith. Each views the other as a damnable heresy in need of extirpation. Local government may be able to keep open war from breaking out, but the poisonous hatred divides communities. 
The nature of the faith may be religious, or it may be based on some secular ideology.", Enemies: roll.List{ Items: []string{ "Paranoid believer", "Native convinced the party is working for the other side", "Absolutist ruler", }, }, Friends: roll.List{ Items: []string{ "Reformist clergy", "Local peacekeeping official", "Offworld missionary", "Exhausted ruler", }, }, Complications: roll.List{ Items: []string{ "The conflict has more than two sides", "The sectarians hate each other for multiple reasons", "The sectarians must cooperate or else life on this world is imperiled", "The sectarians hate outsiders more than they hate each other", "The differences in sects are incomprehensible to an outsider", }, }, Things: roll.List{ Items: []string{ "Ancient holy book", "Incontrovertible proof", "Offering to a local holy man", }, }, Places: roll.List{ Items: []string{ "Sectarian battlefield", "Crusading temple", "Philosopher’s salon", "Bitterly divided village", }, }, }, { Name: "Seismic Instability", Desc: "The local land masses are remarkably unstable, and regular earthquakes rack the surface. Local construction is either advanced enough to sway and move with the vibrations or primitive enough that it is easily rebuilt. 
Severe volcanic activity may be part of the instability.", Enemies: roll.List{ Items: []string{ "Earthquake cultist", "Hermit seismologist", "Burrowing native life form", "Earthquake-inducing saboteur", }, }, Friends: roll.List{ Items: []string{ "Experimental construction firm owner", "Adventurous volcanologist", "Geothermal prospector", }, }, Complications: roll.List{ Items: []string{ "The earthquakes are caused by malfunctioning pretech terraformers", "They’re caused by alien technology", "They’re restrained by alien technology that is being plundered by offworlders", "The earthquakes are used to generate enormous amounts of energy.", }, }, Things: roll.List{ Items: []string{ "Earthquake generator", "Earthquake suppressor", "Mineral formed at the core of the world", "Earthquake-proof building schematics", }, }, Places: roll.List{ Items: []string{ "Volcanic caldera", "Village during an earthquake", "Mud slide", "Earthquake opening superheated steam fissures", }, }, }, { Name: "<NAME>", Desc: "This world is being systematically contained by an outside power. Some ancient autonomous defense grid, robot law enforcement, alien artifact, or other force is preventing the locals from developing certain technology, or using certain devices, or perhaps from developing interstellar flight. 
This limit may or may not apply to offworlders; in the former case, the PCs may have to figure out a way to beat the shackles simply to escape the world.", Enemies: roll.List{ Items: []string{ "Passionless jailer-AI", "Paranoid military grid AI", "Robot overlord", "Enigmatic alien master", }, }, Friends: roll.List{ Items: []string{ "Struggling local researcher", "Offworlder trapped here", "Scientist with a plan to break the chains", }, }, Complications: roll.List{ Items: []string{ "The shackles come off for certain brief windows of time", "The locals think the shackles are imposed by God", "An outside power greatly profits from the shackles", "The rulers are exempt from the shackles", }, }, Things: roll.List{ Items: []string{ "Keycode to bypass the shackle", "Tech shielded from the shackle", "Exportable version of the shackle that can affect other worlds", }, }, Places: roll.List{ Items: []string{ "Grim high-tech control center", "Factory full of workaround tech", "Temple to the power or entity that imposed the shackle", }, }, }, { Name: "<NAME>", Desc: "The world’s dominant society has lost faith in itself. Whether through some all-consuming war, great catastrophe, overwhelming outside culture, or religious collapse, the natives no longer believe in their old values, and search desperately for something new. 
Fierce conflict often exists between the last believers in the old dispensation and the nihilistic or searching disciples of the new age.", Enemies: roll.List{ Items: []string{ "Zealot who blames outsiders for the decay", "Nihilistic warlord", "Offworlder looking to exploit the local despair", }, }, Friends: roll.List{ Items: []string{ "Struggling messenger of a new way", "Valiant paragon of a fading tradition", "Local going through the motions of serving a now-irrelevant role", }, }, Complications: roll.List{ Items: []string{ "A massive war discredited all the old values", "Outside powers are working to erode societal confidence for their own benefit", "A local power is profiting greatly from the despair", "The old ways were meant to aid survival on this world and their passing is causing many new woes", }, }, Things: roll.List{ Items: []string{ "Relic that would inspire a renaissance", "Art that would inspire new ideas", "Priceless artifact of a now-scorned belief", }, }, Places: roll.List{ Items: []string{ "Empty temple", "Crowded den of obliviating vice", "Smoky hall full of frantic speakers", }, }, }, { Name: "<NAME>", Desc: "Some extremely important resource is exported from this world and this world alone. It’s unlikely that the substance is critical for building spike drives unless this world is also the first to begin interstellar flight, but it may be critical to other high-tech processes or devices. 
The locals make a large amount of money off this trade and control of it is of critical importance to the planet’s rulers, and potentially to outside powers.", Enemies: roll.List{ Items: []string{ "Resource oligarch", "Ruthless smuggler", "Resource-controlling warlord", "Foreign agent seeking to subvert local government", }, }, Friends: roll.List{ Items: []string{ "Doughty resource miner", "Researcher trying to synthesize the stuff", "Small-scale resource producer", "Harried starport trade overseer", }, }, Complications: roll.List{ Items: []string{ "The substance is slow poison to process", "The substance is created by hostile alien natives", "The substance is very easy to smuggle in usable amounts", "Only the natives have the genes or tech to extract it effectively", }, }, Things: roll.List{ Items: []string{ "Cache of processed resource", "Trade permit to buy a load of it", "A shipment of nigh-undetectably fake substance", }, }, Places: roll.List{ Items: []string{ "Bustling resource extraction site", "Opulent palace built with resource money", "Lazy town square where everyone lives on resource payments", }, }, }, { Name: "<NAME>", Desc: "The natives here produce something that is both fabulously valuable and strictly forbidden elsewhere in the sector. It may be a lethally addictive drug, forbidden gengineering tech, vat-grown “perfect slaves”, or a useful substance that can only be made through excruciating human suffering. 
This treasure is freely traded on the world, but bringing it elsewhere is usually an invitation to a long prison stay or worse.", Enemies: roll.List{ Items: []string{ "Maker of a vile commodity", "Smuggler for a powerful offworlder", "Depraved offworlder here for “fun”", "Local warlord who controls the treasure", }, }, Friends: roll.List{ Items: []string{ "Reformer seeking to end its use", "Innovator trying to repurpose the treasure in innocent ways", "Wretched addict unwillingly prey to the treasure", }, }, Complications: roll.List{ Items: []string{ "The treasure is extremely hard to smuggle", "Its use visibly marks a user", "The natives consider it for their personal use only", }, }, Things: roll.List{ Items: []string{ "Load of the forbidden good", "Smuggling tech that could hide the good perfectly", "Blackmail data on offworld buyers of the good", }, }, Places: roll.List{ Items: []string{ "Den where the good is used", "Market selling the good to locals and a few outsiders", "Factory or processing area where the good is created", }, }, }, { Name: "<NAME>", Desc: "This world was marginal for human habitation when it was discovered, but the Mandate or the early government put in pretech terraforming engines to correct its more extreme qualities. The terraforming did not entirely work, either failing of its own or suffering the destruction of the engines during the Silence. 
The natives are only partly adapted to the world’s current state, and struggle with the environment.", Enemies: roll.List{ Items: []string{ "Brutal ruler who cares only for their people", "Offworlder trying to loot the damaged engines", "Warlord trying to seize limited habitable land", }, }, Friends: roll.List{ Items: []string{ "Local trying to fix the engines", "Offworlder student of the engines", "World-wise native survivor", }, }, Complications: roll.List{ Items: []string{ "The engines produced too much of something instead of too little", "The engines were hijacked by aliens with different preferences", "It was discovered that an Earth-like environment would eventually cause a catastrophic disaster", }, }, Things: roll.List{ Items: []string{ "Parts to repair or restore the engines", "Lootable pretech fragments", "Valuable local tech devised to cope with the world", }, }, Places: roll.List{ Items: []string{ "Zone of tolerable gravity or temperature", "Native settlement built to cope with the environment", "Massive ruined terraforming engine", }, }, }, { Name: "Theocracy", Desc: "The planet is ruled by the priesthood of the predominant religion or ideology. The rest of the locals may or may not be terribly pious, but the clergy have the necessary military strength, popular support or control of resources to maintain their rule. 
Alternative faiths or incompatible ideologies are likely to be both illegal and socially unacceptable.", Enemies: roll.List{ Items: []string{ "Decadent priest-ruler", "Zealous inquisitor", "Relentless proselytizer", "True Believer", }, }, Friends: roll.List{ Items: []string{ "Heretic", "Offworld theologian", "Atheistic merchant", "Desperate commoner", }, }, Complications: roll.List{ Items: []string{ "The theocracy actually works well", "The theocracy is decadent and hated by the common folk", "The theocracy is divided into mutually hostile sects", "The theocracy is led by aliens", }, }, Things: roll.List{ Items: []string{ "Precious holy text", "Martyr’s bones", "Secret church records", "Ancient church treasures", }, }, Places: roll.List{ Items: []string{ "Glorious temple", "Austere monastery", "Academy for ideological indoctrination", "Decadent pleasure-cathedral", }, }, }, { Name: "Tomb World", Desc: "Tomb worlds are planets that were once inhabited by humans before the Silence. The sudden collapse of the jump gate network and the inability to bring in the massive food supplies required by the planet resulted in starvation, warfare, and death. Most tomb worlds are naturally hostile to human habitation and could not raise sufficient crops to maintain life. 
The few hydroponic facilities were usually destroyed in the fighting, and all that is left now are ruins, bones, and silence.", Enemies: roll.List{ Items: []string{ "Demented survivor tribe chieftain", "Avaricious scavenger", "Automated defense system", "Native predator", }, }, Friends: roll.List{ Items: []string{ "Scavenger Fleet captain", "Archaeologist", "Salvaging historian", "Xenophilic native survivor", }, }, Complications: roll.List{ Items: []string{ "The ruins are full of booby-traps left by the final inhabitants", "The world’s atmosphere quickly degrades anything in an opened building", "A handful of desperate natives survived the Silence", "The structures are unstable and collapsing", }, }, Things: roll.List{ Items: []string{ "Lost pretech equipment", "Tech caches", "Stores of unused munitions", "Ancient historical data", }, }, Places: roll.List{ Items: []string{ "Crumbling hive-city", "City square carpeted in bones", "Ruined hydroponic facility", "Cannibal tribe’s lair", "Dead orbital jump gate", }, }, }, { Name: "<NAME>", Desc: "This world is a major crossroads for local interstellar trade. It is well-positioned at the nexus of several short-drill trade routes, and has facilities for easy transfer of valuable cargoes and the fueling and repairing of starships. 
The natives are accustomed to outsiders, and a polyglot mass of people from every nearby world can be found trading here.", Enemies: roll.List{ Items: []string{ "Cheating merchant", "Thieving dockworker", "Commercial spy", "Corrupt customs official", }, }, Friends: roll.List{ Items: []string{ "Rich tourist", "Hardscrabble free trader", "Merchant prince in need of catspaws", "Friendly spaceport urchin", }, }, Complications: roll.List{ Items: []string{ "An outworlder faction schemes to seize the trade hub", "Saboteurs seek to blow up a rival’s warehouses", "Enemies are blockading the trade routes", "Pirates lace the hub with spies", }, }, Things: roll.List{ Items: []string{ "Voucher for a warehouse’s contents", "Insider trading information", "Case of precious offworld pharmaceuticals", "Box of legitimate tax stamps indicating customs dues have been paid.", }, }, Places: roll.List{ Items: []string{ "Raucous bazaar", "Elegant restaurant", "Spaceport teeming with activity", "Foggy street lined with warehouses", }, }, }, { Name: "Tyranny", Desc: "The local government is brutal and indifferent to the will of the people. Laws may or may not exist, but the only one that matters is the whim of the rulers on any given day. Their minions swagger through the streets while the common folk live in terror of their appetites. 
The only people who stay wealthy are friends and servants of the ruling class.", Enemies: roll.List{ Items: []string{ "Debauched autocrat", "Sneering bully-boy", "Soulless government official", "Occupying army officer", }, }, Friends: roll.List{ Items: []string{ "Conspiring rebel", "Oppressed merchant", "Desperate peasant", "Inspiring religious leader", }, }, Complications: roll.List{ Items: []string{ "The tyrant rules with vastly superior technology", "The tyrant is a figurehead for a cabal of powerful men and women", "The people are resigned to their suffering", "The tyrant is hostile to “meddlesome outworlders”.", }, }, Things: roll.List{ Items: []string{ "Plundered wealth", "Beautiful toys of the elite", "Regalia of rulership", }, }, Places: roll.List{ Items: []string{ "Impoverished village", "Protest rally massacre", "Decadent palace", "Religious hospital for the indigent", }, }, }, { Name: "<NAME>", Desc: "Artificial intelligences are costly and difficult to create, requiring a careful sequence of “growth stages” in order to bring them to sentience before artificial limits on cognition speed and learning development are installed. These “brakes” prevent runaway cognition metastasis. This world has an “unbraked AI” on it, probably with a witting or unwitting corps of servants. 
Unbraked AIs are quite insane, but they learn and reason with a speed impossible for humans, and can demonstrate a truly distressing subtlety.", Enemies: roll.List{ Items: []string{ "AI Cultist", "Maltech researcher", "Government official dependent on the AI", }, }, Friends: roll.List{ Items: []string{ "Perimeter agent", "AI researcher", "Braked AI", }, }, Complications: roll.List{ Items: []string{ "The AI’s presence is unknown to the locals", "The locals depend on the AI for some vital service", "The AI appears to be harmless", "The AI has fixated on the group’s ship’s computer", "The AI wants transport offworld", }, }, Things: roll.List{ Items: []string{ "The room-sized AI core itself", "Maltech research files", "Perfectly tabulated blackmail on government officials", "Pretech computer circuitry", }, }, Places: roll.List{ Items: []string{ "Municipal computing banks", "Cult compound", "Repair center", "Ancient hardcopy library", }, }, }, { Name: "Urbanized Surface", Desc: "The world’s land area is covered with buildings that extend downward for multiple levels. Such worlds either have a population in the trillions, extremely little land area, or are largely-abandoned due to some past catastrophe. 
Agriculture and resource extraction are part of the urban complex, and there may be an advanced maintenance system that may not be entirely under the control of present natives.", Enemies: roll.List{ Items: []string{ "Maintenance AI that hates outsiders", "Tyrant of a habitation block", "Deep-dwelling prophet who considers “the sky” a blasphemy to be quelled", }, }, Friends: roll.List{ Items: []string{ "Local yearning for wild spaces", "Grubby urchin of the underlevels", "Harried engineer trying to maintain ancient works", "Grizzled hab cop", }, }, Complications: roll.List{ Items: []string{ "The urban blocks are needed to survive the environment", "The blocks were part of an ancient device of world-spanning size", "The blocks require constant maintenance to avoid dangerous types of decay", }, }, Things: roll.List{ Items: []string{ "Massively efficient power source", "Map of the secret ways of a zone", "Passkey into restricted hab block areas", }, }, Places: roll.List{ Items: []string{ "Giant hab block now devoid of inhabitants", "Chemical-reeking underway", "Seawater mine full of salt and massive flowing channels", }, }, }, { Name: "Utopia", Desc: "Natural and social conditions on this world have made it a paradise for its inhabitants, a genuine utopia of happiness and fulfillment. This is normally the result of drastic human engineering, including brain-gelding, neurochemical control, personality curbs, or complete “humanity” redefinitions. 
Even so, the natives are extremely happy with their lot, and may wish to extend that joy to poor, sad outsiders.", Enemies: roll.List{ Items: []string{ "Compassionate neurotherapist", "Proselytizing native missionary to outsiders", "Brutal tyrant who rules through inexorable happiness", }, }, Friends: roll.List{ Items: []string{ "Deranged malcontent", "Bloody-handed guerrilla leader of a rebellion of madmen", "Outsider trying to find a way to reverse the utopian changes", }, }, Complications: roll.List{ Items: []string{ "The natives really are deeply and contentedly happy with their altered lot", "The utopia produces something that attracts others", "The utopia works on converting outsiders through persuasion and generosity", "The utopia involves some sacrifice that’s horrifying to non-members", }, }, Things: roll.List{ Items: []string{ "Portable device that applies the utopian change", "Plans for a device that would destroy the utopia", "Goods created joyfully by the locals", }, }, Places: roll.List{ Items: []string{ "Plaza full of altered humans", "Social ritual site", "Secret office where “normal” humans rule", }, }, }, { Name: "Warlords", Desc: "The world is plagued by warlords. Numerous powerful men and women control private armies sufficiently strong to cow whatever local government may exist. On the lands they claim, their word is law. Most spend their time oppressing their own subjects and murderously pillaging those of their neighbors. 
Most like to wrap themselves in the mantle of ideology, religious fervor, or an ostensibly legitimate right to rule.", Enemies: roll.List{ Items: []string{ "Warlord", "Avaricious lieutenant", "Expensive assassin", "Aspiring minion", }, }, Friends: roll.List{ Items: []string{ "Vengeful commoner", "Government military officer", "Humanitarian aid official", "Village priest", }, }, Complications: roll.List{ Items: []string{ "The warlords are willing to cooperate to fight mutual threats", "The warlords favor specific religions or races over others", "The warlords are using substantially more sophisticated tech than others", "Some of the warlords are better rulers than the government", }, }, Things: roll.List{ Items: []string{ "Weapons cache", "Buried plunder", "A warlord’s personal battle harness", "Captured merchant shipping", }, }, Places: roll.List{ Items: []string{ "Gory battlefield", "Burnt-out village", "Barbaric warlord palace", "Squalid refugee camp", }, }, }, { Name: "Xenophiles", Desc: "The natives of this world are fast friends with a particular alien race. The aliens may have saved the planet at some point in the past, or awed the locals with superior tech or impressive cultural qualities. 
The aliens might even be the ruling class on the planet.", Enemies: roll.List{ Items: []string{ "Offworld xenophobe", "Suspicious alien leader", "Xenocultural imperialist", }, }, Friends: roll.List{ Items: []string{ "Benevolent alien", "Native malcontent", "Gone-native offworlder", }, }, Complications: roll.List{ Items: []string{ "The enthusiasm is due to alien psionics or tech", "The enthusiasm is based on a lie", "The aliens strongly dislike their “groupies”", "The aliens feel obliged to rule humanity for its own good", "Humans badly misunderstand the aliens", }, }, Things: roll.List{ Items: []string{ "Hybrid alien-human tech", "Exotic alien crafts", "Sophisticated xenolinguistic and xenocultural research data", }, }, Places: roll.List{ Items: []string{ "Alien district", "Alien-influenced human home", "Cultural festival celebrating alien artist", }, }, }, { Name: "Xenophobes", Desc: "The natives are intensely averse to dealings with outworlders. Whether through cultural revulsion, fear of tech contamination, or a genuine immunodeficiency, the locals shun foreigners from offworld and refuse to have anything to do with them beyond the bare necessities of contact. 
Trade may or may not exist on this world, but if it does, it is almost certainly conducted by a caste of untouchables and outcasts.", Enemies: roll.List{ Items: []string{ "Revulsed local ruler", "Native convinced some wrong was done to him", "Cynical demagogue", }, }, Friends: roll.List{ Items: []string{ "Curious native", "Exiled former ruler", "Local desperately seeking outworlder help", }, }, Complications: roll.List{ Items: []string{ "The natives are symptomless carriers of a contagious and dangerous disease", "The natives are exceptionally vulnerable to offworld diseases", "The natives require elaborate purification rituals after speaking to an offworlder or touching them", "The local ruler has forbidden any mercantile dealings with outworlders", }, }, Things: roll.List{ Items: []string{ "Jealously-guarded precious relic", "Local product under export ban", "Esoteric local technology", }, }, Places: roll.List{ Items: []string{ "Sealed treaty port", "Public ritual not open to outsiders", "Outcaste slum home", }, }, }, { Name: "Zombies", Desc: "This menace may not take the form of shambling corpses, but some disease, alien artifact, or crazed local practice produces men and women with habits similar to those of murderous cannibal undead. 
These outbreaks may be regular elements in local society, either provoked by some malevolent creators or the consequence of some local condition.", Enemies: roll.List{ Items: []string{ "Soulless maltech biotechnology cult", "Sinister governmental agent", "Crazed zombie cultist", }, }, Friends: roll.List{ Items: []string{ "Survivor of an outbreak", "Doctor searching for a cure", "Rebel against the secret malefactors", }, }, Complications: roll.List{ Items: []string{ "The zombies retain human intelligence", "The zombies can be cured", "The process is voluntary among devotees", "The condition is infectious", }, }, Things: roll.List{ Items: []string{ "Cure for the condition", "Alien artifact that causes it", "Details of the cult’s conversion process", }, }, Places: roll.List{ Items: []string{ "House with boarded-up windows", "Dead city", "Fortified bunker that was overrun from within", }, }, }, }
content/world.go
0.661376
0.406567
world.go
starcoder
package main

import (
	rl "github.com/gen2brain/raylib-go/raylib"
	"math"
)

// ZOOM_FACTOR is the multiplicative zoom step applied per mouse-wheel notch.
const ZOOM_FACTOR = 1.2

// NewGolState builds a GoLState with updates paused, a 10-frame update
// period, and the camera targeting the origin at 10x zoom.
func NewGolState(world World, rules Rules) GoLState {
	goLState := GoLState{
		updateEnabled:    false,
		updateTickPeriod: 10,
		updateTicks:      0,
		camera2d:         rl.Camera2D{},
		world:            world,
		rules:            rules,
		targetBackup:     rl.Vector2{},
		dragInfo: DragInfo{
			enabled:         false,
			started:         false,
			done:            false,
			startPosition:   rl.Vector2{},
			currentPosition: rl.Vector2{},
		},
	}
	goLState.camera2d.Target.X = 0
	goLState.camera2d.Target.Y = 0
	goLState.camera2d.Zoom = 10
	return goLState
}

// UpdateWorld advances the world one step using the configured rules.
func (state *GoLState) UpdateWorld() {
	(state.world).Update(state.rules)
}

// DragInfo tracks the lifecycle of a single mouse-button drag gesture.
type DragInfo struct {
	enabled         bool       // button is currently held down
	started         bool       // button went down this frame
	done            bool       // button was released this frame
	startPosition   rl.Vector2 // screen position where the drag began
	currentPosition rl.Vector2 // latest screen position while dragging
}

// GoLState bundles the simulation, camera, and input state of the app.
type GoLState struct {
	updateTickPeriod int32 // frames between world updates
	updateTicks      int32 // frames elapsed since the last world update
	camera2d         rl.Camera2D
	updateEnabled    bool
	world            World
	targetBackup     rl.Vector2 // camera target captured when a drag starts
	rules            Rules
	dragInfo         DragInfo
}

// ToggleWorldUpdate pauses or resumes the simulation.
func (state *GoLState) ToggleWorldUpdate() {
	state.updateEnabled = !state.updateEnabled
}

// IncreaseUpdateSpeed shortens the update period by 5 frames (minimum 1).
func (state *GoLState) IncreaseUpdateSpeed() {
	state.updateTickPeriod -= 5
	if state.updateTickPeriod <= 0 {
		state.updateTickPeriod = 1
	}
}

// DecreaseUpdateSpeed lengthens the update period by 5 frames (maximum 60).
func (state *GoLState) DecreaseUpdateSpeed() {
	state.updateTickPeriod += 5
	if state.updateTickPeriod >= 60 {
		state.updateTickPeriod = 60
	}
}

// UpdateCameraOffset pins the camera origin to the window center so that
// zooming and panning are relative to the middle of the screen.
func (state *GoLState) UpdateCameraOffset() {
	state.camera2d.Offset.X = float32(rl.GetScreenWidth()) * 0.5
	state.camera2d.Offset.Y = float32(rl.GetScreenHeight()) * 0.5
}

// HandleMouse processes zoom, click, and drag input for this frame.
func (state *GoLState) HandleMouse() {
	state.HandleMouseZoom()
	state.HandleMouseClick()
	state.HandleMouseDrag()
}

// HandleInputs processes all keyboard and mouse input for this frame.
func (state *GoLState) HandleInputs() {
	state.HandleKeys()
	state.HandleMouse()
}

// HandleMouseZoom scales the camera by ZOOM_FACTOR^wheelMove and shifts the
// target so the world point under the cursor stays under the cursor.
func (state *GoLState) HandleMouseZoom() {
	wheelMove := rl.GetMouseWheelMove()
	if wheelMove == 0 {
		return
	}
	mousePosition := rl.GetScreenToWorld2D(rl.GetMousePosition(), state.camera2d)
	zoomFactor := float32(math.Pow(ZOOM_FACTOR, float64(wheelMove)))
	state.camera2d.Zoom *= zoomFactor
	// Compensate the target by the cursor's offset scaled by the zoom change.
	state.camera2d.Target.X -= (mousePosition.X - state.camera2d.Target.X) * (1 - zoomFactor)
	state.camera2d.Target.Y -= (mousePosition.Y - state.camera2d.Target.Y) * (1 - zoomFactor)
}

// HandleMouseDrag pans the camera while the middle mouse button is held,
// anchoring the pan to the camera target saved when the drag started.
func (state *GoLState) HandleMouseDrag() {
	state.dragInfo.Update(rl.MouseMiddleButton)
	if state.dragInfo.started {
		state.targetBackup = state.camera2d.Target
	}
	if state.dragInfo.enabled {
		start := rl.GetScreenToWorld2D(state.dragInfo.startPosition, state.camera2d)
		current := rl.GetScreenToWorld2D(state.dragInfo.currentPosition, state.camera2d)
		state.camera2d.Target.X = state.targetBackup.X - current.X + start.X
		state.camera2d.Target.Y = state.targetBackup.Y - current.Y + start.Y
	}
}

// HandleKeys maps keyboard input: space toggles updates, up/down adjust the
// update speed, and home resets the zoom.
func (state *GoLState) HandleKeys() {
	if rl.IsKeyReleased(rl.KeySpace) {
		state.ToggleWorldUpdate()
	}
	if rl.IsKeyReleased(rl.KeyUp) {
		state.IncreaseUpdateSpeed()
	}
	if rl.IsKeyReleased(rl.KeyDown) {
		state.DecreaseUpdateSpeed()
	}
	if rl.IsKeyPressed(rl.KeyHome) {
		state.ResetZoom()
	}
}

// Tick counts frames and, when updates are enabled, advances the world once
// every updateTickPeriod frames.
func (state *GoLState) Tick() {
	if state.updateEnabled {
		state.updateTicks++
		if state.updateTicks >= state.updateTickPeriod {
			state.UpdateWorld()
			state.updateTicks = 0
		}
	}
}

// Draw renders the world.
func (state *GoLState) Draw() {
	state.world.Draw()
}

// HandleMouseClick sets the cell under the cursor: left button sets it true,
// right button sets it false. If both (or neither) are down, nothing happens.
func (state *GoLState) HandleMouseClick() {
	leftDown := rl.IsMouseButtonDown(rl.MouseLeftButton)
	rightDown := rl.IsMouseButtonDown(rl.MouseRightButton)
	if leftDown == rightDown {
		return
	}
	position := rl.GetScreenToWorld2D(rl.GetMousePosition(), state.camera2d)
	x := int32(position.X)
	y := int32(position.Y)
	state.world.SetState(x, y, leftDown)
}

// ResetZoom asks the world for a zoom level fitting the current window size.
func (state *GoLState) ResetZoom() {
	state.camera2d.Zoom = state.world.EstimateZoom(rl.GetScreenWidth(), rl.GetScreenHeight())
}

// Update refreshes the drag state for the given mouse button this frame.
func (dragInfo *DragInfo) Update(button int32) {
	dragInfo.started = rl.IsMouseButtonPressed(button)
	dragInfo.done = rl.IsMouseButtonReleased(button)
	dragInfo.enabled = rl.IsMouseButtonDown(button)
	if dragInfo.started {
		dragInfo.startPosition = rl.GetMousePosition()
	}
	if dragInfo.enabled {
		dragInfo.currentPosition = rl.GetMousePosition()
	}
}
golstate.go
0.532425
0.444384
golstate.go
starcoder
package utils

import (
	"math/big"
	"github.com/daoleno/uniswapv3-sdk/constants"
)

// MaxFee is the fee denominator: fee rates (feePips) are expressed in
// hundredths of a basis point, i.e. parts per 1e6.
var MaxFee = new(big.Int).Exp(big.NewInt(10), big.NewInt(6), nil)

// ComputeSwapStep computes a single swap step within one price range.
//
//   sqrtRatioCurrentX96 - current sqrt price (Q64.96 fixed point)
//   sqrtRatioTargetX96  - price limit for this step (Q64.96)
//   liquidity           - usable in-range liquidity
//   amountRemaining     - amount left to swap; >= 0 means exact input,
//                         < 0 means exact output
//   feePips             - fee rate in hundredths of a bip
//
// It returns the price after the step, the input consumed, the output
// produced, and the fee taken from the input.
func ComputeSwapStep(sqrtRatioCurrentX96, sqrtRatioTargetX96, liquidity, amountRemaining *big.Int, feePips constants.FeeAmount) (sqrtRatioNextX96, amountIn, amountOut, feeAmount *big.Int, err error) {
	// Swapping token0 for token1 moves the price down, so the target sits
	// at or below the current price.
	zeroForOne := sqrtRatioCurrentX96.Cmp(sqrtRatioTargetX96) >= 0
	exactIn := amountRemaining.Cmp(constants.Zero) >= 0
	if exactIn {
		// The fee is deducted from the input up front.
		amountRemainingLessFee := new(big.Int).Div(new(big.Int).Mul(amountRemaining, new(big.Int).Sub(MaxFee, big.NewInt(int64(feePips)))), MaxFee)
		// Maximum input needed to reach the target price.
		if zeroForOne {
			amountIn = GetAmount0Delta(sqrtRatioTargetX96, sqrtRatioCurrentX96, liquidity, true)
		} else {
			amountIn = GetAmount1Delta(sqrtRatioCurrentX96, sqrtRatioTargetX96, liquidity, true)
		}
		if amountRemainingLessFee.Cmp(amountIn) >= 0 {
			// Enough input to reach the target.
			sqrtRatioNextX96 = sqrtRatioTargetX96
		} else {
			// Input runs out first; solve for the price we can reach.
			sqrtRatioNextX96, err = GetNextSqrtPriceFromInput(sqrtRatioCurrentX96, liquidity, amountRemainingLessFee, zeroForOne)
			if err != nil {
				return
			}
		}
	} else {
		// Maximum output obtainable by moving to the target price.
		if zeroForOne {
			amountOut = GetAmount1Delta(sqrtRatioTargetX96, sqrtRatioCurrentX96, liquidity, false)
		} else {
			amountOut = GetAmount0Delta(sqrtRatioCurrentX96, sqrtRatioTargetX96, liquidity, false)
		}
		// amountRemaining is negative for exact output, hence * -1.
		if new(big.Int).Mul(amountRemaining, constants.NegativeOne).Cmp(amountOut) >= 0 {
			sqrtRatioNextX96 = sqrtRatioTargetX96
		} else {
			sqrtRatioNextX96, err = GetNextSqrtPriceFromOutput(sqrtRatioCurrentX96, liquidity, new(big.Int).Mul(amountRemaining, constants.NegativeOne), zeroForOne)
			if err != nil {
				return
			}
		}
	}
	// Did this step reach the target price?
	max := sqrtRatioTargetX96.Cmp(sqrtRatioNextX96) == 0
	// Recompute whichever of amountIn/amountOut was not already fixed above
	// for the price actually reached.
	if zeroForOne {
		if !(max && exactIn) {
			amountIn = GetAmount0Delta(sqrtRatioNextX96, sqrtRatioCurrentX96, liquidity, true)
		}
		if !(max && !exactIn) {
			amountOut = GetAmount1Delta(sqrtRatioNextX96, sqrtRatioCurrentX96, liquidity, false)
		}
	} else {
		if !(max && exactIn) {
			amountIn = GetAmount1Delta(sqrtRatioCurrentX96, sqrtRatioNextX96, liquidity, true)
		}
		if !(max && !exactIn) {
			amountOut = GetAmount0Delta(sqrtRatioCurrentX96, sqrtRatioNextX96, liquidity, false)
		}
	}
	// Cap the output at the requested amount (rounding can overshoot).
	if !exactIn && amountOut.Cmp(new(big.Int).Mul(amountRemaining, constants.NegativeOne)) > 0 {
		amountOut = new(big.Int).Mul(amountRemaining, constants.NegativeOne)
	}
	if exactIn && sqrtRatioNextX96.Cmp(sqrtRatioTargetX96) != 0 {
		// we didn't reach the target, so take the remainder of the maximum input as fee
		feeAmount = new(big.Int).Sub(amountRemaining, amountIn)
	} else {
		// Fee is charged on the input, rounded up in the pool's favor.
		feeAmount = MulDivRoundingUp(amountIn, big.NewInt(int64(feePips)), new(big.Int).Sub(MaxFee, big.NewInt(int64(feePips))))
	}
	return
}
utils/swap_math.go
0.521959
0.522141
swap_math.go
starcoder
package grid

/*
A grid is an array of bools which indicate whether or not a fox can be
there or not.
*/
type Grid struct {
	Values []bool           // Values[i] is true if a fox could be at hole i
	Checks []map[int]bool   // history of check-sets applied, one map per step
}

/*
Function for generating a new grid with the appropriate preallocated space.
*/
func CreateBlankGrid() *Grid {
	values := make([]bool, len(BaseGrid.Connections))
	return &Grid{
		Values: values,
	}
}

/*
Function for copying a grid.
Note: Values are copied element-wise, but each Checks map is shared (only
the slice of map references is copied).
*/
func (grid *Grid) Copy() *Grid {
	newGrid := CreateBlankGrid()
	for i, value := range grid.Values {
		newGrid.Values[i] = value
	}
	newChecks := make([]map[int]bool, len(grid.Checks))
	for i, value := range grid.Checks {
		newChecks[i] = value
	}
	newGrid.Checks = newChecks
	return newGrid
}

/*
Function for propagating a grid: every reachable hole becomes possible in
the new grid (the OR over all connections of possible holes).
(The method name keeps the original "Propogate" spelling; it is exported API.)
*/
func (grid *Grid) Propogate() *Grid {
	newGrid := CreateBlankGrid()
	for i, value := range grid.Values {
		for _, j := range BaseGrid.Connections[i] {
			newGrid.Values[j] = newGrid.Values[j] || value
		}
	}
	newChecks := make([]map[int]bool, len(grid.Checks))
	for i, value := range grid.Checks {
		newChecks[i] = value
	}
	newGrid.Checks = newChecks
	return newGrid
}

// PropogateWithChecksAndAdd propagates with the given checks applied and
// records the check-set in the new grid's history.
func (grid *Grid) PropogateWithChecksAndAdd(checks map[int]bool) *Grid {
	newGrid := grid.PropgateWithChecks(checks)
	newGrid.AddChecks(checks)
	return newGrid
}

// AddChecks appends a check-set to the grid's history.
func (grid *Grid) AddChecks(checks map[int]bool) {
	grid.Checks = append(grid.Checks, checks)
}

// PropgateWithChecks propagates the grid, except that holes present in
// `checks` do not propagate (the fox cannot have been there).
func (grid *Grid) PropgateWithChecks(checks map[int]bool) *Grid {
	newGrid := CreateBlankGrid()
	for i, value := range grid.Values {
		checkedValue := checks[i]
		for _, j := range BaseGrid.Connections[i] {
			propogatedValue := !checkedValue && value
			newGrid.Values[j] = newGrid.Values[j] || propogatedValue
		}
	}
	newChecks := make([]map[int]bool, len(grid.Checks))
	for i, value := range grid.Checks {
		newChecks[i] = value
	}
	newGrid.Checks = newChecks
	return newGrid
}

/*
Determines what needs to be checked to remove the possibility of a certain
tile appearing in the next propagation.
Returns an array of the indexes which need to be checked to perform that
removal.
NOTE(review): the `checks` parameter is never used in this function body —
confirm whether it was meant to filter the result.
*/
func (grid *Grid) HowToRemove(checks map[int]bool) []map[int]bool {
	howToRemove := []map[int]bool{}
	for i := 0; i < len(grid.Values); i++ {
		howToRemove = append(howToRemove, map[int]bool{})
	}
	for i, value := range grid.Values {
		// Don't need to worry if the grid doesn't have this value as a possibility
		if !value {
			continue
		}
		for _, conn := range BaseGrid.Connections[i] {
			howToRemove[conn][i] = true
		}
	}
	return howToRemove
}

/*
Determine how many trues are currently in the array.
*/
func (grid *Grid) NFoxes() int {
	count := 0
	for _, value := range grid.Values {
		if value {
			count++
		}
	}
	return count
}

// Used for doing powers way faster: POWERS[i] == 2^i for i in [0, 1024).
var POWERS = map[int]int{}

func init() {
	for i := 0; i < 1024; i++ {
		POWERS[i] = power2(i)
	}
}

// Helper function for large int powers (2^p by repeated doubling).
// NOTE(review): overflows int for p >= 63 on 64-bit platforms — presumably
// grids never have that many holes; confirm.
func power2(p int) int {
	n := 1
	for i := 0; i < p; i++ {
		n *= 2
	}
	return n
}

/*
Create a hash for the grid: a bitmask of Values, taken as the minimum over
all symmetric orderings so that symmetric grids hash equally.
NOTE(review): lowestHashFirstIndex is initialized to len(Symmetries[0]) and
never updated, so the early-skip check `i > lowestHashFirstIndex` can never
trigger; verify whether this pruning was meant to track the best hash so far.
*/
func (grid *Grid) Hash() int {
	// Because of all the symmetries, only grab the lowest hash
	lowestHash := -1
	lowestHashFirstIndex := len(BaseGrid.Symmetries[0])
	// Loop through each of the valid symmetric configurations
configurationLoop:
	for _, configuration := range BaseGrid.Symmetries {
		// Construct this hash in the order of the symmetry configuration
		hash := 0
		firstIndex := -1
		for power, i := range configuration {
			if firstIndex == -1 && i > lowestHashFirstIndex {
				continue configurationLoop
			}
			if grid.Values[i] {
				if firstIndex == -1 {
					firstIndex = i
				}
				hash += POWERS[power]
			}
		}
		// Replace the lowest hash if applicable
		if lowestHash == -1 {
			lowestHash = hash
		} else if hash < lowestHash {
			lowestHash = hash
		}
	}
	return lowestHash
}

/*
Check if a grid is equal to another grid (element-wise on Values; the
Checks history is not compared).
*/
func (grid *Grid) Equal(other *Grid) bool {
	for i, gridValue := range grid.Values {
		otherValue := other.Values[i]
		if gridValue != otherValue {
			return false
		}
	}
	return true
}
grid/grid.go
0.776708
0.487002
grid.go
starcoder
// NOTE(review): the .gen.go suffix suggests this file is machine-generated
// (one comparator per array type); confirm the generator before hand-editing.
package arrowutil

import (
	"fmt"

	"github.com/influxdata/flux/array"
)

// CompareFunc defines the interface for a comparison function.
// The comparison function should return 0 for equivalent values,
// -1 if x[i] is before y[j], and +1 if x[i] is after y[j].
type CompareFunc func(x, y array.Array, i, j int) int

// Compare will compare two values in the various arrays.
// The result will be 0 if x[i] == y[j], -1 if x[i] < y[j], and +1 if x[i] > y[j].
// A null value is always less than every non-null value.
// Panics if the arrays are of different concrete types or an unsupported type.
func Compare(x, y array.Array, i, j int) int {
	switch x := x.(type) {
	case *array.Int:
		return IntCompare(x, y.(*array.Int), i, j)
	case *array.Uint:
		return UintCompare(x, y.(*array.Uint), i, j)
	case *array.Float:
		return FloatCompare(x, y.(*array.Float), i, j)
	case *array.Boolean:
		return BooleanCompare(x, y.(*array.Boolean), i, j)
	case *array.String:
		return StringCompare(x, y.(*array.String), i, j)
	default:
		panic(fmt.Errorf("unsupported array data type: %s", x.DataType()))
	}
}

// CompareDesc will compare two values in the various arrays.
// The result will be 0 if x[i] == y[j], -1 if x[i] > y[j], and +1 if x[i] < y[j].
// A null value is always greater than every non-null value.
func CompareDesc(x, y array.Array, i, j int) int {
	switch x := x.(type) {
	case *array.Int:
		return IntCompareDesc(x, y.(*array.Int), i, j)
	case *array.Uint:
		return UintCompareDesc(x, y.(*array.Uint), i, j)
	case *array.Float:
		return FloatCompareDesc(x, y.(*array.Float), i, j)
	case *array.Boolean:
		return BooleanCompareDesc(x, y.(*array.Boolean), i, j)
	case *array.String:
		return StringCompareDesc(x, y.(*array.String), i, j)
	default:
		panic(fmt.Errorf("unsupported array data type: %s", x.DataType()))
	}
}

// IntCompare compares x[i] and y[j] in ascending order; nulls order first.
func IntCompare(x, y *array.Int, i, j int) int {
	if x.IsNull(i) {
		if y.IsNull(j) {
			return 0
		}
		return -1
	} else if y.IsNull(j) {
		return 1
	}
	if l, r := x.Value(i), y.Value(j); l < r {
		return -1
	} else if l == r {
		return 0
	}
	return 1
}

// IntCompareDesc compares x[i] and y[j] in descending order; nulls still
// order first (a null is treated as greater than every non-null value).
func IntCompareDesc(x, y *array.Int, i, j int) int {
	if x.IsNull(i) {
		if y.IsNull(j) {
			return 0
		}
		return -1
	} else if y.IsNull(j) {
		return 1
	}
	if l, r := x.Value(i), y.Value(j); l > r {
		return -1
	} else if l == r {
		return 0
	}
	return 1
}

// UintCompare compares x[i] and y[j] in ascending order; nulls order first.
func UintCompare(x, y *array.Uint, i, j int) int {
	if x.IsNull(i) {
		if y.IsNull(j) {
			return 0
		}
		return -1
	} else if y.IsNull(j) {
		return 1
	}
	if l, r := x.Value(i), y.Value(j); l < r {
		return -1
	} else if l == r {
		return 0
	}
	return 1
}

// UintCompareDesc compares x[i] and y[j] in descending order; nulls order first.
func UintCompareDesc(x, y *array.Uint, i, j int) int {
	if x.IsNull(i) {
		if y.IsNull(j) {
			return 0
		}
		return -1
	} else if y.IsNull(j) {
		return 1
	}
	if l, r := x.Value(i), y.Value(j); l > r {
		return -1
	} else if l == r {
		return 0
	}
	return 1
}

// FloatCompare compares x[i] and y[j] in ascending order; nulls order first.
// Note: NaN is neither less than nor equal to anything here, so a NaN on
// either side falls through to +1.
func FloatCompare(x, y *array.Float, i, j int) int {
	if x.IsNull(i) {
		if y.IsNull(j) {
			return 0
		}
		return -1
	} else if y.IsNull(j) {
		return 1
	}
	if l, r := x.Value(i), y.Value(j); l < r {
		return -1
	} else if l == r {
		return 0
	}
	return 1
}

// FloatCompareDesc compares x[i] and y[j] in descending order; nulls order first.
func FloatCompareDesc(x, y *array.Float, i, j int) int {
	if x.IsNull(i) {
		if y.IsNull(j) {
			return 0
		}
		return -1
	} else if y.IsNull(j) {
		return 1
	}
	if l, r := x.Value(i), y.Value(j); l > r {
		return -1
	} else if l == r {
		return 0
	}
	return 1
}

// BooleanCompare compares x[i] and y[j] ascending with false < true;
// nulls order first.
func BooleanCompare(x, y *array.Boolean, i, j int) int {
	if x.IsNull(i) {
		if y.IsNull(j) {
			return 0
		}
		return -1
	} else if y.IsNull(j) {
		return 1
	}
	if x.Value(i) {
		if y.Value(j) {
			return 0
		}
		return 1
	} else if y.Value(j) {
		return -1
	}
	return 0
}

// BooleanCompareDesc compares x[i] and y[j] descending (true before false);
// nulls order first.
func BooleanCompareDesc(x, y *array.Boolean, i, j int) int {
	if x.IsNull(i) {
		if y.IsNull(j) {
			return 0
		}
		return -1
	} else if y.IsNull(j) {
		return 1
	}
	if x.Value(i) {
		if y.Value(j) {
			return 0
		}
		return -1
	} else if y.Value(j) {
		return 1
	}
	return 0
}

// StringCompare compares x[i] and y[j] in ascending (byte-wise) order;
// nulls order first.
func StringCompare(x, y *array.String, i, j int) int {
	if x.IsNull(i) {
		if y.IsNull(j) {
			return 0
		}
		return -1
	} else if y.IsNull(j) {
		return 1
	}
	if l, r := x.Value(i), y.Value(j); l < r {
		return -1
	} else if l == r {
		return 0
	}
	return 1
}

// StringCompareDesc compares x[i] and y[j] in descending order; nulls order first.
func StringCompareDesc(x, y *array.String, i, j int) int {
	if x.IsNull(i) {
		if y.IsNull(j) {
			return 0
		}
		return -1
	} else if y.IsNull(j) {
		return 1
	}
	if l, r := x.Value(i), y.Value(j); l > r {
		return -1
	} else if l == r {
		return 0
	}
	return 1
}
internal/arrowutil/compare.gen.go
0.60743
0.663587
compare.gen.go
starcoder
package agent

import (
	"encoding/json"
	"fmt"
	"math"

	"github.com/stellentus/cartpoles/lib/logger"
	"github.com/stellentus/cartpoles/lib/rlglue"
)

// handControllerSettings configures the hand-written cartpole controller.
type handControllerSettings struct {
	EnableDebug bool `json:"enable-debug"`
	// PlanDuration is the number of actions that should be taken before looking at state again, minimum 1.
	// - 2 gives optimal behavior, insensitive to 'Threshold'.
	// - 0.2s is an appropriate human reaction time, which could also be used as a the time it takes for a human to
	// - change plans, even though that's not necessarily the same number.)
	// - This would correspond to a plan duration of reaction_time/tau = 0.2/0.02 = 10.
	PlanDuration int `json:"plan-duration"`
	// Threshold is a parameter between 0 and 1 to control behavior.
	Threshold float64 `json:"Threshold"`
	// FailAngle is the angle at which the environment terminates the episode.
	// It's imported in degrees but converted to radians.
	FailAngle float64 `json:"fail-degrees"`
	// FailPosition is the position at which the environment terminates the episode, whether it's positive or negative.
	FailPosition float64 `json:"fail-position"`
}

// HandController is a scripted cartpole agent that commits to a short plan
// of actions and replans every PlanDuration steps.
type HandController struct {
	logger.Debug
	handControllerSettings
	actions     []int // the current plan, one action per upcoming step
	actionIndex int   // next position in actions; >= len means "replan"
}

func init() {
	Add("hand-controller", NewHandController)
}

// NewHandController creates an unconfigured HandController with the given logger.
func NewHandController(logger logger.Debug) (rlglue.Agent, error) {
	return &HandController{Debug: logger}, nil
}

// Initialize configures the agent with the provided parameters and resets any internal state.
func (agent *HandController) Initialize(run uint, expAttr, envAttr rlglue.Attributes) error {
	// Set defaults, which will be overridden if the JSON contains different values.
	agent.handControllerSettings = handControllerSettings{
		PlanDuration: 2,
		Threshold:    0.9,
		FailAngle:    15,
		FailPosition: 2.4,
	}
	err := json.Unmarshal(expAttr, &agent.handControllerSettings)
	if err != nil {
		agent.Message("warning", "agent.HandController settings weren't available: "+err.Error())
	}
	if agent.EnableDebug {
		agent.Message("msg", "agent.HandController Initialize",
			"plan-duration", agent.PlanDuration,
			"Threshold", agent.Threshold,
			"fail-degrees", agent.FailAngle,
			"fail-position", agent.FailPosition,
		)
	}

	// Enforce the documented minimum so chooseAction never indexes an
	// empty plan.
	if agent.PlanDuration < 1 {
		agent.PlanDuration = 1
	}

	// BUG FIX: degrees -> radians is a multiplication by pi/180. The
	// original divided by 180*pi, shrinking FailAngle by a factor of pi^2.
	agent.FailAngle *= math.Pi / 180 // convert degrees to radians

	agent.actions = make([]int, agent.PlanDuration)
	// Mark the plan as exhausted so the first chooseAction replans from the
	// actual state instead of replaying the zero-valued initial plan.
	agent.actionIndex = agent.PlanDuration

	agent.Message("hand-controller settings", fmt.Sprintf("%+v", agent.handControllerSettings))
	return nil
}

// Start provides an initial observation to the agent and returns the agent's action.
func (agent *HandController) Start(state rlglue.State) rlglue.Action {
	if agent.EnableDebug {
		agent.Message("msg", "episode start")
	}
	agent.actions = make([]int, agent.PlanDuration)
	// BUG FIX: discard any leftover plan position from the previous episode;
	// otherwise the first actions of a new episode replay a stale (zeroed)
	// plan without looking at the state.
	agent.actionIndex = agent.PlanDuration
	return agent.chooseAction(state)
}

// Step provides a new observation and a reward to the agent and returns the agent's next action.
func (agent *HandController) Step(state rlglue.State, reward float64) rlglue.Action {
	return agent.chooseAction(state)
}

// End informs the agent that a terminal state has been reached, providing the final reward.
func (agent *HandController) End(state rlglue.State, reward float64) {
	if agent.EnableDebug {
		agent.Message("msg", "end", "state", state, "reward", reward)
	}
}

// chooseAction allows the controller to act on a much slower timestep. (During each time period,
// it can act with a specified ratio of left/right actions.) Based on the current state, it either
// continues the current plan or, when the plan is exhausted, builds a new one.
func (agent *HandController) chooseAction(state rlglue.State) rlglue.Action {
	if agent.actionIndex >= agent.PlanDuration {
		agent.selectActions(state)
	}
	action := agent.actions[agent.actionIndex]
	agent.actionIndex++
	return rlglue.Action(action)
}

// createActionSeries creates a predetermined series of actions for the next PlanDuration steps.
// `level` should be a number between 0 and 1. It's the average action value for this time period.
func (agent *HandController) createActionSeries(level float64) {
	// After the full plan, the sum of actions should be level*PlanDuration.
	// At each step, pick the action keeping the running sum closest to that.
	sum := 0
	for i := range agent.actions {
		// By this time, the sum should be as close as possible to this value.
		targetSum := level * float64(i)
		// If the current sum is within 0.5 of the target, action is 0.
		// Otherwise, the sum is too low and we need to increase it.
		nextAction := 0
		if float64(sum)+0.5 < targetSum {
			nextAction = 1
		}
		sum += nextAction
		agent.actions[i] = nextAction
	}
	agent.actionIndex = 0
}

// scaledCreateActionSeries is the same as createActionSeries, but the input ranges from -1 to 1.
func (agent *HandController) scaledCreateActionSeries(scaledLevel float64) {
	agent.createActionSeries((scaledLevel + 1) / 2)
}

// selectActions chooses the next action series based on the current state.
// This code will try to keep the angle balanced, but ignores the position condition.
func (agent *HandController) selectActions(state rlglue.State) {
	angle := state[2]
	// Respond in proportion to how far we've tilted.
	if math.Abs(angle) > agent.Threshold*agent.FailAngle {
		// Just do a maximum movement in the same direction
		agent.scaledCreateActionSeries(angle / math.Abs(angle))
	} else {
		// Just do a proportional movement in the same direction
		agent.scaledCreateActionSeries(angle / agent.FailAngle)
	}
}
lib/agent/hand.go
0.73431
0.451447
hand.go
starcoder
package jsondiff

import (
	"encoding/json"
	"fmt"

	"github.com/clarify/subtest"
	"github.com/nsf/jsondiff"
)

// EqualJSON returns a check function that fails if the test value is not
// equivalent to the expected JSON, or if either value is not valid JSON.
func EqualJSON(expect string) subtest.CheckFunc {
	return func(got interface{}) error {
		// compareJSON(a, b): the FIRST argument here is expect, the SECOND
		// is the test value.
		d, s := compareJSON([]byte(expect), got)
		switch d {
		case jsondiff.BothArgsAreInvalidJson:
			fallthrough
		case jsondiff.FirstArgIsInvalidJson:
			// BUG FIX: the first argument is expect, so this diff code means
			// the EXPECT value is invalid (the labels were swapped).
			return subtest.FailGot("expect value is invalid JSON", expect)
		case jsondiff.SecondArgIsInvalidJson:
			return subtest.FailGot("test value is invalid JSON", got)
		case jsondiff.FullMatch:
			return nil
		default:
			return fmt.Errorf("unequal JSON:\n %s", s)
		}
	}
}

// SupersetOfJSON returns a check function that fails if the test value is not
// a superset of expect.
func SupersetOfJSON(expect string) subtest.CheckFunc {
	return func(got interface{}) error {
		// Here the FIRST argument is the test value and the SECOND is expect.
		d, s := compareJSON(got, []byte(expect))
		switch d {
		case jsondiff.BothArgsAreInvalidJson:
			fallthrough
		case jsondiff.SecondArgIsInvalidJson:
			return subtest.FailGot("expect value is invalid JSON", expect)
		case jsondiff.FirstArgIsInvalidJson:
			return subtest.FailGot("test value is invalid JSON", got)
		case jsondiff.FullMatch, jsondiff.SupersetMatch:
			return nil
		default:
			return fmt.Errorf("test value is not superset of expect:\n %s", s)
		}
	}
}

// SubsetOfJSON returns a check function that fails if the test value is not a
// subset of expect.
func SubsetOfJSON(expect string) subtest.CheckFunc {
	return func(got interface{}) error {
		// FIRST argument is expect, SECOND is the test value (got is a
		// subset of expect iff expect is a superset of got).
		d, s := compareJSON([]byte(expect), got)
		switch d {
		case jsondiff.BothArgsAreInvalidJson:
			fallthrough
		case jsondiff.FirstArgIsInvalidJson:
			// BUG FIX: report the expect value with the expect message (the
			// original attached got to this failure and vice versa).
			return subtest.FailGot("expect value is invalid JSON", expect)
		case jsondiff.SecondArgIsInvalidJson:
			return subtest.FailGot("test value is invalid JSON", got)
		case jsondiff.FullMatch, jsondiff.SupersetMatch:
			return nil
		default:
			return fmt.Errorf("test value is not subset of expect:\n %s", s)
		}
	}
}

// compareJSON normalizes a and b to JSON bytes ([]byte, json.RawMessage and
// string are used as-is; anything else is marshaled) and diffs them with the
// package-level jsondiff configuration.
func compareJSON(a, b interface{}) (jsondiff.Difference, string) {
	var ab, bb []byte
	switch at := a.(type) {
	case []byte:
		ab = at
	case json.RawMessage:
		ab = []byte(at)
	case string:
		ab = []byte(at)
	default:
		var err error
		ab, err = json.Marshal(at)
		if err != nil {
			return jsondiff.FirstArgIsInvalidJson, ""
		}
	}
	switch bt := b.(type) {
	case []byte:
		bb = bt
	case json.RawMessage:
		bb = []byte(bt)
	case string:
		bb = []byte(bt)
	default:
		var err error
		bb, err = json.Marshal(bt)
		if err != nil {
			return jsondiff.SecondArgIsInvalidJson, ""
		}
	}
	return jsondiff.Compare(ab, bb, &cfg.jsonDiff)
}
examples/jsondiff/chek.go
0.744656
0.44565
chek.go
starcoder
package model

// Series describes one data series of a chart. Field order is preserved
// because it determines the order of keys in the marshaled JSON.
type Series struct {
	Type             SeriesType       `json:"type"`
	CoordinateSystem CoordinateSystem `json:"coordinateSystem"`
	Name             string           `json:"name"`
	ItemStyle        *ItemStyle       `json:"itemStyle,omitempty"`
	// Mark point in a chart.
	MarkPoint *MarkPoints `json:"markPoint,omitempty"`
	// Use a line in the chart to illustrate.
	MarkLine *MarkLines `json:"markLine,omitempty"`
	// For Line
	// The style of label line
	Smooth    bool       `json:"smooth,omitempty"`
	LineStyle *LineStyle `json:"lineStyle,omitempty"`
	Encode    *Encode    `json:"encode,omitempty"`
	AreaStyle *AreaStyle `json:"areaStyle,omitempty"`
	Stack     string     `json:"stack,omitempty"`
	// Highlight style of the graphic.
	Emphasis *Emphasis `json:"emphasis,omitempty"`
	// Text label of , to explain some data information about graphic item like value, name and so on
	Label *Label `json:"label,omitempty"`
	// Index of x axis to combine with, which is useful for multiple x axes in one chart.
	XAxisIndex int `json:"xAxisIndex,omitempty"`
	// Index of y axis to combine with, which is useful for multiple y axes in one chart.
	YAxisIndex int `json:"yAxisIndex,omitempty"`
}

// SeriesType identifies the chart type rendered for a series.
type SeriesType string

const (
	SeriesTypeLine          SeriesType = "line"
	SeriesTypeBar           SeriesType = "bar"
	SeriesTypePie           SeriesType = "pie"
	SeriesTypeScatter       SeriesType = "scatter"
	SeriesTypeEffectScatter SeriesType = "effectScatter"
	SeriesTypeRadar         SeriesType = "radar"
	SeriesTypeTree          SeriesType = "tree"
	SeriesTypeTreemap       SeriesType = "treemap"
	SeriesTypeSunburst      SeriesType = "sunburst"
	SeriesTypeBoxplot       SeriesType = "boxplot"
	SeriesTypeCandlestick   SeriesType = "candlestick"
	SeriesTypeHeatmap       SeriesType = "heatmap"
	SeriesTypeMap           SeriesType = "map"
	SeriesTypeParallel      SeriesType = "parallel"
	SeriesTypeLines         SeriesType = "lines"
	SeriesTypeGraph         SeriesType = "graph"
	SeriesTypeSankey        SeriesType = "sankey"
	SeriesTypeFunnel        SeriesType = "funnel"
	SeriesTypeGauge         SeriesType = "gauge"
	SeriesTypePictorialBar  SeriesType = "pictorialBar"
	SeriesTypeThemeRiver    SeriesType = "themeRiver"
	SeriesTypeCustom        SeriesType = "custom"
)

// CoordinateSystem selects the coordinate system a series is plotted in.
type CoordinateSystem string

const (
	CoordinateSystemCartesian2d CoordinateSystem = "cartesian2d"
	CoordinateSystemPolar       CoordinateSystem = "polar"
)

// Encode maps dataset dimensions onto the axes of a series.
type Encode struct {
	// These properties only work in cartesian(grid) coordinate system:
	X []string `json:"x,omitempty"`
	Y []string `json:"y,omitempty"`
	// These properties only work in polar coordinate system
	Radius []string `json:"radius,omitempty"`
	Angle  []string `json:"angle,omitempty"`
	// These properties only work in geo coordinate system
	Lng []string `json:"lng,omitempty"`
	Lat []string `json:"lat,omitempty"`
	// For some type of series that are not in any coordinate system, like 'pie', 'funnel' etc.:
	Value []string `json:"value,omitempty"`
}

// Emphasis configures the highlight behavior of a series.
type Emphasis struct {
	// Whether to scale to highlight the data in emphasis state.
	Scale bool `json:"scale,omitempty"`
	// When the data is highlighted, whether to fade out of other data to focus the highlighted
	Focus Focus `json:"focus,omitempty"`
	// The range of fade out when focus is enabled. Supports the following configurations.
	BlurScope BlurScope `json:"blurScope,omitempty"`
}

// Focus controls which elements fade out when data is highlighted.
type Focus string

const (
	// Do not fade out other data, it's by default.
	FocusNone Focus = "none"
	// Only focus (not fade out) the element of the currently highlighted data.
	FocusSelf Focus = "self"
	// Focus on all elements of the series which the currently highlighted data belongs to.
	FocusSeries Focus = "series"
)

// BlurScope defines the range of the fade-out effect.
type BlurScope string

const (
	BlurScopeCoordinateSystem BlurScope = "coordinateSystem"
	BlurScopeSeries           BlurScope = "series"
	BlurScopeGlobal           BlurScope = "global"
)

// Label configures the text label shown for series data.
type Label struct {
	Show     bool     `json:"show"`
	Position Position `json:"position"`
}
model/series.go
0.807726
0.425367
series.go
starcoder
package epd4in2

import (
	"image/color"
	"machine"
	"time"
)

// Config describes the display geometry and orientation.
type Config struct {
	Width         int16 // Width is the display resolution
	Height        int16
	LogicalWidth  int16    // LogicalWidth must be a multiple of 8 and same size or bigger than Width
	Rotation      Rotation // Rotation is clock-wise
}

// Device is a driver instance for one e-paper panel on an SPI bus.
type Device struct {
	bus          machine.SPI
	cs           machine.Pin // chip select, active low
	dc           machine.Pin // data/command select: low = command, high = data
	rst          machine.Pin // hardware reset, active low
	busy         machine.Pin // input; panel busy indicator
	logicalWidth int16
	width        int16
	height       int16
	buffer       []uint8 // 1 bit per pixel frame buffer
	bufferLength uint32
	rotation     Rotation
}

type Rotation uint8

// New returns a new epd4in2 driver. Pass in a fully configured SPI bus.
func New(bus machine.SPI, csPin, dcPin, rstPin, busyPin machine.Pin) Device {
	csPin.Configure(machine.PinConfig{Mode: machine.PinOutput})
	dcPin.Configure(machine.PinConfig{Mode: machine.PinOutput})
	rstPin.Configure(machine.PinConfig{Mode: machine.PinOutput})
	busyPin.Configure(machine.PinConfig{Mode: machine.PinInput})
	return Device{
		bus:  bus,
		cs:   csPin,
		dc:   dcPin,
		rst:  rstPin,
		busy: busyPin,
	}
}

// Configure sets up the device: applies the geometry from cfg (falling back
// to the panel's native EPD_WIDTH/EPD_HEIGHT), allocates an all-white
// (0xFF) frame buffer, resets the panel, and runs the power-on init
// sequence (power settings, booster soft start, panel setting, PLL).
func (d *Device) Configure(cfg Config) {
	if cfg.LogicalWidth != 0 {
		d.logicalWidth = cfg.LogicalWidth
	} else {
		d.logicalWidth = EPD_WIDTH
	}
	if cfg.Width != 0 {
		d.width = cfg.Width
	} else {
		d.width = EPD_WIDTH
	}
	if cfg.Height != 0 {
		d.height = cfg.Height
	} else {
		d.height = EPD_HEIGHT
	}
	d.rotation = cfg.Rotation
	// 8 pixels per byte.
	d.bufferLength = (uint32(d.logicalWidth) * uint32(d.height)) / 8
	d.buffer = make([]uint8, d.bufferLength)
	for i := uint32(0); i < d.bufferLength; i++ {
		d.buffer[i] = 0xFF
	}

	d.cs.Low()
	d.dc.Low()
	d.rst.Low()

	d.Reset()

	d.SendCommand(POWER_SETTING)
	d.SendData(0x03) // VDS_EN, VDG_EN
	d.SendData(0x00) // VCOM_HV, VGHL_LV[1], VGHL_LV[0]
	d.SendData(0x2b) // VDH
	d.SendData(0x2b) // VDL
	d.SendData(0xff) // VDHR
	d.SendCommand(BOOSTER_SOFT_START)
	d.SendData(0x17)
	d.SendData(0x17)
	d.SendData(0x17) //07 0f 17 1f 27 2F 37 2f
	d.SendCommand(POWER_ON)
	d.WaitUntilIdle()
	d.SendCommand(PANEL_SETTING)
	d.SendData(0xbf) // KW-BF KWR-AF BWROTP 0f
	d.SendData(0x0b)
	d.SendCommand(PLL_CONTROL)
	d.SendData(0x3c) // 3A 100HZ 29 150Hz 39 200HZ 31 171HZ
}

// Reset resets the device
func (d *Device) Reset() {
	d.rst.Low()
	time.Sleep(200 * time.Millisecond)
	d.rst.High()
	time.Sleep(200 * time.Millisecond)
}

// DeepSleep puts the display into deepsleep
func (d *Device) DeepSleep() {
	d.SendCommand(VCOM_AND_DATA_INTERVAL_SETTING)
	d.SendData(0x17)           //border floating
	d.SendCommand(VCM_DC_SETTING) //VCOM to 0V
	d.SendCommand(PANEL_SETTING)
	time.Sleep(100 * time.Millisecond)
	d.SendCommand(POWER_SETTING) //VG&VS to 0V fast
	d.SendData(0x00)
	d.SendData(0x00)
	d.SendData(0x00)
	d.SendData(0x00)
	d.SendData(0x00)
	time.Sleep(100 * time.Millisecond)
	d.SendCommand(POWER_OFF) //power off
	d.WaitUntilIdle()
	d.SendCommand(DEEP_SLEEP) //deep sleep
	d.SendData(0xA5)
}

// SendCommand sends a command to the display
func (d *Device) SendCommand(command uint8) {
	d.sendDataCommand(true, command)
}

// SendData sends a data byte to the display
func (d *Device) SendData(data uint8) {
	d.sendDataCommand(false, data)
}

// 
sendDataCommand sends image data or a command to the screen func (d *Device) sendDataCommand(isCommand bool, data uint8) { if isCommand { d.dc.Low() } else { d.dc.High() } d.cs.Low() d.bus.Transfer(data) d.cs.High() } // SetLUT sets the look up tables for full or partial updates func (d *Device) SetLUT() { lut_vcom0 := []uint8{ 0x00, 0x17, 0x00, 0x00, 0x00, 0x02, 0x00, 0x17, 0x17, 0x00, 0x00, 0x02, 0x00, 0x0A, 0x01, 0x00, 0x00, 0x01, 0x00, 0x0E, 0x0E, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // 44 bytes, unlike the others } lut_ww := []uint8{ 0x40, 0x17, 0x00, 0x00, 0x00, 0x02, 0x90, 0x17, 0x17, 0x00, 0x00, 0x02, 0x40, 0x0A, 0x01, 0x00, 0x00, 0x01, 0xA0, 0x0E, 0x0E, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, } lut_bw := []uint8{ 0x40, 0x17, 0x00, 0x00, 0x00, 0x02, 0x90, 0x17, 0x17, 0x00, 0x00, 0x02, 0x40, 0x0A, 0x01, 0x00, 0x00, 0x01, 0xA0, 0x0E, 0x0E, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, } lut_bb := []uint8{ 0x80, 0x17, 0x00, 0x00, 0x00, 0x02, 0x90, 0x17, 0x17, 0x00, 0x00, 0x02, 0x80, 0x0A, 0x01, 0x00, 0x00, 0x01, 0x50, 0x0E, 0x0E, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, } lut_wb := []uint8{ 0x80, 0x17, 0x00, 0x00, 0x00, 0x02, 0x90, 0x17, 0x17, 0x00, 0x00, 0x02, 0x80, 0x0A, 0x01, 0x00, 0x00, 0x01, 0x50, 0x0E, 0x0E, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, } d.SendCommand(LUT_FOR_VCOM) //vcom for count := 0; count < 44; count++ { d.SendData(lut_vcom0[count]) } d.SendCommand(LUT_WHITE_TO_WHITE) //ww -- for count := 0; count < 42; count++ { d.SendData(lut_ww[count]) } d.SendCommand(LUT_BLACK_TO_WHITE) //bw r for count := 0; count < 42; 
count++ { d.SendData(lut_bw[count]) } d.SendCommand(LUT_WHITE_TO_BLACK) //wb w for count := 0; count < 42; count++ { d.SendData(lut_bb[count]) } d.SendCommand(LUT_BLACK_TO_BLACK) //bb b for count := 0; count < 42; count++ { d.SendData(lut_wb[count]) } } // SetPixel modifies the internal buffer in a single pixel. // The display have 2 colors: black and white // We use RGBA(0,0,0, 255) as white (transparent) // Anything else as black func (d *Device) SetPixel(x int16, y int16, c color.RGBA) { x, y = d.xy(x, y) if x < 0 || x >= d.logicalWidth || y < 0 || y >= d.height { return } byteIndex := (uint32(x) + uint32(y)*uint32(d.logicalWidth)) / 8 if c.R == 0 && c.G == 0 && c.B == 0 { // TRANSPARENT / WHITE d.buffer[byteIndex] |= 0x80 >> uint8(x%8) } else { // WHITE / EMPTY d.buffer[byteIndex] &^= 0x80 >> uint8(x%8) } } // Display sends the buffer to the screen. func (d *Device) Display() error { d.SendCommand(RESOLUTION_SETTING) d.SendData(uint8(d.height >> 8)) d.SendData(uint8(d.logicalWidth & 0xff)) d.SendData(uint8(d.height >> 8)) d.SendData(uint8(d.height & 0xff)) d.SendCommand(VCM_DC_SETTING) d.SendData(0x12) d.SendCommand(VCOM_AND_DATA_INTERVAL_SETTING) d.SendCommand(0x97) //VBDF 17|D7 VBDW 97 VBDB 57 VBDF F7 VBDW 77 VBDB 37 VBDR B7 d.SendCommand(DATA_START_TRANSMISSION_1) var i int16 for i = 0; i < d.logicalWidth/8*d.height; i++ { d.SendData(0xFF) // bit set: white, bit reset: black } time.Sleep(2 * time.Millisecond) d.SendCommand(DATA_START_TRANSMISSION_2) for i = 0; i < d.logicalWidth/8*d.height; i++ { d.SendData(d.buffer[i]) } time.Sleep(2 * time.Millisecond) d.SetLUT() d.SendCommand(DISPLAY_REFRESH) time.Sleep(100 * time.Millisecond) d.WaitUntilIdle() return nil } // ClearDisplay erases the device SRAM func (d *Device) ClearDisplay() { d.SendCommand(RESOLUTION_SETTING) d.SendData(uint8(d.height >> 8)) d.SendData(uint8(d.logicalWidth & 0xff)) d.SendData(uint8(d.height >> 8)) d.SendData(uint8(d.height & 0xff)) d.SendCommand(DATA_START_TRANSMISSION_1) time.Sleep(2 
* time.Millisecond) var i int16 for i = 0; i < d.logicalWidth/8*d.height; i++ { d.SendData(0xFF) } time.Sleep(2 * time.Millisecond) d.SendCommand(DATA_START_TRANSMISSION_2) time.Sleep(2 * time.Millisecond) for i = 0; i < d.logicalWidth/8*d.height; i++ { d.SendData(0xFF) } time.Sleep(2 * time.Millisecond) d.SetLUT() d.SendCommand(DISPLAY_REFRESH) time.Sleep(100 * time.Millisecond) d.WaitUntilIdle() } // WaitUntilIdle waits until the display is ready func (d *Device) WaitUntilIdle() { for d.busy.Get() { time.Sleep(100 * time.Millisecond) } } // IsBusy returns the busy status of the display func (d *Device) IsBusy() bool { return d.busy.Get() } // ClearBuffer sets the buffer to 0xFF (white) func (d *Device) ClearBuffer() { for i := uint32(0); i < d.bufferLength; i++ { d.buffer[i] = 0xFF } } // Size returns the current size of the display. func (d *Device) Size() (w, h int16) { if d.rotation == ROTATION_90 || d.rotation == ROTATION_270 { return d.height, d.logicalWidth } return d.logicalWidth, d.height } // SetRotation changes the rotation (clock-wise) of the device func (d *Device) SetRotation(rotation Rotation) { d.rotation = rotation } // xy chages the coordinates according to the rotation func (d *Device) xy(x, y int16) (int16, int16) { switch d.rotation { case NO_ROTATION: return x, y case ROTATION_90: return d.width - y - 1, x case ROTATION_180: return d.width - x - 1, d.height - y - 1 case ROTATION_270: return y, d.height - x - 1 } return x, y }
waveshare-epd/epd4in2/epd4in2.go
0.597021
0.462837
epd4in2.go
starcoder
package generator

import (
	"crypto/rand"
	"fmt"
)

// sequenceValueGenerator produces UUID-shaped identifiers whose tail is a
// monotonically increasing counter drawn from ranges that are reserved in
// batches from a ValueStorage.
// NOTE(review): the maps are accessed without synchronization, so this
// type is not safe for concurrent use — confirm callers serialize access.
type sequenceValueGenerator struct {
	valueStorage        ValueStorage
	rangeStep           uint64 // default number of values reserved per batch
	sequences           map[string]*SequenceRange
	rangeStepsToReserve map[string]uint64 // one-shot larger batch sizes, keyed by sequence
}

// NewSequenceValueGenerator builds a generator that reserves ranges of
// rangeStep values at a time from valueStorage.
func NewSequenceValueGenerator(valueStorage ValueStorage, rangeStep uint64) ValueGenerator {
	sequences := make(map[string]*SequenceRange)
	rangeStepsToReserve := make(map[string]uint64)
	return &sequenceValueGenerator{
		valueStorage,
		rangeStep,
		sequences,
		rangeStepsToReserve,
	}
}

// ReserveRange records that the next batch reserved for sequence should be
// large enough for count values. Only counts larger than the default
// rangeStep are recorded; the size is rounded up to the next multiple of
// rangeStep (an exact multiple still rounds up one extra step).
func (valueGenerator *sequenceValueGenerator) ReserveRange(sequence string, count uint64) {
	if count > valueGenerator.rangeStep {
		ratio := float64(count) / float64(valueGenerator.rangeStep)
		upperEdge := (uint64(ratio) + 1) * valueGenerator.rangeStep
		valueGenerator.rangeStepsToReserve[sequence] = upperEdge
	}
}

// GetNextValue returns the next identifier for sequence, formatted as a
// random UUID-like prefix plus a hex-encoded sequential part.
func (valueGenerator *sequenceValueGenerator) GetNextValue(sequence string) string {
	if valueGenerator.sequences[sequence] == nil {
		valueGenerator.updateRangeForSequence(sequence)
	}
	// Pointer into the cached SequenceRange so the increment below
	// persists across calls.
	currentValue := &valueGenerator.sequences[sequence].CurrentValue
	*currentValue++
	if *currentValue > valueGenerator.sequences[sequence].MaxValue {
		// Range exhausted: fetch a fresh one. NOTE(review): the value
		// returned here is the new range's CurrentValue without an extra
		// increment — presumably the storage hands back a range whose
		// CurrentValue is the first usable value; verify against
		// ValueStorage.GetNextRangeForSequence.
		currentValue = valueGenerator.updateRangeAndGetNextValueForSequence(sequence)
	}
	sequentialPart := valueGenerator.formatSequentialPart(currentValue)
	randomPart := valueGenerator.generateRandomPart(8)
	return randomPart + "-" + sequentialPart
}

// updateRangeForSequence reserves a new range for sequence from storage,
// honoring any one-shot larger batch size recorded by ReserveRange.
func (valueGenerator *sequenceValueGenerator) updateRangeForSequence(sequence string) *SequenceRange {
	var rangeStep uint64
	if valueGenerator.rangeStepsToReserve[sequence] > 0 {
		rangeStep = valueGenerator.rangeStepsToReserve[sequence]
		delete(valueGenerator.rangeStepsToReserve, sequence)
	} else {
		rangeStep = valueGenerator.rangeStep
	}
	nextRange := valueGenerator.valueStorage.GetNextRangeForSequence(sequence, rangeStep)
	valueGenerator.sequences[sequence] = &nextRange
	return &nextRange
}

// updateRangeAndGetNextValueForSequence drops the exhausted cached range,
// reserves a new one, and returns a pointer to its current value.
func (valueGenerator *sequenceValueGenerator) updateRangeAndGetNextValueForSequence(sequence string) *uint64 {
	delete(valueGenerator.sequences, sequence)
	nextRange := valueGenerator.updateRangeForSequence(sequence)
	return &nextRange.CurrentValue
}

// formatSequentialPart renders the low 64 bits of the counter as
// "hhhh-llllllllllll" (16 high bits, then 48 low bits, hex).
func (valueGenerator *sequenceValueGenerator) formatSequentialPart(currentValue *uint64) string {
	lowerSequentialPartHex := fmt.Sprintf("%012x", *currentValue&0x0000FFFFFFFFFFFF)
	higherSequentialPartHex := fmt.Sprintf("%04x", *currentValue>>48&0x000000000000FFFF)
	sequentialPartHex := higherSequentialPartHex + "-" + lowerSequentialPartHex
	return sequentialPartHex
}

// generateRandomPart returns randomBytesCount crypto-random bytes hex
// encoded in a UUID-like grouping ("xxxxxxxx-xxxx-0xxx" for 8 bytes, with
// byte 6's high nibble forced to the literal version character '0').
// It panics if the system entropy source fails.
func (valueGenerator *sequenceValueGenerator) generateRandomPart(randomBytesCount int) string {
	randomBytes := make([]byte, randomBytesCount)
	_, err := rand.Read(randomBytes)
	if err != nil {
		panic(err)
	}
	randomHex := ""
	for i := 0; i < randomBytesCount; i++ {
		if i == 4 || i == 6 {
			randomHex += "-"
		}
		if i == 6 {
			// uuid version character special value = 0
			randomHex += fmt.Sprintf("0%01x", randomBytes[i]&0x0F)
		} else {
			randomHex += fmt.Sprintf("%02x", randomBytes[i])
		}
	}
	return randomHex
}
generator/SequenceValueGenerator.go
0.699152
0.527499
SequenceValueGenerator.go
starcoder
package money

import "errors"

// Money stores amount and currency value.
type Money struct {
	amount   *Amount
	currency *Currency
}

// calc performs the arithmetic on Amount values.
var calc = &calculator{}

// Canonical results of compare.
const (
	greaterThanCheckResult = 1
	equalCheckResult       = 0
	lessThanCheckResult    = -1
)

// New creates and returns a new Money instance.
func New(amount int64, currency *Currency) *Money {
	return &Money{
		amount:   &Amount{val: amount},
		currency: currency,
	}
}

// Currency returns the currency used by Money.
func (m *Money) Currency() *Currency {
	return m.currency
}

// Amount returns the amount value as int64.
func (m *Money) Amount() *Amount {
	return m.amount
}

// Add returns new Money struct with value representing sum of Self and Other Money.
func (m *Money) Add(money *Money) (*Money, error) {
	if err := m.assertSameCurrency(money); err != nil {
		return nil, err
	}
	return &Money{amount: calc.add(m.Amount(), money.Amount()), currency: m.currency}, nil
}

// Subtract returns new Money struct with value representing difference of Self and Other Money.
func (m *Money) Subtract(money *Money) (*Money, error) {
	if err := m.assertSameCurrency(money); err != nil {
		return nil, err
	}
	return &Money{amount: calc.subtract(m.Amount(), money.Amount()), currency: m.currency}, nil
}

// Multiply returns new Money struct with value representing Self multiplied value by multiplier.
func (m *Money) Multiply(mul int64) *Money {
	return &Money{amount: calc.multiply(m.Amount(), mul), currency: m.currency}
}

// Allocate returns slice of Money structs with split Self value in given ratios.
// It lets split money by given ratios without losing pennies: leftover
// pennies are distributed amongst the parties with the round-robin principle.
func (m *Money) Allocate(rs ...int) ([]*Money, error) {
	if len(rs) == 0 {
		return nil, errors.New("No ratios specified")
	}

	// Calculate total of ratios.
	var total int
	for _, r := range rs {
		total += r
	}

	var remainder = m.Amount().Value()
	var ms []*Money
	// "party" (not "m") to avoid shadowing the receiver inside the loop.
	for _, r := range rs {
		party := &Money{
			amount:   calc.allocate(m.Amount(), r, total),
			currency: m.currency,
		}
		ms = append(ms, party)
		remainder -= party.Amount().Value()
	}

	// BUG FIX: wrap the index so distribution is truly round-robin; the
	// previous ms[i] indexed out of range whenever the leftover exceeded
	// the number of ratios. (A negative remainder — possible with negative
	// amounts — skips this loop, as before.)
	for i := 0; i < int(remainder); i++ {
		idx := i % len(ms)
		ms[idx] = &Money{
			amount:   calc.add(ms[idx].Amount(), &Amount{1}),
			currency: ms[idx].Currency(),
		}
	}

	return ms, nil
}

// Equals checks equality between two Money instances.
func (m *Money) Equals(money *Money) (bool, error) {
	if err := m.assertSameCurrency(money); err != nil {
		return false, err
	}
	return m.compare(money) == 0, nil
}

// GreaterThan checks whether the value of Money is greater than the other.
func (m *Money) GreaterThan(money *Money) (bool, error) {
	if err := m.assertSameCurrency(money); err != nil {
		return false, err
	}
	return m.compare(money) == greaterThanCheckResult, nil
}

// GreaterThanOrEqual checks whether the value of Money is greater or equal than the other.
func (m *Money) GreaterThanOrEqual(money *Money) (bool, error) {
	if err := m.assertSameCurrency(money); err != nil {
		return false, err
	}
	return m.compare(money) >= equalCheckResult, nil
}

// LessThan checks whether the value of Money is less than the other.
func (m *Money) LessThan(money *Money) (bool, error) {
	if err := m.assertSameCurrency(money); err != nil {
		return false, err
	}
	return m.compare(money) == lessThanCheckResult, nil
}

// LessThanOrEqual checks whether the value of Money is less or equal than the other.
func (m *Money) LessThanOrEqual(money *Money) (bool, error) {
	if err := m.assertSameCurrency(money); err != nil {
		return false, err
	}
	return m.compare(money) <= equalCheckResult, nil
}

// Display returns a formatted amount string for the current currency.
func (m *Money) Display() string {
	return m.currency.Format(m.Amount().Value())
}

// assertSameCurrency returns an error unless both Money values share a currency.
func (m *Money) assertSameCurrency(money *Money) error {
	if !m.currency.equals(money.currency) {
		return errors.New("Currency don't match")
	}
	return nil
}

// compare returns 1, 0 or -1 as m's amount is greater than, equal to, or
// less than money's amount. Currencies must already be verified equal.
func (m *Money) compare(money *Money) int {
	if m.Amount().Value() > money.Amount().Value() {
		return greaterThanCheckResult
	}
	if m.Amount().Value() < money.Amount().Value() {
		return lessThanCheckResult
	}
	return equalCheckResult
}
money.go
0.846006
0.506836
money.go
starcoder
package decoder

import (
	"errors"
	"fmt"

	"aoc-2021-day16/packet"
	"aoc-2021-day16/packet/lentype"
	"aoc-2021-day16/packet/pkgtype"

	"github.com/dropbox/godropbox/container/bitvector"
)

// Decode parses one packet (literal or operator, recursively) from the
// front of vector, consuming its bits. The mutated vector is returned so
// callers can continue decoding trailing packets.
// Locals are named pkt/rest here and below — the previous code used
// variables named "packet", shadowing the imported packet package.
func Decode(vector *bitvector.BitVector) (packet.Packet, *bitvector.BitVector, error) {
	h, err := GetHeader(vector)
	if err != nil {
		return nil, nil, err
	}
	if pkgtype.IsLiteral(h.TypeID) {
		return parseLiteral(h, vector), vector, nil
	}
	pkt, err := parseOp(h, vector)
	if err != nil {
		return nil, nil, err
	}
	return pkt, vector, nil
}

// parseOp decodes an operator packet: a length descriptor followed by
// subpackets, counted either in bits or in packets.
func parseOp(h packet.Header, v *bitvector.BitVector) (packet.Packet, error) {
	lenType, err := GetLen(v)
	if err != nil {
		return nil, err
	}
	root := packet.OpPacket{
		Header:     h,
		Len:        lenType,
		Subpackets: nil,
	}
	var subpackets []packet.Packet
	switch {
	case lentype.IsLenInBits(lenType.ID):
		subpackets, err = parseWithLenInBits(lenType.Value, v)
		if err != nil {
			return nil, err
		}
	case lentype.IsNumOfSubpackets(lenType.ID):
		subpackets, err = parseWithLenInPackets(lenType.Value, v)
		if err != nil {
			return nil, err
		}
	default:
		return nil, fmt.Errorf("unknown lentype: %v", lenType.ID)
	}
	root.Subpackets = subpackets
	return root, nil
}

// parseWithLenInPackets decodes exactly maxReadLen subpackets.
// (The previous post-loop "read more packets than required" check was
// unreachable — the counter increments by one from zero, so the loop
// always exits on exact equality — and has been dropped.)
func parseWithLenInPackets(maxReadLen int, v *bitvector.BitVector) ([]packet.Packet, error) {
	subpackets := make([]packet.Packet, 0)
	for readPackets := 0; readPackets < maxReadLen; readPackets++ {
		pkt, rest, err := Decode(v)
		if err != nil {
			return nil, err
		}
		v = rest
		subpackets = append(subpackets, pkt)
	}
	return subpackets, nil
}

// parseWithLenInBits decodes subpackets until maxReadLen bits have been
// consumed; overshooting indicates corrupt input and is an error.
func parseWithLenInBits(maxReadLen int, v *bitvector.BitVector) ([]packet.Packet, error) {
	subpackets := make([]packet.Packet, 0)
	beforeReadLen := v.Length()
	for maxReadLen > beforeReadLen-v.Length() {
		pkt, rest, err := Decode(v)
		if err != nil {
			return nil, err
		}
		v = rest
		subpackets = append(subpackets, pkt)
	}
	if maxReadLen < beforeReadLen-v.Length() {
		return nil, errors.New("read more bits than required, something went wrong")
	}
	return subpackets, nil
}

// GetLen consumes an operator packet's length descriptor: a 1-bit length
// type ID followed by a fixed-width length field whose width depends on
// that ID.
func GetLen(v *bitvector.BitVector) (packet.Len, error) {
	id := getFirstBits(v, 1)
	deleteFirstBits(v, 1)
	if lentype.IsLenInBits(lentype.ID(id)) {
		bits := getFirstBits(v, lentype.BitsForLenInBits)
		deleteFirstBits(v, lentype.BitsForLenInBits)
		return packet.Len{
			ID:    lentype.ID(id),
			Value: bits,
		}, nil
	}
	if lentype.IsNumOfSubpackets(lentype.ID(id)) {
		bits := getFirstBits(v, lentype.BitsNumOfSubpackets)
		deleteFirstBits(v, lentype.BitsNumOfSubpackets)
		return packet.Len{
			ID:    lentype.ID(id),
			Value: bits,
		}, nil
	}
	return packet.Len{}, fmt.Errorf("invalid lentype.ID: %v", id)
}

// parseLiteral consumes 5-bit groups (1 continuation bit + 4 value bits)
// until the continuation bit is zero, accumulating the literal value.
func parseLiteral(h packet.Header, v *bitvector.BitVector) packet.Packet {
	value := 0
	for bit := 1; bit != 0; {
		bit = getFirstBits(v, 1)
		deleteFirstBits(v, 1)
		value = value << 4
		num := getFirstBits(v, 4)
		deleteFirstBits(v, 4)
		value += num
	}
	return packet.LVPacket{
		Header: h,
		Value:  value,
	}
}

// GetHeader consumes the 6-bit packet header: 3 version bits then 3 type
// ID bits. It fails with ErrCantParseHeaderEOF if fewer than 6 bits remain.
func GetHeader(vector *bitvector.BitVector) (packet.Header, error) {
	if vector.Length() < 6 {
		return packet.Header{}, ErrCantParseHeaderEOF
	}
	version := getFirstBits(vector, 3)
	deleteFirstBits(vector, 3)
	typeID := getFirstBits(vector, 3)
	deleteFirstBits(vector, 3)
	return packet.Header{
		Version: version,
		TypeID:  pkgtype.ID(typeID),
	}, nil
}

// getFirstBits reads the first count bits as a big-endian integer without
// consuming them. Callers must ensure count bits are available.
func getFirstBits(vector *bitvector.BitVector, count int) int {
	num := 0
	for i := 0; i < count; i++ {
		num = num << 1
		num += int(vector.Element(i))
	}
	return num
}

// deleteFirstBits consumes the first count bits of the vector.
func deleteFirstBits(vector *bitvector.BitVector, count int) {
	for i := 0; i < count; i++ {
		vector.Delete(0)
	}
}

// ErrCantParseHeaderEOF reports that fewer than 6 bits remained where a
// packet header was expected.
var ErrCantParseHeaderEOF = errors.New("cant parse header: EOF")
2021/day16/decoder/decoder.go
0.563858
0.419351
decoder.go
starcoder
package iso20022 // Chain of parties involved in the settlement of a transaction, including receipts and deliveries, book transfers, treasury deals, or other activities, resulting in the movement of a security or amount of money from one account to another. type SettlementParties49 struct { // First party in the settlement chain. In a plain vanilla settlement, it is the Central Securities Depository where the counterparty requests to receive the financial instrument or from where the counterparty delivers the financial instruments. Depository *PartyIdentification108 `xml:"Dpstry,omitempty"` // Party that, in a settlement chain interacts with the depository. Party1 *PartyIdentificationAndAccount146 `xml:"Pty1,omitempty"` // Party that, in a settlement chain interacts with the party 1. Party2 *PartyIdentificationAndAccount146 `xml:"Pty2,omitempty"` // Party that, in a settlement chain interacts with the party 2. Party3 *PartyIdentificationAndAccount146 `xml:"Pty3,omitempty"` // Party that, in a settlement chain interacts with the party 3. Party4 *PartyIdentificationAndAccount146 `xml:"Pty4,omitempty"` // Party that, in a settlement chain interacts with the party 4. 
Party5 *PartyIdentificationAndAccount146 `xml:"Pty5,omitempty"` } func (s *SettlementParties49) AddDepository() *PartyIdentification108 { s.Depository = new(PartyIdentification108) return s.Depository } func (s *SettlementParties49) AddParty1() *PartyIdentificationAndAccount146 { s.Party1 = new(PartyIdentificationAndAccount146) return s.Party1 } func (s *SettlementParties49) AddParty2() *PartyIdentificationAndAccount146 { s.Party2 = new(PartyIdentificationAndAccount146) return s.Party2 } func (s *SettlementParties49) AddParty3() *PartyIdentificationAndAccount146 { s.Party3 = new(PartyIdentificationAndAccount146) return s.Party3 } func (s *SettlementParties49) AddParty4() *PartyIdentificationAndAccount146 { s.Party4 = new(PartyIdentificationAndAccount146) return s.Party4 } func (s *SettlementParties49) AddParty5() *PartyIdentificationAndAccount146 { s.Party5 = new(PartyIdentificationAndAccount146) return s.Party5 }
SettlementParties49.go
0.679498
0.468
SettlementParties49.go
starcoder
package path

import (
	"github.com/weworksandbox/lingo"
	"github.com/weworksandbox/lingo/expr"
	"github.com/weworksandbox/lingo/expr/operator"
	"github.com/weworksandbox/lingo/expr/set"
	"github.com/weworksandbox/lingo/sql"
)

// NewFloat32WithAlias creates a float32-typed column path on table e with
// an explicit alias.
func NewFloat32WithAlias(e lingo.Table, name, alias string) Float32 {
	return Float32{
		entity: e,
		name:   name,
		alias:  alias,
	}
}

// NewFloat32 creates a float32-typed column path on table e with no alias.
func NewFloat32(e lingo.Table, name string) Float32 {
	return NewFloat32WithAlias(e, name, "")
}

// Float32 is an expression-builder for a float32 column: its methods
// construct comparison, set and SQL-generation expressions for the column.
type Float32 struct {
	entity lingo.Table
	name   string
	alias  string
}

// GetParent returns the table the column belongs to.
func (p Float32) GetParent() lingo.Table {
	return p.entity
}

// GetName returns the column name.
func (p Float32) GetName() string {
	return p.name
}

// GetAlias returns the column alias (empty if unset).
func (p Float32) GetAlias() string {
	return p.alias
}

// As returns a copy of the path with the given alias (value receiver, so
// the original is unchanged).
func (p Float32) As(alias string) Float32 {
	p.alias = alias
	return p
}

// ToSQL renders the column reference for the given dialect.
func (p Float32) ToSQL(d lingo.Dialect) (sql.Data, error) {
	return ExpandColumnWithDialect(d, p)
}

// To builds a SET assignment of a literal value to this column.
func (p Float32) To(value float32) set.Set {
	return set.To(p, expr.NewValue(value))
}

// ToExpr builds a SET assignment of an arbitrary expression to this column.
func (p Float32) ToExpr(exp lingo.Expression) set.Set {
	return set.To(p, exp)
}

// Comparison builders: each literal variant wraps the value in
// expr.NewValue; each *Path variant compares against an expression.

func (p Float32) Eq(value float32) operator.Binary {
	return operator.Eq(p, expr.NewValue(value))
}

func (p Float32) EqPath(exp lingo.Expression) operator.Binary {
	return operator.Eq(p, exp)
}

func (p Float32) NotEq(value float32) operator.Binary {
	return operator.NotEq(p, expr.NewValue(value))
}

func (p Float32) NotEqPath(exp lingo.Expression) operator.Binary {
	return operator.NotEq(p, exp)
}

func (p Float32) LT(value float32) operator.Binary {
	return operator.LessThan(p, expr.NewValue(value))
}

func (p Float32) LTPath(exp lingo.Expression) operator.Binary {
	return operator.LessThan(p, exp)
}

func (p Float32) LTOrEq(value float32) operator.Binary {
	return operator.LessThanOrEqual(p, expr.NewValue(value))
}

func (p Float32) LTOrEqPath(exp lingo.Expression) operator.Binary {
	return operator.LessThanOrEqual(p, exp)
}

func (p Float32) GT(value float32) operator.Binary {
	return operator.GreaterThan(p, expr.NewValue(value))
}

func (p Float32) GTPath(exp lingo.Expression) operator.Binary {
	return operator.GreaterThan(p, exp)
}

func (p Float32) GTOrEq(value float32) operator.Binary {
	return operator.GreaterThanOrEqual(p, expr.NewValue(value))
}

func (p Float32) GTOrEqPath(exp lingo.Expression) operator.Binary {
	return operator.GreaterThanOrEqual(p, exp)
}

// IsNull builds "column IS NULL".
func (p Float32) IsNull() operator.Unary {
	return operator.IsNull(p)
}

// IsNotNull builds "column IS NOT NULL".
func (p Float32) IsNotNull() operator.Unary {
	return operator.IsNotNull(p)
}

// Membership builders: the literal variants pass the whole values slice as
// a single parenthesized value expression.

func (p Float32) In(values ...float32) operator.Binary {
	return operator.In(p, expr.NewParens(expr.NewValue(values)))
}

func (p Float32) InPaths(exps ...lingo.Expression) operator.Binary {
	return operator.In(p, expr.NewParens(expr.ToList(exps)))
}

func (p Float32) NotIn(values ...float32) operator.Binary {
	return operator.NotIn(p, expr.NewParens(expr.NewValue(values)))
}

func (p Float32) NotInPaths(exps ...lingo.Expression) operator.Binary {
	return operator.NotIn(p, expr.NewParens(expr.ToList(exps)))
}

// Range builders for "BETWEEN first AND second" and its negation.

func (p Float32) Between(first, second float32) operator.Binary {
	return operator.Between(p, expr.NewValue(first), expr.NewValue(second))
}

func (p Float32) BetweenPaths(first, second lingo.Expression) operator.Binary {
	return operator.Between(p, first, second)
}

func (p Float32) NotBetween(first, second float32) operator.Binary {
	return operator.NotBetween(p, expr.NewValue(first), expr.NewValue(second))
}

func (p Float32) NotBetweenPaths(first, second lingo.Expression) operator.Binary {
	return operator.NotBetween(p, first, second)
}
expr/path/float32.go
0.701406
0.414543
float32.go
starcoder
package miris

import (
	"github.com/mitroadmaps/gomapinfer/common"

	"encoding/json"
	"io/ioutil"
	"sort"
)

// Detection is one bounding box on one video frame, optionally associated
// with a track via TrackID.
type Detection struct {
	FrameIdx int `json:"frame_idx"`
	TrackID  int `json:"track_id"`
	Left     int `json:"left"`
	Top      int `json:"top"`
	Right    int `json:"right"`
	Bottom   int `json:"bottom"`
	Score    float64 `json:"score,omitempty"`
}

// Bounds returns the detection's box as a common.Rectangle.
func (d Detection) Bounds() common.Rectangle {
	return common.Rectangle{
		Min: common.Point{float64(d.Left), float64(d.Top)},
		Max: common.Point{float64(d.Right), float64(d.Bottom)},
	}
}

// Equals reports whether two detections have the same frame and box.
// TrackID and Score are deliberately excluded from the comparison.
func (d Detection) Equals(other Detection) bool {
	return d.FrameIdx == other.FrameIdx && d.Left == other.Left && d.Top == other.Top && d.Right == other.Right && d.Bottom == other.Bottom
}

// Interpolate linearly interpolates a detection at frameIdx between a and
// b, keeping a's TrackID. Assumes a.FrameIdx != b.FrameIdx (division by
// zero otherwise); in-file callers always pass strictly ordered frames.
func Interpolate(a Detection, b Detection, frameIdx int) Detection {
	factor := float64(frameIdx-a.FrameIdx) / float64(b.FrameIdx-a.FrameIdx)
	d := Detection{
		FrameIdx: frameIdx,
		TrackID:  a.TrackID,
	}
	d.Left = int(factor*float64(b.Left-a.Left)) + a.Left
	d.Top = int(factor*float64(b.Top-a.Top)) + a.Top
	d.Right = int(factor*float64(b.Right-a.Right)) + a.Right
	d.Bottom = int(factor*float64(b.Bottom-a.Bottom)) + a.Bottom
	return d
}

// Densify fills every frame gap in track with interpolated detections so
// the result has one detection per frame between the endpoints.
func Densify(track []Detection) []Detection {
	var denseTrack []Detection
	for _, detection := range track {
		if len(denseTrack) > 0 {
			prev := denseTrack[len(denseTrack)-1]
			for frameIdx := prev.FrameIdx + 1; frameIdx < detection.FrameIdx; frameIdx++ {
				denseTrack = append(denseTrack, Interpolate(prev, detection, frameIdx))
			}
		}
		denseTrack = append(denseTrack, detection)
	}
	return denseTrack
}

// DensifyAt is like Densify but only inserts interpolated detections at
// the requested frame indexes (those inside the track's frame span and
// not already present).
func DensifyAt(track []Detection, indexes []int) []Detection {
	relevant := make(map[int]bool)
	for _, frameIdx := range indexes {
		if frameIdx < track[0].FrameIdx || frameIdx > track[len(track)-1].FrameIdx {
			continue
		}
		relevant[frameIdx] = true
	}
	for _, detection := range track {
		delete(relevant, detection.FrameIdx)
	}
	var denseTrack []Detection
	for _, detection := range track {
		if len(denseTrack) > 0 {
			prev := denseTrack[len(denseTrack)-1]
			for frameIdx := prev.FrameIdx + 1; frameIdx < detection.FrameIdx; frameIdx++ {
				if !relevant[frameIdx] {
					continue
				}
				denseTrack = append(denseTrack, Interpolate(prev, detection, frameIdx))
			}
		}
		denseTrack = append(denseTrack, detection)
	}
	return denseTrack
}

// FeatureVector is a fixed-size appearance embedding for a detection.
type FeatureVector [64]float64

// Distance returns the mean squared difference between two vectors.
func (v1 FeatureVector) Distance(v2 FeatureVector) float64 {
	var d float64 = 0
	for i := 0; i < len(v1); i++ {
		d += (v1[i] - v2[i]) * (v1[i] - v2[i])
	}
	return d / float64(len(v1))
}

// ActionVectorJSON is the on-disk form of ActionVector.
type ActionVectorJSON struct {
	X float64 `json:"x"`
	Y float64 `json:"y"`
	P float64 `json:"p"`
}

// ActionVector converts the JSON form into an ActionVector.
func (v ActionVectorJSON) ActionVector() ActionVector {
	return ActionVector{
		Displacement: common.Point{v.X, v.Y},
		Probability:  v.P,
	}
}

// ActionVector is a predicted per-frame displacement for a track.
type ActionVector struct {
	Displacement common.Point
	// probability that the track remains in the frame
	Probability float64
}

// ReadDetections loads a per-frame detection list from a JSON file.
// It panics on read or parse failure.
func ReadDetections(fname string) [][]Detection {
	bytes, err := ioutil.ReadFile(fname)
	if err != nil {
		panic(err)
	}
	var detections [][]Detection
	if err := json.Unmarshal(bytes, &detections); err != nil {
		panic(err)
	}
	return detections
}

// GetTracks groups per-frame detections by TrackID (ignoring negative
// IDs, i.e. untracked boxes) and returns the tracks ordered by their
// first frame.
func GetTracks(detections [][]Detection) [][]Detection {
	tracks := make(map[int][]Detection)
	for frameIdx := range detections {
		for _, detection := range detections[frameIdx] {
			if detection.TrackID < 0 {
				continue
			}
			tracks[detection.TrackID] = append(tracks[detection.TrackID], detection)
		}
	}
	var trackList [][]Detection
	for _, track := range tracks {
		trackList = append(trackList, track)
	}
	sort.Slice(trackList, func(i, j int) bool {
		return trackList[i][0].FrameIdx < trackList[j][0].FrameIdx
	})
	return trackList
}

// TracksToDetections is the inverse of GetTracks: it re-buckets track
// detections per frame, growing the outer slice as needed.
func TracksToDetections(tracks [][]Detection) [][]Detection {
	var detections [][]Detection
	for _, track := range tracks {
		for _, detection := range track {
			for len(detections) <= detection.FrameIdx {
				detections = append(detections, []Detection{})
			}
			detections[detection.FrameIdx] = append(detections[detection.FrameIdx], detection)
		}
	}
	return detections
}

// FilterByScore drops detections whose Score is below threshold,
// preserving the per-frame structure.
func FilterByScore(detections [][]Detection, threshold float64) [][]Detection {
	ndetections := make([][]Detection, len(detections))
	for frameIdx, dlist := range detections {
		for _, detection := range dlist {
			if detection.Score < threshold {
				continue
			}
			ndetections[frameIdx] = append(ndetections[frameIdx], detection)
		}
	}
	return ndetections
}

// CountDetections returns the total number of detections across frames.
func CountDetections(detections [][]Detection) int {
	var n int = 0
	for _, dlist := range detections {
		n += len(dlist)
	}
	return n
}

// GetCoarse keeps only the detections whose frame index is congruent to
// k modulo freq (i.e. samples the track at 1/freq rate with offset k).
func GetCoarse(track []Detection, freq int, k int) []Detection {
	var coarse []Detection
	for _, detection := range track {
		if detection.FrameIdx%freq != k {
			continue
		}
		coarse = append(coarse, detection)
	}
	return coarse
}

// GetAllCoarse returns GetCoarse for every offset k in [0, freq).
func GetAllCoarse(track []Detection, freq int) [][]Detection {
	var l [][]Detection
	for k := 0; k < freq; k++ {
		l = append(l, GetCoarse(track, freq, k))
	}
	return l
}
miris/detection.go
0.674908
0.415847
detection.go
starcoder
package schemes import "image/color" // Fire is a gradient color scheme from grey to white through red and // yellow. var Fire []color.Color func init() { Fire = []color.Color{ color.RGBA{R: 0xff, G: 0xff, B: 0xff, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0xfd, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0xfa, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0xf7, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0xf4, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0xf1, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0xee, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0xea, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0xe7, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0xe3, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0xdf, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0xdb, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0xd6, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0xd3, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0xce, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0xca, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0xc5, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0xc0, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0xbb, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0xb7, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0xb2, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0xac, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0xa7, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0xa3, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0x9d, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0x98, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0x93, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0x8e, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0x88, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0x84, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0x7e, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0x79, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0x74, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0x6f, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0x6a, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0x66, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0x61, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0x5b, A: 
0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0x57, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0x52, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0x4e, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0x4a, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0x46, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0x41, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0x3d, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0x39, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0x35, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0x32, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0x2e, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0x2b, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0x27, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0x26, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0x22, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0x1f, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0x1d, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0x1a, A: 0xff}, color.RGBA{R: 0xff, G: 0xff, B: 0x19, A: 0xff}, color.RGBA{R: 0xff, G: 0xfe, B: 0x17, A: 0xff}, color.RGBA{R: 0xff, G: 0xfb, B: 0x16, A: 0xff}, color.RGBA{R: 0xff, G: 0xfa, B: 0x16, A: 0xff}, color.RGBA{R: 0xff, G: 0xf7, B: 0x17, A: 0xff}, color.RGBA{R: 0xff, G: 0xf5, B: 0x17, A: 0xff}, color.RGBA{R: 0xff, G: 0xf2, B: 0x18, A: 0xff}, color.RGBA{R: 0xff, G: 0xef, B: 0x18, A: 0xff}, color.RGBA{R: 0xff, G: 0xec, B: 0x19, A: 0xff}, color.RGBA{R: 0xff, G: 0xe8, B: 0x19, A: 0xff}, color.RGBA{R: 0xff, G: 0xe5, B: 0x1a, A: 0xff}, color.RGBA{R: 0xff, G: 0xe2, B: 0x1a, A: 0xff}, color.RGBA{R: 0xff, G: 0xde, B: 0x1b, A: 0xff}, color.RGBA{R: 0xff, G: 0xda, B: 0x1b, A: 0xff}, color.RGBA{R: 0xff, G: 0xd7, B: 0x1c, A: 0xff}, color.RGBA{R: 0xff, G: 0xd2, B: 0x1c, A: 0xff}, color.RGBA{R: 0xff, G: 0xcf, B: 0x1d, A: 0xff}, color.RGBA{R: 0xff, G: 0xcb, B: 0x1d, A: 0xff}, color.RGBA{R: 0xff, G: 0xc7, B: 0x1e, A: 0xff}, color.RGBA{R: 0xff, G: 0xc2, B: 0x1e, A: 0xff}, color.RGBA{R: 0xff, G: 0xbe, B: 0x1f, A: 0xff}, color.RGBA{R: 0xff, G: 0xba, B: 0x1f, A: 0xff}, color.RGBA{R: 0xff, G: 0xb6, B: 0x20, A: 0xff}, color.RGBA{R: 0xff, G: 
0xb0, B: 0x20, A: 0xff}, color.RGBA{R: 0xff, G: 0xac, B: 0x21, A: 0xff}, color.RGBA{R: 0xff, G: 0xa8, B: 0x22, A: 0xff}, color.RGBA{R: 0xff, G: 0xa3, B: 0x22, A: 0xff}, color.RGBA{R: 0xff, G: 0x9f, B: 0x23, A: 0xff}, color.RGBA{R: 0xff, G: 0x9a, B: 0x23, A: 0xff}, color.RGBA{R: 0xff, G: 0x96, B: 0x24, A: 0xff}, color.RGBA{R: 0xff, G: 0x91, B: 0x24, A: 0xff}, color.RGBA{R: 0xff, G: 0x8d, B: 0x25, A: 0xff}, color.RGBA{R: 0xff, G: 0x88, B: 0x25, A: 0xff}, color.RGBA{R: 0xff, G: 0x84, B: 0x26, A: 0xff}, color.RGBA{R: 0xff, G: 0x80, B: 0x27, A: 0xff}, color.RGBA{R: 0xff, G: 0x7c, B: 0x27, A: 0xff}, color.RGBA{R: 0xff, G: 0x77, B: 0x28, A: 0xff}, color.RGBA{R: 0xff, G: 0x73, B: 0x28, A: 0xff}, color.RGBA{R: 0xff, G: 0x6f, B: 0x29, A: 0xff}, color.RGBA{R: 0xff, G: 0x6b, B: 0x29, A: 0xff}, color.RGBA{R: 0xff, G: 0x67, B: 0x2a, A: 0xff}, color.RGBA{R: 0xff, G: 0x63, B: 0x2a, A: 0xff}, color.RGBA{R: 0xff, G: 0x5f, B: 0x2b, A: 0xff}, color.RGBA{R: 0xff, G: 0x5c, B: 0x2c, A: 0xff}, color.RGBA{R: 0xff, G: 0x59, B: 0x2c, A: 0xff}, color.RGBA{R: 0xff, G: 0x55, B: 0x2d, A: 0xff}, color.RGBA{R: 0xff, G: 0x51, B: 0x2d, A: 0xff}, color.RGBA{R: 0xff, G: 0x4f, B: 0x2e, A: 0xff}, color.RGBA{R: 0xff, G: 0x4c, B: 0x2f, A: 0xff}, color.RGBA{R: 0xff, G: 0x48, B: 0x2f, A: 0xff}, color.RGBA{R: 0xff, G: 0x46, B: 0x30, A: 0xff}, color.RGBA{R: 0xff, G: 0x43, B: 0x30, A: 0xff}, color.RGBA{R: 0xff, G: 0x41, B: 0x31, A: 0xff}, color.RGBA{R: 0xff, G: 0x3f, B: 0x32, A: 0xff}, color.RGBA{R: 0xff, G: 0x3c, B: 0x32, A: 0xff}, color.RGBA{R: 0xff, G: 0x3b, B: 0x33, A: 0xff}, color.RGBA{R: 0xff, G: 0x39, B: 0x33, A: 0xff}, color.RGBA{R: 0xff, G: 0x37, B: 0x34, A: 0xff}, color.RGBA{R: 0xff, G: 0x37, B: 0x35, A: 0xff}, color.RGBA{R: 0xff, G: 0x35, B: 0x35, A: 0xff}, color.RGBA{R: 0xfd, G: 0x36, B: 0x36, A: 0xff}, color.RGBA{R: 0xfd, G: 0x36, B: 0x36, A: 0xff}, color.RGBA{R: 0xfb, G: 0x37, B: 0x37, A: 0xff}, color.RGBA{R: 0xfa, G: 0x38, B: 0x38, A: 0xff}, color.RGBA{R: 0xf8, G: 0x38, B: 0x38, A: 0xff}, 
color.RGBA{R: 0xf7, G: 0x39, B: 0x39, A: 0xff}, color.RGBA{R: 0xf6, G: 0x39, B: 0x39, A: 0xff}, color.RGBA{R: 0xf4, G: 0x3a, B: 0x3a, A: 0xff}, color.RGBA{R: 0xf2, G: 0x3b, B: 0x3b, A: 0xff}, color.RGBA{R: 0xf0, G: 0x3b, B: 0x3b, A: 0xff}, color.RGBA{R: 0xef, G: 0x3c, B: 0x3c, A: 0xff}, color.RGBA{R: 0xee, G: 0x3d, B: 0x3d, A: 0xff}, color.RGBA{R: 0xeb, G: 0x3d, B: 0x3d, A: 0xff}, color.RGBA{R: 0xea, G: 0x3e, B: 0x3e, A: 0xff}, color.RGBA{R: 0xe8, G: 0x3e, B: 0x3e, A: 0xff}, color.RGBA{R: 0xe5, G: 0x3f, B: 0x3f, A: 0xff}, color.RGBA{R: 0xe4, G: 0x40, B: 0x40, A: 0xff}, color.RGBA{R: 0xe2, G: 0x40, B: 0x40, A: 0xff}, color.RGBA{R: 0xe0, G: 0x41, B: 0x41, A: 0xff}, color.RGBA{R: 0xde, G: 0x42, B: 0x42, A: 0xff}, color.RGBA{R: 0xdb, G: 0x42, B: 0x42, A: 0xff}, color.RGBA{R: 0xda, G: 0x43, B: 0x43, A: 0xff}, color.RGBA{R: 0xd8, G: 0x43, B: 0x43, A: 0xff}, color.RGBA{R: 0xd5, G: 0x44, B: 0x44, A: 0xff}, color.RGBA{R: 0xd3, G: 0x45, B: 0x45, A: 0xff}, color.RGBA{R: 0xd1, G: 0x45, B: 0x45, A: 0xff}, color.RGBA{R: 0xcf, G: 0x46, B: 0x46, A: 0xff}, color.RGBA{R: 0xcd, G: 0x47, B: 0x47, A: 0xff}, color.RGBA{R: 0xcb, G: 0x47, B: 0x47, A: 0xff}, color.RGBA{R: 0xc8, G: 0x48, B: 0x48, A: 0xff}, color.RGBA{R: 0xc7, G: 0x49, B: 0x49, A: 0xff}, color.RGBA{R: 0xc4, G: 0x49, B: 0x49, A: 0xff}, color.RGBA{R: 0xc2, G: 0x4a, B: 0x4a, A: 0xff}, color.RGBA{R: 0xc0, G: 0x4a, B: 0x4a, A: 0xff}, color.RGBA{R: 0xbe, G: 0x4b, B: 0x4b, A: 0xff}, color.RGBA{R: 0xbc, G: 0x4c, B: 0x4c, A: 0xff}, color.RGBA{R: 0xba, G: 0x4c, B: 0x4c, A: 0xff}, color.RGBA{R: 0xb7, G: 0x4d, B: 0x4d, A: 0xff}, color.RGBA{R: 0xb5, G: 0x4e, B: 0x4e, A: 0xff}, color.RGBA{R: 0xb3, G: 0x4e, B: 0x4e, A: 0xff}, color.RGBA{R: 0xb1, G: 0x4f, B: 0x4f, A: 0xff}, color.RGBA{R: 0xaf, G: 0x50, B: 0x50, A: 0xff}, color.RGBA{R: 0xad, G: 0x50, B: 0x50, A: 0xff}, color.RGBA{R: 0xaa, G: 0x51, B: 0x51, A: 0xff}, color.RGBA{R: 0xa9, G: 0x52, B: 0x52, A: 0xff}, color.RGBA{R: 0xa6, G: 0x52, B: 0x52, A: 0xff}, color.RGBA{R: 0xa5, G: 0x53, B: 
0x53, A: 0xff}, color.RGBA{R: 0xa2, G: 0x53, B: 0x53, A: 0xff}, color.RGBA{R: 0xa0, G: 0x54, B: 0x54, A: 0xff}, color.RGBA{R: 0x9e, G: 0x55, B: 0x55, A: 0xff}, color.RGBA{R: 0x9c, G: 0x55, B: 0x55, A: 0xff}, color.RGBA{R: 0x9a, G: 0x56, B: 0x56, A: 0xff}, color.RGBA{R: 0x99, G: 0x57, B: 0x57, A: 0xff}, color.RGBA{R: 0x96, G: 0x57, B: 0x57, A: 0xff}, color.RGBA{R: 0x95, G: 0x58, B: 0x58, A: 0xff}, color.RGBA{R: 0x93, G: 0x59, B: 0x59, A: 0xff}, color.RGBA{R: 0x92, G: 0x5a, B: 0x5a, A: 0xff}, color.RGBA{R: 0x90, G: 0x5b, B: 0x5b, A: 0xff}, color.RGBA{R: 0x8e, G: 0x5c, B: 0x5c, A: 0xff}, color.RGBA{R: 0x8e, G: 0x5e, B: 0x5e, A: 0xff}, color.RGBA{R: 0x8d, G: 0x5f, B: 0x5f, A: 0xff}, color.RGBA{R: 0x8c, G: 0x60, B: 0x60, A: 0xff}, color.RGBA{R: 0x8b, G: 0x62, B: 0x62, A: 0xff}, color.RGBA{R: 0x8a, G: 0x63, B: 0x63, A: 0xff}, color.RGBA{R: 0x88, G: 0x64, B: 0x64, A: 0xff}, color.RGBA{R: 0x87, G: 0x65, B: 0x65, A: 0xff}, color.RGBA{R: 0x87, G: 0x67, B: 0x67, A: 0xff}, color.RGBA{R: 0x86, G: 0x68, B: 0x68, A: 0xff}, color.RGBA{R: 0x85, G: 0x69, B: 0x69, A: 0xff}, color.RGBA{R: 0x85, G: 0x6b, B: 0x6b, A: 0xff}, color.RGBA{R: 0x84, G: 0x6c, B: 0x6c, A: 0xff}, color.RGBA{R: 0x83, G: 0x6d, B: 0x6d, A: 0xff}, color.RGBA{R: 0x84, G: 0x6f, B: 0x6f, A: 0xff}, color.RGBA{R: 0x83, G: 0x70, B: 0x70, A: 0xff}, color.RGBA{R: 0x82, G: 0x71, B: 0x71, A: 0xff}, color.RGBA{R: 0x82, G: 0x72, B: 0x72, A: 0xff}, color.RGBA{R: 0x82, G: 0x74, B: 0x74, A: 0xff}, color.RGBA{R: 0x82, G: 0x75, B: 0x75, A: 0xff}, color.RGBA{R: 0x82, G: 0x76, B: 0x76, A: 0xff}, color.RGBA{R: 0x81, G: 0x77, B: 0x77, A: 0xff}, color.RGBA{R: 0x82, G: 0x79, B: 0x79, A: 0xff}, color.RGBA{R: 0x82, G: 0x7a, B: 0x7a, A: 0xff}, color.RGBA{R: 0x82, G: 0x7b, B: 0x7b, A: 0xff}, color.RGBA{R: 0x82, G: 0x7c, B: 0x7c, A: 0xff}, color.RGBA{R: 0x83, G: 0x7e, B: 0x7e, A: 0xff}, color.RGBA{R: 0x83, G: 0x7f, B: 0x7f, A: 0xff}, color.RGBA{R: 0x82, G: 0x80, B: 0x80, A: 0xff}, color.RGBA{R: 0x83, G: 0x81, B: 0x81, A: 0xff}, color.RGBA{R: 
0x84, G: 0x83, B: 0x83, A: 0xff}, color.RGBA{R: 0x84, G: 0x84, B: 0x84, A: 0xff}, color.RGBA{R: 0x85, G: 0x85, B: 0x85, A: 0xff}, color.RGBA{R: 0x86, G: 0x86, B: 0x86, A: 0xff}, color.RGBA{R: 0x87, G: 0x87, B: 0x87, A: 0xff}, color.RGBA{R: 0x88, G: 0x88, B: 0x88, A: 0xff}, color.RGBA{R: 0x8a, G: 0x8a, B: 0x8a, A: 0xff}, color.RGBA{R: 0x8b, G: 0x8b, B: 0x8b, A: 0xff}, color.RGBA{R: 0x8c, G: 0x8c, B: 0x8c, A: 0xff}, color.RGBA{R: 0x8d, G: 0x8d, B: 0x8d, A: 0xff}, color.RGBA{R: 0x8e, G: 0x8e, B: 0x8e, A: 0xff}, color.RGBA{R: 0x8f, G: 0x8f, B: 0x8f, A: 0xff}, color.RGBA{R: 0x90, G: 0x90, B: 0x90, A: 0xff}, color.RGBA{R: 0x91, G: 0x91, B: 0x91, A: 0xff}, color.RGBA{R: 0x93, G: 0x93, B: 0x93, A: 0xff}, color.RGBA{R: 0x94, G: 0x94, B: 0x94, A: 0xff}, color.RGBA{R: 0x95, G: 0x95, B: 0x95, A: 0xff}, color.RGBA{R: 0x96, G: 0x96, B: 0x96, A: 0xff}, color.RGBA{R: 0x97, G: 0x97, B: 0x97, A: 0xff}, color.RGBA{R: 0x98, G: 0x98, B: 0x98, A: 0xff}, color.RGBA{R: 0x99, G: 0x99, B: 0x99, A: 0xff}, color.RGBA{R: 0x9a, G: 0x9a, B: 0x9a, A: 0xff}, color.RGBA{R: 0x9b, G: 0x9b, B: 0x9b, A: 0xff}, color.RGBA{R: 0x9c, G: 0x9c, B: 0x9c, A: 0xff}, color.RGBA{R: 0x9d, G: 0x9d, B: 0x9d, A: 0xff}, color.RGBA{R: 0x9e, G: 0x9e, B: 0x9e, A: 0xff}, color.RGBA{R: 0x9f, G: 0x9f, B: 0x9f, A: 0xff}, color.RGBA{R: 0xa0, G: 0xa0, B: 0xa0, A: 0xff}, color.RGBA{R: 0xa0, G: 0xa0, B: 0xa0, A: 0xff}, color.RGBA{R: 0xa1, G: 0xa1, B: 0xa1, A: 0xff}, color.RGBA{R: 0xa2, G: 0xa2, B: 0xa2, A: 0xff}, color.RGBA{R: 0xa3, G: 0xa3, B: 0xa3, A: 0xff}, color.RGBA{R: 0xa4, G: 0xa4, B: 0xa4, A: 0xff}, color.RGBA{R: 0xa5, G: 0xa5, B: 0xa5, A: 0xff}, color.RGBA{R: 0xa6, G: 0xa6, B: 0xa6, A: 0xff}, color.RGBA{R: 0xa7, G: 0xa7, B: 0xa7, A: 0xff}, color.RGBA{R: 0xa7, G: 0xa7, B: 0xa7, A: 0xff}, color.RGBA{R: 0xa8, G: 0xa8, B: 0xa8, A: 0xff}, color.RGBA{R: 0xa9, G: 0xa9, B: 0xa9, A: 0xff}, color.RGBA{R: 0xaa, G: 0xaa, B: 0xaa, A: 0xff}, color.RGBA{R: 0xaa, G: 0xaa, B: 0xaa, A: 0xff}, color.RGBA{R: 0xab, G: 0xab, B: 0xab, A: 
0xff}, color.RGBA{R: 0xac, G: 0xac, B: 0xac, A: 0xff}, color.RGBA{R: 0xad, G: 0xad, B: 0xad, A: 0xff}, color.RGBA{R: 0xad, G: 0xad, B: 0xad, A: 0xff}, color.RGBA{R: 0xae, G: 0xae, B: 0xae, A: 0xff}, color.RGBA{R: 0xaf, G: 0xaf, B: 0xaf, A: 0xff}, color.RGBA{R: 0xaf, G: 0xaf, B: 0xaf, A: 0xff}, color.RGBA{R: 0xb0, G: 0xb0, B: 0xb0, A: 0xff}, color.RGBA{R: 0xb0, G: 0xb0, B: 0xb0, A: 0xff}, color.RGBA{R: 0xb1, G: 0xb1, B: 0xb1, A: 0xff}, color.RGBA{R: 0xb1, G: 0xb1, B: 0xb1, A: 0xff}, } }
schemes/fire.go
0.545286
0.667926
fire.go
starcoder
// Package mmf provides a sample match function that uses the GRPC harness to set up 1v1 matches. // This sample is a reference to demonstrate the usage of the GRPC harness and should only be used as // a starting point for your match function. You will need to modify the // matchmaking logic in this function based on your game's requirements. package mmf import ( "fmt" "log" "time" "google.golang.org/grpc" "open-match.dev/open-match/pkg/matchfunction" "open-match.dev/open-match/pkg/pb" ) var ( matchName = "a-simple-1v1-matchfunction" ) // matchFunctionService implements pb.MatchFunctionServer, the server generated // by compiling the protobuf, by fulfilling the pb.MatchFunctionServer interface. type matchFunctionService struct { grpc *grpc.Server queryServiceClient pb.QueryServiceClient port int } func makeMatches(poolTickets map[string][]*pb.Ticket) ([]*pb.Match, error) { tickets := map[string]*pb.Ticket{} for _, pool := range poolTickets { for _, ticket := range pool { tickets[ticket.GetId()] = ticket } } var matches []*pb.Match t := time.Now().Format("2006-01-02T15:04:05.00") thisMatch := make([]*pb.Ticket, 0, 2) matchNum := 0 for _, ticket := range tickets { thisMatch = append(thisMatch, ticket) if len(thisMatch) >= 2 { matches = append(matches, &pb.Match{ MatchId: fmt.Sprintf("profile-%s-time-%s-num-%d", matchName, t, matchNum), MatchProfile: matchName, MatchFunction: matchName, Tickets: thisMatch, }) thisMatch = make([]*pb.Ticket, 0, 2) matchNum++ } } return matches, nil } // Run is this match function's implementation of the gRPC call defined in api/matchfunction.proto. func (s *matchFunctionService) Run(req *pb.RunRequest, stream pb.MatchFunction_RunServer) error { // Fetch tickets for the pools specified in the Match Profile. 
log.Printf("Generating proposals for function %v", req.GetProfile().GetName()) poolTickets, err := matchfunction.QueryPools(stream.Context(), s.queryServiceClient, req.GetProfile().GetPools()) if err != nil { log.Printf("Failed to query tickets for the given pools, got %s", err.Error()) return err } // Generate proposals. proposals, err := makeMatches(poolTickets) if err != nil { log.Printf("Failed to generate matches, got %s", err.Error()) return err } log.Printf("Streaming %v proposals to Open Match", len(proposals)) // Stream the generated proposals back to Open Match. for _, proposal := range proposals { if err := stream.Send(&pb.RunResponse{Proposal: proposal}); err != nil { log.Printf("Failed to stream proposals to Open Match, got %s", err.Error()) return err } } return nil }
examples/functions/golang/soloduel/mmf/matchfunction.go
0.752559
0.448547
matchfunction.go
starcoder
package test_version1 import ( "testing" "time" "github.com/pip-services-users/pip-clients-organizations-go/version1" "github.com/stretchr/testify/assert" ) type OrganizationsClientFixtureV1 struct { Client version1.IOrganizationsClientV1 } var ORGANIZATION1 = &version1.OrganizationV1{ Id: "1", Code: "111", Name: "Organization #1", Description: "Test organization #1", CreateTime: time.Now(), CreatorId: "123", Active: true, } var ORGANIZATION2 = &version1.OrganizationV1{ Id: "2", Code: "", Name: "Organization #2", Description: "Test organization #2", CreateTime: time.Now(), CreatorId: "123", Active: true, } func NewOrganizationsClientFixtureV1(client version1.IOrganizationsClientV1) *OrganizationsClientFixtureV1 { return &OrganizationsClientFixtureV1{ Client: client, } } func (c *OrganizationsClientFixtureV1) clear() { page, _ := c.Client.GetOrganizations("", nil, nil) for _, v := range page.Data { organization := v.(*version1.OrganizationV1) c.Client.DeleteOrganizationById("", organization.Id) } } func (c *OrganizationsClientFixtureV1) TestCrudOperations(t *testing.T) { c.clear() defer c.clear() // Create one organization organization, err := c.Client.CreateOrganization("", ORGANIZATION1) assert.Nil(t, err) assert.NotNil(t, organization) assert.Equal(t, organization.Name, ORGANIZATION1.Name) assert.Equal(t, organization.Description, ORGANIZATION1.Description) assert.NotEmpty(t, organization.Code) organization1 := organization // Create another organization organization, err = c.Client.CreateOrganization("", ORGANIZATION2) assert.Nil(t, err) assert.NotNil(t, organization) assert.Equal(t, organization.Name, ORGANIZATION2.Name) assert.Equal(t, organization.Description, ORGANIZATION2.Description) assert.NotEmpty(t, organization.Code) //organization2 := organization // Get all organizations page, err1 := c.Client.GetOrganizations("", nil, nil) assert.Nil(t, err1) assert.NotNil(t, page) assert.True(t, len(page.Data) >= 2) // Get organization by code organization, err = 
c.Client.GetOrganizationByCode("", organization1.Code) assert.Nil(t, err) assert.NotNil(t, organization) assert.Equal(t, organization.Id, organization1.Id) // Generate code code, err2 := c.Client.GenerateCode("", organization1.Id) assert.Nil(t, err2) assert.NotEmpty(t, code) // Update the organization organization1.Description = "Updated Content 1" organization, err = c.Client.UpdateOrganization("", organization1) assert.Nil(t, err) assert.NotNil(t, organization) assert.Equal(t, organization.Description, "Updated Content 1") assert.Equal(t, organization.Name, organization1.Name) organization1 = organization // Delete organization organization, err = c.Client.DeleteOrganizationById("", organization1.Id) assert.Nil(t, err) // Try to get deleted organization organization, err = c.Client.GetOrganizationById("", organization1.Id) assert.Nil(t, err) assert.NotNil(t, organization) assert.True(t, organization.Deleted) //assert.Nil(t, organization) }
test/version1/OrganizationsClientFixtureV1.go
0.570571
0.435121
OrganizationsClientFixtureV1.go
starcoder
package main import ( "math" ) func mathBboxes(bbox1 []float64, bbox2 []float64) bool { if bbox1[0] == bbox2[0] { if bbox1[1] == bbox2[1] { if bbox1[2] == bbox2[2] { if bbox1[3] == bbox2[3] { return true } } } } return false } //IOU Computes IUO (Intersection Over Union) between two bboxes in the form [x1,y1,x2,y2] func IOU(bbox1 []float64, bbox2 []float64) float64 { xx1 := math.Max(bbox1[0], bbox2[0]) yy1 := math.Max(bbox1[1], bbox2[1]) //was Max xx2 := math.Min(bbox1[2], bbox2[2]) yy2 := math.Min(bbox1[3], bbox2[3]) //was Min interArea := math.Max(0., xx2-xx1+1) * math.Max(0, yy2-yy1+1) bbox1Area := (bbox1[2] - bbox1[0] + 1) * (bbox1[3] - bbox1[1] + 1) bbox2Area := (bbox2[2] - bbox2[0] + 1) * (bbox2[3] - bbox2[1] + 1) iou := interArea / (bbox1Area + bbox2Area - interArea) if math.IsNaN(iou) { iou = 0 } return iou } //RatioMatch computes how close the bbox dimensions from the two bboxes are (0-1). 1-perfect match func RatioMatch(bbox1 []float64, bbox2 []float64) float64 { w1 := (bbox1[2] - bbox1[0]) h1 := (bbox1[3] - bbox1[1]) w2 := (bbox2[2] - bbox2[0]) h2 := (bbox2[3] - bbox2[1]) r := (w1 / h1) / (w2 / h2) if math.IsNaN(r) { return 0 } if r > 1 { return 1 / r } return r } //AreaMatch computes how close the areas from the two boxes are (0-1). 
1-perfect match func AreaMatch(bbox1 []float64, bbox2 []float64) float64 { r := Area(bbox1) / Area(bbox2) if math.IsNaN(r) { return 0 } if r > 1 { return 1 / r } return r } //Area calculates area of a bounding box func Area(bbox []float64) float64 { a := bbox[2] - bbox[0] b := bbox[3] - bbox[1] return math.Abs(a * b) } //ResizeFromCenter resizes a bounding box by a scale factor from its center func ResizeFromCenter(bbox []float64, scale float64) []float64 { w := (bbox[2] - bbox[0]) h := (bbox[3] - bbox[1]) dx := (scale*w - w) / 2.0 dy := (scale*h - h) / 2.0 // fmt.Printf("bbox %v %f %f", bbox, dx, dy) bbox2 := make([]float64, 4) bbox2[0] = math.Max(bbox[0]-dx, 0) bbox2[1] = math.Max(bbox[1]-dy+h, 0) bbox2[2] = math.Min(bbox[2]+dx, 99999) bbox2[3] = math.Min(bbox[3]+dy+h, 99999) return bbox2 }
utils.go
0.628635
0.473049
utils.go
starcoder
package yamlpatch import ( "fmt" "strconv" "github.com/pkg/errors" "gopkg.in/yaml.v3" ) // container is an interface to abstract away indexing into sequence (list) and mapping (object) nodes. // Keys are strings for compatibility with JSONPatch and JSON map keys. // Sequence containers parse integer indices from the string representation. type container interface { // Get returns the child node at the key index. If the key does not exist, it returns nil, nil. Get(key string) (*yaml.Node, error) // Set overwrites the key with val. It returns an error if the key does not already exist (or index out of bounds, for sequences). Set(key string, val *yaml.Node) error // Add adds a new node to the container. It returns an error if the key already exists. Add(key string, val *yaml.Node) error // Remove removes a node from the container. It returns an error if the key does not exist. Remove(key string) error } // newContainer returns the container impl matching node.Kind. // If the node is not a Map or Sequence, an error is returned. func newContainer(node *yaml.Node) (container, error) { if node == nil { return nil, errors.Errorf("unexpected nil yaml node") } switch node.Kind { case yaml.MappingNode: return mappingContainer{node: node}, nil case yaml.SequenceNode: return sequenceContainer{node: node}, nil case yaml.DocumentNode: if len(node.Content) != 1 { return nil, errors.Errorf("unexpected yaml node: expected DocumentNode to have 1 child, got %v", node.Content) } return documentContainer{node: node}, nil case yaml.AliasNode: // Recursive call to bypass alias wrapping // TODO(maybe): Block writes to nodes accessed via alias since they may have unintended side effects. // When generating the JSONPatch for a diff, the values are fully dealiased so if two paths that share an alias // begin to differ, a change will be produced that ends up changing the alias target. This will change the // resolved value(s) for the path that was supposed to remain unchanged. 
In this case the "best" approach is // probably to copy the alias target to the original path then edit the copy and remove the alias reference. return newContainer(node.Alias) case yaml.ScalarNode: return nil, errors.Errorf("unexpected yaml node: scalar can not be a container") default: return nil, errors.Errorf("unexpected yaml node: kind %d tag %s", node.Kind, node.Tag) } } type mappingContainer struct { node *yaml.Node } func (c mappingContainer) Get(key string) (*yaml.Node, error) { if err := c.validate(); err != nil { return nil, err } _, _, valNode := c.find(key) return valNode, nil } func (c mappingContainer) Set(key string, val *yaml.Node) error { if err := c.validate(); err != nil { return err } keyIdx, _, _ := c.find(key) if keyIdx == -1 { return errors.Errorf("key %s does not exist and can not be replaced", key) } c.node.Content[keyIdx+1] = val return nil } func (c mappingContainer) Add(key string, val *yaml.Node) error { if err := c.validate(); err != nil { return err } if _, _, existingValue := c.find(key); existingValue != nil { return errors.Errorf("key %s already exists and can not be added", key) } keyNode := &yaml.Node{Kind: yaml.ScalarNode, Tag: "!!str", Value: key} c.node.Content = append(c.node.Content, keyNode, val) return nil } func (c mappingContainer) Remove(key string) error { if err := c.validate(); err != nil { return err } keyIdx, _, _ := c.find(key) if keyIdx == -1 { return errors.Errorf("key %s does not exist and can not be removed", key) } ary := make([]*yaml.Node, len(c.node.Content)-2) copy(ary[0:keyIdx], c.node.Content[0:keyIdx]) copy(ary[keyIdx:], c.node.Content[keyIdx+2:]) // overwrite Content with new array c.node.Content = ary return nil } func (c mappingContainer) find(key string) (keyIdx int, keyNode, valNode *yaml.Node) { for i := 0; i < len(c.node.Content); i += 2 { keyNode, valNode := c.node.Content[i], c.node.Content[i+1] if keyNode.Value == key { return i, keyNode, valNode } } return -1, nil, nil } func (c 
mappingContainer) validate() error { if len(c.node.Content)%2 != 0 { return errors.Errorf("expected MappingNode to have even number of children, got %d", len(c.node.Content)) } // Mapping nodes are stored as [k0,v0,k1,v1...] so we iterate two at a time. for i := 0; i < len(c.node.Content); i += 2 { keyNode := c.node.Content[i] if keyNode.Kind != yaml.ScalarNode { return errors.Errorf("jsonpatch only supports scalar mapping keys, got %d %s", keyNode.Kind, keyNode.Tag) } } return nil } type sequenceContainer struct { node *yaml.Node } func (c sequenceContainer) Get(key string) (*yaml.Node, error) { if key == "-" { return nil, nil } // Parse key into integer and index into array idx, err := parseSeqIndex(key) if err != nil { return nil, err } if idx > len(c.node.Content)-1 { // key is out of bounds return nil, nil } return c.node.Content[idx], nil } func (c sequenceContainer) Set(key string, val *yaml.Node) error { idx, err := parseSeqIndex(key) if err != nil { return err } if idx > len(c.node.Content)-1 { return errors.Errorf("set index key out of bounds (idx %d, len %d)", idx, len(c.node.Content)) } c.node.Content[idx] = val return nil } func (c sequenceContainer) Add(key string, val *yaml.Node) error { if key == "-" { c.node.Content = append(c.node.Content, val) return nil } idx, err := parseSeqIndex(key) if err != nil { return err } if idx > len(c.node.Content) { return errors.Errorf("add index key out of bounds (idx %d, len %d)", idx, len(c.node.Content)) } // create new array ary and insert val at idx ary := make([]*yaml.Node, len(c.node.Content)+1) copy(ary[0:idx], c.node.Content[0:idx]) ary[idx] = val copy(ary[idx+1:], c.node.Content[idx:]) // overwrite Content with new array c.node.Content = ary return nil } func (c sequenceContainer) Remove(key string) error { idx, err := parseSeqIndex(key) if err != nil { return err } if idx > len(c.node.Content)-1 { return errors.Errorf("remove index key out of bounds (idx %d, len %d)", idx, len(c.node.Content)) } ary := 
make([]*yaml.Node, len(c.node.Content)-1) copy(ary[0:idx], c.node.Content[0:idx]) copy(ary[idx:], c.node.Content[idx+1:]) // overwrite Content with new array c.node.Content = ary return nil } func parseSeqIndex(indexStr string) (int, error) { idx, err := strconv.Atoi(indexStr) if err != nil { return 0, errors.Wrapf(err, "index into SequenceNode with non-integer %q key", indexStr) } if idx < 0 { return 0, errors.Errorf("index into SequenceNode with negative %q key", indexStr) } return idx, nil } // documentContainer is a special container that wraps a yaml.Document. // Since documents have a single element, the 'key' argument in all methods must be the empty string "". // An error is returned if any other key is provided, since the intention is likely not to be accessing a document node. type documentContainer struct { node *yaml.Node } var errIllegalDocumentAccess = fmt.Errorf("documentContainer does not allow non-empty key access") func (c documentContainer) Get(key string) (*yaml.Node, error) { if key != "" { return nil, errIllegalDocumentAccess } if c.isEmpty() { return nil, nil } return c.node.Content[0], nil } func (c documentContainer) Set(key string, val *yaml.Node) error { if key != "" { return errIllegalDocumentAccess } if c.isEmpty() { return errors.Errorf("document value does not exist and can not be replaced") } c.node.Content[0] = val return nil } func (c documentContainer) Add(key string, val *yaml.Node) error { if key != "" { return errIllegalDocumentAccess } // If we have a 'null' node, we can overwrite it. if !c.isEmpty() { return errors.Errorf("document value already exists and can not be added") } c.node.Content[0] = val return nil } func (c documentContainer) Remove(key string) error { if key != "" { return errIllegalDocumentAccess } return errors.Errorf("document does not implement Remove()") } func (c documentContainer) isEmpty() bool { return c.node.Content[0].Kind == yaml.ScalarNode && c.node.Content[0].Tag == "!!null" }
yamlpatch/container.go
0.619701
0.514644
container.go
starcoder
package ahocorasick const ( AlphabetSize int64 = 256 // The size of the alphabet is fixed to the size of a byte. RootState int64 = 0 // The root state of the trie is always 0. EmptyCell int64 = -1 // Represents an unused cell. DefaultBase int64 = 0 // The default base for new states. ) // A TrieBuilder must be used to properly build Tries. type TrieBuilder struct { base []int64 check []int64 dict []int64 fail []int64 suff []int64 } // Create and initialize a new TrieBuilder. func NewTrieBuilder() *TrieBuilder { tb := &TrieBuilder{ base: make([]int64, 0), check: make([]int64, 0), dict: make([]int64, 0), fail: make([]int64, 0), suff: make([]int64, 0), } // Add the root state. tb.addState() return tb } // Add a new pattern to be built into the resulting Trie. func (tb *TrieBuilder) AddPattern(pattern []byte) *TrieBuilder { s := RootState for _, c := range pattern { t := tb.base[s] + EncodeByte(c) if t >= int64(len(tb.check)) || tb.check[t] == EmptyCell { // Cell is empty: expand arrays and set transition. tb.expandArrays(t) tb.check[t] = s } else if tb.check[t] == s { // Cell is in use by s, simply move on. } else { // Someone is occupying the cell. Move the occupier. o := tb.check[t] // Relocating o changes its states. So if o has a transition to s, // we must update s after relocating o. First check if o actually has // a transition to s. oc := s - tb.base[o] if tb.check[tb.base[o]+oc] != o { oc = EmptyCell // State o does not have a transition to s. } tb.relocate(o) // Update s and t if o had transitions to s. if oc != EmptyCell { s = tb.base[o] + oc t = tb.base[s] + EncodeByte(c) } // Set transition. tb.check[t] = s } // Move to next state. s = t } // Mark s as in dictionary by setting pattern len in dict. tb.dict[s] = int64(len(pattern)) return tb } // A helper method to make adding multiple patterns a little more comfortable. 
func (tb *TrieBuilder) AddPatterns(patterns [][]byte) *TrieBuilder { for _, pattern := range patterns { tb.AddPattern(pattern) } return tb } // A helper method to make adding a string pattern more comfortable. func (tb *TrieBuilder) AddString(pattern string) *TrieBuilder { return tb.AddPattern([]byte(pattern)) } // A helper method to make adding multiple string patterns a little more comfortable. func (tb *TrieBuilder) AddStrings(patterns []string) *TrieBuilder { for _, pattern := range patterns { tb.AddString(pattern) } return tb } // Build the trie. func (tb *TrieBuilder) Build() *Trie { // Initialize link arrays. tb.fail = make([]int64, len(tb.base)) tb.suff = make([]int64, len(tb.base)) for i := 0; i < len(tb.base); i++ { tb.fail[i] = EmptyCell tb.suff[i] = EmptyCell } // Root fails to itself. tb.fail[RootState] = RootState for s := int64(0); s < int64(len(tb.base)); s++ { tb.computeFailLink(s) } for s := int64(0); s < int64(len(tb.base)); s++ { tb.computeSuffLink(s) } // Should I copy these slices over or? return &Trie{ base: tb.base, check: tb.check, dict: tb.dict, fail: tb.fail, suff: tb.suff, } } func (tb *TrieBuilder) computeFailLink(s int64) { if tb.fail[s] != EmptyCell { return // Avoid computing more than one time. } p := tb.check[s] // The parent of s. if p == EmptyCell { // No transitions to s, ignore. return } else if p == s { return // If s is it's own parent. } tb.computeFailLink(p) c := s - tb.base[p] // The transition symbol to this state. if p == RootState { // If parent is root, fail to root tb.fail[s] = RootState } else { // Follow fail links (starting from parent) until we find a state f with // a transition on this states symbol (c). for f := tb.fail[p]; f > 0; f = tb.fail[f] { // Set s' fail to f's child if it has a transition. t := tb.base[f] + c if t < int64(len(tb.check)) && tb.check[t] == f { tb.fail[s] = t break } // Compute f's fail link before the next iteration. 
tb.computeFailLink(f) } // If for some reason we didn't find any fail link. if tb.fail[s] == EmptyCell { // Check if root has transition on this s' symbol. t := tb.base[RootState] + c if t < int64(len(tb.check)) && tb.check[t] == RootState { tb.fail[s] = t } else { // Else fail to root. tb.fail[s] = RootState } } } } func (tb *TrieBuilder) computeSuffLink(s int64) { // Follow fail links until we (possibly) find a state in the dictionary. for f := tb.fail[s]; f > 0; f = tb.fail[f] { if tb.dict[f] != 0 { tb.suff[s] = f return } } } func (tb *TrieBuilder) addState() { tb.base = append(tb.base, DefaultBase) tb.check = append(tb.check, EmptyCell) tb.dict = append(tb.dict, 0) } func (tb *TrieBuilder) expandArrays(n int64) { for int64(len(tb.base)) <= n { tb.addState() } } // Get all c's for which state s has a transition (that is, where check[base[s]+c] == s). func (tb *TrieBuilder) transitions(s int64) []int64 { cs := make([]int64, 0) for c := int64(0); c < AlphabetSize+1; c++ { t := tb.base[s] + (c + 1) if t < int64(len(tb.check)) && tb.check[t] == s { cs = append(cs, c+1) } } return cs } // Check wether b is a suitable base for s given it's transitions on cs. func (tb *TrieBuilder) suitableBase(b, s int64, cs []int64) bool { for _, c := range cs { t := b + int64(c) // All offsets above len(check) is of course empty. if t >= int64(len(tb.check)) { return true } if tb.check[t] != EmptyCell { return false } } return true } // Find a suitable (new) base for s. func (tb *TrieBuilder) findBase(s int64, cs []int64) int64 { for b := DefaultBase; ; b++ { if tb.suitableBase(b, s, cs) { return b } } return EmptyCell } func (tb *TrieBuilder) relocate(s int64) { // First find all symbols for which s has a transition. cs := tb.transitions(s) // Find a new suitable base for s. b := tb.findBase(s, cs) // Move the base of s to b. First we must update the transitions. for _, c := range cs { // Old t and new t'. 
t := tb.base[s] + int64(c) t_ := b + int64(c) tb.expandArrays(t_) // Ensure arrays are big enough for t'. tb.check[t_] = s // Mark s as owner of t'. tb.base[t_] = tb.base[t] // Copy base value. tb.dict[t_] = tb.dict[t] // As well as the dictionary value. // We must also update all states which had transitions from t to t'. for c := int64(0); c < AlphabetSize+1; c++ { u := tb.base[t] + (c + 1) if u >= int64(len(tb.check)) { break } if tb.check[u] == t { tb.check[u] = t_ } } // Unset old tb.check and dictionary values for t. tb.check[t] = EmptyCell tb.dict[t] = 0 } // Finally we can move the base for s. tb.base[s] = b } // EncodeByte optimizes for ASCII text by shifting to 0x41 ('A'). // Also adds one to avoid byte == 0. func EncodeByte(b byte) int64 { return ((int64(b) - 0x41 + AlphabetSize) % AlphabetSize) + 1 } func DecodeByte(e int64) byte { return byte((e+0x41)%AlphabetSize) - 1 }
builder.go
0.736969
0.47244
builder.go
starcoder
package behavioural import "fmt" /* Summary: Template Method is used to define the skeleteon of the algorithm as a sequence of operations. How the operation is performed is left to the concrete implementations. Example: SurgeryWorkflowTemplate method Let's perform a surgery workflow for a patient using this. Surgery's CareProvider interface defines the workflow. The template method executes that workflow for different concrete implementations: shalby, practo surgeries Benefit: The skeleton of the algorithm which performs certain work in some sequence is decoupled with actual implementations. This is very useful way to implement workflows and also help in testing as it works on an interface. */ // CareProvider defines what makes a care provider for surgery type CareProvider interface { // Book appointment to consult a doctor BookConsult() // Consult can lead to surgery or not Consult() // BookSurgery book the surgery BookSurgery() // Operate is used to perform the surgery Operate() // Recover is used to recover from surgery Recover() } // SurgeryWorkflowTemplate is the template method that does a sequence of work to // get the surgery done func SurgeryWorkflowTemplate(provider CareProvider) { provider.BookConsult() provider.Consult() provider.BookSurgery() provider.Operate() provider.Recover() } type shalbyHospital struct{} func NewShalbyHospital() CareProvider { return &shalbyHospital{} } func (s *shalbyHospital) BookConsult() { fmt.Printf("shalby book consult ") } func (s *shalbyHospital) Consult() { fmt.Printf("shalby consult ") } func (s *shalbyHospital) BookSurgery() { fmt.Printf("shalby book surgery ") } func (s *shalbyHospital) Operate() { fmt.Printf("shalby operate ") } func (s *shalbyHospital) Recover() { fmt.Printf("shalby recover") } type practoCareSurgery struct{} func NewPractoCareSurgery() CareProvider { return &practoCareSurgery{} } func (s *practoCareSurgery) BookConsult() { fmt.Printf("practo book consult ") } func (s *practoCareSurgery) Consult() { 
fmt.Printf("practo consult ") } func (s *practoCareSurgery) BookSurgery() { fmt.Printf("practo book surgery ") } func (s *practoCareSurgery) Operate() { fmt.Printf("practo operate ") } func (s *practoCareSurgery) Recover() { fmt.Printf("practo recover") }
behavioural/template_method.go
0.729712
0.452838
template_method.go
starcoder
package aws import ( "context" "time" "github.com/aws/aws-sdk-go/service/costexplorer" "github.com/golang/protobuf/ptypes/timestamp" "github.com/turbot/steampipe-plugin-sdk/grpc/proto" "github.com/turbot/steampipe-plugin-sdk/plugin" ) // AllCostMetrics is a constant returning all the cost metrics func AllCostMetrics() []string { return []string{ "BlendedCost", "UnblendedCost", "NetUnblendedCost", "AmortizedCost", "NetAmortizedCost", "UsageQuantity", "NormalizedUsageAmount", } } var costExplorerColumnDefs = []*plugin.Column{ { Name: "period_start", Description: "Start timestamp for this cost metric.", Type: proto.ColumnType_TIMESTAMP, }, { Name: "period_end", Description: "End timestamp for this cost metric.", Type: proto.ColumnType_TIMESTAMP, }, { Name: "estimated", Description: "Whether the result is estimated.", Type: proto.ColumnType_BOOL, }, { Name: "blended_cost_amount", Description: "This cost metric reflects the average cost of usage across the consolidated billing family. If you use the consolidated billing feature in AWS Organizations, you can view costs using blended rates.", Type: proto.ColumnType_DOUBLE, }, { Name: "blended_cost_unit", Description: "Unit type for blended costs.", Type: proto.ColumnType_STRING, }, { Name: "unblended_cost_amount", Description: "Unblended costs represent your usage costs on the day they are charged to you. 
In finance terms, they represent your costs on a cash basis of accounting.", Type: proto.ColumnType_DOUBLE, }, { Name: "unblended_cost_unit", Description: "Unit type for unblended costs.", Type: proto.ColumnType_STRING, }, { Name: "net_unblended_cost_amount", Description: "This cost metric reflects the unblended cost after discounts.", Type: proto.ColumnType_DOUBLE, }, { Name: "net_unblended_cost_unit", Description: "Unit type for net unblended costs.", Type: proto.ColumnType_STRING, }, { Name: "amortized_cost_amount", Description: "This cost metric reflects the effective cost of the upfront and monthly reservation fees spread across the billing period. By default, Cost Explorer shows the fees for Reserved Instances as a spike on the day that you're charged, but if you choose to show costs as amortized costs, the costs are amortized over the billing period. This means that the costs are broken out into the effective daily rate. AWS estimates your amortized costs by combining your unblended costs with the amortized portion of your upfront and recurring reservation fees.", Type: proto.ColumnType_DOUBLE, }, { Name: "amortized_cost_unit", Description: "Unit type for amortized costs.", Type: proto.ColumnType_STRING, }, { Name: "net_amortized_cost_amount", Description: "This cost metric amortizes the upfront and monthly reservation fees while including discounts such as RI volume discounts.", Type: proto.ColumnType_DOUBLE, }, { Name: "net_amortized_cost_unit", Description: "Unit type for net amortized costs.", Type: proto.ColumnType_STRING, }, { Name: "usage_quantity_amount", Description: "The amount of usage that you incurred. NOTE: If you return the UsageQuantity metric, the service aggregates all usage numbers without taking into account the units. For example, if you aggregate usageQuantity across all of Amazon EC2, the results aren't meaningful because Amazon EC2 compute hours and data transfer are measured in different units (for example, hours vs. 
GB).", Type: proto.ColumnType_DOUBLE, }, { Name: "usage_quantity_unit", Description: "Unit type for usage quantity.", Type: proto.ColumnType_STRING, }, { Name: "normalized_usage_amount", Description: "The amount of usage that you incurred, in normalized units, for size-flexible RIs. The NormalizedUsageAmount is equal to UsageAmount multiplied by NormalizationFactor.", Type: proto.ColumnType_DOUBLE, }, { Name: "normalized_usage_unit", Description: "Unit type for normalized usage.", Type: proto.ColumnType_STRING, }, } // append the common aws cost explorer columns onto the column list func costExplorerColumns(columns []*plugin.Column) []*plugin.Column { return append(columns, costExplorerColumnDefs...) } //// LIST FUNCTION func streamCostAndUsage(ctx context.Context, d *plugin.QueryData, params *costexplorer.GetCostAndUsageInput) (interface{}, error) { logger := plugin.Logger(ctx) logger.Trace("streamCostAndUsage") // Create session svc, err := CostExplorerService(ctx, d) if err != nil { return nil, err } // List call morePages := true for morePages { output, err := svc.GetCostAndUsage(params) if err != nil { logger.Error("streamCostAndUsage", "err", err) return nil, err } // stream the results... for _, row := range buildCEMetricRows(ctx, output, d.KeyColumnQuals) { d.StreamListItem(ctx, row) } // get more pages if there are any... 
if output.NextPageToken == nil { morePages = false break } params.SetNextPageToken(*output.NextPageToken) } return nil, nil } func buildCEMetricRows(ctx context.Context, costUsageData *costexplorer.GetCostAndUsageOutput, keyQuals map[string]*proto.QualValue) []CEMetricRow { logger := plugin.Logger(ctx) logger.Trace("buildCEMetricRows") var rows []CEMetricRow for _, result := range costUsageData.ResultsByTime { // If there are no groupings, create a row from the totals if len(result.Groups) == 0 { var row CEMetricRow row.Estimated = result.Estimated row.PeriodStart = result.TimePeriod.Start row.PeriodEnd = result.TimePeriod.End row.setRowMetrics(result.Total) rows = append(rows, row) } // make a row per group for _, group := range result.Groups { var row CEMetricRow row.Estimated = result.Estimated row.PeriodStart = result.TimePeriod.Start row.PeriodEnd = result.TimePeriod.End if len(group.Keys) > 0 { row.Dimension1 = group.Keys[0] if len(group.Keys) > 1 { row.Dimension2 = group.Keys[1] } } row.setRowMetrics(group.Metrics) rows = append(rows, row) } } return rows } // CEMetricRow is the flattened, aggregated value for a metric. type CEMetricRow struct { Estimated *bool // The time period that the result covers. 
PeriodStart *string PeriodEnd *string Dimension1 *string Dimension2 *string //Tag *string BlendedCostAmount *string UnblendedCostAmount *string NetUnblendedCostAmount *string AmortizedCostAmount *string NetAmortizedCostAmount *string UsageQuantityAmount *string NormalizedUsageAmount *string BlendedCostUnit *string UnblendedCostUnit *string NetUnblendedCostUnit *string AmortizedCostUnit *string NetAmortizedCostUnit *string UsageQuantityUnit *string NormalizedUsageUnit *string } func (row *CEMetricRow) setRowMetrics(metrics map[string]*costexplorer.MetricValue) { if metrics["BlendedCost"] != nil { row.BlendedCostAmount = metrics["BlendedCost"].Amount row.BlendedCostUnit = metrics["BlendedCost"].Unit } if metrics["UnblendedCost"] != nil { row.UnblendedCostAmount = metrics["UnblendedCost"].Amount row.UnblendedCostUnit = metrics["UnblendedCost"].Unit } if metrics["NetUnblendedCost"] != nil { row.NetUnblendedCostAmount = metrics["NetUnblendedCost"].Amount row.NetUnblendedCostUnit = metrics["NetUnblendedCost"].Unit } if metrics["AmortizedCost"] != nil { row.AmortizedCostAmount = metrics["AmortizedCost"].Amount row.AmortizedCostUnit = metrics["AmortizedCost"].Unit } if metrics["NetAmortizedCost"] != nil { row.NetAmortizedCostAmount = metrics["NetAmortizedCost"].Amount row.NetAmortizedCostUnit = metrics["NetAmortizedCost"].Unit } if metrics["UsageQuantity"] != nil { row.UsageQuantityAmount = metrics["UsageQuantity"].Amount row.UsageQuantityUnit = metrics["UsageQuantity"].Unit } if metrics["NormalizedUsageAmount"] != nil { row.NormalizedUsageAmount = metrics["NormalizedUsageAmount"].Amount row.NormalizedUsageUnit = metrics["NormalizedUsageAmount"].Unit } } func getCEStartDateForGranularity(granularity string) time.Time { switch granularity { case "DAILY", "MONTHLY": // 1 year return time.Now().AddDate(-1, 0, 0) case "HOURLY": // 13 days return time.Now().AddDate(0, 0, -13) } return time.Now().AddDate(0, 0, -13) } type CEQuals struct { // Quals stuff SearchStartTime 
*timestamp.Timestamp SearchEndTime *timestamp.Timestamp Granularity string DimensionType1 string DimensionType2 string } func hydrateCostAndUsageQuals(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) { plugin.Logger(ctx).Trace("hydrateKeyQuals") //plugin.Logger(ctx).Warn("hydrateKeyQuals", "d.KeyColumnQuals", d.KeyColumnQuals) return &CEQuals{ SearchStartTime: d.KeyColumnQuals["search_start_time"].GetTimestampValue(), SearchEndTime: d.KeyColumnQuals["search_end_time"].GetTimestampValue(), Granularity: d.KeyColumnQuals["granularity"].GetStringValue(), DimensionType1: d.KeyColumnQuals["dimension_type_1"].GetStringValue(), DimensionType2: d.KeyColumnQuals["dimension_type_2"].GetStringValue(), }, nil }
aws/cost_explorer.go
0.765944
0.46041
cost_explorer.go
starcoder
package neuralnet func multMatrixVect(m [][]float64, v []float64) []float64 { result := make([]float64, len(m)) for i := range result { var sum float64 for j := range v { sum += m[i][j] * v[j] } result[i] = sum } return result } func applySigmoidVector(v []float64) { for i := range v { v[i] = Sigmoid(v[i]) } } func sigmoidDiffVector(v []float64) []float64 { res := make([]float64, len(v)) for i, val := range v { res[i] = SigmoidDiff(val) } return res } func multTransposeMatVector(m [][]float64, v []float64) []float64 { row, col := len(m), len(m[0]) res := make([]float64, col) for i := 0; i < col; i++ { sum := float64(0) for j := 0; j < row; j++ { sum += m[j][i] * v[j] } res[i] = sum } return res } func hadamardProdVect(v1 []float64, v2 []float64) []float64 { res := make([]float64, len(v1)) for i := range v1 { res[i] = v1[i] * v2[i] } return res } func multVectTransposeVect(v1 []float64, v2 []float64) [][]float64 { res := make([][]float64, len(v1)) for i := range v1 { res[i] = make([]float64, len(v2)) for j := range v2 { res[i][j] = v1[i] * v2[j] } } return res } // subVects returns v1-v2. // Panics if the vector sizes don't match. func subVects(v1, v2 []float64) []float64 { if len(v1) != len(v2) { panic("neuralnet - subVects: vector sizes don't match") } res := make([]float64, len(v1)) for i := range res { res[i] = v1[i] - v2[i] } return res } // addVects returns v1+v2. // Panics if sizes don't match. func addVects(v1, v2 []float64) []float64 { if len(v1) != len(v2) { panic("neuralnet - addVects: vector sizes don't match") } res := make([]float64, len(v1)) for i := range res { res[i] = v1[i] + v2[i] } return res } // sigmoidVect return sigmoid(v). func sigmoidVect(v []float64) []float64 { res := make([]float64, len(v)) for i, val := range v { res[i] = Sigmoid(val) } return res } // multScalarVect return x*v. 
func multScalarVect(x float64, v []float64) []float64 { res := make([]float64, len(v)) for i, val := range v { res[i] = x * val } return res } // addVectOfVects adds vectors in another vector and returns the sum. func addVectOfVects(x, y [][]float64) [][]float64 { if len(x) != len(y) { panic("neuralnet: addVectOfVects: lenghs don't match") } res := make([][]float64, len(x)) for i := range x { if len(x[i]) != len(y[i]) { panic("neuralnet: addVectOfVects: lenghs don't match") } res[i] = make([]float64, len(x[i])) for j := range x[i] { res[i][j] = x[i][j] + y[i][j] } } return res } // addVectOfMats adds the matrices presents in both vectors and returns the sum. func addVectOfMats(v1, v2 [][][]float64) [][][]float64 { if len(v1) != len(v2) { panic("neuralnet: addVectOfMats: lenghs don't match" + string(len(v1)) + "!=" + string(len(v2))) } res := make([][][]float64, len(v1)) for i := range v1 { res[i] = addVectOfVects(v1[i], v2[i]) } return res }
linearalg.go
0.676086
0.528351
linearalg.go
starcoder
package token import ( "fmt" ) // Pos encodes a line, column and offset into a uint with max sizes of 4095x2 // and 127 in order Col (12 bits) | Off (12 bits) | Line (8 bits). type Pos uint // Zero is the zero position. const ( Zero Pos = (1 << 20) | 1 ) // At returns a position set to the given line, column and offset. func At(line, column, offset int) Pos { var p Pos return p.Set(line, column, offset) } func (p *Pos) Set(l, c, o int) Pos { *p = Pos((c << 20) | (o << 8) | l) return *p } func (p *Pos) Inc(l, c, o int) Pos { p.Set(p.Line()+l, p.Column()+c, p.Offset()+o) return *p } // Valid returns true if the Line and Column are non-zero. func (p Pos) Valid() bool { return p.Line() > 0 && p.Column() > 0 && p.Offset() > -1 } // Line returns the line number starting from 1. func (p Pos) Line() int { return int(p & 0x000000ff) } // Column returns the column number starting from 1. func (p Pos) Column() int { return int(p & 0xfff00000 >> 20) } // Offset returns the byte offset starting from 0. func (p Pos) Offset() int { return int(p & 0x000fff00 >> 8) } // String returns the string representation of a position in the form of // line:column [+|-offset] while omitting zero value offset (0) or line (1). func (p Pos) String() string { l, c, o := p.Line(), p.Column(), p.Offset() switch { case l <= 0 || c <= 0 || o < 0: return `?` case l > 1 && o > 0: return fmt.Sprintf("%d:%d (byte %d)", l, c, o) case o > 0: return fmt.Sprintf("rune %d (byte %d)", c, o) default: return fmt.Sprintf("rune %d", c) } } // GoString returns a clearer syntax for code form using token.At. func (p Pos) GoString() string { return fmt.Sprintf("token.At(%d, %d, %d)", p.Line(), p.Column(), p.Offset()) } // Token represents a single lexical token in a route pattern. type Token struct { Lex Lexeme Lit string Beg, End Pos } // Valid returns true if Lexeme, beg and end are all valid. 
func (t Token) Valid() bool { return t.Beg.Valid() && t.End.Valid() && t.Lex.Valid() } // String returns the string representation of this Token. func (t Token) String() string { switch lex := t.Lex.String(); { case !t.Beg.Valid() && t.Lit == ``: return fmt.Sprintf("token (%v)", lex) case t.Lit == ``: return fmt.Sprintf("token (%v) at %v", lex, t.Beg) default: return fmt.Sprintf("token %q (%v) at %v", t.Lit, lex, t.Beg) } } // Tokens is a slice of tokens. type Tokens []Token // Lexemes returns each tokens lexeme. func (t Tokens) Lexemes() Lexemes { l := make(Lexemes, len(t)) for i := 0; i < len(t); i++ { l[i] = t[i].Lex } return l } // Join returns the string representation by using each the lexeme of each token // to call the Join method of Lexemes. func (t Tokens) Join(s string) string { switch len(t) { case 0: return `(NONE)` case 1: return t[0].String() } beg, end := t[0].Beg, t[len(t)-1].End if !end.Valid() { end = t[len(t)-1].Beg } return fmt.Sprintf(`%v from %v to %v`, t.Lexemes().Join(s), beg, end) } // String returns the string representation for a slice of Token. func (t Tokens) String() string { return t.Join(`, `) }
internal/token/token.go
0.834069
0.574335
token.go
starcoder
// Package sortedset implements an AVL tree with ordered set behaviors. package sortedset import ( "fmt" coll "github.com/maguerrido/collection" ) // node of a AVL tree. type node struct { // value stored in the node. value interface{} // left points to a node smaller than this. // right points to a node greater than this. left, right *node // h is the current height (number of levels in the AVL tree). // len is the current length (number of nodes). h, len int } // clear sets the properties of the node to its zero values. // Time complexity: O(1). func (n *node) clear() { n.value, n.left, n.right, n.h, n.len = nil, nil, nil, 0, 0 } // SortedSet represents a AVL tree. // The zero value for SortedSet is an empty SortedSet ready to use. type SortedSet struct { // root points to the root node in the AVL tree. root *node } // balance returns the balance of the particular AVL tree 'n'. // If 'n' equals nil, then return 0. // Time complexity: O(1). func balance(n *node) int { if n == nil { return 0 } return height(n.left) - height(n.right) } // height returns the height of the particular AVL tree 'n'. // If 'n' equals nil, then return 0. // Time complexity: O(1). func height(n *node) int { if n == nil { return 0 } return n.h } // leftRotate do the avl tree left rotate with 'n' as a root. // Time complexity: O(1). func leftRotate(n *node) *node { root := n.right tree := root.left root.left = n n.right = tree n.h = 1 + maxInt(height(n.left), height(n.right)) n.len = 1 + length(n.left) + length(n.right) root.h = 1 + maxInt(height(root.left), height(root.right)) root.len = 1 + length(root.left) + length(root.right) return root } // length returns the length of the particular AVL tree 'n'. // If 'n' equals nil, then return 0. // Time complexity: O(1). func length(n *node) int { if n == nil { return 0 } return n.len } // maxInt returns the biggest integer between 'a' and 'b'. // Time complexity: O(1). 
func maxInt(a, b int) int { if a > b { return a } else { return b } } // min returns the minimum value in the avl tree 'n'. // Time complexity: O(log(n)), where n is the current length of the AVL tree. func min(n *node) *node { if n == nil { return nil } for n.left != nil { n = n.left } return n } // rightRotate do the avl tree right rotate with 'n' as a root. // Time complexity: O(1). func rightRotate(n *node) *node { root := n.left tree := root.right root.right = n n.left = tree n.h = 1 + maxInt(height(n.left), height(n.right)) n.len = 1 + length(n.left) + length(n.right) root.h = 1 + maxInt(height(root.left), height(root.right)) root.len = 1 + length(root.left) + length(root.right) return root } // New returns a new SortedSet ready to use. // Time complexity: O(1). func New() *SortedSet { return new(SortedSet) } // NewBySlice returns a new SortedSet with the values stored in the slice. // The comparison to order the values is defined by the parameter 'compare'. // The function 'compare' must return a negative int, zero, or a positive int as 'v1' is less than, equal to, or //greater than 'v2'. // Time complexity: O(n*log(n)), where n is the current length of the slice. func NewBySlice(values []interface{}, compare func(v1, v2 interface{}) int) *SortedSet { s := New() for _, v := range values { s.Push(v, compare) } return s } // Clone returns a new cloned SortedSet. // Time complexity: O(n), where n is the current length of the set. func (s *SortedSet) Clone() *SortedSet { clone := New() clone.root = cloneRecursive(s.root) return clone } // cloneRecursive is an auxiliary recursive function of the SortedSet Clone method. func cloneRecursive(nS *node) *node { if nS == nil { return nil } left := cloneRecursive(nS.left) right := cloneRecursive(nS.right) return &node{value: nS.value, left: left, right: right, h: nS.h, len: nS.len} } // Contains returns true if the value 'v' belongs to the set. // The comparison to order the values is defined by the parameter 'compare'. 
// The function 'compare' must return a negative int, zero, or a positive int as 'v1' is less than, equal to, or //greater than 'v2'. // Time complexity: O(log(n)), where n is the current length of the set. func (s *SortedSet) Contains(v interface{}, compare func(v1, v2 interface{}) int) bool { return containsRecursive(v, s.root, compare) } // containsRecursive is an auxiliary recursive function of the SortedSet Contains method. func containsRecursive(v interface{}, n *node, compare func(v1, v2 interface{}) int) bool { if n == nil { return false } switch diff := compare(v, n.value); { case diff < 0: return containsRecursive(v, n.left, compare) case diff > 0: return containsRecursive(v, n.right, compare) default: // diff == 0 return true } } // Do gets the first (minor) value and performs all the procedures, then repeats it with the rest of the values. // The set retains its original state. // Time complexity: O(n*p), where n is the current length of the set and p is the number of procedures. func (s *SortedSet) Do(procedures ...func(v interface{})) { doRecursive(s.root, procedures...) } // doRecursive is an auxiliary recursive function of the SortedSet Do method. func doRecursive(n *node, procedures ...func(v interface{})) { if n == nil { return } doRecursive(n.left, procedures...) for _, procedure := range procedures { procedure(n.value) } doRecursive(n.right, procedures...) } // IsEmpty returns true if the set has no values. // Time complexity: O(1). func (s *SortedSet) IsEmpty() bool { return s.root == nil } func (s *SortedSet) Iterator() coll.Iterator { slice := make([]*node, 0, s.Len()) nodes(s.root, &slice) return &iterator{ nodes: slice, index: -1, lastCommand: -1, lastHasNext: false, } } func nodes(root *node, slice *[]*node) { if root == nil { return } nodes(root.left, slice) *slice = append(*slice, root) nodes(root.right, slice) } // Len returns the current length of the set. // Time complexity: O(1). 
func (s *SortedSet) Len() int { if s.IsEmpty() { return 0 } return s.root.len } // Max returns the maximum value of the set. // Time complexity: O(log(n)), where n is the current length of the set. func (s *SortedSet) Max() interface{} { if s.IsEmpty() { return nil } n := s.root for n.right != nil { n = n.right } return n.value } // Min returns the minimum value of the set. // Time complexity: O(log(n)), where n is the current length of the set. func (s *SortedSet) Min() interface{} { if s.IsEmpty() { return nil } return min(s.root).value } // Push inserts the value 'v' in an orderly way. // The comparison to order the values is defined by the parameter 'compare'. // The function 'compare' must return a negative int, zero, or a positive int as 'v1' is less than, equal to, or //greater than 'v2'. // Time complexity: O(log(n)), where n is the current length of the set. func (s *SortedSet) Push(v interface{}, compare func(v1, v2 interface{}) int) { s.root = pushRecursive(v, s.root, compare) } // pushRecursive is an auxiliary recursive function of the SortedSet Push method. func pushRecursive(v interface{}, n *node, compare func(v1, v2 interface{}) int) *node { if n == nil { return &node{value: v, left: nil, right: nil, h: 1, len: 1} } switch diff := compare(v, n.value); { case diff < 0: n.left = pushRecursive(v, n.left, compare) case diff > 0: n.right = pushRecursive(v, n.right, compare) case diff == 0: n.value = v return n } n.h = 1 + maxInt(height(n.left), height(n.right)) n.len = 1 + length(n.left) + length(n.right) balance := balance(n) if balance > 1 { if compare(v, n.left.value) < 0 { // case: left-left return rightRotate(n) } else { // case: left-right n.left = leftRotate(n.left) return rightRotate(n) } } if balance < -1 { if compare(v, n.right.value) > 0 { // case: right-right return leftRotate(n) } else { // case: right-left n.right = rightRotate(n.right) return leftRotate(n) } } return n } // Remove removes the value 'v' from the set. 
// The comparison to order the values is defined by the parameter 'compare'. // The function 'compare' must return a negative int, zero, or a positive int as 'v1' is less than, equal to, or //greater than 'v2'. // Time complexity: O(log(n)), where n is the current length of the set. func (s *SortedSet) Remove(v interface{}, compare func(v1, v2 interface{}) int) bool { var removed bool s.root, removed = removeRecursive(v, s.root, compare) return removed } // removeRecursive is an auxiliary recursive function of the SortedSet Remove method. func removeRecursive(v interface{}, n *node, compare func(v1, v2 interface{}) int) (*node, bool) { if n == nil { return nil, false } removed := false switch diff := compare(v, n.value); { case diff < 0: n.left, removed = removeRecursive(v, n.left, compare) case diff > 0: n.right, removed = removeRecursive(v, n.right, compare) case diff == 0: removed = true var temp *node // case: node with only one child or no child if n.left == nil || n.right == nil { if n.left == nil { temp = n.right } else { temp = n.left } if temp == nil { // case: no child temp = n n.clear() n = nil } else { // case: one child n.value = temp.value temp.clear() } } else { // case: node with two children temp = min(n.right) n.value = temp.value n.right, _ = removeRecursive(temp.value, n.right, compare) } } // case: no child (removed) if n == nil { return nil, removed } n.h = 1 + maxInt(height(n.left), height(n.right)) n.len = 1 + length(n.left) + length(n.right) balanceTreeNode := balance(n) if balanceTreeNode > 1 { if balance(n.left) >= 0 { // case: left-left return rightRotate(n), removed } else { // case: left-right n.left = leftRotate(n.left) return rightRotate(n), removed } } if balanceTreeNode < -1 { if balance(n.right) <= 0 { // case: right-right return leftRotate(n), removed } else { // case: right-left n.right = rightRotate(n.right) return leftRotate(n), removed } } return n, removed } // RemoveAll sets the properties of the set to its zero values. 
// Time complexity: O(1). func (s *SortedSet) RemoveAll() { s.root = nil } // Slice returns a new slice with the values stored in the set keeping its order. // The set retains its original state. // Time complexity: O(n), where n is the current length of the set. func (s *SortedSet) Slice() []interface{} { values := make([]interface{}, 0, s.Len()) sliceRecursive(s.root, &values) return values } // sliceRecursive is an auxiliary recursive function of the SortedSet Slice method. func sliceRecursive(n *node, values *[]interface{}) { if n == nil { return } sliceRecursive(n.left, values) *values = append(*values, n.value) sliceRecursive(n.right, values) } // String returns a representation of the set as a string. // SortedSet implements the fmt.Stringer interface. // Time complexity: O(n), where n is the current length of the set. func (s *SortedSet) String() string { if s.IsEmpty() { return "[]" } str := "[" + stringRecursive(s.root) return str[:len(str)-1] + "]" } // stringRecursive is an auxiliary recursive function of the SortedSet String method. 
// stringRecursive renders the subtree in order, emitting each value followed
// by a single space; the caller (String) trims the final space.
// NOTE(review): recursive string concatenation is O(n^2) in the worst case; a
// strings.Builder threaded through the recursion would be linear.
func stringRecursive(n *node) string {
	if n == nil {
		return ""
	}
	return stringRecursive(n.left) + fmt.Sprintf("%v ", n.value) + stringRecursive(n.right)
}

// iterator walks an in-order snapshot of the set's nodes.
type iterator struct {
	nodes       []*node // in-order snapshot taken at iterator creation
	index       int     // index of the value last returned by Next
	lastCommand int     // last command issued, used to enforce HasNext-before-Next
	lastHasNext bool    // cached result of the most recent HasNext call
}

const (
	iteratorCommandHasNext = 0
	iteratorCommandNext    = 1
	// iteratorCommandRemove = 2
)

// ForEach applies action to every value in the snapshot (regardless of the
// current iteration position). A nil action is a no-op.
func (i *iterator) ForEach(action func(v *interface{})) {
	if action != nil {
		for _, n := range i.nodes {
			if n != nil {
				action(&n.value)
			}
		}
	}
}

// HasNext reports whether another value is available and records that it was
// called, so Next can enforce the HasNext-then-Next protocol.
func (i *iterator) HasNext() bool {
	i.lastCommand = iteratorCommandHasNext
	i.lastHasNext = i.index < len(i.nodes)-1
	return i.lastHasNext
}

// Next returns the next value. It fails unless the immediately preceding call
// was HasNext and that call returned true.
// NOTE(review): fmt.Errorf with a non-constant format string trips `go vet`'s
// printf check; errors.New(coll.ErrorIteratorNext) would be safer if the
// message contains no verbs — confirm and adjust file-wide.
func (i *iterator) Next() (interface{}, error) {
	if i.lastCommand != iteratorCommandHasNext {
		return nil, fmt.Errorf(coll.ErrorIteratorNext)
	} else if !i.lastHasNext {
		return nil, fmt.Errorf(coll.ErrorIteratorHasNext)
	}
	i.index++
	i.lastCommand = iteratorCommandNext
	return i.nodes[i.index].value, nil
}

// Remove is not supported by this iterator and always returns an error.
func (i *iterator) Remove() error {
	return fmt.Errorf(coll.ErrorIteratorRemoveNotSupported)
}
sortedset/sortedset.go
0.901724
0.587411
sortedset.go
starcoder
package main

import (
	"fmt"

	"github.com/rooklift/sgf"
)

// Walkthrough of the sgf API: building a tree, playing moves, variations,
// passes, direct property editing, and saving/loading files.
func main() {
	// Start a new game tree and get the root node...
	root := sgf.NewTree(19)
	node := root

	// Here we create the ancient Chinese pattern...
	node.AddValue("AB", sgf.Point(3, 3))
	node.AddValue("AB", sgf.Point(15, 15))
	node.AddValue("AW", sgf.Point(15, 3))
	node.AddValue("AW", sgf.Point(3, 15))

	// The normal way to create new nodes is by playing moves.
	// If successful, Play() returns the new node.
	node, err := node.Play("cf") // "cf" is SGF-speak
	fmt.Printf("%v\n", err)      // Prints nil (no error)

	// We can get an SGF coordinate (e.g. "cf") by calling Point().
	// Note that the coordinate system is zeroth-based, from the top left.
	node, err = node.Play(sgf.Point(2, 5))
	fmt.Printf("%v\n", err) // Already filled

	// Illegal moves (including suicide and basic ko) will return an error.
	// As a convenience, Play() returns the original node in this case.
	// You may still wish to check for errors...
	node, err = node.Play(sgf.Point(19, 19))
	fmt.Printf("%v\n", err) // Off-board

	// We can create variations from any node.
	node = node.Parent()
	node.Play(sgf.Point(13, 2)) // Create variation 1
	node.Play(sgf.Point(16, 5)) // Create variation 2

	// Colours are determined intelligently, but we can always force a colour.
	node.PlayColour(sgf.Point(2, 5), sgf.WHITE) // Create variation 3

	// We can iterate through a node's children.
	for i, child := range node.Children() {
		child.SetValue("C", fmt.Sprintf("Comment %d", i))
	}

	// And we can go down those variations if we wish.
	// (Errors ignored here for simplicity.)
	node, _ = node.Play(sgf.Point(5, 16)) // Create variation 4 and go down it
	node, _ = node.Play(sgf.Point(2, 12)) // ...continue going down it
	node, _ = node.Play(sgf.Point(3, 17)) // ...continue going down it

	// Passes are a thing.
	// Doing the same action on the same node many times just returns the first-created child each time.
	foo := node.Pass()
	bar := node.Pass() // Does not create a new node
	node = node.Pass() // Does not create a new node
	fmt.Printf("%v, %v\n", foo == bar, bar == node) // true, true

	// We can directly manipulate SGF properties...
	// We can also examine the board.
	node.SetValue("C", "White passed. Lets highlight all white stones for some reason...")
	board := node.Board() // Note that this is a deep copy
	for x := 0; x < board.Size; x++ {
		for y := 0; y < board.Size; y++ {
			if board.State[x][y] == sgf.WHITE {
				node.AddValue("TR", sgf.Point(x, y))
			}
		}
	}

	// It is also possible to directly manage node creation and properties,
	// though this has no legality checks...
	node = sgf.NewNode(node) // Specify the parent
	node.AddValue("B", "dj")

	// It is possible to edit board-altering properties even if a node has
	// children. All cached boards in descendent nodes will be cleared, and
	// remade as needed.
	root.AddValue("AB", "jj")   // Editing the root.
	node, err = node.Play("jj") // Trying to play at the current node.
	fmt.Printf("%v\n", err)     // Prints the err: jj is not empty.

	// We can adjust the tree so this branch we are on is the main line...
	node.MakeMainLine()

	// Calling Save() will save the entire tree, regardless of node position.
	// NOTE(review): Save's error is ignored — fine in a demo, real code should check it.
	node.Save("foo.sgf")

	// We can also load files.
	node, err = sgf.Load("foo.sgf")
}
examples/basic.go
0.637595
0.423637
basic.go
starcoder
// Package-level equality helpers for the fp primitive and container aliases.
// Primitive aliases compare with ==; Option/Array/List wrappers delegate to
// their own Equals methods. These functions exist so equality can be passed
// around as a first-class function value.
package fp

// Equality on primitive type aliases (compared with ==).
func BoolEquals(a, b Bool) bool { return a == b }
func StringEquals(a, b String) bool { return a == b }
func IntEquals(a, b Int) bool { return a == b }
func Int8Equals(a, b Int8) bool { return a == b }
func Int16Equals(a, b Int16) bool { return a == b }
func Int32Equals(a, b Int32) bool { return a == b }
func Int64Equals(a, b Int64) bool { return a == b }
func UintEquals(a, b Uint) bool { return a == b }
func Uint8Equals(a, b Uint8) bool { return a == b }
func Uint16Equals(a, b Uint16) bool { return a == b }
func Uint32Equals(a, b Uint32) bool { return a == b }
func Uint64Equals(a, b Uint64) bool { return a == b }
func UintptrEquals(a, b Uintptr) bool { return a == b }
func ByteEquals(a, b Byte) bool { return a == b }
func RuneEquals(a, b Rune) bool { return a == b }
func Float32Equals(a, b Float32) bool { return a == b }
func Float64Equals(a, b Float64) bool { return a == b }
func Complex64Equals(a, b Complex64) bool { return a == b }
func Complex128Equals(a, b Complex128) bool { return a == b }
func AnyEquals(a, b Any) bool { return a == b }

// Equality on Option wrappers (delegates to the wrapper's Equals method).
func BoolOptionEquals(a, b BoolOption) bool { return a.Equals(b) }
func StringOptionEquals(a, b StringOption) bool { return a.Equals(b) }
func IntOptionEquals(a, b IntOption) bool { return a.Equals(b) }
func Int64OptionEquals(a, b Int64Option) bool { return a.Equals(b) }
func ByteOptionEquals(a, b ByteOption) bool { return a.Equals(b) }
func RuneOptionEquals(a, b RuneOption) bool { return a.Equals(b) }
func Float32OptionEquals(a, b Float32Option) bool { return a.Equals(b) }
func Float64OptionEquals(a, b Float64Option) bool { return a.Equals(b) }
func AnyOptionEquals(a, b AnyOption) bool { return a.Equals(b) }
func Tuple2OptionEquals(a, b Tuple2Option) bool { return a.Equals(b) }

// Equality on Option-of-Option wrappers.
func BoolOptionOptionEquals(a, b BoolOptionOption) bool { return a.Equals(b) }
func StringOptionOptionEquals(a, b StringOptionOption) bool { return a.Equals(b) }
func IntOptionOptionEquals(a, b IntOptionOption) bool { return a.Equals(b) }
func Int64OptionOptionEquals(a, b Int64OptionOption) bool { return a.Equals(b) }
func ByteOptionOptionEquals(a, b ByteOptionOption) bool { return a.Equals(b) }
func RuneOptionOptionEquals(a, b RuneOptionOption) bool { return a.Equals(b) }
func Float32OptionOptionEquals(a, b Float32OptionOption) bool { return a.Equals(b) }
func Float64OptionOptionEquals(a, b Float64OptionOption) bool { return a.Equals(b) }
func AnyOptionOptionEquals(a, b AnyOptionOption) bool { return a.Equals(b) }
func Tuple2OptionOptionEquals(a, b Tuple2OptionOption) bool { return a.Equals(b) }

// Equality on Option-of-Array wrappers.
func BoolArrayOptionEquals(a, b BoolArrayOption) bool { return a.Equals(b) }
func StringArrayOptionEquals(a, b StringArrayOption) bool { return a.Equals(b) }
func IntArrayOptionEquals(a, b IntArrayOption) bool { return a.Equals(b) }
func Int64ArrayOptionEquals(a, b Int64ArrayOption) bool { return a.Equals(b) }
func ByteArrayOptionEquals(a, b ByteArrayOption) bool { return a.Equals(b) }
func RuneArrayOptionEquals(a, b RuneArrayOption) bool { return a.Equals(b) }
func Float32ArrayOptionEquals(a, b Float32ArrayOption) bool { return a.Equals(b) }
func Float64ArrayOptionEquals(a, b Float64ArrayOption) bool { return a.Equals(b) }
func AnyArrayOptionEquals(a, b AnyArrayOption) bool { return a.Equals(b) }
func Tuple2ArrayOptionEquals(a, b Tuple2ArrayOption) bool { return a.Equals(b) }

// Equality on Option-of-List wrappers.
func BoolListOptionEquals(a, b BoolListOption) bool { return a.Equals(b) }
func StringListOptionEquals(a, b StringListOption) bool { return a.Equals(b) }
func IntListOptionEquals(a, b IntListOption) bool { return a.Equals(b) }
func Int64ListOptionEquals(a, b Int64ListOption) bool { return a.Equals(b) }
func ByteListOptionEquals(a, b ByteListOption) bool { return a.Equals(b) }
func RuneListOptionEquals(a, b RuneListOption) bool { return a.Equals(b) }
func Float32ListOptionEquals(a, b Float32ListOption) bool { return a.Equals(b) }
func Float64ListOptionEquals(a, b Float64ListOption) bool { return a.Equals(b) }
func AnyListOptionEquals(a, b AnyListOption) bool { return a.Equals(b) }
func Tuple2ListOptionEquals(a, b Tuple2ListOption) bool { return a.Equals(b) }

// Equality on Array wrappers.
func BoolArrayEquals(a, b BoolArray) bool { return a.Equals(b) }
func StringArrayEquals(a, b StringArray) bool { return a.Equals(b) }
func IntArrayEquals(a, b IntArray) bool { return a.Equals(b) }
func Int64ArrayEquals(a, b Int64Array) bool { return a.Equals(b) }
func ByteArrayEquals(a, b ByteArray) bool { return a.Equals(b) }
func RuneArrayEquals(a, b RuneArray) bool { return a.Equals(b) }
func Float32ArrayEquals(a, b Float32Array) bool { return a.Equals(b) }
func Float64ArrayEquals(a, b Float64Array) bool { return a.Equals(b) }
func AnyArrayEquals(a, b AnyArray) bool { return a.Equals(b) }
func Tuple2ArrayEquals(a, b Tuple2Array) bool { return a.Equals(b) }

// Equality on Array-of-Array wrappers.
func BoolArrayArrayEquals(a, b BoolArrayArray) bool { return a.Equals(b) }
func StringArrayArrayEquals(a, b StringArrayArray) bool { return a.Equals(b) }
func IntArrayArrayEquals(a, b IntArrayArray) bool { return a.Equals(b) }
func Int64ArrayArrayEquals(a, b Int64ArrayArray) bool { return a.Equals(b) }
func ByteArrayArrayEquals(a, b ByteArrayArray) bool { return a.Equals(b) }
func RuneArrayArrayEquals(a, b RuneArrayArray) bool { return a.Equals(b) }
func Float32ArrayArrayEquals(a, b Float32ArrayArray) bool { return a.Equals(b) }
func Float64ArrayArrayEquals(a, b Float64ArrayArray) bool { return a.Equals(b) }
func AnyArrayArrayEquals(a, b AnyArrayArray) bool { return a.Equals(b) }
func Tuple2ArrayArrayEquals(a, b Tuple2ArrayArray) bool { return a.Equals(b) }

// Equality on List wrappers.
func BoolListEquals(a, b BoolList) bool { return a.Equals(b) }
func StringListEquals(a, b StringList) bool { return a.Equals(b) }
func IntListEquals(a, b IntList) bool { return a.Equals(b) }
func Int64ListEquals(a, b Int64List) bool { return a.Equals(b) }
func ByteListEquals(a, b ByteList) bool { return a.Equals(b) }
func RuneListEquals(a, b RuneList) bool { return a.Equals(b) }
func Float32ListEquals(a, b Float32List) bool { return a.Equals(b) }
func Float64ListEquals(a, b Float64List) bool { return a.Equals(b) }
func AnyListEquals(a, b AnyList) bool { return a.Equals(b) }
func Tuple2ListEquals(a, b Tuple2List) bool { return a.Equals(b) }

// Equality on List-of-Array wrappers.
func BoolArrayListEquals(a, b BoolArrayList) bool { return a.Equals(b) }
func StringArrayListEquals(a, b StringArrayList) bool { return a.Equals(b) }
func IntArrayListEquals(a, b IntArrayList) bool { return a.Equals(b) }
func Int64ArrayListEquals(a, b Int64ArrayList) bool { return a.Equals(b) }
func ByteArrayListEquals(a, b ByteArrayList) bool { return a.Equals(b) }
func RuneArrayListEquals(a, b RuneArrayList) bool { return a.Equals(b) }
func Float32ArrayListEquals(a, b Float32ArrayList) bool { return a.Equals(b) }
func Float64ArrayListEquals(a, b Float64ArrayList) bool { return a.Equals(b) }
func AnyArrayListEquals(a, b AnyArrayList) bool { return a.Equals(b) }
func Tuple2ArrayListEquals(a, b Tuple2ArrayList) bool { return a.Equals(b) }

// Equality on List-of-Option wrappers.
func BoolOptionListEquals(a, b BoolOptionList) bool { return a.Equals(b) }
func StringOptionListEquals(a, b StringOptionList) bool { return a.Equals(b) }
func IntOptionListEquals(a, b IntOptionList) bool { return a.Equals(b) }
func Int64OptionListEquals(a, b Int64OptionList) bool { return a.Equals(b) }
func ByteOptionListEquals(a, b ByteOptionList) bool { return a.Equals(b) }
func RuneOptionListEquals(a, b RuneOptionList) bool { return a.Equals(b) }
func Float32OptionListEquals(a, b Float32OptionList) bool { return a.Equals(b) }
func Float64OptionListEquals(a, b Float64OptionList) bool { return a.Equals(b) }
func AnyOptionListEquals(a, b AnyOptionList) bool { return a.Equals(b) }
func Tuple2OptionListEquals(a, b Tuple2OptionList) bool { return a.Equals(b) }

// Equality on List-of-List wrappers.
func BoolListListEquals(a, b BoolListList) bool { return a.Equals(b) }
func StringListListEquals(a, b StringListList) bool { return a.Equals(b) }
func IntListListEquals(a, b IntListList) bool { return a.Equals(b) }
func Int64ListListEquals(a, b Int64ListList) bool { return a.Equals(b) }
func ByteListListEquals(a, b ByteListList) bool { return a.Equals(b) }
func RuneListListEquals(a, b RuneListList) bool { return a.Equals(b) }
func Float32ListListEquals(a, b Float32ListList) bool { return a.Equals(b) }
func Float64ListListEquals(a, b Float64ListList) bool { return a.Equals(b) }
func AnyListListEquals(a, b AnyListList) bool { return a.Equals(b) }
func Tuple2ListListEquals(a, b Tuple2ListList) bool { return a.Equals(b) }

// Equality on pairs.
func Tuple2Equals(a, b Tuple2) bool { return a.Equals(b) }
fp/bootstrap_func_equal.go
0.841077
0.707821
bootstrap_func_equal.go
starcoder
package svg import ( "fmt" "strings" "golang.org/x/image/math/fixed" ) // This file defines the basic path structure // Operation groups the different SVG commands type Operation interface { // SVG text representation of the command fmt.Stringer } // OpMoveTo moves the current point. type OpMoveTo fixed.Point26_6 // OpLineTo draws a line from the current point, // and updates it. type OpLineTo fixed.Point26_6 // OpQuadTo draws a quadratic Bezier curve from the current point, // and updates it. type OpQuadTo [2]fixed.Point26_6 // OpCubicTo draws a cubic Bezier curve from the current point, // and updates it. type OpCubicTo [3]fixed.Point26_6 // OpClose close the current path. type OpClose struct{} func (op OpMoveTo) String() string { return fmt.Sprintf("M%4.3f,%4.3f", float32(op.X)/64, float32(op.Y)/64) } func (op OpLineTo) String() string { return fmt.Sprintf("L%4.3f,%4.3f", float32(op.X)/64, float32(op.Y)/64) } func (op OpQuadTo) String() string { return fmt.Sprintf("Q%4.3f,%4.3f,%4.3f,%4.3f", float32(op[0].X)/64, float32(op[0].Y)/64, float32(op[1].X)/64, float32(op[1].Y)/64) } func (op OpCubicTo) String() string { return "C" + fmt.Sprintf("C%4.3f,%4.3f,%4.3f,%4.3f,%4.3f,%4.3f", float32(op[0].X)/64, float32(op[0].Y)/64, float32(op[1].X)/64, float32(op[1].Y)/64, float32(op[2].X)/64, float32(op[2].Y)/64) } func (op OpClose) String() string { return "Z" } // Path describes a sequence of basic SVG operations, which should not be nil // Higher-level shapes may be reduced to a path. type Path []Operation // ToSVGPath returns a string representation of the path func (p Path) ToSVGPath() string { chunks := make([]string, len(p)) for i, op := range p { chunks[i] = op.String() } return strings.Join(chunks, " ") } // String returns a readable representation of a Path. func (p Path) String() string { return p.ToSVGPath() } // Clear zeros the path slice func (p *Path) Clear() { *p = (*p)[:0] } // Start starts a new curve at the given point. 
// Start starts a new curve at the given point.
func (p *Path) Start(a fixed.Point26_6) {
	*p = append(*p, OpMoveTo(a))
}

// Line adds a linear segment to the current curve.
func (p *Path) Line(b fixed.Point26_6) {
	*p = append(*p, OpLineTo(b))
}

// QuadBezier adds a quadratic segment to the current curve.
func (p *Path) QuadBezier(b, c fixed.Point26_6) {
	op := OpQuadTo{b, c}
	*p = append(*p, op)
}

// CubeBezier adds a cubic segment to the current curve.
func (p *Path) CubeBezier(b, c, d fixed.Point26_6) {
	op := OpCubicTo{b, c, d}
	*p = append(*p, op)
}

// Stop joins the ends of the path when closeLoop is true; otherwise it is a no-op.
func (p *Path) Stop(closeLoop bool) {
	if !closeLoop {
		return
	}
	*p = append(*p, OpClose{})
}
path.go
0.822474
0.528473
path.go
starcoder
package kzg

import (
	"bytes"
	"encoding/binary"
	"golang.org/x/crypto/blake2b"
	"io"
	"io/ioutil"

	"go.dedis.ch/kyber/v3"
	"go.dedis.ch/kyber/v3/pairing/bn256"
	"golang.org/x/xerrors"
)

// TrustedSetup is a trusted setup for KZG calculations with degree D.
// The domain of Lagrange polynomials is either defined by powers of omega, assuming omega^i != 1 for any 0<=i<D
// or, if omega == 0, it is 0, 1, 2, ..., D-1
// The secret itself must be destroyed immediately after trusted setup is generated.
// The trusted setup is a public value stored for example in a file.
// It is impossible to restore secret from the trusted setup
// [x]1 means a projection of scalar x to the G1 curve. [x]1 = xG, where G is the generating element
// [x]2 means a projection of scalar x to the G2 curve. [x]2 = xH, where H is the generating element
type TrustedSetup struct {
	Suite         *bn256.Suite
	D             uint16
	Omega         kyber.Scalar  // persistent
	LagrangeBasis []kyber.Point // persistent. TLi = [l<i>(secret)]1
	Diff2         []kyber.Point // persistent
	// auxiliary, precalculated values
	Domain        []kyber.Scalar // non-persistent. if omega != 0, domain_i = omega^i, otherwise domain_i = i.
	AprimeDomainI []kyber.Scalar // A'(i) precalc
	precalc       *precalculated // only not nil if omega == 0 (only for the natural domain)
	ZeroG1        kyber.Scalar   // aux
	OneG1         kyber.Scalar   // aux
}

// precalculated caches scalars used repeatedly when the natural domain
// (omega == 0) is in use.
type precalculated struct {
	// 1/(i-m). Array size is 2d-1. The index for 1/(i-m) is (i-m)+d-1, from 0 to 2d-1. If i==m, the slot stays nil.
	invsub []kyber.Scalar
	ta     [][]kyber.Scalar // ta[m][j] = (aprime(m)/aprime(j))(1/(m-j)). Nil if m == j
	tk     []kyber.Scalar   // tk[m] = sum_{j!=m} ta[m][j]
}

var (
	errWrongSecret = xerrors.New("wrong secret")
	errNotROU      = xerrors.New("not a root of unity")
	errWrongROU    = xerrors.New("wrong root of unity")
)

// newTrustedSetup returns an empty setup bound to the pairing suite; callers
// must init() or read() it before use.
func newTrustedSetup(suite *bn256.Suite) *TrustedSetup {
	return &TrustedSetup{Suite: suite}
}

// init allocates all slices and scalar/point slots for degree d.
func (sd *TrustedSetup) init(d uint16) {
	sd.D = d
	sd.Omega = sd.Suite.G1().Scalar()
	sd.LagrangeBasis = make([]kyber.Point, d)
	sd.Diff2 = make([]kyber.Point, d)
	sd.Domain = make([]kyber.Scalar, d)
	sd.AprimeDomainI = make([]kyber.Scalar, d)
	for i := range sd.Domain {
		sd.Domain[i] = sd.Suite.G1().Scalar()
	}
	for i := range sd.AprimeDomainI {
		sd.AprimeDomainI[i] = sd.Suite.G1().Scalar()
	}
	for i := range sd.LagrangeBasis {
		sd.LagrangeBasis[i] = sd.Suite.G1().Point()
		sd.Diff2[i] = sd.Suite.G2().Point()
	}
	sd.ZeroG1 = sd.Suite.G1().Scalar().Zero()
	sd.OneG1 = sd.Suite.G1().Scalar().One()
}

// TrustedSetupFromSecretPowers calculates TrustedSetup from secret and omega
// It uses powers of the omega as a domain for Lagrange basis
// Only used once after what secret must be destroyed
func TrustedSetupFromSecretPowers(suite *bn256.Suite, d uint16, omega, secret kyber.Scalar) (*TrustedSetup, error) {
	ret := newTrustedSetup(suite)
	ret.init(d)
	if err := ret.generatePowers(omega, secret); err != nil {
		return nil, err
	}
	return ret, nil
}
// TrustedSetupFromSecretNaturalDomain uses the 0,1,2,..,d-1 domain instead of
// powers of omega.
func TrustedSetupFromSecretNaturalDomain(suite *bn256.Suite, d uint16, secret kyber.Scalar) (*TrustedSetup, error) {
	ret := newTrustedSetup(suite)
	ret.init(d)
	if err := ret.generateFromNaturalDomain(secret); err != nil {
		return nil, err
	}
	return ret, nil
}

// TrustedSetupFromSeed for testing only: derives the secret deterministically
// from a seed via blake2b, then builds a natural-domain setup.
func TrustedSetupFromSeed(suite *bn256.Suite, d uint16, seed []byte) (*TrustedSetup, error) {
	h := blake2b.Sum256(seed)
	secret := suite.G1().Scalar().SetBytes(h[:])
	ret := newTrustedSetup(suite)
	ret.init(d)
	if err := ret.generateFromNaturalDomain(secret); err != nil {
		return nil, err
	}
	return ret, nil
}

// TrustedSetupFromBytes unmarshals trusted setup from binary representation
// and rebuilds the non-persistent Domain and A'(i) caches.
// NOTE(review): the natural-domain branch (Omega == 0) does not call
// precalculate(), unlike generateFromNaturalDomain — ta/tk then fall back to
// on-the-fly computation. Confirm this asymmetry is intended.
func TrustedSetupFromBytes(suite *bn256.Suite, data []byte) (*TrustedSetup, error) {
	ret := newTrustedSetup(suite)
	if err := ret.read(bytes.NewReader(data)); err != nil {
		return nil, err
	}
	if !ret.Omega.Equal(ret.ZeroG1) {
		// Power domain: domain_i = omega^i; reject omega with a short cycle.
		for i := range ret.Domain {
			powerSimple(ret.Suite, ret.Omega, i, ret.Domain[i])
			if i > 0 && ret.Domain[i].Equal(ret.OneG1) {
				return nil, errWrongROU
			}
		}
	} else {
		// Natural domain: domain_i = i.
		for i := range ret.Domain {
			ret.Domain[i].SetInt64(int64(i))
		}
	}
	for i := range ret.AprimeDomainI {
		ret.aprime(i, ret.AprimeDomainI[i])
	}
	return ret, nil
}

// TrustedSetupFromFile restores trusted setup from file
func TrustedSetupFromFile(suite *bn256.Suite, fname string) (*TrustedSetup, error) {
	data, err := ioutil.ReadFile(fname)
	if err != nil {
		return nil, err
	}
	ret, err := TrustedSetupFromBytes(suite, data)
	if err != nil {
		return nil, err
	}
	return ret, nil
}

// Bytes marshals the trusted setup
func (sd *TrustedSetup) Bytes() []byte {
	var buf bytes.Buffer
	if err := sd.write(&buf); err != nil {
		panic(err)
	}
	return buf.Bytes()
}

// generatePowers creates a new TrustedSetup based on omega and secret.
// NOTE(review): len(secret.String()) < 50 is a crude entropy sanity check on
// the decimal rendering of the scalar — confirm the threshold against the
// scalar field size.
func (sd *TrustedSetup) generatePowers(omega, secret kyber.Scalar) error {
	if len(secret.String()) < 50 {
		return errWrongSecret
	}
	sd.Omega.Set(omega)
	for i := range sd.Domain {
		powerSimple(sd.Suite, sd.Omega, i, sd.Domain[i])
		if sd.Domain[i].Equal(secret) {
			// The secret must not coincide with a domain point.
			return errWrongSecret
		}
		if i > 0 && sd.Domain[i].Equal(sd.OneG1) {
			return errWrongROU
		}
	}
	for i := range sd.AprimeDomainI {
		sd.aprime(i, sd.AprimeDomainI[i])
	}
	// calculate Lagrange basis: [l_i(s)]1
	for i := range sd.LagrangeBasis {
		l := sd.evalLagrangeValue(i, secret)
		sd.LagrangeBasis[i].Mul(l, nil) // [l_i(secret)]1
	}
	// calculate [secret-rou^i]2
	e2 := sd.Suite.G2().Scalar()
	for i := range sd.Diff2 {
		e2.Sub(secret, sd.Domain[i])
		sd.Diff2[i].Mul(e2, nil)
	}
	return nil
}

// generateFromNaturalDomain creates a new TrustedSetup from secret and using 0,1,2..d-1 as a domain for Lagrange basis
func (sd *TrustedSetup) generateFromNaturalDomain(secret kyber.Scalar) error {
	if len(secret.String()) < 50 {
		return errWrongSecret
	}
	sd.Omega.Zero()
	for i := range sd.Domain {
		sd.Domain[i].SetInt64(int64(i))
	}
	for i := range sd.AprimeDomainI {
		sd.aprime(i, sd.AprimeDomainI[i])
	}
	// calculate Lagrange basis: [l_i(s)]1
	for i := range sd.LagrangeBasis {
		l := sd.evalLagrangeValue(i, secret)
		sd.LagrangeBasis[i].Mul(l, nil) // [l_i(secret)]1
	}
	// calculate [secret-domain_i]2
	e2 := sd.Suite.G2().Scalar()
	for i := range sd.Diff2 {
		e2.Sub(secret, sd.Domain[i])
		sd.Diff2[i].Mul(e2, nil)
	}
	sd.precalculate()
	return nil
}

// evalLagrangeValue calculates li(X) = [prod<j=0,D-1;j!=i>((X-omega^j)/(omega^i-omega^j)]1
func (sd *TrustedSetup) evalLagrangeValue(i int, v kyber.Scalar) kyber.Scalar {
	ret := sd.Suite.G1().Scalar().One()
	numer := sd.Suite.G1().Scalar()
	denom := sd.Suite.G1().Scalar()
	elem := sd.Suite.G1().Scalar()
	for j := 0; j < int(sd.D); j++ {
		if j == i {
			continue
		}
		numer.Sub(v, sd.Domain[j])
		denom.Sub(sd.Domain[i], sd.Domain[j])
		elem.Div(numer, denom)
		ret.Mul(ret, elem)
	}
	return ret
}

// aprime computes A'(domain_m) = prod_{i!=m}(domain_m - domain_i) into ret.
func (sd *TrustedSetup) aprime(m int, ret kyber.Scalar) kyber.Scalar {
	e := sd.Suite.G1().Scalar()
	ret.One()
	for i := range sd.Domain {
		if i == m {
			continue
		}
		e.Sub(sd.Domain[m], sd.Domain[i])
		ret.Mul(ret, e)
	}
	return ret
}
marshal func (sd *TrustedSetup) write(w io.Writer) error { var tmp2 [2]byte binary.LittleEndian.PutUint16(tmp2[:], sd.D) if _, err := w.Write(tmp2[:]); err != nil { return err } if _, err := sd.Omega.MarshalTo(w); err != nil { return err } for i := range sd.LagrangeBasis { if _, err := sd.LagrangeBasis[i].MarshalTo(w); err != nil { return err } } for i := range sd.Diff2 { if _, err := sd.Diff2[i].MarshalTo(w); err != nil { return err } } return nil } // read unmarshal func (sd *TrustedSetup) read(r io.Reader) error { var tmp2 [2]byte if _, err := r.Read(tmp2[:]); err != nil { return err } sd.init(binary.LittleEndian.Uint16(tmp2[:])) if _, err := sd.Omega.UnmarshalFrom(r); err != nil { return err } if !isRootOfUnity(sd.Suite, sd.Omega) { return errNotROU } for i := range sd.LagrangeBasis { if _, err := sd.LagrangeBasis[i].UnmarshalFrom(r); err != nil { return err } } for i := range sd.Diff2 { if _, err := sd.Diff2[i].UnmarshalFrom(r); err != nil { return err } } return nil } func (sd *TrustedSetup) ta(m, j int, ret kyber.Scalar) kyber.Scalar { if sd.precalc != nil { ret.Set(sd.precalc.ta[m][j]) return ret } sd.invsub(m, j, ret) ret.Mul(ret, sd.AprimeDomainI[m]) ret.Div(ret, sd.AprimeDomainI[j]) return ret } func (sd *TrustedSetup) tk(m int, ret kyber.Scalar) kyber.Scalar { if sd.precalc != nil { ret.Set(sd.precalc.tk[m]) return ret } ret.Zero() t := sd.Suite.G1().Scalar() for j := 0; j < int(sd.D); j++ { if j == m { continue } ret.Add(ret, sd.ta(m, j, t)) } return ret } func (sd *TrustedSetup) precalculate() { sd.precalc = &precalculated{ invsub: make([]kyber.Scalar, sd.D*2-1), ta: make([][]kyber.Scalar, sd.D), tk: make([]kyber.Scalar, sd.D), } for i := range sd.precalc.ta { sd.precalc.ta[i] = make([]kyber.Scalar, sd.D) } tj := sd.Suite.G1().Scalar() for j := 0; j < int(sd.D); j++ { tj.SetInt64(int64(j)) for m := 0; m < int(sd.D); m++ { if j == m { continue } idx := int(sd.D) - 1 + m - j sd.precalc.invsub[idx] = sd.Suite.G1().Scalar().SetInt64(int64(m)) 
sd.precalc.invsub[idx].Sub(sd.precalc.invsub[idx], tj) sd.precalc.invsub[idx].Inv(sd.precalc.invsub[idx]) } } for j := range sd.precalc.ta { for m := range sd.precalc.ta[j] { if m == j { continue } sd.precalc.ta[m][j] = sd.Suite.G1().Scalar().Set(sd.AprimeDomainI[m]) sd.precalc.ta[m][j].Div(sd.precalc.ta[m][j], sd.AprimeDomainI[j]) sd.precalc.ta[m][j].Mul(sd.precalc.ta[m][j], sd.invsub(m, j)) } } for m := range sd.precalc.tk { sd.precalc.tk[m] = sd.Suite.G1().Scalar().Zero() for j := range sd.precalc.ta[m] { if j == m { continue } sd.precalc.tk[m].Add(sd.precalc.tk[m], sd.precalc.ta[m][j]) } } } func (sd *TrustedSetup) invsub(m, j int, set ...kyber.Scalar) kyber.Scalar { if sd.precalc == nil { var ret kyber.Scalar if len(set) > 0 { ret = set[0] } else { ret = sd.Suite.G1().Scalar() } ret.Sub(sd.Domain[m], sd.Domain[j]) ret.Inv(ret) return ret } idx := int(sd.D) - 1 + m - j if len(set) > 0 { set[0].Set(sd.precalc.invsub[idx]) } return sd.precalc.invsub[idx] }
kzg/trusted.go
0.548432
0.42913
trusted.go
starcoder
package main

import (
	"fmt"

	gospec "github.com/AlaxLee/go-spec-util"
)

// Demonstrates the Go specification's "representability" rules using
// go-spec-util, once with an explicit named type T and once with the type
// name passed directly.
func main() {
	representableExample01()
	representableExample02()
}

// Info pairs a constant expression x with a type name T.
type Info struct {
	x string
	T string
}

func representableExample01() {
	/*
		x is representable by a value of T because

		x                   T           x T
		'a'                 byte        97 is in the set of byte values
		97                  rune        rune is an alias for int32, and 97 is in the set of 32-bit integers
		"foo"               string      "foo" is in the set of string values
		1024                int16       1024 is in the set of 16-bit integers
		42.0                byte        42 is in the set of unsigned 8-bit integers
		1e10                uint64      10000000000 is in the set of unsigned 64-bit integers
		2.718281828459045   float32     2.718281828459045 rounds to 2.7182817 which is in the set of float32 values
		-1e-1000            float64     -1e-1000 rounds to IEEE -0.0 which is further simplified to 0.0
		0i                  int         0 is an integer value
		(42 + 0i)           float32     42.0 (with zero imaginary part) is in the set of float32 values
	*/
	infos := []Info{
		{`'a'`, `byte`},
		{`97`, `rune`},
		{`"foo"`, `string`},
		{`1024`, `int16`},
		{`42.0`, `byte`},
		{`1e10`, `uint64`},
		{`2.718281828459045`, `float32`},
		{`-1e-1000`, `float64`},
		{`0i`, `int`},
		{`(42 + 0i)`, `float32`},
	}
	// Variant 1: declare "type T <name>" and check against the named type T.
	for _, v := range infos {
		code := fmt.Sprintf("type T %s; const x = %s", v.T, v.x)
		s := gospec.NewSpec(code)
		if s.Representable("x", "T") {
			fmt.Printf("%20s is representable by a value of %s\n", v.x, v.T)
		}
	}
	/* Output: each of the ten constants above is reported as representable. */
	// Variant 2: pass the type name straight to Representable.
	for _, v := range infos {
		code := fmt.Sprintf("const x = %s", v.x)
		s := gospec.NewSpec(code)
		if s.Representable("x", v.T) {
			fmt.Printf("%20s is representable by a value of %s\n", v.x, v.T)
		}
	}
	/* Output: identical to variant 1. */
}

func representableExample02() {
	/*
		x is not representable by a value of T because

		x       T           reason
		0       bool        0 is not in the set of boolean values
		'a'     string      'a' is a rune, it is not in the set of string values
		1024    byte        1024 is not in the set of unsigned 8-bit integers
		-1      uint16      -1 is not in the set of unsigned 16-bit integers
		1.1     int         1.1 is not an integer value
		42i     float32     (0 + 42i) is not in the set of float32 values
		1e1000  float64     1e1000 overflows to IEEE +Inf after rounding
	*/
	infos := []Info{
		{`0`, `bool`},
		{`'a'`, `string`},
		{`1024`, `byte`},
		{`-1`, `uint16`},
		{`1.1`, `int`},
		{`42i`, `float32`},
		{`1e1000`, `float64`},
	}
	// Variant 1: named type T.
	for _, v := range infos {
		code := fmt.Sprintf("type T %s; const x = %s", v.T, v.x)
		s := gospec.NewSpec(code)
		if !s.Representable("x", "T") {
			fmt.Printf("%10s is not representable by a value of %s\n", v.x, v.T)
		}
	}
	/* Output: each of the seven constants above is reported as NOT representable. */
	// Variant 2: type name passed directly.
	for _, v := range infos {
		code := fmt.Sprintf("const x = %s", v.x)
		s := gospec.NewSpec(code)
		if !s.Representable("x", v.T) {
			fmt.Printf("%10s is not representable by a value of %s\n", v.x, v.T)
		}
	}
	/* Output: identical to variant 1. */
}
tutorial/02-assignability/representable.go
0.681303
0.501221
representable.go
starcoder
package edgedbtypes

// Optional represents a shape field that is not required.
type Optional struct {
	isSet bool // true when a value is present
}

// Missing returns true if the value is missing.
func (o *Optional) Missing() bool { return !o.isSet }

// SetMissing sets the structs missing status. true means missing and false
// means present.
func (o *Optional) SetMissing(missing bool) { o.isSet = !missing }

// OptionalInt16 is an optional int16. Optional types must be used for out
// parameters when a shape field is not required.
type OptionalInt16 struct {
	val   int16
	isSet bool
}

// Get returns the value and a boolean indicating if the value is present.
func (o *OptionalInt16) Get() (int16, bool) { return o.val, o.isSet }

// Set sets the value.
func (o *OptionalInt16) Set(val int16) {
	o.val = val
	o.isSet = true
}

// Unset marks the value as missing.
func (o *OptionalInt16) Unset() {
	o.val = 0
	o.isSet = false
}

// OptionalInt32 is an optional int32. Optional types must be used for out
// parameters when a shape field is not required.
type OptionalInt32 struct {
	val   int32
	isSet bool
}

// Get returns the value and a boolean indicating if the value is present.
func (o *OptionalInt32) Get() (int32, bool) { return o.val, o.isSet }

// Set sets the value.
func (o *OptionalInt32) Set(val int32) {
	o.val = val
	o.isSet = true
}

// Unset marks the value as missing.
func (o *OptionalInt32) Unset() {
	o.val = 0
	o.isSet = false
}

// OptionalInt64 is an optional int64. Optional types must be used for out
// parameters when a shape field is not required.
// NOTE(review): unlike the other Optional* types in this file, Int64's Set
// and Unset return the receiver for chaining — confirm whether the other
// types should be brought in line (an additive, backward-compatible change).
type OptionalInt64 struct {
	val   int64
	isSet bool
}

// Get returns the value and a boolean indicating if the value is present.
func (o *OptionalInt64) Get() (int64, bool) { return o.val, o.isSet }

// Set sets the value and returns the receiver for chaining.
func (o *OptionalInt64) Set(val int64) *OptionalInt64 {
	o.val = val
	o.isSet = true
	return o
}

// Unset marks the value as missing and returns the receiver for chaining.
func (o *OptionalInt64) Unset() *OptionalInt64 {
	o.val = 0
	o.isSet = false
	return o
}

// OptionalFloat32 is an optional float32. Optional types must be used for out
// parameters when a shape field is not required.
type OptionalFloat32 struct {
	val   float32
	isSet bool
}

// Get returns the value and a boolean indicating if the value is present.
func (o *OptionalFloat32) Get() (float32, bool) { return o.val, o.isSet }

// Set sets the value.
func (o *OptionalFloat32) Set(val float32) {
	o.val = val
	o.isSet = true
}

// Unset marks the value as missing.
func (o *OptionalFloat32) Unset() {
	o.val = 0
	o.isSet = false
}

// OptionalFloat64 is an optional float64. Optional types must be used for out
// parameters when a shape field is not required.
type OptionalFloat64 struct {
	val   float64
	isSet bool
}

// Get returns the value and a boolean indicating if the value is present.
func (o *OptionalFloat64) Get() (float64, bool) { return o.val, o.isSet }

// Set sets the value.
func (o *OptionalFloat64) Set(val float64) {
	o.val = val
	o.isSet = true
}

// Unset marks the value as missing.
func (o *OptionalFloat64) Unset() {
	o.val = 0
	o.isSet = false
}
internal/edgedbtypes/numbers.go
0.933211
0.494385
numbers.go
starcoder
package athena type Operator struct { Column string `json:"column"` Operation operation `json:"operation"` Value OpValue `json:"value"` } type OpValue interface{} type operation string const ( EqualOp operation = "=" NotEqualOp operation = "!=" GreaterThanOp operation = ">" GreaterThanEqualToOp operation = ">=" LessThanOp operation = "<" LessThanEqualToOp operation = "<=" InOp operation = "in" NotInOp operation = "not in" LikeOp operation = "like" LimitOp operation = "limit" OrderOp operation = "order" SkipOp operation = "skip" OrOp operation = "or" AndOp operation = "and" ExistsOp operation = "exists" ) var AllOperations = []operation{ EqualOp, NotEqualOp, GreaterThanOp, GreaterThanEqualToOp, LessThanOp, LessThanEqualToOp, InOp, NotInOp, LikeOp, LimitOp, OrderOp, SkipOp, OrOp, AndOp, ExistsOp, } func (o operation) IsValid() bool { switch o { case EqualOp, NotEqualOp, GreaterThanOp, LessThanOp, GreaterThanEqualToOp, LessThanEqualToOp, InOp, NotInOp, LikeOp, LimitOp, OrderOp, SkipOp, OrOp, AndOp, ExistsOp: return true } return false } func (o operation) Value() string { return string(o) } func NewOperators(operators ...*Operator) []*Operator { return operators } func NewLikeOperator(column string, value interface{}) *Operator { return &Operator{ Column: column, Operation: LikeOp, Value: value, } } func NewEqualOperator(column string, value interface{}) *Operator { return &Operator{ Column: column, Operation: EqualOp, Value: value, } } func NewNotEqualOperator(column string, value interface{}) *Operator { return &Operator{ Column: column, Operation: NotEqualOp, Value: value, } } func NewGreaterThanOperator(column string, value interface{}) *Operator { return &Operator{ Column: column, Operation: GreaterThanOp, Value: value, } } func NewGreaterThanEqualToOperator(column string, value interface{}) *Operator { return &Operator{ Column: column, Operation: GreaterThanEqualToOp, Value: value, } } func NewLessThanOperator(column string, value interface{}) *Operator { return 
&Operator{ Column: column, Operation: LessThanOp, Value: value, } } func NewLessThanEqualToOperator(column string, value interface{}) *Operator { return &Operator{ Column: column, Operation: LessThanEqualToOp, Value: value, } } type Sort int const ( SortAsc Sort = 1 SortDesc Sort = -1 ) var AllSort = []Sort{ SortAsc, SortDesc, } func (e Sort) IsValid() bool { switch e { case SortAsc, SortDesc: return true } return false } func (e Sort) Value() int { return int(e) } func NewOrderOperator(column string, sort Sort) *Operator { if !sort.IsValid() { return nil } return &Operator{ Column: column, Operation: OrderOp, Value: sort.Value(), } } func NewInOperator(column string, values interface{}) *Operator { return &Operator{ Column: column, Operation: InOp, Value: values, } } func NewNotInOperator(column string, value interface{}) *Operator { return &Operator{ Column: column, Operation: NotInOp, Value: value, } } func NewLimitOperator(value int64) *Operator { return &Operator{ Column: "", Operation: LimitOp, Value: value, } } func NewSkipOperator(value int64) *Operator { return &Operator{ Column: "", Operation: SkipOp, Value: value, } } func NewOrOperator(value ...*Operator) *Operator { return &Operator{ Column: "", Operation: OrOp, Value: value, } } func NewAndOperator(value ...*Operator) *Operator { return &Operator{ Column: "", Operation: AndOp, Value: value, } } func NewExistsOperator(column string, value bool) *Operator { return &Operator{ Column: column, Operation: ExistsOp, Value: value, } }
operators.go
0.757077
0.402099
operators.go
starcoder
package matrix

var (
	// ident is the 3x3 identity matrix; treated as immutable (use Ident for a copy).
	ident = Matrix3x3{
		{1, 0, 0},
		{0, 1, 0},
		{0, 0, 1},
	}
)

// Point3 is a point in 3-space.
type Point3 [3]int

// Sub returns the vector from p to q, i.e. q - p componentwise.
// NOTE(review): this is the reverse of the usual p - q reading of "p.Sub(q)" —
// confirm callers expect the p→q direction.
func (p Point3) Sub(q Point3) Vector3 {
	return Vector3{q[0] - p[0], q[1] - p[1], q[2] - p[2]}
}

// Mul applies the rotation matrix to the point (row vector times matrix).
func (p Point3) Mul(rot Matrix3x3) Point3 {
	return Point3(Vector3(p).Mul(rot))
}

// Offset translates the point by vector q.
func (p Point3) Offset(q Vector3) Point3 {
	return Point3{p[0] + q[0], p[1] + q[1], p[2] + q[2]}
}

// Vector3 is a vector in 3-space, or a difference between two points.
type Vector3 [3]int

// Mul multiplies the row vector v by the matrix rot (v * rot).
func (v Vector3) Mul(rot Matrix3x3) Vector3 {
	return Vector3{
		v[0]*rot[0][0] + v[1]*rot[1][0] + v[2]*rot[2][0],
		v[0]*rot[0][1] + v[1]*rot[1][1] + v[2]*rot[2][1],
		v[0]*rot[0][2] + v[1]*rot[1][2] + v[2]*rot[2][2],
	}
}

// Eq reports componentwise equality.
func (v Vector3) Eq(w Vector3) bool {
	return v[0] == w[0] && v[1] == w[1] && v[2] == w[2]
}

// Add returns the componentwise sum v + w.
func (v Vector3) Add(w Vector3) Vector3 {
	return Vector3{v[0] + w[0], v[1] + w[1], v[2] + w[2]}
}

// Matrix3x3 is a 3x3 integer matrix, indexed [row][column].
type Matrix3x3 [3][3]int

// Ident returns a fresh copy of the identity matrix.
func Ident() Matrix3x3 {
	return ident.Clone()
}

// Transpose returns the transpose of m.
func (m Matrix3x3) Transpose() Matrix3x3 {
	rc := Matrix3x3{}
	for i := 0; i < 3; i++ {
		for j := 0; j < 3; j++ {
			rc[i][j] = m[j][i]
		}
	}
	return rc
}

// Clone returns a copy of m. (Matrix3x3 is an array value type, so assignment
// already copies; Clone exists for explicitness at call sites.)
func (m Matrix3x3) Clone() Matrix3x3 {
	rc := Matrix3x3{}
	for i := 0; i < 3; i++ {
		for j := 0; j < 3; j++ {
			rc[i][j] = m[i][j]
		}
	}
	return rc
}

// Mul returns the matrix product m * n.
func (m Matrix3x3) Mul(n Matrix3x3) Matrix3x3 {
	rc := Matrix3x3{}
	for i := 0; i < 3; i++ {
		for j := 0; j < 3; j++ {
			rc[i][j] = m.dot(n, i, j)
		}
	}
	return rc
}

// Pow returns m raised to the power n by repeated multiplication.
// Assumes n >= 0; n == 0 yields the identity.
func (m Matrix3x3) Pow(n int) Matrix3x3 {
	rc := ident
	for i := 0; i < n; i++ {
		rc = rc.Mul(m)
	}
	return rc
}

// dot computes the (i, j) entry of the product m * n.
func (m Matrix3x3) dot(n Matrix3x3, i, j int) int {
	var rc int
	for k := 0; k < 3; k++ {
		rc += m[i][k] * n[k][j]
	}
	return rc
}

// AllRotations returns the set of axis-aligned rotation matrices, computed
// lazily on first use.
// NOTE(review): the lazy initialization is not goroutine-safe — confirm this
// is only called from a single goroutine, or guard with sync.Once.
func AllRotations() []Matrix3x3 {
	if allRotations == nil {
		allRotations = calcAllRotations()
	}
	return allRotations
}

// Rotation returns the i-th rotation from AllRotations.
func Rotation(i int) Matrix3x3 {
	return AllRotations()[i]
}

var (
	// allRotations caches the result of calcAllRotations; nil until first use.
	allRotations []Matrix3x3
)

// calcAllRotations enumerates all products xRot^x * yRot^y * zRot^z for
// exponents 0..3 and deduplicates them, yielding every distinct axis-aligned
// rotation (the identity is seeded into the result first).
func calcAllRotations() []Matrix3x3 {
	zRot := Matrix3x3{ // rotate around z axis
		{0, -1, 0},
		{1, 0, 0},
		{0, 0, 1},
	}
	yRot := Matrix3x3{ // rotate around y axis
		{0, 0, -1},
		{0, 1, 0},
		{1, 0, 0},
	}
	xRot := Matrix3x3{ // rotate around x axis
		{1, 0, 0},
		{0, 0, -1},
		{0, 1, 0},
	}
	rotations := []Matrix3x3{ident, zRot, yRot, xRot}
	for x := 0; x < 4; x++ {
		for y := 0; y < 4; y++ {
			for z := 0; z < 4; z++ {
				val := xRot.Pow(x).Mul(yRot.Pow(y)).Mul(zRot.Pow(z))
				var found bool
				for _, rot := range rotations {
					if rot == val {
						found = true
						break
					}
				}
				if !found {
					rotations = append(rotations, val)
				}
			}
		}
	}
	return rotations
}
commongen/matrix/matrix.go
0.728265
0.670538
matrix.go
starcoder
package influxdbclient

import "sort"

// DataStat holds summary statistics for a single named data series.
type DataStat struct {
	Name   string
	Min    float64
	Max    float64
	Mean   float64
	Median float64
	Length int
}

// DataStats is a sortable collection of DataStat values.
type DataStats []DataStat

// FieldSort sorts the stats in place by the named field ("name", "min",
// "max", "median"); any other value sorts by mean. All orders are descending
// (each Less implementation uses >).
func (ds *DataStats) FieldSort(field string) {
	switch field {
	case "name":
		sort.Sort(NameDataStats{*ds})
	case "min":
		sort.Sort(MinDataStats{*ds})
	case "max":
		sort.Sort(MaxDataStats{*ds})
	case "median":
		sort.Sort(MedianDataStats{*ds})
	default:
		sort.Sort(MeanDataStats{*ds})
	}
}

func (slice DataStats) Len() int { return len(slice) }

func (slice DataStats) Swap(i, j int) { slice[i], slice[j] = slice[j], slice[i] }

// NameDataStats orders by Name, descending.
type NameDataStats struct{ DataStats }

func (slice NameDataStats) Less(i, j int) bool {
	return slice.DataStats[i].Name > slice.DataStats[j].Name
}

// MinDataStats orders by Min, descending.
type MinDataStats struct{ DataStats }

func (slice MinDataStats) Less(i, j int) bool {
	return slice.DataStats[i].Min > slice.DataStats[j].Min
}

// MaxDataStats orders by Max, descending.
type MaxDataStats struct{ DataStats }

func (slice MaxDataStats) Less(i, j int) bool {
	return slice.DataStats[i].Max > slice.DataStats[j].Max
}

// MeanDataStats orders by Mean, descending.
type MeanDataStats struct{ DataStats }

func (slice MeanDataStats) Less(i, j int) bool {
	return slice.DataStats[i].Mean > slice.DataStats[j].Mean
}

// MedianDataStats orders by Median, descending.
type MedianDataStats struct{ DataStats }

func (slice MedianDataStats) Less(i, j int) bool {
	return slice.DataStats[i].Median > slice.DataStats[j].Median
}

// Sum returns the sum of data.
func Sum(data []float64) (sum float64) {
	for _, n := range data {
		sum += n
	}
	return sum
}

// Mean returns the arithmetic mean of data. An empty slice yields NaN (0/0);
// callers must guard against empty input.
func Mean(data []float64) (mean float64) {
	sum := Sum(data)
	return sum / float64(len(data))
}

// BuildStats computes min/max/mean/median stats for every series in dsets.
// Each series' data slice is sorted in place as a side effect.
//
// NOTE(review): when a DataSet has tags, the stat name is built from the tag
// values only and the field name is discarded — two fields sharing the same
// tags collide. Confirm this is intended.
func BuildStats(dsets []*DataSet) (stats DataStats) {
	for _, ds := range dsets {
		for name, data := range ds.Datas {
			length := len(data)
			if length == 0 {
				// FIX: skip empty series — indexing data[0]/data[length-1]
				// below would panic and Mean would be NaN.
				continue
			}
			// sorting data (in place) so min/max/median are positional
			sort.Float64s(data)
			var stat DataStat
			if len(ds.Tags) > 0 {
				for _, tagValue := range ds.Tags {
					if len(stat.Name) > 0 {
						stat.Name = stat.Name + "_"
					}
					stat.Name = stat.Name + tagValue
				}
			} else {
				stat.Name = name
			}
			stat.Min = data[0]
			stat.Max = data[length-1]
			stat.Mean = Mean(data)
			stat.Length = length
			if length%2 == 0 {
				// Even length: median is the mean of the two middle elements.
				stat.Median = Mean(data[length/2-1 : length/2+1])
			} else {
				stat.Median = data[length/2]
			}
			stats = append(stats, stat)
		}
	}
	return
}
datastat.go
0.603348
0.48749
datastat.go
starcoder
package main

import "fmt"

// MaximumCrossingArray finds the maximum-sum subarray of array[start..end]
// that crosses the midpoint: it extends left from middle (down to start) and
// right from middle+1 (up to end), keeping the best running total on each
// side. Returns the subarray bounds and its sum.
func MaximumCrossingArray(array []int, start, middle, end int) (startIndex, endIndex, maximum int) {
	maxUInt := ^uint(0)            // inverse of 0
	maxInt := int(maxUInt >> 1)    // one bit to the right (first bit is the sign)
	minInt := -maxInt - 1          // zero is part of the positive numbers
	var leftMax = minInt
	var rightMax = minInt
	var leftTotal, rightTotal int
	// FIX: scan down to `start`, not to index 0. The original ignored the
	// start bound, so recursive calls on the right half read elements that
	// belong to the left half.
	for i := middle; i >= start; i-- {
		leftTotal += array[i]
		if leftTotal > leftMax {
			leftMax = leftTotal
			startIndex = i
		}
	}
	for i := middle + 1; i <= end; i++ {
		rightTotal += array[i]
		if rightTotal > rightMax {
			rightMax = rightTotal
			endIndex = i
		}
	}
	maximum = leftMax + rightMax
	return startIndex, endIndex, maximum
}

// MaximumSubarray returns the bounds and sum of the maximum-sum contiguous
// subarray of input[start..end] (inclusive), using CLRS-style divide and
// conquer in O(n log n).
func MaximumSubarray(input []int, start, end int) (startIndex, endIndex, maximum int) {
	if start == end {
		// FIX: the original returned input[startIndex], and the named result
		// startIndex is always its zero value 0 here — so every single-element
		// base case reported input[0] instead of input[start].
		return start, end, input[start]
	}
	middle := (start + end) / 2
	leftStart, leftEnd, leftMax := MaximumSubarray(input, start, middle)
	rightStart, rightEnd, rightMax := MaximumSubarray(input, middle+1, end)
	crossStart, crossEnd, crossMax := MaximumCrossingArray(input, start, middle, end)
	// FIX: use >= so ties resolve to the left/right candidates. With strict >,
	// leftMax == rightMax fell through to the crossing result even when the
	// crossing sum was smaller.
	switch {
	case leftMax >= rightMax && leftMax >= crossMax:
		startIndex, endIndex, maximum = leftStart, leftEnd, leftMax
	case rightMax >= crossMax:
		startIndex, endIndex, maximum = rightStart, rightEnd, rightMax
	default:
		startIndex, endIndex, maximum = crossStart, crossEnd, crossMax
	}
	return startIndex, endIndex, maximum
}

func main() {
	stockPrices := []int{100, 113, 110, 85, 105, 102, 86, 63, 81, 101, 94, 106, 101, 79, 94, 90, 97}
	// Work on day-to-day deltas so "max subarray" = best buy/sell window.
	differences := make([]int, len(stockPrices)-1)
	for i := 1; i < len(stockPrices); i++ {
		differences[i-1] = stockPrices[i] - stockPrices[i-1]
	}
	fmt.Println("Stock Prices:", stockPrices)
	fmt.Println("Differences:", differences)
	fmt.Println("MaxCrossingArray Test:", wrap(MaximumCrossingArray(differences, 0, len(differences)/2, len(differences)-1)))
	fmt.Println("MaxSubArray:", wrap(MaximumSubarray(differences, 0, len(differences)-1)))
	var leetCodeArray = []int{-2, 1, -3, 4, -1, 2, 1, -5, 4}
	fmt.Println("MaxSubArray:", wrap(MaximumSubarray(leetCodeArray, 0, len(leetCodeArray)-1)))
	var leftMax = []int{100, 200, 5, -10, -20, -20, -15, -5, -12, -18}
	fmt.Println("MaxSubArray:", wrap(MaximumSubarray(leftMax, 0, len(leftMax)-1)))
}

// wrap packs multiple return values into a slice so they can be printed in one
// Println call.
func wrap(input ...interface{}) []interface{} {
	return input
}
common_problems/maximum_subarray.go
0.718199
0.453806
maximum_subarray.go
starcoder
package types

import (
	"math/big"

	"github.com/vs-ude/tinyfyr/internal/errlog"
	"github.com/vs-ude/tinyfyr/internal/parser"
)

// ExprType represents type information about an expression.
// It is more powerful than type alone, because it can store values in case the expression is constant.
// Furthermore, it exposes mutability and group specifiers by removing them from the Type hierarchy.
type ExprType struct {
	// Instances of GroupedType or MutableType are removed for convenience and
	// factored into the PointerDestMutable and PointerDestGroup properties.
	Type Type
	// Mutable defines whether the value of the expression is mutable.
	Mutable  bool
	Volatile bool
	// Unsafe is true if the value of the expression has been obtained via
	// dereferencing of an unsafe pointer.
	Unsafe bool
	// PointerDestMutable defines the mutability of the value being pointed to.
	// This is required, because the type system distinguishes between the mutability of a pointer
	// and the mutability of the value it is pointing to.
	PointerDestMutable bool
	// PointerDestStackOrder applies to the values being pointed to (or null if none was specified).
	// This is required, because a pointer on the stack belongs to a stack-group,
	// but it might point to an object of another stack frame.
	PointerDestStackOrder *StackOrder
	StringValue           string
	RuneValue             rune
	IntegerValue          *big.Int
	FloatValue            *big.Float
	BoolValue             bool
	ArrayValue            []*ExprType
	StructValue           map[string]*ExprType
	FuncValue             *Func
	NamespaceValue        *Namespace
	TypeConversionValue   TypeConversion
	// HasValue is true if one of the *Value properties holds a value.
	// This does not imply that the expression has a constant value, because
	// an ArrayValue may contain an ExprType that has no value.
	// Use IsConstant() to determine whether an expression is constant.
	HasValue bool
}

// TypeConversion enumerates the kinds of value conversions the type checker
// can attach to an expression.
type TypeConversion int

const (
	// ConvertStringToPointer ...
	ConvertStringToPointer TypeConversion = 1 + iota
	// ConvertPointerToPointer ...
	ConvertPointerToPointer
	// ConvertIntegerToPointer ...
	ConvertIntegerToPointer
	// ConvertPointerToInteger ...
	ConvertPointerToInteger
	// ConvertSliceToPointer ...
	ConvertSliceToPointer
	// ConvertPointerToSlice ...
	ConvertPointerToSlice
	// ConvertStringToByteSlice ...
	ConvertStringToByteSlice
	// ConvertPointerToString ...
	ConvertPointerToString
	// ConvertByteSliceToString ...
	ConvertByteSliceToString
	// ConvertIntegerToInteger ...
	ConvertIntegerToInteger
	// ConvertFloatToInteger ...
	ConvertFloatToInteger
	// ConvertBoolToInteger ...
	ConvertBoolToInteger
	// ConvertRuneToInteger ...
	ConvertRuneToInteger
	// ConvertIntegerToFloat ...
	ConvertIntegerToFloat
	// ConvertFloatToFloat ...
	ConvertFloatToFloat
	// ConvertIntegerToBool ...
	ConvertIntegerToBool
	// ConvertIntegerToRune ...
	ConvertIntegerToRune
	// ConvertIllegal ...
	ConvertIllegal
)

// Clone returns a field-by-field copy of et.
//
// NOTE(review): FuncValue, NamespaceValue and TypeConversionValue are NOT
// copied, and ArrayValue/StructValue are shared (shallow) with the original —
// confirm both omissions are intentional.
func (et *ExprType) Clone() *ExprType {
	result := &ExprType{}
	result.Type = et.Type
	result.Mutable = et.Mutable
	result.Unsafe = et.Unsafe
	result.PointerDestMutable = et.PointerDestMutable
	result.Volatile = et.Volatile
	result.PointerDestStackOrder = et.PointerDestStackOrder
	result.StringValue = et.StringValue
	result.RuneValue = et.RuneValue
	result.IntegerValue = et.IntegerValue
	result.FloatValue = et.FloatValue
	result.BoolValue = et.BoolValue
	result.ArrayValue = et.ArrayValue
	result.StructValue = et.StructValue
	result.HasValue = et.HasValue
	return result
}

// IsConstant ...
func (et *ExprType) IsConstant() bool { if !et.HasValue { return false } // Only null-slices are constants if _, ok := et.Type.(*SliceType); ok { if et.IntegerValue != nil { return true } return false } // Only null-pointers are constants if _, ok := et.Type.(*PointerType); ok { if et.IntegerValue != nil { return true } return false } if len(et.ArrayValue) != 0 { for _, a := range et.ArrayValue { if !a.IsConstant() { return false } } } else if len(et.StructValue) != 0 { for _, a := range et.ArrayValue { if !a.IsConstant() { return false } } } return true } // IsNullValue returns true if the expression is a null pointer or null slice. func (et *ExprType) IsNullValue() bool { if _, ok := et.Type.(*SliceType); ok { if et.IntegerValue != nil { return true } return false } if _, ok := et.Type.(*PointerType); ok { if et.IntegerValue != nil { return true } return false } if _, ok := et.Type.(*FuncType); ok { return et.FuncValue == nil } return false } // ToType ... func (et *ExprType) ToType() Type { t := et.Type if et.PointerDestMutable || et.Volatile { t = &MutableType{TypeBase: TypeBase{location: t.Location(), pkg: t.Package()}, Type: t, Mutable: et.PointerDestMutable, Volatile: et.Volatile} } if et.PointerDestStackOrder != nil { t = &StackOrderedType{TypeBase: TypeBase{location: t.Location(), pkg: t.Package()}, StackOrder: et.PointerDestStackOrder, Type: t} } return t } func exprType(n parser.Node) *ExprType { return n.TypeAnnotation().(*ExprType) } // NewExprType ... func NewExprType(t Type) *ExprType { return makeExprType(t) } // makeExprType sets the Type property. // If the Type `t` is MutableType or GroupType, these are dropped and PointerDestMutable/PointerDestGroup are set accordingly. 
func makeExprType(t Type) *ExprType {
	e := &ExprType{}
	// Peel off MutableType/StackOrderedType wrappers (possibly several,
	// interleaved) and record their information on the ExprType instead.
	for {
		switch t2 := t.(type) {
		case *MutableType:
			e.PointerDestMutable = t2.Mutable
			e.Volatile = t2.Volatile
			t = t2.Type
			continue
		case *StackOrderedType:
			e.PointerDestStackOrder = t2.StackOrder
			t = t2.Type
			continue
		}
		break
	}
	e.Type = t
	return e
}

// DeriveExprType acts like makeExprType.
// However, before it analyzes `t`, it copies the Mutable, PointerDestMutable, Volatile and Unsafe properties from `et`.
// For example if `et` is the type of an array expression and `t` is the type of the array elements, then DeriveExprType
// can be used to derive the ExprType of array elements.
func DeriveExprType(et *ExprType, t Type) *ExprType {
	e := &ExprType{Mutable: et.Mutable, PointerDestMutable: et.PointerDestMutable, Unsafe: et.Unsafe, Volatile: et.Volatile}
	for {
		switch t2 := t.(type) {
		case *MutableType:
			e.PointerDestMutable = t2.Mutable
			e.Volatile = t2.Volatile
			t = t2.Type
			continue
		case *StackOrderedType:
			e.PointerDestStackOrder = t2.StackOrder
			t = t2.Type
			continue
		}
		break
	}
	e.Type = t
	return e
}

// DerivePointerExprType acts like makeExprType.
// The Mutable property is set to `et.PointerDestMutable`.
// The PointerDestMutable property becomes true if `et.PointerDestMutable` is true and `t` is a MutableType.
// For example if `et` is the type of a slice expression and `t` is the type of the slice elements, then DerivePointerExprType
// can be used to derive the ExprType of slice elements.
func DerivePointerExprType(et *ExprType, t Type) *ExprType {
	e := &ExprType{Mutable: et.PointerDestMutable}
	// Values reached through an unsafe pointer are themselves unsafe.
	if pt, ok := GetPointerType(et.Type); ok && pt.Mode == PtrUnsafe {
		e.Unsafe = true
	}
	for {
		switch t2 := t.(type) {
		case *MutableType:
			// Mutability does not propagate through an immutable pointer.
			e.PointerDestMutable = et.PointerDestMutable && t2.Mutable
			e.Volatile = t2.Volatile
			t = t2.Type
			continue
		case *StackOrderedType:
			e.PointerDestStackOrder = t2.StackOrder
			t = t2.Type
			continue
		}
		break
	}
	e.Type = t
	return e
}

// deriveAddressOfExprType builds the ExprType of `&expr`: a pointer whose
// destination mutability mirrors the operand's mutability. The pointer is
// unsafe if the operand was reached unsafely, otherwise owning.
func deriveAddressOfExprType(et *ExprType, loc errlog.LocationRange) *ExprType {
	e := &ExprType{Mutable: true, PointerDestMutable: et.Mutable, Volatile: et.Volatile}
	pt := &PointerType{TypeBase: TypeBase{location: loc}, ElementType: et.ToType()}
	if et.Unsafe {
		pt.Mode = PtrUnsafe
	} else {
		pt.Mode = PtrOwner
	}
	e.Type = pt
	return e
}

// deriveSliceOfExprType builds the ExprType of a slice expression over the
// given element type.
func deriveSliceOfExprType(et *ExprType, elementType Type, loc errlog.LocationRange) *ExprType {
	e := &ExprType{Mutable: true, PointerDestMutable: et.Mutable, Volatile: et.Volatile}
	e.Type = &SliceType{TypeBase: TypeBase{location: loc}, ElementType: elementType}
	return e
}

// copyExprType copies the type information from `src` to `dest`.
// It does not copy values stored in ExprType.
func copyExprType(dest *ExprType, src *ExprType) {
	dest.Type = src.Type
	dest.Mutable = src.Mutable
	dest.PointerDestStackOrder = src.PointerDestStackOrder
	dest.Volatile = src.Volatile
	dest.Unsafe = src.Unsafe
	dest.PointerDestMutable = src.PointerDestMutable
}

// CloneExprType copies the type information from `src` to `dest`.
// It does not copy values stored in ExprType.
func CloneExprType(src *ExprType) *ExprType {
	dest := &ExprType{}
	dest.Type = src.Type
	dest.Mutable = src.Mutable
	dest.PointerDestStackOrder = src.PointerDestStackOrder
	dest.PointerDestMutable = src.PointerDestMutable
	dest.Volatile = src.Volatile
	dest.Unsafe = src.Unsafe
	return dest
}

// Checks whether the type `t` can be instantiated.
// For literal types, the function tries to deduce a default type.
func checkInstantiableExprType(t *ExprType, s *Scope, loc errlog.LocationRange, log *errlog.ErrorLog) error { if t.Type == integerType { if t.IntegerValue.IsInt64() { i := t.IntegerValue.Int64() if i <= (1<<31)-1 && i >= -(1<<31) { t.Type = intType } else { t.Type = int64Type } } else if t.IntegerValue.IsUint64() { t.Type = uint64Type } else { log.AddError(errlog.ErrorNumberOutOfRange, loc, t.IntegerValue.String()) } } else if t.Type == floatType { if _, acc := t.FloatValue.Float64(); acc == big.Exact { if _, acc := t.FloatValue.Float32(); acc == big.Exact { t.Type = float32Type } else { t.Type = float64Type } } else { log.AddError(errlog.ErrorNumberOutOfRange, loc, t.FloatValue.String()) } } else if t.Type == nullType || t.Type == voidType || t.Type == structLiteralType || t.Type == namespaceType { // TODO: Use a better string representation of the type log.AddError(errlog.ErrorTypeCannotBeInstantiated, loc, t.Type.Name()) } else if t.Type == arrayLiteralType { if len(t.ArrayValue) == 0 { log.AddError(errlog.ErrorTypeCannotBeInstantiated, loc, t.Type.Name()) } if err := checkInstantiableExprType(t.ArrayValue[0], s, loc, log); err != nil { return err } for i := 1; i < len(t.ArrayValue); i++ { if needsTypeInference(t.ArrayValue[i]) { if err := inferType(t.ArrayValue[i], t.ArrayValue[0], false, loc, log); err != nil { return err } } else { if err := checkExprEqualType(t.ArrayValue[0], t.ArrayValue[i], Assignable, loc, log); err != nil { return err } } } t.Type = &ArrayType{TypeBase: TypeBase{location: loc}, Size: uint64(len(t.ArrayValue)), ElementType: t.ArrayValue[0].ToType()} } return nil } func needsTypeInference(t *ExprType) bool { return t.Type == floatType || t.Type == integerType || t.Type == nullType || t.Type == arrayLiteralType || t.Type == structLiteralType } func checkExprEqualType(tleft *ExprType, tright *ExprType, mode EqualTypeMode, loc errlog.LocationRange, log *errlog.ErrorLog) error { if needsTypeInference(tleft) && needsTypeInference(tright) { if 
mode == Assignable { panic("Cannot assign to a constant") } if tleft.Type == integerType && tright.Type == floatType { return inferType(tleft, &ExprType{Type: floatType}, false, loc, log) } if tleft.Type == floatType && tright.Type == integerType { return inferType(tright, &ExprType{Type: floatType}, false, loc, log) } if tleft.Type == tright.Type { return nil } return log.AddError(errlog.ErrorIncompatibleTypes, loc) } else if needsTypeInference(tleft) { return inferType(tleft, tright, false, loc, log) } else if needsTypeInference(tright) { return inferType(tright, tleft, false, loc, log) } if mode == Strict && tleft.PointerDestMutable != tright.PointerDestMutable { return log.AddError(errlog.ErrorIncompatibleTypes, loc) } else if tleft.PointerDestMutable && !tright.PointerDestMutable && mode == Assignable { return log.AddError(errlog.ErrorIncompatibleTypes, loc) } return checkEqualType(tleft.Type, tright.Type, mode, loc, log) } func inferType(et *ExprType, target *ExprType, nested bool, loc errlog.LocationRange, log *errlog.ErrorLog) error { tt := StripType(target.Type) if et.Type == integerType { if tt == integerType { return nil } else if tt == intType { et.Type = target.Type return checkIntegerBoundaries(et.IntegerValue, 32, loc, log) } else if tt == int8Type { et.Type = target.Type return checkIntegerBoundaries(et.IntegerValue, 8, loc, log) } else if tt == int16Type { et.Type = target.Type return checkIntegerBoundaries(et.IntegerValue, 16, loc, log) } else if tt == int32Type { et.Type = target.Type return checkIntegerBoundaries(et.IntegerValue, 32, loc, log) } else if tt == int64Type { et.Type = target.Type return checkIntegerBoundaries(et.IntegerValue, 64, loc, log) } else if tt == uintType { et.Type = target.Type return checkUIntegerBoundaries(et.IntegerValue, 32, loc, log) } else if tt == uint8Type { et.Type = target.Type return checkUIntegerBoundaries(et.IntegerValue, 8, loc, log) } else if tt == uint16Type { et.Type = target.Type return 
checkUIntegerBoundaries(et.IntegerValue, 16, loc, log) } else if tt == uint32Type { et.Type = target.Type return checkUIntegerBoundaries(et.IntegerValue, 32, loc, log) } else if tt == uint64Type { et.Type = target.Type return checkUIntegerBoundaries(et.IntegerValue, 64, loc, log) } else if tt == runeType { et.Type = target.Type return checkUIntegerBoundaries(et.IntegerValue, 16, loc, log) } else if tt == uintptrType { et.Type = target.Type // TODO: The 64 depends on the target plaform return checkUIntegerBoundaries(et.IntegerValue, 64, loc, log) } else if tt == floatType || tt == float32Type || tt == float64Type { et.Type = target.Type et.FloatValue = big.NewFloat(0) et.FloatValue.SetInt(et.IntegerValue) et.IntegerValue = nil return nil } else if IsUnsafePointerType(tt) { // Convert an integer to an unsafe pointer et.Type = target.Type et.PointerDestMutable = target.PointerDestMutable et.Volatile = target.Volatile // TODO: The 64 depends on the target plaform return checkUIntegerBoundaries(et.IntegerValue, 64, loc, log) } } else if et.Type == floatType { if tt == floatType { return nil } else if tt == float32Type { et.Type = target.Type return nil } else if tt == float64Type { et.Type = target.Type return nil } } else if et.Type == nullType { if IsPointerType(tt) || IsSliceType(tt) || IsFuncType(tt) || IsStringType(tt) { copyExprType(et, target) return nil } } else if et.Type == arrayLiteralType { if s, ok := GetSliceType(tt); ok { tet := DerivePointerExprType(target, s.ElementType) for _, vet := range et.ArrayValue { if needsTypeInference(vet) { // TODO: loc is not the optimal location if err := inferType(vet, tet, true, loc, log); err != nil { return err } } else { if err := checkExprEqualType(tet, vet, Assignable, loc, log); err != nil { return err } } } copyExprType(et, target) // Do not use stack order on temporary values. 
et.PointerDestStackOrder = nil return nil } else if a, ok := GetArrayType(tt); ok { tet := DeriveExprType(target, a.ElementType) if len(et.ArrayValue) != 0 && uint64(len(et.ArrayValue)) != a.Size { return log.AddError(errlog.ErrorIncompatibleTypes, loc) } for _, vet := range et.ArrayValue { if needsTypeInference(vet) { // TODO: loc is not the optimal location if err := inferType(vet, tet, true, loc, log); err != nil { return err } } else { if err := checkExprEqualType(tet, vet, Assignable, loc, log); err != nil { return err } } } copyExprType(et, target) // Do not use stack order on temporary values. et.PointerDestStackOrder = nil return nil } } else if et.Type == structLiteralType { targetType := tt isPointer := false if ptr, ok := GetPointerType(tt); ok { isPointer = true targetType = ptr.ElementType } if s, ok := GetStructType(targetType); ok { for name, vet := range et.StructValue { found := false for _, f := range s.Fields { if f.Name == name { var tet *ExprType if isPointer { tet = DerivePointerExprType(target, f.Type) } else { tet = DeriveExprType(target, f.Type) } found = true if needsTypeInference(vet) { // TODO: loc is not the optimal location if err := inferType(vet, tet, true, loc, log); err != nil { return err } } else { if err := checkExprEqualType(tet, vet, Assignable, loc, log); err != nil { return err } } break } } if !found { return log.AddError(errlog.ErrorUnknownField, loc, name) } } copyExprType(et, target) // Do not use stack order on temporary values. 
et.PointerDestStackOrder = nil return nil } if s, ok := GetUnionType(targetType); ok { if len(et.StructValue) > 1 { return log.AddError(errlog.ErrorExcessiveUnionValue, loc) } for name, vet := range et.StructValue { found := false for _, f := range s.Fields { if f.Name == name { var tet *ExprType if isPointer { tet = DerivePointerExprType(target, f.Type) } else { tet = DeriveExprType(target, f.Type) } found = true if needsTypeInference(vet) { // TODO: loc is not the optimal location if err := inferType(vet, tet, true, loc, log); err != nil { return err } } else { if err := checkExprEqualType(tet, vet, Assignable, loc, log); err != nil { return err } } break } } if !found { return log.AddError(errlog.ErrorUnknownField, loc, name) } } copyExprType(et, target) // Do not use stack order on temporary values. et.PointerDestStackOrder = nil return nil } } return log.AddError(errlog.ErrorIncompatibleTypes, loc) } func checkExprIntType(et *ExprType, loc errlog.LocationRange, log *errlog.ErrorLog) error { target := &ExprType{Type: PrimitiveTypeInt} return checkExprEqualType(target, et, Assignable, loc, log) } func checkIntegerBoundaries(bigint *big.Int, bits uint, loc errlog.LocationRange, log *errlog.ErrorLog) error { if bigint.IsInt64() { i := bigint.Int64() if i <= (1<<(bits-1))-1 && i >= -(1<<(bits-1)) { return nil } } return log.AddError(errlog.ErrorNumberOutOfRange, loc, bigint.String()) } func checkUIntegerBoundaries(bigint *big.Int, bits uint, loc errlog.LocationRange, log *errlog.ErrorLog) error { if bigint.IsUint64() { i := bigint.Uint64() if i <= (1<<(bits))-1 { return nil } } return log.AddError(errlog.ErrorNumberOutOfRange, loc, bigint.String()) } func checkExprStringType(et *ExprType, loc errlog.LocationRange, log *errlog.ErrorLog) error { target := &ExprType{Type: PrimitiveTypeString} return checkExprEqualType(target, et, Assignable, loc, log) }
internal/types/exprtype.go
0.655667
0.541833
exprtype.go
starcoder
package helpers

import (
	cryptoRand "crypto/rand"
	"fmt"
	"io"
	"math/rand"
	"reflect"
	"strconv"
	"time"
)

// init seeds the shared math/rand source exactly once.
//
// FIX: the original called rand.Seed(time.Now().UnixNano()) inside every
// random helper. Re-seeding per call is deprecated (Go 1.20+) and a
// correctness hazard: two calls within the same nanosecond restart the
// generator and return identical values. On Go >= 1.20 the global source is
// auto-seeded and this init can be removed entirely.
func init() {
	rand.Seed(time.Now().UnixNano())
}

// Empty reports whether val is nil or the zero value of its dynamic type
// (empty string/slice/map/array, false, numeric zero, nil pointer/interface).
func Empty(val interface{}) bool {
	if val == nil {
		return true
	}
	v := reflect.ValueOf(val)
	switch v.Kind() {
	case reflect.String, reflect.Array:
		return v.Len() == 0
	case reflect.Map, reflect.Slice:
		return v.Len() == 0 || v.IsNil()
	case reflect.Bool:
		return !v.Bool()
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return v.Int() == 0
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
		return v.Uint() == 0
	case reflect.Float32, reflect.Float64:
		return v.Float() == 0
	case reflect.Interface, reflect.Ptr:
		return v.IsNil()
	}
	// Fallback for structs and other kinds: compare against the zero value.
	return reflect.DeepEqual(val, reflect.Zero(v.Type()).Interface())
}

// MicrosecondsStr formats a duration as milliseconds with three decimals,
// e.g. "1.500ms".
func MicrosecondsStr(elapsed time.Duration) string {
	return fmt.Sprintf("%.3fms", float64(elapsed.Nanoseconds())/1e6)
}

// RandomNumber returns a string of `length` random decimal digits drawn from
// crypto/rand. It panics if the system entropy source fails.
func RandomNumber(length int) string {
	table := [...]byte{'1', '2', '3', '4', '5', '6', '7', '8', '9', '0'}
	b := make([]byte, length)
	n, err := io.ReadAtLeast(cryptoRand.Reader, b, length)
	if n != length {
		panic(err)
	}
	// Map each random byte onto a digit. (Slight modulo bias: 256 % 10 != 0.)
	for i := 0; i < len(b); i++ {
		b[i] = table[int(b[i])%len(table)]
	}
	return string(b)
}

// RandomNumberRange returns a pseudo-random float in [min, max], rounded to
// `precision` decimal places.
func RandomNumberRange(min float64, max float64, precision int) float64 {
	data := min + rand.Float64()*(max-min)
	value := strconv.FormatFloat(data, 'f', precision, 64)
	// The string was produced by FormatFloat, so it always parses cleanly.
	result, _ := strconv.ParseFloat(value, 64)
	return result
}

// RandomWeightedIndex picks an index of weights with probability proportional
// to each weight. A single-element slice always yields 0.
func RandomWeightedIndex(weights []float32) int {
	if len(weights) == 1 {
		return 0
	}
	var sum float32 = 0.0
	for _, w := range weights {
		sum += w
	}
	r := rand.Float32() * sum
	var t float32 = 0.0
	for i, w := range weights {
		t += w
		if t > r {
			return i
		}
	}
	// Floating-point slack: fall back to the last index.
	return len(weights) - 1
}

// FirstElement returns the first element of args, or "" when args is empty.
func FirstElement(args []string) string {
	if len(args) > 0 {
		return args[0]
	}
	return ""
}

// RandomString returns a pseudo-random string of ASCII letters of the given
// length.
func RandomString(length int) string {
	letters := "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
	b := make([]byte, length)
	for i := range b {
		b[i] = letters[rand.Intn(len(letters))]
	}
	return string(b)
}

// Contains reports whether str is an element of s.
func Contains(s []string, str string) bool {
	for _, v := range s {
		if v == str {
			return true
		}
	}
	return false
}
helpers/helpers.go
0.647687
0.404978
helpers.go
starcoder
package opt

import (
	"github.com/cockroachdb/cockroach/pkg/util"
	"github.com/cockroachdb/errors"
)

// ColSet efficiently stores an unordered set of column ids.
//
// It is a thin wrapper around util.FastIntSet; mutating methods take a
// pointer receiver, read-only methods take a value receiver (FastIntSet
// is cheap to copy for small sets — presumably an inline bitmap with a
// spill-to-heap representation; confirm against util.FastIntSet).
type ColSet struct {
	set util.FastIntSet
}

// MakeColSet returns a set initialized with the given values.
func MakeColSet(vals ...ColumnID) ColSet {
	var res ColSet
	for _, v := range vals {
		res.Add(v)
	}
	return res
}

// Add adds a column to the set. No-op if the column is already in the set.
func (s *ColSet) Add(col ColumnID) { s.set.Add(int(col)) }

// Remove removes a column from the set. No-op if the column is not in the set.
func (s *ColSet) Remove(col ColumnID) { s.set.Remove(int(col)) }

// Contains returns true if the set contains the column.
func (s ColSet) Contains(col ColumnID) bool { return s.set.Contains(int(col)) }

// Empty returns true if the set is empty.
func (s ColSet) Empty() bool { return s.set.Empty() }

// Len returns the number of the columns in the set.
func (s ColSet) Len() int { return s.set.Len() }

// Next returns the first value in the set which is >= startVal. If there is no
// such column, the second return value is false.
func (s ColSet) Next(startVal ColumnID) (ColumnID, bool) {
	c, ok := s.set.Next(int(startVal))
	return ColumnID(c), ok
}

// ForEach calls a function for each column in the set (in increasing order).
func (s ColSet) ForEach(f func(col ColumnID)) { s.set.ForEach(func(i int) { f(ColumnID(i)) }) }

// Copy returns a copy of s which can be modified independently.
func (s ColSet) Copy() ColSet { return ColSet{set: s.set.Copy()} }

// UnionWith adds all the columns from rhs to this set.
func (s *ColSet) UnionWith(rhs ColSet) { s.set.UnionWith(rhs.set) }

// Union returns the union of s and rhs as a new set.
func (s ColSet) Union(rhs ColSet) ColSet { return ColSet{set: s.set.Union(rhs.set)} }

// IntersectionWith removes any columns not in rhs from this set.
func (s *ColSet) IntersectionWith(rhs ColSet) { s.set.IntersectionWith(rhs.set) }

// Intersection returns the intersection of s and rhs as a new set.
func (s ColSet) Intersection(rhs ColSet) ColSet { return ColSet{set: s.set.Intersection(rhs.set)} }

// DifferenceWith removes any elements in rhs from this set.
func (s *ColSet) DifferenceWith(rhs ColSet) { s.set.DifferenceWith(rhs.set) }

// Difference returns the elements of s that are not in rhs as a new set.
func (s ColSet) Difference(rhs ColSet) ColSet { return ColSet{set: s.set.Difference(rhs.set)} }

// Intersects returns true if s has any elements in common with rhs.
func (s ColSet) Intersects(rhs ColSet) bool { return s.set.Intersects(rhs.set) }

// Equals returns true if the two sets are identical.
func (s ColSet) Equals(rhs ColSet) bool { return s.set.Equals(rhs.set) }

// SubsetOf returns true if rhs contains all the elements in s.
func (s ColSet) SubsetOf(rhs ColSet) bool { return s.set.SubsetOf(rhs.set) }

// String returns a list representation of elements. Sequential runs of positive
// numbers are shown as ranges. For example, for the set {1, 2, 3 5, 6, 10},
// the output is "(1-3,5,6,10)".
func (s ColSet) String() string { return s.set.String() }

// SingleColumn returns the single column in s. Panics if s does not contain
// exactly one column.
func (s ColSet) SingleColumn() ColumnID {
	if s.Len() != 1 {
		panic(errors.AssertionFailedf("expected a single column but found %d columns", s.Len()))
	}
	// Len()==1 guarantees Next(0) finds a column, so ok can be ignored.
	col, _ := s.Next(0)
	return col
}
pkg/sql/opt/colset.go
0.851537
0.456168
colset.go
starcoder
// Package simplersa implements textbook RSA with PKCS #1 v1.5 (RFC 2313)
// block-type-02 padding. It is an educational implementation: it is NOT
// constant-time and must not be used to protect real data.
package simplersa

import (
	"bytes"
	"crypto/rand"
	"fmt"
	"math/big"
)

// PublicKey is the public part of an RSA key pair.
type PublicKey struct {
	N *big.Int
	E *big.Int
}

// PrivateKey is the private part of an RSA key pair. According to RFC 2313 we
// could include the prime factors of N and other data here to make decryption
// faster, but N and D are sufficient for decrypting messages.
type PrivateKey struct {
	N *big.Int
	D *big.Int
}

// GenerateKeys generates a public/private key pair for RSA
// encryption/decryption with the given bitlen. See RFC 2313 section 6.
func GenerateKeys(bitlen int) (*PublicKey, *PrivateKey, error) {
	numRetries := 0
	for {
		// Retries happen only when n has the wrong bit length or e has no
		// inverse modulo the totient; both are rare, so ten consecutive
		// failures indicate something is fundamentally broken.
		numRetries++
		if numRetries == 10 {
			panic("retrying too many times, something is wrong")
		}

		// We need a result pq with b bits, so we generate p and q with b/2 bits
		// each. If the top bit of p and q are set, the result will have b bits.
		// Otherwise, we'll retry. rand.Prime should return primes with their top
		// bit set, so in practice there will be no retries.
		p, err := rand.Prime(rand.Reader, bitlen/2)
		if err != nil {
			return nil, nil, err
		}
		q, err := rand.Prime(rand.Reader, bitlen/2)
		if err != nil {
			return nil, nil, err
		}

		// n is pq
		n := new(big.Int).Set(p)
		n.Mul(n, q)
		if n.BitLen() != bitlen {
			continue
		}

		// theta(n) = (p-1)(q-1)
		// Note: p and q are mutated in place here; they are not needed again.
		p.Sub(p, big.NewInt(1))
		q.Sub(q, big.NewInt(1))
		totient := new(big.Int).Set(p)
		totient.Mul(totient, q)

		// e as recommended by PKCS#1 (RFC 2313)
		e := big.NewInt(65537)

		// Calculate the modular multiplicative inverse of e such that:
		//   de = 1 (mod totient)
		// If gcd(e, totient)=1, then e is guaranteed to have a unique inverse, but
		// since p-1 or q-1 could theoretically have e as a factor, this may fail
		// once in a while (likely to be exceedingly rare).
		d := new(big.Int).ModInverse(e, totient)
		if d == nil {
			continue
		}

		pub := &PublicKey{N: n, E: e}
		priv := &PrivateKey{N: n, D: d}
		return pub, priv, nil
	}
}

// encrypt performs encryption of the message m using a public key, and returns
// the encrypted cipher. Encoding the message as a big.Int is the caller's
// responsibility.
func encrypt(pub *PublicKey, m *big.Int) *big.Int {
	// c = m^E mod N
	c := new(big.Int)
	c.Exp(m, pub.E, pub.N)
	return c
}

// decrypt performs decryption of the cipher c using a private key, and returns
// the decrypted message.
func decrypt(priv *PrivateKey, c *big.Int) *big.Int {
	// m = c^D mod N
	m := new(big.Int)
	m.Exp(c, priv.D, priv.N)
	return m
}

// EncryptRSA encrypts the message m using public key pub and returns the
// encrypted bytes. The length of m must be <= size_in_bytes(pub.N) - 11,
// otherwise an error is returned. The encryption block format is based on
// PKCS #1 v1.5 (RFC 2313).
func EncryptRSA(pub *PublicKey, m []byte) ([]byte, error) {
	// Compute length of key in bytes, rounding up.
	keyLen := (pub.N.BitLen() + 7) / 8
	// The 11 reserved bytes are: 00, 02, at least 8 bytes of PS, and the 00
	// separator.
	if len(m) > keyLen-11 {
		return nil, fmt.Errorf("len(m)=%v, too long", len(m))
	}

	// Following RFC 2313, using block type 02 as recommended for encryption:
	//   EB = 00 || 02 || PS || 00 || D
	psLen := keyLen - len(m) - 3
	eb := make([]byte, keyLen)
	eb[0] = 0x00
	eb[1] = 0x02

	// Fill PS with random non-zero bytes. Zero bytes drawn from the RNG are
	// simply discarded and redrawn (i only advances on a non-zero byte).
	for i := 2; i < 2+psLen; {
		_, err := rand.Read(eb[i : i+1])
		if err != nil {
			return nil, err
		}
		if eb[i] != 0x00 {
			i++
		}
	}
	eb[2+psLen] = 0x00

	// Copy the message m into the rest of the encryption block.
	copy(eb[3+psLen:], m)

	// Now the encryption block is complete; we take it as a m-byte big.Int and
	// RSA-encrypt it with the public key.
	mnum := new(big.Int).SetBytes(eb)
	c := encrypt(pub, mnum)

	// The result is a big.Int whose byte representation is usually exactly
	// keyLen bytes. In the rare case where the most significant byte(s) of c
	// are zero, big.Int.Bytes() omits them, so we left-pad with zero bytes to
	// restore a fixed keyLen-byte output. The eb buffer is reused to hold the
	// ciphertext.
	padLen := keyLen - len(c.Bytes())
	for i := 0; i < padLen; i++ {
		eb[i] = 0x00
	}
	copy(eb[padLen:], c.Bytes())
	return eb, nil
}

// DecryptRSA decrypts the message c using private key priv and returns the
// decrypted bytes, based on block 02 from PKCS #1 v1.5 (RFC 2313).
// It expects the length in bytes of the private key modulo to be len(eb).
// Important: this is a simple implementation not designed to be resilient to
// timing attacks.
func DecryptRSA(priv *PrivateKey, c []byte) ([]byte, error) {
	keyLen := (priv.N.BitLen() + 7) / 8
	if len(c) != keyLen {
		return nil, fmt.Errorf("len(c)=%v, want keyLen=%v", len(c), keyLen)
	}

	// Convert c into a bit.Int and decrypt it using the private key.
	cnum := new(big.Int).SetBytes(c)
	mnum := decrypt(priv, cnum)

	// Write the bytes of mnum into m, left-padding if needed.
	m := make([]byte, keyLen)
	copy(m[keyLen-len(mnum.Bytes()):], mnum.Bytes())

	// Expect proper block 02 beginning.
	if m[0] != 0x00 {
		return nil, fmt.Errorf("m[0]=%v, want 0x00", m[0])
	}
	if m[1] != 0x02 {
		return nil, fmt.Errorf("m[1]=%v, want 0x02", m[1])
	}

	// Skip over random padding until a 0x00 byte is reached. +2 adjusts the index
	// back to the full slice. IndexByte returns -1 when no 0x00 exists, which
	// the endPad < 2 check below rejects.
	endPad := bytes.IndexByte(m[2:], 0x00) + 2
	if endPad < 2 {
		return nil, fmt.Errorf("end of padding not found")
	}
	return m[endPad+1:], nil
}
2019/rsa/simplersa.go
0.633183
0.429728
simplersa.go
starcoder
// Package camera provides a simple camera abstraction that uploads its
// projection and view matrices to the active shader's "projection" and
// "camera" uniforms.
package camera

import (
	"github.com/anthonyrego/gosmf/shader"
	"github.com/go-gl/gl/v4.1-core/gl"
	"github.com/go-gl/mathgl/mgl32"
)

// state holds the package-level singleton: the camera currently used for
// rendering. NOTE(review): this is unsynchronized global state — presumably
// all access happens on the GL/render thread.
var state struct {
	activeCam *Camera
}

func init() {
	// Start with a zero-value camera so GetActiveCamera never returns nil.
	state.activeCam = &Camera{}
}

// GetActiveCamera get the current camera in the state
func GetActiveCamera() *Camera {
	return state.activeCam
}

// Camera type for camera making
type Camera struct {
	// Bounds is the viewport extent (width, height, z-depth).
	Bounds     mgl32.Vec3
	projection mgl32.Mat4
	viewMatrix mgl32.Mat4
	zDepth     float32
}

// New creates a new camera
// Set setActive to true if you want to have it start as the render camera.
func New(setActive bool) *Camera {
	cam := Camera{}
	if setActive {
		cam.SetActive()
	}
	return &cam
}

// SetActive will set current camera as the render camera
func (cam *Camera) SetActive() {
	state.activeCam = cam
	// Push both matrices to the currently bound shader, if any.
	if s := shader.GetActive(); s != nil {
		gl.UniformMatrix4fv(s.GetUniform("projection"), 1, false, &cam.projection[0])
		gl.UniformMatrix4fv(s.GetUniform("camera"), 1, false, &cam.viewMatrix[0])
	}
}

// SetOrtho will set the camera to an orthogonal projection
// with (0,0) at the top-left corner (note the flipped Y range).
func (cam *Camera) SetOrtho(w int, h int, zDepth int) {
	cam.zDepth = float32(zDepth)
	cam.Bounds = mgl32.Vec3{float32(w), float32(h), float32(zDepth)}
	cam.projection = mgl32.Ortho(0, float32(w), float32(h), 0, -1, cam.zDepth)
	cam.update()
}

// SetPerspective set a perspective projection
// with the given vertical field of view (angle, in degrees).
func (cam *Camera) SetPerspective(angle float32, w int, h int, zDepth int) {
	cam.zDepth = float32(zDepth)
	cam.Bounds = mgl32.Vec3{float32(w), float32(h), float32(zDepth)}
	cam.projection = mgl32.Perspective(mgl32.DegToRad(angle), float32(w)/float32(h), 0.1, float32(zDepth))
	cam.update()
}

// SetPosition2D will adjust the camera for ortho viewing to specified location
// (the camera looks straight down the Z axis at (x, y, 0)).
func (cam *Camera) SetPosition2D(x float32, y float32) {
	cam.viewMatrix = mgl32.LookAtV(mgl32.Vec3{x, y, cam.zDepth}, mgl32.Vec3{x, y, 0}, mgl32.Vec3{0, 1, 0})
	cam.update()
}

// SetViewMatrix will set the lookAt and up vector as well as the position of the camera
func (cam *Camera) SetViewMatrix(position mgl32.Vec3, lookAt mgl32.Vec3, up mgl32.Vec3) {
	cam.viewMatrix = mgl32.LookAtV(position, lookAt, up)
	cam.update()
}

// GetViewMatrix will get matrix for the camera
func (cam *Camera) GetViewMatrix() mgl32.Mat4 {
	return cam.viewMatrix
}

// GetProjectionMatrix will get projection matrix for the camera
func (cam *Camera) GetProjectionMatrix() mgl32.Mat4 {
	return cam.projection
}

// update re-uploads the matrices to the active shader, but only when this
// camera is the active one (otherwise the change takes effect on SetActive).
func (cam *Camera) update() {
	if s := shader.GetActive(); s != nil && cam == state.activeCam {
		gl.UniformMatrix4fv(s.GetUniform("projection"), 1, false, &cam.projection[0])
		gl.UniformMatrix4fv(s.GetUniform("camera"), 1, false, &cam.viewMatrix[0])
	}
}
camera/camera.go
0.830388
0.436562
camera.go
starcoder
package msgraph

// RiskEventType undocumented
type RiskEventType int

// Enumeration values for RiskEventType. The values are sequential starting
// at 0, expressed via iota (identical to the previous explicit literals).
const (
	// RiskEventTypeVUnlikelyTravel undocumented
	RiskEventTypeVUnlikelyTravel RiskEventType = iota
	// RiskEventTypeVAnonymizedIPAddress undocumented
	RiskEventTypeVAnonymizedIPAddress
	// RiskEventTypeVMaliciousIPAddress undocumented
	RiskEventTypeVMaliciousIPAddress
	// RiskEventTypeVUnfamiliarFeatures undocumented
	RiskEventTypeVUnfamiliarFeatures
	// RiskEventTypeVMalwareInfectedIPAddress undocumented
	RiskEventTypeVMalwareInfectedIPAddress
	// RiskEventTypeVSuspiciousIPAddress undocumented
	RiskEventTypeVSuspiciousIPAddress
	// RiskEventTypeVLeakedCredentials undocumented
	RiskEventTypeVLeakedCredentials
	// RiskEventTypeVInvestigationsThreatIntelligence undocumented
	RiskEventTypeVInvestigationsThreatIntelligence
	// RiskEventTypeVGeneric undocumented
	RiskEventTypeVGeneric
	// RiskEventTypeVAdminConfirmedUserCompromised undocumented
	RiskEventTypeVAdminConfirmedUserCompromised
	// RiskEventTypeVMcasImpossibleTravel undocumented
	RiskEventTypeVMcasImpossibleTravel
	// RiskEventTypeVMcasSuspiciousInboxManipulationRules undocumented
	RiskEventTypeVMcasSuspiciousInboxManipulationRules
	// RiskEventTypeVInvestigationsThreatIntelligenceSigninLinked undocumented
	RiskEventTypeVInvestigationsThreatIntelligenceSigninLinked
	// RiskEventTypeVMaliciousIPAddressValidCredentialsBlockedIP undocumented
	RiskEventTypeVMaliciousIPAddressValidCredentialsBlockedIP
	// RiskEventTypeVUnknownFutureValue undocumented
	RiskEventTypeVUnknownFutureValue
)

// ptrRiskEventType returns a pointer to a fresh copy of v, so each caller
// gets a distinct address. Shared by all the P-helpers below.
func ptrRiskEventType(v RiskEventType) *RiskEventType { return &v }

// RiskEventTypePUnlikelyTravel returns a pointer to RiskEventTypeVUnlikelyTravel
func RiskEventTypePUnlikelyTravel() *RiskEventType {
	return ptrRiskEventType(RiskEventTypeVUnlikelyTravel)
}

// RiskEventTypePAnonymizedIPAddress returns a pointer to RiskEventTypeVAnonymizedIPAddress
func RiskEventTypePAnonymizedIPAddress() *RiskEventType {
	return ptrRiskEventType(RiskEventTypeVAnonymizedIPAddress)
}

// RiskEventTypePMaliciousIPAddress returns a pointer to RiskEventTypeVMaliciousIPAddress
func RiskEventTypePMaliciousIPAddress() *RiskEventType {
	return ptrRiskEventType(RiskEventTypeVMaliciousIPAddress)
}

// RiskEventTypePUnfamiliarFeatures returns a pointer to RiskEventTypeVUnfamiliarFeatures
func RiskEventTypePUnfamiliarFeatures() *RiskEventType {
	return ptrRiskEventType(RiskEventTypeVUnfamiliarFeatures)
}

// RiskEventTypePMalwareInfectedIPAddress returns a pointer to RiskEventTypeVMalwareInfectedIPAddress
func RiskEventTypePMalwareInfectedIPAddress() *RiskEventType {
	return ptrRiskEventType(RiskEventTypeVMalwareInfectedIPAddress)
}

// RiskEventTypePSuspiciousIPAddress returns a pointer to RiskEventTypeVSuspiciousIPAddress
func RiskEventTypePSuspiciousIPAddress() *RiskEventType {
	return ptrRiskEventType(RiskEventTypeVSuspiciousIPAddress)
}

// RiskEventTypePLeakedCredentials returns a pointer to RiskEventTypeVLeakedCredentials
func RiskEventTypePLeakedCredentials() *RiskEventType {
	return ptrRiskEventType(RiskEventTypeVLeakedCredentials)
}

// RiskEventTypePInvestigationsThreatIntelligence returns a pointer to RiskEventTypeVInvestigationsThreatIntelligence
func RiskEventTypePInvestigationsThreatIntelligence() *RiskEventType {
	return ptrRiskEventType(RiskEventTypeVInvestigationsThreatIntelligence)
}

// RiskEventTypePGeneric returns a pointer to RiskEventTypeVGeneric
func RiskEventTypePGeneric() *RiskEventType {
	return ptrRiskEventType(RiskEventTypeVGeneric)
}

// RiskEventTypePAdminConfirmedUserCompromised returns a pointer to RiskEventTypeVAdminConfirmedUserCompromised
func RiskEventTypePAdminConfirmedUserCompromised() *RiskEventType {
	return ptrRiskEventType(RiskEventTypeVAdminConfirmedUserCompromised)
}

// RiskEventTypePMcasImpossibleTravel returns a pointer to RiskEventTypeVMcasImpossibleTravel
func RiskEventTypePMcasImpossibleTravel() *RiskEventType {
	return ptrRiskEventType(RiskEventTypeVMcasImpossibleTravel)
}

// RiskEventTypePMcasSuspiciousInboxManipulationRules returns a pointer to RiskEventTypeVMcasSuspiciousInboxManipulationRules
func RiskEventTypePMcasSuspiciousInboxManipulationRules() *RiskEventType {
	return ptrRiskEventType(RiskEventTypeVMcasSuspiciousInboxManipulationRules)
}

// RiskEventTypePInvestigationsThreatIntelligenceSigninLinked returns a pointer to RiskEventTypeVInvestigationsThreatIntelligenceSigninLinked
func RiskEventTypePInvestigationsThreatIntelligenceSigninLinked() *RiskEventType {
	return ptrRiskEventType(RiskEventTypeVInvestigationsThreatIntelligenceSigninLinked)
}

// RiskEventTypePMaliciousIPAddressValidCredentialsBlockedIP returns a pointer to RiskEventTypeVMaliciousIPAddressValidCredentialsBlockedIP
func RiskEventTypePMaliciousIPAddressValidCredentialsBlockedIP() *RiskEventType {
	return ptrRiskEventType(RiskEventTypeVMaliciousIPAddressValidCredentialsBlockedIP)
}

// RiskEventTypePUnknownFutureValue returns a pointer to RiskEventTypeVUnknownFutureValue
func RiskEventTypePUnknownFutureValue() *RiskEventType {
	return ptrRiskEventType(RiskEventTypeVUnknownFutureValue)
}
v1.0/RiskEventTypeEnum.go
0.601828
0.41253
RiskEventTypeEnum.go
starcoder
// Package-internal helpers for VelocyPack: little-endian integer codecs,
// unsigned-LEB128 variable-length integers, and small numeric utilities.
package velocypack

import (
	"fmt"
	"io"
)

// vpackAssert panics if v is false.
func vpackAssert(v bool) {
	if !v {
		panic("VELOCYPACK_ASSERT failed")
	}
}

// readBytes reads bytes from the given reader until the given slice is full.
// An error is only returned if the reader fails before dst is filled;
// a read error delivered together with the final bytes is swallowed.
func readBytes(dst []byte, r io.Reader) error {
	offset := 0
	l := len(dst)
	for {
		n, err := r.Read(dst[offset:])
		offset += n
		l -= n
		if l == 0 {
			// We're done
			return nil
		}
		if err != nil {
			return WithStack(err)
		}
	}
}

// readIntegerFixed reads an unsigned little endian integer value of the
// specified length (in bytes, at most 8), starting at the start of the slice.
func readIntegerFixed(start []byte, length uint) uint64 {
	return readIntegerNonEmpty(start, length)
}

// readIntegerFixedFromReader reads an unsigned little endian integer value of
// the specified length from r, returning the value and the raw bytes consumed.
func readIntegerFixedFromReader(r io.Reader, length uint) (uint64, []byte, error) {
	buf := make([]byte, length)
	if err := readBytes(buf, r); err != nil {
		return 0, nil, WithStack(err)
	}
	return readIntegerFixed(buf, length), buf, nil
}

// readIntegerNonEmpty reads an unsigned little endian integer value of the
// specified length from the start of s: byte i contributes s[i] << (8*i).
func readIntegerNonEmpty(s []byte, length uint) uint64 {
	x := uint(0)
	v := uint64(0)
	for i := uint(0); i < length; i++ {
		v += uint64(s[i]) << x
		x += 8
	}
	return v
}

// readIntegerNonEmptyFromReader reads an unsigned little endian integer value
// of the specified length from r, returning the value and the bytes consumed.
func readIntegerNonEmptyFromReader(r io.Reader, length uint) (uint64, []byte, error) {
	buf := make([]byte, length)
	if err := readBytes(buf, r); err != nil {
		return 0, nil, WithStack(err)
	}
	return readIntegerNonEmpty(buf, length), buf, nil
}

// toInt64 reinterprets a uint64 as a two's-complement int64 without relying
// on implementation-defined conversion of out-of-range values.
func toInt64(v uint64) int64 {
	shift2 := uint64(1) << 63
	shift := int64(shift2 - 1)
	if v >= shift2 {
		// v has the sign bit set: map [2^63, 2^64) onto [-2^63, 0).
		return (int64(v-shift2) - shift) - 1
	} else {
		return int64(v)
	}
}

// toUInt64 reinterprets an int64 as its two's-complement uint64 bit pattern.
func toUInt64(v int64) uint64 {
	// If v is negative, we need to add 2^63 to make it positive,
	// before we can cast it to an uint64_t:
	if v >= 0 {
		return uint64(v)
	}
	shift2 := uint64(1) << 63
	shift := int64(shift2 - 1)
	return uint64((v+shift)+1) + shift2
	// return v >= 0 ? static_cast<uint64_t>(v)
	//               : static_cast<uint64_t>((v + shift) + 1) + shift2;
	// Note that g++ and clang++ with -O3 compile this away to
	// nothing. Further note that a plain cast from int64_t to
	// uint64_t is not guaranteed to work for negative values!
}

// readVariableValueLength reads a variable length integer in unsigned LEB128
// format starting at source[offset]. If reverse is true the bytes are read
// backwards (decreasing offset), as used for index tables stored back-to-front.
func readVariableValueLength(source []byte, offset ValueLength, reverse bool) ValueLength {
	length := ValueLength(0)
	p := uint(0)
	for {
		v := ValueLength(source[offset])
		length += (v & 0x7f) << p
		p += 7
		if reverse {
			offset--
		} else {
			offset++
		}
		// High bit clear marks the final byte of the encoding.
		if v&0x80 == 0 {
			break
		}
	}
	return length
}

// readVariableValueLengthFromReader reads a variable length integer in
// unsigned LEB128 format from r, returning the value and the bytes consumed.
// Reverse reading is impossible on a forward-only stream and is rejected.
func readVariableValueLengthFromReader(r io.Reader, reverse bool) (ValueLength, []byte, error) {
	if reverse {
		return 0, nil, WithStack(fmt.Errorf("reverse is not supported"))
	}
	length := ValueLength(0)
	p := uint(0)
	buf := make([]byte, 1)
	bytes := make([]byte, 0, 8)
	for {
		if n, err := r.Read(buf); n != 1 {
			if err != nil {
				return 0, nil, WithStack(err)
			} else {
				return 0, nil, WithStack(fmt.Errorf("failed to read 1 byte"))
			}
		}
		bytes = append(bytes, buf[0])
		v := ValueLength(buf[0])
		length += (v & 0x7f) << p
		p += 7
		if v&0x80 == 0 {
			break
		}
	}
	return length, bytes, nil
}

// storeVariableValueLength stores a variable length integer in unsigned
// LEB128 format at dst[offset], writing backwards when reverse is true.
// value must be positive.
func storeVariableValueLength(dst []byte, offset, value ValueLength, reverse bool) {
	vpackAssert(value > 0)

	idx := offset
	if reverse {
		for value >= 0x80 {
			dst[idx] = byte(value | 0x80)
			idx--
			value >>= 7
		}
		dst[idx] = byte(value & 0x7f)
	} else {
		for value >= 0x80 {
			dst[idx] = byte(value | 0x80)
			idx++
			value >>= 7
		}
		dst[idx] = byte(value & 0x7f)
	}
}

// optionalBool returns the first arg element if available, otherwise returns defaultValue.
func optionalBool(arg []bool, defaultValue bool) bool {
	if len(arg) == 0 {
		return defaultValue
	}
	return arg[0]
}

// alignAt returns the first number >= value that is aligned at the given alignment.
// alignment must be a power of 2.
func alignAt(value, alignment uint) uint {
	mask := ^(alignment - 1)
	return (value + alignment - 1) & mask
}
deps/github.com/arangodb/go-velocypack/util.go
0.7237
0.416441
util.go
starcoder
// Package mysql_nodejs holds the rendered pages of the MySQL/Node.js
// circuit tutorial.
package mysql_nodejs

import (
	. "github.com/gocircuit/circuit/gocircuit.org/render"
)

// RenderBoot returns the HTML for the "Boot the circuit cluster" tutorial
// page, wrapped in the site-wide layout by RenderHtml.
func RenderBoot() string {
	return RenderHtml("Boot the circuit cluster", Render(bootBody, nil))
}

// bootBody is the raw HTML body of the page. The ` + "`" + ` splices embed
// literal backtick characters inside the raw string by closing and reopening
// the raw literal around a quoted backtick.
const bootBody = `<h1>Boot the circuit cluster</h1> <p>Booting the cluster involves starting your hosts, running the circuit on each of them, and making sure that all circuit daemons are connected. <p>For the tutorial we need two hosts ideally, but you can start any desired number 1, 2, 3, … <h3>Start the host instance</h3> <p>Begin by starting a new EC2 instance, using the image <a href="tutorial-mysql-nodejs-image.html">we created</a>. <p>This and all subsequent instances will belong to the same virtual private network inside EC2. EC2 will, as default, give two IP addresses to each host — one private, one public. <p>Host instances on your network will be able to connect to all ports on other host instances, using their private IP address. Hosts outside EC2 can connect to a restricted set of ports on the public IP addresses. <p>When configuring the first host instance before launch, in addition to the requisite SSH port, make sure to leave open another TCP port (say 11022) if you would like to be able to connect to the circuit directly from your notebook. These configurations are accomplished in the “security group” section on EC2. <h3>Start the circuit server</h3> <p>Once the host instance is running, connect into it using SSH. <p>Discover the private address of the EC2 host instance, and save it into a variable: <pre> # ip_address=` + "`" + `curl http://169.254.169.254/latest/meta-data/local-ipv4` + "`" + ` </pre> <p>Start the circuit server, instructing it to listen on the private IP address of this host, on port 11022. (The port choice is arbitrary.) <pre> # circuit start -a ${ip_address}:11022 1> /var/circuit/address 2> /var/circuit/log & </pre> <p>When the server starts it will print its circuit address (a URL-type string that looks like <code>circuit://…</code>) to standard output and we save this into the host-local file <code>/var/circuit/address</code> for future use. The server logs all commands and other events that happen to it to standard error. Respectively, we redirect it to a host-local log file named <code>/var/circuit/log</code>. <p>This start-up procedure can be summarized in a shell script, which you can locate in the source repo at <pre> $GOPATH/src/github.com/gocircuit/circuit/tutorial/ec2/start-first-server.sh </pre> <p>Note that we are starting the circuit server as a singleton server, without specifying an automatic method for discovering other servers. This is because Amazon EC2 does not support Multicast UDP, which is needed for automatic server-server discovery. <p>Launch the remaining hosts and their respective circuit servers in the manner described here. Naturally if you are building a production system, you would preconfigure the host image to start the circuit server automatically upon booting. <p>Suppose you launch a total of three hosts using this method. You now have three hosts with circuit server running on each. At this stage each circuit server is a singleton member in a network of one. Our next step will be to join all three of them in a single network of three circuit servers. <h3>Connect circuit servers into a single cluster</h3> <p>Obtain the circuit address of each of the running circuit servers. On any given host you can accomplish this by saying <pre> # cat /var/circuit/address </pre> <p>You should expect to see a URL-type string that looks like this in return: <pre> circuit://172.16.31.10:11022/12477/Q15d828c92f4c90ff </pre> <p>This URL uniquely identifies the circuit server process that created it, as well as describes how to connect to the server. <p>Next we are going to use the circuit client to log into one of the servers, say <code>host1</code>, and instruct it to join the network of each of the other two servers. Note that joining is a symmetric operation. If server A is instructed to join server B, the result is that the networks that A and B are part of are merged into one. (And if A and B were already part of the same network, no change occurs.) <p>Log into <code>host1</code> and, for convenience, save the circuit address of the local server into a shell variable <code>H1</code>: <pre> host1# H1=$(cat /var/circuit/address) </pre> <p>Use the circuit client to connect into the local circuit server and list all servers that it sees in its own network: <pre> host1# circuit ls -d $H1 / </pre> <p>You should see a single member — the circuit server running on <code>host1</code> itself — listed by its name in the circuit's virtual file system. The output should look something like: <pre> /Xfea8b5b798f2fc09 </pre> <p>Place the circuit addresses of the other two hosts, whatever they might be in your case, in the variables <code>H2</code> and <code>H3</code>. Using the circuit client again, instruct the local circuit server (the on <code>host1</code>) to join the networks of the other two: <pre> host1# circuit join -d $H1 /Xfea8b5b798f2fc09 $H1 host1# circuit join -d $H1 /Xfea8b5b798f2fc09 $H2 </pre> <p>Let us break down these command lines. The part <code>-d $H1</code> tells the client how to connect into the local server (the circuit address <code>$H1</code> contains the host and port of the running server). Then it instructs the server with virtual name <code>/Xfea8b5b798f2fc09</code> — which happens to be the local server we are connected into — to join its network into that of <code>$H1</code> and <code>$H2</code> respectively. <p>As a result, all three servers will become part of the same network, and you can verify this by listing the members of the network again. <pre> host1# circuit ls -d $H1 / </pre> <p>This time you should see three entries, along the lines of: <pre> /X2987b5b023f2f988 /Xca2b345798112c09 /Xfea8b5b798f2fc09 </pre> <p>If you were to log into any of the other hosts and use the circuit client from there, say from the second host: <pre> host2# circuit ls -d $H2 / </pre> <p>You should see the exact same list of three members. <p>At this stage the circuit cluster is connected and ready to be used. You should not have to restart and rejoin circuit servers unless a host dies and/or you are adding a new one to the cluster. `
gocircuit.org/tutorial/mysql-nodejs/boot.go
0.707506
0.663584
boot.go
starcoder
package siafile

import (
	"path/filepath"

	"gitlab.com/NebulousLabs/errors"
	"gitlab.com/NebulousLabs/writeaheadlog"

	"go.sia.tech/siad/modules"
)

// CombinedChunkIndex is a helper method which translates a chunk's index to the
// corresponding combined chunk index depending on the number of combined chunks.
// It returns -1 when chunkIndex does not map to a combined chunk (only 1 or 2
// combined chunks are supported).
func CombinedChunkIndex(numChunks, chunkIndex uint64, numCombinedChunks int) int {
	if numCombinedChunks == 1 && chunkIndex == numChunks-1 {
		return 0
	}
	if numCombinedChunks == 2 && chunkIndex == numChunks-2 {
		return 0
	}
	if numCombinedChunks == 2 && chunkIndex == numChunks-1 {
		return 1
	}
	return -1
}

// Merge merges two PartialsSiafiles into one, returning a map which translates
// chunk indices in newFile to indices in sf.
func (sf *SiaFile) Merge(newFile *SiaFile) (map[uint64]uint64, error) {
	sf.mu.Lock()
	defer sf.mu.Unlock()
	return sf.merge(newFile)
}

// addCombinedChunk adds a new combined chunk to a combined Siafile. This can't
// be called on a regular SiaFile.
func (sf *SiaFile) addCombinedChunk() ([]writeaheadlog.Update, error) {
	if sf.deleted {
		return nil, errors.New("can't add combined chunk to deleted file")
	}
	if filepath.Ext(sf.siaFilePath) != modules.PartialsSiaFileExtension {
		return nil, errors.New("can only call addCombinedChunk on combined SiaFiles")
	}
	// Create updates to add a chunk and return index of that new chunk.
	updates, err := sf.growNumChunks(uint64(sf.numChunks) + 1)
	return updates, err
}

// merge merges two PartialsSiafiles into one, returning a map which translates
// chunk indices in newFile to indices in sf. Both files must be
// PartialsSiaFiles and neither may be deleted. On failure sf.numChunks is
// restored to its pre-merge value.
func (sf *SiaFile) merge(newFile *SiaFile) (map[uint64]uint64, error) {
	if sf.deleted {
		return nil, errors.New("can't merge into deleted file")
	}
	if filepath.Ext(sf.siaFilePath) != modules.PartialsSiaFileExtension {
		return nil, errors.New("can only call merge on PartialsSiaFile")
	}
	if filepath.Ext(newFile.SiaFilePath()) != modules.PartialsSiaFileExtension {
		return nil, errors.New("can only merge PartialsSiafiles into a PartialsSiaFile")
	}
	newFile.mu.Lock()
	defer newFile.mu.Unlock()
	if newFile.deleted {
		return nil, errors.New("can't merge deleted file")
	}
	var newChunks []chunk
	indexMap := make(map[uint64]uint64)
	// Remember the chunk count so it can be rolled back on error.
	ncb := sf.numChunks
	err := newFile.iterateChunksReadonly(func(chunk chunk) error {
		// Each merged chunk is appended after the chunks sf already has.
		// BUG FIX: sf.numChunks was previously never incremented inside this
		// loop, so every merged chunk received the same index (and the same
		// indexMap value). The ncb save/restore above only makes sense if the
		// counter is mutated here.
		newIndex := sf.numChunks
		indexMap[uint64(chunk.Index)] = uint64(newIndex)
		chunk.Index = newIndex
		newChunks = append(newChunks, chunk)
		sf.numChunks++
		return nil
	})
	if err != nil {
		sf.numChunks = ncb
		return nil, err
	}
	return indexMap, sf.saveFile(newChunks)
}
modules/renter/filesystem/siafile/partialssiafile.go
0.562898
0.441793
partialssiafile.go
starcoder
// Package kl contains CLDR locale data for Kalaallisut (Greenlandic).
package kl

import "github.com/rannoch/cldr"

// calendar holds the Gregorian calendar formats and format names for the
// "kl" locale. NOTE(review): this table is presumably generated from CLDR
// data — verify against the generator before hand-editing.
var calendar = cldr.Calendar{
	Formats: cldr.CalendarFormats{
		Date:     cldr.CalendarDateFormat{Full: "EEEE dd MMMM y", Long: "dd MMMM y", Medium: "MMM dd, y", Short: "y-MM-dd"},
		Time:     cldr.CalendarDateFormat{Full: "h:mm:ss a zzzz", Long: "h:mm:ss a z", Medium: "h:mm:ss a", Short: "h:mm a"},
		DateTime: cldr.CalendarDateFormat{Full: "{1} {0}", Long: "{1} {0}", Medium: "{1} {0}", Short: "{1} {0}"},
	},
	FormatNames: cldr.CalendarFormatNames{
		Months: cldr.CalendarMonthFormatNames{
			Abbreviated: cldr.CalendarMonthFormatNameValue{Jan: "jan", Feb: "feb", Mar: "mar", Apr: "apr", May: "maj", Jun: "jun", Jul: "jul", Aug: "aug", Sep: "sep", Oct: "okt", Nov: "nov", Dec: "dec"},
			Narrow:      cldr.CalendarMonthFormatNameValue{Jan: "J", Feb: "F", Mar: "M", Apr: "A", May: "M", Jun: "J", Jul: "J", Aug: "A", Sep: "S", Oct: "O", Nov: "N", Dec: "D"},
			Short:       cldr.CalendarMonthFormatNameValue{},
			Wide:        cldr.CalendarMonthFormatNameValue{Jan: "januari", Feb: "februari", Mar: "martsi", Apr: "aprili", May: "maji", Jun: "juni", Jul: "juli", Aug: "augustusi", Sep: "septemberi", Oct: "oktoberi", Nov: "novemberi", Dec: "decemberi"},
		},
		Days: cldr.CalendarDayFormatNames{
			Abbreviated: cldr.CalendarDayFormatNameValue{Sun: "sab", Mon: "ata", Tue: "mar", Wed: "pin", Thu: "sis", Fri: "tal", Sat: "arf"},
			Narrow:      cldr.CalendarDayFormatNameValue{Sun: "S", Mon: "A", Tue: "M", Wed: "P", Thu: "S", Fri: "T", Sat: "A"},
			Short:       cldr.CalendarDayFormatNameValue{Sun: "sab", Mon: "ata", Tue: "mar", Wed: "pin", Thu: "sis", Fri: "tal", Sat: "arf"},
			Wide:        cldr.CalendarDayFormatNameValue{Sun: "sabaat", Mon: "ataasinngorneq", Tue: "marlunngorneq", Wed: "pingasunngorneq", Thu: "sisamanngorneq", Fri: "tallimanngorneq", Sat: "arfininngorneq"},
		},
		Periods: cldr.CalendarPeriodFormatNames{
			Abbreviated: cldr.CalendarPeriodFormatNameValue{AM: "u.t.", PM: "u.k."},
			Narrow:      cldr.CalendarPeriodFormatNameValue{},
			Short:       cldr.CalendarPeriodFormatNameValue{},
			Wide:        cldr.CalendarPeriodFormatNameValue{AM: "ulloqeqqata-tungaa", PM: "ulloqeqqata-kingorna"},
		},
	},
}
resources/locales/kl/calendar.go
0.526099
0.462109
calendar.go
starcoder
package common

import (
	"fmt"

	"github.com/cadmean-ru/amphion/common/a"
)

// Boundary represents the boundaries of an object, like collider in unity
type Boundary interface {
	IsPointInside(point a.Vector3) bool
	IsPointInside2D(point a.Vector3) bool
}

// RectBoundary represents a boundary in 3D space,
// stored as one inclusive range per axis.
type RectBoundary struct {
	X, Y, Z FloatRange
}

// IsPointInside checks if specific point is inside the boundary
func (b *RectBoundary) IsPointInside(v a.Vector3) bool {
	return b.X.IsValueInside(v.X) && b.Y.IsValueInside(v.Y) && b.Z.IsValueInside(v.Z)
}

// IsPointInside2D checks if specific point is inside the boundary ignoring z position
func (b *RectBoundary) IsPointInside2D(v a.Vector3) bool {
	return b.X.IsValueInside(v.X) && b.Y.IsValueInside(v.Y)
}

// IsRectInside checks if another rect is fully inside this rect.
func (b *RectBoundary) IsRectInside(rect *RectBoundary) bool {
	return b.X.IsRangeInside(rect.X) && b.Y.IsRangeInside(rect.Y) && b.Z.IsRangeInside(rect.Z)
}

// Move shifts all coordinates of the rect by the given vector.
func (b *RectBoundary) Move(by a.Vector3) {
	b.X.Move(by.X)
	b.Y.Move(by.Y)
	b.Z.Move(by.Z)
}

// GetMin returns the corner of the rect with the smallest coordinates.
func (b *RectBoundary) GetMin() a.Vector3 {
	return a.NewVector3(b.X.Min, b.Y.Min, b.Z.Min)
}

// GetMax returns the corner of the rect with the largest coordinates.
func (b *RectBoundary) GetMax() a.Vector3 {
	return a.NewVector3(b.X.Max, b.Y.Max, b.Z.Max)
}

// GetSize returns the extent of the rect along each axis.
func (b *RectBoundary) GetSize() a.Vector3 {
	return a.NewVector3(b.X.GetLength(), b.Y.GetLength(), b.Z.GetLength())
}

// ToString returns a human-readable representation of the three ranges.
func (b *RectBoundary) ToString() string {
	return fmt.Sprintf("(%s %s %s)", b.X.ToString(), b.Y.ToString(), b.Z.ToString())
}

// String implements fmt.Stringer by delegating to ToString.
func (b *RectBoundary) String() string {
	return b.ToString()
}

// NewRectBoundary creates a RectBoundary from explicit min/max values per axis.
func NewRectBoundary(minX, maxX, minY, maxY, minZ, maxZ float32) *RectBoundary {
	return &RectBoundary{
		X: NewFloatRange(minX, maxX),
		Y: NewFloatRange(minY, maxY),
		Z: NewFloatRange(minZ, maxZ),
	}
}

// NewRectBoundaryXY creates a flat (zero-depth) RectBoundary in the XY plane.
func NewRectBoundaryXY(minX, maxX, minY, maxY float32) *RectBoundary {
	return &RectBoundary{
		X: NewFloatRange(minX, maxX),
		Y: NewFloatRange(minY, maxY),
		Z: NewFloatRange(0, 0),
	}
}

// NewRectBoundaryFromPositionAndSize creates a RectBoundary whose minimum
// corner is position and whose extent along each axis is size.
func NewRectBoundaryFromPositionAndSize(position a.Vector3, size a.Vector3) *RectBoundary {
	return NewRectBoundary(position.X, position.X+size.X, position.Y, position.Y+size.Y, position.Z, position.Z+size.Z)
}
common/boundary.go
0.874185
0.722992
boundary.go
starcoder
package matchers import ( "fmt" "github.com/onsi/gomega" "github.com/onsi/gomega/format" "github.com/onsi/gomega/types" "reflect" ) type orderconsistofMatcher struct { Elements []interface{} } func OrderedConsistOf(elements ...interface{}) types.GomegaMatcher { return &orderconsistofMatcher{ Elements: elements, } } func isArrayOrSlice(a interface{}) bool { if a == nil { return false } switch reflect.TypeOf(a).Kind() { case reflect.Array, reflect.Slice: return true default: return false } } func (matcher *orderconsistofMatcher) Match(actual interface{}) (success bool, err error) { if !isArrayOrSlice(actual) { return false, fmt.Errorf("ConsistOf matcher expects an array/slice/map. Got:\n%s", format.Object(actual, 1)) } elements := matcher.Elements if len(matcher.Elements) == 1 && isArrayOrSlice(matcher.Elements[0]) { elements = []interface{}{} value := reflect.ValueOf(matcher.Elements[0]) for i := 0; i < value.Len(); i++ { elements = append(elements, value.Index(i).Interface()) } } values := []interface{}{} value := reflect.ValueOf(actual) for i := 0; i < value.Len(); i++ { values = append(values, value.Index(i).Interface()) } matchers := []types.GomegaMatcher{} for _, element := range elements { matcher, isMatcher := element.(types.GomegaMatcher) if !isMatcher { matcher = gomega.BeEquivalentTo(element) } matchers = append(matchers, matcher) } if reflect.ValueOf(actual).Len() != len(matchers) { return false, nil } for j, matcher := range matchers { matched, err := matcher.Match(values[j]) if err != nil { return false, nil } if !matched { return false, nil } } return true, nil } func (matcher *orderconsistofMatcher) FailureMessage(actual interface{}) (message string) { return format.Message(actual, "to consist exactly of", matcher.Elements) } func (matcher *orderconsistofMatcher) NegatedFailureMessage(actual interface{}) (message string) { return format.Message(actual, "not to consist exactly of", matcher.Elements) }
vmlifecycle/matchers/ordered_consist_of.go
0.705379
0.423875
ordered_consist_of.go
starcoder
package templates import ( "fmt" "reflect" "strconv" "time" ) func templateToInt(of interface{}) int { t := reflect.ValueOf(of).Kind() if t == reflect.Int || t == reflect.Int16 || t == reflect.Int32 || t == reflect.Int64 || t == reflect.Float32 || t == reflect.Float64 || t == reflect.Uint || t == reflect.Uint8 || t == reflect.Uint16 || t == reflect.Uint32 || t == reflect.Uint64 { return int(reflect.ValueOf(of).Int()) } else if t == reflect.String { result, _ := strconv.ParseInt(of.(string), 10, 64) return int(reflect.ValueOf(result).Int()) } else { return 0 } } func ToInt64(from interface{}) int64 { switch t := from.(type) { case int: return int64(t) case int8: return int64(t) case int16: return int64(t) case int32: return int64(t) case int64: return int64(t) case float32: return int64(t) case float64: return int64(t) case uint: return int64(t) case uint32: return int64(t) case uint64: return int64(t) case string: parsed, _ := strconv.ParseInt(t, 10, 64) return parsed case time.Duration: return int64(t) default: return 0 } } func ToFloat64(from interface{}) float64 { switch t := from.(type) { case int: return float64(t) case int32: return float64(t) case int64: return float64(t) case float32: return float64(t) case float64: return float64(t) case uint: return float64(t) case uint32: return float64(t) case uint64: return float64(t) case string: parsed, _ := strconv.ParseFloat(t, 64) return parsed case time.Duration: return float64(t) default: return 0 } } func ToString(from interface{}) string { switch t := from.(type) { case int: return strconv.Itoa(t) case int8: return strconv.FormatInt(int64(t), 10) case int16: return strconv.FormatInt(int64(t), 10) case int32: return strconv.FormatInt(int64(t), 10) case int64: return strconv.FormatInt(t, 10) case float32: return strconv.FormatFloat(float64(t), 'e', -1, 32) case float64: return strconv.FormatFloat(t, 'e', -1, 64) case uint: return strconv.FormatUint(uint64(t), 10) case uint8: return strconv.FormatUint(uint64(t), 
10) case uint32: return strconv.FormatUint(uint64(t), 10) case uint64: return strconv.FormatUint(uint64(t), 10) case []rune: return string(t) case []byte: return string(t) case fmt.Stringer: return t.String() case string: return t default: return "" } }
common/templates/general.go
0.523177
0.614886
general.go
starcoder
package bitmap import ( "errors" "fmt" "math" ) // Bitmap is a alias for map[int]bool used for better code reading. type Bitmap = map[int]bool const ( _bitmapLength = 8 _bitsInByte = 8 _firstByteIndex = 0 _firstBitOffset = 7 _lastBitOffset = 0 ) var ( // ErrBitmapISOWrongLength exported error for asserting. ErrBitmapISOWrongLength = errors.New("wrong bitmap length input") // ErrBitmapISOBadBitmapPosition exported error for asserting. ErrBitmapISOBadBitmapPosition = errors.New("bad bitmap position input") // ErrBitmapISOImpossibleBitmap exported error for asserting. ErrBitmapISOImpossibleBitmap = errors.New("impossible generate bitmap, lowest and highest limits too far") // ErrBitmapISOFirstBitProhibited exported error for asserting. ErrBitmapISOFirstBitProhibited = errors.New("first bit can be setted manually in input") ) // ISO8583FromBytes indicates which elements of a ISO8583 message are present. // It receives a 8 byte long ISO8583 bitmap with a position (to indicate if its the first or second). // Returns a map[int]bool to allow searching by element. func ISO8583FromBytes(b []byte, bitmapPosition int) (presentElements Bitmap, nextBitmapPresent bool, returnErr error) { const nextBitmapIndicator = 1 // Validate input if len(b) != _bitmapLength { return nil, false, fmt.Errorf("%w: should be %v, but its %v", ErrBitmapISOWrongLength, _bitmapLength, len(b)) } if bitmapPosition < 1 { return nil, false, fmt.Errorf("%w: should not be lower than 1, but its %v", ErrBitmapISOBadBitmapPosition, bitmapPosition) } rawBitmap := FromBytes(b) isoBitmap := make(Bitmap) for k, v := range rawBitmap { // Next bitmap indicator is returned separately in the second return value if k != nextBitmapIndicator { isoBitmap[_bitsInByte*_bitmapLength*(bitmapPosition-1)+k] = v } } return isoBitmap, rawBitmap[nextBitmapIndicator], nil } // ISO8583ToBytes creates a bitmap in byte format. // Map key 1 must not be present. 
func ISO8583ToBytes(b Bitmap, nextBitmapPresent bool) ([]byte, error) { // Find the highest and lowest element in map lowestElement, highestElement := Extremities(b) inferiorLimit := 1 for checkedLimit := 1; checkedLimit < lowestElement; checkedLimit += 64 { inferiorLimit = checkedLimit } superiorLimit := inferiorLimit + 63 if superiorLimit < highestElement { return nil, fmt.Errorf("%w: lowest limit %v (element %v), highest limit %v (element %v)", ErrBitmapISOImpossibleBitmap, inferiorLimit, lowestElement, superiorLimit, highestElement) } if _, exist := b[inferiorLimit]; exist { return nil, fmt.Errorf("%w: position %v", ErrBitmapISOFirstBitProhibited, inferiorLimit) } bmap := b bmap[inferiorLimit] = nextBitmapPresent if _, exist := bmap[superiorLimit]; !exist { bmap[superiorLimit] = false } byt := ToBytes(bmap) return byt[len(byt)-_bitmapLength:], nil } // FromBytes given bytes it returns a map[int]bool indicating which biy is on or off. Most left is 1. func FromBytes(b []byte) Bitmap { availableElements := make(Bitmap) // Iterate over each bit of the input from most left to most right for bytePosition := _firstByteIndex; bytePosition < len(b); bytePosition++ { for bitOffset := _firstBitOffset; bitOffset >= _lastBitOffset; bitOffset-- { // Calculate element position and save in map previousBytesSummary := _bitsInByte * bytePosition bitPosition := _bitsInByte - bitOffset availableElements[bitPosition+previousBytesSummary] = hasBitSet(b[bytePosition], uint(bitOffset)) } } return availableElements } // ToBytes creates a bitmap in []byte format. Most left is 1. 
func ToBytes(b Bitmap) []byte { getPosition := func(byt, bit int) int { return bit + byt*_bitsInByte } _, highestElement := Extremities(b) bmap := make([]byte, int(math.Ceil(float64(highestElement)/_bitsInByte))) for bytePosition := _firstByteIndex; bytePosition < len(bmap); bytePosition++ { for bitOffset := _firstBitOffset; bitOffset >= _lastBitOffset; bitOffset-- { if isOn, exist := b[getPosition(bytePosition, _bitsInByte-bitOffset)]; exist && isOn { bmap[bytePosition] = setBit(bmap[bytePosition], uint(bitOffset)) } } } return bmap } // Returns if the indicated bit is on. func hasBitSet(n byte, pos uint) bool { val := n & (1 << pos) return val > 0 } // Sets the bit at pos in the integer n. func setBit(n byte, pos uint) byte { n |= 1 << pos return n } // Extremities returns the lowest and highest elements of a bitmap. func Extremities(b Bitmap) (low, high int) { firstIteration := true for k := range b { if k > high { high = k } if k < low || firstIteration { low = k } firstIteration = false } return low, high }
pkg/bitmap/bitmap.go
0.791902
0.478651
bitmap.go
starcoder
package interval import ( "math" "sync" ) type color int const ( red color = 0 black color = 1 sentinelPayload string = "sentinel" ) type ErrNotFound string func (e ErrNotFound) Error() string { return string(e) } // Tree represents an Interval tree with a root node and Mutex to // protect concurrent access. type Tree struct { lock sync.RWMutex root *node sentinel *node } // Result is a search result when looking up an interval in the tree. type Result struct { Interval Interval Payload interface{} } // NewIntervalTree returns an initialized but empty interval tree. func NewIntervalTree() *Tree { sentinel := &node{color: black, payload: sentinelPayload} return &Tree{ lock: sync.RWMutex{}, root: sentinel, sentinel: sentinel, } } // Root returns a Result of the payload of the root node of the tree or an // ErrNotFound if the tree is empty. func (t *Tree) Root() (Result, error) { t.lock.RLock() defer t.lock.RUnlock() if t.root == t.sentinel { return Result{}, ErrNotFound("tree is empty") } return Result{ Interval: t.root.key, Payload: t.root.payload, }, nil } // Height returns the height (max depth) of the tree. Returns -1 if the tree // has no nodes. A (rooted) tree with only a single node has a height of zero. func (t *Tree) Height() int { t.lock.RLock() defer t.lock.RUnlock() return int(t.height(t.root)) } func (t *Tree) height(node *node) float64 { if node == t.sentinel { return -1 } return 1 + math.Max(t.height(node.left), t.height(node.right)) } // Min returns a Result of the lowest interval in the tree or an ErrNotFound if // the tree is empty. func (t *Tree) Min() (Result, error) { t.lock.RLock() defer t.lock.RUnlock() n := t.min(t.root) if n == t.sentinel { return Result{}, ErrNotFound("tree is empty") } return Result{ Interval: n.key, Payload: n.payload, }, nil } func (t *Tree) rotateLeft(x *node) { // y's left subtree will be x's right subtree. y := x.right x.right = y.left if y.left != t.sentinel { y.left.parent = x } // Restore parent relationships. 
y.parent = x.parent switch { case x.parent == t.sentinel: t.root = y case x.parent.left == x: x.parent.left = y default: x.parent.right = y } // x will be y's new left-child. y.left = x x.parent = y t.updateMax(x) } func (t *Tree) rotateRight(x *node) { y := x.left x.left = y.right if y.right != t.sentinel { y.right.parent = x } y.parent = x.parent switch { case x.parent == t.sentinel: t.root = y case x.parent.left == x: x.parent.left = y default: x.parent.right = y } y.right = x x.parent = y t.updateMax(y) } func (t *Tree) newLeaf(key Interval, p interface{}) *node { return &node{ key: key, payload: p, left: t.sentinel, right: t.sentinel, max: key.high, } } func (t *Tree) isLeaf(z *node) bool { return z.left == t.sentinel && z.right == t.sentinel } func (t *Tree) min(z *node) *node { for z != t.sentinel && z.left != t.sentinel { z = z.left } return z } func (t *Tree) updateMax(z *node) { z.max = z.key.high if z.right != t.sentinel && z.right.max.After(z.max) { z.max = z.right.max } if z.left != t.sentinel && z.left.max.After(z.max) { z.max = z.left.max } }
interval/tree.go
0.872673
0.516474
tree.go
starcoder
package editdist import ( "unicode/utf8" ) type eventType uint8 const ( none eventType = iota same subst ins del ) func (e eventType) String() string { switch e { case subst: return "subst" case ins: return "ins" case del: return "del" default: return "" } } // ComputeDistanceMax computes the levenshtein distance between the two // strings passed as an argument. It stops execution if edit distance grows // a certain max value. It returns edit distance and a boolean. The boolean is // true when calculation was aborted by the `max` value. func ComputeDistanceMax(a, b string, max int) (int, bool) { if len(a) == 0 { dist := utf8.RuneCountInString(b) if max > 0 && dist > max { return max, true } return dist, false } if len(b) == 0 { dist := utf8.RuneCountInString(a) if max > 0 && dist > max { return max, true } return dist, false } if a == b { return 0, false } // We need to convert to []rune if the strings are non-ASCII. // This could be avoided by using utf8.RuneCountInString // and then doing some juggling with rune indices, // but leads to far more bounds checks. It is a reasonable trade-off. s1 := []rune(a) s2 := []rune(b) // swap to save some memory O(min(a,b)) instead of O(a) if len(s1) > len(s2) { s1, s2 = s2, s1 } lenS1 := len(s1) lenS2 := len(s2) // init the row x := make([]uint8, lenS1+1) // we start from 1 because index 0 is already 0. for i := 1; i < len(x); i++ { x[i] = uint8(i) } // make a dummy bounds check to prevent the 2 bounds check down below. // The one inside the loop is particularly costly. 
_ = x[lenS1] // fill in the rest var rowDist uint8 for i := 1; i <= lenS2; i++ { prev := uint8(i) rowDist = 255 for j := 1; j <= lenS1; j++ { current := x[j-1] // match if s2[i-1] != s1[j-1] { current = min( min(x[j-1]+1, // substitution prev+1), // insertion x[j]+1) // deletion } if current < rowDist || rowDist == 255 { rowDist = current } x[j-1] = prev prev = current } if max > 0 && rowDist > uint8(max) { return max, true } x[lenS1] = prev } return int(x[lenS1]), false } // ComputeDistance computes the levenshtein distance between the two // strings passed as arguments. The third argument is a flag that // would trigger creation of tagged strings that show how exactly // the two strings differ. If the diff artument is true, the tagged // strings will be provided in the output. func ComputeDistance(a, b string, diff bool) (int, string, string) { if a == b { return 0, a, b } if len(a) == 0 { return utf8.RuneCountInString(b), "<del>" + b + "</del>", "<ins>" + b + "</ins>" } if len(b) == 0 { return utf8.RuneCountInString(a), "<ins>" + a + "</ins>", "<del>" + a + "</del>" } // We need to convert to []rune if the strings are non-ASCII. // This could be avoided by using utf8.RuneCountInString // and then doing some juggling with rune indices, // but leads to far more bounds checks. It is a reasonable trade-off. s1 := []rune(a) s2 := []rune(b) lenS1 := len(s1) lenS2 := len(s2) rl := lenS1 + 1 cl := lenS2 + 1 var m []uint8 if diff { m = make([]uint8, 0, cl*rl) } // init the row x := make([]uint8, lenS1+1) // we start from 1 because index 0 is already 0. for i := 1; i < len(x); i++ { x[i] = uint8(i) } if diff { m = append(m, x...) } // make a dummy bounds check to prevent the 2 bounds check down below. // The one inside the loop is particularly costly. 
_ = x[lenS1] // fill in the rest for i := 1; i <= lenS2; i++ { prev := uint8(i) for j := 1; j <= lenS1; j++ { current := x[j-1] // match if s2[i-1] != s1[j-1] { current = min( x[j-1]+1, // substitution (go left) min(prev+1, // insertion (go diag) x[j]+1), // deletion (go up) ) } x[j-1] = prev prev = current } x[lenS1] = prev if diff { m = append(m, x...) } } var d1, d2 string if diff { d1, d2 = traceBack(s1, s2, m) } return int(x[lenS1]), d1, d2 } func min(a, b uint8) uint8 { if b < a { return b } return a } func traceBack(s1, s2 []rune, m []uint8) (string, string) { var e eventType var dist, prevDist int var iDel, jDel, iIns, jIns, iSubst, jSubst int lenS1 := len(s1) lenS2 := len(s2) rl := lenS1 + 1 events := make([]eventType, 0, lenS1+lenS2) i := lenS2 j := lenS1 prevDist = int(m[rl*i+j]) for !(i == 0 && j == 0) { e = same iDel, jDel = i-1, j iIns, jIns = i, j-1 iSubst, jSubst = i-1, j-1 i, j = iSubst, jSubst distSubst, distIns, distDel := -1, -1, -1 if iSubst >= 0 && jSubst >= 0 { distSubst = int(m[rl*iSubst+jSubst]) } if jIns >= 0 { distIns = int(m[rl*iIns+jIns]) } if iDel >= 0 { distDel = int(m[rl*iDel+jDel]) } dist = prevDist if distSubst >= 0 && distSubst < dist { e = subst dist = distSubst } if distIns >= 0 && distIns < dist { e = ins i, j = iIns, jIns dist = distIns } if distDel >= 0 && distDel < dist { e = del i, j = iDel, jDel dist = distDel } prevDist = dist events = append(events, e) } return diffs(s1, s2, events) } func diffs(s1, s2 []rune, events []eventType) (string, string) { var prev, event eventType var deletes, inserts int lenS1 := len(s1) lenS2 := len(s2) d1 := make([]rune, 0, (lenS1+lenS2)*2) d2 := make([]rune, 0, (lenS1+lenS2)*2) i := 0 for j := len(events) - 1; j >= 0; j-- { event = events[j] // init prev event if prev == none { if event != same { d1 = append(d1, []rune("<"+event.String()+">")...) d2 = append(d2, []rune("<"+invert(event).String()+">")...) 
} } else if event != prev { if prev != same { d1 = append(d1, []rune("</"+prev.String()+">")...) d2 = append(d2, []rune("</"+invert(prev).String()+">")...) } if event != same { d1 = append(d1, []rune("<"+event.String()+">")...) d2 = append(d2, []rune("<"+invert(event).String()+">")...) } } switch event { case del: c1 := s2[i-inserts] c2 := s2[i-inserts] d1 = append(d1, c1) d2 = append(d2, c2) deletes++ case ins: c1 := s1[i-deletes] c2 := s1[i-deletes] d1 = append(d1, c1) d2 = append(d2, c2) inserts++ default: c1 := s1[i-deletes] c2 := s2[i-inserts] d1 = append(d1, c1) d2 = append(d2, c2) } prev = event i++ } if event != same { d1 = append(d1, []rune("</"+event.String()+">")...) d2 = append(d2, []rune("</"+invert(event).String()+">")...) } return string(d1), string(d2) } func invert(e eventType) eventType { if e == ins { return del } if e == del { return ins } return e }
ent/editdist/editdist.go
0.609524
0.411702
editdist.go
starcoder
package intertitle import ( "time" cabiriaImage "github.com/liampulles/cabiria/pkg/image" cabiriaTime "github.com/liampulles/cabiria/pkg/time" "github.com/liampulles/cabiria/pkg/time/period" ) // Range defines a set of frames which encapsulate an intertitle. // Range can be used as a Period. type Range struct { StartFrame int EndFrame int FPS float64 Style Style } // Valid will return true if a range is valid, otherwise false. func (ir Range) Valid() bool { return ir.FPS > 0.0 && ir.StartFrame >= 0 && ir.EndFrame >= 0 && ir.StartFrame <= ir.EndFrame } // Start returns a time representation of the start frame of a Range, // using the FPS. func (ir Range) Start() time.Time { return cabiriaTime.FromFrameAndFPS(ir.StartFrame, ir.FPS) } // End returns a time representation of the end frame of a Range, // using the FPS. func (ir Range) End() time.Time { return cabiriaTime.FromFrameAndFPS(ir.EndFrame, ir.FPS) } // TransformToNew computes a new Range given the desired start and end times, // calculating frame numbers using the FPS. func (ir Range) TransformToNew(start, end time.Time) period.Period { return Range{ StartFrame: fromTimeAndFPS(start, ir.FPS), EndFrame: fromTimeAndFPS(end, ir.FPS), FPS: ir.FPS, } } // MapRanges takes an array of intertitle frames and an fps, and reduces it // to an array of Ranges. 
func MapRanges(intertitles []bool, fps float64, framePaths []string) ([]Range, error) { transitions := make([]Range, 0) last := false start := -1 for i, current := range intertitles { // Start of intertitle if !last && current { start = i } // End of intertitle if last && !current { style, err := getStyle(start, i-1, framePaths) if err != nil { return nil, err } transitions = appendIntertitle(transitions, start, i-1, fps, style) start = -1 } last = current } // Close off end, if applicable style, err := getStyle(start, len(intertitles)-1, framePaths) if err != nil { return nil, err } transitions = appendIntertitle(transitions, start, len(intertitles)-1, fps, style) return transitions, nil } func appendIntertitle(transitions []Range, start, end int, fps float64, style Style) []Range { if start < 0 { return transitions } new := Range{ StartFrame: start, EndFrame: end, FPS: fps, Style: style, } return append(transitions, new) } func fromTimeAndFPS(t time.Time, fps float64) int { hours := time.Duration(t.Hour()) * time.Hour minutes := time.Duration(t.Minute()) * time.Minute seconds := time.Duration(t.Second()) * time.Second nano := time.Duration(t.Nanosecond()) * time.Nanosecond totalSeconds := float64(hours+minutes+seconds+nano) / float64(time.Second) return int(totalSeconds * fps) } func getStyle(start, end int, framePaths []string) (Style, error) { if start < 0 { return Style{}, nil } midPoint := (start + end) / 2 img, err := cabiriaImage.GetPNG(framePaths[midPoint]) if err != nil { return Style{}, err } foreground, background, err := cabiriaImage.GetForegroundAndBackground(img) if err != nil { return Style{}, err } return Style{ ForegroundColor: foreground, BackgroundColor: background, }, nil }
pkg/intertitle/range.go
0.842896
0.407274
range.go
starcoder
package DG2D import ( "fmt" "math" "github.com/notargets/gocfd/DG1D" "github.com/notargets/gocfd/utils" ) type Basis2D interface { PolynomialTerm(r, s float64, i, j int) (p float64) PolynomialTermDr(r, s float64, i, j int) (dr float64) PolynomialTermDs(r, s float64, i, j int) (ds float64) GetInterpMatrix(R, S utils.Vector) (Interp utils.Matrix) } type JacobiBasis2D struct { P int // Order Np int // Dimension V, Vinv, Vr, Vs utils.Matrix } func NewJacobiBasis2D(P int, R, S utils.Vector) (jb2d *JacobiBasis2D) { jb2d = &JacobiBasis2D{ P: P, Np: (P + 1) * (P + 2) / 2, } jb2d.V = jb2d.Vandermonde2D(P, R, S) jb2d.Vinv = jb2d.V.InverseWithCheck() jb2d.Vr, jb2d.Vs = jb2d.GradVandermonde2D(P, R, S) return } func (jb2d *JacobiBasis2D) Vandermonde2D(N int, R, S utils.Vector) (V2D utils.Matrix) { V2D = utils.NewMatrix(R.Len(), jb2d.Np) var sk int for i := 0; i <= N; i++ { for j := 0; j <= (N - i); j++ { V2D.SetCol(sk, jb2d.Simplex2DP(R, S, i, j)) sk++ } } return } func (jb2d *JacobiBasis2D) GradVandermonde2D(N int, R, S utils.Vector) (V2Dr, V2Ds utils.Matrix) { var ( Np = (N + 1) * (N + 2) / 2 Nr = R.Len() ) V2Dr, V2Ds = utils.NewMatrix(Nr, Np), utils.NewMatrix(Nr, Np) var sk int for i := 0; i <= N; i++ { for j := 0; j <= (N - i); j++ { ddr, dds := jb2d.GradSimplex2DP(R, S, i, j) V2Dr.M.SetCol(sk, ddr) V2Ds.M.SetCol(sk, dds) sk++ } } return } func (jb2d *JacobiBasis2D) Simplex2DP(R, S utils.Vector, i, j int) (P []float64) { var ( A, B = RStoAB(R, S) Np = A.Len() bd = B.DataP ) h1 := DG1D.JacobiP(A, 0, 0, i) h2 := DG1D.JacobiP(B, float64(2*i+1), 0, j) P = make([]float64, Np) sq2 := math.Sqrt(2) for ii := range h1 { tv1 := sq2 * h1[ii] * h2[ii] tv2 := utils.POW(1-bd[ii], i) P[ii] = tv1 * tv2 } return } func (jb2d *JacobiBasis2D) GradSimplex2DP(R, S utils.Vector, id, jd int) (ddr, dds []float64) { var ( A, B = RStoAB(R, S) ad, bd = A.DataP, B.DataP ) fa := DG1D.JacobiP(A, 0, 0, id) dfa := DG1D.GradJacobiP(A, 0, 0, id) gb := DG1D.JacobiP(B, 2*float64(id)+1, 0, jd) dgb := 
DG1D.GradJacobiP(B, 2*float64(id)+1, 0, jd) // r-derivative // d/dr = da/dr d/da + db/dr d/db = (2/(1-s)) d/da = (2/(1-B)) d/da ddr = make([]float64, len(gb)) for i := range ddr { ddr[i] = dfa[i] * gb[i] if id > 0 { ddr[i] *= utils.POW(0.5*(1-bd[i]), id-1) } // Normalize ddr[i] *= math.Pow(2, float64(id)+0.5) } // s-derivative // d/ds = ((1+A)/2)/((1-B)/2) d/da + d/db dds = make([]float64, len(gb)) for i := range dds { dds[i] = 0.5 * dfa[i] * gb[i] * (1 + ad[i]) if id > 0 { dds[i] *= utils.POW(0.5*(1-bd[i]), id-1) } tmp := dgb[i] * utils.POW(0.5*(1-bd[i]), id) if id > 0 { tmp -= 0.5 * float64(id) * gb[i] * utils.POW(0.5*(1-bd[i]), id-1) } dds[i] += fa[i] * tmp // Normalize dds[i] *= math.Pow(2, float64(id)+0.5) } return } func (jb2d *JacobiBasis2D) PolynomialTerm(r, s float64, i, j int) (P float64) { P = jb2d.Simplex2DP(utils.NewVector(1, []float64{r}), utils.NewVector(1, []float64{s}), i, j)[0] return } func (jb2d *JacobiBasis2D) PolynomialTermDr(r, s float64, i, j int) (dr float64) { ddrV, _ := jb2d.GradSimplex2DP(utils.NewVector(1, []float64{r}), utils.NewVector(1, []float64{s}), i, j) return ddrV[0] } func (jb2d *JacobiBasis2D) PolynomialTermDs(r, s float64, i, j int) (ds float64) { _, ddsV := jb2d.GradSimplex2DP(utils.NewVector(1, []float64{r}), utils.NewVector(1, []float64{s}), i, j) return ddsV[0] } func (jb2d *JacobiBasis2D) GetInterpMatrix(R, S utils.Vector) (Interp utils.Matrix) { /* Uses Jacobi polynomials as the basis function Compose a matrix of interpolating polynomials where each row represents one [r,s] location to be interpolated This matrix can then be multiplied by a single vector of function values at the polynomial nodes to produce a vector of interpolated values, one for each interpolation location */ var ( N = jb2d.P Np = jb2d.Np ) // First compute polynomial terms, used by all polynomials polyTerms := make([]float64, R.Len()*Np) var sk int for ii, r := range R.DataP { s := S.DataP[ii] for i := 0; i <= N; i++ { for j := 0; j <= (N - i); j++ { 
polyTerms[sk] = jb2d.PolynomialTerm(r, s, i, j) sk++ } } } ptV := utils.NewMatrix(R.Len(), Np, polyTerms).Transpose() Interp = jb2d.Vinv.Transpose().Mul(ptV).Transpose() return } type LagrangeBasis2D struct { P, Np int // Order RNodes, SNodes []float64 // Nodes at which basis is defined JB2D *JacobiBasis2D } func NewLagrangeBasis2D(P int, R, S utils.Vector) (lb2d *LagrangeBasis2D) { /* From Karniadakis and Sherwin's "Spectral/hp Element Methods for CFD" on page 124: "Since there is not a closed form expression for the Lagrange polynomial through an arbitrary set of points in the triangular region, it is necessary to express the Lagrange polynomial in terms of another polynomial which has a closed form definition..." Here we use the 2D simplex orthogonal polynomial from Hesthaven to produce a generalized Vandermonde matrix, from which we can use the inverse of the Vandermonde to multiply the Hestaven basis at locations of our choosing to obtain the Lagrange basis coefficients at that location, indirectly through the original basis. 
*/ var ( Np = (P + 1) * (P + 2) / 2 ) // Dimension: Np = (N+1)*(N+2)/2 if Np != R.Len() || Np != S.Len() { err := fmt.Errorf("wrong length of arrays defining basis nodes, should be %d, is [Rlen,Slen] = [%d,%d]", Np, R.Len(), S.Len()) panic(err) } lb2d = &LagrangeBasis2D{ P: P, Np: Np, RNodes: R.DataP, SNodes: S.DataP, JB2D: NewJacobiBasis2D(P, R, S), } return } func (lb2d *LagrangeBasis2D) GetInterpMatrix(R, S utils.Vector) (Interp utils.Matrix) { var ( Np = lb2d.Np N = lb2d.P ) Interp = utils.NewMatrix(R.Len(), Np) // Will transpose after fill var sk int for I := 0; I <= N; I++ { for J := 0; J <= N-I; J++ { Interp.SetCol(sk, lb2d.JB2D.Simplex2DP(R, S, I, J)) sk++ } } Interp = lb2d.JB2D.Vinv.Transpose().Mul(Interp.Transpose()).Transpose() return } func (lb2d *LagrangeBasis2D) GetGradInterpMatrices(R, S utils.Vector) (InterpDR, InterpDS utils.Matrix) { var ( Np = lb2d.Np N = lb2d.P ) InterpDR = utils.NewMatrix(R.Len(), Np) // Will transpose after fill InterpDS = utils.NewMatrix(R.Len(), Np) // Will transpose after fill var sk int for I := 0; I <= N; I++ { for J := 0; J <= N-I; J++ { dR, dS := lb2d.JB2D.GradSimplex2DP(R, S, I, J) InterpDR.SetCol(sk, dR) InterpDS.SetCol(sk, dS) sk++ } } InterpDR = lb2d.JB2D.Vinv.Transpose().Mul(InterpDR.Transpose()).Transpose() InterpDS = lb2d.JB2D.Vinv.Transpose().Mul(InterpDS.Transpose()).Transpose() return } func (lb2d *LagrangeBasis2D) BasisPolynomial(R, S utils.Vector, i, j int) (P []float64) { /* [i,j] are the coordinates of the basis polynomial term R and S are the location to get values from the polynomial */ Interp := lb2d.GetInterpMatrix(R, S) //fmt.Printf("P = %d, term number[%d,%d] = %d\n", lb2d.P, i, j, lb2d.getTermNumber(i, j)) P = Interp.Col(lb2d.getTermNumber(i, j)).DataP return } func (lb2d *LagrangeBasis2D) PolynomialTerm(r, s float64, i, j int) (p float64) { return lb2d.BasisPolynomial(utils.NewVector(1, []float64{r}), utils.NewVector(1, []float64{s}), i, j)[0] } func (lb2d *LagrangeBasis2D) PolynomialTermDr(r, s 
float64, i, j int) (dr float64) { DR, _ := lb2d.GradBasisPolynomial(utils.NewVector(1, []float64{r}), utils.NewVector(1, []float64{s}), i, j) return DR[0] } func (lb2d *LagrangeBasis2D) PolynomialTermDs(r, s float64, i, j int) (ds float64) { _, DS := lb2d.GradBasisPolynomial(utils.NewVector(1, []float64{r}), utils.NewVector(1, []float64{s}), i, j) return DS[0] } func (lb2d *LagrangeBasis2D) GradBasisPolynomial(R, S utils.Vector, i, j int) (DrTerms, DsTerms []float64) { /* [i,j] are the coordinates of the basis polynomial term R and S are the location to get values from the polynomial */ InterpDR, InterpDS := lb2d.GetGradInterpMatrices(R, S) InterpDR = InterpDR InterpDS = InterpDS DrTerms = InterpDR.Col(lb2d.getTermNumber(i, j)).DataP DsTerms = InterpDS.Col(lb2d.getTermNumber(i, j)).DataP return } func (lb2d *LagrangeBasis2D) getTermNumber(i, j int) (sk int) { var ( N = lb2d.P ) for I := 0; I <= N; I++ { for J := 0; J <= N-I; J++ { if I == i && J == j { return } sk++ } } sk = -1 return }
DG2D/BasisFunctions.go
0.57344
0.64919
BasisFunctions.go
starcoder
package Summary //InferredDecision is a simple struct that contains a legal summary the classification of it. type InferredDecision struct { Summary string `json:"summary,omitempty"` Class string `json:"classificcao,omitempty"` } //Classify attempts up to four times to classify a text of an legal summary. //Every time it tries a longer length on the function ReturnSummaryClass func Classify(summary string) (InferredDecision, error) { class, err := firstTry(summary) if err != nil { return InferredDecision{}, err } if class != "" { return InferredDecision{summary, class}, nil } class, err = secondTry(summary) if err != nil { return InferredDecision{}, err } if class != "" { return InferredDecision{summary, class}, nil } class, err = thirdTry(summary) if err != nil { return InferredDecision{}, err } if class != "" { return InferredDecision{summary, class}, nil } class, err = lastTry(summary) if err != nil { return InferredDecision{}, err } if class != "" { return InferredDecision{summary, class}, nil } return InferredDecision{summary, NotMapped}, nil } //The first try attempts to classify the summary utilizing only the las 16 char func firstTry(summary string) (string, error) { class, err := ReturnSummaryClass(summary, 16) if err != nil { return "", err } if class != NotMapped { return class, nil } return "", nil } //The second try attempts to classify the summary utilizing 1/3 of total length func secondTry(summary string) (string, error) { class, err := ReturnSummaryClass(summary, len(summary)/3) if err != nil { return "", err } if class != NotMapped { return class, nil } return "", nil } //The third try attempts to classify the summary utilizing 1/2 of total length func thirdTry(summary string) (string, error) { class, err := ReturnSummaryClass(summary, len(summary)/2) if err != nil { return "", err } if class != NotMapped { return class, nil } return "", nil } //The third try attempts to classify the summary utilizing the full length func lastTry(summary string) 
(string, error) { class, err := ReturnSummaryClass(summary, len(summary)) if err != nil { return "", err } if class != NotMapped { return class, nil } return "", nil }
Summary/summary.go
0.77768
0.443359
summary.go
starcoder
package timeframe

import (
	"strconv"
	"strings"
	"time"
)

// Absolute parses an ISO-8601-style token like 2017-03-18 and returns the
// Range it represents (half-open: [LowerInc, UpperExc)). Dispatch is done
// purely on the position of the first '-' and the total token length, so
// both basic (20170318) and extended (2017-03-18) forms are supported,
// as are ISO week dates (2017-W11-6) and ordinal dates (2017-077).
// A nil loc defaults to time.Local.
func Absolute(s string, loc *time.Location) (Range, error) {
	// Shortest valid token is "2017" (4), longest "2017-03-18T22:50:00.000" (23).
	if len(s) < 4 || len(s) > 23 {
		return err() //cannot be a valid structure
	}
	if loc == nil {
		loc = time.Local
	}
	// Index of the first '-' decides between basic format (-1) and
	// extended format (4, i.e. directly after the year).
	i := strings.IndexByte(s, 0x2d /*-*/)
	switch i {
	case -1:
		// Basic format: no separators, disambiguate by length alone.
		switch len(s) {
		case 4: //2017
			return parseYear(s, loc)
		case 6: //201703
			// YYYYMM is ambiguous with other 6-char tokens, so it is
			// gated behind the package-level allowYYYYMM flag.
			if allowYYYYMM {
				return parseMonth(s[:4], s[4:], loc)
			}
		case 7: //2017W11, 2017077
			if s[4] == 0x57 /*W*/ { //2017W11
				return parseISOWeek(s[:4], s[5:], loc)
			}
			return parseOrdinalDate(s[:4], s[4:], loc)
		case 8: //20170318, 2017W116
			if s[4] == 0x57 /*W*/ { //2017W116
				return parseISOWeekDate(s[:4], s[5:7], s[7:], loc)
			}
			return parseDate("20060102", s, loc)
		case 11: //20170318T22
			return parseTime("20060102T15", s, time.Hour, loc)
		case 13: //20170318T2250
			return parseTime("20060102T1504", s, time.Minute, loc)
		case 15: //20170318T225000
			return parseTime("20060102T150405", s, time.Second, loc)
		case 19: //20170318T225000.000
			return parseTime("20060102T150405.000", s, time.Millisecond, loc)
		}
	case 4:
		// Extended format: '-' right after the year.
		switch len(s) {
		case 7: //2017-03
			return parseMonth(s[:4], s[5:], loc)
		case 8: //2017-W11, 2017-077
			if s[5] == 0x57 /*W*/ { //2017-W11
				return parseISOWeek(s[:4], s[6:], loc)
			}
			return parseOrdinalDate(s[:4], s[5:], loc)
		case 10: //2017-03-18, 2017-W11-6
			if s[5] == 0x57 /*W*/ && s[8] == 0x2d /*-*/ { //2017-W11-6
				return parseISOWeekDate(s[:4], s[6:8], s[9:], loc)
			}
			return parseDate("2006-01-02", s, loc)
		case 13: //2017-03-18T22
			return parseTime("2006-01-02T15", s, time.Hour, loc)
		case 16: //2017-03-18T22:50
			return parseTime("2006-01-02T15:04", s, time.Minute, loc)
		case 19: //2017-03-18T22:50:00
			return parseTime("2006-01-02T15:04:05", s, time.Second, loc)
		case 23: //2017-03-18T22:50:00.000
			return parseTime("2006-01-02T15:04:05.000", s, time.Millisecond, loc)
		}
	}
	return err()
}

// parse converts s to an int and range-checks it; -1 signals any failure
// (bad syntax or out of [min, max]).
func parse(s string, min, max int) int {
	i, e := strconv.Atoi(s)
	if e != nil || i < min || i > max {
		return -1
	}
	return i
}

// parseYear returns the Range covering the whole year sy.
func parseYear(sy string, loc *time.Location) (Range, error) {
	y := parse(sy, minYear, maxYear)
	if y == -1 {
		return err()
	}
	return year(y, 1, loc)
}

// parseMonth returns the Range covering month sm of year sy.
func parseMonth(sy, sm string, loc *time.Location) (Range, error) {
	y := parse(sy, minYear, maxYear)
	m := parse(sm, 1, 12)
	if y == -1 || m == -1 {
		return err()
	}
	return month(y, m, 1, loc)
}

// parseISOWeek returns the Range covering ISO week sw of year sy
// (7 days starting at the week's first day).
func parseISOWeek(sy, sw string, loc *time.Location) (Range, error) {
	y := parse(sy, minYear, maxYear)
	w := parse(sw, 1, 53)
	if y == -1 || w == -1 {
		return err()
	}
	return week(y, w, 0, 7, loc)
}

// parseISOWeekDate returns the Range covering a single day given as
// year/ISO-week/weekday (weekday 1-7, converted to a 0-based offset).
func parseISOWeekDate(sy, sw, swd string, loc *time.Location) (Range, error) {
	y := parse(sy, minYear, maxYear)
	w := parse(sw, 1, 53)
	dw := parse(swd, 1, 7)
	if y == -1 || w == -1 || dw == -1 {
		return err()
	}
	return week(y, w, dw-1, 1, loc)
}

// parseOrdinalDate returns the Range covering the sdy-th day (1-366) of
// year sy.
func parseOrdinalDate(sy, sdy string, loc *time.Location) (Range, error) {
	y := parse(sy, minYear, maxYear)
	dy := parse(sdy, 1, 366)
	if y == -1 || dy == -1 {
		return err()
	}
	return day(y, 1, dy, 1, loc)
}

// parseDate parses a full calendar date and returns the one-day Range
// starting at midnight of that date in loc.
func parseDate(layout, value string, loc *time.Location) (Range, error) {
	t, e := time.ParseInLocation(layout, value, loc)
	if e != nil {
		return err()
	}
	return Range{
		LowerInc: t,
		UpperExc: t.AddDate(0, 0, 1),
	}, nil
}

// parseTime parses a timestamp and returns the Range covering exactly one
// unit d (hour/minute/second/millisecond, matching the layout's precision).
func parseTime(layout, value string, d time.Duration, loc *time.Location) (Range, error) {
	t, e := time.ParseInLocation(layout, value, loc)
	if e != nil {
		return err()
	}
	return Range{
		LowerInc: t,
		UpperExc: t.Add(d),
	}, nil
}
absolute.go
0.598782
0.455622
absolute.go
starcoder
// Mocked helpers for exercising VolumeService against a MockConcertoService.
// Each helper wires up the mock, stubs one HTTP verb/path, invokes the
// service method, and asserts the happy path or a specific failure mode
// (transport error, non-2xx status, or malformed JSON).
package storage

import (
	"encoding/json"
	"fmt"
	"testing"

	"github.com/ingrammicro/cio/api/types"
	"github.com/ingrammicro/cio/utils"
	"github.com/stretchr/testify/assert"
)

// TODO exclude from release compile

// ListVolumesMocked test mocked function
func ListVolumesMocked(t *testing.T, volumesIn []*types.Volume) []*types.Volume {
	assert := assert.New(t)

	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")

	// to json
	dIn, err := json.Marshal(volumesIn)
	assert.Nil(err, "Volume test data corrupted")

	// call service
	cs.On("Get", APIPathStorageVolumes).Return(dIn, 200, nil)
	volumesOut, err := ds.ListVolumes("")
	assert.Nil(err, "Error getting volume list")
	assert.Equal(volumesIn, volumesOut, "ListVolumes returned different volumes")

	return volumesOut
}

// ListVolumesMockedFilteredByServer test mocked function; the server ID of
// the first input volume is used as the filter.
func ListVolumesMockedFilteredByServer(t *testing.T, volumesIn []*types.Volume) []*types.Volume {
	assert := assert.New(t)

	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")

	// to json
	dIn, err := json.Marshal(volumesIn)
	assert.Nil(err, "Volume test data corrupted")

	// call service
	cs.On("Get", fmt.Sprintf(APIPathCloudServerVolumes, volumesIn[0].AttachedServerID)).Return(dIn, 200, nil)
	volumesOut, err := ds.ListVolumes(volumesIn[0].AttachedServerID)
	assert.Nil(err, "Error getting volume list filtered by server")
	assert.Equal(volumesIn, volumesOut, "ListVolumes returned different volumes")

	return volumesOut
}

// ListVolumesFailErrMocked test mocked function
func ListVolumesFailErrMocked(t *testing.T, volumesIn []*types.Volume) []*types.Volume {
	assert := assert.New(t)

	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")

	// to json
	dIn, err := json.Marshal(volumesIn)
	assert.Nil(err, "Volume test data corrupted")

	// call service
	cs.On("Get", APIPathStorageVolumes).Return(dIn, 200, fmt.Errorf("mocked error"))
	volumesOut, err := ds.ListVolumes("")
	assert.NotNil(err, "We are expecting an error")
	assert.Nil(volumesOut, "Expecting nil output")
	assert.Equal(err.Error(), "mocked error", "Error should be 'mocked error'")

	return volumesOut
}

// ListVolumesFailStatusMocked test mocked function
func ListVolumesFailStatusMocked(t *testing.T, volumesIn []*types.Volume) []*types.Volume {
	assert := assert.New(t)

	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")

	// to json
	dIn, err := json.Marshal(volumesIn)
	assert.Nil(err, "Volume test data corrupted")

	// call service
	cs.On("Get", APIPathStorageVolumes).Return(dIn, 499, nil)
	volumesOut, err := ds.ListVolumes("")
	assert.NotNil(err, "We are expecting an status code error")
	assert.Nil(volumesOut, "Expecting nil output")
	assert.Contains(err.Error(), "499", "Error should contain http code 499")

	return volumesOut
}

// ListVolumesFailJSONMocked test mocked function
func ListVolumesFailJSONMocked(t *testing.T, volumesIn []*types.Volume) []*types.Volume {
	assert := assert.New(t)

	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")

	// wrong json
	dIn := []byte{10, 20, 30}

	// call service
	cs.On("Get", APIPathStorageVolumes).Return(dIn, 200, nil)
	volumesOut, err := ds.ListVolumes("")
	assert.NotNil(err, "We are expecting a marshalling error")
	assert.Nil(volumesOut, "Expecting nil output")
	assert.Contains(err.Error(), "invalid character", "Error message should include the string 'invalid character'")

	return volumesOut
}

// GetVolumeMocked test mocked function
func GetVolumeMocked(t *testing.T, volumeIn *types.Volume) *types.Volume {
	assert := assert.New(t)

	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")

	// to json
	dIn, err := json.Marshal(volumeIn)
	assert.Nil(err, "Volume test data corrupted")

	// call service
	cs.On("Get", fmt.Sprintf(APIPathStorageVolume, volumeIn.ID)).Return(dIn, 200, nil)
	volumeOut, err := ds.GetVolume(volumeIn.ID)
	assert.Nil(err, "Error getting volume")
	assert.Equal(*volumeIn, *volumeOut, "GetVolume returned different volumes")

	return volumeOut
}

// GetVolumeFailErrMocked test mocked function
func GetVolumeFailErrMocked(t *testing.T, volumeIn *types.Volume) *types.Volume {
	assert := assert.New(t)

	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")

	// to json
	dIn, err := json.Marshal(volumeIn)
	assert.Nil(err, "Volume test data corrupted")

	// call service
	cs.On("Get", fmt.Sprintf(APIPathStorageVolume, volumeIn.ID)).Return(dIn, 200, fmt.Errorf("mocked error"))
	volumeOut, err := ds.GetVolume(volumeIn.ID)
	assert.NotNil(err, "We are expecting an error")
	assert.Nil(volumeOut, "Expecting nil output")
	assert.Equal(err.Error(), "mocked error", "Error should be 'mocked error'")

	return volumeOut
}

// GetVolumeFailStatusMocked test mocked function
func GetVolumeFailStatusMocked(t *testing.T, volumeIn *types.Volume) *types.Volume {
	assert := assert.New(t)

	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")

	// to json
	dIn, err := json.Marshal(volumeIn)
	assert.Nil(err, "Volume test data corrupted")

	// call service
	cs.On("Get", fmt.Sprintf(APIPathStorageVolume, volumeIn.ID)).Return(dIn, 499, nil)
	volumeOut, err := ds.GetVolume(volumeIn.ID)
	assert.NotNil(err, "We are expecting an status code error")
	assert.Nil(volumeOut, "Expecting nil output")
	assert.Contains(err.Error(), "499", "Error should contain http code 499")

	return volumeOut
}

// GetVolumeFailJSONMocked test mocked function
func GetVolumeFailJSONMocked(t *testing.T, volumeIn *types.Volume) *types.Volume {
	assert := assert.New(t)

	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")

	// wrong json
	dIn := []byte{10, 20, 30}

	// call service
	cs.On("Get", fmt.Sprintf(APIPathStorageVolume, volumeIn.ID)).Return(dIn, 200, nil)
	volumeOut, err := ds.GetVolume(volumeIn.ID)
	assert.NotNil(err, "We are expecting a marshalling error")
	assert.Nil(volumeOut, "Expecting nil output")
	assert.Contains(err.Error(), "invalid character", "Error message should include the string 'invalid character'")

	return volumeOut
}

// CreateVolumeMocked test mocked function
func CreateVolumeMocked(t *testing.T, volumeIn *types.Volume) *types.Volume {
	assert := assert.New(t)

	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")

	// convertMap
	mapIn, err := utils.ItemConvertParams(*volumeIn)
	assert.Nil(err, "Volume test data corrupted")

	// to json
	dOut, err := json.Marshal(volumeIn)
	assert.Nil(err, "Volume test data corrupted")

	// call service
	cs.On("Post", APIPathStorageVolumes, mapIn).Return(dOut, 200, nil)
	volumeOut, err := ds.CreateVolume(mapIn)
	assert.Nil(err, "Error creating volume list")
	assert.Equal(volumeIn, volumeOut, "CreateVolume returned different volumes")

	return volumeOut
}

// CreateVolumeFailErrMocked test mocked function
func CreateVolumeFailErrMocked(t *testing.T, volumeIn *types.Volume) *types.Volume {
	assert := assert.New(t)

	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")

	// convertMap
	mapIn, err := utils.ItemConvertParams(*volumeIn)
	assert.Nil(err, "Volume test data corrupted")

	// to json
	dOut, err := json.Marshal(volumeIn)
	assert.Nil(err, "Volume test data corrupted")

	// call service
	cs.On("Post", APIPathStorageVolumes, mapIn).Return(dOut, 200, fmt.Errorf("mocked error"))
	volumeOut, err := ds.CreateVolume(mapIn)
	assert.NotNil(err, "We are expecting an error")
	assert.Nil(volumeOut, "Expecting nil output")
	assert.Equal(err.Error(), "mocked error", "Error should be 'mocked error'")

	return volumeOut
}

// CreateVolumeFailStatusMocked test mocked function
func CreateVolumeFailStatusMocked(t *testing.T, volumeIn *types.Volume) *types.Volume {
	assert := assert.New(t)

	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")

	// convertMap
	mapIn, err := utils.ItemConvertParams(*volumeIn)
	assert.Nil(err, "Volume test data corrupted")

	// to json
	dOut, err := json.Marshal(volumeIn)
	assert.Nil(err, "Volume test data corrupted")

	// call service
	cs.On("Post", APIPathStorageVolumes, mapIn).Return(dOut, 499, nil)
	volumeOut, err := ds.CreateVolume(mapIn)
	assert.NotNil(err, "We are expecting an status code error")
	assert.Nil(volumeOut, "Expecting nil output")
	assert.Contains(err.Error(), "499", "Error should contain http code 499")

	return volumeOut
}

// CreateVolumeFailJSONMocked test mocked function
func CreateVolumeFailJSONMocked(t *testing.T, volumeIn *types.Volume) *types.Volume {
	assert := assert.New(t)

	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")

	// convertMap
	mapIn, err := utils.ItemConvertParams(*volumeIn)
	assert.Nil(err, "Volume test data corrupted")

	// wrong json
	dIn := []byte{10, 20, 30}

	// call service
	cs.On("Post", APIPathStorageVolumes, mapIn).Return(dIn, 200, nil)
	volumeOut, err := ds.CreateVolume(mapIn)
	assert.NotNil(err, "We are expecting a marshalling error")
	assert.Nil(volumeOut, "Expecting nil output")
	assert.Contains(err.Error(), "invalid character", "Error message should include the string 'invalid character'")

	return volumeOut
}

// UpdateVolumeMocked test mocked function
func UpdateVolumeMocked(t *testing.T, volumeIn *types.Volume) *types.Volume {
	assert := assert.New(t)

	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")

	// convertMap
	mapIn, err := utils.ItemConvertParams(*volumeIn)
	assert.Nil(err, "Volume test data corrupted")

	// to json
	dOut, err := json.Marshal(volumeIn)
	assert.Nil(err, "Volume test data corrupted")

	// call service
	cs.On("Put", fmt.Sprintf(APIPathStorageVolume, volumeIn.ID), mapIn).Return(dOut, 200, nil)
	volumeOut, err := ds.UpdateVolume(volumeIn.ID, mapIn)
	assert.Nil(err, "Error updating volume list")
	assert.Equal(volumeIn, volumeOut, "UpdateVolume returned different volumes")

	return volumeOut
}

// UpdateVolumeFailErrMocked test mocked function
func UpdateVolumeFailErrMocked(t *testing.T, volumeIn *types.Volume) *types.Volume {
	assert := assert.New(t)

	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")

	// convertMap
	mapIn, err := utils.ItemConvertParams(*volumeIn)
	assert.Nil(err, "Volume test data corrupted")

	// to json
	dOut, err := json.Marshal(volumeIn)
	assert.Nil(err, "Volume test data corrupted")

	// call service
	cs.On("Put", fmt.Sprintf(APIPathStorageVolume, volumeIn.ID), mapIn).Return(dOut, 200, fmt.Errorf("mocked error"))
	volumeOut, err := ds.UpdateVolume(volumeIn.ID, mapIn)
	assert.NotNil(err, "We are expecting an error")
	assert.Nil(volumeOut, "Expecting nil output")
	assert.Equal(err.Error(), "mocked error", "Error should be 'mocked error'")

	return volumeOut
}

// UpdateVolumeFailStatusMocked test mocked function
func UpdateVolumeFailStatusMocked(t *testing.T, volumeIn *types.Volume) *types.Volume {
	assert := assert.New(t)

	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")

	// convertMap
	mapIn, err := utils.ItemConvertParams(*volumeIn)
	assert.Nil(err, "Volume test data corrupted")

	// to json
	dOut, err := json.Marshal(volumeIn)
	assert.Nil(err, "Volume test data corrupted")

	// call service
	cs.On("Put", fmt.Sprintf(APIPathStorageVolume, volumeIn.ID), mapIn).Return(dOut, 499, nil)
	volumeOut, err := ds.UpdateVolume(volumeIn.ID, mapIn)
	assert.NotNil(err, "We are expecting an status code error")
	assert.Nil(volumeOut, "Expecting nil output")
	assert.Contains(err.Error(), "499", "Error should contain http code 499")

	return volumeOut
}

// UpdateVolumeFailJSONMocked test mocked function
func UpdateVolumeFailJSONMocked(t *testing.T, volumeIn *types.Volume) *types.Volume {
	assert := assert.New(t)

	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")

	// convertMap
	mapIn, err := utils.ItemConvertParams(*volumeIn)
	assert.Nil(err, "Volume test data corrupted")

	// wrong json
	dIn := []byte{10, 20, 30}

	// call service
	cs.On("Put", fmt.Sprintf(APIPathStorageVolume, volumeIn.ID), mapIn).Return(dIn, 200, nil)
	volumeOut, err := ds.UpdateVolume(volumeIn.ID, mapIn)
	assert.NotNil(err, "We are expecting a marshalling error")
	assert.Nil(volumeOut, "Expecting nil output")
	assert.Contains(err.Error(), "invalid character", "Error message should include the string 'invalid character'")

	return volumeOut
}

// AttachVolumeMocked test mocked function; the mocked response is a Server
// whose ID matches the volume's AttachedServerID.
func AttachVolumeMocked(t *testing.T, volumeIn *types.Volume) *types.Server {
	assert := assert.New(t)

	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")

	// convertMap
	mapIn, err := utils.ItemConvertParams(*volumeIn)
	assert.Nil(err, "Volume test data corrupted")

	// to json
	dOut, err := json.Marshal(types.Server{ID: volumeIn.AttachedServerID})
	assert.Nil(err, "Volume test data corrupted")

	// call service
	cs.On("Post", fmt.Sprintf(APIPathStorageVolumeAttachedServer, volumeIn.ID), mapIn).Return(dOut, 200, nil)
	serverOut, err := ds.AttachVolume(volumeIn.ID, mapIn)
	assert.Nil(err, "Error attaching volume")
	assert.Equal(volumeIn.AttachedServerID, serverOut.ID, "AttachVolume returned invalid values")

	return serverOut
}

// AttachVolumeFailErrMocked test mocked function
func AttachVolumeFailErrMocked(t *testing.T, volumeIn *types.Volume) *types.Server {
	assert := assert.New(t)

	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")

	// convertMap
	mapIn, err := utils.ItemConvertParams(*volumeIn)
	assert.Nil(err, "Volume test data corrupted")

	// to json
	dOut, err := json.Marshal(types.Server{ID: volumeIn.AttachedServerID})
	assert.Nil(err, "Volume test data corrupted")

	// call service
	cs.On("Post", fmt.Sprintf(APIPathStorageVolumeAttachedServer, volumeIn.ID), mapIn).
		Return(dOut, 200, fmt.Errorf("mocked error"))
	serverOut, err := ds.AttachVolume(volumeIn.ID, mapIn)
	assert.NotNil(err, "We are expecting an error")
	assert.Nil(serverOut, "Expecting nil output")
	assert.Equal(err.Error(), "mocked error", "Error should be 'mocked error'")

	return serverOut
}

// AttachVolumeFailStatusMocked test mocked function
func AttachVolumeFailStatusMocked(t *testing.T, volumeIn *types.Volume) *types.Server {
	assert := assert.New(t)

	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")

	// convertMap
	mapIn, err := utils.ItemConvertParams(*volumeIn)
	assert.Nil(err, "Volume test data corrupted")

	// to json
	dOut, err := json.Marshal(types.Server{ID: volumeIn.AttachedServerID})
	assert.Nil(err, "Volume test data corrupted")

	// call service
	cs.On("Post", fmt.Sprintf(APIPathStorageVolumeAttachedServer, volumeIn.ID), mapIn).Return(dOut, 499, nil)
	serverOut, err := ds.AttachVolume(volumeIn.ID, mapIn)
	assert.NotNil(err, "We are expecting an status code error")
	assert.Nil(serverOut, "Expecting nil output")
	assert.Contains(err.Error(), "499", "Error should contain http code 499")

	return serverOut
}

// AttachVolumeFailJSONMocked test mocked function
func AttachVolumeFailJSONMocked(t *testing.T, volumeIn *types.Volume) *types.Server {
	assert := assert.New(t)

	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")

	// convertMap
	mapIn, err := utils.ItemConvertParams(*volumeIn)
	assert.Nil(err, "Volume test data corrupted")

	// wrong json
	dIn := []byte{10, 20, 30}

	// call service
	cs.On("Post", fmt.Sprintf(APIPathStorageVolumeAttachedServer, volumeIn.ID), mapIn).Return(dIn, 200, nil)
	serverOut, err := ds.AttachVolume(volumeIn.ID, mapIn)
	assert.NotNil(err, "We are expecting a marshalling error")
	assert.Nil(serverOut, "Expecting nil output")
	assert.Contains(err.Error(), "invalid character", "Error message should include the string 'invalid character'")

	return serverOut
}

// DetachVolumeMocked test mocked function
func DetachVolumeMocked(t *testing.T, volumeIn *types.Volume) {
	assert := assert.New(t)

	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")

	// to json
	dIn, err := json.Marshal(volumeIn)
	assert.Nil(err, "Volume test data corrupted")

	// call service
	cs.On("Delete", fmt.Sprintf(APIPathStorageVolumeAttachedServer, volumeIn.ID)).Return(dIn, 200, nil)
	err = ds.DetachVolume(volumeIn.ID)
	assert.Nil(err, "Error detaching volume")
}

// DetachVolumeFailErrMocked test mocked function
func DetachVolumeFailErrMocked(t *testing.T, volumeIn *types.Volume) {
	assert := assert.New(t)

	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")

	// to json
	dIn, err := json.Marshal(volumeIn)
	assert.Nil(err, "Volume test data corrupted")

	// call service
	cs.On("Delete", fmt.Sprintf(APIPathStorageVolumeAttachedServer, volumeIn.ID)).
		Return(dIn, 200, fmt.Errorf("mocked error"))
	err = ds.DetachVolume(volumeIn.ID)
	assert.NotNil(err, "We are expecting an error")
	assert.Equal(err.Error(), "mocked error", "Error should be 'mocked error'")
}

// DetachVolumeFailStatusMocked test mocked function
func DetachVolumeFailStatusMocked(t *testing.T, volumeIn *types.Volume) {
	assert := assert.New(t)

	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")

	// to json
	dIn, err := json.Marshal(volumeIn)
	assert.Nil(err, "Volume test data corrupted")

	// call service
	cs.On("Delete", fmt.Sprintf(APIPathStorageVolumeAttachedServer, volumeIn.ID)).Return(dIn, 499, nil)
	err = ds.DetachVolume(volumeIn.ID)
	assert.NotNil(err, "We are expecting an status code error")
	assert.Contains(err.Error(), "499", "Error should contain http code 499")
}

// DeleteVolumeMocked test mocked function
func DeleteVolumeMocked(t *testing.T, volumeIn *types.Volume) {
	assert := assert.New(t)

	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")

	// to json
	dIn, err := json.Marshal(volumeIn)
	assert.Nil(err, "Volume test data corrupted")

	// call service
	cs.On("Delete", fmt.Sprintf(APIPathStorageVolume, volumeIn.ID)).Return(dIn, 200, nil)
	err = ds.DeleteVolume(volumeIn.ID)
	assert.Nil(err, "Error deleting volume")
}

// DeleteVolumeFailErrMocked test mocked function
func DeleteVolumeFailErrMocked(t *testing.T, volumeIn *types.Volume) {
	assert := assert.New(t)

	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")

	// to json
	dIn, err := json.Marshal(volumeIn)
	assert.Nil(err, "Volume test data corrupted")

	// call service
	cs.On("Delete", fmt.Sprintf(APIPathStorageVolume, volumeIn.ID)).Return(dIn, 200, fmt.Errorf("mocked error"))
	err = ds.DeleteVolume(volumeIn.ID)
	assert.NotNil(err, "We are expecting an error")
	assert.Equal(err.Error(), "mocked error", "Error should be 'mocked error'")
}

// DeleteVolumeFailStatusMocked test mocked function
func DeleteVolumeFailStatusMocked(t *testing.T, volumeIn *types.Volume) {
	assert := assert.New(t)

	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")

	// to json
	dIn, err := json.Marshal(volumeIn)
	assert.Nil(err, "Volume test data corrupted")

	// call service
	cs.On("Delete", fmt.Sprintf(APIPathStorageVolume, volumeIn.ID)).Return(dIn, 499, nil)
	err = ds.DeleteVolume(volumeIn.ID)
	assert.NotNil(err, "We are expecting an status code error")
	assert.Contains(err.Error(), "499", "Error should contain http code 499")
}

// DiscardVolumeMocked test mocked function
func DiscardVolumeMocked(t *testing.T, volumeIn *types.Volume) {
	assert := assert.New(t)

	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")

	// to json
	dIn, err := json.Marshal(volumeIn)
	assert.Nil(err, "Volume test data corrupted")

	// call service
	cs.On("Delete", fmt.Sprintf(APIPathStorageVolumeDiscard, volumeIn.ID)).Return(dIn, 200, nil)
	err = ds.DiscardVolume(volumeIn.ID)
	assert.Nil(err, "Error discarding volume")
}

// DiscardVolumeFailErrMocked test mocked function
func DiscardVolumeFailErrMocked(t *testing.T, volumeIn *types.Volume) {
	assert := assert.New(t)

	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")

	// to json
	dIn, err := json.Marshal(volumeIn)
	assert.Nil(err, "Volume test data corrupted")

	// call service
	cs.On("Delete", fmt.Sprintf(APIPathStorageVolumeDiscard, volumeIn.ID)).
		Return(dIn, 200, fmt.Errorf("mocked error"))
	err = ds.DiscardVolume(volumeIn.ID)
	assert.NotNil(err, "We are expecting an error")
	assert.Equal(err.Error(), "mocked error", "Error should be 'mocked error'")
}

// DiscardVolumeFailStatusMocked test mocked function
func DiscardVolumeFailStatusMocked(t *testing.T, volumeIn *types.Volume) {
	assert := assert.New(t)

	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")

	// to json
	dIn, err := json.Marshal(volumeIn)
	assert.Nil(err, "Volume test data corrupted")

	// call service
	cs.On("Delete", fmt.Sprintf(APIPathStorageVolumeDiscard, volumeIn.ID)).Return(dIn, 499, nil)
	err = ds.DiscardVolume(volumeIn.ID)
	assert.NotNil(err, "We are expecting an status code error")
	assert.Contains(err.Error(), "499", "Error should contain http code 499")
}
api/storage/volumes_api_mocked.go
0.5144
0.435181
volumes_api_mocked.go
starcoder
package mol import "github.com/fogleman/ln/ln" type Atom struct { X, Y, Z float64 Symbol string } type Bond struct { I, J int Type int } type Molecule struct { Atoms []Atom Bonds []Bond } type Sphere struct { Center Vector Radius float64 Symbol string } type Cylinder struct { A, B Vector Radius float64 Type int } func (m *Molecule) Solids() ([]Sphere, []Cylinder) { spheres := make([]Sphere, len(m.Atoms)) cylinders := make([]Cylinder, len(m.Bonds)) for i, atom := range m.Atoms { center := Vector{atom.X, atom.Y, atom.Z} radius := float64(AtomicRadii[atom.Symbol]) / 100 spheres[i] = Sphere{center, radius, atom.Symbol} } for i, bond := range m.Bonds { s0 := spheres[bond.I] s1 := spheres[bond.J] radius := float64(bond.Type) / 16 cylinders[i] = Cylinder{s0.Center, s1.Center, radius, bond.Type} } return spheres, cylinders } func (m *Molecule) Camera() Camera { points := make([]Vector, len(m.Atoms)) for i, atom := range m.Atoms { points[i] = Vector{atom.X, atom.Y, atom.Z} } return MakeCamera(points) } func (m *Molecule) Paths(width, height float64) ln.Paths { scene := ln.Scene{} camera := m.Camera() eye := camera.Eye.ln() center := camera.Center.ln() up := camera.Up.ln() fovy := camera.Fovy spheres, cylinders := m.Solids() for _, s := range spheres { scene.Add(ln.NewOutlineSphere(eye, up, s.Center.ln(), s.Radius*0.5)) } for _, c := range cylinders { scene.Add(ln.NewTransformedOutlineCylinder(eye, up, c.A.ln(), c.B.ln(), c.Radius)) } return scene.Render(eye, center, up, width, height, fovy, 0.1, 100, 0.01) } func (m *Molecule) Render(path string, width, height float64) { paths := m.Paths(width, height) paths.WriteToPNG(path, width, height) } var AtomicRadii = map[string]int{ "H": 53, "He": 31, "Li": 167, "Be": 112, "B": 87, "C": 67, "N": 56, "O": 48, "F": 42, "Ne": 38, "Na": 190, "Mg": 145, "Al": 118, "Si": 111, "P": 98, "S": 88, "Cl": 79, "Ar": 71, "K": 243, "Ca": 194, "Sc": 184, "Ti": 176, "V": 171, "Cr": 166, "Mn": 161, "Fe": 156, "Co": 152, "Ni": 149, "Cu": 145, "Zn": 
142, "Ga": 136, "Ge": 125, "As": 114, "Se": 103, "Br": 94, "Kr": 88, "Rb": 265, "Sr": 219, "Y": 212, "Zr": 206, "Nb": 198, "Mo": 190, "Tc": 183, "Ru": 178, "Rh": 173, "Pd": 169, "Ag": 165, "Cd": 161, "In": 156, "Sn": 145, "Sb": 133, "Te": 123, "I": 115, "Xe": 108, "Cs": 298, "Ba": 253, "Pr": 247, "Nd": 206, "Pm": 205, "Sm": 238, "Eu": 231, "Gd": 233, "Tb": 225, "Dy": 228, "Er": 226, "Tm": 222, "Yb": 222, "Lu": 217, "Hf": 208, "Ta": 200, "W": 193, "Re": 188, "Os": 185, "Ir": 180, "Pt": 177, "Au": 174, "Hg": 171, "Tl": 156, "Pb": 154, "Bi": 143, "Po": 135, "Rn": 120, }
vendor/github.com/fogleman/mol/mol/model.go
0.596786
0.51623
model.go
starcoder
// A sequence of elements supporting sequential and parallel aggregate // operations. The following example illustrates an aggregate operation using // SEE java/util/function/Consumer.java package consumer import ( "github.com/searKing/golang/go/error/exception" "github.com/searKing/golang/go/util/class" "github.com/searKing/golang/go/util/object" ) /** * Represents an operation that accepts a single input argument and returns no * result. Unlike most other functional interfaces, {@code Consumer} is expected * to operate via side-effects. * * <p>This is a <a href="package-summary.html">functional interface</a> * whose functional method is {@link #accept(Object)}. * * @param <T> the type of the input to the operation * * @since 1.8 */ type Consumer interface { /** * Performs this operation on the given argument. * * @param t the input argument */ Accept(t interface{}) /** * Returns a composed {@code Consumer} that performs, in sequence, this * operation followed by the {@code after} operation. If performing either * operation throws an exception, it is relayed to the caller of the * composed operation. If performing this operation throws an exception, * the {@code after} operation will not be performed. * * @param after the operation to perform after this operation * @return a composed {@code Consumer} that performs in sequence this * operation followed by the {@code after} operation * @throws NullPointerException if {@code after} is null */ AndThen(after Consumer) Consumer } type ConsumerFunc func(t interface{}) // Accept calls f(t). 
func (f ConsumerFunc) Accept(t interface{}) { f(t) } func (f ConsumerFunc) AndThen(after Consumer) Consumer { object.RequireNonNil(after) return ConsumerFunc(func(t interface{}) { f.Accept(t) after.Accept(t) }) } type TODO struct { class.Class } func (consumer *TODO) Accept(t interface{}) { panic(exception.NewIllegalStateException1("called wrong Accept method")) } func (consumer *TODO) AndThen(after Consumer) Consumer { object.RequireNonNil(after) return ConsumerFunc(func(t interface{}) { consumer.GetDerived().(Consumer).Accept(t) after.Accept(t) }) }
go/util/function/consumer/consumer.go
0.840423
0.409929
consumer.go
starcoder
package main import ( "image" "image/color" ) const ( VRAMTilePattern = 0x8000 // 0x8000-0x97FF VRAMTilePatternEnd = 0x97FF // 0x8000-0x97FF VRAMBackgroundMap = 0x9800 // 0x9800-0x9BFF VRAMBackgroundMapEnd = 0x9BFF // 0x9800-0x9BFF ) const ( TileWidth = 8 // Each tile is 8 pixels across TileHeight = 8 // Each tile is 8 pixels tall MapWidth = 32 // The map is 32 tiles across MapHeight = 32 // The map is 32 tiles tall ) // paletteMap maps color index to RGBA color. TODO: use register values var paletteMap = map[uint8]color.RGBA{ 0x00: color.RGBA{0x00, 0x00, 0x00, 0xff}, 0x01: color.RGBA{0x33, 0x33, 0x33, 0xff}, 0x02: color.RGBA{0xcc, 0xcc, 0xcc, 0xff}, 0x03: color.RGBA{0xff, 0xff, 0xff, 0xff}, } // selectSemiNibble picks the semi-nibble from input given index func selectSemiNibble(input uint8, index uint8) uint8 { return uint8(input>>uint8(index*2)) & 0x03 } func compositePixel(lsb, hsb, index uint8) uint8 { return (lsb>>index)&0x1 | (hsb>>index)&0x1<<1 } // tileToPixel returns an 8x8 array of RGBA pixel values. It inputs a tile index. // This is then converted to the raw pointer to the tile data which is a set of // 8 16-bit unsigned integers. 
From here it iterates through each 2-bit pixel // value and translates that into a 32-bit 8x8 array func tileToPixel(tileIndex uint8, mem *GBMem) [TileHeight][TileWidth]color.RGBA { var pixels [TileHeight][TileWidth]color.RGBA for y := 0; y < TileHeight; y++ { var line [TileWidth]color.RGBA lsb := mem.vram[int(tileIndex)*16+2*y] hsb := mem.vram[int(tileIndex)*16+2*y+1] for x := 0; x < TileWidth; x++ { line[TileWidth-1-x] = paletteMap[compositePixel(lsb, hsb, uint8(x))] } pixels[y] = line } return pixels } // drawTilePixels draws an 8x8 tile onto an image func drawTilePixels(image *image.RGBA, pixel [8][8]color.RGBA, xOffset int, yOffset int) *image.RGBA { for x := 0; x < TileWidth; x++ { for y := 0; y < TileHeight; y++ { image.SetRGBA(xOffset*TileWidth+x, yOffset*TileHeight+y, pixel[y][x]) } } return image } // drawBackground draws the background tile map onto an image func drawBackground(image *image.RGBA, mem *GBMem) *image.RGBA { for x := 0; x < MapWidth; x++ { for y := 0; y < MapHeight; y++ { // get tile index tileIndex := mem.read(uint16(VRAMBackgroundMap + x + (y * MapHeight))) // get pixels corresponding to tile index pixels := tileToPixel(tileIndex, mem) // draw pixels drawTilePixels(image, pixels, x, y) } } return image }
goboy/video.go
0.570571
0.425904
video.go
starcoder
package handy import ( "strings" "unicode/utf8" ) const ( // TransformNone No transformations are ordered. Only constraints maximum length // TransformNone turns all other flags OFF. TransformNone = 1 // TransformFlagTrim Trim spaces before and after process the input // TransformFlagTrim Trims the string, removing leading and trailing spaces TransformFlagTrim = 2 // TransformFlagLowerCase Makes the string lowercase // If case transformation flags are combined, the last one remains, considering the following order: TransformFlagTitleCase, TransformFlagLowerCase and TransformFlagUpperCase. TransformFlagLowerCase = 4 // TransformFlagUpperCase Makes the string uppercase // If case transformation flags are combined, the last one remains, considering the following order: TransformFlagTitleCase, TransformFlagLowerCase and TransformFlagUpperCase. TransformFlagUpperCase = 8 // TransformFlagOnlyDigits Removes all non-numeric characters TransformFlagOnlyDigits = 16 // TransformFlagOnlyLetters Removes all non-letter characters TransformFlagOnlyLetters = 32 // TransformFlagOnlyLettersAndDigits Leaves only letters and numbers TransformFlagOnlyLettersAndDigits = 64 // TransformFlagHash After process all other flags, applies SHA256 hashing on string for output // The routine applies handy.StringHash() on given string TransformFlagHash = 128 // TransformFlagTitleCase Makes the string uppercase // If case transformation flags are combined, the last one remains, considering the following order: TransformFlagTitleCase, TransformFlagLowerCase and TransformFlagUpperCase. 
TransformFlagTitleCase = 256 // TransformFlagRemoveDigits Removes all digit characters, without to touch on any other // If combined with TransformFlagOnlyLettersAndDigits, TransformFlagOnlyDigits or TransformFlagOnlyLetters, it's ineffective TransformFlagRemoveDigits = 512 ) // Transform handles a string according given flags/parametrization, as follows: // The transformations are made in arbitrary order, what can result in unexpected output. It the input matters, use TransformSerially instead. // If maxLen==0, truncation is skipped // The last operations are, by order, truncation and trimming. func Transform(s string, maxLen int, transformFlags uint) string { if s == "" { return s } if transformFlags&TransformNone == TransformNone { if maxLen > 0 && utf8.RuneCountInString(s) > maxLen { s = string([]rune(s)[:maxLen]) } return s } if (transformFlags & TransformFlagOnlyLettersAndDigits) == TransformFlagOnlyLettersAndDigits { s = OnlyLettersAndNumbers(s) } if (transformFlags & TransformFlagOnlyDigits) == TransformFlagOnlyDigits { s = OnlyDigits(s) } if (transformFlags & TransformFlagOnlyLetters) == TransformFlagOnlyLetters { s = OnlyLetters(s) } if (transformFlags & TransformFlagRemoveDigits) == TransformFlagRemoveDigits { s = RemoveDigits(s) } // Have to trim before and after, to avoid issues with string truncation and new leading/trailing spaces if (transformFlags & TransformFlagTrim) == TransformFlagTrim { s = strings.TrimSpace(s) } if (transformFlags & TransformFlagTitleCase) == TransformFlagTitleCase { s = strings.Title(strings.ToLower(s)) } if (transformFlags & TransformFlagLowerCase) == TransformFlagLowerCase { s = strings.ToLower(s) } if (transformFlags & TransformFlagUpperCase) == TransformFlagUpperCase { s = strings.ToUpper(s) } if (transformFlags & TransformFlagHash) == TransformFlagHash { s = StringHash(s) } if s == "" { return s } if maxLen > 0 && utf8.RuneCountInString(s) > maxLen { s = string([]rune(s)[:maxLen]) } // Have to trim before and after, to 
avoid issues with string truncation and new leading/trailing spaces if (transformFlags & TransformFlagTrim) == TransformFlagTrim { s = strings.TrimSpace(s) } return s } // TransformSerially reformat given string according parameters, in the order these params were sent // Example: TransformSerially("uh lalah 123", 4, TransformFlagOnlyDigits,TransformFlagHash,TransformFlagUpperCase) // First remove non-digits, then hashes string and after make it all uppercase. // If maxLen==0, truncation is skipped // Truncation is the last operation func TransformSerially(s string, maxLen int, transformFlags ...uint) string { if s == "" { return s } for _, flag := range transformFlags { switch flag { case TransformFlagOnlyLettersAndDigits: s = OnlyLettersAndNumbers(s) case TransformFlagOnlyDigits: s = OnlyDigits(s) case TransformFlagOnlyLetters: s = OnlyLetters(s) case TransformFlagTrim: s = strings.TrimSpace(s) case TransformFlagTitleCase: s = strings.ToTitle(s) case TransformFlagLowerCase: s = strings.ToLower(s) case TransformFlagUpperCase: s = strings.ToUpper(s) case TransformFlagHash: s = StringHash(s) } } if maxLen > 0 && utf8.RuneCountInString(s) > maxLen { s = string([]rune(s)[:maxLen]) } return s }
transform.go
0.538255
0.491456
transform.go
starcoder
package operator import ( "github.com/matrixorigin/matrixone/pkg/container/nulls" "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/vm/process" ) func ColXorCol(lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error) { lvs, rvs := lv.Col.([]bool), rv.Col.([]bool) n := len(lvs) vec, err := proc.AllocVector(lv.Typ, int64(n)*1) if err != nil { return nil, err } col := make([]bool, len(lvs)) for i := 0; i < len(lvs); i++ { col[i] = (lvs[i] || rvs[i]) && !(lvs[i] && rvs[i]) } nulls.Or(lv.Nsp, rv.Nsp, vec.Nsp) vector.SetCol(vec, col) return vec, nil } func ColXorConst(lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error) { lvs, rvs := lv.Col.([]bool), rv.Col.([]bool) n := len(lvs) vec, err := proc.AllocVector(lv.Typ, int64(n)*1) if err != nil { return nil, err } rb := rvs[0] col := make([]bool, len(lvs)) for i := 0; i < len(lvs); i++ { col[i] = (lvs[i] || rb) && !(lvs[i] && rb) } nulls.Or(lv.Nsp, rv.Nsp, vec.Nsp) vector.SetCol(vec, col) return vec, nil } func ColXorNull(lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error) { lvs := lv.Col.([]bool) n := len(lvs) vec, err := proc.AllocVector(lv.Typ, int64(n)*1) if err != nil { return nil, err } col := make([]bool, len(lvs)) for i := 0; i < len(lvs); i++ { nulls.Add(vec.Nsp, uint64(i)) } vector.SetCol(vec, col) return vec, nil } func ConstXorCol(lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error) { return ColXorConst(rv, lv, proc) } func ConstXorConst(lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error) { lvs, rvs := lv.Col.([]bool), rv.Col.([]bool) vec := proc.AllocScalarVector(lv.Typ) vector.SetCol(vec, []bool{(lvs[0] || rvs[0]) && !(lvs[0] && rvs[0])}) return vec, nil } func ConstXorNull(lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error) { return proc.AllocScalarNullVector(lv.Typ), nil } func NullXorCol(lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error) { 
return ColXorNull(rv, lv, proc) } func NullXorConst(lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error) { return ConstXorNull(rv, lv, proc) } func NullXorNull(lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error) { return proc.AllocScalarNullVector(lv.Typ), nil } type XorFunc = func(lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error) var XorFuncMap = map[int]XorFunc{} var XorFuncVec = []XorFunc{ ColXorCol, ColXorConst, ColXorNull, ConstXorCol, ConstXorConst, ConstXorNull, NullXorCol, NullXorConst, NullXorNull, } func InitXorFuncMap() { for i := 0; i < len(XorFuncVec); i++ { XorFuncMap[i] = XorFuncVec[i] } } func Xor(vectors []*vector.Vector, proc *process.Process) (*vector.Vector, error) { lv := vectors[0] rv := vectors[1] lt, rt := GetTypeID(lv), GetTypeID(rv) vec, err := XorFuncMap[lt*3+rt](lv, rv, proc) if err != nil { return nil, err } return vec, nil }
pkg/sql/plan2/function/operator/xor.go
0.545044
0.494507
xor.go
starcoder
package csv import ( "bufio" "encoding/csv" "fmt" "os" "pinmap/pm" ) // CSVReader reads the EC pin references from a comma separated // values file. type CSVReader struct { } // Name returns the name of this reader. func (r *CSVReader) Name() string { return "csv" } // Read reads the CSV file (provided as the argument) and extracts // the pin reference data. The first line is expected to be column // titles that are used to identify the columns. func (r *CSVReader) Read(chipName, arg string) (*pm.Pins, error) { f, err := os.Open(arg) if err != nil { return nil, err } defer f.Close() rdr := csv.NewReader(bufio.NewReader(f)) data, err := rdr.ReadAll() if err != nil { return nil, err } if len(data) < 2 { return nil, fmt.Errorf("no data in file") } // Put the CSV headers into a map. cmap := make(map[string]int) for c, s := range data[0] { cmap[s] = c } // Find the matching columns that are needed. signal, ok := cmap["Signal Name"] if !ok { return nil, fmt.Errorf("missing 'Signal Name' column") } // Find chip column chip, ok := cmap[chipName] if !ok { return nil, fmt.Errorf("missing '%s' chip column", chipName) } ptype, ok := cmap["Type"] if !ok { return nil, fmt.Errorf("missing 'Type' column") } enum, ok := cmap["Enum"] if !ok { return nil, fmt.Errorf("missing 'Enum' column") } var pins pm.Pins // Read the rest of the rows. 
for i, row := range data[1:] { p := new(pm.Pin) switch row[ptype] { default: fmt.Printf("%s:%d: Unknown signal type (%s) - ignored", arg, i+1, row[ptype]) continue case "OTHER": // Skipped continue case "ADC": p.PinType = pm.ADC pins.Adc = append(pins.Adc, p) case "PWM": p.PinType = pm.PWM pins.Pwm = append(pins.Pwm, p) case "PWM_INVERT": p.PinType = pm.PWM_INVERT pins.Pwm = append(pins.Pwm, p) case "I2C_DATA": // Only the clock pin is used for the config continue case "I2C_CLOCK": p.PinType = pm.I2C pins.I2c = append(pins.I2c, p) case "INPUT": p.PinType = pm.Input pins.Gpio = append(pins.Gpio, p) case "INPUT_PU": p.PinType = pm.InputPU pins.Gpio = append(pins.Gpio, p) case "INPUT_PD": p.PinType = pm.InputPD pins.Gpio = append(pins.Gpio, p) case "OUTPUT": p.PinType = pm.Output pins.Gpio = append(pins.Gpio, p) case "OUTPUT_ODL": p.PinType = pm.OutputODL pins.Gpio = append(pins.Gpio, p) case "OUTPUT_ODR": p.PinType = pm.OutputOD pins.Gpio = append(pins.Gpio, p) } p.Signal = row[signal] p.Pin = row[chip] p.Enum = row[enum] } return &pins, nil }
util/pinmap/readers/csv/csv.go
0.579995
0.400486
csv.go
starcoder
package heap import ( "math" ) // Comparator is the interface to be implemented by heap value comparators. type Comparator interface { // Equal returns whether i is equal to j. Equal(i, j interface{}) bool // Less returns whether is is lower than j. Less(i, j interface{}) bool } // parentIndex returns the index of the parent of the child located at the // specified index. func parentIndex(childIndex int) int { return int(math.Floor(float64(childIndex-1) / 2)) } // leftChildIndex returns the index of the left child of the parent located at // the given index. func leftChildIndex(parentIndex int) int { return 2*parentIndex + 1 } // Heap represents a simple datastructure that can contain entries for which the // following rule is always true: a parent's value is at least at large as any // of its direct children. Values stored in the heap should be of the same type // and comparable with the given comparator. type Heap struct { comp Comparator Slice []interface{} } // NewHeap creates a new empty Heap. func NewHeap(c Comparator) *Heap { return &Heap{ comp: c, } } // Heapify creates a new Heap given a slice and a value comparator. func Heapify(slice []interface{}, c Comparator) *Heap { cp := make([]interface{}, len(slice)) copy(cp, slice) heap := &Heap{ comp: c, Slice: cp, } end := len(slice) - 1 for start := parentIndex(end); start >= 0; start-- { heap.siftDown(start, end) } return heap } // Pop removes the root from the Heap and returns it. func (h *Heap) Pop() interface{} { if len(h.Slice) < 1 { return nil } end := len(h.Slice) - 1 root := h.Slice[0] if len(h.Slice) == 1 { h.Slice = nil return root } h.swapItems(0, end) h.Slice = h.Slice[:end] h.siftDown(0, len(h.Slice)-1) return root } // Push inserts a new value into the Heap. func (h *Heap) Push(val interface{}) { h.Slice = append(h.Slice, val) h.siftUp(len(h.Slice) - 1) } // RepairDown is used to sift down starting at index i. 
func (h *Heap) RepairDown(i int) { h.siftDown(i, len(h.Slice)-1) } // RepairUp is used to sift up starting at index i. func (h *Heap) RepairUp(i int) { h.siftUp(i) } // Sort is used to run the Heapsort algorithm in-place. The Heap's slice will // contain sorted values in ascending order. func (h *Heap) Sort() { for end := len(h.Slice) - 1; end >= 0; end-- { h.swapItems(0, end) h.siftDown(0, end-1) } } // siftDown is used to sift down starting at index start until the index end. func (h *Heap) siftDown(start, end int) { root := start for leftChildIndex(root) <= end { child := leftChildIndex(root) swap := root if h.comp.Less(h.Slice[swap], h.Slice[child]) { swap = child } if child+1 <= end && h.comp.Less(h.Slice[swap], h.Slice[child+1]) { swap = child + 1 } if swap == root { return } h.swapItems(root, swap) root = swap } } // siftUp is used to sift up starting at index start until index 0. func (h *Heap) siftUp(start int) { root := start for parentIndex(root) >= 0 { parent := parentIndex(root) swap := root if h.comp.Less(h.Slice[parent], h.Slice[swap]) { swap = parent } if swap == root { return } h.swapItems(root, swap) root = swap } } // swapItems execute an in-place swap of the two items located at index i and j. func (h *Heap) swapItems(i, j int) { h.Slice[i], h.Slice[j] = h.Slice[j], h.Slice[i] }
heap.go
0.851135
0.515254
heap.go
starcoder
package functions import ( "regexp" ) var FuncRegexpMatch = Function{ Description: `Reports whether the string <str> contains any match of the regular expression <pattern>. See https://golang.org/pkg/regexp/ for more details.`, Parameters: Parameters{{ Name: "pattern", }, { Name: "str", }}, }.MustWithFunc(func(pattern string, str string) (bool, error) { return regexp.MatchString(pattern, str) }) var FuncRegexpFindAll = Function{ Description: `Returns an array of all matches of the regular expression <pattern> in the input string. See https://golang.org/pkg/regexp/ for more details.`, Parameters: Parameters{{ Name: "pattern", }, { Name: "n", Description: "Limits the maximum number of matches that should be returned. If smaller then <0> it will be unlimited.", }, { Name: "str", }}, }.MustWithFunc(func(pattern string, n int, str string) ([]string, error) { if r, err := regexp.Compile(pattern); err != nil { return []string{}, err } else { return r.FindAllString(str, n), nil } }) var FuncRegexpFind = Function{ Description: `Returns the first match of the regular expression <pattern> in the input string. See https://golang.org/pkg/regexp/ for more details.`, Parameters: Parameters{{ Name: "pattern", }, { Name: "str", }}, }.MustWithFunc(func(pattern string, str string) (string, error) { if r, err := regexp.Compile(pattern); err != nil { return "", err } else { return r.FindString(str), nil } }) var FuncRegexpReplaceAll = Function{ Description: `Returns copy of src, replacing matches of the Regexp with the replacement string repl. Inside repl, $ signs are interpreted as in Expand, so for instance $1 represents the text of the first submatch. 
See https://golang.org/pkg/regexp/ for more details.`, Parameters: Parameters{{ Name: "pattern", }, { Name: "replacement", }, { Name: "str", }}, }.MustWithFunc(func(pattern string, replacement string, str string) (string, error) { if r, err := regexp.Compile(pattern); err != nil { return "", err } else { return r.ReplaceAllString(str, replacement), nil } }) var FuncRegexpSplit = Function{ Description: `Splits into substrings separated by the expression and returns a slice of the substrings between those expression matches. See https://golang.org/pkg/regexp/ for more details.`, Parameters: Parameters{{ Name: "pattern", }, { Name: "n", Description: "Limits the maximum number of parts that should be returned. If smaller then <0> it will be unlimited.", }, { Name: "str", }}, }.MustWithFunc(func(pattern string, n int, str string) ([]string, error) { if r, err := regexp.Compile(pattern); err != nil { return []string{}, err } else { return r.Split(str, n), nil } }) var FuncsRegexp = Functions{ "regexpMatch": FuncRegexpMatch, "regexpFindAll": FuncRegexpFindAll, "regexpFind": FuncRegexpFind, "regexpReplaceAll": FuncRegexpReplaceAll, "regexpSplit": FuncRegexpSplit, } var CategoryRegexp = Category{ Functions: FuncsRegexp, }
template/functions/regexp.go
0.76366
0.523664
regexp.go
starcoder
package dataorg import ( "github.com/erician/gpdDB/common/gpdconst" "github.com/erician/gpdDB/utils/byteutil" ) //dnode means data node, and it has the same header with node //dnode is leaf node //DNodeGetPairLen get the space that a pair occupies func DNodeGetPairLen(key string, value string) int { return len(key) + len(value) + int(NodeKeyLenSize) + int(NodeValueLenSize) } //DNodeFindInsertPos find the pos where the key shoud insert func DNodeFindInsertPos(node []byte, key string) (pos int, doesAlreadyExist bool) { insertKey := []byte(key) nodeLen := NodeGetLen(node) pos = int(NodeConstValueHeaderLen) for pos < int(nodeLen) { desKey := NodeGetKeyOrValue(node, pos) result := byteutil.ByteCmp(insertKey, desKey) if result <= 0 { if result == 0 { return pos, true } return pos, false } pos = NodeNextKey(node, pos) } return pos, false } //DNodeInsertPair insert a pair. //need log func DNodeInsertPair(node []byte, key string, value string, pos int) { if int(NodeGetLen(node)) != pos { DNodeRightShift(node, pos, DNodeGetPairLen(key, value)) } pos = NodeSetKeyOrValue(node, pos, []byte(key), 0, len(key)) NodeSetKeyOrValue(node, pos, []byte(value), 0, len(value)) } //DNodeRightShift logical right shift func DNodeRightShift(node []byte, pos int, distance int) { for i := int(NodeGetLen(node)) - 1; i >= pos; i-- { node[distance+i] = node[i] } } //DNodeLeftShift logical right shift func DNodeLeftShift(node []byte, pos int, distance int) { for i := pos; i < int(NodeGetLen(node)); i++ { node[i-distance] = node[i] } } //DNodeFindSplitPos split the srcNode into secNode //To be simple, just split from the middile, //NOTE: the splitPos will be bigger the gpdconst.BlockSize/2 //AND NEVER be the first pair, which is important in putPairInIndex to void same index func DNodeFindSplitPos(srcNode []byte) (splitPos int) { splitPos = int(NodeGetHeaderLen(srcNode)) for splitPos < int(gpdconst.BlockSize/2) { splitPos = NodeNextKey(srcNode, splitPos) } return } //DNodeDeletePair delete a pair 
//need log func DNodeDeletePair(node []byte, key string, pos int) string { nextKeyPos := NodeNextKey(node, pos) DNodeLeftShift(node, nextKeyPos, nextKeyPos-pos) NodeSetLen(node, NodeGetLen(node)+int16(nextKeyPos-pos)) return string(NodeGetKeyOrValue(node, NodeNextField(node, pos))[:]) }
dataorg/dnode.go
0.508544
0.57332
dnode.go
starcoder
package main import ( "encoding/binary" . "github.com/mmcloughlin/avo/build" . "github.com/mmcloughlin/avo/operand" . "github.com/mmcloughlin/avo/reg" ) // preamble loads the input data and returns variables referencing those values func preamble(dataByteCount, dataByteMask Mem) (encoded Mem, encodedCap Register, data Mem, dataLen Register, dataTail GPVirtual, ci GPVirtual, di GPVirtual, n GPVirtual, byteCountPtr Mem, byteMaskptr Mem) { encoded = Mem{Base: Load(Param("encoded").Base(), GP64())} encodedCap = Load(Param("encoded").Cap(), GP64()) Comment("Revert to scalar processing if we are within 16 bytes of the end.") SUBQ(Imm(16), encodedCap) data = Mem{Base: Load(Param("data").Base(), GP64())} dataLen = Load(Param("data").Len(), GP64()) dataTail = GP64() Comment("Revert to scalar processing if we have less than 4 values to process.") MOVQ(dataLen, dataTail) SUBQ(Imm(4), dataTail) Comment("Initialize the control index.") ci = GP64() XORQ(ci, ci) Comment("Initialize the data index. (len(data) + 3) >> 2") di = GP64() MOVQ(dataLen, di) ADDQ(Imm(3), di) SHRQ(Imm(2), di) Comment("Initialize the output index.") n = GP64() XORQ(n, n) Comment("The byte count lookup table.") byteCountPtr = Mem{Base: GP64()} LEAQ(dataByteCount, byteCountPtr.Base) Comment("The byte mask lookup table.") byteMaskptr = Mem{Base: GP64()} LEAQ(dataByteMask, byteMaskptr.Base) return encoded, encodedCap, data, dataLen, dataTail, ci, di, n, byteCountPtr, byteMaskptr } // decodeSIMDUint32 reads control byte and 4 uint32 from data bytes and returns the count of bytes read aong with the dataBytes func decodeSIMDUint32(encoded Mem, ci, di GPVirtual, byteCountPtr, byteMaskptr Mem) (VecVirtual, GPVirtual) { Comment("Load control byte.") cb := GP64() MOVBQZX(encoded.Idx(ci, 1), cb) INCQ(ci) Comment("Load 16 data bytes into XMM.") dataBytes := XMM() MOVOU(encoded.Idx(di, 1), dataBytes) Comment("Lookup count to increment data index.") byteCount := GP64() MOVBQZX(byteCountPtr.Idx(cb, 1), byteCount) 
Comment("Lookup the PSHUFB mask.") SHLQ(Imm(4), cb) Comment("Use mask to shuffle the relevant bytes into place.") PSHUFB(byteMaskptr.Idx(cb, 1), dataBytes) return dataBytes, byteCount } // decodeScalarUint32 reads control byte and returns the decoded uint32 value func decodeScalarUint32(n, ci, di GPVirtual, encoded, data Mem) (val GPVirtual) { Comment("Determine if we need to load a new control byte.") TESTQ(U32(3), n) JNE(LabelRef("loadBytes")) Comment("Load control byte.") cb := GP64() MOVBQZX(encoded.Idx(ci, 1), cb) INCQ(ci) Label("loadBytes") Comment("Switch on the low two bits of the control byte.") switchVal := GP64() MOVQ(cb, switchVal) ANDQ(Imm(3), switchVal) JE(LabelRef("oneByte")) CMPQ(switchVal, Imm(1)) JE(LabelRef("twoByte")) CMPQ(switchVal, Imm(2)) JE(LabelRef("threeByte")) val = GP32() Label("fourByte") MOVL(encoded.Idx(di, 1), val) // val = binary.LittleEndian.Uint32(encoded[di:]) ADDQ(Imm(4), di) // di += 4 JMP(LabelRef("shiftControl")) Label("threeByte") hi := GP32() MOVWLZX(encoded.Idx(di, 1), val) // val = uint32(binary.LittleEndian.Uint16(encoded[di:])) MOVBLZX(encoded.Idx(di, 1).Offset(2), hi) // hi = uint32(encoded[di+2]) SHLL(Imm(16), hi) // hi <<= 16 ORL(hi, val) // val = (hi | val) ADDQ(Imm(3), di) // di +=3 JMP(LabelRef("shiftControl")) Label("twoByte") MOVWLZX(encoded.Idx(di, 1), val) // val = uint32(binary.LittleEndian.Uint16(encoded[di:])) ADDQ(Imm(2), di) // di += 2 JMP(LabelRef("shiftControl")) Label("oneByte") MOVBLZX(encoded.Idx(di, 1), val) // val = uint32(encoded[di]) INCQ(di) // di++ Label("shiftControl") Comment("Shift control byte to get next value.") SHRQ(Imm(2), cb) return val } func prefixSumSIMD(dataBytes, previousX VecVirtual) { shifted := XMM() Comment("Calculate prefix sum.") //copy dataBytes to shifted MOVOU(dataBytes, shifted) Comment("(0, 0, delta_0, delta_1)") PSLLDQ(Imm(8), shifted) Comment("(delta_0, delta_1, delta_2 + delta_0, delta_3 + delta_1)") PADDD(shifted, dataBytes) // copy dataBytes to shifted 
MOVOU(dataBytes, shifted) Comment("(0, delta_0, delta_1, delta_2 + delta_0)") PSLLDQ(Imm(4), shifted) Comment("(delta_0, delta_0 + delta_1, delta_0 + delta_1 + delta_2, delta_0 + delta_1 + delta_2 + delta_delta_3)") PADDD(shifted, dataBytes) Comment("Add the previous last decoded value to all lanes.") PADDD(previousX, dataBytes) Comment("Propagate last decoded value to all lanes of previous.") PSHUFD(Imm(0b_11_11_11_11), dataBytes, previousX) } func zigzagDecodeScalar(val GPVirtual) { Comment("Zigzag decode.") tmp := GP32() MOVL(val, tmp) SHRL(Imm(1), tmp) ANDL(Imm(1), val) NEGL(val) XORL(tmp, val) } func zigzagDecodeSIMD(dataBytes VecVirtual) { Comment("Zigzag decode.") tmpX := XMM() MOVOU(dataBytes, tmpX) Comment("(x >> 1)") PSRLL(Imm(1), tmpX) oneX := XMM() Comment("Set to all ones.") PCMPEQL(oneX, oneX) Comment("Shift to one in each lane.") PSRLL(Imm(31), oneX) Comment("(x & 1)") PAND(dataBytes, oneX) Comment("Set to all zeroes.") PXOR(dataBytes, dataBytes) Comment("-(x & 1)") PSUBL(oneX, dataBytes) Comment("(x >> 1) ^ - (x & 1)") PXOR(tmpX, dataBytes) } func main() { // Lookup table of the count of data bytes (4 to 16) referenced by a control byte. dataByteCount := GLOBL("dataByteCount", RODATA|NOPTR) for i := 0; i < 256; i++ { count := byte(i&3) + byte((i>>2)&3) + byte((i>>4)&3) + byte((i>>6)&3) + 4 DATA(i, U8(count)) } // Lookup table of the PSUFB mask referenced by a control byte to move data bytes // into the correct location. 
dataByteMask := GLOBL("dataByteMask", RODATA|NOPTR) for i := 0; i < 256; i++ { curIndex, controlByte := byte(0), byte(i) mask := [16]byte{} for j := 0; j < 4; j++ { byteCount := controlByte & 3 for k := 0; k < 4; k++ { if k <= int(byteCount) { mask[4*j+k] = curIndex curIndex++ } else { mask[4*j+k] = 0xFF } } controlByte >>= 2 } lowerHalf := binary.LittleEndian.Uint64(mask[0:8]) upperHalf := binary.LittleEndian.Uint64(mask[8:16]) DATA(16*i, U64(lowerHalf)) DATA(16*i+8, U64(upperHalf)) } TEXT("decodeUint32SSE3", NOSPLIT, "func (data []uint32, encoded []byte)") Doc("decodeUint32SSE3 decodes 4 uint32 at a time using SSE3 instructions (PSHUFB)") { encoded, encodedCap, data, dataLen, dataTail, ci, di, n, byteCountPtr, byteMaskptr := preamble(dataByteCount, dataByteMask) Label("simd") Comment("Check if less than 16 encoded bytes remain and jump to scalar.") CMPQ(di, encodedCap) JGT(LabelRef("scalar")) Comment("Check if less than 4 values remain and jump to scalar.") CMPQ(n, dataTail) JGT(LabelRef("scalar")) dataBytes, bytecount := decodeSIMDUint32(encoded, ci, di, byteCountPtr, byteMaskptr) Comment("Store 4 uint32.") MOVOU(dataBytes, data.Idx(n, 4)) Comment("Increment the indices.") ADDQ(Imm(4), n) ADDQ(bytecount, di) JMP(LabelRef("simd")) Label("scalar") Comment("Process a single value at a time.") CMPQ(n, dataLen) JE(LabelRef("done")) val := decodeScalarUint32(n, ci, di, encoded, data) MOVL(val, data.Idx(n, 4)) // data[i] = val INCQ(n) JMP(LabelRef("scalar")) Label("done") RET() } TEXT("decodeDeltaUint32SSE3", NOSPLIT, "func (data []uint32, encoded []byte, previous uint32)") Doc("decodeDeltaUint32SSE3 decodes 4 uint32 at a time using SSE3 instructions (PSHUFB)") { encoded, encodedCap, data, dataLen, dataTail, ci, di, n, byteCountPtr, byteMaskptr := preamble(dataByteCount, dataByteMask) previous := Load(Param("previous"), GP32()) previousX := XMM() MOVD(previous, previousX) PSHUFD(Imm(0b_00_00_00_00), previousX, previousX) Label("simd") Comment("Check if less than 16 
encoded bytes remain and jump to scalar.") CMPQ(di, encodedCap) JGT(LabelRef("scalar")) Comment("Check if less than 4 values remain and jump to scalar.") CMPQ(n, dataTail) JGT(LabelRef("scalar")) dataBytes, bytecount := decodeSIMDUint32(encoded, ci, di, byteCountPtr, byteMaskptr) prefixSumSIMD(dataBytes, previousX) MOVD(previousX, previous) Comment("Store 4 uint32.") MOVOU(dataBytes, data.Idx(n, 4)) Comment("Increment the indices.") ADDQ(Imm(4), n) ADDQ(bytecount, di) JMP(LabelRef("simd")) Label("scalar") Comment("Process a single value at a time.") CMPQ(n, dataLen) JE(LabelRef("done")) val := decodeScalarUint32(n, ci, di, encoded, data) Comment("Add the previous decoded value to the delta.") ADDL(val, previous) // previous += val MOVL(previous, data.Idx(n, 4)) // data[i] = val INCQ(n) JMP(LabelRef("scalar")) Label("done") RET() } TEXT("decodeInt32SSE3", NOSPLIT, "func (data []int32, encoded []byte)") Doc("decodeInt32SSE3 decodes 4 int32 at a time using SSE3 instructions (PSHUFB)") { encoded, encodedCap, data, dataLen, dataTail, ci, di, n, byteCountPtr, byteMaskptr := preamble(dataByteCount, dataByteMask) Label("simd") Comment("Check if less than 16 encoded bytes remain and jump to scalar.") CMPQ(di, encodedCap) JGT(LabelRef("scalar")) Comment("Check if less than 4 values remain and jump to scalar.") CMPQ(n, dataTail) JGT(LabelRef("scalar")) dataBytes, bytecount := decodeSIMDUint32(encoded, ci, di, byteCountPtr, byteMaskptr) zigzagDecodeSIMD(dataBytes) Comment("Store 4 uint32.") MOVOU(dataBytes, data.Idx(n, 4)) Comment("Increment the indices.") ADDQ(Imm(4), n) ADDQ(bytecount, di) JMP(LabelRef("simd")) Label("scalar") Comment("Process a single value at a time.") CMPQ(n, dataLen) JE(LabelRef("done")) val := decodeScalarUint32(n, ci, di, encoded, data) zigzagDecodeScalar(val) MOVL(val, data.Idx(n, 4)) // data[i] = val INCQ(n) JMP(LabelRef("scalar")) Label("done") RET() } TEXT("decodeDeltaInt32SSE3", NOSPLIT, "func (data []int32, encoded []byte, previous int32)") 
Doc("decodeDeltaInt32SSE3 decodes 4 int32 at a time using SSE3 instructions (PSHUFB)") { encoded, encodedCap, data, dataLen, dataTail, ci, di, n, byteCountPtr, byteMaskptr := preamble(dataByteCount, dataByteMask) previous := Load(Param("previous"), GP32()) previousX := XMM() MOVD(previous, previousX) PSHUFD(Imm(0b_00_00_00_00), previousX, previousX) Label("simd") Comment("Check if less than 16 encoded bytes remain and jump to scalar.") CMPQ(di, encodedCap) JGT(LabelRef("scalar")) Comment("Check if less than 4 values remain and jump to scalar.") CMPQ(n, dataTail) JGT(LabelRef("scalar")) dataBytes, bytecount := decodeSIMDUint32(encoded, ci, di, byteCountPtr, byteMaskptr) zigzagDecodeSIMD(dataBytes) prefixSumSIMD(dataBytes, previousX) MOVD(previousX, previous) Comment("Store 4 uint32.") MOVOU(dataBytes, data.Idx(n, 4)) Comment("Increment the indices.") ADDQ(Imm(4), n) ADDQ(bytecount, di) JMP(LabelRef("simd")) Label("scalar") Comment("Process a single value at a time.") CMPQ(n, dataLen) JE(LabelRef("done")) val := decodeScalarUint32(n, ci, di, encoded, data) zigzagDecodeScalar(val) Comment("Add the previous decoded value to the delta.") ADDL(val, previous) // previous += val MOVL(previous, data.Idx(n, 4)) // data[i] = previous INCQ(n) JMP(LabelRef("scalar")) Label("done") RET() } Generate() }
gen_decode_sse3.go
0.713132
0.464476
gen_decode_sse3.go
starcoder
package pbutil

import (
	"math"
	"math/big"
	"time"

	"google.golang.org/protobuf/types/known/timestamppb"
)

// DuplicateTimestamp duplicates the Timestamp without any check.
// The result is a fresh copy; mutating it does not affect x. A nil input
// yields a nil output.
func DuplicateTimestamp(x *timestamppb.Timestamp) *timestamppb.Timestamp {
	if x == nil {
		return nil
	}
	return &timestamppb.Timestamp{
		Seconds: x.Seconds,
		Nanos:   x.Nanos,
	}
}

// IsTimestampZero checks whether the Timestamp is zero.
// If the Timestamp is nil or zero as time.Time, it returns true. Otherwise, returns false.
// Note that an *invalid* (but non-nil) Timestamp is deliberately not treated
// as zero: the IsValid() guard below skips the AsTime() comparison for it.
func IsTimestampZero(x *timestamppb.Timestamp) bool {
	if x == nil {
		return true
	}
	if x.IsValid() && x.AsTime().IsZero() {
		return true
	}
	return false
}

// TimestampAsNanos returns unix timestamp by nanoseconds as big.Int.
// big.Int is used because Seconds*1e9 can overflow int64; the getters make
// this safe to call with a nil x (both yield zero).
func TimestampAsNanos(x *timestamppb.Timestamp) *big.Int {
	result := big.NewInt(x.GetSeconds())
	result.Mul(result, big.NewInt(1e9))
	result.Add(result, big.NewInt(int64(x.GetNanos())))
	return result
}

// timestampAsAnyseconds converts x to a unix timestamp expressed in units of
// nanosDivider nanoseconds (1 => ns, 1e3 => µs, 1e6 => ms, 1e9 => s).
// Results outside the int64 range are clamped to math.MaxInt64 / math.MinInt64.
func timestampAsAnyseconds(x *timestamppb.Timestamp, nanosDivider int64) int64 {
	b := TimestampAsNanos(x)
	b.Div(b, big.NewInt(nanosDivider))
	// Int64 is only meaningful while b fits in an int64; when it does not,
	// the clamp below overwrites the undefined value.
	result := b.Int64()
	if !b.IsInt64() {
		if b.Sign() >= 0 {
			result = math.MaxInt64
		} else {
			result = math.MinInt64
		}
	}
	return result
}

// TimestampAsNanoseconds returns unix timestamp by nanoseconds as int64.
// If the result is out of range, it returns math.MaxInt64 or math.MinInt64.
func TimestampAsNanoseconds(x *timestamppb.Timestamp) int64 {
	return timestampAsAnyseconds(x, 1)
}

// TimestampAsMicroseconds returns unix timestamp by microseconds as int64.
// If the result is out of range, it returns math.MaxInt64 or math.MinInt64.
func TimestampAsMicroseconds(x *timestamppb.Timestamp) int64 {
	return timestampAsAnyseconds(x, 1e3)
}

// TimestampAsMilliseconds returns unix timestamp by milliseconds as int64.
// If the result is out of range, it returns math.MaxInt64 or math.MinInt64.
func TimestampAsMilliseconds(x *timestamppb.Timestamp) int64 {
	return timestampAsAnyseconds(x, 1e6)
}

// TimestampAsSeconds returns unix timestamp by seconds as int64.
// If the result is out of range, it returns math.MaxInt64 or math.MinInt64.
func TimestampAsSeconds(x *timestamppb.Timestamp) int64 {
	return timestampAsAnyseconds(x, 1e9)
}

// NewTimestamp constructs a new Timestamp from the provided time t.
// If t is zero, it returns nil.
func NewTimestamp(t time.Time) *timestamppb.Timestamp {
	if t.IsZero() {
		return nil
	}
	return timestamppb.New(t)
}

// newTimestampByAnyseconds builds a Timestamp from d units of nanosMultiplier
// nanoseconds. big.Int math avoids overflow of d*nanosMultiplier; DivMod is
// Euclidean division, so nanos is always in [0, 1e9) even for negative d,
// which matches the Timestamp convention of non-negative Nanos.
// Seconds outside the int64 range are clamped to math.MaxInt64 / math.MinInt64.
// NOTE(review): when seconds are clamped, Nanos keeps the raw remainder
// rather than being normalized — confirm whether callers hit this corner.
func newTimestampByAnyseconds(d int64, nanosMultiplier int64) *timestamppb.Timestamp {
	b := big.NewInt(d)
	b = b.Mul(b, big.NewInt(nanosMultiplier))
	b, m := b.DivMod(b, big.NewInt(1e9), new(big.Int))
	// Int64 is only meaningful if b fits; the clamp below handles overflow.
	secs, nanos := b.Int64(), m.Int64()
	if !b.IsInt64() {
		if b.Sign() >= 0 {
			secs = math.MaxInt64
		} else {
			secs = math.MinInt64
		}
	}
	return &timestamppb.Timestamp{
		Seconds: secs,
		Nanos:   int32(nanos),
	}
}

// NewTimestampByNanoseconds constructs a new Timestamp from the provided int64 unix timestamp by nanoseconds.
func NewTimestampByNanoseconds(d int64) *timestamppb.Timestamp {
	return newTimestampByAnyseconds(d, 1)
}

// NewTimestampByMicroseconds constructs a new Timestamp from the provided int64 unix timestamp by microseconds.
func NewTimestampByMicroseconds(d int64) *timestamppb.Timestamp {
	return newTimestampByAnyseconds(d, 1e3)
}

// NewTimestampByMilliseconds constructs a new Timestamp from the provided int64 unix timestamp by milliseconds.
func NewTimestampByMilliseconds(d int64) *timestamppb.Timestamp {
	return newTimestampByAnyseconds(d, 1e6)
}

// NewTimestampBySeconds constructs a new Timestamp from the provided int64 unix timestamp by seconds.
func NewTimestampBySeconds(d int64) *timestamppb.Timestamp {
	return newTimestampByAnyseconds(d, 1e9)
}
timestamputils.go
0.850593
0.44559
timestamputils.go
starcoder
package iso20022

// TotalsPerBankTransactionCode2 is a set of elements used to provide the total
// sum of entries per bank transaction code (ISO 20022 message component).
// All scalar fields are optional pointers; only BankTransactionCode is required.
type TotalsPerBankTransactionCode2 struct {

	// Number of individual entries for the bank transaction code.
	NumberOfEntries *Max15NumericText `xml:"NbOfNtries,omitempty"`

	// Total of all individual entries included in the report.
	Sum *DecimalNumber `xml:"Sum,omitempty"`

	// Total amount that is the result of the netted amounts for all debit and credit entries per bank transaction code.
	TotalNetEntryAmount *DecimalNumber `xml:"TtlNetNtryAmt,omitempty"`

	// Indicates whether the total net entry amount is a credit or a debit amount.
	CreditDebitIndicator *CreditDebitCode `xml:"CdtDbtInd,omitempty"`

	// Indicates whether the bank transaction code is related to booked or forecast items.
	ForecastIndicator *TrueFalseIndicator `xml:"FcstInd,omitempty"`

	// Set of elements used to fully identify the type of underlying transaction resulting in an entry.
	BankTransactionCode *BankTransactionCodeStructure4 `xml:"BkTxCd"`

	// Set of elements used to indicate when the booked amount of money will become available, that is can be accessed and starts generating interest.
	Availability []*CashBalanceAvailability2 `xml:"Avlbty,omitempty"`
}

// SetNumberOfEntries stores value into the optional NbOfNtries element.
func (t *TotalsPerBankTransactionCode2) SetNumberOfEntries(value string) {
	t.NumberOfEntries = (*Max15NumericText)(&value)
}

// SetSum stores value into the optional Sum element.
func (t *TotalsPerBankTransactionCode2) SetSum(value string) {
	t.Sum = (*DecimalNumber)(&value)
}

// SetTotalNetEntryAmount stores value into the optional TtlNetNtryAmt element.
func (t *TotalsPerBankTransactionCode2) SetTotalNetEntryAmount(value string) {
	t.TotalNetEntryAmount = (*DecimalNumber)(&value)
}

// SetCreditDebitIndicator stores value into the optional CdtDbtInd element.
func (t *TotalsPerBankTransactionCode2) SetCreditDebitIndicator(value string) {
	t.CreditDebitIndicator = (*CreditDebitCode)(&value)
}

// SetForecastIndicator stores value into the optional FcstInd element.
func (t *TotalsPerBankTransactionCode2) SetForecastIndicator(value string) {
	t.ForecastIndicator = (*TrueFalseIndicator)(&value)
}

// AddBankTransactionCode allocates the mandatory BkTxCd element and returns it
// so the caller can populate it.
func (t *TotalsPerBankTransactionCode2) AddBankTransactionCode() *BankTransactionCodeStructure4 {
	t.BankTransactionCode = new(BankTransactionCodeStructure4)
	return t.BankTransactionCode
}

// AddAvailability appends a new Avlbty element and returns it so the caller
// can populate it.
func (t *TotalsPerBankTransactionCode2) AddAvailability() *CashBalanceAvailability2 {
	newValue := new(CashBalanceAvailability2)
	t.Availability = append(t.Availability, newValue)
	return newValue
}
TotalsPerBankTransactionCode2.go
0.781038
0.507751
TotalsPerBankTransactionCode2.go
starcoder
package horeb

import (
	"errors"
	"fmt"
	"math/rand"
	"sort"
	"strconv"
)

// UnicodeBlock values represent a contiguous range of Unicode codepoints.
// Start and End are treated as inclusive bounds (see Print).
type UnicodeBlock struct {
	Start, End rune
}

// Blocks is a map of short string labels to UnicodeBlock values.
var Blocks = map[string]UnicodeBlock{
	// Basic Multilingual Plane (0000-ffff)
	// https://en.wikipedia.org/wiki/Plane_(Unicode)#Basic_Multilingual_Plane
	"hebrew":         {0x0590, 0x05ff},
	"currency":       {0x20a0, 0x20cf},
	"letterlike":     {0x2100, 0x214f},
	"arrows":         {0x2190, 0x21ff},
	"misc_technical": {0x2300, 0x23ff},
	"geometric":      {0x25a0, 0x25ff},
	"misc_symbols":   {0x2600, 0x26ff},
	"dingbats":       {0x2700, 0x27bf},
	"cyrillic":       {0x0400, 0x04ff},
	// NOTE(review): "tibetian" is a misspelling of "tibetan", but it is a
	// public lookup key, so renaming it would break existing callers.
	"tibetian": {0x0f00, 0x0fff},
	// Supplementary Multilingual Plane (10000-1ffff)
	// https://en.wikipedia.org/wiki/Plane_(Unicode)#Supplementary_Multilingual_Plane
	"aegean_nums":        {0x10100, 0x1013f},
	"ancient_greek_nums": {0x10140, 0x1018f},
	"phaistos_disc":      {0x101d0, 0x101ff},
	"math_alnum":         {0x1d400, 0x1d7ff},
	"emoji":              {0x1f300, 0x1f5ff},
	"mahjong":            {0x1f000, 0x1f02f},
	"dominos":            {0x1f030, 0x1f09f},
	"playing_cards":      {0x1f0a0, 0x1f0ff},
	"chess":              {0x1fa00, 0x1fa53},
	"mayan_numerals":     {0x1d2e0, 0x1d2ff},
	"gothic":             {0x10330, 0x1034f},
}

// RandomBlock returns a UnicodeBlock at random from a map[string]UnicodeBlock provided as argument.
// It returns an error when the map is empty. Collecting the keys into a slice
// first makes the random choice independent of Go's map iteration order.
func RandomBlock(m map[string]UnicodeBlock) (UnicodeBlock, error) {
	nkeys := len(m)
	if nkeys < 1 {
		return UnicodeBlock{}, errors.New("Empty map provided")
	}
	var keys []string
	for k := range m {
		keys = append(keys, k)
	}
	randKey := keys[rand.Intn(nkeys)]
	return m[randKey], nil
}

// PrintBlocks prints known blocks, one "start end label" row per block in
// alphabetical label order. When all is true it also prints every printable
// rune of each block.
func PrintBlocks(all bool) {
	// Create a slice of alphabetically-sorted keys.
	var keys []string
	for k := range Blocks {
		keys = append(keys, k)
	}
	sort.Strings(keys)
	for _, k := range keys {
		b := Blocks[k]
		fmt.Printf("%5x %5x %s\n", b.Start, b.End, k)
		if all {
			b.Print()
			fmt.Println()
		}
	}
}

// RandomRune returns a rune at random from UnicodeBlock.
func (b UnicodeBlock) RandomRune() rune {
	// Draw uniformly from the inclusive range [b.Start, b.End].
	// rand.Intn's upper bound is exclusive, hence the +1. This fixes the
	// original off-by-one, which produced [Start+1, End] (Start was never
	// returned) and panicked via rand.Intn(0) when Start == End.
	return rune(rand.Intn(int(b.End-b.Start)+1) + int(b.Start))
}

// Print prints all printable runes in UnicodeBlock, space-separated, followed
// by a newline. Codepoints that strconv.IsPrint rejects are skipped.
func (b UnicodeBlock) Print() {
	for i := b.Start; i <= b.End; i++ {
		// Only print printable runes.
		if !strconv.IsPrint(i) {
			continue
		}
		fmt.Printf("%c ", i)
	}
	fmt.Println()
}

// PrintRandom prints n random runes from UnicodeBlock, each followed by the
// separator ofs, then a trailing newline.
func (b UnicodeBlock) PrintRandom(n int, ofs string) {
	for i := 0; i < n; i++ {
		fmt.Printf("%c%s", b.RandomRune(), ofs)
	}
	fmt.Println()
}
pkg/horeb/blocks.go
0.586878
0.412767
blocks.go
starcoder
package support

import (
	"fmt"
	"unsafe"
)

// CheckSize checks if the length of a byte slice is equal to the expected length,
// and panics when this is not the case.
func CheckSize(buf []byte, expected int, descrip string) {
	if len(buf) == expected {
		return
	}
	panic(fmt.Sprintf("Incorrect %s buffer size, expected (%d), got (%d).", descrip, expected, len(buf)))
}

// CheckSizeMin checks if the length of a byte slice is greater or equal than a minimum length,
// and panics when this is not the case.
func CheckSizeMin(buf []byte, min int, descrip string) {
	if len(buf) >= min {
		return
	}
	panic(fmt.Sprintf("Incorrect %s buffer size, expected (>%d), got (%d).", descrip, min, len(buf)))
}

// CheckIntInRange checks if the size of an integer is between a lower and upper boundaries.
func CheckIntInRange(n int, min int, max int, descrip string) {
	if min <= n && n <= max {
		return
	}
	panic(fmt.Sprintf("Incorrect %s size, expected (%d - %d), got (%d).", descrip, min, max, n))
}

// CheckSizeInRange checks if the length of a byte slice is between a lower and upper boundaries.
func CheckSizeInRange(buf []byte, min int, max int, descrip string) {
	if n := len(buf); min <= n && n <= max {
		return
	}
	panic(fmt.Sprintf("Incorrect %s buffer size, expected (%d - %d), got (%d).", descrip, min, max, len(buf)))
}

// CheckSizeGreaterOrEqual checks if the length of a byte slice is greater or equal to that of a second byte slice.
func CheckSizeGreaterOrEqual(a, b []byte, aDescription, bDescription string) {
	if len(a) >= len(b) {
		return
	}
	panic(fmt.Sprintf("%s smaller than %s", aDescription, bDescription))
}

// NilPanic is a shorthand that results in a panic when called with true.
func NilPanic(t bool, description string) {
	if !t {
		return
	}
	panic(description + " is a nil pointer")
}

// BytePointer returns a pointer to the start of a byte slice, or nil when the slice is empty.
func BytePointer(b []byte) *uint8 {
	// Empty slices have no first element to point at.
	if len(b) == 0 {
		return nil
	}
	return &b[0]
}

// AlignedSlice returns a byte slice of length size whose first element sits on
// an address that is a multiple of alignment.
func AlignedSlice(size, alignment int) []byte {
	// Over-allocate by one alignment unit so a suitably aligned window of
	// the requested size always exists inside the backing array.
	backing := make([]byte, size+alignment)
	addr := int(uintptr(unsafe.Pointer(&backing[0])))
	shift := alignment - addr%alignment
	return backing[shift : shift+size]
}
vendor/github.com/GoKillers/libsodium-go/support/support.go
0.72331
0.609437
support.go
starcoder
package leetcode // https://leetcode-cn.com/problems/asteroid-collision/ /** 735. 行星碰撞 给定一个整数数组 asteroids,表示在同一行的行星。 对于数组中的每一个元素,其绝对值表示行星的大小,正负表示行星的移动方向(正表示向右移动,负表示向左移动)。每一颗行星以相同的速度移动。 找出碰撞后剩下的所有行星。碰撞规则:两个行星相互碰撞,较小的行星会爆炸。如果两颗行星大小相同,则两颗行星都会爆炸。两颗移动方向相同的行星,永远不会发生碰撞。 示例 1: 输入: asteroids = [5, 10, -5] 输出: [5, 10] 解释: 10 和 -5 碰撞后只剩下 10。 5 和 10 永远不会发生碰撞。 示例 2: 输入: asteroids = [8, -8] 输出: [] 解释: 8 和 -8 碰撞后,两者都发生爆炸。 示例 3: 输入: asteroids = [10, 2, -5] 输出: [10] 解释: 2 和 -5 发生碰撞后剩下 -5。10 和 -5 发生碰撞后剩下 10。 示例 4: 输入: asteroids = [-2, -1, 1, 2] 输出: [-2, -1, 1, 2] 解释: -2 和 -1 向左移动,而 1 和 2 向右移动。 由于移动方向相同的行星不会发生碰撞,所以最终没有行星发生碰撞。 说明: 数组 asteroids 的长度不超过 10000。 每一颗行星的大小都是非零整数,范围是 [-1000, 1000] 。 */ func asteroidCollision(asteroids []int) []int { var ( res []int stack []int index int = -1 ) for i := range asteroids { if index == -1 && asteroids[i] < 0 { res = append(res, asteroids[i]) } else if asteroids[i] > 0 { index++ stack = append(stack, asteroids[i]) } else { flag := false for index > -1 { delta := stack[index] + asteroids[i] if delta == 0 { index-- flag = true break } else if delta > 0 { break } else { index-- } } if !flag && index == -1 { res = append(res, asteroids[i]) } if index > -1 { stack = stack[:index+1] } else { stack = []int{} } } } for i := range stack { res = append(res, stack[i]) } if len(res) == 0 { res = []int{} } return res } // 2. 优化下空间复杂度,好像也没啥变化 func asteroidCollision2(asteroids []int) []int { var ( stack []int index int = -1 ) for i := range asteroids { if index != -1 && stack[index] > 0 && asteroids[i] < 0 { var delta int for index > -1 && stack[index] > 0 { delta = stack[index] + asteroids[i] if delta == 0 { index-- break } else if delta > 0 { break } else { index-- } } if index > -1 { stack = stack[:index+1] } else { stack = []int{} } if delta < 0 { stack = append(stack, asteroids[i]) index++ } } else { stack = append(stack, asteroids[i]) index++ } } return stack }
leetcode/golang/735_asteroid_collision.go
0.532911
0.538194
735_asteroid_collision.go
starcoder
package schemax /* Macros is a map structure associating a string value known as "macro" name with a dot-delimited ASN.1 object identifier. Use of this is limited to scenarios involving LDAP implementations that support OID macros for certain schema definition elements. */ type Macros map[string]OID func (r *Macros) IsZero() bool { return r == nil } func isAnAlias(key string) (alias, rest string, is, ok bool) { if 0 == len(key) || len(key) > 512 { return } if isNumericalOID(key) { ok = true rest = key return } var done bool var idx int = -1 for _, c := range key { if done { break } ch := rune(c) switch { case ch == ':' || ch == '.': idx = indexRune(key, ch) done = true case runeIsLetter(ch) || runeIsDigit(ch) || ch == '-': alias += string(ch) default: // unsupported char alias = `` return } } is = len(alias) > 0 ok = (len(rest) > 0 || is) // no delim detected but there was // an alias (could be literal alias // to oid w/o leaf node) ... if idx >= 0 { rest = key[idx+1:] } return } /* Set assigns the provided key and oid to the receiver instance. The arguments must both be strings. The first argument must be the alias name, while the second must be its ASN.1 dotted object identifier. Subsequent values are ignored. */ func (r *Macros) Set(als ...interface{}) { if r.IsZero() { *r = NewMacros() } if len(als) < 2 { return } var alias string var oid string var ok bool if alias, ok = als[0].(string); !ok { return } if oid, ok = als[1].(string); !ok { return } if !isNumericalOID(oid) { return } // avoid duplicate alias or oid entries ... for i, v := range *r { if v.String() == oid || equalFold(alias, i) { continue } } R := *r R[alias] = NewOID(oid) *r = R return } /* Resolve returns a registered OID and a boolean value indicative of a successful lookup. A search is conducted using the provided alias key name, with or without the colon:number suffix included. 
*/ func (r Macros) Resolve(x interface{}) (oid OID, ok bool) { // If its already an OID, don't bother // doing a lookup, just return it as-is. if oid, ok = x.(OID); ok { return } // If it isn't a string-based name, // then we can't go any further. key, assert := x.(string) if !assert { return } var alias string var rest string var is bool alias, rest, is, ok = isAnAlias(key) if !is { return } switch { case is: oid, ok = r[alias] if !ok { return } if len(rest) > 0 { oid = NewOID(oid.String() + `.` + rest) } default: if len(rest) == 0 { return } oid = NewOID(rest) } ok = isNumericalOID(oid.String()) return } /* NewMacros returns a new instance of Macros, intended for use in resolving OID aliases. */ func NewMacros() Macros { return make(Macros, 0) } /* Extensions is a map structure associating a string extension name (e.g.: X-ORIGIN) with a non-zero string value. */ type Extensions map[string][]string /* Exists returns a boolean value indicative of whether the named label exists within the receiver instance. */ func (r Extensions) Exists(label string) (exists bool) { for k, _ := range r { if k == label { exists = true return } } return } func (r Extensions) IsZero() bool { if r == nil { return true } return len(r) == 0 } /* Set assigns the provided label and value(s) to the receiver instance. The first argument must be a string and is interpreted as a label (e.g.: X-ORIGIN). All subsequent values (strings or slices of strings) are interpreted as values to be assigned to said label. 
*/ func (r Extensions) Set(x ...interface{}) { if len(x) < 2 { return } label, ok := x[0].(string) if !ok { return } if r.Exists(label) || !r.labelIsValid(label) { return } var values []string for i := 1; i < len(x); i++ { switch tv := x[i].(type) { case string: if len(tv) == 0 { return } values = append(values, tv) case []string: for _, z := range tv { if len(z) == 0 { return } values = append(values, z) } default: return } } if ok = len(values) > 0; ok { r[label] = values } return } /* Equals compares the receiver to the provided interface value (which must be of the same effective type). A comparison is performed, and an equality-indicative boolean value is returned. */ func (r Extensions) Equal(x interface{}) (equals bool) { if assert, ok := x.(Extensions); ok { if len(assert) != len(r) { return } for k, v := range r { v2, exists := assert[k] if !exists { return } if len(v2) != len(v) { return } for i, z := range v2 { if equals = equalFold(z, v[i]); !equals { return } } } equals = true } return } /* String is a stringer method that returns the receiver data as a compliant schema definition component. */ func (r Extensions) String() (exts string) { for _, v := range r.strings() { exts += v + ` ` } if len(exts) == 0 { return } if exts[len(exts)-1] == ' ' { exts = exts[:len(exts)-1] } return } func (r Extensions) strings() (exts []string) { exts = make([]string, len(r)) ct := 0 for k, v := range r { if len(v) == 1 { exts[ct] = k + ` '` + v[0] + `'` } else if len(v) > 1 { vals := make([]string, len(v)) for mi, mv := range v { val := `'` + mv + `'` vals[mi] = val } exts[ct] = k + ` ( ` + join(vals, ` `) + ` )` } ct++ } return } /* NewExtensions returns a new instance of Extensions, intended for assignment to any definition type. */ func NewExtensions() Extensions { return make(Extensions, 0) }
map.go
0.783906
0.403332
map.go
starcoder
package hierarchical

import (
	"fmt"

	"github.com/knightjdr/prohits-viz-analysis/pkg/heatmap/dimensions"
	"github.com/knightjdr/prohits-viz-analysis/pkg/interactive"
	"github.com/knightjdr/prohits-viz-analysis/pkg/minimap"
	"github.com/knightjdr/prohits-viz-analysis/pkg/svg"
	"github.com/knightjdr/prohits-viz-analysis/pkg/svg/heatmap"
	"github.com/knightjdr/prohits-viz-analysis/pkg/treeview"
	"github.com/knightjdr/prohits-viz-analysis/pkg/types"
)

// WriteDistance images.
// When settings.WriteDistance is set, it emits the SVG, legend, minimap,
// interactive JSON and treeview outputs for the condition and/or readout
// distance matrices, whichever are non-empty.
func WriteDistance(data *SortedData, clusteredData HclustData, settings types.Settings) {
	if settings.WriteDistance {
		if len(data.ConditionDist) > 0 {
			createDistanceSVG(data.ConditionDist, data.Matrices.Conditions, settings, settings.Condition)
			createDistanceLegend(settings, settings.Condition)
			createDistanceMinimap(data.ConditionDist, settings, settings.Condition)
			createDistanceInteractive(data.ConditionDist, data.Matrices.Conditions, settings, settings.Condition)
			createConditionDistanceTreeview(data, clusteredData, settings)
		}
		if len(data.ReadoutDist) > 0 {
			createDistanceSVG(data.ReadoutDist, data.Matrices.Readouts, settings, settings.Readout)
			createDistanceLegend(settings, settings.Readout)
			createDistanceMinimap(data.ReadoutDist, settings, settings.Readout)
			createDistanceInteractive(data.ReadoutDist, data.Matrices.Readouts, settings, settings.Readout)
			createReadoutDistanceTreeview(data, clusteredData, settings)
		}
	}
}

// createDistanceSVG draws the symmetric distance matrix as a heatmap SVG named
// after filehandle (the condition or readout label).
func createDistanceSVG(matrix [][]float64, labels []string, settings types.Settings, filehandle string) {
	dims := dimensions.Calculate(matrix, labels, labels, false)

	filename := fmt.Sprintf("svg/%[1]s-%[1]s.svg", filehandle)

	// Local variable deliberately shadows the imported heatmap package.
	heatmap := svg.InitializeHeatmap()
	heatmap.CellSize = dims.CellSize
	heatmap.Columns = labels
	heatmap.FillColor = "blue"
	heatmap.FillMax = 1
	heatmap.FillMin = 0
	heatmap.FontSize = dims.FontSize
	heatmap.Invert = true
	heatmap.LeftMargin = dims.LeftMargin
	heatmap.Matrix = matrix
	heatmap.PlotHeight = dims.PlotHeight
	heatmap.PlotWidth = dims.PlotWidth
	heatmap.Rows = labels
	heatmap.SvgHeight = dims.SvgHeight
	heatmap.SvgWidth = dims.SvgWidth
	heatmap.TopMargin = dims.TopMargin
	// FIX: axis labels previously hardcoded settings.Condition, which was
	// wrong for the readout distance image; use the label this call is for
	// (matches createDistanceInteractive, which already uses its title).
	heatmap.XLabel = filehandle
	heatmap.YLabel = filehandle

	heatmap.Draw(filename)
}

// createDistanceLegend writes the blue 0-1 inverted color-scale legend for the
// named distance image.
func createDistanceLegend(settings types.Settings, title string) {
	legendData := heatmap.Legend{
		Filename:  fmt.Sprintf("svg/%s-distance-legend.svg", title),
		NumColors: 101,
		Settings: types.Settings{
			FillColor:   "blue",
			FillMax:     1,
			FillMin:     0,
			InvertColor: true,
		},
		Title: fmt.Sprintf("Distance - %s", title),
	}
	heatmap.CreateLegend(legendData)
}

// createDistanceMinimap renders a PNG minimap of the distance matrix.
func createDistanceMinimap(matrix [][]float64, settings types.Settings, title string) {
	minimapData := &minimap.Data{
		DownsampleThreshold: 1000,
		Filename:            fmt.Sprintf("minimap/%s.png", title),
		ImageType:           "heatmap",
		Matrices: &types.Matrices{
			Abundance: matrix,
		},
		Settings: types.Settings{
			FillColor:   "blue",
			FillMax:     1,
			FillMin:     0,
			InvertColor: true,
		},
	}
	minimap.Create(minimapData)
}

// createDistanceInteractive writes the interactive-viewer JSON for the
// distance matrix, referencing the minimap produced above.
func createDistanceInteractive(matrix [][]float64, labels []string, settings types.Settings, title string) {
	interactiveData := &interactive.HeatmapData{
		AnalysisType: "heatmap",
		Filename:     fmt.Sprintf("interactive/%[1]s-%[1]s.json", title),
		Matrices: &types.Matrices{
			Abundance:  matrix,
			Conditions: labels,
			Readouts:   labels,
		},
		Minimap:    fmt.Sprintf("minimap/%s.png", title),
		Parameters: settings,
		Settings: map[string]interface{}{
			"abundanceCap":  1,
			"abundanceType": "positive",
			"fillColor":     "blue",
			"fillMax":       1,
			"fillMin":       0,
			"imageType":     "heatmap",
			"invertColor":   true,
			"minAbundance":  0,
			"primaryFilter": 0,
		},
	}
	interactiveData.Parameters.XLabel = title
	interactiveData.Parameters.YLabel = title
	interactive.CreateHeatmap(interactiveData)
}

// createConditionDistanceTreeview exports the condition distance matrix plus
// its dendrogram in treeview format.
func createConditionDistanceTreeview(data *SortedData, clusteredData HclustData, settings types.Settings) {
	treeviewData := treeview.Data{
		Filename: fmt.Sprintf("treeview/%[1]s-%[1]s", settings.Condition),
		Matrix:   data.ConditionDist,
		Names: treeview.Names{
			Columns:         clusteredData.Tree["condition"].Order,
			Rows:            clusteredData.Tree["condition"].Order,
			UnsortedColumns: clusteredData.UnsortedNames["condition"],
			UnsortedRows:    clusteredData.UnsortedNames["condition"],
		},
		Trees: treeview.Trees{
			Column: clusteredData.Dendrogram["condition"],
			Row:    clusteredData.Dendrogram["condition"],
		},
	}
	treeview.Export(treeviewData)
}

// createReadoutDistanceTreeview exports the readout distance matrix plus its
// dendrogram in treeview format.
func createReadoutDistanceTreeview(data *SortedData, clusteredData HclustData, settings types.Settings) {
	treeviewData := treeview.Data{
		Filename: fmt.Sprintf("treeview/%[1]s-%[1]s", settings.Readout),
		Matrix:   data.ReadoutDist,
		Names: treeview.Names{
			Columns:         clusteredData.Tree["readout"].Order,
			Rows:            clusteredData.Tree["readout"].Order,
			UnsortedColumns: clusteredData.UnsortedNames["readout"],
			UnsortedRows:    clusteredData.UnsortedNames["readout"],
		},
		Trees: treeview.Trees{
			Column: clusteredData.Dendrogram["readout"],
			Row:    clusteredData.Dendrogram["readout"],
		},
	}
	treeview.Export(treeviewData)
}
pkg/tools/analyze/dotplot/hierarchical/writedistance.go
0.540924
0.464234
writedistance.go
starcoder
package sensors

import (
	"encoding/binary"
	"fmt"

	"github.com/tinogoehlert/gobuki/utils"
)

// GyroData stores XYZ Data from Gyro
type GyroData struct {
	X float64
	Y float64
	Z float64
}

// digitToDPS converts raw ADC digits to degrees per second
// (per the L3G4200D datasheet).
const digitToDPS float64 = 0.00875

// ToVelocity rotates 90 degree counterclockwise about z-axis.
// Sensing axis of 3d gyro is not match with robot. So, below conversion will needed.
func (gd *GyroData) ToVelocity(x, y, z uint16) {
	gd.X = -digitToDPS * float64(y)
	gd.Y = digitToDPS * float64(x)
	gd.Z = digitToDPS * float64(z)
}

// Gyro Raw ADC data of digital 3D gyro; L3G4200D
// Due to difference of acquisition rate and update rate, 2-3 data will be arrived at once.
// Digit to deg/s ratio is 0.00875, it comes from datasheet of 3d gyro.
type Gyro struct {
	FrameID          uint8
	Data             []GyroData // ring buffer of decoded samples
	newest           *GyroData  // most recent calibrated sample (nil before calibration)
	last             *GyroData  // sample preceding newest
	delta            GyroData   // per-axis calibration offsets
	tolerance        GyroData
	bucketSize       int
	readCount        int // next write slot in Data; also counts samples pre-calibration
	calibrateSamples int
	calibrated       bool
}

// NewGyroADC generates a new Gyro Objects from bytes
func NewGyroADC(bucketSize, calibrateSamples int) *Gyro {
	gyro := Gyro{
		Data:             make([]GyroData, bucketSize),
		bucketSize:       bucketSize,
		calibrateSamples: calibrateSamples,
	}
	return &gyro
}

// Changed check if values changed significantly, i.e. whether any axis of the
// newest sample differs from the previous one by more than tolerance.
func (g *Gyro) Changed(tolerance float64) bool {
	if g.last == nil || g.newest == nil {
		return false
	}
	// FIX: the original compared the Z axis twice and never compared Y.
	if !utils.Cmpf(g.newest.X, g.last.X, tolerance) ||
		!utils.Cmpf(g.newest.Y, g.last.Y, tolerance) ||
		!utils.Cmpf(g.newest.Z, g.last.Z, tolerance) {
		return true
	}
	return false
}

// GetNewData returns a copy of the newest gyro data, or nil when no calibrated
// sample has been processed yet (guards against a nil dereference).
func (g *Gyro) GetNewData() *GyroData {
	if g.newest == nil {
		return nil
	}
	return &GyroData{
		X: g.newest.X,
		Y: g.newest.Y,
		Z: g.newest.Z,
	}
}

// FromBytes reads raw gyro data from bytes. Layout: p[0] frame id, p[1]
// payload length, then little-endian uint16 triples (x, y, z) of 6 bytes each
// starting at p[2].
func (g *Gyro) FromBytes(p []byte) error {
	if len(p) < 8 {
		return fmt.Errorf("gyroscope currupt read")
	}
	g.FrameID = p[0]
	dataLen := int(p[1])
	if dataLen <= 0 || dataLen > len(p) {
		return fmt.Errorf("gyroscope length missmatch: %d > %d", dataLen, len(p))
	}
	ec := int(dataLen / 3)
	// Guard against short packets: each sample consumes 6 bytes starting at
	// offset 2; without this check a malformed length field panics below.
	if 2+ec*6 > len(p) {
		return fmt.Errorf("gyroscope length missmatch: %d > %d", dataLen, len(p))
	}
	for i := 0; i < ec; i++ {
		// Wrap the ring buffer.
		if g.readCount >= g.bucketSize {
			g.readCount = 0
		}
		v := p[2+i*6:]
		g.Data[g.readCount].ToVelocity(
			binary.LittleEndian.Uint16(v),
			binary.LittleEndian.Uint16(v[2:]),
			binary.LittleEndian.Uint16(v[4:]),
		)
		g.last = g.newest
		if g.calibrated {
			// NOTE(review): ToVelocity already applies digitToDPS, so the
			// factor below is applied twice; kept as-is pending confirmation
			// of the expected output units.
			g.Data[g.readCount].X = (g.Data[g.readCount].X - g.delta.X) * digitToDPS
			g.Data[g.readCount].Y = (g.Data[g.readCount].Y - g.delta.Y) * digitToDPS
			// FIX: the original read the Y component when computing Z.
			g.Data[g.readCount].Z = (g.Data[g.readCount].Z - g.delta.Z) * digitToDPS
			g.newest = &g.Data[g.readCount]
		}
		g.readCount++
	}
	// Run one-shot calibration once enough raw samples were collected.
	if g.readCount >= g.calibrateSamples && !g.calibrated {
		g.Calibrate(g.calibrateSamples)
	}
	return nil
}

// Calibrate computes per-axis zero offsets by averaging the first samples
// collected while the robot is at rest. It runs at most once.
func (g *Gyro) Calibrate(samples int) {
	if samples > g.readCount || g.calibrated {
		return
	}
	// Reset values
	var (
		sumX float64
		sumY float64
		sumZ float64
	)
	// Read n-samples. (The original also accumulated sum-of-squares values
	// that were never used; they have been removed.)
	for i := 0; i < samples; i++ {
		sumX += g.Data[i].X
		sumY += g.Data[i].Y
		sumZ += g.Data[i].Z
	}
	fsamples := float64(samples)
	// Calculate delta vectors.
	// FIX: the original assigned the Z average to delta.Y, overwriting the
	// Y offset and leaving delta.Z at zero.
	g.delta.X = sumX / fsamples
	g.delta.Y = sumY / fsamples
	g.delta.Z = sumZ / fsamples
	g.calibrated = true
}

// GetID Gets the sub-payload identifier of the gyro packet.
func (g *Gyro) GetID() byte {
	return 0x0D
}

// GetName Gets the Name of the SubPacket
func (g Gyro) GetName() string {
	return "Gyro"
}
sensors/gyro.go
0.76454
0.544378
gyro.go
starcoder
// Package awsutil converts flat, CLI-style string parameters into the typed
// values expected by AWS SDK request structs, using reflection over the
// target shape.
package awsutil

import (
	"fmt"
	"github.com/aws/aws-sdk-go/aws/endpoints"
	"reflect"
	"sort"
	"strconv"
	"strings"
	"time"
)

var (
	// scalarTypes lists the reflect.Kind names treated as scalars.
	scalarTypes = []string{
		"bool",
		"int", "int8", "int16", "int32", "int64",
		"uint", "uint8", "uint16", "uint32", "uint64",
		"float32", "float64",
		"string",
	}
	// complexTypes lists the reflect.Kind names treated as composites.
	complexTypes = []string{
		"slice",
		"struct",
		"map",
	}
)

// resolveParameter interprets a string parameter as either an opaque string,
// a list of key=value maps (space separated), or a plain string list
// (comma separated without '='), guided by the target shape.
func resolveParameter(parent string, parameter string, shape reflect.Value) interface{} {
	// AssumeRolePolicyDocument comes as a string. we MUST NOT modify it
	// This may apply to other objects that AWS expects to receive as a JSON-string
	// reference: https://docs.aws.amazon.com/sdk-for-go/api/service/iam/#CreateRoleInput
	if strings.Contains(parameter, "Statement") {
		return parameter
	}
	if strings.Contains(parameter, " ") {
		return resolveListMapParameter(parent, strings.Split(parameter, " "), shape)
	}
	if strings.Contains(parameter, ",") && !strings.Contains(parameter, "=") {
		return strings.Split(parameter, ",")
	}
	return parameter
}

// resolveListMapParameter resolves each element of parameters into a
// key=value map and returns them in order.
func resolveListMapParameter(parent string, parameters []string, shape reflect.Value) []map[string]string {
	parameterListMap := make([]map[string]string, 0)
	for _, parameter := range parameters {
		parameterListMap = append(parameterListMap, resolveMapParameter(parent, parameter, shape))
	}
	return parameterListMap
}

// underlayParameter normalises a raw parameter string: strips tabs/newlines,
// removes internal spaces when brackets/braces are present, and peels one
// outer layer of '{...}' or '[...]' when it wraps the whole string and
// matches the target shape's kind.
func underlayParameter(parameter string, shape reflect.Value) string {
	shapeKind := getShapeKind(shape)
	parameter = strings.ReplaceAll(parameter, "\t", "")
	parameter = strings.ReplaceAll(parameter, "\n", "")
	if strings.Contains(parameter, "{") || strings.Contains(parameter, "[") {
		// Collapse spaces only for bracketed payloads so nested structures
		// become a single token stream.
		parameter = strings.Join(strings.FieldsFunc(parameter, func(c rune) bool {
			return c == ' '
		}), "")
	}
	if shapeKind == reflect.Struct || shapeKind == reflect.Map {
		structPrefixIndex := strings.IndexByte(parameter, '{')
		structSuffixIndex := strings.IndexByte(parameter, '}')
		// Peel the braces only when they enclose the entire parameter.
		if structPrefixIndex == 0 && structSuffixIndex == len(parameter)-1 {
			parameter = parameter[1:structSuffixIndex]
		}
	} else if shapeKind == reflect.Slice {
		slicePrefixIndex := strings.IndexByte(parameter, '[')
		sliceSuffixIndex := strings.LastIndexByte(parameter, ']')
		if slicePrefixIndex == 0 && sliceSuffixIndex == len(parameter)-1 {
			parameter = parameter[1:sliceSuffixIndex]
		}
	}
	return strings.TrimSpace(parameter)
}

// resolveMapParameter splits a comma-separated "k=v,k2=v2" string into a map.
// Commas that appear inside nested '[...]' or '{...}' regions are re-attached
// to the previous item instead of starting a new one, tracked by the
// sliceIndex/mapIndex depth counters.
func resolveMapParameter(parent string, parameter string, shape reflect.Value) map[string]string {
	unpacked := make(map[string]string)
	var concatItems []string
	lastItemIndex := -1
	// Depth counters: -1 means "not inside"; brackets/braces seen in a chunk
	// increment/decrement them.
	mapIndex := -1
	sliceIndex := -1
	for _, keyValue := range strings.SplitAfter(parameter, ",") {
		keyValue = strings.ReplaceAll(keyValue, ",", "")
		if sliceIndex == -1 && mapIndex == -1 && strings.Contains(keyValue, "=") {
			// Top-level k=v chunk: starts a new item.
			concatItems = append(concatItems, keyValue)
			if mapIndex == -1 && sliceIndex == -1 {
				lastItemIndex++
			}
		} else {
			// Continuation of a nested value: glue it (with its comma) onto
			// the current item, if one exists.
			if concatItems == nil || 0 > lastItemIndex || lastItemIndex >= len(concatItems) {
				continue
			}
			concatItems[lastItemIndex] += fmt.Sprintf("%s%s", ",", keyValue)
		}
		// Update nesting depth after classifying the chunk.
		if strings.Contains(keyValue, "[") {
			sliceIndex += 1
		}
		if strings.Contains(keyValue, "{") {
			mapIndex += 1
		}
		if strings.Contains(keyValue, "}") {
			mapIndex -= 1
		}
		if strings.Contains(keyValue, "]") {
			sliceIndex -= 1
		}
	}
	for _, keyValue := range concatItems {
		itemKeyValue := strings.Split(keyValue, "=")
		// Every concatItems entry was admitted because it contains '=', so
		// itemKeyValue has at least two elements here.
		key, value := itemKeyValue[0], itemKeyValue[1]
		if len(itemKeyValue) > 2 {
			// Value itself contained '=': rejoin everything after the key.
			value = strings.Join(itemKeyValue[1:], "=")
		}
		unpacked[key] = value
	}
	return unpacked
}

// correctShape dereferences pointer-like shapes to an addressable zero value
// of the pointee type, or returns an invalid Value for unusable input.
func correctShape(shape reflect.Value) reflect.Value {
	if !shape.IsValid() {
		return reflect.ValueOf(nil)
	}
	shapeKind := shape.Kind()
	if shapeKind == reflect.Invalid {
		return reflect.ValueOf(nil)
	}
	for shapeKind == reflect.Ptr || shapeKind == reflect.UnsafePointer || shapeKind == reflect.Uintptr {
		shapeKind = shape.Type().Elem().Kind()
		// Allocate a fresh zero value of the pointee so fields are settable.
		shape = reflect.Indirect(reflect.New(shape.Type().Elem()))
	}
	return shape
}

// getShapeKind returns the Kind of shape after unwrapping pointer layers,
// without allocating.
func getShapeKind(shape reflect.Value) reflect.Kind {
	if !shape.IsValid() {
		return reflect.Invalid
	}
	shapeKind := shape.Kind()
	if shapeKind == reflect.Invalid {
		return reflect.Invalid
	}
	shapeType := shape.Type()
	for shapeKind == reflect.Ptr || shapeKind == reflect.UnsafePointer || shapeKind == reflect.Uintptr {
		shapeKind = shapeType.Elem().Kind()
		shapeType = shapeType.Elem()
	}
	return shapeKind
}

// getSliceKind returns the element Kind of a slice shape, unwrapping nested
// slices and pointers; Invalid when shape is not a slice.
func getSliceKind(shape reflect.Value) reflect.Kind {
	shapeKind := shape.Kind()
	if shapeKind != reflect.Slice {
		return reflect.Invalid
	}
	shapeType := shape.Type().Elem()
	// NOTE(review): shapeKind still holds reflect.Slice on entry while
	// shapeType is already the element type, so the first iteration calls
	// Elem() on the element type — confirm this is intended for the shapes
	// actually passed in (it is only safe when the element is itself a
	// slice/pointer).
	for shapeKind == reflect.Slice || shapeKind == reflect.Ptr || shapeKind == reflect.UnsafePointer || shapeKind == reflect.Uintptr {
		shapeKind = shapeType.Elem().Kind()
		shapeType = shapeType.Elem()
	}
	return shapeKind
}

// getSliceShape returns a zero value of a slice shape's (fully unwrapped)
// element type; an invalid Value when shape is not a slice.
func getSliceShape(shape reflect.Value) reflect.Value {
	shapeKind := shape.Kind()
	if shapeKind != reflect.Slice {
		return reflect.ValueOf(nil)
	}
	shapeType := shape.Type().Elem()
	// NOTE(review): same entry-condition quirk as getSliceKind above.
	for shapeKind == reflect.Slice || shapeKind == reflect.Ptr || shapeKind == reflect.UnsafePointer || shapeKind == reflect.Uintptr {
		shapeKind = shapeType.Elem().Kind()
		shapeType = shapeType.Elem()
	}
	return reflect.Indirect(reflect.New(shapeType))
}

// isScalar reports whether shapeType names one of the scalar kinds.
func isScalar(shapeType reflect.Kind) bool {
	for _, scalarType := range scalarTypes {
		if strings.EqualFold(scalarType, shapeType.String()) {
			return true
		}
	}
	return false
}

// isComplex reports whether shapeType names one of the composite kinds.
func isComplex(shapeType reflect.Kind) bool {
	for _, complexType := range complexTypes {
		if strings.EqualFold(complexType, shapeType.String()) {
			return true
		}
	}
	return false
}

// unpackStruct maps each parameter key (case-insensitively) to a struct field
// of shape and unpacks the value against that field's type. Keys with no
// matching field are silently dropped.
func unpackStruct(parameters map[string]string, shape reflect.Value) map[string]interface{} {
	unpacked := make(map[string]interface{})
	for key, value := range parameters {
		structField, ok := shape.Type().FieldByNameFunc(func(s string) bool {
			return strings.EqualFold(s, key)
		})
		if !ok {
			continue
		}
		structFieldShape := shape.FieldByIndex(structField.Index)
		shapeKind := getShapeKind(structFieldShape)
		unpacked[structField.Name] = UnpackParameter(key, value, structFieldShape, shapeKind)
	}
	return unpacked
}

// getSlices splits a slice-valued parameter into its element substrings.
// With no brackets/braces present, it splits on spaces (preferred) or commas;
// otherwise only commas sitting exactly between a closing and an opening
// bracket/brace ("],[" or "},{") are treated as element boundaries.
func getSlices(parameter string) []string {
	spaceIndexes := getAllIndexes(parameter, ' ')
	delimiterIndexes := getAllIndexes(parameter, ',')
	slicePrefixIndexes := getAllIndexes(parameter, '[')
	sliceSuffixIndexes := getAllIndexes(parameter, ']')
	mapPrefixIndexes := getAllIndexes(parameter, '{')
	mapSuffixIndexes := getAllIndexes(parameter, '}')
	realDelimiterIndexes := make([]int, 0)
	if len(slicePrefixIndexes) == 0 && len(sliceSuffixIndexes) == 0 && len(mapPrefixIndexes) == 0 && len(mapSuffixIndexes) == 0 {
		if spaceIndexes != nil {
			realDelimiterIndexes = spaceIndexes
		} else {
			realDelimiterIndexes = delimiterIndexes
		}
	} else {
		for _, index := range delimiterIndexes {
			// A comma is a real element boundary only when it is flanked by
			// a suffix immediately before and a prefix immediately after.
			betweenSlices := betweenPrefixAndSuffix(index, slicePrefixIndexes, sliceSuffixIndexes)
			betweenMaps := betweenPrefixAndSuffix(index, mapPrefixIndexes, mapSuffixIndexes)
			if betweenSlices {
				realDelimiterIndexes = append(realDelimiterIndexes, index)
			} else if betweenMaps {
				realDelimiterIndexes = append(realDelimiterIndexes, index)
			}
		}
	}
	slices := make([]string, 0)
	lastIndex := 0
	for _, delimiterIndex := range realDelimiterIndexes {
		// Indexes from getAllIndexes are 1-based positions after the match;
		// convert back to a 0-based cut point.
		delimiterIndex -= 1
		slices = append(slices, parameter[lastIndex:delimiterIndex])
		lastIndex = delimiterIndex + 1
	}
	slices = append(slices, parameter[lastIndex:])
	return slices
}

// unpackSlice unpacks each element substring of parameter against the slice's
// element shape, flattening any []string results into the output list.
func unpackSlice(parent string, parameter string, shape reflect.Value) []interface{} {
	shapeKind := getSliceKind(shape)
	if shapeKind == reflect.Invalid {
		return nil
	}
	slicedShape := getSliceShape(shape)
	slices := getSlices(parameter)
	resolvedList := make([]interface{}, 0)
	for _, item := range slices {
		unpackedParameter := UnpackParameter(parent, item, slicedShape, shapeKind)
		switch unpackedParameter.(type) {
		case []string:
			// Flatten nested string lists into the parent list.
			for _, unpackedItem := range unpackedParameter.([]string) {
				resolvedList = append(resolvedList, unpackedItem)
			}
		default:
			resolvedList = append(resolvedList, unpackedParameter)
		}
	}
	return resolvedList
}

// unpackTime parses parameter as an RFC 3339 timestamp, tolerating a
// space-separated "date time" form (rewritten to "dateTtime" with a trailing
// "Z" when missing). Returns nil when parsing fails.
func unpackTime(parameter string) *time.Time {
	timeParameter := strings.TrimSpace(parameter)
	if strings.Contains(timeParameter, " ") {
		// Drop a stray 'T' so the rebuilt form has exactly one separator.
		timeParameter = strings.ReplaceAll(timeParameter, "T", "")
		timeParameterSplit := strings.Split(timeParameter, " ")
		if len(timeParameterSplit) == 2 {
			timeParameter = fmt.Sprintf("%sT%s", timeParameterSplit[0], timeParameterSplit[1])
		}
		if !strings.HasSuffix(timeParameter, "Z") {
			// NOTE(review): assumes an offset-less input means UTC — confirm.
			timeParameter += "Z"
		}
	}
	parsedTime, err := time.Parse(time.RFC3339, timeParameter)
	if err == nil {
		return &parsedTime
	}
	return nil
}

// unpackScalar converts parameter to the scalar kind; parse failures yield the
// zero value (errors are deliberately ignored). Strings are routed through
// resolveParameter for possible list/map expansion.
// NOTE(review): reflect.Int8 and reflect.Uint8 are absent from the cases and
// fall through to the raw-string default even though scalarTypes lists them —
// confirm whether that is intended.
func unpackScalar(parent string, parameter string, shape reflect.Value, scalarType reflect.Kind) interface{} {
	unpacked := interface{}(parameter)
	switch scalarType {
	case reflect.Int, reflect.Int16, reflect.Int32, reflect.Int64:
		unpacked, _ = strconv.ParseInt(parameter, 10, 64)
	case reflect.Uint, reflect.Uint16, reflect.Uint32, reflect.Uint64:
		unpacked, _ = strconv.ParseUint(parameter, 10, 64)
	case reflect.Float32, reflect.Float64:
		unpacked, _ = strconv.ParseFloat(parameter, 64)
	case reflect.Bool:
		unpacked, _ = strconv.ParseBool(parameter)
	case reflect.String:
		unpacked = resolveParameter(parent, parameter, shape)
	}
	return unpacked
}

// unpackComplex dispatches a composite parameter to the map/struct/slice
// unpacker matching the shape's kind; nil for unsupported kinds.
func unpackComplex(parent string, parameter string, shape reflect.Value) interface{} {
	complexKind := getShapeKind(shape)
	switch complexKind {
	case reflect.Invalid:
		return parameter
	case reflect.Map:
		return resolveMapParameter(parent, parameter, shape)
	case reflect.Struct:
		return unpackStruct(resolveMapParameter(parent, parameter, shape), shape)
	case reflect.Slice:
		return unpackSlice(parent, parameter, shape)
	}
	return nil
}

// UnpackParameter converts one string parameter into the Go value implied by
// shape/shapeKind: time.Time gets special handling, then scalars, then
// composites; anything else is returned unchanged.
func UnpackParameter(parent string, parameter string, shape reflect.Value, shapeKind reflect.Kind) interface{} {
	if shapeKind == reflect.Invalid {
		return parameter
	}
	shape = correctShape(shape)
	parameter = underlayParameter(parameter, shape)
	if shape.Type().Name() == reflect.TypeOf(time.Time{}).Name() {
		if unpackedTime := unpackTime(parameter); unpackedTime != nil {
			return unpackedTime
		}
	}
	if isScalar(shapeKind) {
		return unpackScalar(parent, parameter, shape, shapeKind)
	}
	if isComplex(shapeKind) {
		return unpackComplex(parent, parameter, shape)
	}
	return parameter
}

// unpackParameters unpacks a string-valued parameter map against a struct
// shape; for non-struct shapes the values are passed through unchanged.
func unpackParameters(parameters map[string]string, shape interface{}) map[string]interface{} {
	shapeValue := reflect.ValueOf(shape)
	shapeKind := getShapeKind(shapeValue)
	unpackedParameters := make(map[string]interface{})
	for key, value := range parameters {
		unpackedParameters[key] = value
	}
	if shapeKind == reflect.Invalid {
		return unpackedParameters
	}
	if shapeKind == reflect.Struct {
		return unpackStruct(parameters, shapeValue)
	}
	return unpackedParameters
}

// UnpackParameters stringifies every parameter value (non-strings via %v) and
// unpacks the result against the given shape.
func UnpackParameters(parameters map[string]interface{}, shape interface{}) map[string]interface{} {
	parametersMap := make(map[string]string)
	for key, value := range parameters {
		switch value.(type) {
		case string:
			parametersMap[key] = value.(string)
		default:
			parametersMap[key] = fmt.Sprintf("%v", value)
		}
	}
	return unpackParameters(parametersMap, shape)
}

// GetServiceRegions returns the region IDs where the named AWS service is
// available, falling back to every region of the AWS partition when the
// service name is unknown.
func GetServiceRegions(serviceName string) []string {
	awsPartition := endpoints.AwsPartition()
	operationServiceRegions := awsPartition.Regions()
	services := awsPartition.Services()
	if operationService, ok := services[strings.ToLower(serviceName)]; ok {
		operationServiceRegions = operationService.Regions()
	}
	operationRegions := make([]string, 0)
	for _, region := range operationServiceRegions {
		operationRegions = append(operationRegions, region.ID())
	}
	return operationRegions
}

// GetActionTypeFromShape maps a Go shape to its UI action-type label.
func GetActionTypeFromShape(shape interface{}) string {
	shapeValue := reflect.ValueOf(shape)
	shapeValue = correctShape(shapeValue)
	shapeKind := getShapeKind(shapeValue)
	return getActionTypeFromShapeKind(shapeValue, shapeKind)
}

// getActionTypeFromShapeKind maps a reflect.Kind to an action-type label;
// slices recurse on their element kind, and unknown kinds default to "string".
func getActionTypeFromShapeKind(shape reflect.Value, shapeKind reflect.Kind) string {
	switch shapeKind {
	case reflect.Int, reflect.Int16, reflect.Int32, reflect.Int64:
		return "int"
	case reflect.Uint, reflect.Uint16, reflect.Uint32, reflect.Uint64:
		return "uint"
	case reflect.Float32, reflect.Float64:
		return "float"
	case reflect.Bool:
		return "bool"
	case reflect.String:
		return "string"
	case reflect.Map:
		return "code:map"
	case reflect.Struct:
		return "code:json"
	case reflect.Slice:
		sliceKind := getSliceKind(shape)
		slicedShape := getSliceShape(shape)
		return getActionTypeFromShapeKind(slicedShape, sliceKind)
	}
	return "string"
}

// getAllIndexes returns the 1-based positions just past every occurrence of c
// in str, in ascending order.
func getAllIndexes(str string, c byte) []int {
	var indexes []int
	lastIndex := 0
	for len(str) > 0 {
		index := strings.IndexByte(str, c) + 1
		if index == 0 {
			break
		}
		indexes = append(indexes, lastIndex+index)
		str = str[index:]
		lastIndex += index
	}
	sort.Ints(indexes)
	return indexes
}

// betweenPrefixAndSuffix reports whether a delimiter at `index` sits directly
// between a suffix character (at index-1) and a prefix character (at index+1),
// i.e. a "],[" or "},{" boundary in getAllIndexes' 1-based coordinates.
func betweenPrefixAndSuffix(index int, prefixIndexes []int, suffixIndexes []int) bool {
	nearPrefix, nearSuffix := false, false
	for _, prefix := range prefixIndexes {
		if prefix == index+1 {
			nearPrefix = true
		}
	}
	for _, suffix := range suffixIndexes {
		if suffix == index-1 {
			nearSuffix = true
		}
	}
	return nearPrefix && nearSuffix
}
aws/awsutil/blinkutil.go
0.653459
0.402157
blinkutil.go
starcoder
package buildin

import (
	"fmt"
	"math/big"
)

// Number represents a casted integer backed by an arbitrary-precision value.
type Number struct {
	Value *big.Int
}

// NewNumber creates a new Number object as a pointer, initialised to x.
func NewNumber(x int64) *Number {
	n := new(Number)
	n.Value = big.NewInt(x)
	return n
}

// SetUint64 sets a uint64 into the Number object.
func (x *Number) SetUint64(u uint64) {
	x.Value.SetUint64(u)
}

// SetInt64 sets an int64 into the Number object.
func (x *Number) SetInt64(i int64) {
	x.Value.SetInt64(i)
}

// IsGreater returns true if x > y.
func (x *Number) IsGreater(y *Number) bool {
	return x.Value.Cmp(y.Value) == 1
}

// IsLess returns true if x < y.
func (x *Number) IsLess(y *Number) bool {
	return x.Value.Cmp(y.Value) == -1
}

// IsEqual returns true if x == y.
func (x *Number) IsEqual(y *Number) bool {
	return x.Value.Cmp(y.Value) == 0
}

// IsGreaterOrEqual returns true if x >= y.
func (x *Number) IsGreaterOrEqual(y *Number) bool {
	return x.IsGreater(y) || x.IsEqual(y)
}

// IsLessOrEqual returns true if x <= y.
func (x *Number) IsLessOrEqual(y *Number) bool {
	return x.IsLess(y) || x.IsEqual(y)
}

// InRange returns true if min < x < max (exclusive on both ends).
func (x *Number) InRange(min, max *Number) bool {
	return x.IsGreater(min) && x.IsLess(max)
}

// InRangeOrEqual returns true if min <= x <= max (inclusive on both ends).
func (x *Number) InRangeOrEqual(min, max *Number) bool {
	return x.IsGreaterOrEqual(min) && x.IsLessOrEqual(max)
}

// InSlice returns true if x is present in slice.
func (x *Number) InSlice(slice []*Number) bool {
	for _, el := range slice {
		if x.IsEqual(el) {
			return true
		}
	}
	return false
}

// IsNegative returns true when x < 0.
func (x *Number) IsNegative() bool {
	return x.Value.Sign() == -1
}

// IsPositive returns true when x >= 0.
func (x *Number) IsPositive() bool {
	return !x.IsNegative()
}

// cast converts a into a Number. A nil input yields zero with no error;
// any non-integer type yields ErrBadNumType.
// (The uintptr case was added for consistency with castSlice, which already
// handles []uintptr.)
func cast(a interface{}) (*Number, error) {
	n := NewNumber(0)
	if a == nil {
		return n, nil
	}
	switch a := a.(type) {
	case int8:
		n.SetInt64(int64(a))
		return n, nil
	case int16:
		n.SetInt64(int64(a))
		return n, nil
	case int32:
		n.SetInt64(int64(a))
		return n, nil
	case int:
		n.SetInt64(int64(a))
		return n, nil
	case int64:
		n.SetInt64(a)
		return n, nil
	case uintptr:
		n.SetUint64(uint64(a))
		return n, nil
	case uint8:
		n.SetUint64(uint64(a))
		return n, nil
	case uint16:
		n.SetUint64(uint64(a))
		return n, nil
	case uint32:
		n.SetUint64(uint64(a))
		return n, nil
	case uint:
		n.SetUint64(uint64(a))
		return n, nil
	case uint64:
		n.SetUint64(a)
		return n, nil
	}
	return nil, ErrBadNumType
}

// castSlice converts a slice into a Number slice. A nil input yields an empty
// slice with no error; any non-integer slice type yields ErrBadNumType
// (together with the empty slice).
func castSlice(slice interface{}) ([]*Number, error) {
	n := make([]*Number, 0)
	if slice == nil {
		return n, nil
	}
	switch field := slice.(type) {
	case []int8:
		for _, val := range field {
			m := NewNumber(0)
			m.SetInt64(int64(val))
			n = append(n, m)
		}
		return n, nil
	case []int16:
		for _, val := range field {
			m := NewNumber(0)
			m.SetInt64(int64(val))
			n = append(n, m)
		}
		return n, nil
	case []int32:
		for _, val := range field {
			m := NewNumber(0)
			m.SetInt64(int64(val))
			n = append(n, m)
		}
		return n, nil
	case []int:
		for _, val := range field {
			m := NewNumber(0)
			m.SetInt64(int64(val))
			n = append(n, m)
		}
		return n, nil
	case []int64:
		for _, val := range field {
			m := NewNumber(0)
			m.SetInt64(val)
			n = append(n, m)
		}
		return n, nil
	case []uintptr:
		for _, val := range field {
			m := NewNumber(0)
			m.SetUint64(uint64(val))
			n = append(n, m)
		}
		return n, nil
	case []uint8:
		for _, val := range field {
			m := NewNumber(0)
			m.SetUint64(uint64(val))
			n = append(n, m)
		}
		return n, nil
	case []uint16:
		for _, val := range field {
			m := NewNumber(0)
			m.SetUint64(uint64(val))
			n = append(n, m)
		}
		return n, nil
	case []uint32:
		for _, val := range field {
			m := NewNumber(0)
			m.SetUint64(uint64(val))
			n = append(n, m)
		}
		return n, nil
	case []uint:
		for _, val := range field {
			m := NewNumber(0)
			m.SetUint64(uint64(val))
			n = append(n, m)
		}
		return n, nil
	case []uint64:
		for _, val := range field {
			m := NewNumber(0)
			m.SetUint64(val)
			n = append(n, m)
		}
		return n, nil
	}
	return n, ErrBadNumType
}

// NumFieldToString returns the string representation of a number field;
// nil renders as "0".
func NumFieldToString(field interface{}) string {
	if field == nil {
		return "0"
	}
	return fmt.Sprintf("%d", field)
}
buildin/numbers.go
0.807764
0.47317
numbers.go
starcoder
package model

// ParallelComponent is the option set for parallel component.
type ParallelComponent struct {
	// Distance between parallel component and the left side of the container.
	// Left value can be instant pixel value like 20.
	// It can also be a percentage value relative to container width like '20%';
	// and it can also be 'left', 'center', or 'right'.
	// If the left value is set to be 'left', 'center', or 'right',
	// then the component will be aligned automatically based on position.
	Left string `json:"left,omitempty"`

	// Distance between parallel component and the top side of the container.
	// Top value can be instant pixel value like 20.
	// It can also be a percentage value relative to container height like '20%';
	// and it can also be 'top', 'middle', or 'bottom'.
	// If the top value is set to be 'top', 'middle', or 'bottom',
	// then the component will be aligned automatically based on position.
	Top string `json:"top,omitempty"`

	// Distance between parallel component and the right side of the container.
	// Right value can be instant pixel value like 20.
	// It can also be a percentage value relative to container width like '20%'.
	Right string `json:"right,omitempty"`

	// Distance between parallel component and the bottom side of the container.
	// Bottom value can be instant pixel value like 20.
	// It can also be a percentage value relative to container height like '20%'.
	Bottom string `json:"bottom,omitempty"`
}

// ParallelAxis is the option set for a parallel axis.
type ParallelAxis struct {
	// Dimension index of coordinate axis.
	Dim int `json:"dim,omitempty"`

	// Name of axis.
	Name string `json:"name,omitempty"`

	// The maximum value of axis.
	// It can be set to the special value 'dataMax' so that the maximum value
	// on this axis is set to be the maximum value of the data.
	// It will be automatically computed to make sure axis tick is equally distributed when not set.
	// In category axis, it can also be set as the ordinal number.
	Max interface{} `json:"max,omitempty"`

	// Whether the axis is inverted.
	Inverse bool `json:"inverse,omitempty"`

	// Location of axis name. Options: "start", "middle", "center", "end"
	// default "end"
	NameLocation string `json:"nameLocation,omitempty"`

	// Type of axis.
	// Options:
	// "value":Numerical axis, suitable for continuous data.
	// "category" Category axis, suitable for discrete category data. Category data can be auto retrieved from series.
	// "log" Log axis, suitable for log data.
	Type string `json:"type,omitempty"`

	// Category data,works on (type: "category").
	Data interface{} `json:"data,omitempty"`
}
model/parallel.go
0.802285
0.483222
parallel.go
starcoder
package vector

import (
	"math"

	"github.com/revzim/azmath"
)

type (
	// Vector is a 3-component float64 vector.
	Vector struct {
		X float64 `json:"x"`
		Y float64 `json:"y"`
		Z float64 `json:"z"`
	}
)

// New builds a vector from either one value (splatted to all three
// components) or exactly three values. Any other argument count returns nil.
func New(n ...float64) *Vector {
	if len(n) == 1 {
		return &Vector{n[0], n[0], n[0]}
	} else if len(n) == 3 {
		return &Vector{n[0], n[1], n[2]}
	}
	return nil
}

// Named direction constants. These are shared package-level pointers:
// callers must not mutate the vectors they point to.
var (
	ZeroVector     = New(0)
	OneVector      = New(1)
	UpVector       = New(0, 0, 1)
	DownVector     = New(0, 0, -1)
	ForwardVector  = New(1, 0, 0)
	BackwardVector = New(-1, 0, 0)
	RightVector    = New(0, 1, 0)
	LeftVector     = New(0, -1, 0)
	XAxisVector    = New(1, 0, 0)
	YAxisVector    = New(0, 1, 0)
	ZAxisVector    = New(0, 0, 1)
)

// Add returns a new vector v + v2.
func (v *Vector) Add(v2 Vector) *Vector {
	tmpVec := New(
		v.X+v2.X,
		v.Y+v2.Y,
		v.Z+v2.Z,
	)
	return tmpVec
}

// Subtract returns a new vector v - v2.
func (v *Vector) Subtract(v2 Vector) *Vector {
	tmpVec := New(
		v.X-v2.X,
		v.Y-v2.Y,
		v.Z-v2.Z,
	)
	return tmpVec
}

// Scale returns the component-wise product v * v2 (Hadamard product).
func (v *Vector) Scale(v2 Vector) *Vector {
	tmpVec := New(
		v.X*v2.X,
		v.Y*v2.Y,
		v.Z*v2.Z,
	)
	return tmpVec
}

// Divide returns the component-wise quotient v / v2.
// No zero check is performed on v2's components.
func (v *Vector) Divide(v2 Vector) *Vector {
	tmpVec := New(
		v.X/v2.X,
		v.Y/v2.Y,
		v.Z/v2.Z,
	)
	return tmpVec
}

// AddConst returns a new vector with n added to every component.
func (v *Vector) AddConst(n float64) *Vector {
	tmpVec := New(
		v.X+n,
		v.Y+n,
		v.Z+n,
	)
	return tmpVec
}

// SubtractConst returns a new vector with n subtracted from every component.
func (v *Vector) SubtractConst(n float64) *Vector {
	tmpVec := New(
		v.X-n,
		v.Y-n,
		v.Z-n,
	)
	return tmpVec
}

// ScaleConst returns a new vector with every component multiplied by n.
func (v *Vector) ScaleConst(n float64) *Vector {
	tmpVec := New(
		v.X*n,
		v.Y*n,
		v.Z*n,
	)
	return tmpVec
}

// DivideConst returns a new vector with every component divided by n;
// dividing by zero yields the zero vector.
// Fixed: the original returned the shared package-level ZeroVector pointer on
// n == 0, so a caller mutating the result would silently corrupt the global.
func (v *Vector) DivideConst(n float64) *Vector {
	if n == 0 {
		return New(0)
	}
	rScale := 1.0 / n
	tmpVec := New(
		v.X*rScale,
		v.Y*rScale,
		v.Z*rScale,
	)
	return tmpVec
}

// CrossProduct returns the cross product v × v2.
func (v *Vector) CrossProduct(v2 Vector) *Vector {
	tmpVec := New(
		v.Y*v2.Z-v.Z*v2.Y,
		v.Z*v2.X-v.X*v2.Z,
		v.X*v2.Y-v.Y*v2.X,
	)
	return tmpVec
}

// DotProduct returns the dot product v · v2.
func (v *Vector) DotProduct(v2 Vector) float64 {
	ans := v.X*v2.X + v.Y*v2.Y + v.Z*v2.Z
	return ans
}

// Equals reports exact component-wise equality.
func (v *Vector) Equals(v2 Vector) bool {
	return v.X == v2.X && v.Y == v2.Y && v.Z == v2.Z
}

// NearlyEquals reports whether every component differs by at most tolerance.
func (v *Vector) NearlyEquals(v2 Vector, tolerance float64) bool {
	return math.Abs(v.X-v2.X) <= tolerance &&
		math.Abs(v.Y-v2.Y) <= tolerance &&
		math.Abs(v.Z-v2.Z) <= tolerance
}

// NotEqual reports whether any component differs.
func (v *Vector) NotEqual(v2 Vector) bool {
	return v.X != v2.X || v.Y != v2.Y || v.Z != v2.Z
}

// Negative returns a new vector with every component negated.
func (v *Vector) Negative() *Vector {
	tmpVec := New(-v.X, -v.Y, -v.Z)
	return tmpVec
}

// Size returns the Euclidean length of the vector.
func (v *Vector) Size() float64 {
	return math.Sqrt(v.X*v.X + v.Y*v.Y + v.Z*v.Z)
}

// SizeSquare returns the squared Euclidean length.
func (v *Vector) SizeSquare() float64 {
	return v.X*v.X + v.Y*v.Y + v.Z*v.Z
}

// Size2D returns X*X + Y*Y.
// NOTE(review): despite the name this is the SQUARED 2D magnitude (no sqrt,
// unlike Size) — confirm with callers before changing.
func (v *Vector) Size2D() float64 {
	return v.X*v.X + v.Y*v.Y
}

// IsZero reports whether every component is exactly zero.
func (v *Vector) IsZero() bool {
	return v.X == 0 && v.Y == 0 && v.Z == 0
}

// NearlyZero reports whether every component is within tolerance of zero.
func (v *Vector) NearlyZero(tolerance float64) bool {
	return math.Abs(v.X) <= tolerance &&
		math.Abs(v.Y) <= tolerance &&
		math.Abs(v.Z) <= tolerance
}

// Normalize scales v in place to unit length using a fast inverse square
// root, returning true on success. When the squared length does not exceed
// tolerance the vector is left unchanged and false is returned.
func (v *Vector) Normalize(tolerance float64) bool {
	squareSum := v.X*v.X + v.Y*v.Y + v.Z*v.Z
	if squareSum > tolerance {
		scale := azmath.FastInvSqrt64(squareSum)
		v.X *= scale
		v.Y *= scale
		v.Z *= scale
		return true
	}
	return false
}

// Dist returns the Euclidean distance between v and v2.
func (v *Vector) Dist(v2 Vector) float64 {
	return math.Sqrt(v.DistSquared(v2))
}

// DistSquared returns the squared Euclidean distance between v and v2.
func (v *Vector) DistSquared(v2 Vector) float64 {
	return math.Pow(v2.X-v.X, 2) + math.Pow(v2.Y-v.Y, 2) + math.Pow(v2.Z-v.Z, 2)
}

// DistXY returns the distance between v and v2 in the XY plane only.
func (v *Vector) DistXY(v2 Vector) float64 {
	return math.Sqrt(v.DistSquaredXY(v2))
}

// DistSquaredXY returns the squared distance between v and v2 in the XY plane.
func (v *Vector) DistSquaredXY(v2 Vector) float64 {
	return math.Pow(v2.X-v.X, 2) + math.Pow(v2.Y-v.Y, 2)
}
vector/vector.go
0.69368
0.608012
vector.go
starcoder
package oots

import "math"

// ConfidenceTree mirrors a ModelTree, storing at each position the id of the
// child move the model considers best.
type ConfidenceTree struct {
	Root *ConfidenceNode
}

// GetConfidenceTree returns an empty ConfidenceTree with an initialised root.
func GetConfidenceTree() *ConfidenceTree {
	return &ConfidenceTree{Root: GetConfidenceNode()}
}

// ConfidenceNode is one node of the confidence tree: its children keyed by
// move id, and the preferred move id at this game state.
type ConfidenceNode struct {
	Child      map[string]*ConfidenceNode `json:"c"`
	SuitableId string                     `json:"s,omitempty"`
}

// GetConfidenceNode returns a ConfidenceNode with an empty child map and no
// preferred id.
func GetConfidenceNode() *ConfidenceNode {
	return &ConfidenceNode{Child: make(map[string]*ConfidenceNode), SuitableId: ""}
}

// ConstructConfidenceTree builds the confidence tree by traversing the model
// tree from its root.
func (confTree *ConfidenceTree) ConstructConfidenceTree(modelTree *ModelTree) {
	traverseModelTreeAndConstructConfidenceTree(modelTree.Root, confTree.Root)
}

// traverseModelTreeAndConstructConfidenceTree walks the model tree and mirrors
// it into the confidence tree, recording the preferred child id at each level.
func traverseModelTreeAndConstructConfidenceTree(modelRoot *GameStateNode, confRoot *ConfidenceNode) {
	if modelRoot == nil || len(modelRoot.ChildNode) == 0 {
		// since at this point, the confidence node should also have no child nodes as the game moves are over,
		// so we don't assign any preferredCellId to this one
		return
	}
	// TODO using sync pool which help improve the performance
	//
	// The preferred move is a property of modelRoot's whole child set, not of
	// an individual child, so compute it once instead of once per iteration
	// (the original recomputed it inside the loop).
	preferredCellId := GetNodeWithMaxConfidence(modelRoot.ChildNode, modelRoot.SelectedCount)
	for k, v := range modelRoot.ChildNode {
		// GetConfidenceNode already initialises an empty child map; the
		// original built and assigned a second map redundantly.
		newConfNode := GetConfidenceNode()
		newConfNode.SuitableId = preferredCellId
		confRoot.Child[k] = newConfNode
		traverseModelTreeAndConstructConfidenceTree(v, newConfNode)
	}
}

// GetNodeWithMaxConfidence returns the id of the child node with the highest
// confidence, or "" when every child scores zero.
func GetNodeWithMaxConfidence(gameChildNodes map[string]*GameStateNode, visitCount int) string {
	maxConfidenceId := ""
	maxConf := 0.0
	// calculate the confidence for each of the child nodes
	for id, v := range gameChildNodes {
		conf := calculateConfidence(v.WinCount, v.SelectedCount, visitCount)
		// keeping the strict less coz, if confidence is coming out as zero, default at there should
		// be another logic for selecting the preferred move and it should be handled by the client bot
		if conf > maxConf {
			maxConf = conf
			maxConfidenceId = id
		}
	}
	return maxConfidenceId
}

// calculateConfidence scores a child via the UCB1 formula: mean win rate plus
// an exploration bonus sqrt(2*ln(N)/ni) that shrinks as the child is visited
// more often. vi is the child's win count, ni its selection count, visitCount
// the parent's total visits.
//
// Fixed: the original returned the raw win count (not the win rate vi/ni) and
// placed the division inside the logarithm — sqrt(log(N/ni)) — which is not
// the UCB1 bonus and also produced NaN whenever ni > N. A guard now returns 0
// for unvisited children instead of dividing by zero.
func calculateConfidence(vi, ni, visitCount int) float64 {
	if ni <= 0 || visitCount <= 0 {
		return 0
	}
	return float64(vi)/float64(ni) + math.Sqrt(2*math.Log(float64(visitCount))/float64(ni))
}
oots/ConfidenceTree.go
0.649801
0.442034
ConfidenceTree.go
starcoder
// Package qppb converts protobuf-generated query-parameter messages into
// their native queryp equivalents.
package qppb

import "github.com/snowzach/queryp"

// QueryParameters converts the protobuf QueryParameters message into a native
// queryp.QueryParameters value, translating filter and sort terms.
func (qp *QueryParameters) QueryParameters() *queryp.QueryParameters {
	return &queryp.QueryParameters{
		Filter:  FilterQP(qp.Filter),
		Sort:    SortQP(qp.Sort),
		Options: qp.Options,
		Limit:   int64(qp.Limit),
		Offset:  int64(qp.Offset),
	}
}

// FilterQP converts a slice of protobuf FilterTerm messages into a
// queryp.Filter, recursing into sub-filters. An empty input yields nil.
func FilterQP(ftpb []*FilterTerm) queryp.Filter {
	if len(ftpb) == 0 {
		return nil
	}
	f := make(queryp.Filter, 0, len(ftpb))
	for _, ft := range ftpb {
		f = append(f, &queryp.FilterTerm{
			Logic:     ft.Logic.FilterLogic(),
			Field:     ft.Field,
			Op:        ft.Op.FilterOp(),
			Value:     ft.Value.AsInterface(),
			SubFilter: FilterQP(ft.SubFilter),
		})
	}
	return f
}

// FilterLogic maps the protobuf FilterLogic enum to the queryp constant;
// unknown values map to -1.
func (fl FilterLogic) FilterLogic() queryp.FilterLogic {
	switch fl {
	case FilterLogic_FilterLogicAnd:
		return queryp.FilterLogicAnd
	case FilterLogic_FilterLogicOr:
		return queryp.FilterLogicOr
	}
	return -1
}

// FilterOp maps the protobuf FilterOp enum to the queryp constant;
// unknown values map to -1.
func (fo FilterOp) FilterOp() queryp.FilterOp {
	switch fo {
	case FilterOp_FilterOpEquals:
		return queryp.FilterOpEquals
	case FilterOp_FilterOpNotEquals:
		return queryp.FilterOpNotEquals
	case FilterOp_FilterOpLessThan:
		return queryp.FilterOpLessThan
	case FilterOp_FilterOpLessThanEqual:
		return queryp.FilterOpLessThanEqual
	case FilterOp_FilterOpGreaterThan:
		return queryp.FilterOpGreaterThan
	case FilterOp_FilterOpGreaterThanEqual:
		return queryp.FilterOpGreaterThanEqual
	case FilterOp_FilterOpLike:
		return queryp.FilterOpLike
	case FilterOp_FilterOpNotLike:
		return queryp.FilterOpNotLike
	case FilterOp_FilterOpILike:
		return queryp.FilterOpILike
	case FilterOp_FilterOpNotILike:
		return queryp.FilterOpNotILike
	case FilterOp_FilterOpRegexp:
		return queryp.FilterOpRegexp
	case FilterOp_FilterOpNotRegexp:
		return queryp.FilterOpNotRegexp
	case FilterOp_FilterOpIRegexp:
		return queryp.FilterOpIRegexp
	case FilterOp_FilterOpNotIRegexp:
		return queryp.FilterOpNotIRegexp
	}
	return -1
}

// FilterType maps the protobuf FilterType enum to the queryp constant;
// unknown values map to -1.
func (ft FilterType) FilterType() queryp.FilterType {
	switch ft {
	case FilterType_FilterTypeNotFound:
		return queryp.FilterTypeNotFound
	case FilterType_FilterTypeSimple:
		return queryp.FilterTypeSimple
	case FilterType_FilterTypeString:
		return queryp.FilterTypeString
	case FilterType_FilterTypeNumeric:
		return queryp.FilterTypeNumeric
	case FilterType_FilterTypeTime:
		return queryp.FilterTypeTime
	case FilterType_FilterTypeBool:
		return queryp.FilterTypeBool
	}
	return -1
}

// SortQP converts a slice of protobuf SortTerm messages into a queryp.Sort.
// An empty input yields nil.
func SortQP(stpb []*SortTerm) queryp.Sort {
	if len(stpb) == 0 {
		return nil
	}
	s := make(queryp.Sort, 0, len(stpb))
	for _, st := range stpb {
		s = append(s, &queryp.SortTerm{
			Field: st.Field,
			Desc:  st.Desc,
		})
	}
	return s
}
qppb/pbqp.go
0.578091
0.46873
pbqp.go
starcoder
package evo

// Crosser creates a new child from the parents through crossover (or cloning if there is only one parent). The crosser is not responsible for mutation or for assigning the genome an ID or to a species.
type Crosser interface {
	Cross(parents ...Genome) (child Genome, err error)
}

// Evaluator utilises the network provided and returns its fitness (or error) as a result
type Evaluator interface {
	Evaluate(Phenome) (Result, error)
}

// Matrix describes data organised as a matrix. It mimics a subset of the signature of [gonum's mat.Matrix](https://godoc.org/gonum.org/v1/gonum/mat) which allows directly passing matrices from that package as inputs as well as any other type that implements it, such as [sparse](https://godoc.org/github.com/james-bowman/sparse). Network implementations, however, may expect a specific type and throw an error if they cannot convert to the desired type.
type Matrix interface {
	// Dims returns the dimensions of a Matrix.
	Dims() (r, c int)

	// At returns the value of a matrix element at row i, column j.
	// It will panic if i or j are out of bounds for the matrix.
	At(i, j int) float64
}

// Mutator changes the genome's encoding (nodes, conns, or traits)
type Mutator interface {
	Mutate(*Genome) error
}

// Populator provides a population from which the experiment will begin
type Populator interface {
	Populate() (Population, error)
}

// Seeder provides an uninitialised genome from which to construct a new population
type Seeder interface {
	Seed() (Genome, error)
}

// Searcher processes each phenome through the evaluator and returns the result
type Searcher interface {
	Search(Evaluator, []Phenome) ([]Result, error)
}

// Selector examines a population and returns the current genomes who will continue and those that will become parents
type Selector interface {
	Select(Population) (continuing []Genome, parents [][]Genome, err error)
}

// Speciator assigns the population's genomes to a species, creating and destroying species as necessary.
type Speciator interface {
	Speciate(*Population) error
}

// Transcriber creates the decoded substrate from the encoded one.
type Transcriber interface {
	Transcribe(Substrate) (Substrate, error)
}

// Translator creates a new network from definitions contained in the nodes and connections
type Translator interface {
	Translate(Substrate) (Network, error)
}

// Mutators collection which acts as a single mutator. Component mutators will be called in order
// until the complexity of the genome changes.
type Mutators []Mutator

// Mutate the genome with the composite mutators. Each component mutator runs
// in order; the sequence stops early as soon as one of them changes the
// genome's complexity, or when any mutator returns an error.
func (m Mutators) Mutate(g *Genome) error {

	// Record the starting complexity
	n := g.Complexity()

	// Iterate the mutators in order
	for _, x := range m {

		// Use the current mutator on the genome
		if err := x.Mutate(g); err != nil {
			return err
		}

		// The complexity has changed so do not continue with the remaining mutations
		if g.Complexity() != n {
			break
		}
	}
	return nil
}
helpers.go
0.867233
0.683261
helpers.go
starcoder
package core

import (
	"math"
)

// basePressure returns the average of the first 10 positive smoothed-pressure
// samples in the stream, used as the reference (ground-level) pressure for
// altitude calculations. It returns 0 when fewer than 10 positive samples
// exist, signalling that no baseline is available yet.
func basePressure(stream FlightData) float64 {
	pressures := make([]float64, 0)
	for _, segment := range stream.AllSegments() {
		if segment.Computed.SmoothedPressure > 0 {
			pressures = append(pressures, segment.Computed.SmoothedPressure)
		}
		if len(pressures) >= 10 {
			var sum float64 = 0
			for _, v := range pressures {
				sum += v
			}
			return nanSafe(sum / float64(len(pressures)))
		}
	}
	return 0
}

// altitude converts a raw pressure reading to altitude (relative to the base
// pressure bp) using the standard barometric formula. raw.Pressure is divided
// by 100, so it is presumably in Pa while bp is in hPa — TODO confirm units.
// Returns 0 while no base pressure has been established (bp == 0).
func altitude(bp float64, raw RawDataSegment) float64 {
	if bp == 0 {
		return 0
	}
	return nanSafe(44307.7 * (1 - math.Pow((raw.Pressure/100)/bp, 0.190284)))
}

// normalizedPressure converts the raw pressure reading to the unit used by the
// rest of the pipeline (raw value divided by 100, presumably Pa -> hPa).
func normalizedPressure(raw RawDataSegment) float64 {
	return nanSafe(raw.Pressure / 100.0)
}

// velocity estimates vertical velocity as the altitude delta over the time
// delta against the most recent segment whose altitude differs from the
// current one. Returns 0 when no such segment exists.
func velocity(stream FlightData, bp float64, raw RawDataSegment) float64 {
	altitude := altitude(bp, raw)
	segments := stream.AllSegments()
	for i := len(segments) - 1; i >= 0; i-- {
		if segments[i].Computed.Altitude != altitude {
			return nanSafe((altitude - segments[i].Computed.Altitude) / (raw.Timestamp - segments[i].Raw.Timestamp))
		}
	}
	return 0.0
}

// yaw derives a yaw angle in degrees from the X/Z accelerometer components.
func yaw(raw RawDataSegment) float64 {
	return nanSafe(math.Atan2(-1.0*raw.Acceleration.X, raw.Acceleration.Z) * (180.0 / math.Pi))
}

// pitch derives a pitch angle in degrees from the Y/Z accelerometer components.
func pitch(raw RawDataSegment) float64 {
	return nanSafe(math.Atan2(-1.0*raw.Acceleration.Y, raw.Acceleration.Z) * (180.0 / math.Pi))
}

// toRadians converts degrees to radians.
func toRadians(degrees float64) float64 {
	return nanSafe(degrees * math.Pi / 180)
}

// toDegrees converts radians to degrees.
func toDegrees(radians float64) float64 {
	return nanSafe(radians * 180 / math.Pi)
}

// bearing computes the initial bearing (forward azimuth) in degrees [0, 360)
// from the launch origin to the current GPS coordinate. Returns 0 when either
// coordinate is unset (zero lat or lon).
func bearing(origin Coordinate, raw RawDataSegment) float64 {
	if origin.Lat == 0 || origin.Lon == 0 || raw.Coordinate.Lat == 0 || raw.Coordinate.Lon == 0 {
		return 0
	}
	startLat := toRadians(origin.Lat)
	startLng := toRadians(origin.Lon)
	// BUG FIX: destLat previously used raw.Coordinate.Lon (copy-paste from the
	// destLng line); the azimuth formula requires the destination latitude.
	destLat := toRadians(raw.Coordinate.Lat)
	destLng := toRadians(raw.Coordinate.Lon)
	y := math.Sin(destLng-startLng) * math.Cos(destLat)
	x := math.Cos(startLat)*math.Sin(destLat) - math.Sin(startLat)*math.Cos(destLat)*math.Cos(destLng-startLng)
	brng := math.Atan2(y, x)
	brng = toDegrees(brng)
	// Normalise from (-180, 180] to [0, 360).
	return nanSafe(math.Mod(brng+360, 360))
}

// distance computes the great-circle distance in metres between the origin and
// the current GPS coordinate using the haversine formula. Returns 0 when
// either coordinate is unset (zero lat or lon).
func distance(origin Coordinate, raw RawDataSegment) float64 {
	if origin.Lat == 0 || origin.Lon == 0 || raw.Coordinate.Lat == 0 || raw.Coordinate.Lon == 0 {
		return 0
	}
	R := 6371e3 // mean Earth radius in metres
	φ1 := origin.Lat * math.Pi / 180
	φ2 := raw.Coordinate.Lat * math.Pi / 180
	Δφ := (raw.Coordinate.Lat - origin.Lat) * math.Pi / 180
	Δλ := (raw.Coordinate.Lon - origin.Lon) * math.Pi / 180
	a := math.Sin(Δφ/2)*math.Sin(Δφ/2) + math.Cos(φ1)*math.Cos(φ2)*math.Sin(Δλ/2)*math.Sin(Δλ/2)
	c := 2 * math.Atan2(math.Sqrt(a), math.Sqrt(1-a))
	return nanSafe(R * c)
}

// dataRate returns the mean number of samples received per whole second across
// the stream's timestamps (samples bucketed by floor(timestamp)).
func dataRate(stream FlightData) float64 {
	totalsMap := make(map[int]float64)
	for _, timestamp := range stream.Time() {
		second := int(math.Floor(timestamp))
		if total, ok := totalsMap[second]; ok {
			totalsMap[second] = total + 1
		} else {
			totalsMap[second] = 1
		}
	}
	total := 0.0
	for _, secondTotal := range totalsMap {
		total += secondTotal
	}
	return nanSafe(total / float64(len(totalsMap)))
}

// averageComputedValue averages the value selected by accessor over the
// current (not yet appended) computed segment plus every stored segment whose
// timestamp falls within the trailing window of `seconds`.
func averageComputedValue(seconds float64, stream FlightData, raw RawDataSegment, computed ComputedDataSegment, accessor func(seg ComputedDataSegment) float64) float64 {
	total := accessor(computed)
	n := 1.0
	i := len(stream.AllSegments()) - 1
	for i >= 0 && raw.Timestamp-stream.Time()[i] <= seconds {
		total += accessor(stream.AllSegments()[i].Computed)
		n++
		i--
	}
	return nanSafe(total / n)
}

// determineFlightMode advances the flight-mode state machine
// (prelaunch -> powered ascent -> unpowered ascent -> freefall -> parachute ->
// recovery) based on 1-second averages of smoothed velocity and vertical
// acceleration. When no transition condition is met, the previous mode is kept.
func determineFlightMode(stream FlightData, raw RawDataSegment, computed ComputedDataSegment) FlightMode {
	length := len(stream.AllSegments())
	if length == 0 {
		return ModePrelaunch
	}
	lastMode := stream.AllSegments()[length-1].Computed.FlightMode
	avgVelocity := averageComputedValue(1, stream, raw, computed, func(seg ComputedDataSegment) float64 {
		return seg.SmoothedVelocity
	})
	avgAcceleration := averageComputedValue(1, stream, raw, computed, func(seg ComputedDataSegment) float64 {
		return seg.SmoothedVerticalAcceleration
	})
	if lastMode == ModePrelaunch && avgVelocity > 5 {
		return ModeAscentPowered
	}
	if lastMode == ModeAscentPowered && avgAcceleration < 0 && avgVelocity > 0 {
		return ModeAscentUnpowered
	}
	if (lastMode == ModeAscentPowered || lastMode == ModeAscentUnpowered) && avgVelocity < 0 {
		return ModeDescentFreefall
	}
	if lastMode == ModeDescentFreefall && math.Abs(avgAcceleration) < 0.5 {
		return ModeDescentParachute
	}
	if (lastMode == ModeDescentFreefall || lastMode == ModeDescentParachute) && math.Abs(avgAcceleration) < 0.5 && math.Abs(avgVelocity) < 0.5 {
		return ModeRecovery
	}
	return lastMode
}

// ComputeDataSegment derives all computed values for one raw telemetry segment.
// It returns the computed segment along with the (possibly newly established)
// base pressure and launch origin so the caller can persist them on the stream.
func ComputeDataSegment(stream FlightData, raw RawDataSegment) (ComputedDataSegment, float64, Coordinate) {
	bp := stream.BasePressure()
	if bp == 0 {
		bp = basePressure(stream)
	}
	origin := stream.Origin()
	// Lock in the first valid GPS fix as the launch origin.
	if origin.Lat == 0 && origin.Lon == 0 && raw.Coordinate.Lat != 0 && raw.Coordinate.Lon != 0 {
		origin = raw.Coordinate
	}
	alt := altitude(bp, raw)
	vel := velocity(stream, bp, raw)
	press := normalizedPressure(raw)
	smoothedAlt := alt
	smoothedVel := vel
	smoothedVertAccel := 0.0
	smoothedPress := press
	smoothedTemp := raw.Temperature
	s := len(stream.AllSegments())
	if s > 0 {
		// Exponential smoothing against the previous segment's smoothed values.
		alpha := 0.5
		smoothedAlt = smoothed(alpha, alt, stream.SmoothedAltitude()[s-1])
		smoothedVel = smoothed(alpha, vel, stream.SmoothedVelocity()[s-1])
		smoothedPress = smoothed(alpha, press, stream.SmoothedPressure()[s-1])
		smoothedTemp = smoothed(alpha, raw.Temperature, stream.SmoothedTemperature()[s-1])
		smoothedVertAccel = (smoothedVel - stream.SmoothedVelocity()[s-1]) / (raw.Timestamp - stream.Time()[s-1])
	}
	computed := ComputedDataSegment{
		Altitude:                     alt,
		Velocity:                     vel,
		Yaw:                          yaw(raw),
		Pitch:                        pitch(raw),
		Bearing:                      bearing(origin, raw),
		Distance:                     distance(origin, raw),
		DataRate:                     dataRate(stream),
		SmoothedAltitude:             smoothedAlt,
		SmoothedVelocity:             smoothedVel,
		SmoothedPressure:             smoothedPress,
		SmoothedTemperature:          smoothedTemp,
		SmoothedVerticalAcceleration: smoothedVertAccel,
	}
	computed.FlightMode = determineFlightMode(stream, raw, computed)
	return computed, bp, origin
}
ground/core/computed.go
0.756178
0.528473
computed.go
starcoder
package piButterfly

import (
	"math"

	"github.com/czebos/pi/supportCode"
)

// GenTree builds, for every leaf of the binary tree, the path of intermediary
// hops (each with a nonce) leading from that leaf up to the root, with the
// recipient appended at the end.
//
// Input: lambda - security parameter; B - tree-width/security parameter;
// parties - IDs of all involved parties; recip - the party expecting the
// message (Bob).
// Output: one supportCode.Path (IDs plus nonces) per leaf; the first B nodes
// of the tree are the leaves.
// MODIFIED TO ACCOMMODATE PIBUTTERFLY.
func GenTree(lambda int, B int, parties []int, recip int) []supportCode.Path {
	// Build the tree with parent pointers already wired up.
	nodes := CreateTree(parties, B, lambda)
	paths := make([]supportCode.Path, 0)
	// One path per leaf; leaves occupy indices [0, B).
	for i := 0; i < int(B); i++ {
		path := make([]int, 0)
		nonces := make([]int, 0)
		currentNode := nodes[i]
		// Walk up the tree, accumulating each ancestor's hop IDs and nonces.
		for currentNode.Parent != nil {
			path = append(path, currentNode.Path...)
			nonces = append(nonces, currentNode.Nonces...)
			currentNode = *currentNode.Parent
		}
		// Include the root node's hops, then terminate the path at the recipient.
		path = append(path, currentNode.Path...)
		path = append(path, recip)
		nonces = append(nonces, currentNode.Nonces...)
		paths = append(paths, supportCode.Path{path, nonces})
	}
	return paths
}

// CreateTree allocates the 2B-1 nodes of a binary tree, gives each node a
// random hop path (whose length is derived from lambda: RoundB(log2(lambda)^2))
// with matching nonces, and wires up parent pointers so that consecutive pairs
// of nodes share a parent. The first B nodes are the leaves; the last node
// (the root) keeps a nil Parent.
// MODIFIED TO ACCOMMODATE PIBUTTERFLY.
func CreateTree(parties []int, B int, lambda int) []supportCode.Node {
	// A complete binary tree with B leaves has 2B-1 nodes in total.
	nodes := make([]supportCode.Node, (2*B)-1)
	for i := 0; i < len(nodes); i++ {
		// Per-node hop path; length grows as log2(lambda)^2 (rounded by RoundB).
		path, nonces := supportCode.CreatePath(parties, supportCode.RoundB(math.Pow(math.Log2(float64(lambda)), 2)))
		nodes[i].Path = path
		nodes[i].Nonces = nonces
	}
	// factor advances every second node so siblings i and i+1 share nodes[B+factor]
	// as their parent. Nodes at index >= pivotNode keep a nil Parent (the root,
	// and — presumably — any node CalculatePivotNode excludes; see supportCode).
	factor := 0
	pivotNode := supportCode.CalculatePivotNode(B)
	for i := 0; i < pivotNode; i++ {
		nodes[i].Parent = &nodes[B+factor]
		if i%2 == 1 {
			factor += 1
		}
	}
	return nodes
}
piButterfly/supportCode.go
0.551091
0.482307
supportCode.go
starcoder
package codelab import ( "github.com/google/differential-privacy/privacy-on-beam/pbeam" "github.com/apache/beam/sdks/go/pkg/beam" ) // ComputeCountMeanSum computes the three aggregations (count, mean and sum) we // compute separately in the other files in a differentially private way. // This pipeline uses a single PrivacySpec for all the aggregations, meaning // that they share the same privacy budget. func ComputeCountMeanSum(s beam.Scope, col beam.PCollection) (visitsPerHour, meanTimeSpent, revenues beam.PCollection) { s = s.Scope("ComputeCountMeanSum") // Create a Privacy Spec and convert col into a PrivatePCollection spec := pbeam.NewPrivacySpec(epsilon, delta) // Shared by count, mean and sum. pCol := pbeam.MakePrivateFromStruct(s, col, spec, "VisitorID") visitHours := pbeam.ParDo(s, extractVisitHour, pCol) visitsPerHour = pbeam.Count(s, visitHours, pbeam.CountParams{ Epsilon: epsilon / 3, Delta: delta / 3, MaxPartitionsContributed: 1, // Visitors can visit the restaurant once (one hour) a day MaxValue: 1, // Visitors can visit the restaurant once within an hour }) hourToTimeSpent := pbeam.ParDo(s, extractVisitHourAndTimeSpentFn, pCol) meanTimeSpent = pbeam.MeanPerKey(s, hourToTimeSpent, pbeam.MeanParams{ Epsilon: epsilon / 3, Delta: delta / 3, MaxPartitionsContributed: 1, // Visitors can visit the restaurant once (one hour) a day MaxContributionsPerPartition: 1, // Visitors can visit the restaurant once within an hour MinValue: 0, // Minimum time spent per user (in mins) MaxValue: 60, // Maximum time spent per user (in mins) }) hourToMoneySpent := pbeam.ParDo(s, extractVisitHourAndTimeSpentFn, pCol) revenues = pbeam.SumPerKey(s, hourToMoneySpent, pbeam.SumParams{ Epsilon: epsilon / 3, Delta: delta / 3, MaxPartitionsContributed: 1, // Visitors can visit the restaurant once (one hour) a day MinValue: 0, // Minimum money spent per user (in euros) MaxValue: 40, // Maximum money spent per user (in euros) }) return visitsPerHour, meanTimeSpent, revenues }
privacy-on-beam/codelab/multiple.go
0.816589
0.453867
multiple.go
starcoder
package types

import (
	"sort"
	"strings"
)

// Type represents a data type.
type Type struct {
	// Kind will not be set when Ref is supplied.
	Kind Kind `json:",omitempty"`

	// Name is used as the descriptive name for the object.
	Name string `json:",omitempty"`

	// Element is used when Kind is an Array or Map.
	Element *Type `json:",omitempty"`

	// Argument and Returns are used when Kind is a Func. Either may be nil.
	Arguments, Returns []*Type `json:",omitempty"`

	// Properties is used for KindInterface
	Properties map[string]*Type `json:",omitempty"`

	// Ref is used when types are flattened for a Registry. It will point to an
	// index of another type in the registry.
	Ref string `json:",omitempty"`
}

// ToArray creates an array type whose element type is the receiver.
func (t *Type) ToArray() *Type {
	return NewArray(t)
}

// ToMap creates a map type whose element type is the receiver.
func (t *Type) ToMap() *Type {
	return NewMap(t)
}

// Copy returns a deep copy of the type. A nil receiver yields nil, so callers
// never have to nil-check before copying.
func (t *Type) Copy() *Type {
	if t == nil {
		return nil
	}
	dup := &Type{
		Kind:    t.Kind,
		Name:    t.Name,
		Ref:     t.Ref,
		Element: t.Element.Copy(),
	}
	for _, arg := range t.Arguments {
		dup.Arguments = append(dup.Arguments, arg.Copy())
	}
	for _, ret := range t.Returns {
		dup.Returns = append(dup.Returns, ret.Copy())
	}
	if len(t.Properties) > 0 {
		dup.Properties = map[string]*Type{}
		for name, prop := range t.Properties {
			dup.Properties[name] = prop.Copy()
		}
	}
	return dup
}

// String renders the type in source-like notation, e.g. "[]string" or
// "func(number) (string, bool)". A nil type renders as the empty string;
// kinds without special syntax fall back to the type's Name.
func (t *Type) String() string {
	if t == nil {
		return ""
	}
	switch t.Kind {
	case KindAny:
		return "any"
	case KindBool:
		return "bool"
	case KindChar:
		return "char"
	case KindData:
		return "data"
	case KindNumber:
		return "number"
	case KindString:
		return "string"
	case KindArray:
		return "[]" + t.Element.String()
	case KindMap:
		return "{}" + t.Element.String()
	case KindFunc:
		params := make([]string, 0, len(t.Arguments))
		for _, arg := range t.Arguments {
			params = append(params, arg.String())
		}
		out := "func(" + strings.Join(params, ", ") + ")"
		if n := len(t.Returns); n == 1 {
			out += " " + t.Returns[0].String()
		} else if n > 1 {
			// Multiple results are parenthesised, Go-style.
			rets := make([]string, 0, n)
			for _, ret := range t.Returns {
				rets = append(rets, ret.String())
			}
			out += " (" + strings.Join(rets, ", ") + ")"
		}
		return out
	}
	return t.Name
}

// NewArray returns an array type with the given element type.
func NewArray(element *Type) *Type {
	return &Type{
		Kind:    KindArray,
		Element: element,
	}
}

// NewMap returns a map type with the given element type.
func NewMap(element *Type) *Type {
	return &Type{
		Kind:    KindMap,
		Element: element,
	}
}

// NewRef returns a type that refers to another registry entry by index.
func NewRef(ref string) *Type {
	return &Type{
		Ref: ref,
	}
}

// NewFunc returns a function type with the given argument and return types.
func NewFunc(args, returns []*Type) *Type {
	return &Type{
		Kind:      KindFunc,
		Arguments: args,
		Returns:   returns,
	}
}

// TypeFromString decodes a syntactically-valid type from a string.
func TypeFromString(s string) *Type {
	parsed, _ := parseType(tokenize(s), 0)
	return parsed
}

// NewUnresolvedInterface returns an interface type known only by name, to be
// resolved later.
func NewUnresolvedInterface(name string) *Type {
	return &Type{
		Kind: KindUnresolvedInterface,
		Name: name,
	}
}

// NewInterface returns a resolved interface type with the given properties.
func NewInterface(name string, properties map[string]*Type) *Type {
	return &Type{
		Kind:       KindResolvedInterface,
		Name:       name,
		Properties: properties,
	}
}

// Interface renders the type's properties in interface-literal notation, e.g.
// "{ name string; run(number) bool }". An empty property set renders as "{}".
func (t *Type) Interface() string {
	names := t.SortedPropertyNames()
	if len(names) == 0 {
		return "{}"
	}
	parts := make([]string, 0, len(names))
	for _, key := range names {
		prop := t.Properties[key]
		if prop.Kind == KindFunc {
			// Methods drop the "func" keyword: "name(args) ret".
			parts = append(parts, key+strings.TrimPrefix(prop.String(), "func"))
		} else {
			parts = append(parts, key+" "+prop.String())
		}
	}
	return "{ " + strings.Join(parts, "; ") + " }"
}

// SortedPropertyNames returns the property names in sorted order, or nil when
// the type has no properties (avoiding an allocation in the common case).
func (t *Type) SortedPropertyNames() []string {
	if len(t.Properties) == 0 {
		return nil
	}
	names := make([]string, 0, len(t.Properties))
	for name := range t.Properties {
		names = append(names, name)
	}
	sort.Strings(names)
	return names
}
types/type.go
0.739986
0.418578
type.go
starcoder
package ewkb

import (
	"database/sql/driver"
	"errors"
	"fmt"

	"github.com/kcasctiv/go-ewkb/geo"
)

// Polygon presents Polygon geometry object.
// It embeds header (byte order, type flags, SRID) and wraps the ring data.
type Polygon struct {
	header
	poly geo.Polygon
}

// NewPolygon returns new Polygon,
// created from geometry base and coords data.
// The wkbType combines the Z/M/SRID flags from b with PolygonType.
func NewPolygon(b Base, poly geo.Polygon) Polygon {
	return Polygon{
		header: header{
			byteOrder: b.ByteOrder(),
			wkbType: getFlags(
				b.HasZ(),
				b.HasM(),
				b.HasSRID(),
			) | PolygonType,
			srid: b.SRID(),
		},
		poly: poly,
	}
}

// Ring returns ring with specified index.
// Index bounds are not checked here; presumably geo.Polygon panics or handles
// out-of-range indices — verify against the geo package.
func (p *Polygon) Ring(idx int) geo.MultiPoint {
	return p.poly.Ring(idx)
}

// Len returns count of rings.
func (p *Polygon) Len() int {
	return p.poly.Len()
}

// String returns WKT/EWKT geometry representation,
// e.g. "SRID=4326;POLYGON((...))". The "M" suffix is added only for
// measured-but-not-3D geometries (M without Z).
func (p *Polygon) String() string {
	var s string
	if p.HasSRID() {
		s = fmt.Sprintf("SRID=%d;", p.srid)
	}
	s += "POLYGON"
	if !p.HasZ() && p.HasM() {
		s += "M"
	}
	return s + printPolygon(p, p.HasZ(), p.HasM())
}

// Scan implements sql.Scanner interface.
func (p *Polygon) Scan(src interface{}) error {
	return scanGeometry(src, p)
}

// Value implements sql driver.Valuer interface.
// The geometry is passed to the database in its WKT/EWKT text form.
func (p *Polygon) Value() (driver.Value, error) {
	return p.String(), nil
}

// UnmarshalBinary implements encoding.BinaryUnmarshaler interface.
// It rejects data whose header declares any geometry type other than Polygon.
func (p *Polygon) UnmarshalBinary(data []byte) error {
	h, byteOrder, offset := readHeader(data)
	if h.Type() != PolygonType {
		return errors.New("not expected geometry type")
	}

	p.header = h

	var err error
	// The point reader is chosen from the Z/M flags encoded in wkbType.
	p.poly, _, err = readPolygon(data[offset:], byteOrder, getReadPointFunc(h.wkbType))
	return err
}

// MarshalBinary implements encoding.BinaryMarshaler interface.
// The buffer is sized up front from the header and ring dimensions, then
// filled in place: header first, polygon payload at the returned offset.
func (p *Polygon) MarshalBinary() ([]byte, error) {
	size := headerSize(p.HasSRID()) + polygonSize(p, p.HasZ(), p.HasM())
	b := make([]byte, size)

	byteOrder := getBinaryByteOrder(p.ByteOrder())
	offset := writeHeader(p, p.Type(), byteOrder, p.HasSRID(), b)
	writePolygon(p, byteOrder, p.HasZ(), p.HasM(), b[offset:])
	return b, nil
}

// printPolygon renders the ring list as WKT text: "(ring,ring,...)",
// or " EMPTY" for a polygon with no rings.
func printPolygon(p geo.Polygon, hasZ, hasM bool) string {
	if p.Len() == 0 {
		return " EMPTY"
	}

	var s string
	for idx := 0; idx < p.Len(); idx++ {
		s += printMultiPoint(p.Ring(idx), hasZ, hasM) + ","
	}

	// Trim the trailing comma and wrap the ring list in parentheses.
	return "(" + s[:len(s)-1] + ")"
}
polygon.go
0.786295
0.40807
polygon.go
starcoder
package counts

import (
	"math"
)

// Count32 is a count of something, capped at math.MaxUint32.
// The maximum value doubles as an overflow marker: once a Count32 saturates it
// stays saturated.
type Count32 uint32

// NewCount32 initializes a Count32 from a uint64, capped at
// math.MaxUint32.
func NewCount32(n uint64) Count32 {
	if n > math.MaxUint32 {
		return Count32(math.MaxUint32)
	}
	return Count32(n)
}

// ToUint64 returns the value of `n` as a `uint64`. If the value has
// overflowed, it returns `(math.MaxUint32, true)`.
func (n Count32) ToUint64() (uint64, bool) {
	return uint64(n), n == math.MaxUint32
}

// Plus returns the sum of two Count32s, capped at math.MaxUint32.
func (n1 Count32) Plus(n2 Count32) Count32 {
	n := n1 + n2
	if n < n1 {
		// Wraparound detected: the true sum exceeds the range, so saturate.
		return math.MaxUint32
	}
	return n
}

// Increment increases `*n1` by `n2`, capped at math.MaxUint32.
func (n1 *Count32) Increment(n2 Count32) {
	*n1 = n1.Plus(n2)
}

// AdjustMaxIfNecessary adjusts `*n1` to be `max(*n1, n2)`. Return
// true iff `n2` was greater than `*n1`.
func (n1 *Count32) AdjustMaxIfNecessary(n2 Count32) bool {
	if n2 <= *n1 {
		return false
	}
	*n1 = n2
	return true
}

// AdjustMaxIfPossible adjusts `*n1` to be `max(*n1, n2)`. Return true
// iff `n2` was greater than or equal to `*n1`.
func (n1 *Count32) AdjustMaxIfPossible(n2 Count32) bool {
	if n2 < *n1 {
		return false
	}
	*n1 = n2
	return true
}

// Count64 is a count of something, capped at math.MaxUint64.
// As with Count32, the maximum value marks saturation.
type Count64 uint64

// NewCount64 initializes a Count64 from a uint64.
func NewCount64(n uint64) Count64 {
	return Count64(n)
}

// ToUint64 returns the value of `n` as a `uint64`. If the value has
// overflowed, it returns `(math.MaxUint64, true)`.
func (n Count64) ToUint64() (uint64, bool) {
	return uint64(n), n == math.MaxUint64
}

// Plus returns the sum of two Count64s, capped at math.MaxUint64.
func (n1 Count64) Plus(n2 Count64) Count64 {
	n := n1 + n2
	if n < n1 {
		// Wraparound detected: the true sum exceeds the range, so saturate.
		return math.MaxUint64
	}
	return n
}

// Increment increases `*n1` by `n2`, capped at math.MaxUint64.
func (n1 *Count64) Increment(n2 Count64) {
	*n1 = n1.Plus(n2)
}

// AdjustMaxIfNecessary adjusts `*n1` to be `max(*n1, n2)`. Return
// true iff `n2` was greater than `*n1`.
func (n1 *Count64) AdjustMaxIfNecessary(n2 Count64) bool {
	if n2 <= *n1 {
		return false
	}
	*n1 = n2
	return true
}

// AdjustMaxIfPossible adjusts `*n1` to be `max(*n1, n2)`. Return true
// iff `n2` was greater than or equal to `*n1`.
//
// BUG FIX: this previously used `n2 <= *n1`, returning false on equality,
// which contradicted both this documented contract and the Count32
// counterpart (which uses `<`).
func (n1 *Count64) AdjustMaxIfPossible(n2 Count64) bool {
	if n2 < *n1 {
		return false
	}
	*n1 = n2
	return true
}
counts/counts.go
0.851645
0.520679
counts.go
starcoder
package loerr func longestCollection(collections ...[]interface{}) int { max := 0 for _, collection := range collections { if len(collection) > max { max = len(collection) } } return max } // Zip2 creates a slice of grouped elements, the first of which contains the first elements // of the given arrays, the second of which contains the second elements of the given arrays, and so on. // When collections have different size, the Tuple attributes are filled with zero value. func Zip2[A any, B any](a []A, b []B) []Tuple2[A, B] { size := Max[int]([]int{len(a), len(b)}) result := make([]Tuple2[A, B], 0, size) for index := 0; index < size; index++ { _a, _ := Nth[A](a, index) _b, _ := Nth[B](b, index) result = append(result, Tuple2[A, B]{ A: _a, B: _b, }) } return result } // Zip3 creates a slice of grouped elements, the first of which contains the first elements // of the given arrays, the second of which contains the second elements of the given arrays, and so on. // When collections have different size, the Tuple attributes are filled with zero value. func Zip3[A any, B any, C any](a []A, b []B, c []C) []Tuple3[A, B, C] { size := Max[int]([]int{len(a), len(b), len(c)}) result := make([]Tuple3[A, B, C], 0, size) for index := 0; index < size; index++ { _a, _ := Nth[A](a, index) _b, _ := Nth[B](b, index) _c, _ := Nth[C](c, index) result = append(result, Tuple3[A, B, C]{ A: _a, B: _b, C: _c, }) } return result } // Zip4 creates a slice of grouped elements, the first of which contains the first elements // of the given arrays, the second of which contains the second elements of the given arrays, and so on. // When collections have different size, the Tuple attributes are filled with zero value. 
func Zip4[A any, B any, C any, D any](a []A, b []B, c []C, d []D) []Tuple4[A, B, C, D] { size := Max[int]([]int{len(a), len(b), len(c), len(d)}) result := make([]Tuple4[A, B, C, D], 0, size) for index := 0; index < size; index++ { _a, _ := Nth[A](a, index) _b, _ := Nth[B](b, index) _c, _ := Nth[C](c, index) _d, _ := Nth[D](d, index) result = append(result, Tuple4[A, B, C, D]{ A: _a, B: _b, C: _c, D: _d, }) } return result } // Zip5 creates a slice of grouped elements, the first of which contains the first elements // of the given arrays, the second of which contains the second elements of the given arrays, and so on. // When collections have different size, the Tuple attributes are filled with zero value. func Zip5[A any, B any, C any, D any, E any](a []A, b []B, c []C, d []D, e []E) []Tuple5[A, B, C, D, E] { size := Max[int]([]int{len(a), len(b), len(c), len(d), len(e)}) result := make([]Tuple5[A, B, C, D, E], 0, size) for index := 0; index < size; index++ { _a, _ := Nth[A](a, index) _b, _ := Nth[B](b, index) _c, _ := Nth[C](c, index) _d, _ := Nth[D](d, index) _e, _ := Nth[E](e, index) result = append(result, Tuple5[A, B, C, D, E]{ A: _a, B: _b, C: _c, D: _d, E: _e, }) } return result } // Zip6 creates a slice of grouped elements, the first of which contains the first elements // of the given arrays, the second of which contains the second elements of the given arrays, and so on. // When collections have different size, the Tuple attributes are filled with zero value. 
func Zip6[A any, B any, C any, D any, E any, F any](a []A, b []B, c []C, d []D, e []E, f []F) []Tuple6[A, B, C, D, E, F] { size := Max[int]([]int{len(a), len(b), len(c), len(d), len(e), len(f)}) result := make([]Tuple6[A, B, C, D, E, F], 0, size) for index := 0; index < size; index++ { _a, _ := Nth[A](a, index) _b, _ := Nth[B](b, index) _c, _ := Nth[C](c, index) _d, _ := Nth[D](d, index) _e, _ := Nth[E](e, index) _f, _ := Nth[F](f, index) result = append(result, Tuple6[A, B, C, D, E, F]{ A: _a, B: _b, C: _c, D: _d, E: _e, F: _f, }) } return result } // Zip7 creates a slice of grouped elements, the first of which contains the first elements // of the given arrays, the second of which contains the second elements of the given arrays, and so on. // When collections have different size, the Tuple attributes are filled with zero value. func Zip7[A any, B any, C any, D any, E any, F any, G any](a []A, b []B, c []C, d []D, e []E, f []F, g []G) []Tuple7[A, B, C, D, E, F, G] { size := Max[int]([]int{len(a), len(b), len(c), len(d), len(e), len(f), len(g)}) result := make([]Tuple7[A, B, C, D, E, F, G], 0, size) for index := 0; index < size; index++ { _a, _ := Nth[A](a, index) _b, _ := Nth[B](b, index) _c, _ := Nth[C](c, index) _d, _ := Nth[D](d, index) _e, _ := Nth[E](e, index) _f, _ := Nth[F](f, index) _g, _ := Nth[G](g, index) result = append(result, Tuple7[A, B, C, D, E, F, G]{ A: _a, B: _b, C: _c, D: _d, E: _e, F: _f, G: _g, }) } return result } // Zip8 creates a slice of grouped elements, the first of which contains the first elements // of the given arrays, the second of which contains the second elements of the given arrays, and so on. // When collections have different size, the Tuple attributes are filled with zero value. 
func Zip8[A any, B any, C any, D any, E any, F any, G any, H any](a []A, b []B, c []C, d []D, e []E, f []F, g []G, h []H) []Tuple8[A, B, C, D, E, F, G, H] { size := Max[int]([]int{len(a), len(b), len(c), len(d), len(e), len(f), len(g), len(h)}) result := make([]Tuple8[A, B, C, D, E, F, G, H], 0, size) for index := 0; index < size; index++ { _a, _ := Nth[A](a, index) _b, _ := Nth[B](b, index) _c, _ := Nth[C](c, index) _d, _ := Nth[D](d, index) _e, _ := Nth[E](e, index) _f, _ := Nth[F](f, index) _g, _ := Nth[G](g, index) _h, _ := Nth[H](h, index) result = append(result, Tuple8[A, B, C, D, E, F, G, H]{ A: _a, B: _b, C: _c, D: _d, E: _e, F: _f, G: _g, H: _h, }) } return result } // Zip9 creates a slice of grouped elements, the first of which contains the first elements // of the given arrays, the second of which contains the second elements of the given arrays, and so on. // When collections have different size, the Tuple attributes are filled with zero value. func Zip9[A any, B any, C any, D any, E any, F any, G any, H any, I any](a []A, b []B, c []C, d []D, e []E, f []F, g []G, h []H, i []I) []Tuple9[A, B, C, D, E, F, G, H, I] { size := Max[int]([]int{len(a), len(b), len(c), len(d), len(e), len(f), len(g), len(h), len(i)}) result := make([]Tuple9[A, B, C, D, E, F, G, H, I], 0, size) for index := 0; index < size; index++ { _a, _ := Nth[A](a, index) _b, _ := Nth[B](b, index) _c, _ := Nth[C](c, index) _d, _ := Nth[D](d, index) _e, _ := Nth[E](e, index) _f, _ := Nth[F](f, index) _g, _ := Nth[G](g, index) _h, _ := Nth[H](h, index) _i, _ := Nth[I](i, index) result = append(result, Tuple9[A, B, C, D, E, F, G, H, I]{ A: _a, B: _b, C: _c, D: _d, E: _e, F: _f, G: _g, H: _h, I: _i, }) } return result } // Unzip2 accepts an array of grouped elements and creates an array regrouping the elements // to their pre-zip configuration. 
func Unzip2[A any, B any](tuples []Tuple2[A, B]) ([]A, []B) { size := len(tuples) r1 := make([]A, 0, size) r2 := make([]B, 0, size) for _, tuple := range tuples { r1 = append(r1, tuple.A) r2 = append(r2, tuple.B) } return r1, r2 } // Unzip3 accepts an array of grouped elements and creates an array regrouping the elements // to their pre-zip configuration. func Unzip3[A any, B any, C any](tuples []Tuple3[A, B, C]) ([]A, []B, []C) { size := len(tuples) r1 := make([]A, 0, size) r2 := make([]B, 0, size) r3 := make([]C, 0, size) for _, tuple := range tuples { r1 = append(r1, tuple.A) r2 = append(r2, tuple.B) r3 = append(r3, tuple.C) } return r1, r2, r3 } // Unzip4 accepts an array of grouped elements and creates an array regrouping the elements // to their pre-zip configuration. func Unzip4[A any, B any, C any, D any](tuples []Tuple4[A, B, C, D]) ([]A, []B, []C, []D) { size := len(tuples) r1 := make([]A, 0, size) r2 := make([]B, 0, size) r3 := make([]C, 0, size) r4 := make([]D, 0, size) for _, tuple := range tuples { r1 = append(r1, tuple.A) r2 = append(r2, tuple.B) r3 = append(r3, tuple.C) r4 = append(r4, tuple.D) } return r1, r2, r3, r4 } // Unzip5 accepts an array of grouped elements and creates an array regrouping the elements // to their pre-zip configuration. func Unzip5[A any, B any, C any, D any, E any](tuples []Tuple5[A, B, C, D, E]) ([]A, []B, []C, []D, []E) { size := len(tuples) r1 := make([]A, 0, size) r2 := make([]B, 0, size) r3 := make([]C, 0, size) r4 := make([]D, 0, size) r5 := make([]E, 0, size) for _, tuple := range tuples { r1 = append(r1, tuple.A) r2 = append(r2, tuple.B) r3 = append(r3, tuple.C) r4 = append(r4, tuple.D) r5 = append(r5, tuple.E) } return r1, r2, r3, r4, r5 } // Unzip6 accepts an array of grouped elements and creates an array regrouping the elements // to their pre-zip configuration. 
func Unzip6[A any, B any, C any, D any, E any, F any](tuples []Tuple6[A, B, C, D, E, F]) ([]A, []B, []C, []D, []E, []F) { size := len(tuples) r1 := make([]A, 0, size) r2 := make([]B, 0, size) r3 := make([]C, 0, size) r4 := make([]D, 0, size) r5 := make([]E, 0, size) r6 := make([]F, 0, size) for _, tuple := range tuples { r1 = append(r1, tuple.A) r2 = append(r2, tuple.B) r3 = append(r3, tuple.C) r4 = append(r4, tuple.D) r5 = append(r5, tuple.E) r6 = append(r6, tuple.F) } return r1, r2, r3, r4, r5, r6 } // Unzip7 accepts an array of grouped elements and creates an array regrouping the elements // to their pre-zip configuration. func Unzip7[A any, B any, C any, D any, E any, F any, G any](tuples []Tuple7[A, B, C, D, E, F, G]) ([]A, []B, []C, []D, []E, []F, []G) { size := len(tuples) r1 := make([]A, 0, size) r2 := make([]B, 0, size) r3 := make([]C, 0, size) r4 := make([]D, 0, size) r5 := make([]E, 0, size) r6 := make([]F, 0, size) r7 := make([]G, 0, size) for _, tuple := range tuples { r1 = append(r1, tuple.A) r2 = append(r2, tuple.B) r3 = append(r3, tuple.C) r4 = append(r4, tuple.D) r5 = append(r5, tuple.E) r6 = append(r6, tuple.F) r7 = append(r7, tuple.G) } return r1, r2, r3, r4, r5, r6, r7 } // Unzip8 accepts an array of grouped elements and creates an array regrouping the elements // to their pre-zip configuration. 
func Unzip8[A any, B any, C any, D any, E any, F any, G any, H any](tuples []Tuple8[A, B, C, D, E, F, G, H]) ([]A, []B, []C, []D, []E, []F, []G, []H) { size := len(tuples) r1 := make([]A, 0, size) r2 := make([]B, 0, size) r3 := make([]C, 0, size) r4 := make([]D, 0, size) r5 := make([]E, 0, size) r6 := make([]F, 0, size) r7 := make([]G, 0, size) r8 := make([]H, 0, size) for _, tuple := range tuples { r1 = append(r1, tuple.A) r2 = append(r2, tuple.B) r3 = append(r3, tuple.C) r4 = append(r4, tuple.D) r5 = append(r5, tuple.E) r6 = append(r6, tuple.F) r7 = append(r7, tuple.G) r8 = append(r8, tuple.H) } return r1, r2, r3, r4, r5, r6, r7, r8 } // Unzip9 accepts an array of grouped elements and creates an array regrouping the elements // to their pre-zip configuration. func Unzip9[A any, B any, C any, D any, E any, F any, G any, H any, I any](tuples []Tuple9[A, B, C, D, E, F, G, H, I]) ([]A, []B, []C, []D, []E, []F, []G, []H, []I) { size := len(tuples) r1 := make([]A, 0, size) r2 := make([]B, 0, size) r3 := make([]C, 0, size) r4 := make([]D, 0, size) r5 := make([]E, 0, size) r6 := make([]F, 0, size) r7 := make([]G, 0, size) r8 := make([]H, 0, size) r9 := make([]I, 0, size) for _, tuple := range tuples { r1 = append(r1, tuple.A) r2 = append(r2, tuple.B) r3 = append(r3, tuple.C) r4 = append(r4, tuple.D) r5 = append(r5, tuple.E) r6 = append(r6, tuple.F) r7 = append(r7, tuple.G) r8 = append(r8, tuple.H) r9 = append(r9, tuple.I) } return r1, r2, r3, r4, r5, r6, r7, r8, r9 }
tuples.go
0.810516
0.809012
tuples.go
starcoder
package iso20022

// Provides the details of the interest calculation.
type InterestCalculation4 struct {

	// Indicates the calculation date of the interest amount.
	CalculationDate *ISODate `xml:"ClctnDt"`

	// Provides the identification of the collateral account.
	CollateralAccountIdentification *CollateralAccount2 `xml:"CollAcctId,omitempty"`

	// Provides the collateral amount used to calculate the interest amount and includes debit/short or credit/long positions.
	EffectivePrincipalAmount *AmountAndDirection20 `xml:"FctvPrncplAmt"`

	// Provides the collateral amount posted before taking into account the collateral movement amount.
	PrincipalAmount *AmountAndDirection20 `xml:"PrncplAmt,omitempty"`

	// Provides the additional amount of collateral posted between two calculation dates.
	MovementAmount *AmountAndDirection20 `xml:"MvmntAmt,omitempty"`

	// Indicates the number of days for the calculation of the interest.
	NumberOfDays *Number `xml:"NbOfDays,omitempty"`

	// Specifies the percentage charged for the use of an amount of money, usually expressed at an annual rate. The interest rate is the ratio of the amount of interest paid during a certain period of time compared to the principal amount of the interest bearing financial instrument.
	EffectiveRate *PercentageRate `xml:"FctvRate"`

	// Specifies the percentage charged for the use of an amount of money, usually expressed at an annual rate. The interest rate is the ratio of the amount of interest paid during a certain period of time compared to the principal amount of the interest bearing financial instrument.
	InterestRate *PercentageRate `xml:"IntrstRate,omitempty"`

	// Indicates the differences in interest rates.
	Spread *PercentageRate `xml:"Sprd,omitempty"`

	// Specifies the amount of money representing an interest payment.
	AccruedInterestAmount *AmountAndDirection20 `xml:"AcrdIntrstAmt"`

	// Specifies the total amount of money representing an interest payment.
	AggregatedInterestAmount *ActiveCurrencyAndAmount `xml:"AggtdIntrstAmt,omitempty"`
}

// SetCalculationDate stores value as the calculation date of the interest amount.
func (i *InterestCalculation4) SetCalculationDate(value string) {
	i.CalculationDate = (*ISODate)(&value)
}

// AddCollateralAccountIdentification allocates and returns the collateral account
// identification so the caller can populate it.
func (i *InterestCalculation4) AddCollateralAccountIdentification() *CollateralAccount2 {
	i.CollateralAccountIdentification = new(CollateralAccount2)
	return i.CollateralAccountIdentification
}

// AddEffectivePrincipalAmount allocates and returns the effective principal amount.
func (i *InterestCalculation4) AddEffectivePrincipalAmount() *AmountAndDirection20 {
	i.EffectivePrincipalAmount = new(AmountAndDirection20)
	return i.EffectivePrincipalAmount
}

// AddPrincipalAmount allocates and returns the principal amount.
func (i *InterestCalculation4) AddPrincipalAmount() *AmountAndDirection20 {
	i.PrincipalAmount = new(AmountAndDirection20)
	return i.PrincipalAmount
}

// AddMovementAmount allocates and returns the movement amount.
func (i *InterestCalculation4) AddMovementAmount() *AmountAndDirection20 {
	i.MovementAmount = new(AmountAndDirection20)
	return i.MovementAmount
}

// SetNumberOfDays stores value as the number of days used for the interest calculation.
func (i *InterestCalculation4) SetNumberOfDays(value string) {
	i.NumberOfDays = (*Number)(&value)
}

// SetEffectiveRate stores value as the effective interest rate.
func (i *InterestCalculation4) SetEffectiveRate(value string) {
	i.EffectiveRate = (*PercentageRate)(&value)
}

// SetInterestRate stores value as the interest rate.
func (i *InterestCalculation4) SetInterestRate(value string) {
	i.InterestRate = (*PercentageRate)(&value)
}

// SetSpread stores value as the interest-rate spread.
func (i *InterestCalculation4) SetSpread(value string) {
	i.Spread = (*PercentageRate)(&value)
}

// AddAccruedInterestAmount allocates and returns the accrued interest amount.
func (i *InterestCalculation4) AddAccruedInterestAmount() *AmountAndDirection20 {
	i.AccruedInterestAmount = new(AmountAndDirection20)
	return i.AccruedInterestAmount
}

// SetAggregatedInterestAmount stores the aggregated interest amount with its currency.
func (i *InterestCalculation4) SetAggregatedInterestAmount(value, currency string) {
	i.AggregatedInterestAmount = NewActiveCurrencyAndAmount(value, currency)
}
InterestCalculation4.go
0.860882
0.736756
InterestCalculation4.go
starcoder
package mapstriface

import (
	"fmt"
	"time"

	"github.com/elastic/beats/libbeat/common"
	"github.com/elastic/beats/libbeat/logp"
	"github.com/elastic/beats/metricbeat/schema"
)

// ConvMap converts a nested map[string]interface{} found under Key into a
// sub-event by applying Schema to it.
type ConvMap struct {
	Key    string        // The key in the data map
	Schema schema.Schema // The schema describing how to convert the sub-map
}

// Map drills down in the data dictionary by using the key.
// If the value under Key is missing or not a map, an error is logged and the
// event is left unchanged (best-effort, no error is returned).
func (convMap ConvMap) Map(key string, event common.MapStr, data map[string]interface{}) {
	subData, ok := data[convMap.Key].(map[string]interface{})
	if !ok {
		logp.Err("Error accessing sub-dictionary `%s`", convMap.Key)
		return
	}
	subEvent := common.MapStr{}
	convMap.Schema.ApplyTo(subEvent, subData)
	event[key] = subEvent
}

// Dict creates a ConvMap that applies schema s to the sub-map found under key.
func Dict(key string, s schema.Schema) ConvMap {
	return ConvMap{Key: key, Schema: s}
}

// toStr fetches data[key] and asserts it to a string.
func toStr(key string, data map[string]interface{}) (interface{}, error) {
	emptyIface, exists := data[key]
	if !exists {
		return "", fmt.Errorf("Key not found")
	}
	str, ok := emptyIface.(string)
	if !ok {
		return "", fmt.Errorf("Expected string, found %T", emptyIface)
	}
	return str, nil
}

// Str creates a schema.Conv object for converting strings
func Str(key string, opts ...schema.SchemaOption) schema.Conv {
	return schema.SetOptions(schema.Conv{Key: key, Func: toStr}, opts)
}

// toBool fetches data[key] and asserts it to a bool.
func toBool(key string, data map[string]interface{}) (interface{}, error) {
	emptyIface, exists := data[key]
	if !exists {
		return false, fmt.Errorf("Key not found")
	}
	boolean, ok := emptyIface.(bool)
	if !ok {
		return false, fmt.Errorf("Expected bool, found %T", emptyIface)
	}
	return boolean, nil
}

// Bool creates a Conv object for converting booleans
func Bool(key string, opts ...schema.SchemaOption) schema.Conv {
	return schema.SetOptions(schema.Conv{Key: key, Func: toBool}, opts)
}

// toInteger fetches data[key] and normalizes int64, int, or float64 values to
// int64. Any other dynamic type is an error.
func toInteger(key string, data map[string]interface{}) (interface{}, error) {
	emptyIface, exists := data[key]
	if !exists {
		return 0, fmt.Errorf("Key not found")
	}
	// Bound type switch avoids re-asserting the value inside each case.
	switch val := emptyIface.(type) {
	case int64:
		return val, nil
	case int:
		return int64(val), nil
	case float64:
		return int64(val), nil
	default:
		return 0, fmt.Errorf("Expected integer, found %T", emptyIface)
	}
}

// Int creates a Conv object for converting integers. Acceptable input
// types are int64, int, and float64.
func Int(key string, opts ...schema.SchemaOption) schema.Conv {
	return schema.SetOptions(schema.Conv{Key: key, Func: toInteger}, opts)
}

// toTime fetches data[key] and asserts it to a time.Time, returning it as a
// common.Time. The Unix-epoch zero time is returned alongside any error.
func toTime(key string, data map[string]interface{}) (interface{}, error) {
	emptyIface, exists := data[key]
	if !exists {
		return common.Time(time.Unix(0, 0)), fmt.Errorf("Key not found")
	}
	ts, ok := emptyIface.(time.Time)
	if !ok {
		return common.Time(time.Unix(0, 0)), fmt.Errorf("Expected date, found %T", emptyIface)
	}
	return common.Time(ts), nil
}

// Time creates a Conv object for converting Time objects.
func Time(key string, opts ...schema.SchemaOption) schema.Conv {
	return schema.SetOptions(schema.Conv{Key: key, Func: toTime}, opts)
}
vendor/github.com/elastic/beats/metricbeat/schema/mapstriface/mapstriface.go
0.788827
0.411939
mapstriface.go
starcoder
package swagger

// JsonWebKey is a JSON Web Key (JWK) as defined by RFC 7517: a JSON object
// representing a cryptographic key. Which members are populated depends on
// the key type ("kty"): EC keys use crv/x/y(/d), RSA keys use n/e(/d/p/q/dp/dq/qi),
// and symmetric keys use k.
type JsonWebKey struct {

	// The \"alg\" (algorithm) parameter identifies the algorithm intended for use with the key. The values used should either be registered in the IANA \"JSON Web Signature and Encryption Algorithms\" registry established by [JWA] or be a value that contains a Collision- Resistant Name.
	Alg string `json:"alg"`

	// Curve name for EC keys (JWA \"crv\" parameter).
	Crv string `json:"crv,omitempty"`

	// Private key material (JWA \"d\" parameter: EC private key or RSA private exponent).
	D string `json:"d,omitempty"`

	// RSA CRT exponent of the first factor (JWA \"dp\" parameter).
	Dp string `json:"dp,omitempty"`

	// RSA CRT exponent of the second factor (JWA \"dq\" parameter).
	Dq string `json:"dq,omitempty"`

	// RSA public exponent (JWA \"e\" parameter).
	E string `json:"e,omitempty"`

	// Symmetric key value (JWA \"k\" parameter).
	K string `json:"k,omitempty"`

	// The \"kid\" (key ID) parameter is used to match a specific key. This is used, for instance, to choose among a set of keys within a JWK Set during key rollover. The structure of the \"kid\" value is unspecified. When \"kid\" values are used within a JWK Set, different keys within the JWK Set SHOULD use distinct \"kid\" values. (One example in which different keys might use the same \"kid\" value is if they have different \"kty\" (key type) values but are considered to be equivalent alternatives by the application using them.) The \"kid\" value is a case-sensitive string.
	Kid string `json:"kid"`

	// The \"kty\" (key type) parameter identifies the cryptographic algorithm family used with the key, such as \"RSA\" or \"EC\". \"kty\" values should either be registered in the IANA \"JSON Web Key Types\" registry established by [JWA] or be a value that contains a Collision- Resistant Name. The \"kty\" value is a case-sensitive string.
	Kty string `json:"kty"`

	// RSA modulus (JWA \"n\" parameter).
	N string `json:"n,omitempty"`

	// RSA first prime factor (JWA \"p\" parameter).
	P string `json:"p,omitempty"`

	// RSA second prime factor (JWA \"q\" parameter).
	Q string `json:"q,omitempty"`

	// RSA CRT coefficient of the second factor (JWA \"qi\" parameter).
	Qi string `json:"qi,omitempty"`

	// Use (\"public key use\") identifies the intended use of the public key. The \"use\" parameter is employed to indicate whether a public key is used for encrypting data or verifying the signature on data. Values are commonly \"sig\" (signature) or \"enc\" (encryption).
	Use string `json:"use"`

	// EC x coordinate (JWA \"x\" parameter).
	X string `json:"x,omitempty"`

	// The \"x5c\" (X.509 certificate chain) parameter contains a chain of one or more PKIX certificates [RFC5280]. The certificate chain is represented as a JSON array of certificate value strings. Each string in the array is a base64-encoded (Section 4 of [RFC4648] -- not base64url-encoded) DER [ITU.X690.1994] PKIX certificate value. The PKIX certificate containing the key value MUST be the first certificate.
	X5c []string `json:"x5c,omitempty"`

	// EC y coordinate (JWA \"y\" parameter).
	Y string `json:"y,omitempty"`
}
sdk/go/hydra/swagger/json_web_key.go
0.83612
0.487063
json_web_key.go
starcoder
package p460

import (
	"fmt"
)

/**
Design and implement a data structure for Least Frequently Used (LFU) cache.
It should support the following operations: get and put.

get(key) - Get the value (will always be positive) of the key if the key exists in the cache,
otherwise return -1.
put(key, value) - Set or insert the value if the key is not already present.
When the cache reaches its capacity, it should invalidate the least frequently used item
before inserting a new item. For the purpose of this problem, when there is a tie
(i.e., two or more keys that have the same frequency), the least recently used key would be evicted.

Follow up:
Could you do both operations in O(1) time complexity?
*/

// LinkMapNode is a node of the per-frequency doubly linked list.
// val is a cache key; recency order within one frequency is head (oldest)
// to tail (newest).
type LinkMapNode struct {
	val  int
	prev *LinkMapNode
	next *LinkMapNode
}

// DLinkNode is a node of the frequency list: all keys with access count
// `count`, kept in their own key list (with sentinel head/tail) plus a
// key -> list-node index for O(1) removal. Frequency nodes themselves form
// a doubly linked list ordered by ascending count.
type DLinkNode struct {
	count int
	keys  map[int]*LinkMapNode
	head  *LinkMapNode
	tail  *LinkMapNode
	prev  *DLinkNode
	next  *DLinkNode
}

// newNode builds a frequency node for `count` with empty sentinel-delimited
// key list.
func newNode(count int) *DLinkNode {
	h := &LinkMapNode{}
	t := &LinkMapNode{}
	h.next = t
	t.prev = h
	return &DLinkNode{
		count: count,
		keys:  make(map[int]*LinkMapNode),
		head:  h,
		tail:  t}
}

// addKey appends key at the tail of this frequency's key list (most
// recently used position) and indexes it.
func (n *DLinkNode) addKey(key int) {
	ln := &LinkMapNode{val: key}
	ln.next = n.tail
	ln.prev = n.tail.prev
	n.tail.prev.next = ln
	n.tail.prev = ln
	n.keys[key] = ln
}

// removeKey unlinks key from this frequency's key list and drops it from
// the index. The key must be present.
func (n *DLinkNode) removeKey(key int) {
	node := n.keys[key]
	node.next.prev = node.prev
	node.prev.next = node.next
	delete(n.keys, key)
}

// LFUCache implements an O(1) LFU cache:
// hmValue holds key -> value, hmNode holds key -> its current frequency node,
// and head/tail are sentinels of the frequency list (ascending count).
type LFUCache struct {
	cap     int
	hmNode  map[int]*DLinkNode
	hmValue map[int]int
	head    *DLinkNode
	tail    *DLinkNode
}

// Constructor creates an LFUCache with the given capacity.
func Constructor(capacity int) LFUCache {
	h := &DLinkNode{}
	t := &DLinkNode{}
	h.next = t
	t.prev = h
	return LFUCache{
		cap:     capacity,
		hmNode:  make(map[int]*DLinkNode),
		hmValue: make(map[int]int),
		head:    h,
		tail:    t,
	}
}

// Get returns the value for key, bumping its access frequency, or -1 if the
// key is absent.
func (this *LFUCache) Get(key int) int {
	ans := -1
	if v, ok := this.hmValue[key]; ok {
		this.increaseCount(key)
		ans = v
	}
	return ans
}

// increaseCount moves key from its current frequency node to the node for
// count+1, creating that node if it does not yet exist, and removes the old
// frequency node if it became empty.
func (this *LFUCache) increaseCount(key int) {
	node := this.hmNode[key]
	node.removeKey(key)
	if node.next.count == node.count+1 {
		// Target frequency node already exists.
		node.next.addKey(key)
	} else {
		// Splice a new frequency node for count+1 right after `node`.
		tmp := newNode(node.count + 1)
		tmp.addKey(key)
		tmp.prev = node
		tmp.next = node.next
		node.next = tmp
		tmp.next.prev = tmp
	}
	// In both branches node.next is now the node holding `key`.
	this.hmNode[key] = node.next
	if len(node.keys) == 0 {
		removeNode(node)
	}
}

// removeNode unlinks a frequency node from the frequency list.
func removeNode(node *DLinkNode) {
	node.prev.next = node.next
	node.next.prev = node.prev
}

// removeOld evicts the least frequently used key; among keys with the lowest
// frequency the least recently used one (head of that node's key list) goes.
func (this *LFUCache) removeOld() {
	if this.head.next == this.tail {
		return
	}
	old := this.head.next.head.next.val
	this.head.next.removeKey(old)
	if len(this.head.next.keys) == 0 {
		removeNode(this.head.next)
	}
	delete(this.hmValue, old)
	delete(this.hmNode, old)
}

// insertAfterHead splices a frequency node directly after the head sentinel.
func (this *LFUCache) insertAfterHead(node *DLinkNode) {
	node.next = this.head.next
	node.prev = this.head
	this.head.next.prev = node
	this.head.next = node
}

// addAfterHead registers a brand-new key at frequency 0, reusing the
// existing count-0 node if one is already at the front.
func (this *LFUCache) addAfterHead(key int) {
	if this.head.next == this.tail || this.head.next.count > 0 {
		node := newNode(0)
		node.addKey(key)
		this.insertAfterHead(node)
	} else {
		this.head.next.addKey(key)
	}
	this.hmNode[key] = this.head.next
}

// Put inserts or updates key, evicting the LFU entry first when the cache is
// full and the key is new. Every Put (like Get) counts as one access, so a
// new key ends up at frequency 1 via the trailing increaseCount.
func (this *LFUCache) Put(key int, value int) {
	if this.cap == 0 {
		return
	}
	if _, ok := this.hmValue[key]; ok {
		this.hmValue[key] = value
	} else {
		if len(this.hmValue) < this.cap {
			this.hmValue[key] = value
		} else {
			this.removeOld()
			this.hmValue[key] = value
		}
		this.addAfterHead(key)
	}
	this.increaseCount(key)
}

// printList dumps the cache contents and frequency structure; debugging aid only.
func (this *LFUCache) printList() {
	fmt.Println(this.hmValue)
	for iter := this.head.next; iter != this.tail; iter = iter.next {
		fmt.Print("count ", iter.count, " ", "keys ")
		for cur := iter.head.next; cur != iter.tail; cur = cur.next {
			fmt.Print(cur.val, " ")
		}
		fmt.Print(", ")
	}
	fmt.Println()
}

/**
 * Your LRUCache object will be instantiated and called as such:
 * obj := Constructor(capacity);
 * param_1 := obj.Get(key);
 * obj.Put(key,value);
 */
algorithms/p460/460.go
0.606265
0.525186
460.go
starcoder
package go2linq

import (
	"sort"
	"sync"
)

// Reimplementing LINQ to Objects: Part 16 – IntersectErr (and build fiddling)
// https://codeblog.jonskeet.uk/2010/12/30/reimplementing-linq-to-objects-part-16-intersect-and-build-fiddling/
// https://docs.microsoft.com/dotnet/api/system.linq.enumerable.intersect

// Intersect produces the set intersection of two sequences by using reflect.DeepEqual as equality comparer.
// 'second' is enumerated immediately.
// 'first' and 'second' must not be based on the same Enumerator, otherwise use IntersectSelf instead.
func Intersect[Source any](first, second Enumerator[Source]) (Enumerator[Source], error) {
	if first == nil || second == nil {
		return nil, ErrNilSource
	}
	// nil comparer makes IntersectEq fall back to reflect.DeepEqual.
	return IntersectEq(first, second, nil)
}

// IntersectMust is like Intersect but panics in case of error.
func IntersectMust[Source any](first, second Enumerator[Source]) Enumerator[Source] {
	r, err := Intersect(first, second)
	if err != nil {
		panic(err)
	}
	return r
}

// IntersectSelf produces the set intersection of two sequences by using reflect.DeepEqual as equality comparer.
// 'second' is enumerated immediately.
// 'first' and 'second' may be based on the same Enumerator.
// 'first' must have real Reset method.
func IntersectSelf[Source any](first, second Enumerator[Source]) (Enumerator[Source], error) {
	if first == nil || second == nil {
		return nil, ErrNilSource
	}
	// Materialize 'second' first, then rewind 'first'; this makes sharing one
	// underlying Enumerator safe.
	sl2 := Slice(second)
	first.Reset()
	return Intersect(first, NewOnSliceEn(sl2...))
}

// IntersectSelfMust is like IntersectSelf but panics in case of error.
func IntersectSelfMust[Source any](first, second Enumerator[Source]) Enumerator[Source] {
	r, err := IntersectSelf(first, second)
	if err != nil {
		panic(err)
	}
	return r
}

// IntersectEq produces the set intersection of two sequences by using the specified Equaler to compare values.
// If 'eq' is nil reflect.DeepEqual is used.
// 'second' is enumerated immediately.
// Order of elements in the result corresponds to the order of elements in 'first'.
// 'first' and 'second' must not be based on the same Enumerator, otherwise use IntersectEqSelf instead.
func IntersectEq[Source any](first, second Enumerator[Source], eq Equaler[Source]) (Enumerator[Source], error) {
	if first == nil || second == nil {
		return nil, ErrNilSource
	}
	if eq == nil {
		eq = EqualerFunc[Source](DeepEqual[Source])
	}
	// dsl2 (distinct elements of 'second') is built lazily on the first
	// MoveNext call; sync.Once keeps that one-time. NOTE(review): rst resets
	// only d1, not the once, so dsl2 survives a Reset — confirm intended.
	var once sync.Once
	var dsl2 []Source
	d1 := DistinctEqMust(first, eq)
	var c Source
	return OnFunc[Source]{
		mvNxt: func() bool {
			once.Do(func() { dsl2 = Slice(DistinctEqMust(second, eq)) })
			// Advance through distinct 'first' until an element present in
			// dsl2 is found (linear membership scan via elInElelEq).
			for d1.MoveNext() {
				c = d1.Current()
				if elInElelEq(c, dsl2, eq) {
					return true
				}
			}
			return false
		},
		crrnt: func() Source { return c },
		rst:   func() { d1.Reset() },
	}, nil
}

// IntersectEqMust is like IntersectEq but panics in case of error.
func IntersectEqMust[Source any](first, second Enumerator[Source], eq Equaler[Source]) Enumerator[Source] {
	r, err := IntersectEq(first, second, eq)
	if err != nil {
		panic(err)
	}
	return r
}

// IntersectEqSelf produces the set intersection of two sequences by using the specified Equaler to compare values.
// If 'eq' is nil reflect.DeepEqual is used.
// 'second' is enumerated immediately.
// Order of elements in the result corresponds to the order of elements in 'first'.
// 'first' and 'second' may be based on the same Enumerator.
// 'first' must have real Reset method.
func IntersectEqSelf[Source any](first, second Enumerator[Source], eq Equaler[Source]) (Enumerator[Source], error) {
	if first == nil || second == nil {
		return nil, ErrNilSource
	}
	sl2 := Slice(second)
	first.Reset()
	return IntersectEq(first, NewOnSliceEn(sl2...), eq)
}

// IntersectEqSelfMust is like IntersectEqSelf but panics in case of error.
func IntersectEqSelfMust[Source any](first, second Enumerator[Source], eq Equaler[Source]) Enumerator[Source] {
	r, err := IntersectEqSelf(first, second, eq)
	if err != nil {
		panic(err)
	}
	return r
}

// IntersectCmp produces the set intersection of two sequences by using the specified Comparer to compare values.
// (See DistinctCmp function.)
// 'second' is enumerated immediately.
// Order of elements in the result corresponds to the order of elements in 'first'.
// 'first' and 'second' must not be based on the same Enumerator, otherwise use IntersectCmpSelf instead.
func IntersectCmp[Source any](first, second Enumerator[Source], comparer Comparer[Source]) (Enumerator[Source], error) {
	if first == nil || second == nil {
		return nil, ErrNilSource
	}
	if comparer == nil {
		return nil, ErrNilComparer
	}
	var once sync.Once
	var dsl2 []Source
	d1 := DistinctCmpMust(first, comparer)
	var c Source
	return OnFunc[Source]{
		mvNxt: func() bool {
			// Unlike IntersectEq, dsl2 is sorted here so membership tests can
			// use the comparer (elInElelCmp) against an ordered slice.
			once.Do(func() {
				dsl2 = Slice(DistinctCmpMust(second, comparer))
				sort.Slice(dsl2, func(i, j int) bool { return comparer.Compare(dsl2[i], dsl2[j]) < 0 })
			})
			for d1.MoveNext() {
				c = d1.Current()
				if elInElelCmp(c, dsl2, comparer) {
					return true
				}
			}
			return false
		},
		crrnt: func() Source { return c },
		rst:   func() { d1.Reset() },
	}, nil
}

// IntersectCmpMust is like IntersectCmp but panics in case of error.
func IntersectCmpMust[Source any](first, second Enumerator[Source], comparer Comparer[Source]) Enumerator[Source] {
	r, err := IntersectCmp(first, second, comparer)
	if err != nil {
		panic(err)
	}
	return r
}

// IntersectCmpSelf produces the set intersection of two sequences by using the specified Comparer to compare values.
// (See DistinctCmp function.)
// 'second' is enumerated immediately.
// Order of elements in the result corresponds to the order of elements in 'first'.
// 'first' and 'second' may be based on the same Enumerator.
// 'first' must have real Reset method.
func IntersectCmpSelf[Source any](first, second Enumerator[Source], comparer Comparer[Source]) (Enumerator[Source], error) {
	if first == nil || second == nil {
		return nil, ErrNilSource
	}
	if comparer == nil {
		return nil, ErrNilComparer
	}
	sl2 := Slice(second)
	first.Reset()
	return IntersectCmp(first, NewOnSliceEn(sl2...), comparer)
}

// IntersectCmpSelfMust is like IntersectCmpSelf but panics in case of error.
func IntersectCmpSelfMust[Source any](first, second Enumerator[Source], comparer Comparer[Source]) Enumerator[Source] {
	r, err := IntersectCmpSelf(first, second, comparer)
	if err != nil {
		panic(err)
	}
	return r
}
intersect.go
0.804751
0.50177
intersect.go
starcoder
package graph

import (
	i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55 "github.com/microsoft/kiota/abstractions/go/serialization"
)

// CertificateAuthority is a Kiota-generated model. All accessors guard
// against a nil receiver, matching the generator's conventions.
type CertificateAuthority struct {
	// Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
	additionalData map[string]interface{}
	// Required. The base64 encoded string representing the public certificate.
	certificate []byte
	// The URL of the certificate revocation list.
	certificateRevocationListUrl *string
	// The URL contains the list of all revoked certificates since the last time a full certificate revocation list was created.
	deltaCertificateRevocationListUrl *string
	// Required. true if the trusted certificate is a root authority, false if the trusted certificate is an intermediate authority.
	isRootAuthority *bool
	// The issuer of the certificate, calculated from the certificate value. Read-only.
	issuer *string
	// The subject key identifier of the certificate, calculated from the certificate value. Read-only.
	issuerSki *string
}

// NewCertificateAuthority instantiates a new certificateAuthority and sets the default values.
func NewCertificateAuthority() *CertificateAuthority {
	m := &CertificateAuthority{}
	m.SetAdditionalData(make(map[string]interface{}))
	return m
}

// GetAdditionalData gets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
func (m *CertificateAuthority) GetAdditionalData() map[string]interface{} {
	if m == nil {
		return nil
	} else {
		return m.additionalData
	}
}

// GetCertificate gets the certificate property value. Required. The base64 encoded string representing the public certificate.
func (m *CertificateAuthority) GetCertificate() []byte {
	if m == nil {
		return nil
	} else {
		return m.certificate
	}
}

// GetCertificateRevocationListUrl gets the certificateRevocationListUrl property value. The URL of the certificate revocation list.
func (m *CertificateAuthority) GetCertificateRevocationListUrl() *string {
	if m == nil {
		return nil
	} else {
		return m.certificateRevocationListUrl
	}
}

// GetDeltaCertificateRevocationListUrl gets the deltaCertificateRevocationListUrl property value. The URL contains the list of all revoked certificates since the last time a full certificate revocation list was created.
func (m *CertificateAuthority) GetDeltaCertificateRevocationListUrl() *string {
	if m == nil {
		return nil
	} else {
		return m.deltaCertificateRevocationListUrl
	}
}

// GetIsRootAuthority gets the isRootAuthority property value. Required. true if the trusted certificate is a root authority, false if the trusted certificate is an intermediate authority.
func (m *CertificateAuthority) GetIsRootAuthority() *bool {
	if m == nil {
		return nil
	} else {
		return m.isRootAuthority
	}
}

// GetIssuer gets the issuer property value. The issuer of the certificate, calculated from the certificate value. Read-only.
func (m *CertificateAuthority) GetIssuer() *string {
	if m == nil {
		return nil
	} else {
		return m.issuer
	}
}

// GetIssuerSki gets the issuerSki property value. The subject key identifier of the certificate, calculated from the certificate value. Read-only.
func (m *CertificateAuthority) GetIssuerSki() *string {
	if m == nil {
		return nil
	} else {
		return m.issuerSki
	}
}

// GetFieldDeserializers the deserialization information for the current model
func (m *CertificateAuthority) GetFieldDeserializers() map[string]func(interface{}, i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error {
	res := make(map[string]func(interface{}, i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error)
	res["certificate"] = func(o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error {
		val, err := n.GetByteArrayValue()
		if err != nil {
			return err
		}
		if val != nil {
			m.SetCertificate(val)
		}
		return nil
	}
	res["certificateRevocationListUrl"] = func(o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error {
		val, err := n.GetStringValue()
		if err != nil {
			return err
		}
		if val != nil {
			m.SetCertificateRevocationListUrl(val)
		}
		return nil
	}
	res["deltaCertificateRevocationListUrl"] = func(o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error {
		val, err := n.GetStringValue()
		if err != nil {
			return err
		}
		if val != nil {
			m.SetDeltaCertificateRevocationListUrl(val)
		}
		return nil
	}
	res["isRootAuthority"] = func(o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error {
		val, err := n.GetBoolValue()
		if err != nil {
			return err
		}
		if val != nil {
			m.SetIsRootAuthority(val)
		}
		return nil
	}
	res["issuer"] = func(o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error {
		val, err := n.GetStringValue()
		if err != nil {
			return err
		}
		if val != nil {
			m.SetIssuer(val)
		}
		return nil
	}
	res["issuerSki"] = func(o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error {
		val, err := n.GetStringValue()
		if err != nil {
			return err
		}
		if val != nil {
			m.SetIssuerSki(val)
		}
		return nil
	}
	return res
}

// IsNil reports whether the receiver is nil.
func (m *CertificateAuthority) IsNil() bool {
	return m == nil
}

// Serialize serializes information the current object
func (m *CertificateAuthority) Serialize(writer i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.SerializationWriter) error {
	{
		err := writer.WriteByteArrayValue("certificate", m.GetCertificate())
		if err != nil {
			return err
		}
	}
	{
		err := writer.WriteStringValue("certificateRevocationListUrl", m.GetCertificateRevocationListUrl())
		if err != nil {
			return err
		}
	}
	{
		err := writer.WriteStringValue("deltaCertificateRevocationListUrl", m.GetDeltaCertificateRevocationListUrl())
		if err != nil {
			return err
		}
	}
	{
		err := writer.WriteBoolValue("isRootAuthority", m.GetIsRootAuthority())
		if err != nil {
			return err
		}
	}
	{
		err := writer.WriteStringValue("issuer", m.GetIssuer())
		if err != nil {
			return err
		}
	}
	{
		err := writer.WriteStringValue("issuerSki", m.GetIssuerSki())
		if err != nil {
			return err
		}
	}
	{
		err := writer.WriteAdditionalData(m.GetAdditionalData())
		if err != nil {
			return err
		}
	}
	return nil
}

// SetAdditionalData sets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
func (m *CertificateAuthority) SetAdditionalData(value map[string]interface{}) {
	if m != nil {
		m.additionalData = value
	}
}

// SetCertificate sets the certificate property value. Required. The base64 encoded string representing the public certificate.
func (m *CertificateAuthority) SetCertificate(value []byte) {
	if m != nil {
		m.certificate = value
	}
}

// SetCertificateRevocationListUrl sets the certificateRevocationListUrl property value. The URL of the certificate revocation list.
func (m *CertificateAuthority) SetCertificateRevocationListUrl(value *string) {
	if m != nil {
		m.certificateRevocationListUrl = value
	}
}

// SetDeltaCertificateRevocationListUrl sets the deltaCertificateRevocationListUrl property value. The URL contains the list of all revoked certificates since the last time a full certificate revocation list was created.
func (m *CertificateAuthority) SetDeltaCertificateRevocationListUrl(value *string) {
	if m != nil {
		m.deltaCertificateRevocationListUrl = value
	}
}

// SetIsRootAuthority sets the isRootAuthority property value. Required. true if the trusted certificate is a root authority, false if the trusted certificate is an intermediate authority.
func (m *CertificateAuthority) SetIsRootAuthority(value *bool) {
	if m != nil {
		m.isRootAuthority = value
	}
}

// SetIssuer sets the issuer property value. The issuer of the certificate, calculated from the certificate value. Read-only.
func (m *CertificateAuthority) SetIssuer(value *string) {
	if m != nil {
		m.issuer = value
	}
}

// SetIssuerSki sets the issuerSki property value. The subject key identifier of the certificate, calculated from the certificate value. Read-only.
func (m *CertificateAuthority) SetIssuerSki(value *string) {
	if m != nil {
		m.issuerSki = value
	}
}
models/microsoft/graph/certificate_authority.go
0.575469
0.451327
certificate_authority.go
starcoder
package ffnn

import (
	"fmt"
	"math"

	"gonum.org/v1/gonum/mat"

	"../utils/matrices"
	"../utils/matrices/ops"
)

// FFLayer is one fully connected feed-forward layer. It owns its weights and
// biases plus preallocated scratch matrices (i, wi, z, a) so a forward pass
// allocates nothing.
type FFLayer struct {
	// Layer Spec

	// Size of the input this layer requires
	inputSize int
	// Size of the output this layer brings
	outputSize int
	// The f function (will also hold its derivative)
	f Activator

	// Layer State

	// Meaning: current weights
	// Size: outputSize x inputSize
	w *mat.Dense
	// Meaning: current biases
	// Size: outputSize x 1
	b *mat.Dense
	// Meaning: current inputs
	// Size: inputSize x 1
	i *mat.Dense
	// Meaning: linear composition wi
	// Size: outputSize x 1
	wi *mat.Dense
	// Meaning: linear composition wi + b
	// Size: outputSize x 1
	z *mat.Dense
	// Meaning: activations f(z)
	// Size: outputSize x 1
	a *mat.Dense
}

// makeFFLayer assembles a layer from already-constructed weights and biases,
// allocating the scratch column vectors.
func makeFFLayer(inputSize, outputSize int, activator Activator, w *mat.Dense, b *mat.Dense) *FFLayer {
	// Creating an undefined input layer, aside from forcing it to 1 in the last element (the bias)
	i := mat.NewDense(inputSize, 1, nil)
	// Creating an undefined weighted i layer
	wi := mat.NewDense(outputSize, 1, nil)
	// Creating an undefined weighted i layer
	z := mat.NewDense(outputSize, 1, nil)
	// Creating an undefined a layer
	a := mat.NewDense(outputSize, 1, nil)
	return &FFLayer{
		inputSize:  inputSize,
		outputSize: outputSize,
		f:          activator,
		w:          w,
		b:          b,
		i:          i,
		wi:         wi,
		z:          z,
		a:          a,
	}
}

// newFFLayer creates a layer with weights and biases drawn uniformly from
// [-1/sqrt(inputSize), 1/sqrt(inputSize)].
func newFFLayer(inputSize, outputSize int, activator Activator) *FFLayer {
	bound := 1.0 / math.Sqrt(float64(inputSize))
	// Creating a noisy w/b layer
	w := matrices.Noise(outputSize, inputSize, bound)
	b := matrices.Noise(outputSize, 1, bound)
	return makeFFLayer(inputSize, outputSize, activator, w, b)
}

// unMarshall decodes a gonum-serialized matrix and verifies it has the
// expected dimensions; element names the matrix ("weights"/"biases") in the
// error message.
func unMarshall(expectedRows, expectedColumns int, data []byte, element string) (*mat.Dense, error) {
	m := mat.NewDense(expectedRows, expectedColumns, nil)
	// UnmarshalBinary requires an empty (reset) receiver.
	m.Reset()
	if err := m.UnmarshalBinary(data); err != nil {
		return nil, err
	}
	rows, columns := m.Dims()
	if rows != expectedRows || columns != expectedColumns {
		// fmt.Errorf instead of errors.New(fmt.Sprintf(...)) — same message,
		// idiomatic form (staticcheck S1028).
		return nil, fmt.Errorf(
			"layer %v size mismatch between requested and unmarshaled", element,
		)
	}
	return m, nil
}

// decodeFFLayer reconstructs a layer from serialized weight and bias blobs.
func decodeFFLayer(inputSize, outputSize int, activator Activator, wData, bData []byte) (*FFLayer, error) {
	// Loading the w from memory
	var w, b *mat.Dense
	var err error
	if w, err = unMarshall(outputSize, inputSize, wData, "weights"); err != nil {
		return nil, err
	}
	if b, err = unMarshall(outputSize, 1, bData, "biases"); err != nil {
		return nil, err
	}
	return makeFFLayer(inputSize, outputSize, activator, w, b), nil
}

// encodeFFLayer serializes the weights and biases; it returns the two blobs
// followed by their respective marshal errors.
func encodeFFLayer(layer *FFLayer) ([]byte, []byte, error, error) {
	w, errW := layer.w.MarshalBinary()
	b, errB := layer.b.MarshalBinary()
	return w, b, errW, errB
}

// InputSize returns the number of inputs this layer expects.
func (layer *FFLayer) InputSize() int {
	return layer.inputSize
}

// OutputSize returns the number of outputs this layer produces.
func (layer *FFLayer) OutputSize() int {
	return layer.outputSize
}

// Weights returns the weight matrix (outputSize x inputSize).
func (layer *FFLayer) Weights() *mat.Dense {
	return layer.w
}

// Inputs returns the most recently forwarded input column.
func (layer *FFLayer) Inputs() *mat.Dense {
	return layer.i
}

// WeightedInputs returns z = w·i + b from the last forward pass.
func (layer *FFLayer) WeightedInputs() *mat.Dense {
	return layer.z
}

// Activator returns the layer's activation function.
func (layer *FFLayer) Activator() Activator {
	return layer.f
}

// Activations returns a = f(z) from the last forward pass.
func (layer *FFLayer) Activations() *mat.Dense {
	return layer.a
}

// Forward runs one forward pass, storing intermediates in the layer's
// scratch matrices.
func (layer *FFLayer) Forward(inputs *mat.Dense) {
	// `i` will be a column, compatible with (inputSize, 1).
	// Fill the new i.
	layer.i.Copy(inputs)
	// Compute the a = f(wi + b).
	layer.f.Base(ops.Add(ops.Mul(layer.w, layer.i, layer.wi), layer.b, layer.z), layer.a)
}
ffnn/layers.go
0.678433
0.4165
layers.go
starcoder
package mdp

import (
	"fmt"
	"math"
	"math/rand"

	"github.com/misteroda/go-rl/base"
)

const (
	pqSize        = 10000
	minTDError    = 0.001
	minSDError    = 0.0001
	maxIterations = 1000000
	numAnnealing  = 7
)

// ValueIterator represents Value Iteration algorithm.
type ValueIterator struct {
	model       *Model
	V           []float64 // state values
	Q           []float64 // state-action values
	Policy      []float64
	isAbsorbing []bool
	alpha       float64 // temperature parameter for softmax operator
}

// NewValueIterator constructs a ValueIterator instance from a given Model.
// V is indexed by state index; Q and Policy are indexed by action index.
func NewValueIterator(model *Model) *ValueIterator {
	vi := ValueIterator{
		model:       model,
		V:           make([]float64, len(model.states)),
		Q:           make([]float64, len(model.actions)),
		Policy:      make([]float64, len(model.actions)),
		isAbsorbing: make([]bool, len(model.states)),
	}
	return &vi
}

// SetAbsorbingState sets absorbing states in the Model.
// An absorbing state represents the state which terminates an episode.
// It reports false when stateID is unknown to the model.
func (vi *ValueIterator) SetAbsorbingState(stateID int) bool {
	state, ok := vi.model.StateOf[stateID]
	if !ok {
		return false
	}
	vi.isAbsorbing[state.index] = true
	return true
}

// SetAlpha sets a value of alpha (0 means a hard max / greedy policy).
func (vi *ValueIterator) SetAlpha(alpha float64) {
	vi.alpha = alpha
}

// InitAbsorbingState initializes absorbing states.
func (vi *ValueIterator) InitAbsorbingState() {
	for i := range vi.isAbsorbing {
		vi.isAbsorbing[i] = false
	}
}

// Init initializes ValueIterator instance.
func (vi *ValueIterator) Init() {
	base.Vector(vi.V).Fill(0.0)
	base.Vector(vi.Q).Fill(0.0)
	base.Vector(vi.Policy).Fill(0.0)
	vi.InitAbsorbingState()
	vi.alpha = 0.0
}

// ToActions returns an action space of a given state as []*Action.
// Absorbing states get an empty slice (episode ends there).
func (vi *ValueIterator) ToActions(s *State) []*Action {
	if vi.isAbsorbing[s.index] {
		return s.actions[:0]
	}
	return s.actions
}

// RunValueIteration runs Value Iteration algorithm and updates V and Q.
// It uses prioritized sweeping: states whose TD error exceeds a threshold
// are pushed to a priority queue and their predecessors re-backed-up. The
// threshold starts at minTDError * 2^(numAnnealing-1) and is halved each of
// the numAnnealing outer passes.
func (vi *ValueIterator) RunValueIteration() {
	m := vi.model
	pq := NewPriorityQueue(len(m.states))
	var td float64
	var j int
	tdThreshold := minTDError * math.Pow(2, float64(numAnnealing-1))
	for i := 0; i < numAnnealing; i++ {
		// Seed the queue with a full sweep over all states.
		for stateIdx := range m.states {
			s := &m.states[stateIdx]
			td = vi.bellmanBackup(s)
			if td > tdThreshold && pq.Size() < pqSize {
				pq.Push(stateIdx, td)
			}
		}
		// Drain the queue, propagating value changes to successor states.
		for j = 0; j < maxIterations && pq.Size() > 0; j++ {
			idx, _ := pq.Pop()
			for _, tr := range m.states[idx].transitions {
				s := tr.action.state
				td = vi.bellmanBackup(s)
				if td > tdThreshold && pq.Size() < pqSize {
					pq.Push(s.index, td)
				}
			}
		}
		// fmt.Printf("Annealing #%d { iters: %d, theta: %.3f, td: %.3f }\n", i, j, tdThreshold, td)
		tdThreshold *= 0.5
	}
}

// UpdatePolicy updates policy based on current state-action values.
// With alpha == 0 the policy is greedy (all mass on the best action);
// otherwise it is a softmax over Q with temperature alpha, computed in the
// numerically stable form exp((Q - maxQ)/alpha).
func (vi *ValueIterator) UpdatePolicy() {
	m := vi.model
	for stateIdx := range m.states {
		s := &m.states[stateIdx]
		actions := vi.ToActions(s)
		if len(actions) == 0 {
			continue
		}
		bestAction := vi.bestAction(actions)
		if vi.alpha == 0 {
			for _, a := range actions {
				vi.Policy[a.index] = 0
			}
			vi.Policy[bestAction.index] = 1
		} else {
			maxQ := vi.Q[bestAction.index]
			z := 0.0
			for _, a := range actions {
				vi.Policy[a.index] = math.Exp((vi.Q[a.index] - maxQ) / vi.alpha)
				z += vi.Policy[a.index]
			}
			// NOTE(review): normalizes over s.actions while the weights were
			// computed over actions; equivalent here since ToActions returns
			// s.actions whenever it is non-empty — confirm if that invariant
			// ever changes.
			for _, a := range s.actions {
				vi.Policy[a.index] /= z
			}
		}
	}
}

// bellmanBackup performs one (undiscounted) Bellman backup on s: refreshes
// Q for its actions, sets V[s] to the softmax over them, and returns the
// absolute TD error. Absorbing states are left untouched (error 0).
func (vi *ValueIterator) bellmanBackup(s *State) (tdError float64) {
	actions := vi.ToActions(s)
	if len(actions) == 0 {
		return
	}
	for _, a := range actions {
		vi.Q[a.index] = a.transition.r + vi.V[a.transition.state.index]
	}
	v := vi.softMax(s.actions)
	tdError = math.Abs(v - vi.V[s.index])
	vi.V[s.index] = v
	return
}

// softMax returns max Q over actions when alpha == 0, otherwise the
// log-sum-exp soft maximum alpha*log(sum exp((Q-maxQ)/alpha)) + maxQ
// (shifted by maxQ for numerical stability). Panics on an empty slice.
func (vi *ValueIterator) softMax(actions []*Action) float64 {
	if len(actions) == 0 {
		panic("model: zero slice length")
	}
	maxQ := vi.Q[actions[0].index]
	for _, a := range actions[1:] {
		if maxQ < vi.Q[a.index] {
			maxQ = vi.Q[a.index]
		}
	}
	if vi.alpha == 0 {
		return maxQ
	}
	var lse float64
	for _, a := range actions {
		lse += math.Exp((vi.Q[a.index] - maxQ) / vi.alpha)
	}
	return vi.alpha*math.Log(lse) + maxQ
}

// bestAction returns the action with the highest Q value (first wins on
// ties). Panics on an empty slice.
func (vi *ValueIterator) bestAction(actions []*Action) *Action {
	if len(actions) == 0 {
		panic("model: zero slice length")
	}
	maxA := actions[0]
	maxQ := vi.Q[maxA.index]
	for _, a := range actions {
		if maxQ < vi.Q[a.index] {
			maxA = a
			maxQ = vi.Q[a.index]
		}
	}
	return maxA
}

// sampleAction draws an action from the current Policy distribution by
// inverse-CDF sampling; the last action absorbs any rounding remainder.
// Panics on an empty slice.
func (vi *ValueIterator) sampleAction(actions []*Action) *Action {
	if len(actions) == 0 {
		panic("model: zero slice length")
	}
	cumP := 0.0
	r := rand.Float64()
	for _, a := range actions[:len(actions)-1] {
		cumP += vi.Policy[a.index]
		if r < cumP {
			return a
		}
	}
	return actions[len(actions)-1]
}

// GenerateTrajectory generates trajectory of given a start and a goal state based on current policy.
// It returns ok == false when either ID is unknown, a dead-end is reached,
// or the goal is not hit within maxSteps.
func (vi *ValueIterator) GenerateTrajectory(startID, goalID, maxSteps int) (tr []int, ok bool) {
	m := vi.model
	startState, ok := m.StateOf[startID]
	if !ok {
		return
	}
	goalState, ok := m.StateOf[goalID]
	if !ok {
		return
	}
	s := startState
	tr = append(tr, s.id)
	for i := 0; i < maxSteps; i++ {
		actions := vi.ToActions(s)
		// NOTE(review): this checks s.actions but samples from the filtered
		// `actions`; if s is absorbing (actions empty, s.actions non-empty)
		// sampleAction will panic — confirm whether the check should be
		// len(actions) == 0.
		if len(s.actions) == 0 {
			ok = false
			return
		}
		s = vi.sampleAction(actions).transition.state
		tr = append(tr, s.id)
		if s == goalState {
			ok = true
			return
		}
	}
	ok = false
	return
}

// computeStateDist sums the given per-action distribution over all actions
// leading into s, skipping predecessors that are absorbing states.
func (vi *ValueIterator) computeStateDist(s *State, actionDist []float64) float64 {
	d := 0.0
	for _, tr := range s.transitions {
		if vi.isAbsorbing[tr.action.state.index] {
			continue
		}
		d += actionDist[tr.action.index]
	}
	return d
}

// StateActionVisitation computes state-action visitation frequency distribution based on the current policy.
func (vi *ValueIterator) StateActionVisitation(initialStateDist []float64) ([]float64, []float64) {
	m := vi.model
	stateDist := make([]float64, len(m.states))
	copy(stateDist, initialStateDist)
	actionDist := make([]float64, len(m.actions))
	pq := NewPriorityQueue(len(stateDist))
	var sd float64
	var j, idx int
	// Anneal the convergence threshold from loose to tight, halving each round.
	sdThreshold := minSDError * math.Pow(2, float64(numAnnealing-1))
	for i := 0; i < numAnnealing; i++ {
		if i == 0 {
			// First round: seed the queue with every state carrying initial mass.
			for idx, sd = range initialStateDist {
				if sd > 0 {
					pq.Push(idx, sd)
				}
			}
		} else {
			// Later rounds: re-seed by sweeping all states and re-propagating
			// mass from any state above the (now tighter) threshold.
			for stateIdx := range stateDist {
				s := &m.states[stateIdx]
				actions := vi.ToActions(s)
				if len(actions) == 0 || stateDist[stateIdx] < sdThreshold {
					continue
				}
				for _, a := range actions {
					actionDist[a.index] = stateDist[stateIdx] * vi.Policy[a.index]
					nextState := a.transition.state
					idx := nextState.index
					// sd becomes the absolute change in the successor's mass.
					sd = stateDist[idx]
					stateDist[idx] = initialStateDist[idx] + vi.computeStateDist(nextState, actionDist)
					sd = math.Abs(sd - stateDist[idx])
					// NOTE(review): this pushes stateIdx (the source) while the
					// sweep below pushes idx (the successor whose mass changed).
					// Confirm the asymmetry is intended — it looks like it
					// should be idx here too.
					if sd > sdThreshold && pq.Size() < pqSize {
						pq.Push(stateIdx, sd)
					}
				}
			}
		}
		// Prioritized sweep: repeatedly propagate mass out of the state with
		// the largest pending change until the queue drains.
		for j = 0; j < maxIterations && pq.Size() > 0; j++ {
			stateIdx, _ := pq.Pop()
			s := &m.states[stateIdx]
			actions := vi.ToActions(s)
			if len(actions) == 0 || stateDist[stateIdx] < sdThreshold {
				continue
			}
			for _, a := range actions {
				actionDist[a.index] = stateDist[stateIdx] * vi.Policy[a.index]
				nextState := a.transition.state
				idx := nextState.index
				sd = stateDist[idx]
				stateDist[idx] = initialStateDist[idx] + vi.computeStateDist(nextState, actionDist)
				sd = math.Abs(sd - stateDist[idx])
				if sd > sdThreshold && pq.Size() < pqSize {
					pq.Push(idx, sd)
				}
			}
		}
		sdThreshold *= 0.5
	}
	return stateDist, actionDist
}

// String renders the current state values, Q values and policy for debugging.
func (vi *ValueIterator) String() string {
	s := fmt.Sprintf("V: %v\n", vi.V)
	s += fmt.Sprintf("Q: %v\n", vi.Q)
	s += fmt.Sprintf("Policy: %v\n", vi.Policy)
	return s
}
mdp/value_iterator.go
0.660939
0.405243
value_iterator.go
starcoder
package r3

import (
	"fmt"
	"math/big"
)

const (
	// prec is the number of bits of precision to use for the Float values.
	// To keep things simple, we use the maximum allowable precision on big
	// values. This allows us to handle all values we expect in the s2 library.
	prec = big.MaxPrec
)

// define some commonly referenced values.
var (
	precise0 = precInt(0)
	precise1 = precInt(1)
)

// precStr wraps the conversion from a string into a big.Float. For results that
// actually can be represented exactly, this should only be used on values that
// are integer multiples of integer powers of 2.
func precStr(s string) *big.Float {
	// Explicitly ignoring the bool return for this usage.
	f, _ := new(big.Float).SetPrec(prec).SetString(s)
	return f
}

// precInt returns an exact full-precision big.Float for the given integer.
func precInt(i int64) *big.Float {
	return new(big.Float).SetPrec(prec).SetInt64(i)
}

// precFloat returns an exact full-precision big.Float for the given float64.
func precFloat(f float64) *big.Float {
	return new(big.Float).SetPrec(prec).SetFloat64(f)
}

// precAdd returns a+b computed at full precision.
func precAdd(a, b *big.Float) *big.Float {
	return new(big.Float).SetPrec(prec).Add(a, b)
}

// precSub returns a-b computed at full precision.
func precSub(a, b *big.Float) *big.Float {
	return new(big.Float).SetPrec(prec).Sub(a, b)
}

// precMul returns a*b computed at full precision.
func precMul(a, b *big.Float) *big.Float {
	return new(big.Float).SetPrec(prec).Mul(a, b)
}

// PreciseVector represents a point in ℝ³ using high-precision values.
// Note that this is NOT a complete implementation because there are some
// operations that Vector supports that are not feasible with arbitrary precision
// math. (e.g., methods that need division like Normalize, or methods needing a
// square root operation such as Norm)
type PreciseVector struct {
	X, Y, Z *big.Float
}

// PreciseVectorFromVector creates a high precision vector from the given Vector.
func PreciseVectorFromVector(v Vector) PreciseVector {
	return NewPreciseVector(v.X, v.Y, v.Z)
}

// NewPreciseVector creates a high precision vector from the given floating point values.
func NewPreciseVector(x, y, z float64) PreciseVector {
	return PreciseVector{X: precFloat(x), Y: precFloat(y), Z: precFloat(z)}
}

// Vector returns this precise vector converted to a Vector.
func (pv PreciseVector) Vector() Vector {
	// The accuracy flags from the float64 conversions are deliberately discarded.
	fx, _ := pv.X.Float64()
	fy, _ := pv.Y.Float64()
	fz, _ := pv.Z.Float64()
	return Vector{fx, fy, fz}.Normalize()
}

// Equal reports whether v and ov are equal.
func (pv PreciseVector) Equal(ov PreciseVector) bool {
	if pv.X.Cmp(ov.X) != 0 {
		return false
	}
	if pv.Y.Cmp(ov.Y) != 0 {
		return false
	}
	return pv.Z.Cmp(ov.Z) == 0
}

func (pv PreciseVector) String() string {
	return fmt.Sprintf("(%10g, %10g, %10g)", pv.X, pv.Y, pv.Z)
}

// Norm2 returns the square of the norm.
func (pv PreciseVector) Norm2() *big.Float {
	return pv.Dot(pv)
}

// IsUnit reports whether this vector is of unit length.
func (pv PreciseVector) IsUnit() bool {
	return pv.Norm2().Cmp(precise1) == 0
}

// Abs returns the vector with nonnegative components.
func (pv PreciseVector) Abs() PreciseVector {
	abs := func(f *big.Float) *big.Float { return new(big.Float).Abs(f) }
	return PreciseVector{X: abs(pv.X), Y: abs(pv.Y), Z: abs(pv.Z)}
}

// Add returns the standard vector sum of v and ov.
func (pv PreciseVector) Add(ov PreciseVector) PreciseVector {
	return PreciseVector{
		X: precAdd(pv.X, ov.X),
		Y: precAdd(pv.Y, ov.Y),
		Z: precAdd(pv.Z, ov.Z),
	}
}

// Sub returns the standard vector difference of v and ov.
func (pv PreciseVector) Sub(ov PreciseVector) PreciseVector {
	return PreciseVector{
		X: precSub(pv.X, ov.X),
		Y: precSub(pv.Y, ov.Y),
		Z: precSub(pv.Z, ov.Z),
	}
}

// Mul returns the standard scalar product of v and f.
func (pv PreciseVector) Mul(f *big.Float) PreciseVector {
	return PreciseVector{
		X: precMul(pv.X, f),
		Y: precMul(pv.Y, f),
		Z: precMul(pv.Z, f),
	}
}

// MulByFloat64 returns the standard scalar product of v and f.
func (pv PreciseVector) MulByFloat64(f float64) PreciseVector {
	return pv.Mul(precFloat(f))
}

// Dot returns the standard dot product of v and ov.
func (v PreciseVector) Dot(ov PreciseVector) *big.Float {
	// Keep the original association (x*x + (y*y + z*z)) so rounding, if any,
	// is identical.
	yz := precAdd(precMul(v.Y, ov.Y), precMul(v.Z, ov.Z))
	return precAdd(precMul(v.X, ov.X), yz)
}

// Cross returns the standard cross product of v and ov.
func (v PreciseVector) Cross(ov PreciseVector) PreciseVector {
	// Each component is the 2x2 determinant of the remaining coordinates.
	return PreciseVector{
		X: precSub(precMul(v.Y, ov.Z), precMul(v.Z, ov.Y)),
		Y: precSub(precMul(v.Z, ov.X), precMul(v.X, ov.Z)),
		Z: precSub(precMul(v.X, ov.Y), precMul(v.Y, ov.X)),
	}
}

// LargestComponent returns the axis that represents the largest component in this vector.
func (v PreciseVector) LargestComponent() Axis {
	t := v.Abs()
	// Decide Y vs Z first, then let X challenge the winner. Ties keep the
	// later axis, matching the original strict-greater comparisons.
	axis, val := ZAxis, t.Z
	if t.Y.Cmp(t.Z) > 0 {
		axis, val = YAxis, t.Y
	}
	if t.X.Cmp(val) > 0 {
		axis = XAxis
	}
	return axis
}

// SmallestComponent returns the axis that represents the smallest component in this vector.
func (v PreciseVector) SmallestComponent() Axis {
	t := v.Abs()
	// Mirror of LargestComponent with the comparisons reversed.
	axis, val := ZAxis, t.Z
	if t.Y.Cmp(t.Z) < 0 {
		axis, val = YAxis, t.Y
	}
	if t.X.Cmp(val) < 0 {
		axis = XAxis
	}
	return axis
}
vendor/github.com/golang/geo/r3/precisevector.go
0.853211
0.716243
precisevector.go
starcoder
package gomock

import (
	"unsafe"
)

// Trie is the mock-route lookup tree: a path is inserted together with a
// canned Response and later matched back by Search.
type Trie interface {
	Insert(path string, response Response)
	Search(path string) (Response, bool)
}

// trie is the concrete Trie backed by a first-child/next-sibling node tree.
type trie struct {
	root *node
}

// node holds one path segment. Children hang off child as a singly linked
// sibling list threaded through bros.
type node struct {
	child    *node
	bros     *node   // next sibling in the parent's child list
	path     string  // full path this node was inserted under
	partPath string  // single segment this node matches ("*" is a wildcard)
	queries  Queries // expected query parameters, if any
	response Response
}

// NewTrie returns Trie(*trie) object
func NewTrie() Trie {
	return &trie{
		root: new(node),
	}
}

// Insert registers response under path. Paths must begin with '/';
// anything else is silently ignored.
func (t *trie) Insert(path string, response Response) {
	if len(path) == 0 || path[0] != '/' {
		return
	}
	if len(path) == 1 {
		// The root path "/" is stored directly on the root's child.
		t.root.update("/", path, Queries{}, response)
		return
	}
	t.insert(path, response)
}

// insert walks path segment by segment, creating nodes as needed, and stores
// response (and any parsed query parameters) on the terminal node.
func (t *trie) insert(path string, response Response) {
	bytesPath := stringToBytes(path)
	n := t.root.update("/", path, Queries{}, Response{})
	for i := 1; i < len(bytesPath); i++ {
		startPos, endPos := blockPos(bytesPath, i)
		var queries Queries
		if len(bytesPath) > endPos && bytesPath[endPos] == '?' {
			// A query string starts here; parse it and attach it to the
			// final segment, then stop walking.
			queries = parseQuery(bytesToString(bytesPath, endPos+1, len(bytesPath)))
			if startPos == endPos {
				// "?..." immediately after '/': attach to the current node.
				n.queries = queries
				n.response = response
			} else {
				n.update(bytesToString(bytesPath, startPos, endPos), path, queries, response)
			}
			break
		}
		n = n.update(bytesToString(bytesPath, startPos, endPos), path, queries, response)
		i = endPos
	}
}

// blockPos returns the [startPos, endPos) bounds of the path segment that
// begins at startPos, stopping at the next '/' or '?'.
func blockPos(path []byte, startPos int) (int, int) {
	endPos := startPos
	for i := startPos; i < len(path) && path[i] != '/' && path[i] != '?'; i++ {
		endPos++
	}
	return startPos, endPos
}

// Search looks up path (with an optional query string) and returns the
// stored Response and whether a match was found.
func (t *trie) Search(path string) (Response, bool) {
	if len(path) == 0 || path[0] != '/' {
		return Response{}, false
	}
	n := t.root
	if n = n.get("/"); n == nil {
		return Response{}, false
	}
	if len(path) == 1 {
		return n.response, true
	}
	if path[len(path)-1] == '/' {
		// Ignore a trailing slash.
		path = path[:len(path)-1]
	}
	return t.search(path, n)
}

// search walks the tree segment by segment, checking query parameters when
// the request carries a query string.
func (t *trie) search(path string, n *node) (Response, bool) {
	bytesPath := stringToBytes(path)
	for i := 1; i < len(bytesPath); i++ {
		startPos, endPos := blockPos(bytesPath, i)
		var queries Queries
		if len(bytesPath) > endPos && bytesPath[endPos] == '?' {
			queries = parseQuery(bytesToString(bytesPath, endPos+1, len(bytesPath)))
			if startPos == endPos {
				// "?..." immediately after '/': match against the current node.
				if n.queries.Match(queries) {
					return n.response, true
				}
			}
		}
		if n = n.get(bytesToString(bytesPath, startPos, endPos)); n == nil {
			return Response{}, false
		}
		if len(bytesPath) > endPos && bytesPath[endPos] == '?' && n.queries.Match(queries) {
			return n.response, true
		}
		if endPos >= len(bytesPath) {
			// Entire path consumed with no query string: match.
			return n.response, true
		}
		i = endPos
	}
	return Response{}, false
}

// get returns the child of n whose segment equals partPath, honoring the
// "*" wildcard; nil when no child matches.
func (n *node) get(partPath string) *node {
	child := n.child
	for child != nil {
		if child.partPath == partPath || child.partPath == "*" {
			return child
		}
		child = child.bros
	}
	return child
}

// insert prepends a new child node for partPath to n's child list and
// returns it.
func (n *node) insert(partPath, path string, queries []Query, response Response) *node {
	newNode := &node{
		bros:     n.child,
		path:     path,
		partPath: partPath,
		response: response,
	}
	if len(queries) != 0 {
		newNode.queries = queries
	}
	n.child = newNode
	return newNode
}

// update returns the existing child for partPath, creating it if absent.
func (n *node) update(partPath, path string, queries []Query, response Response) *node {
	child := n.get(partPath)
	if child == nil {
		child = n.insert(partPath, path, queries, response)
	}
	return child
}

// stringToBytes reinterprets a string's bytes as a []byte without copying.
// NOTE(review): this is technically invalid unsafe usage — a slice header is
// larger than a string header, so the resulting cap field is read from
// adjacent memory. Prefer []byte(path), or unsafe.Slice(unsafe.StringData(s),
// len(s)) on Go 1.20+, unless profiling shows the copy matters.
func stringToBytes(path string) []byte {
	return *(*[]byte)(unsafe.Pointer(&path))
}

// bytesToString reinterprets b as a string and slices [start:end] of it
// without copying. NOTE(review): the returned string aliases b's backing
// array; callers must not mutate b while the string is live.
func bytesToString(b []byte, start, end int) string {
	return (*(*string)(unsafe.Pointer(&b)))[start:end]
}
pkg/gomock/trie.go
0.645455
0.400427
trie.go
starcoder