code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1 value |
|---|---|---|---|---|---|
package square
// Represents a collection of catalog objects for the purpose of applying a `PricingRule`. Including a catalog object will include all of its subtypes. For example, including a category in a product set will include all of its items and associated item variations in the product set. Including an item in a product set will also include its item variations.
type CatalogProductSet struct {
// User-defined name for the product set. For example, \"Clearance Items\" or \"Winter Sale Items\".
Name string `json:"name,omitempty"`
// Unique IDs for any `CatalogObject` included in this product set. Any number of these catalog objects can be in an order for a pricing rule to apply. This can be used with `product_ids_all` in a parent `CatalogProductSet` to match groups of products for a bulk discount, such as a discount for an entree and side combo. Only one of `product_ids_all`, `product_ids_any`, or `all_products` can be set. Max: 500 catalog object IDs.
ProductIdsAny []string `json:"product_ids_any,omitempty"`
// Unique IDs for any `CatalogObject` included in this product set. All objects in this set must be included in an order for a pricing rule to apply. Only one of `product_ids_all`, `product_ids_any`, or `all_products` can be set. Max: 500 catalog object IDs.
ProductIdsAll []string `json:"product_ids_all,omitempty"`
// If set, there must be exactly this many items from `products_any` or `products_all` in the cart for the discount to apply. Cannot be combined with either `quantity_min` or `quantity_max`.
QuantityExact int64 `json:"quantity_exact,omitempty"`
// If set, there must be at least this many items from `products_any` or `products_all` in a cart for the discount to apply. See `quantity_exact`. Defaults to 0 if `quantity_exact`, `quantity_min` and `quantity_max` are all unspecified.
QuantityMin int64 `json:"quantity_min,omitempty"`
// If set, the pricing rule will apply to a maximum of this many items from `products_any` or `products_all`.
QuantityMax int64 `json:"quantity_max,omitempty"`
// If set to `true`, the product set will include every item in the catalog. Only one of `product_ids_all`, `product_ids_any`, or `all_products` can be set.
AllProducts bool `json:"all_products,omitempty"`
} | square/model_catalog_product_set.go | 0.86421 | 0.566019 | model_catalog_product_set.go | starcoder |
package data
// LanguageInfo exposes the data for a language's Linguist YAML entry as a Go struct.
// See https://github.com/github/linguist/blob/master/lib/linguist/languages.yml
type LanguageInfo struct {
	// Name is the language name. May contain symbols not safe for use in some filesystems (e.g., `F*`).
	Name string
	// FSName is the filesystem safe name. Will only be set if Name is not safe for use in all filesystems.
	FSName string
	// Type is the language Type. See data.Type for values.
	Type Type
	// Color is the CSS hex color to represent the language. Only used if type is "programming" or "markup".
	Color string
	// Group is the name of the parent language. Languages in a group are counted in the statistics as the parent language.
	Group string
	// Aliases is a slice of additional aliases (implicitly includes name.downcase).
	Aliases []string
	// Extensions is a slice of associated extensions (the first one is considered the primary extension).
	Extensions []string
	// Interpreters is a slice of associated interpreter command names (e.g., "awk", "osascript").
	Interpreters []string
	// Filenames is a slice of filenames commonly associated with the language.
	Filenames []string
	// MimeType (maps to codemirror_mime_type in linguist.yaml) is the string name of the file mime type used for highlighting whenever a file is edited.
	MimeType string
	// TMScope is the TextMate scope that represents this programming language.
	TMScope string
	// AceMode is the name of the Ace Mode used for highlighting whenever a file is edited.
	AceMode string
	// CodeMirrorMode is the name of the CodeMirror Mode used for highlighting whenever a file is edited.
	CodeMirrorMode string
	// Wrap is a boolean flag to enable line wrapping in an editor.
	Wrap bool
	// LanguageID is the Linguist-assigned numeric ID for the language.
	LanguageID int
}
// LanguageInfoByID allows accessing LanguageInfo by a language's ID.
var LanguageInfoByID = map[int]LanguageInfo{
0: LanguageInfo{
Name: "1C Enterprise",
FSName: "",
Type: TypeForString("programming"),
Color: "#814CCC",
Group: "",
Aliases: []string{},
Extensions: []string{
".bsl",
".os",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.bsl",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 0,
},
387204628: LanguageInfo{
Name: "2-Dimensional Array",
FSName: "",
Type: TypeForString("data"),
Color: "#38761D",
Group: "",
Aliases: []string{},
Extensions: []string{
".2da",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.2da",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 387204628,
},
577529595: LanguageInfo{
Name: "4D",
FSName: "",
Type: TypeForString("programming"),
Color: "#004289",
Group: "",
Aliases: []string{},
Extensions: []string{
".4dm",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.4dm",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 577529595,
},
1: LanguageInfo{
Name: "ABAP",
FSName: "",
Type: TypeForString("programming"),
Color: "#E8274B",
Group: "",
Aliases: []string{},
Extensions: []string{
".abap",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.abap",
AceMode: "abap",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 1,
},
452681853: LanguageInfo{
Name: "ABAP CDS",
FSName: "",
Type: TypeForString("programming"),
Color: "#555e25",
Group: "",
Aliases: []string{},
Extensions: []string{
".asddls",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.abapcds",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 452681853,
},
429: LanguageInfo{
Name: "ABNF",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".abnf",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.abnf",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 429,
},
2: LanguageInfo{
Name: "AGS Script",
FSName: "",
Type: TypeForString("programming"),
Color: "#B9D9FF",
Group: "",
Aliases: []string{
"ags",
},
Extensions: []string{
".asc",
".ash",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-c++src",
TMScope: "source.c++",
AceMode: "c_cpp",
CodeMirrorMode: "clike",
Wrap: false,
LanguageID: 2,
},
451700185: LanguageInfo{
Name: "AIDL",
FSName: "",
Type: TypeForString("programming"),
Color: "#34EB6B",
Group: "",
Aliases: []string{},
Extensions: []string{
".aidl",
},
Interpreters: []string{
"aidl",
},
Filenames: []string{},
MimeType: "",
TMScope: "source.aidl",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 451700185,
},
658971832: LanguageInfo{
Name: "AL",
FSName: "",
Type: TypeForString("programming"),
Color: "#3AA2B5",
Group: "",
Aliases: []string{},
Extensions: []string{
".al",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.al",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 658971832,
},
3: LanguageInfo{
Name: "AMPL",
FSName: "",
Type: TypeForString("programming"),
Color: "#E6EFBB",
Group: "",
Aliases: []string{},
Extensions: []string{
".ampl",
".mod",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.ampl",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 3,
},
4: LanguageInfo{
Name: "ANTLR",
FSName: "",
Type: TypeForString("programming"),
Color: "#9DC3FF",
Group: "",
Aliases: []string{},
Extensions: []string{
".g4",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.antlr",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 4,
},
5: LanguageInfo{
Name: "API Blueprint",
FSName: "",
Type: TypeForString("markup"),
Color: "#2ACCA8",
Group: "",
Aliases: []string{},
Extensions: []string{
".apib",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "text.html.markdown.source.gfm.apib",
AceMode: "markdown",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 5,
},
6: LanguageInfo{
Name: "APL",
FSName: "",
Type: TypeForString("programming"),
Color: "#5A8164",
Group: "",
Aliases: []string{},
Extensions: []string{
".apl",
".dyalog",
},
Interpreters: []string{
"apl",
"aplx",
"dyalog",
},
Filenames: []string{},
MimeType: "text/apl",
TMScope: "source.apl",
AceMode: "text",
CodeMirrorMode: "apl",
Wrap: false,
LanguageID: 6,
},
124996147: LanguageInfo{
Name: "ASL",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".asl",
".dsl",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.asl",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 124996147,
},
7: LanguageInfo{
Name: "ASN.1",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".asn",
".asn1",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-ttcn-asn",
TMScope: "source.asn",
AceMode: "text",
CodeMirrorMode: "asn.1",
Wrap: false,
LanguageID: 7,
},
564186416: LanguageInfo{
Name: "ASP.NET",
FSName: "",
Type: TypeForString("programming"),
Color: "#9400ff",
Group: "",
Aliases: []string{
"aspx",
"aspx-vb",
},
Extensions: []string{
".asax",
".ascx",
".ashx",
".asmx",
".aspx",
".axd",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "application/x-aspx",
TMScope: "text.html.asp",
AceMode: "text",
CodeMirrorMode: "htmlembedded",
Wrap: false,
LanguageID: 564186416,
},
9: LanguageInfo{
Name: "ATS",
FSName: "",
Type: TypeForString("programming"),
Color: "#1ac620",
Group: "",
Aliases: []string{
"ats2",
},
Extensions: []string{
".dats",
".hats",
".sats",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.ats",
AceMode: "ocaml",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 9,
},
10: LanguageInfo{
Name: "ActionScript",
FSName: "",
Type: TypeForString("programming"),
Color: "#882B0F",
Group: "",
Aliases: []string{
"actionscript 3",
"actionscript3",
"as3",
},
Extensions: []string{
".as",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.actionscript.3",
AceMode: "actionscript",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 10,
},
11: LanguageInfo{
Name: "Ada",
FSName: "",
Type: TypeForString("programming"),
Color: "#02f88c",
Group: "",
Aliases: []string{
"ada95",
"ada2005",
},
Extensions: []string{
".adb",
".ada",
".ads",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.ada",
AceMode: "ada",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 11,
},
147198098: LanguageInfo{
Name: "Adobe Font Metrics",
FSName: "",
Type: TypeForString("data"),
Color: "#fa0f00",
Group: "",
Aliases: []string{
"acfm",
"adobe composite font metrics",
"adobe multiple font metrics",
"amfm",
},
Extensions: []string{
".afm",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.afm",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 147198098,
},
12: LanguageInfo{
Name: "Agda",
FSName: "",
Type: TypeForString("programming"),
Color: "#315665",
Group: "",
Aliases: []string{},
Extensions: []string{
".agda",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.agda",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 12,
},
13: LanguageInfo{
Name: "Alloy",
FSName: "",
Type: TypeForString("programming"),
Color: "#64C800",
Group: "",
Aliases: []string{},
Extensions: []string{
".als",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.alloy",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 13,
},
14: LanguageInfo{
Name: "Alpine Abuild",
FSName: "",
Type: TypeForString("programming"),
Color: "#0D597F",
Group: "Shell",
Aliases: []string{
"abuild",
"apkbuild",
},
Extensions: []string{},
Interpreters: []string{},
Filenames: []string{
"APKBUILD",
},
MimeType: "text/x-sh",
TMScope: "source.shell",
AceMode: "sh",
CodeMirrorMode: "shell",
Wrap: false,
LanguageID: 14,
},
187772328: LanguageInfo{
Name: "Altium Designer",
FSName: "",
Type: TypeForString("data"),
Color: "#A89663",
Group: "",
Aliases: []string{
"altium",
},
Extensions: []string{
".OutJob",
".PcbDoc",
".PrjPCB",
".SchDoc",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.ini",
AceMode: "ini",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 187772328,
},
389477596: LanguageInfo{
Name: "AngelScript",
FSName: "",
Type: TypeForString("programming"),
Color: "#C7D7DC",
Group: "",
Aliases: []string{},
Extensions: []string{
".as",
".angelscript",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-c++src",
TMScope: "source.angelscript",
AceMode: "text",
CodeMirrorMode: "clike",
Wrap: false,
LanguageID: 389477596,
},
15: LanguageInfo{
Name: "Ant Build System",
FSName: "",
Type: TypeForString("data"),
Color: "#A9157E",
Group: "",
Aliases: []string{},
Extensions: []string{},
Interpreters: []string{},
Filenames: []string{
"ant.xml",
"build.xml",
},
MimeType: "application/xml",
TMScope: "text.xml.ant",
AceMode: "xml",
CodeMirrorMode: "xml",
Wrap: false,
LanguageID: 15,
},
16: LanguageInfo{
Name: "ApacheConf",
FSName: "",
Type: TypeForString("data"),
Color: "#d12127",
Group: "",
Aliases: []string{
"aconf",
"apache",
},
Extensions: []string{
".apacheconf",
".vhost",
},
Interpreters: []string{},
Filenames: []string{
".htaccess",
"apache2.conf",
"httpd.conf",
},
MimeType: "",
TMScope: "source.apache-config",
AceMode: "apache_conf",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 16,
},
17: LanguageInfo{
Name: "Apex",
FSName: "",
Type: TypeForString("programming"),
Color: "#1797c0",
Group: "",
Aliases: []string{},
Extensions: []string{
".cls",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-java",
TMScope: "source.java",
AceMode: "java",
CodeMirrorMode: "clike",
Wrap: false,
LanguageID: 17,
},
18: LanguageInfo{
Name: "Apollo Guidance Computer",
FSName: "",
Type: TypeForString("programming"),
Color: "#0B3D91",
Group: "Assembly",
Aliases: []string{},
Extensions: []string{
".agc",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.agc",
AceMode: "assembly_x86",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 18,
},
19: LanguageInfo{
Name: "AppleScript",
FSName: "",
Type: TypeForString("programming"),
Color: "#101F1F",
Group: "",
Aliases: []string{
"osascript",
},
Extensions: []string{
".applescript",
".scpt",
},
Interpreters: []string{
"osascript",
},
Filenames: []string{},
MimeType: "",
TMScope: "source.applescript",
AceMode: "applescript",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 19,
},
20: LanguageInfo{
Name: "Arc",
FSName: "",
Type: TypeForString("programming"),
Color: "#aa2afe",
Group: "",
Aliases: []string{},
Extensions: []string{
".arc",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 20,
},
22: LanguageInfo{
Name: "AsciiDoc",
FSName: "",
Type: TypeForString("prose"),
Color: "#73a0c5",
Group: "",
Aliases: []string{},
Extensions: []string{
".asciidoc",
".adoc",
".asc",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "text.html.asciidoc",
AceMode: "asciidoc",
CodeMirrorMode: "",
Wrap: true,
LanguageID: 22,
},
23: LanguageInfo{
Name: "AspectJ",
FSName: "",
Type: TypeForString("programming"),
Color: "#a957b0",
Group: "",
Aliases: []string{},
Extensions: []string{
".aj",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.aspectj",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 23,
},
24: LanguageInfo{
Name: "Assembly",
FSName: "",
Type: TypeForString("programming"),
Color: "#6E4C13",
Group: "",
Aliases: []string{
"asm",
"nasm",
},
Extensions: []string{
".asm",
".a51",
".i",
".inc",
".nasm",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.assembly",
AceMode: "assembly_x86",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 24,
},
578209015: LanguageInfo{
Name: "Astro",
FSName: "",
Type: TypeForString("markup"),
Color: "#ff5a03",
Group: "",
Aliases: []string{},
Extensions: []string{
".astro",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/jsx",
TMScope: "text.html.astro",
AceMode: "html",
CodeMirrorMode: "jsx",
Wrap: false,
LanguageID: 578209015,
},
591605007: LanguageInfo{
Name: "Asymptote",
FSName: "",
Type: TypeForString("programming"),
Color: "#ff0000",
Group: "",
Aliases: []string{},
Extensions: []string{
".asy",
},
Interpreters: []string{
"asy",
},
Filenames: []string{},
MimeType: "text/x-kotlin",
TMScope: "source.c++",
AceMode: "c_cpp",
CodeMirrorMode: "clike",
Wrap: false,
LanguageID: 591605007,
},
25: LanguageInfo{
Name: "Augeas",
FSName: "",
Type: TypeForString("programming"),
Color: "#9CC134",
Group: "",
Aliases: []string{},
Extensions: []string{
".aug",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 25,
},
26: LanguageInfo{
Name: "AutoHotkey",
FSName: "",
Type: TypeForString("programming"),
Color: "#6594b9",
Group: "",
Aliases: []string{
"ahk",
},
Extensions: []string{
".ahk",
".ahkl",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.ahk",
AceMode: "autohotkey",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 26,
},
27: LanguageInfo{
Name: "AutoIt",
FSName: "",
Type: TypeForString("programming"),
Color: "#1C3552",
Group: "",
Aliases: []string{
"au3",
"AutoIt3",
"AutoItScript",
},
Extensions: []string{
".au3",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.autoit",
AceMode: "autohotkey",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 27,
},
785497837: LanguageInfo{
Name: "Avro IDL",
FSName: "",
Type: TypeForString("data"),
Color: "#0040FF",
Group: "",
Aliases: []string{},
Extensions: []string{
".avdl",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.avro",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 785497837,
},
28: LanguageInfo{
Name: "Awk",
FSName: "",
Type: TypeForString("programming"),
Color: "#c30e9b",
Group: "",
Aliases: []string{},
Extensions: []string{
".awk",
".auk",
".gawk",
".mawk",
".nawk",
},
Interpreters: []string{
"awk",
"gawk",
"mawk",
"nawk",
},
Filenames: []string{},
MimeType: "",
TMScope: "source.awk",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 28,
},
28923963: LanguageInfo{
Name: "BASIC",
FSName: "",
Type: TypeForString("programming"),
Color: "#ff0000",
Group: "",
Aliases: []string{},
Extensions: []string{
".bas",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.basic",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 28923963,
},
720859680: LanguageInfo{
Name: "Ballerina",
FSName: "",
Type: TypeForString("programming"),
Color: "#FF5000",
Group: "",
Aliases: []string{},
Extensions: []string{
".bal",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.ballerina",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 720859680,
},
29: LanguageInfo{
Name: "Batchfile",
FSName: "",
Type: TypeForString("programming"),
Color: "#C1F12E",
Group: "",
Aliases: []string{
"bat",
"batch",
"dosbatch",
"winbatch",
},
Extensions: []string{
".bat",
".cmd",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.batchfile",
AceMode: "batchfile",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 29,
},
545626333: LanguageInfo{
Name: "Beef",
FSName: "",
Type: TypeForString("programming"),
Color: "#a52f4e",
Group: "",
Aliases: []string{},
Extensions: []string{
".bf",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-csharp",
TMScope: "source.cs",
AceMode: "csharp",
CodeMirrorMode: "clike",
Wrap: false,
LanguageID: 545626333,
},
30: LanguageInfo{
Name: "Befunge",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".befunge",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.befunge",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 30,
},
982188347: LanguageInfo{
Name: "BibTeX",
FSName: "",
Type: TypeForString("markup"),
Color: "#778899",
Group: "TeX",
Aliases: []string{},
Extensions: []string{
".bib",
".bibtex",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-stex",
TMScope: "text.bibtex",
AceMode: "tex",
CodeMirrorMode: "stex",
Wrap: false,
LanguageID: 982188347,
},
321200902: LanguageInfo{
Name: "Bicep",
FSName: "",
Type: TypeForString("programming"),
Color: "#519aba",
Group: "",
Aliases: []string{},
Extensions: []string{
".bicep",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.bicep",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 321200902,
},
31: LanguageInfo{
Name: "Bison",
FSName: "",
Type: TypeForString("programming"),
Color: "#6A463F",
Group: "Yacc",
Aliases: []string{},
Extensions: []string{
".bison",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.yacc",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 31,
},
32: LanguageInfo{
Name: "BitBake",
FSName: "",
Type: TypeForString("programming"),
Color: "#00bce4",
Group: "",
Aliases: []string{},
Extensions: []string{
".bb",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 32,
},
33: LanguageInfo{
Name: "Blade",
FSName: "",
Type: TypeForString("markup"),
Color: "#f7523f",
Group: "",
Aliases: []string{},
Extensions: []string{
".blade",
".blade.php",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "text.html.php.blade",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 33,
},
34: LanguageInfo{
Name: "BlitzBasic",
FSName: "",
Type: TypeForString("programming"),
Color: "#00FFAE",
Group: "",
Aliases: []string{
"b3d",
"blitz3d",
"blitzplus",
"bplus",
},
Extensions: []string{
".bb",
".decls",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.blitzmax",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 34,
},
35: LanguageInfo{
Name: "BlitzMax",
FSName: "",
Type: TypeForString("programming"),
Color: "#cd6400",
Group: "",
Aliases: []string{
"bmax",
},
Extensions: []string{
".bmx",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.blitzmax",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 35,
},
36: LanguageInfo{
Name: "Bluespec",
FSName: "",
Type: TypeForString("programming"),
Color: "#12223c",
Group: "",
Aliases: []string{},
Extensions: []string{
".bsv",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.bsv",
AceMode: "verilog",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 36,
},
37: LanguageInfo{
Name: "Boo",
FSName: "",
Type: TypeForString("programming"),
Color: "#d4bec1",
Group: "",
Aliases: []string{},
Extensions: []string{
".boo",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.boo",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 37,
},
955017407: LanguageInfo{
Name: "Boogie",
FSName: "",
Type: TypeForString("programming"),
Color: "#c80fa0",
Group: "",
Aliases: []string{},
Extensions: []string{
".bpl",
},
Interpreters: []string{
"boogie",
},
Filenames: []string{},
MimeType: "",
TMScope: "source.boogie",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 955017407,
},
38: LanguageInfo{
Name: "Brainfuck",
FSName: "",
Type: TypeForString("programming"),
Color: "#2F2530",
Group: "",
Aliases: []string{},
Extensions: []string{
".b",
".bf",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-brainfuck",
TMScope: "source.bf",
AceMode: "text",
CodeMirrorMode: "brainfuck",
Wrap: false,
LanguageID: 38,
},
39: LanguageInfo{
Name: "Brightscript",
FSName: "",
Type: TypeForString("programming"),
Color: "#662D91",
Group: "",
Aliases: []string{},
Extensions: []string{
".brs",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.brightscript",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 39,
},
153503348: LanguageInfo{
Name: "Browserslist",
FSName: "",
Type: TypeForString("data"),
Color: "#ffd539",
Group: "",
Aliases: []string{},
Extensions: []string{},
Interpreters: []string{},
Filenames: []string{
".browserslistrc",
"browserslist",
},
MimeType: "",
TMScope: "text.browserslist",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 153503348,
},
41: LanguageInfo{
Name: "C",
FSName: "",
Type: TypeForString("programming"),
Color: "#555555",
Group: "",
Aliases: []string{},
Extensions: []string{
".c",
".cats",
".h",
".idc",
},
Interpreters: []string{
"tcc",
},
Filenames: []string{},
MimeType: "text/x-csrc",
TMScope: "source.c",
AceMode: "c_cpp",
CodeMirrorMode: "clike",
Wrap: false,
LanguageID: 41,
},
42: LanguageInfo{
Name: "C#",
FSName: "",
Type: TypeForString("programming"),
Color: "#178600",
Group: "",
Aliases: []string{
"csharp",
"cake",
"cakescript",
},
Extensions: []string{
".cs",
".cake",
".csx",
".linq",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-csharp",
TMScope: "source.cs",
AceMode: "csharp",
CodeMirrorMode: "clike",
Wrap: false,
LanguageID: 42,
},
43: LanguageInfo{
Name: "C++",
FSName: "",
Type: TypeForString("programming"),
Color: "#f34b7d",
Group: "",
Aliases: []string{
"cpp",
},
Extensions: []string{
".cpp",
".c++",
".cc",
".cp",
".cxx",
".h",
".h++",
".hh",
".hpp",
".hxx",
".inc",
".inl",
".ino",
".ipp",
".ixx",
".re",
".tcc",
".tpp",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-c++src",
TMScope: "source.c++",
AceMode: "c_cpp",
CodeMirrorMode: "clike",
Wrap: false,
LanguageID: 43,
},
44: LanguageInfo{
Name: "C-ObjDump",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".c-objdump",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "objdump.x86asm",
AceMode: "assembly_x86",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 44,
},
45: LanguageInfo{
Name: "C2hs Haskell",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "Haskell",
Aliases: []string{
"c2hs",
},
Extensions: []string{
".chs",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-haskell",
TMScope: "source.haskell",
AceMode: "haskell",
CodeMirrorMode: "haskell",
Wrap: false,
LanguageID: 45,
},
29176339: LanguageInfo{
Name: "CIL",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".cil",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.cil",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 29176339,
},
46: LanguageInfo{
Name: "CLIPS",
FSName: "",
Type: TypeForString("programming"),
Color: "#00A300",
Group: "",
Aliases: []string{},
Extensions: []string{
".clp",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.clips",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 46,
},
47: LanguageInfo{
Name: "CMake",
FSName: "",
Type: TypeForString("programming"),
Color: "#DA3434",
Group: "",
Aliases: []string{},
Extensions: []string{
".cmake",
".cmake.in",
},
Interpreters: []string{},
Filenames: []string{
"CMakeLists.txt",
},
MimeType: "text/x-cmake",
TMScope: "source.cmake",
AceMode: "text",
CodeMirrorMode: "cmake",
Wrap: false,
LanguageID: 47,
},
48: LanguageInfo{
Name: "COBOL",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".cob",
".cbl",
".ccp",
".cobol",
".cpy",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-cobol",
TMScope: "source.cobol",
AceMode: "cobol",
CodeMirrorMode: "cobol",
Wrap: false,
LanguageID: 48,
},
321684729: LanguageInfo{
Name: "CODEOWNERS",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{},
Interpreters: []string{},
Filenames: []string{
"CODEOWNERS",
},
MimeType: "",
TMScope: "text.codeowners",
AceMode: "gitignore",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 321684729,
},
49: LanguageInfo{
Name: "COLLADA",
FSName: "",
Type: TypeForString("data"),
Color: "#F1A42B",
Group: "",
Aliases: []string{},
Extensions: []string{
".dae",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/xml",
TMScope: "text.xml",
AceMode: "xml",
CodeMirrorMode: "xml",
Wrap: false,
LanguageID: 49,
},
424: LanguageInfo{
Name: "CSON",
FSName: "",
Type: TypeForString("data"),
Color: "#244776",
Group: "",
Aliases: []string{},
Extensions: []string{
".cson",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-coffeescript",
TMScope: "source.coffee",
AceMode: "coffee",
CodeMirrorMode: "coffeescript",
Wrap: false,
LanguageID: 424,
},
50: LanguageInfo{
Name: "CSS",
FSName: "",
Type: TypeForString("markup"),
Color: "#563d7c",
Group: "",
Aliases: []string{},
Extensions: []string{
".css",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/css",
TMScope: "source.css",
AceMode: "css",
CodeMirrorMode: "css",
Wrap: false,
LanguageID: 50,
},
51: LanguageInfo{
Name: "CSV",
FSName: "",
Type: TypeForString("data"),
Color: "#237346",
Group: "",
Aliases: []string{},
Extensions: []string{
".csv",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 51,
},
356063509: LanguageInfo{
Name: "CUE",
FSName: "",
Type: TypeForString("programming"),
Color: "#5886E1",
Group: "",
Aliases: []string{},
Extensions: []string{
".cue",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.cue",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 356063509,
},
657332628: LanguageInfo{
Name: "CWeb",
FSName: "",
Type: TypeForString("programming"),
Color: "#00007a",
Group: "",
Aliases: []string{},
Extensions: []string{
".w",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 657332628,
},
677095381: LanguageInfo{
Name: "Cabal Config",
FSName: "",
Type: TypeForString("data"),
Color: "#483465",
Group: "",
Aliases: []string{
"Cabal",
},
Extensions: []string{
".cabal",
},
Interpreters: []string{},
Filenames: []string{
"cabal.config",
"cabal.project",
},
MimeType: "text/x-haskell",
TMScope: "source.cabal",
AceMode: "haskell",
CodeMirrorMode: "haskell",
Wrap: false,
LanguageID: 677095381,
},
52: LanguageInfo{
Name: "<NAME>",
FSName: "",
Type: TypeForString("programming"),
Color: "#c42727",
Group: "",
Aliases: []string{},
Extensions: []string{
".capnp",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.capnp",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 52,
},
53: LanguageInfo{
Name: "CartoCSS",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{
"Carto",
},
Extensions: []string{
".mss",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.css.mss",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 53,
},
54: LanguageInfo{
Name: "Ceylon",
FSName: "",
Type: TypeForString("programming"),
Color: "#dfa535",
Group: "",
Aliases: []string{},
Extensions: []string{
".ceylon",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.ceylon",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 54,
},
55: LanguageInfo{
Name: "Chapel",
FSName: "",
Type: TypeForString("programming"),
Color: "#8dc63f",
Group: "",
Aliases: []string{
"chpl",
},
Extensions: []string{
".chpl",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.chapel",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 55,
},
56: LanguageInfo{
Name: "Charity",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".ch",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 56,
},
57: LanguageInfo{
Name: "ChucK",
FSName: "",
Type: TypeForString("programming"),
Color: "#3f8000",
Group: "",
Aliases: []string{},
Extensions: []string{
".ck",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-java",
TMScope: "source.java",
AceMode: "java",
CodeMirrorMode: "clike",
Wrap: false,
LanguageID: 57,
},
58: LanguageInfo{
Name: "Cirru",
FSName: "",
Type: TypeForString("programming"),
Color: "#ccccff",
Group: "",
Aliases: []string{},
Extensions: []string{
".cirru",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.cirru",
AceMode: "cirru",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 58,
},
59: LanguageInfo{
Name: "Clarion",
FSName: "",
Type: TypeForString("programming"),
Color: "#db901e",
Group: "",
Aliases: []string{},
Extensions: []string{
".clw",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.clarion",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 59,
},
91493841: LanguageInfo{
Name: "Clarity",
FSName: "",
Type: TypeForString("programming"),
Color: "#5546ff",
Group: "",
Aliases: []string{},
Extensions: []string{
".clar",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.clar",
AceMode: "lisp",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 91493841,
},
8: LanguageInfo{
Name: "Classic ASP",
FSName: "",
Type: TypeForString("programming"),
Color: "#6a40fd",
Group: "",
Aliases: []string{
"asp",
},
Extensions: []string{
".asp",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "text.html.asp",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 8,
},
60: LanguageInfo{
Name: "Clean",
FSName: "",
Type: TypeForString("programming"),
Color: "#3F85AF",
Group: "",
Aliases: []string{},
Extensions: []string{
".icl",
".dcl",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.clean",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 60,
},
61: LanguageInfo{
Name: "Click",
FSName: "",
Type: TypeForString("programming"),
Color: "#E4E6F3",
Group: "",
Aliases: []string{},
Extensions: []string{
".click",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.click",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 61,
},
62: LanguageInfo{
Name: "Clojure",
FSName: "",
Type: TypeForString("programming"),
Color: "#db5855",
Group: "",
Aliases: []string{},
Extensions: []string{
".clj",
".boot",
".cl2",
".cljc",
".cljs",
".cljs.hl",
".cljscm",
".cljx",
".hic",
},
Interpreters: []string{},
Filenames: []string{
"riemann.config",
},
MimeType: "text/x-clojure",
TMScope: "source.clojure",
AceMode: "clojure",
CodeMirrorMode: "clojure",
Wrap: false,
LanguageID: 62,
},
357046146: LanguageInfo{
Name: "Closure Templates",
FSName: "",
Type: TypeForString("markup"),
Color: "#0d948f",
Group: "",
Aliases: []string{
"soy",
},
Extensions: []string{
".soy",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-soy",
TMScope: "text.html.soy",
AceMode: "soy_template",
CodeMirrorMode: "soy",
Wrap: false,
LanguageID: 357046146,
},
407996372: LanguageInfo{
Name: "Cloud Firestore Security Rules",
FSName: "",
Type: TypeForString("data"),
Color: "#FFA000",
Group: "",
Aliases: []string{},
Extensions: []string{},
Interpreters: []string{},
Filenames: []string{
"firestore.rules",
},
MimeType: "text/css",
TMScope: "source.firestore",
AceMode: "less",
CodeMirrorMode: "css",
Wrap: false,
LanguageID: 407996372,
},
421026389: LanguageInfo{
Name: "CoNLL-U",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{
"CoNLL",
"CoNLL-X",
},
Extensions: []string{
".conllu",
".conll",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "text.conllu",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 421026389,
},
424259634: LanguageInfo{
Name: "CodeQL",
FSName: "",
Type: TypeForString("programming"),
Color: "#140f46",
Group: "",
Aliases: []string{
"ql",
},
Extensions: []string{
".ql",
".qll",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.ql",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 424259634,
},
63: LanguageInfo{
Name: "CoffeeScript",
FSName: "",
Type: TypeForString("programming"),
Color: "#244776",
Group: "",
Aliases: []string{
"coffee",
"coffee-script",
},
Extensions: []string{
".coffee",
"._coffee",
".cake",
".cjsx",
".iced",
},
Interpreters: []string{
"coffee",
},
Filenames: []string{
"Cakefile",
},
MimeType: "text/x-coffeescript",
TMScope: "source.coffee",
AceMode: "coffee",
CodeMirrorMode: "coffeescript",
Wrap: false,
LanguageID: 63,
},
64: LanguageInfo{
Name: "ColdFusion",
FSName: "",
Type: TypeForString("programming"),
Color: "#ed2cd6",
Group: "",
Aliases: []string{
"cfm",
"cfml",
"coldfusion html",
},
Extensions: []string{
".cfm",
".cfml",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "text.html.cfm",
AceMode: "coldfusion",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 64,
},
65: LanguageInfo{
Name: "ColdFusion CFC",
FSName: "",
Type: TypeForString("programming"),
Color: "#ed2cd6",
Group: "ColdFusion",
Aliases: []string{
"cfc",
},
Extensions: []string{
".cfc",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.cfscript",
AceMode: "coldfusion",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 65,
},
66: LanguageInfo{
Name: "Common Lisp",
FSName: "",
Type: TypeForString("programming"),
Color: "#3fb68b",
Group: "",
Aliases: []string{
"lisp",
},
Extensions: []string{
".lisp",
".asd",
".cl",
".l",
".lsp",
".ny",
".podsl",
".sexp",
},
Interpreters: []string{
"lisp",
"sbcl",
"ccl",
"clisp",
"ecl",
},
Filenames: []string{},
MimeType: "text/x-common-lisp",
TMScope: "source.lisp",
AceMode: "lisp",
CodeMirrorMode: "commonlisp",
Wrap: false,
LanguageID: 66,
},
988547172: LanguageInfo{
Name: "Common Workflow Language",
FSName: "",
Type: TypeForString("programming"),
Color: "#B5314C",
Group: "",
Aliases: []string{
"cwl",
},
Extensions: []string{
".cwl",
},
Interpreters: []string{
"cwl-runner",
},
Filenames: []string{},
MimeType: "text/x-yaml",
TMScope: "source.cwl",
AceMode: "yaml",
CodeMirrorMode: "yaml",
Wrap: false,
LanguageID: 988547172,
},
67: LanguageInfo{
Name: "Component Pascal",
FSName: "",
Type: TypeForString("programming"),
Color: "#B0CE4E",
Group: "",
Aliases: []string{},
Extensions: []string{
".cp",
".cps",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-pascal",
TMScope: "source.pascal",
AceMode: "pascal",
CodeMirrorMode: "pascal",
Wrap: false,
LanguageID: 67,
},
68: LanguageInfo{
Name: "Cool",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".cl",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.cool",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 68,
},
69: LanguageInfo{
Name: "Coq",
FSName: "",
Type: TypeForString("programming"),
Color: "#d0b68c",
Group: "",
Aliases: []string{},
Extensions: []string{
".coq",
".v",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.coq",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 69,
},
70: LanguageInfo{
Name: "Cpp-ObjDump",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{
"c++-objdump",
},
Extensions: []string{
".cppobjdump",
".c++-objdump",
".c++objdump",
".cpp-objdump",
".cxx-objdump",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "objdump.x86asm",
AceMode: "assembly_x86",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 70,
},
71: LanguageInfo{
Name: "Creole",
FSName: "",
Type: TypeForString("prose"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".creole",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "text.html.creole",
AceMode: "text",
CodeMirrorMode: "",
Wrap: true,
LanguageID: 71,
},
72: LanguageInfo{
Name: "Crystal",
FSName: "",
Type: TypeForString("programming"),
Color: "#000100",
Group: "",
Aliases: []string{},
Extensions: []string{
".cr",
},
Interpreters: []string{
"crystal",
},
Filenames: []string{},
MimeType: "text/x-crystal",
TMScope: "source.crystal",
AceMode: "ruby",
CodeMirrorMode: "crystal",
Wrap: false,
LanguageID: 72,
},
73: LanguageInfo{
Name: "Csound",
FSName: "",
Type: TypeForString("programming"),
Color: "#1a1a1a",
Group: "",
Aliases: []string{
"csound-orc",
},
Extensions: []string{
".orc",
".udo",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.csound",
AceMode: "csound_orchestra",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 73,
},
74: LanguageInfo{
Name: "Csound Document",
FSName: "",
Type: TypeForString("programming"),
Color: "#1a1a1a",
Group: "",
Aliases: []string{
"csound-csd",
},
Extensions: []string{
".csd",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.csound-document",
AceMode: "csound_document",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 74,
},
75: LanguageInfo{
Name: "Csound Score",
FSName: "",
Type: TypeForString("programming"),
Color: "#1a1a1a",
Group: "",
Aliases: []string{
"csound-sco",
},
Extensions: []string{
".sco",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.csound-score",
AceMode: "csound_score",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 75,
},
77: LanguageInfo{
Name: "Cuda",
FSName: "",
Type: TypeForString("programming"),
Color: "#3A4E3A",
Group: "",
Aliases: []string{},
Extensions: []string{
".cu",
".cuh",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-c++src",
TMScope: "source.cuda-c++",
AceMode: "c_cpp",
CodeMirrorMode: "clike",
Wrap: false,
LanguageID: 77,
},
942714150: LanguageInfo{
Name: "Cue Sheet",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".cue",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.cuesheet",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 942714150,
},
439829048: LanguageInfo{
Name: "Curry",
FSName: "",
Type: TypeForString("programming"),
Color: "#531242",
Group: "",
Aliases: []string{},
Extensions: []string{
".curry",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.curry",
AceMode: "haskell",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 439829048,
},
78: LanguageInfo{
Name: "Cycript",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".cy",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/javascript",
TMScope: "source.js",
AceMode: "javascript",
CodeMirrorMode: "javascript",
Wrap: false,
LanguageID: 78,
},
79: LanguageInfo{
Name: "Cython",
FSName: "",
Type: TypeForString("programming"),
Color: "#fedf5b",
Group: "",
Aliases: []string{
"pyrex",
},
Extensions: []string{
".pyx",
".pxd",
".pxi",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-cython",
TMScope: "source.cython",
AceMode: "text",
CodeMirrorMode: "python",
Wrap: false,
LanguageID: 79,
},
80: LanguageInfo{
Name: "D",
FSName: "",
Type: TypeForString("programming"),
Color: "#ba595e",
Group: "",
Aliases: []string{
"Dlang",
},
Extensions: []string{
".d",
".di",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-d",
TMScope: "source.d",
AceMode: "d",
CodeMirrorMode: "d",
Wrap: false,
LanguageID: 80,
},
81: LanguageInfo{
Name: "D-ObjDump",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".d-objdump",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "objdump.x86asm",
AceMode: "assembly_x86",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 81,
},
82: LanguageInfo{
Name: "DIGITAL Command Language",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{
"dcl",
},
Extensions: []string{
".com",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 82,
},
83: LanguageInfo{
Name: "DM",
FSName: "",
Type: TypeForString("programming"),
Color: "#447265",
Group: "",
Aliases: []string{
"byond",
},
Extensions: []string{
".dm",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.dm",
AceMode: "c_cpp",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 83,
},
84: LanguageInfo{
Name: "DNS Zone",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".zone",
".arpa",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "text.zone_file",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 84,
},
85: LanguageInfo{
Name: "DTrace",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{
"dtrace-script",
},
Extensions: []string{
".d",
},
Interpreters: []string{
"dtrace",
},
Filenames: []string{},
MimeType: "text/x-csrc",
TMScope: "source.c",
AceMode: "c_cpp",
CodeMirrorMode: "clike",
Wrap: false,
LanguageID: 85,
},
969323346: LanguageInfo{
Name: "Dafny",
FSName: "",
Type: TypeForString("programming"),
Color: "#FFEC25",
Group: "",
Aliases: []string{},
Extensions: []string{
".dfy",
},
Interpreters: []string{
"dafny",
},
Filenames: []string{},
MimeType: "",
TMScope: "text.dfy.dafny",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 969323346,
},
86: LanguageInfo{
Name: "<NAME>",
FSName: "",
Type: TypeForString("data"),
Color: "#8eff23",
Group: "",
Aliases: []string{
"dpatch",
},
Extensions: []string{
".darcspatch",
".dpatch",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 86,
},
87: LanguageInfo{
Name: "Dart",
FSName: "",
Type: TypeForString("programming"),
Color: "#00B4AB",
Group: "",
Aliases: []string{},
Extensions: []string{
".dart",
},
Interpreters: []string{
"dart",
},
Filenames: []string{},
MimeType: "application/dart",
TMScope: "source.dart",
AceMode: "dart",
CodeMirrorMode: "dart",
Wrap: false,
LanguageID: 87,
},
974514097: LanguageInfo{
Name: "DataWeave",
FSName: "",
Type: TypeForString("programming"),
Color: "#003a52",
Group: "",
Aliases: []string{},
Extensions: []string{
".dwl",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.data-weave",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 974514097,
},
527438264: LanguageInfo{
Name: "Debian Package Control File",
FSName: "",
Type: TypeForString("data"),
Color: "#D70751",
Group: "",
Aliases: []string{},
Extensions: []string{
".dsc",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.deb-control",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 527438264,
},
435000929: LanguageInfo{
Name: "DenizenScript",
FSName: "",
Type: TypeForString("programming"),
Color: "#FBEE96",
Group: "",
Aliases: []string{},
Extensions: []string{
".dsc",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-yaml",
TMScope: "source.denizenscript",
AceMode: "yaml",
CodeMirrorMode: "yaml",
Wrap: false,
LanguageID: 435000929,
},
793969321: LanguageInfo{
Name: "Dhall",
FSName: "",
Type: TypeForString("programming"),
Color: "#dfafff",
Group: "",
Aliases: []string{},
Extensions: []string{
".dhall",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-haskell",
TMScope: "source.haskell",
AceMode: "haskell",
CodeMirrorMode: "haskell",
Wrap: false,
LanguageID: 793969321,
},
88: LanguageInfo{
Name: "Diff",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{
"udiff",
},
Extensions: []string{
".diff",
".patch",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-diff",
TMScope: "source.diff",
AceMode: "diff",
CodeMirrorMode: "diff",
Wrap: false,
LanguageID: 88,
},
201049282: LanguageInfo{
Name: "DirectX 3D File",
FSName: "",
Type: TypeForString("data"),
Color: "#aace60",
Group: "",
Aliases: []string{},
Extensions: []string{
".x",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 201049282,
},
89: LanguageInfo{
Name: "Dockerfile",
FSName: "",
Type: TypeForString("programming"),
Color: "#384d54",
Group: "",
Aliases: []string{
"Containerfile",
},
Extensions: []string{
".dockerfile",
},
Interpreters: []string{},
Filenames: []string{
"Containerfile",
"Dockerfile",
},
MimeType: "text/x-dockerfile",
TMScope: "source.dockerfile",
AceMode: "dockerfile",
CodeMirrorMode: "dockerfile",
Wrap: false,
LanguageID: 89,
},
90: LanguageInfo{
Name: "Dogescript",
FSName: "",
Type: TypeForString("programming"),
Color: "#cca760",
Group: "",
Aliases: []string{},
Extensions: []string{
".djs",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 90,
},
91: LanguageInfo{
Name: "Dylan",
FSName: "",
Type: TypeForString("programming"),
Color: "#6c616e",
Group: "",
Aliases: []string{},
Extensions: []string{
".dylan",
".dyl",
".intr",
".lid",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-dylan",
TMScope: "source.dylan",
AceMode: "text",
CodeMirrorMode: "dylan",
Wrap: false,
LanguageID: 91,
},
92: LanguageInfo{
Name: "E",
FSName: "",
Type: TypeForString("programming"),
Color: "#ccce35",
Group: "",
Aliases: []string{},
Extensions: []string{
".e",
},
Interpreters: []string{
"rune",
},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 92,
},
529653389: LanguageInfo{
Name: "E-mail",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{
"email",
"eml",
"mail",
"mbox",
},
Extensions: []string{
".eml",
".mbox",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "application/mbox",
TMScope: "text.eml.basic",
AceMode: "text",
CodeMirrorMode: "mbox",
Wrap: false,
LanguageID: 529653389,
},
430: LanguageInfo{
Name: "EBNF",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".ebnf",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-ebnf",
TMScope: "source.ebnf",
AceMode: "text",
CodeMirrorMode: "ebnf",
Wrap: false,
LanguageID: 430,
},
93: LanguageInfo{
Name: "ECL",
FSName: "",
Type: TypeForString("programming"),
Color: "#8a1267",
Group: "",
Aliases: []string{},
Extensions: []string{
".ecl",
".eclxml",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-ecl",
TMScope: "source.ecl",
AceMode: "text",
CodeMirrorMode: "ecl",
Wrap: false,
LanguageID: 93,
},
94: LanguageInfo{
Name: "ECLiPSe",
FSName: "",
Type: TypeForString("programming"),
Color: "#001d9d",
Group: "prolog",
Aliases: []string{},
Extensions: []string{
".ecl",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.prolog.eclipse",
AceMode: "prolog",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 94,
},
95: LanguageInfo{
Name: "EJS",
FSName: "",
Type: TypeForString("markup"),
Color: "#a91e50",
Group: "",
Aliases: []string{},
Extensions: []string{
".ejs",
".ect",
".ejs.t",
".jst",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "text.html.js",
AceMode: "ejs",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 95,
},
96: LanguageInfo{
Name: "EQ",
FSName: "",
Type: TypeForString("programming"),
Color: "#a78649",
Group: "",
Aliases: []string{},
Extensions: []string{
".eq",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-csharp",
TMScope: "source.cs",
AceMode: "csharp",
CodeMirrorMode: "clike",
Wrap: false,
LanguageID: 96,
},
97: LanguageInfo{
Name: "Eagle",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".sch",
".brd",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/xml",
TMScope: "text.xml",
AceMode: "xml",
CodeMirrorMode: "xml",
Wrap: false,
LanguageID: 97,
},
963512632: LanguageInfo{
Name: "Earthly",
FSName: "",
Type: TypeForString("programming"),
Color: "#2af0ff",
Group: "",
Aliases: []string{
"Earthfile",
},
Extensions: []string{},
Interpreters: []string{},
Filenames: []string{
"Earthfile",
},
MimeType: "",
TMScope: "source.earthfile",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 963512632,
},
342840477: LanguageInfo{
Name: "Easybuild",
FSName: "",
Type: TypeForString("data"),
Color: "#069406",
Group: "Python",
Aliases: []string{},
Extensions: []string{
".eb",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-python",
TMScope: "source.python",
AceMode: "python",
CodeMirrorMode: "python",
Wrap: false,
LanguageID: 342840477,
},
98: LanguageInfo{
Name: "Ecere Projects",
FSName: "",
Type: TypeForString("data"),
Color: "#913960",
Group: "JavaScript",
Aliases: []string{},
Extensions: []string{
".epj",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "application/json",
TMScope: "source.json",
AceMode: "json",
CodeMirrorMode: "javascript",
Wrap: false,
LanguageID: 98,
},
96139566: LanguageInfo{
Name: "EditorConfig",
FSName: "",
Type: TypeForString("data"),
Color: "#fff1f2",
Group: "INI",
Aliases: []string{
"editor-config",
},
Extensions: []string{},
Interpreters: []string{},
Filenames: []string{
".editorconfig",
},
MimeType: "text/x-properties",
TMScope: "source.editorconfig",
AceMode: "ini",
CodeMirrorMode: "properties",
Wrap: false,
LanguageID: 96139566,
},
342840478: LanguageInfo{
Name: "Edje Data Collection",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".edc",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-c++src",
TMScope: "source.c++",
AceMode: "c_cpp",
CodeMirrorMode: "clike",
Wrap: false,
LanguageID: 342840478,
},
99: LanguageInfo{
Name: "Eiffel",
FSName: "",
Type: TypeForString("programming"),
Color: "#4d6977",
Group: "",
Aliases: []string{},
Extensions: []string{
".e",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-eiffel",
TMScope: "source.eiffel",
AceMode: "eiffel",
CodeMirrorMode: "eiffel",
Wrap: false,
LanguageID: 99,
},
100: LanguageInfo{
Name: "Elixir",
FSName: "",
Type: TypeForString("programming"),
Color: "#6e4a7e",
Group: "",
Aliases: []string{},
Extensions: []string{
".ex",
".exs",
},
Interpreters: []string{
"elixir",
},
Filenames: []string{
"mix.lock",
},
MimeType: "",
TMScope: "source.elixir",
AceMode: "elixir",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 100,
},
101: LanguageInfo{
Name: "Elm",
FSName: "",
Type: TypeForString("programming"),
Color: "#60B5CC",
Group: "",
Aliases: []string{},
Extensions: []string{
".elm",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-elm",
TMScope: "source.elm",
AceMode: "elm",
CodeMirrorMode: "elm",
Wrap: false,
LanguageID: 101,
},
102: LanguageInfo{
Name: "Emacs Lisp",
FSName: "",
Type: TypeForString("programming"),
Color: "#c065db",
Group: "",
Aliases: []string{
"elisp",
"emacs",
},
Extensions: []string{
".el",
".emacs",
".emacs.desktop",
},
Interpreters: []string{},
Filenames: []string{
".abbrev_defs",
".emacs",
".emacs.desktop",
".gnus",
".spacemacs",
".viper",
"Cask",
"Project.ede",
"_emacs",
"abbrev_defs",
},
MimeType: "text/x-common-lisp",
TMScope: "source.emacs.lisp",
AceMode: "lisp",
CodeMirrorMode: "commonlisp",
Wrap: false,
LanguageID: 102,
},
103: LanguageInfo{
Name: "EmberScript",
FSName: "",
Type: TypeForString("programming"),
Color: "#FFF4F3",
Group: "",
Aliases: []string{},
Extensions: []string{
".em",
".emberscript",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-coffeescript",
TMScope: "source.coffee",
AceMode: "coffee",
CodeMirrorMode: "coffeescript",
Wrap: false,
LanguageID: 103,
},
104: LanguageInfo{
Name: "Erlang",
FSName: "",
Type: TypeForString("programming"),
Color: "#B83998",
Group: "",
Aliases: []string{},
Extensions: []string{
".erl",
".app.src",
".es",
".escript",
".hrl",
".xrl",
".yrl",
},
Interpreters: []string{
"escript",
},
Filenames: []string{
"Emakefile",
"rebar.config",
"rebar.config.lock",
"rebar.lock",
},
MimeType: "text/x-erlang",
TMScope: "source.erlang",
AceMode: "erlang",
CodeMirrorMode: "erlang",
Wrap: false,
LanguageID: 104,
},
880693982: LanguageInfo{
Name: "Euphoria",
FSName: "",
Type: TypeForString("programming"),
Color: "#FF790B",
Group: "",
Aliases: []string{},
Extensions: []string{
".e",
".ex",
},
Interpreters: []string{
"eui",
"euiw",
},
Filenames: []string{},
MimeType: "",
TMScope: "source.euphoria",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 880693982,
},
105: LanguageInfo{
Name: "F#",
FSName: "",
Type: TypeForString("programming"),
Color: "#b845fc",
Group: "",
Aliases: []string{
"fsharp",
},
Extensions: []string{
".fs",
".fsi",
".fsx",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-fsharp",
TMScope: "source.fsharp",
AceMode: "text",
CodeMirrorMode: "mllike",
Wrap: false,
LanguageID: 105,
},
336943375: LanguageInfo{
Name: "F*",
FSName: "Fstar",
Type: TypeForString("programming"),
Color: "#572e30",
Group: "",
Aliases: []string{
"fstar",
},
Extensions: []string{
".fst",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.fstar",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 336943375,
},
686129783: LanguageInfo{
Name: "FIGlet Font",
FSName: "",
Type: TypeForString("data"),
Color: "#FFDDBB",
Group: "",
Aliases: []string{
"FIGfont",
},
Extensions: []string{
".flf",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.figfont",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 686129783,
},
106: LanguageInfo{
Name: "FLUX",
FSName: "",
Type: TypeForString("programming"),
Color: "#88ccff",
Group: "",
Aliases: []string{},
Extensions: []string{
".fx",
".flux",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 106,
},
108: LanguageInfo{
Name: "Factor",
FSName: "",
Type: TypeForString("programming"),
Color: "#636746",
Group: "",
Aliases: []string{},
Extensions: []string{
".factor",
},
Interpreters: []string{},
Filenames: []string{
".factor-boot-rc",
".factor-rc",
},
MimeType: "text/x-factor",
TMScope: "source.factor",
AceMode: "text",
CodeMirrorMode: "factor",
Wrap: false,
LanguageID: 108,
},
109: LanguageInfo{
Name: "Fancy",
FSName: "",
Type: TypeForString("programming"),
Color: "#7b9db4",
Group: "",
Aliases: []string{},
Extensions: []string{
".fy",
".fancypack",
},
Interpreters: []string{},
Filenames: []string{
"Fakefile",
},
MimeType: "",
TMScope: "source.fancy",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 109,
},
110: LanguageInfo{
Name: "Fantom",
FSName: "",
Type: TypeForString("programming"),
Color: "#14253c",
Group: "",
Aliases: []string{},
Extensions: []string{
".fan",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.fan",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 110,
},
622529198: LanguageInfo{
Name: "Faust",
FSName: "",
Type: TypeForString("programming"),
Color: "#c37240",
Group: "",
Aliases: []string{},
Extensions: []string{
".dsp",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.faust",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 622529198,
},
239946126: LanguageInfo{
Name: "Fennel",
FSName: "",
Type: TypeForString("programming"),
Color: "#fff3d7",
Group: "",
Aliases: []string{},
Extensions: []string{
".fnl",
},
Interpreters: []string{
"fennel",
},
Filenames: []string{},
MimeType: "",
TMScope: "source.fnl",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 239946126,
},
111: LanguageInfo{
Name: "Filebench WML",
FSName: "",
Type: TypeForString("programming"),
Color: "#F6B900",
Group: "",
Aliases: []string{},
Extensions: []string{
".f",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 111,
},
112: LanguageInfo{
Name: "Filterscript",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "RenderScript",
Aliases: []string{},
Extensions: []string{
".fs",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 112,
},
206353404: LanguageInfo{
Name: "Fluent",
FSName: "",
Type: TypeForString("programming"),
Color: "#ffcc33",
Group: "",
Aliases: []string{},
Extensions: []string{
".ftl",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.ftl",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 206353404,
},
113: LanguageInfo{
Name: "Formatted",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".for",
".eam.fs",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 113,
},
114: LanguageInfo{
Name: "Forth",
FSName: "",
Type: TypeForString("programming"),
Color: "#341708",
Group: "",
Aliases: []string{},
Extensions: []string{
".fth",
".4th",
".f",
".for",
".forth",
".fr",
".frt",
".fs",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-forth",
TMScope: "source.forth",
AceMode: "forth",
CodeMirrorMode: "forth",
Wrap: false,
LanguageID: 114,
},
107: LanguageInfo{
Name: "Fortran",
FSName: "",
Type: TypeForString("programming"),
Color: "#4d41b1",
Group: "Fortran",
Aliases: []string{},
Extensions: []string{
".f",
".f77",
".for",
".fpp",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-fortran",
TMScope: "source.fortran",
AceMode: "text",
CodeMirrorMode: "fortran",
Wrap: false,
LanguageID: 107,
},
761352333: LanguageInfo{
Name: "Fortran Free Form",
FSName: "",
Type: TypeForString("programming"),
Color: "#4d41b1",
Group: "Fortran",
Aliases: []string{},
Extensions: []string{
".f90",
".f03",
".f08",
".f95",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-fortran",
TMScope: "source.fortran.modern",
AceMode: "text",
CodeMirrorMode: "fortran",
Wrap: false,
LanguageID: 761352333,
},
472896659: LanguageInfo{
Name: "FreeBasic",
FSName: "",
Type: TypeForString("programming"),
Color: "#867db1",
Group: "",
Aliases: []string{
"fb",
},
Extensions: []string{
".bi",
".bas",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-vb",
TMScope: "source.vbnet",
AceMode: "text",
CodeMirrorMode: "vb",
Wrap: false,
LanguageID: 472896659,
},
115: LanguageInfo{
Name: "FreeMarker",
FSName: "",
Type: TypeForString("programming"),
Color: "#0050b2",
Group: "",
Aliases: []string{
"ftl",
},
Extensions: []string{
".ftl",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "text.html.ftl",
AceMode: "ftl",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 115,
},
116: LanguageInfo{
Name: "Frege",
FSName: "",
Type: TypeForString("programming"),
Color: "#00cafe",
Group: "",
Aliases: []string{},
Extensions: []string{
".fr",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.haskell",
AceMode: "haskell",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 116,
},
97358117: LanguageInfo{
Name: "Futhark",
FSName: "",
Type: TypeForString("programming"),
Color: "#5f021f",
Group: "",
Aliases: []string{},
Extensions: []string{
".fut",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.futhark",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 97358117,
},
117: LanguageInfo{
Name: "G-code",
FSName: "",
Type: TypeForString("programming"),
Color: "#D08CF2",
Group: "",
Aliases: []string{},
Extensions: []string{
".g",
".cnc",
".gco",
".gcode",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.gcode",
AceMode: "gcode",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 117,
},
290345951: LanguageInfo{
Name: "GAML",
FSName: "",
Type: TypeForString("programming"),
Color: "#FFC766",
Group: "",
Aliases: []string{},
Extensions: []string{
".gaml",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 290345951,
},
118: LanguageInfo{
Name: "GAMS",
FSName: "",
Type: TypeForString("programming"),
Color: "#f49a22",
Group: "",
Aliases: []string{},
Extensions: []string{
".gms",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 118,
},
119: LanguageInfo{
Name: "GAP",
FSName: "",
Type: TypeForString("programming"),
Color: "#0000cc",
Group: "",
Aliases: []string{},
Extensions: []string{
".g",
".gap",
".gd",
".gi",
".tst",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.gap",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 119,
},
121: LanguageInfo{
Name: "GCC Machine Description",
FSName: "",
Type: TypeForString("programming"),
Color: "#FFCFAB",
Group: "",
Aliases: []string{},
Extensions: []string{
".md",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-common-lisp",
TMScope: "source.lisp",
AceMode: "lisp",
CodeMirrorMode: "commonlisp",
Wrap: false,
LanguageID: 121,
},
122: LanguageInfo{
Name: "GDB",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".gdb",
".gdbinit",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.gdb",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 122,
},
123: LanguageInfo{
Name: "GDScript",
FSName: "",
Type: TypeForString("programming"),
Color: "#355570",
Group: "",
Aliases: []string{},
Extensions: []string{
".gd",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.gdscript",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 123,
},
459577965: LanguageInfo{
Name: "GEDCOM",
FSName: "",
Type: TypeForString("data"),
Color: "#003058",
Group: "",
Aliases: []string{},
Extensions: []string{
".ged",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.gedcom",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 459577965,
},
124: LanguageInfo{
Name: "GLSL",
FSName: "",
Type: TypeForString("programming"),
Color: "#5686a5",
Group: "",
Aliases: []string{},
Extensions: []string{
".glsl",
".fp",
".frag",
".frg",
".fs",
".fsh",
".fshader",
".geo",
".geom",
".glslf",
".glslv",
".gs",
".gshader",
".rchit",
".rmiss",
".shader",
".tesc",
".tese",
".vert",
".vrx",
".vsh",
".vshader",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.glsl",
AceMode: "glsl",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 124,
},
302957008: LanguageInfo{
Name: "GN",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".gn",
".gni",
},
Interpreters: []string{
"gn",
},
Filenames: []string{
".gn",
},
MimeType: "text/x-python",
TMScope: "source.gn",
AceMode: "python",
CodeMirrorMode: "python",
Wrap: false,
LanguageID: 302957008,
},
257856279: LanguageInfo{
Name: "GSC",
FSName: "",
Type: TypeForString("programming"),
Color: "#FF6800",
Group: "",
Aliases: []string{},
Extensions: []string{
".gsc",
".csc",
".gsh",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-csrc",
TMScope: "source.gsc",
AceMode: "c_cpp",
CodeMirrorMode: "clike",
Wrap: false,
LanguageID: 257856279,
},
125: LanguageInfo{
Name: "Game Maker Language",
FSName: "",
Type: TypeForString("programming"),
Color: "#71b417",
Group: "",
Aliases: []string{},
Extensions: []string{
".gml",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-c++src",
TMScope: "source.c++",
AceMode: "c_cpp",
CodeMirrorMode: "clike",
Wrap: false,
LanguageID: 125,
},
907065713: LanguageInfo{
Name: "Gemfile.lock",
FSName: "",
Type: TypeForString("data"),
Color: "#701516",
Group: "",
Aliases: []string{},
Extensions: []string{},
Interpreters: []string{},
Filenames: []string{
"Gemfile.lock",
},
MimeType: "",
TMScope: "source.gemfile-lock",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 907065713,
},
792408528: LanguageInfo{
Name: "Genie",
FSName: "",
Type: TypeForString("programming"),
Color: "#fb855d",
Group: "",
Aliases: []string{},
Extensions: []string{
".gs",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 792408528,
},
126: LanguageInfo{
Name: "Genshi",
FSName: "",
Type: TypeForString("programming"),
Color: "#951531",
Group: "",
Aliases: []string{
"xml+genshi",
"xml+kid",
},
Extensions: []string{
".kid",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/xml",
TMScope: "text.xml.genshi",
AceMode: "xml",
CodeMirrorMode: "xml",
Wrap: false,
LanguageID: 126,
},
127: LanguageInfo{
Name: "Gentoo Ebuild",
FSName: "",
Type: TypeForString("programming"),
Color: "#9400ff",
Group: "Shell",
Aliases: []string{},
Extensions: []string{
".ebuild",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-sh",
TMScope: "source.shell",
AceMode: "sh",
CodeMirrorMode: "shell",
Wrap: false,
LanguageID: 127,
},
128: LanguageInfo{
Name: "Gentoo Eclass",
FSName: "",
Type: TypeForString("programming"),
Color: "#9400ff",
Group: "Shell",
Aliases: []string{},
Extensions: []string{
".eclass",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-sh",
TMScope: "source.shell",
AceMode: "sh",
CodeMirrorMode: "shell",
Wrap: false,
LanguageID: 128,
},
404627610: LanguageInfo{
Name: "Gerber Image",
FSName: "",
Type: TypeForString("data"),
Color: "#d20b00",
Group: "",
Aliases: []string{
"rs-274x",
},
Extensions: []string{
".gbr",
".cmp",
".gbl",
".gbo",
".gbp",
".gbs",
".gko",
".gml",
".gpb",
".gpt",
".gtl",
".gto",
".gtp",
".gts",
".ncl",
".sol",
},
Interpreters: []string{
"gerbv",
"gerbview",
},
Filenames: []string{},
MimeType: "",
TMScope: "source.gerber",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 404627610,
},
129: LanguageInfo{
Name: "Gettext Catalog",
FSName: "",
Type: TypeForString("prose"),
Color: "",
Group: "",
Aliases: []string{
"pot",
},
Extensions: []string{
".po",
".pot",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.po",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 129,
},
76: LanguageInfo{
Name: "Gherkin",
FSName: "",
Type: TypeForString("programming"),
Color: "#5B2063",
Group: "",
Aliases: []string{
"cucumber",
},
Extensions: []string{
".feature",
".story",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "text.gherkin.feature",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 76,
},
956324166: LanguageInfo{
Name: "Git Attributes",
FSName: "",
Type: TypeForString("data"),
Color: "#F44D27",
Group: "INI",
Aliases: []string{
"gitattributes",
},
Extensions: []string{},
Interpreters: []string{},
Filenames: []string{
".gitattributes",
},
MimeType: "text/x-sh",
TMScope: "source.gitattributes",
AceMode: "gitignore",
CodeMirrorMode: "shell",
Wrap: false,
LanguageID: 956324166,
},
807968997: LanguageInfo{
Name: "Git Config",
FSName: "",
Type: TypeForString("data"),
Color: "#F44D27",
Group: "INI",
Aliases: []string{
"gitconfig",
"gitmodules",
},
Extensions: []string{
".gitconfig",
},
Interpreters: []string{},
Filenames: []string{
".gitconfig",
".gitmodules",
},
MimeType: "text/x-properties",
TMScope: "source.gitconfig",
AceMode: "ini",
CodeMirrorMode: "properties",
Wrap: false,
LanguageID: 807968997,
},
1054258749: LanguageInfo{
Name: "Gleam",
FSName: "",
Type: TypeForString("programming"),
Color: "#ffaff3",
Group: "",
Aliases: []string{},
Extensions: []string{
".gleam",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.gleam",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 1054258749,
},
130: LanguageInfo{
Name: "Glyph",
FSName: "",
Type: TypeForString("programming"),
Color: "#c1ac7f",
Group: "",
Aliases: []string{},
Extensions: []string{
".glf",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-tcl",
TMScope: "source.tcl",
AceMode: "tcl",
CodeMirrorMode: "tcl",
Wrap: false,
LanguageID: 130,
},
997665271: LanguageInfo{
Name: "Glyph Bitmap Distribution Format",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".bdf",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.bdf",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 997665271,
},
131: LanguageInfo{
Name: "Gnuplot",
FSName: "",
Type: TypeForString("programming"),
Color: "#f0a9f0",
Group: "",
Aliases: []string{},
Extensions: []string{
".gp",
".gnu",
".gnuplot",
".p",
".plot",
".plt",
},
Interpreters: []string{
"gnuplot",
},
Filenames: []string{},
MimeType: "",
TMScope: "source.gnuplot",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 131,
},
132: LanguageInfo{
Name: "Go",
FSName: "",
Type: TypeForString("programming"),
Color: "#00ADD8",
Group: "",
Aliases: []string{
"golang",
},
Extensions: []string{
".go",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-go",
TMScope: "source.go",
AceMode: "golang",
CodeMirrorMode: "go",
Wrap: false,
LanguageID: 132,
},
1054391671: LanguageInfo{
Name: "Go Checksums",
FSName: "",
Type: TypeForString("data"),
Color: "#00ADD8",
Group: "",
Aliases: []string{
"go.sum",
"go sum",
},
Extensions: []string{},
Interpreters: []string{},
Filenames: []string{
"go.sum",
},
MimeType: "",
TMScope: "go.sum",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 1054391671,
},
947461016: LanguageInfo{
Name: "Go Module",
FSName: "",
Type: TypeForString("data"),
Color: "#00ADD8",
Group: "",
Aliases: []string{
"go.mod",
"go mod",
},
Extensions: []string{},
Interpreters: []string{},
Filenames: []string{
"go.mod",
},
MimeType: "",
TMScope: "go.mod",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 947461016,
},
133: LanguageInfo{
Name: "Golo",
FSName: "",
Type: TypeForString("programming"),
Color: "#88562A",
Group: "",
Aliases: []string{},
Extensions: []string{
".golo",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.golo",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 133,
},
134: LanguageInfo{
Name: "Gosu",
FSName: "",
Type: TypeForString("programming"),
Color: "#82937f",
Group: "",
Aliases: []string{},
Extensions: []string{
".gs",
".gst",
".gsx",
".vark",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.gosu.2",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 134,
},
135: LanguageInfo{
Name: "Grace",
FSName: "",
Type: TypeForString("programming"),
Color: "#615f8b",
Group: "",
Aliases: []string{},
Extensions: []string{
".grace",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.grace",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 135,
},
136: LanguageInfo{
Name: "Gradle",
FSName: "",
Type: TypeForString("data"),
Color: "#02303a",
Group: "",
Aliases: []string{},
Extensions: []string{
".gradle",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.groovy.gradle",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 136,
},
137: LanguageInfo{
Name: "Grammatical Framework",
FSName: "",
Type: TypeForString("programming"),
Color: "#ff0000",
Group: "",
Aliases: []string{
"gf",
},
Extensions: []string{
".gf",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-haskell",
TMScope: "source.gf",
AceMode: "haskell",
CodeMirrorMode: "haskell",
Wrap: false,
LanguageID: 137,
},
138: LanguageInfo{
Name: "Graph Modeling Language",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".gml",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 138,
},
139: LanguageInfo{
Name: "GraphQL",
FSName: "",
Type: TypeForString("data"),
Color: "#e10098",
Group: "",
Aliases: []string{},
Extensions: []string{
".graphql",
".gql",
".graphqls",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.graphql",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 139,
},
140: LanguageInfo{
Name: "Graphviz (DOT)",
FSName: "",
Type: TypeForString("data"),
Color: "#2596be",
Group: "",
Aliases: []string{},
Extensions: []string{
".dot",
".gv",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.dot",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 140,
},
142: LanguageInfo{
Name: "Groovy",
FSName: "",
Type: TypeForString("programming"),
Color: "#4298b8",
Group: "",
Aliases: []string{},
Extensions: []string{
".groovy",
".grt",
".gtpl",
".gvy",
},
Interpreters: []string{
"groovy",
},
Filenames: []string{
"Jenkinsfile",
},
MimeType: "text/x-groovy",
TMScope: "source.groovy",
AceMode: "groovy",
CodeMirrorMode: "groovy",
Wrap: false,
LanguageID: 142,
},
143: LanguageInfo{
Name: "Groovy Server Pages",
FSName: "",
Type: TypeForString("programming"),
Color: "#4298b8",
Group: "Groovy",
Aliases: []string{
"gsp",
"java server page",
},
Extensions: []string{
".gsp",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "application/x-jsp",
TMScope: "text.html.jsp",
AceMode: "jsp",
CodeMirrorMode: "htmlembedded",
Wrap: false,
LanguageID: 143,
},
366607477: LanguageInfo{
Name: "HAProxy",
FSName: "",
Type: TypeForString("data"),
Color: "#106da9",
Group: "",
Aliases: []string{},
Extensions: []string{
".cfg",
},
Interpreters: []string{},
Filenames: []string{
"haproxy.cfg",
},
MimeType: "",
TMScope: "source.haproxy-config",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 366607477,
},
144: LanguageInfo{
Name: "HCL",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{
"HashiCorp Configuration Language",
"terraform",
},
Extensions: []string{
".hcl",
".nomad",
".tf",
".tfvars",
".workflow",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-ruby",
TMScope: "source.terraform",
AceMode: "ruby",
CodeMirrorMode: "ruby",
Wrap: false,
LanguageID: 144,
},
145: LanguageInfo{
Name: "HLSL",
FSName: "",
Type: TypeForString("programming"),
Color: "#aace60",
Group: "",
Aliases: []string{},
Extensions: []string{
".hlsl",
".cginc",
".fx",
".fxh",
".hlsli",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.hlsl",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 145,
},
146: LanguageInfo{
Name: "HTML",
FSName: "",
Type: TypeForString("markup"),
Color: "#e34c26",
Group: "",
Aliases: []string{
"xhtml",
},
Extensions: []string{
".html",
".hta",
".htm",
".html.hl",
".inc",
".xht",
".xhtml",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/html",
TMScope: "text.html.basic",
AceMode: "html",
CodeMirrorMode: "htmlmixed",
Wrap: false,
LanguageID: 146,
},
148: LanguageInfo{
Name: "HTML+ECR",
FSName: "",
Type: TypeForString("markup"),
Color: "#2e1052",
Group: "HTML",
Aliases: []string{
"ecr",
},
Extensions: []string{
".ecr",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/html",
TMScope: "text.html.ecr",
AceMode: "text",
CodeMirrorMode: "htmlmixed",
Wrap: false,
LanguageID: 148,
},
149: LanguageInfo{
Name: "HTML+EEX",
FSName: "",
Type: TypeForString("markup"),
Color: "#6e4a7e",
Group: "HTML",
Aliases: []string{
"eex",
"heex",
"leex",
},
Extensions: []string{
".eex",
".html.heex",
".html.leex",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/html",
TMScope: "text.html.elixir",
AceMode: "text",
CodeMirrorMode: "htmlmixed",
Wrap: false,
LanguageID: 149,
},
150: LanguageInfo{
Name: "HTML+ERB",
FSName: "",
Type: TypeForString("markup"),
Color: "#701516",
Group: "HTML",
Aliases: []string{
"erb",
"rhtml",
"html+ruby",
},
Extensions: []string{
".erb",
".erb.deface",
".rhtml",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "application/x-erb",
TMScope: "text.html.erb",
AceMode: "text",
CodeMirrorMode: "htmlembedded",
Wrap: false,
LanguageID: 150,
},
151: LanguageInfo{
Name: "HTML+PHP",
FSName: "",
Type: TypeForString("markup"),
Color: "#4f5d95",
Group: "HTML",
Aliases: []string{},
Extensions: []string{
".phtml",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "application/x-httpd-php",
TMScope: "text.html.php",
AceMode: "php",
CodeMirrorMode: "php",
Wrap: false,
LanguageID: 151,
},
479039817: LanguageInfo{
Name: "HTML+Razor",
FSName: "",
Type: TypeForString("markup"),
Color: "#512be4",
Group: "HTML",
Aliases: []string{
"razor",
},
Extensions: []string{
".cshtml",
".razor",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/html",
TMScope: "text.html.cshtml",
AceMode: "razor",
CodeMirrorMode: "htmlmixed",
Wrap: false,
LanguageID: 479039817,
},
152: LanguageInfo{
Name: "HTTP",
FSName: "",
Type: TypeForString("data"),
Color: "#005C9C",
Group: "",
Aliases: []string{},
Extensions: []string{
".http",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "message/http",
TMScope: "source.httpspec",
AceMode: "text",
CodeMirrorMode: "http",
Wrap: false,
LanguageID: 152,
},
786683730: LanguageInfo{
Name: "HXML",
FSName: "",
Type: TypeForString("data"),
Color: "#f68712",
Group: "",
Aliases: []string{},
Extensions: []string{
".hxml",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.hxml",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 786683730,
},
153: LanguageInfo{
Name: "Hack",
FSName: "",
Type: TypeForString("programming"),
Color: "#878787",
Group: "",
Aliases: []string{},
Extensions: []string{
".hack",
".hh",
".hhi",
".php",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "application/x-httpd-php",
TMScope: "source.hack",
AceMode: "php",
CodeMirrorMode: "php",
Wrap: false,
LanguageID: 153,
},
154: LanguageInfo{
Name: "Haml",
FSName: "",
Type: TypeForString("markup"),
Color: "#ece2a9",
Group: "",
Aliases: []string{},
Extensions: []string{
".haml",
".haml.deface",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-haml",
TMScope: "text.haml",
AceMode: "haml",
CodeMirrorMode: "haml",
Wrap: false,
LanguageID: 154,
},
155: LanguageInfo{
Name: "Handlebars",
FSName: "",
Type: TypeForString("markup"),
Color: "#f7931e",
Group: "",
Aliases: []string{
"hbs",
"htmlbars",
},
Extensions: []string{
".handlebars",
".hbs",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "text.html.handlebars",
AceMode: "handlebars",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 155,
},
156: LanguageInfo{
Name: "Harbour",
FSName: "",
Type: TypeForString("programming"),
Color: "#0e60e3",
Group: "",
Aliases: []string{},
Extensions: []string{
".hb",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.harbour",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 156,
},
157: LanguageInfo{
Name: "Haskell",
FSName: "",
Type: TypeForString("programming"),
Color: "#5e5086",
Group: "",
Aliases: []string{},
Extensions: []string{
".hs",
".hs-boot",
".hsc",
},
Interpreters: []string{
"runghc",
"runhaskell",
"runhugs",
},
Filenames: []string{},
MimeType: "text/x-haskell",
TMScope: "source.haskell",
AceMode: "haskell",
CodeMirrorMode: "haskell",
Wrap: false,
LanguageID: 157,
},
158: LanguageInfo{
Name: "Haxe",
FSName: "",
Type: TypeForString("programming"),
Color: "#df7900",
Group: "",
Aliases: []string{},
Extensions: []string{
".hx",
".hxsl",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-haxe",
TMScope: "source.hx",
AceMode: "haxe",
CodeMirrorMode: "haxe",
Wrap: false,
LanguageID: 158,
},
931814087: LanguageInfo{
Name: "HiveQL",
FSName: "",
Type: TypeForString("programming"),
Color: "#dce200",
Group: "",
Aliases: []string{},
Extensions: []string{
".q",
".hql",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.hql",
AceMode: "sql",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 931814087,
},
928121743: LanguageInfo{
Name: "HolyC",
FSName: "",
Type: TypeForString("programming"),
Color: "#ffefaf",
Group: "",
Aliases: []string{},
Extensions: []string{
".hc",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-csrc",
TMScope: "source.hc",
AceMode: "c_cpp",
CodeMirrorMode: "clike",
Wrap: false,
LanguageID: 928121743,
},
159: LanguageInfo{
Name: "Hy",
FSName: "",
Type: TypeForString("programming"),
Color: "#7790B2",
Group: "",
Aliases: []string{
"hylang",
},
Extensions: []string{
".hy",
},
Interpreters: []string{
"hy",
},
Filenames: []string{},
MimeType: "",
TMScope: "source.hy",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 159,
},
160: LanguageInfo{
Name: "HyPhy",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".bf",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 160,
},
161: LanguageInfo{
Name: "IDL",
FSName: "",
Type: TypeForString("programming"),
Color: "#a3522f",
Group: "",
Aliases: []string{},
Extensions: []string{
".pro",
".dlm",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-idl",
TMScope: "source.idl",
AceMode: "text",
CodeMirrorMode: "idl",
Wrap: false,
LanguageID: 161,
},
162: LanguageInfo{
Name: "IGOR Pro",
FSName: "",
Type: TypeForString("programming"),
Color: "#0000cc",
Group: "",
Aliases: []string{
"igor",
"igorpro",
},
Extensions: []string{
".ipf",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.igor",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 162,
},
163: LanguageInfo{
Name: "INI",
FSName: "",
Type: TypeForString("data"),
Color: "#d1dbe0",
Group: "",
Aliases: []string{
"dosini",
},
Extensions: []string{
".ini",
".cfg",
".dof",
".lektorproject",
".prefs",
".pro",
".properties",
},
Interpreters: []string{},
Filenames: []string{
".flake8",
"buildozer.spec",
},
MimeType: "text/x-properties",
TMScope: "source.ini",
AceMode: "ini",
CodeMirrorMode: "properties",
Wrap: false,
LanguageID: 163,
},
164: LanguageInfo{
Name: "IRC log",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{
"irc",
"irc logs",
},
Extensions: []string{
".irclog",
".weechatlog",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/mirc",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "mirc",
Wrap: false,
LanguageID: 164,
},
165: LanguageInfo{
Name: "Idris",
FSName: "",
Type: TypeForString("programming"),
Color: "#b30000",
Group: "",
Aliases: []string{},
Extensions: []string{
".idr",
".lidr",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.idris",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 165,
},
74444240: LanguageInfo{
Name: "Ignore List",
FSName: "",
Type: TypeForString("data"),
Color: "#000000",
Group: "INI",
Aliases: []string{
"ignore",
"gitignore",
"git-ignore",
},
Extensions: []string{
".gitignore",
},
Interpreters: []string{},
Filenames: []string{
".atomignore",
".babelignore",
".bzrignore",
".coffeelintignore",
".cvsignore",
".dockerignore",
".eleventyignore",
".eslintignore",
".gitignore",
".markdownlintignore",
".nodemonignore",
".npmignore",
".prettierignore",
".stylelintignore",
".vercelignore",
".vscodeignore",
"gitignore-global",
"gitignore_global",
},
MimeType: "text/x-sh",
TMScope: "source.gitignore",
AceMode: "gitignore",
CodeMirrorMode: "shell",
Wrap: false,
LanguageID: 74444240,
},
575143428: LanguageInfo{
Name: "ImageJ Macro",
FSName: "",
Type: TypeForString("programming"),
Color: "#99AAFF",
Group: "",
Aliases: []string{
"ijm",
},
Extensions: []string{
".ijm",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 575143428,
},
166: LanguageInfo{
Name: "Inform 7",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{
"i7",
"inform7",
},
Extensions: []string{
".ni",
".i7x",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.inform7",
AceMode: "text",
CodeMirrorMode: "",
Wrap: true,
LanguageID: 166,
},
167: LanguageInfo{
Name: "<NAME>",
FSName: "",
Type: TypeForString("programming"),
Color: "#264b99",
Group: "",
Aliases: []string{},
Extensions: []string{
".iss",
".isl",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.inno",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 167,
},
168: LanguageInfo{
Name: "Io",
FSName: "",
Type: TypeForString("programming"),
Color: "#a9188d",
Group: "",
Aliases: []string{},
Extensions: []string{
".io",
},
Interpreters: []string{
"io",
},
Filenames: []string{},
MimeType: "",
TMScope: "source.io",
AceMode: "io",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 168,
},
169: LanguageInfo{
Name: "Ioke",
FSName: "",
Type: TypeForString("programming"),
Color: "#078193",
Group: "",
Aliases: []string{},
Extensions: []string{
".ik",
},
Interpreters: []string{
"ioke",
},
Filenames: []string{},
MimeType: "",
TMScope: "source.ioke",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 169,
},
170: LanguageInfo{
Name: "Isabelle",
FSName: "",
Type: TypeForString("programming"),
Color: "#FEFE00",
Group: "",
Aliases: []string{},
Extensions: []string{
".thy",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.isabelle.theory",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 170,
},
171: LanguageInfo{
Name: "Isabelle ROOT",
FSName: "",
Type: TypeForString("programming"),
Color: "#FEFE00",
Group: "Isabelle",
Aliases: []string{},
Extensions: []string{},
Interpreters: []string{},
Filenames: []string{
"ROOT",
},
MimeType: "",
TMScope: "source.isabelle.root",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 171,
},
172: LanguageInfo{
Name: "J",
FSName: "",
Type: TypeForString("programming"),
Color: "#9EEDFF",
Group: "",
Aliases: []string{},
Extensions: []string{
".ijs",
},
Interpreters: []string{
"jconsole",
},
Filenames: []string{},
MimeType: "",
TMScope: "source.j",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 172,
},
447261135: LanguageInfo{
Name: "JAR Manifest",
FSName: "",
Type: TypeForString("data"),
Color: "#b07219",
Group: "",
Aliases: []string{},
Extensions: []string{},
Interpreters: []string{},
Filenames: []string{
"MANIFEST.MF",
},
MimeType: "",
TMScope: "source.yaml",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 447261135,
},
173: LanguageInfo{
Name: "JFlex",
FSName: "",
Type: TypeForString("programming"),
Color: "#DBCA00",
Group: "Lex",
Aliases: []string{},
Extensions: []string{
".flex",
".jflex",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.jflex",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 173,
},
174: LanguageInfo{
Name: "JSON",
FSName: "",
Type: TypeForString("data"),
Color: "#292929",
Group: "",
Aliases: []string{},
Extensions: []string{
".json",
".avsc",
".geojson",
".gltf",
".har",
".ice",
".JSON-tmLanguage",
".jsonl",
".mcmeta",
".tfstate",
".tfstate.backup",
".topojson",
".webapp",
".webmanifest",
".yy",
".yyp",
},
Interpreters: []string{},
Filenames: []string{
".arcconfig",
".auto-changelog",
".c8rc",
".htmlhintrc",
".imgbotconfig",
".nycrc",
".tern-config",
".tern-project",
".watchmanconfig",
"Pipfile.lock",
"composer.lock",
"mcmod.info",
},
MimeType: "application/json",
TMScope: "source.json",
AceMode: "json",
CodeMirrorMode: "javascript",
Wrap: false,
LanguageID: 174,
},
423: LanguageInfo{
Name: "JSON with Comments",
FSName: "",
Type: TypeForString("data"),
Color: "#292929",
Group: "JSON",
Aliases: []string{
"jsonc",
},
Extensions: []string{
".jsonc",
".code-snippets",
".sublime-build",
".sublime-commands",
".sublime-completions",
".sublime-keymap",
".sublime-macro",
".sublime-menu",
".sublime-mousemap",
".sublime-project",
".sublime-settings",
".sublime-theme",
".sublime-workspace",
".sublime_metrics",
".sublime_session",
},
Interpreters: []string{},
Filenames: []string{
".babelrc",
".eslintrc.json",
".jscsrc",
".jshintrc",
".jslintrc",
"api-extractor.json",
"devcontainer.json",
"jsconfig.json",
"language-configuration.json",
"tsconfig.json",
"tslint.json",
},
MimeType: "text/javascript",
TMScope: "source.js",
AceMode: "javascript",
CodeMirrorMode: "javascript",
Wrap: false,
LanguageID: 423,
},
175: LanguageInfo{
Name: "JSON5",
FSName: "",
Type: TypeForString("data"),
Color: "#267CB9",
Group: "",
Aliases: []string{},
Extensions: []string{
".json5",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "application/json",
TMScope: "source.js",
AceMode: "javascript",
CodeMirrorMode: "javascript",
Wrap: false,
LanguageID: 175,
},
176: LanguageInfo{
Name: "JSONLD",
FSName: "",
Type: TypeForString("data"),
Color: "#0c479c",
Group: "",
Aliases: []string{},
Extensions: []string{
".jsonld",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "application/json",
TMScope: "source.js",
AceMode: "javascript",
CodeMirrorMode: "javascript",
Wrap: false,
LanguageID: 176,
},
177: LanguageInfo{
Name: "JSONiq",
FSName: "",
Type: TypeForString("programming"),
Color: "#40d47e",
Group: "",
Aliases: []string{},
Extensions: []string{
".jq",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "application/json",
TMScope: "source.jsoniq",
AceMode: "jsoniq",
CodeMirrorMode: "javascript",
Wrap: false,
LanguageID: 177,
},
1028705371: LanguageInfo{
Name: "Janet",
FSName: "",
Type: TypeForString("programming"),
Color: "#0886a5",
Group: "",
Aliases: []string{},
Extensions: []string{
".janet",
},
Interpreters: []string{
"janet",
},
Filenames: []string{},
MimeType: "text/x-scheme",
TMScope: "source.janet",
AceMode: "scheme",
CodeMirrorMode: "scheme",
Wrap: false,
LanguageID: 1028705371,
},
180: LanguageInfo{
Name: "Jasmin",
FSName: "",
Type: TypeForString("programming"),
Color: "#d03600",
Group: "",
Aliases: []string{},
Extensions: []string{
".j",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.jasmin",
AceMode: "java",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 180,
},
181: LanguageInfo{
Name: "Java",
FSName: "",
Type: TypeForString("programming"),
Color: "#b07219",
Group: "",
Aliases: []string{},
Extensions: []string{
".java",
".jav",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-java",
TMScope: "source.java",
AceMode: "java",
CodeMirrorMode: "clike",
Wrap: false,
LanguageID: 181,
},
519377561: LanguageInfo{
Name: "Java Properties",
FSName: "",
Type: TypeForString("data"),
Color: "#2A6277",
Group: "",
Aliases: []string{},
Extensions: []string{
".properties",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-properties",
TMScope: "source.java-properties",
AceMode: "properties",
CodeMirrorMode: "properties",
Wrap: false,
LanguageID: 519377561,
},
182: LanguageInfo{
Name: "Java Server Pages",
FSName: "",
Type: TypeForString("programming"),
Color: "#2A6277",
Group: "Java",
Aliases: []string{
"jsp",
},
Extensions: []string{
".jsp",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "application/x-jsp",
TMScope: "text.html.jsp",
AceMode: "jsp",
CodeMirrorMode: "htmlembedded",
Wrap: false,
LanguageID: 182,
},
183: LanguageInfo{
Name: "JavaScript",
FSName: "",
Type: TypeForString("programming"),
Color: "#f1e05a",
Group: "",
Aliases: []string{
"js",
"node",
},
Extensions: []string{
".js",
"._js",
".bones",
".cjs",
".es",
".es6",
".frag",
".gs",
".jake",
".javascript",
".jsb",
".jscad",
".jsfl",
".jsm",
".jss",
".jsx",
".mjs",
".njs",
".pac",
".sjs",
".ssjs",
".xsjs",
".xsjslib",
},
Interpreters: []string{
"chakra",
"d8",
"gjs",
"js",
"node",
"nodejs",
"qjs",
"rhino",
"v8",
"v8-shell",
},
Filenames: []string{
"Jakefile",
},
MimeType: "text/javascript",
TMScope: "source.js",
AceMode: "javascript",
CodeMirrorMode: "javascript",
Wrap: false,
LanguageID: 183,
},
914318960: LanguageInfo{
Name: "JavaScript+ERB",
FSName: "",
Type: TypeForString("programming"),
Color: "#f1e05a",
Group: "JavaScript",
Aliases: []string{},
Extensions: []string{
".js.erb",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "application/javascript",
TMScope: "source.js",
AceMode: "javascript",
CodeMirrorMode: "javascript",
Wrap: false,
LanguageID: 914318960,
},
774635084: LanguageInfo{
Name: "Jest Snapshot",
FSName: "",
Type: TypeForString("data"),
Color: "#15c213",
Group: "",
Aliases: []string{},
Extensions: []string{
".snap",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "application/javascript",
TMScope: "source.jest.snap",
AceMode: "javascript",
CodeMirrorMode: "javascript",
Wrap: false,
LanguageID: 774635084,
},
147: LanguageInfo{
Name: "Jinja",
FSName: "",
Type: TypeForString("markup"),
Color: "#a52a22",
Group: "",
Aliases: []string{
"django",
"html+django",
"html+jinja",
"htmldjango",
},
Extensions: []string{
".jinja",
".j2",
".jinja2",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-django",
TMScope: "text.html.django",
AceMode: "django",
CodeMirrorMode: "django",
Wrap: false,
LanguageID: 147,
},
284531423: LanguageInfo{
Name: "Jison",
FSName: "",
Type: TypeForString("programming"),
Color: "#56b3cb",
Group: "Yacc",
Aliases: []string{},
Extensions: []string{
".jison",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.jison",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 284531423,
},
406395330: LanguageInfo{
Name: "<NAME>",
FSName: "",
Type: TypeForString("programming"),
Color: "#56b3cb",
Group: "Lex",
Aliases: []string{},
Extensions: []string{
".jisonlex",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.jisonlex",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 406395330,
},
998078858: LanguageInfo{
Name: "Jolie",
FSName: "",
Type: TypeForString("programming"),
Color: "#843179",
Group: "",
Aliases: []string{},
Extensions: []string{
".ol",
".iol",
},
Interpreters: []string{
"jolie",
},
Filenames: []string{},
MimeType: "",
TMScope: "source.jolie",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 998078858,
},
664885656: LanguageInfo{
Name: "Jsonnet",
FSName: "",
Type: TypeForString("programming"),
Color: "#0064bd",
Group: "",
Aliases: []string{},
Extensions: []string{
".jsonnet",
".libsonnet",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.jsonnet",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 664885656,
},
184: LanguageInfo{
Name: "Julia",
FSName: "",
Type: TypeForString("programming"),
Color: "#a270ba",
Group: "",
Aliases: []string{},
Extensions: []string{
".jl",
},
Interpreters: []string{
"julia",
},
Filenames: []string{},
MimeType: "text/x-julia",
TMScope: "source.julia",
AceMode: "julia",
CodeMirrorMode: "julia",
Wrap: false,
LanguageID: 184,
},
185: LanguageInfo{
Name: "Jupyter Notebook",
FSName: "",
Type: TypeForString("markup"),
Color: "#DA5B0B",
Group: "",
Aliases: []string{
"IPython Notebook",
},
Extensions: []string{
".ipynb",
},
Interpreters: []string{},
Filenames: []string{
"Notebook",
},
MimeType: "application/json",
TMScope: "source.json",
AceMode: "json",
CodeMirrorMode: "javascript",
Wrap: false,
LanguageID: 185,
},
186: LanguageInfo{
Name: "KRL",
FSName: "",
Type: TypeForString("programming"),
Color: "#28430A",
Group: "",
Aliases: []string{},
Extensions: []string{
".krl",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 186,
},
818804755: LanguageInfo{
Name: "Kaitai Struct",
FSName: "",
Type: TypeForString("programming"),
Color: "#773b37",
Group: "",
Aliases: []string{
"ksy",
},
Extensions: []string{
".ksy",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-yaml",
TMScope: "source.yaml",
AceMode: "yaml",
CodeMirrorMode: "yaml",
Wrap: false,
LanguageID: 818804755,
},
603336474: LanguageInfo{
Name: "KakouneScript",
FSName: "",
Type: TypeForString("programming"),
Color: "#6f8042",
Group: "",
Aliases: []string{
"kak",
"kakscript",
},
Extensions: []string{
".kak",
},
Interpreters: []string{},
Filenames: []string{
"kakrc",
},
MimeType: "",
TMScope: "source.kakscript",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 603336474,
},
187: LanguageInfo{
Name: "KiCad Layout",
FSName: "",
Type: TypeForString("data"),
Color: "#2f4aab",
Group: "",
Aliases: []string{
"pcbnew",
},
Extensions: []string{
".kicad_pcb",
".kicad_mod",
".kicad_wks",
},
Interpreters: []string{},
Filenames: []string{
"fp-lib-table",
},
MimeType: "text/x-common-lisp",
TMScope: "source.pcb.sexp",
AceMode: "lisp",
CodeMirrorMode: "commonlisp",
Wrap: false,
LanguageID: 187,
},
140848857: LanguageInfo{
Name: "KiCad Legacy Layout",
FSName: "",
Type: TypeForString("data"),
Color: "#2f4aab",
Group: "",
Aliases: []string{},
Extensions: []string{
".brd",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.pcb.board",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 140848857,
},
622447435: LanguageInfo{
Name: "KiCad Schematic",
FSName: "",
Type: TypeForString("data"),
Color: "#2f4aab",
Group: "",
Aliases: []string{
"eeschema schematic",
},
Extensions: []string{
".sch",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.pcb.schematic",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 622447435,
},
188: LanguageInfo{
Name: "Kit",
FSName: "",
Type: TypeForString("markup"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".kit",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/html",
TMScope: "text.html.basic",
AceMode: "html",
CodeMirrorMode: "htmlmixed",
Wrap: false,
LanguageID: 188,
},
189: LanguageInfo{
Name: "Kotlin",
FSName: "",
Type: TypeForString("programming"),
Color: "#A97BFF",
Group: "",
Aliases: []string{},
Extensions: []string{
".kt",
".ktm",
".kts",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-kotlin",
TMScope: "source.kotlin",
AceMode: "text",
CodeMirrorMode: "clike",
Wrap: false,
LanguageID: 189,
},
225697190: LanguageInfo{
Name: "Kusto",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".csl",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.kusto",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 225697190,
},
190: LanguageInfo{
Name: "LFE",
FSName: "",
Type: TypeForString("programming"),
Color: "#4C3023",
Group: "",
Aliases: []string{},
Extensions: []string{
".lfe",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-common-lisp",
TMScope: "source.lisp",
AceMode: "lisp",
CodeMirrorMode: "commonlisp",
Wrap: false,
LanguageID: 190,
},
191: LanguageInfo{
Name: "LLVM",
FSName: "",
Type: TypeForString("programming"),
Color: "#185619",
Group: "",
Aliases: []string{},
Extensions: []string{
".ll",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.llvm",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 191,
},
192: LanguageInfo{
Name: "LOLCODE",
FSName: "",
Type: TypeForString("programming"),
Color: "#cc9900",
Group: "",
Aliases: []string{},
Extensions: []string{
".lol",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 192,
},
193: LanguageInfo{
Name: "LSL",
FSName: "",
Type: TypeForString("programming"),
Color: "#3d9970",
Group: "",
Aliases: []string{},
Extensions: []string{
".lsl",
".lslp",
},
Interpreters: []string{
"lsl",
},
Filenames: []string{},
MimeType: "",
TMScope: "source.lsl",
AceMode: "lsl",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 193,
},
1013566805: LanguageInfo{
Name: "LTspice Symbol",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".asy",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-spreadsheet",
TMScope: "source.ltspice.symbol",
AceMode: "text",
CodeMirrorMode: "spreadsheet",
Wrap: false,
LanguageID: 1013566805,
},
194: LanguageInfo{
Name: "LabVIEW",
FSName: "",
Type: TypeForString("programming"),
Color: "#fede06",
Group: "",
Aliases: []string{},
Extensions: []string{
".lvproj",
".lvlib",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/xml",
TMScope: "text.xml",
AceMode: "xml",
CodeMirrorMode: "xml",
Wrap: false,
LanguageID: 194,
},
758480799: LanguageInfo{
Name: "Lark",
FSName: "",
Type: TypeForString("data"),
Color: "#2980B9",
Group: "",
Aliases: []string{},
Extensions: []string{
".lark",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-ebnf",
TMScope: "source.lark",
AceMode: "text",
CodeMirrorMode: "ebnf",
Wrap: false,
LanguageID: 758480799,
},
195: LanguageInfo{
Name: "Lasso",
FSName: "",
Type: TypeForString("programming"),
Color: "#999999",
Group: "",
Aliases: []string{
"lassoscript",
},
Extensions: []string{
".lasso",
".las",
".lasso8",
".lasso9",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "file.lasso",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 195,
},
196: LanguageInfo{
Name: "Latte",
FSName: "",
Type: TypeForString("markup"),
Color: "#f2a542",
Group: "",
Aliases: []string{},
Extensions: []string{
".latte",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-smarty",
TMScope: "text.html.smarty",
AceMode: "smarty",
CodeMirrorMode: "smarty",
Wrap: false,
LanguageID: 196,
},
197: LanguageInfo{
Name: "Lean",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".lean",
".hlean",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.lean",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 197,
},
198: LanguageInfo{
Name: "Less",
FSName: "",
Type: TypeForString("markup"),
Color: "#1d365d",
Group: "",
Aliases: []string{
"less-css",
},
Extensions: []string{
".less",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/css",
TMScope: "source.css.less",
AceMode: "less",
CodeMirrorMode: "css",
Wrap: false,
LanguageID: 198,
},
199: LanguageInfo{
Name: "Lex",
FSName: "",
Type: TypeForString("programming"),
Color: "#DBCA00",
Group: "",
Aliases: []string{
"flex",
},
Extensions: []string{
".l",
".lex",
},
Interpreters: []string{},
Filenames: []string{
"Lexer.x",
"lexer.x",
},
MimeType: "",
TMScope: "source.lex",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 199,
},
200: LanguageInfo{
Name: "LilyPond",
FSName: "",
Type: TypeForString("programming"),
Color: "#9ccc7c",
Group: "",
Aliases: []string{},
Extensions: []string{
".ly",
".ily",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.lilypond",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 200,
},
201: LanguageInfo{
Name: "Limbo",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".b",
".m",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 201,
},
202: LanguageInfo{
Name: "<NAME>",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".ld",
".lds",
".x",
},
Interpreters: []string{},
Filenames: []string{
"ld.script",
},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 202,
},
203: LanguageInfo{
Name: "Linux Kernel Module",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".mod",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 203,
},
204: LanguageInfo{
Name: "Liquid",
FSName: "",
Type: TypeForString("markup"),
Color: "#67b8de",
Group: "",
Aliases: []string{},
Extensions: []string{
".liquid",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "text.html.liquid",
AceMode: "liquid",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 204,
},
205: LanguageInfo{
Name: "Literate Agda",
FSName: "",
Type: TypeForString("programming"),
Color: "#315665",
Group: "Agda",
Aliases: []string{},
Extensions: []string{
".lagda",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 205,
},
206: LanguageInfo{
Name: "Literate CoffeeScript",
FSName: "",
Type: TypeForString("programming"),
Color: "#244776",
Group: "CoffeeScript",
Aliases: []string{
"litcoffee",
},
Extensions: []string{
".litcoffee",
".coffee.md",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.litcoffee",
AceMode: "text",
CodeMirrorMode: "",
Wrap: true,
LanguageID: 206,
},
207: LanguageInfo{
Name: "Literate Haskell",
FSName: "",
Type: TypeForString("programming"),
Color: "#5e5086",
Group: "Haskell",
Aliases: []string{
"lhaskell",
"lhs",
},
Extensions: []string{
".lhs",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-literate-haskell",
TMScope: "text.tex.latex.haskell",
AceMode: "text",
CodeMirrorMode: "haskell-literate",
Wrap: false,
LanguageID: 207,
},
208: LanguageInfo{
Name: "LiveScript",
FSName: "",
Type: TypeForString("programming"),
Color: "#499886",
Group: "",
Aliases: []string{
"live-script",
"ls",
},
Extensions: []string{
".ls",
"._ls",
},
Interpreters: []string{},
Filenames: []string{
"Slakefile",
},
MimeType: "text/x-livescript",
TMScope: "source.livescript",
AceMode: "livescript",
CodeMirrorMode: "livescript",
Wrap: false,
LanguageID: 208,
},
209: LanguageInfo{
Name: "Logos",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".xm",
".x",
".xi",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.logos",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 209,
},
210: LanguageInfo{
Name: "Logtalk",
FSName: "",
Type: TypeForString("programming"),
Color: "#295b9a",
Group: "",
Aliases: []string{},
Extensions: []string{
".lgt",
".logtalk",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.logtalk",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 210,
},
211: LanguageInfo{
Name: "LookML",
FSName: "",
Type: TypeForString("programming"),
Color: "#652B81",
Group: "",
Aliases: []string{},
Extensions: []string{
".lookml",
".model.lkml",
".view.lkml",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-yaml",
TMScope: "source.yaml",
AceMode: "yaml",
CodeMirrorMode: "yaml",
Wrap: false,
LanguageID: 211,
},
212: LanguageInfo{
Name: "LoomScript",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".ls",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.loomscript",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 212,
},
213: LanguageInfo{
Name: "Lua",
FSName: "",
Type: TypeForString("programming"),
Color: "#000080",
Group: "",
Aliases: []string{},
Extensions: []string{
".lua",
".fcgi",
".nse",
".p8",
".pd_lua",
".rbxs",
".rockspec",
".wlua",
},
Interpreters: []string{
"lua",
},
Filenames: []string{
".luacheckrc",
},
MimeType: "text/x-lua",
TMScope: "source.lua",
AceMode: "lua",
CodeMirrorMode: "lua",
Wrap: false,
LanguageID: 213,
},
214: LanguageInfo{
Name: "M",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{
"mumps",
},
Extensions: []string{
".mumps",
".m",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-mumps",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "mumps",
Wrap: false,
LanguageID: 214,
},
215: LanguageInfo{
Name: "M4",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".m4",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.m4",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 215,
},
216: LanguageInfo{
Name: "M4Sugar",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "M4",
Aliases: []string{
"autoconf",
},
Extensions: []string{
".m4",
},
Interpreters: []string{},
Filenames: []string{
"configure.ac",
},
MimeType: "",
TMScope: "source.m4",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 216,
},
225: LanguageInfo{
Name: "MATLAB",
FSName: "",
Type: TypeForString("programming"),
Color: "#e16737",
Group: "",
Aliases: []string{
"octave",
},
Extensions: []string{
".matlab",
".m",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-octave",
TMScope: "source.matlab",
AceMode: "matlab",
CodeMirrorMode: "octave",
Wrap: false,
LanguageID: 225,
},
217: LanguageInfo{
Name: "MAXScript",
FSName: "",
Type: TypeForString("programming"),
Color: "#00a6a6",
Group: "",
Aliases: []string{},
Extensions: []string{
".ms",
".mcr",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.maxscript",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 217,
},
448253929: LanguageInfo{
Name: "MLIR",
FSName: "",
Type: TypeForString("programming"),
Color: "#5EC8DB",
Group: "",
Aliases: []string{},
Extensions: []string{
".mlir",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.mlir",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 448253929,
},
426: LanguageInfo{
Name: "MQL4",
FSName: "",
Type: TypeForString("programming"),
Color: "#62A8D6",
Group: "",
Aliases: []string{},
Extensions: []string{
".mq4",
".mqh",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.mql5",
AceMode: "c_cpp",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 426,
},
427: LanguageInfo{
Name: "MQL5",
FSName: "",
Type: TypeForString("programming"),
Color: "#4A76B8",
Group: "",
Aliases: []string{},
Extensions: []string{
".mq5",
".mqh",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.mql5",
AceMode: "c_cpp",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 427,
},
218: LanguageInfo{
Name: "MTML",
FSName: "",
Type: TypeForString("markup"),
Color: "#b7e1f4",
Group: "",
Aliases: []string{},
Extensions: []string{
".mtml",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/html",
TMScope: "text.html.basic",
AceMode: "html",
CodeMirrorMode: "htmlmixed",
Wrap: false,
LanguageID: 218,
},
219: LanguageInfo{
Name: "MUF",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "Forth",
Aliases: []string{},
Extensions: []string{
".muf",
".m",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-forth",
TMScope: "none",
AceMode: "forth",
CodeMirrorMode: "forth",
Wrap: false,
LanguageID: 219,
},
34167825: LanguageInfo{
Name: "Macaulay2",
FSName: "",
Type: TypeForString("programming"),
Color: "#d8ffff",
Group: "",
Aliases: []string{
"m2",
},
Extensions: []string{
".m2",
},
Interpreters: []string{
"M2",
},
Filenames: []string{},
MimeType: "",
TMScope: "source.m2",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 34167825,
},
220: LanguageInfo{
Name: "Makefile",
FSName: "",
Type: TypeForString("programming"),
Color: "#427819",
Group: "",
Aliases: []string{
"bsdmake",
"make",
"mf",
},
Extensions: []string{
".mak",
".d",
".make",
".makefile",
".mk",
".mkfile",
},
Interpreters: []string{
"make",
},
Filenames: []string{
"BSDmakefile",
"GNUmakefile",
"Kbuild",
"Makefile",
"Makefile.am",
"Makefile.boot",
"Makefile.frag",
"Makefile.in",
"Makefile.inc",
"Makefile.wat",
"makefile",
"makefile.sco",
"mkfile",
},
MimeType: "text/x-cmake",
TMScope: "source.makefile",
AceMode: "makefile",
CodeMirrorMode: "cmake",
Wrap: false,
LanguageID: 220,
},
221: LanguageInfo{
Name: "Mako",
FSName: "",
Type: TypeForString("programming"),
Color: "#7e858d",
Group: "",
Aliases: []string{},
Extensions: []string{
".mako",
".mao",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "text.html.mako",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 221,
},
222: LanguageInfo{
Name: "Markdown",
FSName: "",
Type: TypeForString("prose"),
Color: "#083fa1",
Group: "",
Aliases: []string{
"pandoc",
},
Extensions: []string{
".md",
".markdown",
".mdown",
".mdwn",
".mdx",
".mkd",
".mkdn",
".mkdown",
".ronn",
".scd",
".workbook",
},
Interpreters: []string{},
Filenames: []string{
"contents.lr",
},
MimeType: "text/x-gfm",
TMScope: "source.gfm",
AceMode: "markdown",
CodeMirrorMode: "gfm",
Wrap: true,
LanguageID: 222,
},
932782397: LanguageInfo{
Name: "Marko",
FSName: "",
Type: TypeForString("markup"),
Color: "#42bff2",
Group: "",
Aliases: []string{
"markojs",
},
Extensions: []string{
".marko",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/html",
TMScope: "text.marko",
AceMode: "text",
CodeMirrorMode: "htmlmixed",
Wrap: false,
LanguageID: 932782397,
},
223: LanguageInfo{
Name: "Mask",
FSName: "",
Type: TypeForString("markup"),
Color: "#f97732",
Group: "",
Aliases: []string{},
Extensions: []string{
".mask",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.mask",
AceMode: "mask",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 223,
},
224: LanguageInfo{
Name: "Mathematica",
FSName: "",
Type: TypeForString("programming"),
Color: "#dd1100",
Group: "",
Aliases: []string{
"mma",
"wolfram",
"wolfram language",
"wolfram lang",
"wl",
},
Extensions: []string{
".mathematica",
".cdf",
".m",
".ma",
".mt",
".nb",
".nbp",
".wl",
".wlt",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-mathematica",
TMScope: "source.mathematica",
AceMode: "text",
CodeMirrorMode: "mathematica",
Wrap: false,
LanguageID: 224,
},
226: LanguageInfo{
Name: "Maven POM",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "XML",
Aliases: []string{},
Extensions: []string{},
Interpreters: []string{},
Filenames: []string{
"pom.xml",
},
MimeType: "text/xml",
TMScope: "text.xml.pom",
AceMode: "xml",
CodeMirrorMode: "xml",
Wrap: false,
LanguageID: 226,
},
227: LanguageInfo{
Name: "Max",
FSName: "",
Type: TypeForString("programming"),
Color: "#c4a79c",
Group: "",
Aliases: []string{
"max/msp",
"maxmsp",
},
Extensions: []string{
".maxpat",
".maxhelp",
".maxproj",
".mxt",
".pat",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "application/json",
TMScope: "source.json",
AceMode: "json",
CodeMirrorMode: "javascript",
Wrap: false,
LanguageID: 227,
},
229: LanguageInfo{
Name: "Mercury",
FSName: "",
Type: TypeForString("programming"),
Color: "#ff2b2b",
Group: "",
Aliases: []string{},
Extensions: []string{
".m",
".moo",
},
Interpreters: []string{
"mmi",
},
Filenames: []string{},
MimeType: "",
TMScope: "source.mercury",
AceMode: "prolog",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 229,
},
799141244: LanguageInfo{
Name: "Meson",
FSName: "",
Type: TypeForString("programming"),
Color: "#007800",
Group: "",
Aliases: []string{},
Extensions: []string{},
Interpreters: []string{},
Filenames: []string{
"meson.build",
"meson_options.txt",
},
MimeType: "",
TMScope: "source.meson",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 799141244,
},
230: LanguageInfo{
Name: "Metal",
FSName: "",
Type: TypeForString("programming"),
Color: "#8f14e9",
Group: "",
Aliases: []string{},
Extensions: []string{
".metal",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-c++src",
TMScope: "source.c++",
AceMode: "c_cpp",
CodeMirrorMode: "clike",
Wrap: false,
LanguageID: 230,
},
800983837: LanguageInfo{
Name: "Microsoft Developer Studio Project",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".dsp",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 800983837,
},
849523096: LanguageInfo{
Name: "Microsoft Visual Studio Solution",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".sln",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.solution",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 849523096,
},
231: LanguageInfo{
Name: "MiniD",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".minid",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 231,
},
4896465: LanguageInfo{
Name: "MiniYAML",
FSName: "",
Type: TypeForString("data"),
Color: "#ff1111",
Group: "",
Aliases: []string{},
Extensions: []string{
".yaml",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-yaml",
TMScope: "source.miniyaml",
AceMode: "yaml",
CodeMirrorMode: "yaml",
Wrap: false,
LanguageID: 4896465,
},
968740319: LanguageInfo{
Name: "Mint",
FSName: "",
Type: TypeForString("programming"),
Color: "#02b046",
Group: "",
Aliases: []string{},
Extensions: []string{
".mint",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.mint",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 968740319,
},
232: LanguageInfo{
Name: "Mirah",
FSName: "",
Type: TypeForString("programming"),
Color: "#c7a938",
Group: "",
Aliases: []string{},
Extensions: []string{
".druby",
".duby",
".mirah",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-ruby",
TMScope: "source.ruby",
AceMode: "ruby",
CodeMirrorMode: "ruby",
Wrap: false,
LanguageID: 232,
},
233: LanguageInfo{
Name: "Modelica",
FSName: "",
Type: TypeForString("programming"),
Color: "#de1d31",
Group: "",
Aliases: []string{},
Extensions: []string{
".mo",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-modelica",
TMScope: "source.modelica",
AceMode: "text",
CodeMirrorMode: "modelica",
Wrap: false,
LanguageID: 233,
},
234: LanguageInfo{
Name: "Modula-2",
FSName: "",
Type: TypeForString("programming"),
Color: "#10253f",
Group: "",
Aliases: []string{},
Extensions: []string{
".mod",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.modula2",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 234,
},
564743864: LanguageInfo{
Name: "Modula-3",
FSName: "",
Type: TypeForString("programming"),
Color: "#223388",
Group: "",
Aliases: []string{},
Extensions: []string{
".i3",
".ig",
".m3",
".mg",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.modula-3",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 564743864,
},
235: LanguageInfo{
Name: "Module Management System",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".mms",
".mmk",
},
Interpreters: []string{},
Filenames: []string{
"descrip.mmk",
"descrip.mms",
},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 235,
},
236: LanguageInfo{
Name: "Monkey",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".monkey",
".monkey2",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.monkey",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 236,
},
237: LanguageInfo{
Name: "Moocode",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".moo",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 237,
},
238: LanguageInfo{
Name: "MoonScript",
FSName: "",
Type: TypeForString("programming"),
Color: "#ff4585",
Group: "",
Aliases: []string{},
Extensions: []string{
".moon",
},
Interpreters: []string{
"moon",
},
Filenames: []string{},
MimeType: "",
TMScope: "source.moonscript",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 238,
},
202937027: LanguageInfo{
Name: "Motoko",
FSName: "",
Type: TypeForString("programming"),
Color: "#fbb03b",
Group: "",
Aliases: []string{},
Extensions: []string{
".mo",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.mo",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 202937027,
},
477582706: LanguageInfo{
Name: "Motorola 68K Assembly",
FSName: "",
Type: TypeForString("programming"),
Color: "#005daa",
Group: "Assembly",
Aliases: []string{
"m68k",
},
Extensions: []string{
".asm",
".i",
".inc",
".s",
".x68",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.m68k",
AceMode: "assembly_x86",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 477582706,
},
474864066: LanguageInfo{
Name: "Muse",
FSName: "",
Type: TypeForString("prose"),
Color: "",
Group: "",
Aliases: []string{
"amusewiki",
"emacs muse",
},
Extensions: []string{
".muse",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "text.muse",
AceMode: "text",
CodeMirrorMode: "",
Wrap: true,
LanguageID: 474864066,
},
638334590: LanguageInfo{
Name: "Mustache",
FSName: "",
Type: TypeForString("markup"),
Color: "#724b3b",
Group: "",
Aliases: []string{},
Extensions: []string{
".mustache",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-smarty",
TMScope: "text.html.smarty",
AceMode: "smarty",
CodeMirrorMode: "smarty",
Wrap: false,
LanguageID: 638334590,
},
239: LanguageInfo{
Name: "Myghty",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".myt",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 239,
},
171666519: LanguageInfo{
Name: "NASL",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".nasl",
".inc",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.nasl",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 171666519,
},
240: LanguageInfo{
Name: "NCL",
FSName: "",
Type: TypeForString("programming"),
Color: "#28431f",
Group: "",
Aliases: []string{},
Extensions: []string{
".ncl",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.ncl",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 240,
},
481192983: LanguageInfo{
Name: "NEON",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{
"nette object notation",
"ne-on",
},
Extensions: []string{
".neon",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.neon",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 481192983,
},
241: LanguageInfo{
Name: "NL",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".nl",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 241,
},
685022663: LanguageInfo{
Name: "NPM Config",
FSName: "",
Type: TypeForString("data"),
Color: "#cb3837",
Group: "INI",
Aliases: []string{
"npmrc",
},
Extensions: []string{},
Interpreters: []string{},
Filenames: []string{
".npmrc",
},
MimeType: "",
TMScope: "source.ini.npmrc",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 685022663,
},
242: LanguageInfo{
Name: "NSIS",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".nsi",
".nsh",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-nsis",
TMScope: "source.nsis",
AceMode: "text",
CodeMirrorMode: "nsis",
Wrap: false,
LanguageID: 242,
},
731233819: LanguageInfo{
Name: "NWScript",
FSName: "",
Type: TypeForString("programming"),
Color: "#111522",
Group: "",
Aliases: []string{},
Extensions: []string{
".nss",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-csrc",
TMScope: "source.c.nwscript",
AceMode: "c_cpp",
CodeMirrorMode: "clike",
Wrap: false,
LanguageID: 731233819,
},
521429430: LanguageInfo{
Name: "Nearley",
FSName: "",
Type: TypeForString("programming"),
Color: "#990000",
Group: "",
Aliases: []string{},
Extensions: []string{
".ne",
".nearley",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.ne",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 521429430,
},
243: LanguageInfo{
Name: "Nemerle",
FSName: "",
Type: TypeForString("programming"),
Color: "#3d3c6e",
Group: "",
Aliases: []string{},
Extensions: []string{
".n",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.nemerle",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 243,
},
244: LanguageInfo{
Name: "NetLinx",
FSName: "",
Type: TypeForString("programming"),
Color: "#0aa0ff",
Group: "",
Aliases: []string{},
Extensions: []string{
".axs",
".axi",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.netlinx",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 244,
},
245: LanguageInfo{
Name: "NetLinx+ERB",
FSName: "",
Type: TypeForString("programming"),
Color: "#747faa",
Group: "",
Aliases: []string{},
Extensions: []string{
".axs.erb",
".axi.erb",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.netlinx.erb",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 245,
},
246: LanguageInfo{
Name: "NetLogo",
FSName: "",
Type: TypeForString("programming"),
Color: "#ff6375",
Group: "",
Aliases: []string{},
Extensions: []string{
".nlogo",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-common-lisp",
TMScope: "source.lisp",
AceMode: "lisp",
CodeMirrorMode: "commonlisp",
Wrap: false,
LanguageID: 246,
},
247: LanguageInfo{
Name: "NewLisp",
FSName: "",
Type: TypeForString("programming"),
Color: "#87AED7",
Group: "",
Aliases: []string{},
Extensions: []string{
".nl",
".lisp",
".lsp",
},
Interpreters: []string{
"newlisp",
},
Filenames: []string{},
MimeType: "text/x-common-lisp",
TMScope: "source.lisp",
AceMode: "lisp",
CodeMirrorMode: "commonlisp",
Wrap: false,
LanguageID: 247,
},
506780613: LanguageInfo{
Name: "Nextflow",
FSName: "",
Type: TypeForString("programming"),
Color: "#3ac486",
Group: "",
Aliases: []string{},
Extensions: []string{
".nf",
},
Interpreters: []string{
"nextflow",
},
Filenames: []string{
"nextflow.config",
},
MimeType: "",
TMScope: "source.nextflow",
AceMode: "groovy",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 506780613,
},
248: LanguageInfo{
Name: "Nginx",
FSName: "",
Type: TypeForString("data"),
Color: "#009639",
Group: "",
Aliases: []string{
"nginx configuration file",
},
Extensions: []string{
".nginx",
".nginxconf",
".vhost",
},
Interpreters: []string{},
Filenames: []string{
"nginx.conf",
},
MimeType: "text/x-nginx-conf",
TMScope: "source.nginx",
AceMode: "text",
CodeMirrorMode: "nginx",
Wrap: false,
LanguageID: 248,
},
249: LanguageInfo{
Name: "Nim",
FSName: "",
Type: TypeForString("programming"),
Color: "#ffc200",
Group: "",
Aliases: []string{},
Extensions: []string{
".nim",
".nim.cfg",
".nimble",
".nimrod",
".nims",
},
Interpreters: []string{},
Filenames: []string{
"nim.cfg",
},
MimeType: "",
TMScope: "source.nim",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 249,
},
250: LanguageInfo{
Name: "Ninja",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".ninja",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.ninja",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 250,
},
251: LanguageInfo{
Name: "Nit",
FSName: "",
Type: TypeForString("programming"),
Color: "#009917",
Group: "",
Aliases: []string{},
Extensions: []string{
".nit",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.nit",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 251,
},
252: LanguageInfo{
Name: "Nix",
FSName: "",
Type: TypeForString("programming"),
Color: "#7e7eff",
Group: "",
Aliases: []string{
"nixos",
},
Extensions: []string{
".nix",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.nix",
AceMode: "nix",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 252,
},
253: LanguageInfo{
Name: "Nu",
FSName: "",
Type: TypeForString("programming"),
Color: "#c9df40",
Group: "",
Aliases: []string{
"nush",
},
Extensions: []string{
".nu",
},
Interpreters: []string{
"nush",
},
Filenames: []string{
"Nukefile",
},
MimeType: "text/x-scheme",
TMScope: "source.nu",
AceMode: "scheme",
CodeMirrorMode: "scheme",
Wrap: false,
LanguageID: 253,
},
254: LanguageInfo{
Name: "NumPy",
FSName: "",
Type: TypeForString("programming"),
Color: "#9C8AF9",
Group: "Python",
Aliases: []string{},
Extensions: []string{
".numpy",
".numpyw",
".numsc",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-python",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "python",
Wrap: false,
LanguageID: 254,
},
461856962: LanguageInfo{
Name: "Nunjucks",
FSName: "",
Type: TypeForString("markup"),
Color: "#3d8137",
Group: "",
Aliases: []string{
"njk",
},
Extensions: []string{
".njk",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "text.html.nunjucks",
AceMode: "nunjucks",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 461856962,
},
255: LanguageInfo{
Name: "OCaml",
FSName: "",
Type: TypeForString("programming"),
Color: "#3be133",
Group: "",
Aliases: []string{},
Extensions: []string{
".ml",
".eliom",
".eliomi",
".ml4",
".mli",
".mll",
".mly",
},
Interpreters: []string{
"ocaml",
"ocamlrun",
"ocamlscript",
},
Filenames: []string{},
MimeType: "text/x-ocaml",
TMScope: "source.ocaml",
AceMode: "ocaml",
CodeMirrorMode: "mllike",
Wrap: false,
LanguageID: 255,
},
256: LanguageInfo{
Name: "ObjDump",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".objdump",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "objdump.x86asm",
AceMode: "assembly_x86",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 256,
},
985227236: LanguageInfo{
Name: "Object Data Instance Notation",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".odin",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.odin-ehr",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 985227236,
},
202735509: LanguageInfo{
Name: "ObjectScript",
FSName: "",
Type: TypeForString("programming"),
Color: "#424893",
Group: "",
Aliases: []string{},
Extensions: []string{
".cls",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.objectscript",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 202735509,
},
257: LanguageInfo{
Name: "Objective-C",
FSName: "",
Type: TypeForString("programming"),
Color: "#438eff",
Group: "",
Aliases: []string{
"obj-c",
"objc",
"objectivec",
},
Extensions: []string{
".m",
".h",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-objectivec",
TMScope: "source.objc",
AceMode: "objectivec",
CodeMirrorMode: "clike",
Wrap: false,
LanguageID: 257,
},
258: LanguageInfo{
Name: "Objective-C++",
FSName: "",
Type: TypeForString("programming"),
Color: "#6866fb",
Group: "",
Aliases: []string{
"obj-c++",
"objc++",
"objectivec++",
},
Extensions: []string{
".mm",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-objectivec",
TMScope: "source.objc++",
AceMode: "objectivec",
CodeMirrorMode: "clike",
Wrap: false,
LanguageID: 258,
},
259: LanguageInfo{
Name: "Objective-J",
FSName: "",
Type: TypeForString("programming"),
Color: "#ff0c5a",
Group: "",
Aliases: []string{
"obj-j",
"objectivej",
"objj",
},
Extensions: []string{
".j",
".sj",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.js.objj",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 259,
},
889244082: LanguageInfo{
Name: "Odin",
FSName: "",
Type: TypeForString("programming"),
Color: "#60AFFE",
Group: "",
Aliases: []string{
"odinlang",
"odin-lang",
},
Extensions: []string{
".odin",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.odin",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 889244082,
},
260: LanguageInfo{
Name: "Omgrofl",
FSName: "",
Type: TypeForString("programming"),
Color: "#cabbff",
Group: "",
Aliases: []string{},
Extensions: []string{
".omgrofl",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 260,
},
261: LanguageInfo{
Name: "Opa",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".opa",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.opa",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 261,
},
262: LanguageInfo{
Name: "Opal",
FSName: "",
Type: TypeForString("programming"),
Color: "#f7ede0",
Group: "",
Aliases: []string{},
Extensions: []string{
".opal",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.opal",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 262,
},
840483232: LanguageInfo{
Name: "Open Policy Agent",
FSName: "",
Type: TypeForString("programming"),
Color: "#7d9199",
Group: "",
Aliases: []string{},
Extensions: []string{
".rego",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.rego",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 840483232,
},
263: LanguageInfo{
Name: "OpenCL",
FSName: "",
Type: TypeForString("programming"),
Color: "#ed2e2d",
Group: "C",
Aliases: []string{},
Extensions: []string{
".cl",
".opencl",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-csrc",
TMScope: "source.c",
AceMode: "c_cpp",
CodeMirrorMode: "clike",
Wrap: false,
LanguageID: 263,
},
264: LanguageInfo{
Name: "OpenEdge ABL",
FSName: "",
Type: TypeForString("programming"),
Color: "#5ce600",
Group: "",
Aliases: []string{
"progress",
"openedge",
"abl",
},
Extensions: []string{
".p",
".cls",
".w",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.abl",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 264,
},
153739399: LanguageInfo{
Name: "OpenQASM",
FSName: "",
Type: TypeForString("programming"),
Color: "#AA70FF",
Group: "",
Aliases: []string{},
Extensions: []string{
".qasm",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.qasm",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 153739399,
},
265: LanguageInfo{
Name: "OpenRC runscript",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "Shell",
Aliases: []string{
"openrc",
},
Extensions: []string{},
Interpreters: []string{
"openrc-run",
},
Filenames: []string{},
MimeType: "text/x-sh",
TMScope: "source.shell",
AceMode: "sh",
CodeMirrorMode: "shell",
Wrap: false,
LanguageID: 265,
},
266: LanguageInfo{
Name: "OpenSCAD",
FSName: "",
Type: TypeForString("programming"),
Color: "#e5cd45",
Group: "",
Aliases: []string{},
Extensions: []string{
".scad",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.scad",
AceMode: "scad",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 266,
},
598917541: LanguageInfo{
Name: "OpenStep Property List",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".plist",
".glyphs",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.plist",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 598917541,
},
374317347: LanguageInfo{
Name: "OpenType Feature File",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{
"AFDKO",
},
Extensions: []string{
".fea",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.opentype",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 374317347,
},
267: LanguageInfo{
Name: "Org",
FSName: "",
Type: TypeForString("prose"),
Color: "#77aa99",
Group: "",
Aliases: []string{},
Extensions: []string{
".org",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: true,
LanguageID: 267,
},
268: LanguageInfo{
Name: "Ox",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".ox",
".oxh",
".oxo",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.ox",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 268,
},
269: LanguageInfo{
Name: "Oxygene",
FSName: "",
Type: TypeForString("programming"),
Color: "#cdd0e3",
Group: "",
Aliases: []string{},
Extensions: []string{
".oxygene",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 269,
},
270: LanguageInfo{
Name: "Oz",
FSName: "",
Type: TypeForString("programming"),
Color: "#fab738",
Group: "",
Aliases: []string{},
Extensions: []string{
".oz",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-oz",
TMScope: "source.oz",
AceMode: "text",
CodeMirrorMode: "oz",
Wrap: false,
LanguageID: 270,
},
348895984: LanguageInfo{
Name: "P4",
FSName: "",
Type: TypeForString("programming"),
Color: "#7055b5",
Group: "",
Aliases: []string{},
Extensions: []string{
".p4",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.p4",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 348895984,
},
81442128: LanguageInfo{
Name: "PEG.js",
FSName: "",
Type: TypeForString("programming"),
Color: "#234d6b",
Group: "",
Aliases: []string{},
Extensions: []string{
".pegjs",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/javascript",
TMScope: "source.pegjs",
AceMode: "javascript",
CodeMirrorMode: "javascript",
Wrap: false,
LanguageID: 81442128,
},
272: LanguageInfo{
Name: "PHP",
FSName: "",
Type: TypeForString("programming"),
Color: "#4F5D95",
Group: "",
Aliases: []string{
"inc",
},
Extensions: []string{
".php",
".aw",
".ctp",
".fcgi",
".inc",
".php3",
".php4",
".php5",
".phps",
".phpt",
},
Interpreters: []string{
"php",
},
Filenames: []string{
".php",
".php_cs",
".php_cs.dist",
"Phakefile",
},
MimeType: "application/x-httpd-php",
TMScope: "text.html.php",
AceMode: "php",
CodeMirrorMode: "php",
Wrap: false,
LanguageID: 272,
},
273: LanguageInfo{
Name: "PLSQL",
FSName: "",
Type: TypeForString("programming"),
Color: "#dad8d8",
Group: "",
Aliases: []string{},
Extensions: []string{
".pls",
".bdy",
".ddl",
".fnc",
".pck",
".pkb",
".pks",
".plb",
".plsql",
".prc",
".spc",
".sql",
".tpb",
".tps",
".trg",
".vw",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-plsql",
TMScope: "none",
AceMode: "sql",
CodeMirrorMode: "sql",
Wrap: false,
LanguageID: 273,
},
274: LanguageInfo{
Name: "PLpgSQL",
FSName: "",
Type: TypeForString("programming"),
Color: "#336790",
Group: "",
Aliases: []string{},
Extensions: []string{
".pgsql",
".sql",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-sql",
TMScope: "source.sql",
AceMode: "pgsql",
CodeMirrorMode: "sql",
Wrap: false,
LanguageID: 274,
},
275: LanguageInfo{
Name: "POV-Ray SDL",
FSName: "",
Type: TypeForString("programming"),
Color: "#6bac65",
Group: "",
Aliases: []string{
"pov-ray",
"povray",
},
Extensions: []string{
".pov",
".inc",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.pov-ray sdl",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 275,
},
276: LanguageInfo{
Name: "Pan",
FSName: "",
Type: TypeForString("programming"),
Color: "#cc0000",
Group: "",
Aliases: []string{},
Extensions: []string{
".pan",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.pan",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 276,
},
277: LanguageInfo{
Name: "Papyrus",
FSName: "",
Type: TypeForString("programming"),
Color: "#6600cc",
Group: "",
Aliases: []string{},
Extensions: []string{
".psc",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.papyrus.skyrim",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 277,
},
278: LanguageInfo{
Name: "Parrot",
FSName: "",
Type: TypeForString("programming"),
Color: "#f3ca0a",
Group: "",
Aliases: []string{},
Extensions: []string{
".parrot",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 278,
},
279: LanguageInfo{
Name: "Parrot Assembly",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "Parrot",
Aliases: []string{
"pasm",
},
Extensions: []string{
".pasm",
},
Interpreters: []string{
"parrot",
},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 279,
},
280: LanguageInfo{
Name: "Parrot Internal Representation",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "Parrot",
Aliases: []string{
"pir",
},
Extensions: []string{
".pir",
},
Interpreters: []string{
"parrot",
},
Filenames: []string{},
MimeType: "",
TMScope: "source.parrot.pir",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 280,
},
281: LanguageInfo{
Name: "Pascal",
FSName: "",
Type: TypeForString("programming"),
Color: "#E3F171",
Group: "",
Aliases: []string{
"delphi",
"objectpascal",
},
Extensions: []string{
".pas",
".dfm",
".dpr",
".inc",
".lpr",
".pascal",
".pp",
},
Interpreters: []string{
"instantfpc",
},
Filenames: []string{},
MimeType: "text/x-pascal",
TMScope: "source.pascal",
AceMode: "pascal",
CodeMirrorMode: "pascal",
Wrap: false,
LanguageID: 281,
},
271: LanguageInfo{
Name: "Pawn",
FSName: "",
Type: TypeForString("programming"),
Color: "#dbb284",
Group: "",
Aliases: []string{},
Extensions: []string{
".pwn",
".inc",
".sma",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.pawn",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 271,
},
840372442: LanguageInfo{
Name: "Pep8",
FSName: "",
Type: TypeForString("programming"),
Color: "#C76F5B",
Group: "",
Aliases: []string{},
Extensions: []string{
".pep",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.pep8",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 840372442,
},
282: LanguageInfo{
Name: "Perl",
FSName: "",
Type: TypeForString("programming"),
Color: "#0298c3",
Group: "",
Aliases: []string{
"cperl",
},
Extensions: []string{
".pl",
".al",
".cgi",
".fcgi",
".perl",
".ph",
".plx",
".pm",
".psgi",
".t",
},
Interpreters: []string{
"cperl",
"perl",
},
Filenames: []string{
"Makefile.PL",
"Rexfile",
"ack",
"cpanfile",
},
MimeType: "text/x-perl",
TMScope: "source.perl",
AceMode: "perl",
CodeMirrorMode: "perl",
Wrap: false,
LanguageID: 282,
},
425: LanguageInfo{
Name: "Pic",
FSName: "",
Type: TypeForString("markup"),
Color: "",
Group: "Roff",
Aliases: []string{},
Extensions: []string{
".pic",
".chem",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/troff",
TMScope: "source.pic",
AceMode: "text",
CodeMirrorMode: "troff",
Wrap: false,
LanguageID: 425,
},
284: LanguageInfo{
Name: "Pickle",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".pkl",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 284,
},
285: LanguageInfo{
Name: "PicoLisp",
FSName: "",
Type: TypeForString("programming"),
Color: "#6067af",
Group: "",
Aliases: []string{},
Extensions: []string{
".l",
},
Interpreters: []string{
"picolisp",
"pil",
},
Filenames: []string{},
MimeType: "",
TMScope: "source.lisp",
AceMode: "lisp",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 285,
},
286: LanguageInfo{
Name: "PigLatin",
FSName: "",
Type: TypeForString("programming"),
Color: "#fcd7de",
Group: "",
Aliases: []string{},
Extensions: []string{
".pig",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.pig_latin",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 286,
},
287: LanguageInfo{
Name: "Pike",
FSName: "",
Type: TypeForString("programming"),
Color: "#005390",
Group: "",
Aliases: []string{},
Extensions: []string{
".pike",
".pmod",
},
Interpreters: []string{
"pike",
},
Filenames: []string{},
MimeType: "",
TMScope: "source.pike",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 287,
},
833504686: LanguageInfo{
Name: "PlantUML",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".puml",
".iuml",
".plantuml",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.wsd",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 833504686,
},
288: LanguageInfo{
Name: "Pod",
FSName: "",
Type: TypeForString("prose"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".pod",
},
Interpreters: []string{
"perl",
},
Filenames: []string{},
MimeType: "text/x-perl",
TMScope: "none",
AceMode: "perl",
CodeMirrorMode: "perl",
Wrap: true,
LanguageID: 288,
},
155357471: LanguageInfo{
Name: "Pod 6",
FSName: "",
Type: TypeForString("prose"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".pod",
".pod6",
},
Interpreters: []string{
"perl6",
},
Filenames: []string{},
MimeType: "",
TMScope: "source.raku",
AceMode: "perl",
CodeMirrorMode: "",
Wrap: true,
LanguageID: 155357471,
},
289: LanguageInfo{
Name: "PogoScript",
FSName: "",
Type: TypeForString("programming"),
Color: "#d80074",
Group: "",
Aliases: []string{},
Extensions: []string{
".pogo",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.pogoscript",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 289,
},
290: LanguageInfo{
Name: "Pony",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".pony",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.pony",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 290,
},
262764437: LanguageInfo{
Name: "PostCSS",
FSName: "",
Type: TypeForString("markup"),
Color: "#dc3a0c",
Group: "CSS",
Aliases: []string{},
Extensions: []string{
".pcss",
".postcss",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.postcss",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 262764437,
},
291: LanguageInfo{
Name: "PostScript",
FSName: "",
Type: TypeForString("markup"),
Color: "#da291c",
Group: "",
Aliases: []string{
"postscr",
},
Extensions: []string{
".ps",
".eps",
".epsi",
".pfa",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.postscript",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 291,
},
292: LanguageInfo{
Name: "PowerBuilder",
FSName: "",
Type: TypeForString("programming"),
Color: "#8f0f8d",
Group: "",
Aliases: []string{},
Extensions: []string{
".pbt",
".sra",
".sru",
".srw",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 292,
},
293: LanguageInfo{
Name: "PowerShell",
FSName: "",
Type: TypeForString("programming"),
Color: "#012456",
Group: "",
Aliases: []string{
"posh",
"pwsh",
},
Extensions: []string{
".ps1",
".psd1",
".psm1",
},
Interpreters: []string{
"pwsh",
},
Filenames: []string{},
MimeType: "application/x-powershell",
TMScope: "source.powershell",
AceMode: "powershell",
CodeMirrorMode: "powershell",
Wrap: false,
LanguageID: 293,
},
499933428: LanguageInfo{
Name: "Prisma",
FSName: "",
Type: TypeForString("data"),
Color: "#0c344b",
Group: "",
Aliases: []string{},
Extensions: []string{
".prisma",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.prisma",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 499933428,
},
294: LanguageInfo{
Name: "Processing",
FSName: "",
Type: TypeForString("programming"),
Color: "#0096D8",
Group: "",
Aliases: []string{},
Extensions: []string{
".pde",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.processing",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 294,
},
305313959: LanguageInfo{
Name: "Procfile",
FSName: "",
Type: TypeForString("programming"),
Color: "#3B2F63",
Group: "",
Aliases: []string{},
Extensions: []string{},
Interpreters: []string{},
Filenames: []string{
"Procfile",
},
MimeType: "",
TMScope: "source.procfile",
AceMode: "batchfile",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 305313959,
},
716513858: LanguageInfo{
Name: "Proguard",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".pro",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 716513858,
},
295: LanguageInfo{
Name: "Prolog",
FSName: "",
Type: TypeForString("programming"),
Color: "#74283c",
Group: "",
Aliases: []string{},
Extensions: []string{
".pl",
".pro",
".prolog",
".yap",
},
Interpreters: []string{
"swipl",
"yap",
},
Filenames: []string{},
MimeType: "",
TMScope: "source.prolog",
AceMode: "prolog",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 295,
},
441858312: LanguageInfo{
Name: "Promela",
FSName: "",
Type: TypeForString("programming"),
Color: "#de0000",
Group: "",
Aliases: []string{},
Extensions: []string{
".pml",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.promela",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 441858312,
},
296: LanguageInfo{
Name: "Propeller Spin",
FSName: "",
Type: TypeForString("programming"),
Color: "#7fa2a7",
Group: "",
Aliases: []string{},
Extensions: []string{
".spin",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.spin",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 296,
},
297: LanguageInfo{
Name: "Protocol Buffer",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{
"protobuf",
"Protocol Buffers",
},
Extensions: []string{
".proto",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-protobuf",
TMScope: "source.proto",
AceMode: "protobuf",
CodeMirrorMode: "protobuf",
Wrap: false,
LanguageID: 297,
},
436568854: LanguageInfo{
Name: "Protocol Buffer Text Format",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{
"text proto",
"protobuf text format",
},
Extensions: []string{
".textproto",
".pbt",
".pbtxt",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.textproto",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 436568854,
},
298: LanguageInfo{
Name: "Public Key",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".asc",
".pub",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "application/pgp",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "asciiarmor",
Wrap: false,
LanguageID: 298,
},
179: LanguageInfo{
Name: "Pug",
FSName: "",
Type: TypeForString("markup"),
Color: "#a86454",
Group: "",
Aliases: []string{},
Extensions: []string{
".jade",
".pug",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-pug",
TMScope: "text.jade",
AceMode: "jade",
CodeMirrorMode: "pug",
Wrap: false,
LanguageID: 179,
},
299: LanguageInfo{
Name: "Puppet",
FSName: "",
Type: TypeForString("programming"),
Color: "#302B6D",
Group: "",
Aliases: []string{},
Extensions: []string{
".pp",
},
Interpreters: []string{},
Filenames: []string{
"Modulefile",
},
MimeType: "text/x-puppet",
TMScope: "source.puppet",
AceMode: "text",
CodeMirrorMode: "puppet",
Wrap: false,
LanguageID: 299,
},
300: LanguageInfo{
Name: "Pure Data",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".pd",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 300,
},
301: LanguageInfo{
Name: "PureBasic",
FSName: "",
Type: TypeForString("programming"),
Color: "#5a6986",
Group: "",
Aliases: []string{},
Extensions: []string{
".pb",
".pbi",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 301,
},
302: LanguageInfo{
Name: "PureScript",
FSName: "",
Type: TypeForString("programming"),
Color: "#1D222D",
Group: "",
Aliases: []string{},
Extensions: []string{
".purs",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-haskell",
TMScope: "source.purescript",
AceMode: "haskell",
CodeMirrorMode: "haskell",
Wrap: false,
LanguageID: 302,
},
303: LanguageInfo{
Name: "Python",
FSName: "",
Type: TypeForString("programming"),
Color: "#3572A5",
Group: "",
Aliases: []string{
"python3",
"rusthon",
},
Extensions: []string{
".py",
".cgi",
".fcgi",
".gyp",
".gypi",
".lmi",
".py3",
".pyde",
".pyi",
".pyp",
".pyt",
".pyw",
".rpy",
".smk",
".spec",
".tac",
".wsgi",
".xpy",
},
Interpreters: []string{
"python",
"python2",
"python3",
},
Filenames: []string{
".gclient",
"DEPS",
"SConscript",
"SConstruct",
"Snakefile",
"wscript",
},
MimeType: "text/x-python",
TMScope: "source.python",
AceMode: "python",
CodeMirrorMode: "python",
Wrap: false,
LanguageID: 303,
},
428: LanguageInfo{
Name: "Python console",
FSName: "",
Type: TypeForString("programming"),
Color: "#3572A5",
Group: "Python",
Aliases: []string{
"pycon",
},
Extensions: []string{},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "text.python.console",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 428,
},
304: LanguageInfo{
Name: "Python traceback",
FSName: "",
Type: TypeForString("data"),
Color: "#3572A5",
Group: "Python",
Aliases: []string{},
Extensions: []string{
".pytb",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "text.python.traceback",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 304,
},
697448245: LanguageInfo{
Name: "Q#",
FSName: "",
Type: TypeForString("programming"),
Color: "#fed659",
Group: "",
Aliases: []string{
"qsharp",
},
Extensions: []string{
".qs",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.qsharp",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 697448245,
},
305: LanguageInfo{
Name: "QML",
FSName: "",
Type: TypeForString("programming"),
Color: "#44a51c",
Group: "",
Aliases: []string{},
Extensions: []string{
".qml",
".qbs",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.qml",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 305,
},
306: LanguageInfo{
Name: "QMake",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".pro",
".pri",
},
Interpreters: []string{
"qmake",
},
Filenames: []string{},
MimeType: "",
TMScope: "source.qmake",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 306,
},
558193693: LanguageInfo{
Name: "Qt Script",
FSName: "",
Type: TypeForString("programming"),
Color: "#00b841",
Group: "",
Aliases: []string{},
Extensions: []string{
".qs",
},
Interpreters: []string{},
Filenames: []string{
"installscript.qs",
"toolchain_installscript.qs",
},
MimeType: "text/javascript",
TMScope: "source.js",
AceMode: "javascript",
CodeMirrorMode: "javascript",
Wrap: false,
LanguageID: 558193693,
},
375265331: LanguageInfo{
Name: "Quake",
FSName: "",
Type: TypeForString("programming"),
Color: "#882233",
Group: "",
Aliases: []string{},
Extensions: []string{},
Interpreters: []string{},
Filenames: []string{
"m3makefile",
"m3overrides",
},
MimeType: "",
TMScope: "source.quake",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 375265331,
},
307: LanguageInfo{
Name: "R",
FSName: "",
Type: TypeForString("programming"),
Color: "#198CE7",
Group: "",
Aliases: []string{
"R",
"Rscript",
"splus",
},
Extensions: []string{
".r",
".rd",
".rsx",
},
Interpreters: []string{
"Rscript",
},
Filenames: []string{
".Rprofile",
"expr-dist",
},
MimeType: "text/x-rsrc",
TMScope: "source.r",
AceMode: "r",
CodeMirrorMode: "r",
Wrap: false,
LanguageID: 307,
},
308: LanguageInfo{
Name: "RAML",
FSName: "",
Type: TypeForString("markup"),
Color: "#77d9fb",
Group: "",
Aliases: []string{},
Extensions: []string{
".raml",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-yaml",
TMScope: "source.yaml",
AceMode: "yaml",
CodeMirrorMode: "yaml",
Wrap: false,
LanguageID: 308,
},
309: LanguageInfo{
Name: "RDoc",
FSName: "",
Type: TypeForString("prose"),
Color: "#701516",
Group: "",
Aliases: []string{},
Extensions: []string{
".rdoc",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "text.rdoc",
AceMode: "rdoc",
CodeMirrorMode: "",
Wrap: true,
LanguageID: 309,
},
310: LanguageInfo{
Name: "REALbasic",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".rbbas",
".rbfrm",
".rbmnu",
".rbres",
".rbtbar",
".rbuistate",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.vbnet",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 310,
},
311: LanguageInfo{
Name: "REXX",
FSName: "",
Type: TypeForString("programming"),
Color: "#d90e09",
Group: "",
Aliases: []string{
"arexx",
},
Extensions: []string{
".rexx",
".pprx",
".rex",
},
Interpreters: []string{
"regina",
"rexx",
},
Filenames: []string{},
MimeType: "",
TMScope: "source.rexx",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 311,
},
313: LanguageInfo{
Name: "RMarkdown",
FSName: "",
Type: TypeForString("prose"),
Color: "#198ce7",
Group: "",
Aliases: []string{},
Extensions: []string{
".rmd",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-gfm",
TMScope: "source.gfm",
AceMode: "markdown",
CodeMirrorMode: "gfm",
Wrap: true,
LanguageID: 313,
},
1031374237: LanguageInfo{
Name: "RPC",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{
"rpcgen",
"oncrpc",
"xdr",
},
Extensions: []string{
".x",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.c",
AceMode: "c_cpp",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 1031374237,
},
609977990: LanguageInfo{
Name: "RPGLE",
FSName: "",
Type: TypeForString("programming"),
Color: "#2BDE21",
Group: "",
Aliases: []string{
"ile rpg",
"sqlrpgle",
},
Extensions: []string{
".rpgle",
".sqlrpgle",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.rpgle",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 609977990,
},
314: LanguageInfo{
Name: "RPM Spec",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{
"specfile",
},
Extensions: []string{
".spec",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-rpm-spec",
TMScope: "source.rpm-spec",
AceMode: "text",
CodeMirrorMode: "rpm",
Wrap: false,
LanguageID: 314,
},
315: LanguageInfo{
Name: "RUNOFF",
FSName: "",
Type: TypeForString("markup"),
Color: "#665a4e",
Group: "",
Aliases: []string{},
Extensions: []string{
".rnh",
".rno",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "text.runoff",
AceMode: "text",
CodeMirrorMode: "",
Wrap: true,
LanguageID: 315,
},
316: LanguageInfo{
Name: "Racket",
FSName: "",
Type: TypeForString("programming"),
Color: "#3c5caa",
Group: "",
Aliases: []string{},
Extensions: []string{
".rkt",
".rktd",
".rktl",
".scrbl",
},
Interpreters: []string{
"racket",
},
Filenames: []string{},
MimeType: "",
TMScope: "source.racket",
AceMode: "lisp",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 316,
},
317: LanguageInfo{
Name: "Ragel",
FSName: "",
Type: TypeForString("programming"),
Color: "#9d5200",
Group: "",
Aliases: []string{
"ragel-rb",
"ragel-ruby",
},
Extensions: []string{
".rl",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 317,
},
283: LanguageInfo{
Name: "Raku",
FSName: "",
Type: TypeForString("programming"),
Color: "#0000fb",
Group: "",
Aliases: []string{
"perl6",
"perl-6",
},
Extensions: []string{
".6pl",
".6pm",
".nqp",
".p6",
".p6l",
".p6m",
".pl",
".pl6",
".pm",
".pm6",
".raku",
".rakumod",
".t",
},
Interpreters: []string{
"perl6",
"raku",
"rakudo",
},
Filenames: []string{},
MimeType: "text/x-perl",
TMScope: "source.raku",
AceMode: "perl",
CodeMirrorMode: "perl",
Wrap: false,
LanguageID: 283,
},
173616037: LanguageInfo{
Name: "Rascal",
FSName: "",
Type: TypeForString("programming"),
Color: "#fffaa0",
Group: "",
Aliases: []string{},
Extensions: []string{
".rsc",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.rascal",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 173616037,
},
318: LanguageInfo{
Name: "Raw token data",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{
"raw",
},
Extensions: []string{
".raw",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 318,
},
501875647: LanguageInfo{
Name: "ReScript",
FSName: "",
Type: TypeForString("programming"),
Color: "#ed5051",
Group: "",
Aliases: []string{},
Extensions: []string{
".res",
},
Interpreters: []string{
"ocaml",
},
Filenames: []string{},
MimeType: "text/x-rustsrc",
TMScope: "source.rescript",
AceMode: "rust",
CodeMirrorMode: "rust",
Wrap: false,
LanguageID: 501875647,
},
538732839: LanguageInfo{
Name: "Readline Config",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "INI",
Aliases: []string{
"inputrc",
"readline",
},
Extensions: []string{},
Interpreters: []string{},
Filenames: []string{
".inputrc",
"inputrc",
},
MimeType: "",
TMScope: "source.inputrc",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 538732839,
},
869538413: LanguageInfo{
Name: "Reason",
FSName: "",
Type: TypeForString("programming"),
Color: "#ff5847",
Group: "",
Aliases: []string{},
Extensions: []string{
".re",
".rei",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-rustsrc",
TMScope: "source.reason",
AceMode: "rust",
CodeMirrorMode: "rust",
Wrap: false,
LanguageID: 869538413,
},
319: LanguageInfo{
Name: "Rebol",
FSName: "",
Type: TypeForString("programming"),
Color: "#358a5b",
Group: "",
Aliases: []string{},
Extensions: []string{
".reb",
".r",
".r2",
".r3",
".rebol",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.rebol",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 319,
},
// 865765202 is "Record Jar" (e.g. IANA language-subtag-registry.txt);
// the generated Name field contained an anonymization placeholder "<NAME>".
865765202: LanguageInfo{
Name: "Record Jar",
FSName: "",
Type: TypeForString("data"),
Color: "#0673ba",
Group: "",
Aliases: []string{},
Extensions: []string{},
Interpreters: []string{},
Filenames: []string{
"language-subtag-registry.txt",
},
MimeType: "text/x-properties",
TMScope: "source.record-jar",
AceMode: "text",
CodeMirrorMode: "properties",
Wrap: false,
LanguageID: 865765202,
},
320: LanguageInfo{
Name: "Red",
FSName: "",
Type: TypeForString("programming"),
Color: "#f50000",
Group: "",
Aliases: []string{
"red/system",
},
Extensions: []string{
".red",
".reds",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.red",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 320,
},
321: LanguageInfo{
Name: "Redcode",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".cw",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 321,
},
1020148948: LanguageInfo{
Name: "Redirect Rules",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{
"redirects",
},
Extensions: []string{},
Interpreters: []string{},
Filenames: []string{
"_redirects",
},
MimeType: "",
TMScope: "source.redirects",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 1020148948,
},
363378884: LanguageInfo{
Name: "Regular Expression",
FSName: "",
Type: TypeForString("data"),
Color: "#009a00",
Group: "",
Aliases: []string{
"regexp",
"regex",
},
Extensions: []string{
".regexp",
".regex",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.regexp",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 363378884,
},
322: LanguageInfo{
Name: "Ren'Py",
FSName: "",
Type: TypeForString("programming"),
Color: "#ff7f7f",
Group: "",
Aliases: []string{
"renpy",
},
Extensions: []string{
".rpy",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.renpy",
AceMode: "python",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 322,
},
323: LanguageInfo{
Name: "RenderScript",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".rs",
".rsh",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 323,
},
51601661: LanguageInfo{
Name: "Rich Text Format",
FSName: "",
Type: TypeForString("markup"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".rtf",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "text.rtf",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 51601661,
},
431: LanguageInfo{
Name: "Ring",
FSName: "",
Type: TypeForString("programming"),
Color: "#2D54CB",
Group: "",
Aliases: []string{},
Extensions: []string{
".ring",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.ring",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 431,
},
878396783: LanguageInfo{
Name: "Riot",
FSName: "",
Type: TypeForString("markup"),
Color: "#A71E49",
Group: "",
Aliases: []string{},
Extensions: []string{
".riot",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "text.html.riot",
AceMode: "html",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 878396783,
},
324: LanguageInfo{
Name: "RobotFramework",
FSName: "",
Type: TypeForString("programming"),
Color: "#00c0b5",
Group: "",
Aliases: []string{},
Extensions: []string{
".robot",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "text.robot",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 324,
},
141: LanguageInfo{
Name: "Roff",
FSName: "",
Type: TypeForString("markup"),
Color: "#ecdebe",
Group: "",
Aliases: []string{
"groff",
"man",
"manpage",
"man page",
"man-page",
"mdoc",
"nroff",
"troff",
},
Extensions: []string{
".roff",
".1",
".1in",
".1m",
".1x",
".2",
".3",
".3in",
".3m",
".3p",
".3pm",
".3qt",
".3x",
".4",
".5",
".6",
".7",
".8",
".9",
".l",
".man",
".mdoc",
".me",
".ms",
".n",
".nr",
".rno",
".tmac",
},
Interpreters: []string{},
Filenames: []string{
"eqnrc",
"mmn",
"mmt",
"troffrc",
"troffrc-end",
},
MimeType: "text/troff",
TMScope: "text.roff",
AceMode: "text",
CodeMirrorMode: "troff",
Wrap: true,
LanguageID: 141,
},
// 612669833 is "Roff Manpage" (the man-page subset of Roff; note Group: "Roff"
// and the .1–.9/.man/.mdoc extensions); the generated Name field contained an
// anonymization placeholder "<NAME>".
612669833: LanguageInfo{
Name: "Roff Manpage",
FSName: "",
Type: TypeForString("markup"),
Color: "#ecdebe",
Group: "Roff",
Aliases: []string{},
Extensions: []string{
".1",
".1in",
".1m",
".1x",
".2",
".3",
".3in",
".3m",
".3p",
".3pm",
".3qt",
".3x",
".4",
".5",
".6",
".7",
".8",
".9",
".man",
".mdoc",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/troff",
TMScope: "text.roff",
AceMode: "text",
CodeMirrorMode: "troff",
Wrap: true,
LanguageID: 612669833,
},
325: LanguageInfo{
Name: "Rouge",
FSName: "",
Type: TypeForString("programming"),
Color: "#cc0088",
Group: "",
Aliases: []string{},
Extensions: []string{
".rg",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-clojure",
TMScope: "source.clojure",
AceMode: "clojure",
CodeMirrorMode: "clojure",
Wrap: false,
LanguageID: 325,
},
326: LanguageInfo{
Name: "Ruby",
FSName: "",
Type: TypeForString("programming"),
Color: "#701516",
Group: "",
Aliases: []string{
"jruby",
"macruby",
"rake",
"rb",
"rbx",
},
Extensions: []string{
".rb",
".builder",
".eye",
".fcgi",
".gemspec",
".god",
".jbuilder",
".mspec",
".pluginspec",
".podspec",
".prawn",
".rabl",
".rake",
".rbi",
".rbuild",
".rbw",
".rbx",
".ru",
".ruby",
".spec",
".thor",
".watchr",
},
Interpreters: []string{
"ruby",
"macruby",
"rake",
"jruby",
"rbx",
},
Filenames: []string{
".irbrc",
".pryrc",
".simplecov",
"Appraisals",
"Berksfile",
"Brewfile",
"Buildfile",
"Capfile",
"Dangerfile",
"Deliverfile",
"Fastfile",
"Gemfile",
"Guardfile",
"Jarfile",
"Mavenfile",
"Podfile",
"Puppetfile",
"Rakefile",
"Snapfile",
"Steepfile",
"Thorfile",
"Vagrantfile",
"buildfile",
},
MimeType: "text/x-ruby",
TMScope: "source.ruby",
AceMode: "ruby",
CodeMirrorMode: "ruby",
Wrap: false,
LanguageID: 326,
},
327: LanguageInfo{
Name: "Rust",
FSName: "",
Type: TypeForString("programming"),
Color: "#dea584",
Group: "",
Aliases: []string{
"rs",
},
Extensions: []string{
".rs",
".rs.in",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-rustsrc",
TMScope: "source.rust",
AceMode: "rust",
CodeMirrorMode: "rust",
Wrap: false,
LanguageID: 327,
},
328: LanguageInfo{
Name: "SAS",
FSName: "",
Type: TypeForString("programming"),
Color: "#B34936",
Group: "",
Aliases: []string{},
Extensions: []string{
".sas",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-sas",
TMScope: "source.sas",
AceMode: "text",
CodeMirrorMode: "sas",
Wrap: false,
LanguageID: 328,
},
329: LanguageInfo{
Name: "SCSS",
FSName: "",
Type: TypeForString("markup"),
Color: "#c6538c",
Group: "",
Aliases: []string{},
Extensions: []string{
".scss",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-scss",
TMScope: "source.css.scss",
AceMode: "scss",
CodeMirrorMode: "css",
Wrap: false,
LanguageID: 329,
},
880010326: LanguageInfo{
Name: "SELinux Policy",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{
"SELinux Kernel Policy Language",
"sepolicy",
},
Extensions: []string{
".te",
},
Interpreters: []string{},
Filenames: []string{
"file_contexts",
"genfs_contexts",
"initial_sids",
"port_contexts",
"security_classes",
},
MimeType: "",
TMScope: "source.sepolicy",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 880010326,
},
330: LanguageInfo{
Name: "SMT",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".smt2",
".smt",
},
Interpreters: []string{
"boolector",
"cvc4",
"mathsat5",
"opensmt",
"smtinterpol",
"smt-rat",
"stp",
"verit",
"yices2",
"z3",
},
Filenames: []string{},
MimeType: "",
TMScope: "source.smt",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 330,
},
331: LanguageInfo{
Name: "SPARQL",
FSName: "",
Type: TypeForString("data"),
Color: "#0C4597",
Group: "",
Aliases: []string{},
Extensions: []string{
".sparql",
".rq",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "application/sparql-query",
TMScope: "source.sparql",
AceMode: "text",
CodeMirrorMode: "sparql",
Wrap: false,
LanguageID: 331,
},
332: LanguageInfo{
Name: "SQF",
FSName: "",
Type: TypeForString("programming"),
Color: "#3F3F3F",
Group: "",
Aliases: []string{},
Extensions: []string{
".sqf",
".hqf",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.sqf",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 332,
},
333: LanguageInfo{
Name: "SQL",
FSName: "",
Type: TypeForString("data"),
Color: "#e38c00",
Group: "",
Aliases: []string{},
Extensions: []string{
".sql",
".cql",
".ddl",
".inc",
".mysql",
".prc",
".tab",
".udf",
".viw",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-sql",
TMScope: "source.sql",
AceMode: "sql",
CodeMirrorMode: "sql",
Wrap: false,
LanguageID: 333,
},
334: LanguageInfo{
Name: "SQLPL",
FSName: "",
Type: TypeForString("programming"),
Color: "#e38c00",
Group: "",
Aliases: []string{},
Extensions: []string{
".sql",
".db2",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-sql",
TMScope: "source.sql",
AceMode: "sql",
CodeMirrorMode: "sql",
Wrap: false,
LanguageID: 334,
},
335: LanguageInfo{
Name: "SRecode Template",
FSName: "",
Type: TypeForString("markup"),
Color: "#348a34",
Group: "",
Aliases: []string{},
Extensions: []string{
".srt",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-common-lisp",
TMScope: "source.lisp",
AceMode: "lisp",
CodeMirrorMode: "commonlisp",
Wrap: false,
LanguageID: 335,
},
554920715: LanguageInfo{
Name: "SSH Config",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "INI",
Aliases: []string{},
Extensions: []string{},
Interpreters: []string{},
Filenames: []string{
"ssh-config",
"ssh_config",
"sshconfig",
"sshconfig.snip",
"sshd-config",
"sshd_config",
},
MimeType: "",
TMScope: "source.ssh-config",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 554920715,
},
336: LanguageInfo{
Name: "STON",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "Smalltalk",
Aliases: []string{},
Extensions: []string{
".ston",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.smalltalk",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 336,
},
337: LanguageInfo{
Name: "SVG",
FSName: "",
Type: TypeForString("data"),
Color: "#ff9900",
Group: "",
Aliases: []string{},
Extensions: []string{
".svg",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/xml",
TMScope: "text.xml.svg",
AceMode: "xml",
CodeMirrorMode: "xml",
Wrap: false,
LanguageID: 337,
},
1066250075: LanguageInfo{
Name: "SWIG",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".i",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-c++src",
TMScope: "source.c++",
AceMode: "c_cpp",
CodeMirrorMode: "clike",
Wrap: false,
LanguageID: 1066250075,
},
338: LanguageInfo{
Name: "Sage",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".sage",
".sagews",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-python",
TMScope: "source.python",
AceMode: "python",
CodeMirrorMode: "python",
Wrap: false,
LanguageID: 338,
},
339: LanguageInfo{
Name: "SaltStack",
FSName: "",
Type: TypeForString("programming"),
Color: "#646464",
Group: "",
Aliases: []string{
"saltstate",
"salt",
},
Extensions: []string{
".sls",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-yaml",
TMScope: "source.yaml.salt",
AceMode: "yaml",
CodeMirrorMode: "yaml",
Wrap: false,
LanguageID: 339,
},
340: LanguageInfo{
Name: "Sass",
FSName: "",
Type: TypeForString("markup"),
Color: "#a53b70",
Group: "",
Aliases: []string{},
Extensions: []string{
".sass",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-sass",
TMScope: "source.sass",
AceMode: "sass",
CodeMirrorMode: "sass",
Wrap: false,
LanguageID: 340,
},
341: LanguageInfo{
Name: "Scala",
FSName: "",
Type: TypeForString("programming"),
Color: "#c22d40",
Group: "",
Aliases: []string{},
Extensions: []string{
".scala",
".kojo",
".sbt",
".sc",
},
Interpreters: []string{
"scala",
},
Filenames: []string{},
MimeType: "text/x-scala",
TMScope: "source.scala",
AceMode: "scala",
CodeMirrorMode: "clike",
Wrap: false,
LanguageID: 341,
},
342: LanguageInfo{
Name: "Scaml",
FSName: "",
Type: TypeForString("markup"),
Color: "#bd181a",
Group: "",
Aliases: []string{},
Extensions: []string{
".scaml",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.scaml",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 342,
},
343: LanguageInfo{
Name: "Scheme",
FSName: "",
Type: TypeForString("programming"),
Color: "#1e4aec",
Group: "",
Aliases: []string{},
Extensions: []string{
".scm",
".sch",
".sld",
".sls",
".sps",
".ss",
},
Interpreters: []string{
"scheme",
"guile",
"bigloo",
"chicken",
"csi",
"gosh",
"r6rs",
},
Filenames: []string{},
MimeType: "text/x-scheme",
TMScope: "source.scheme",
AceMode: "scheme",
CodeMirrorMode: "scheme",
Wrap: false,
LanguageID: 343,
},
344: LanguageInfo{
Name: "Scilab",
FSName: "",
Type: TypeForString("programming"),
Color: "#ca0f21",
Group: "",
Aliases: []string{},
Extensions: []string{
".sci",
".sce",
".tst",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.scilab",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 344,
},
345: LanguageInfo{
Name: "Self",
FSName: "",
Type: TypeForString("programming"),
Color: "#0579aa",
Group: "",
Aliases: []string{},
Extensions: []string{
".self",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 345,
},
664257356: LanguageInfo{
Name: "ShaderLab",
FSName: "",
Type: TypeForString("programming"),
Color: "#222c37",
Group: "",
Aliases: []string{},
Extensions: []string{
".shader",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.shaderlab",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 664257356,
},
346: LanguageInfo{
Name: "Shell",
FSName: "",
Type: TypeForString("programming"),
Color: "#89e051",
Group: "",
Aliases: []string{
"sh",
"shell-script",
"bash",
"zsh",
},
Extensions: []string{
".sh",
".bash",
".bats",
".cgi",
".command",
".env",
".fcgi",
".ksh",
".sh.in",
".tmux",
".tool",
".zsh",
".zsh-theme",
},
Interpreters: []string{
"ash",
"bash",
"dash",
"ksh",
"mksh",
"pdksh",
"rc",
"sh",
"zsh",
},
Filenames: []string{
".bash_aliases",
".bash_history",
".bash_logout",
".bash_profile",
".bashrc",
".cshrc",
".env",
".env.example",
".flaskenv",
".kshrc",
".login",
".profile",
".zlogin",
".zlogout",
".zprofile",
".zshenv",
".zshrc",
"9fs",
"PKGBUILD",
"bash_aliases",
"bash_logout",
"bash_profile",
"bashrc",
"cshrc",
"gradlew",
"kshrc",
"login",
"man",
"profile",
"zlogin",
"zlogout",
"zprofile",
"zshenv",
"zshrc",
},
MimeType: "text/x-sh",
TMScope: "source.shell",
AceMode: "sh",
CodeMirrorMode: "shell",
Wrap: false,
LanguageID: 346,
},
687511714: LanguageInfo{
Name: "ShellCheck Config",
FSName: "",
Type: TypeForString("data"),
Color: "#cecfcb",
Group: "",
Aliases: []string{
"shellcheckrc",
},
Extensions: []string{},
Interpreters: []string{},
Filenames: []string{
".shellcheckrc",
},
MimeType: "text/x-properties",
TMScope: "source.shellcheckrc",
AceMode: "ini",
CodeMirrorMode: "properties",
Wrap: false,
LanguageID: 687511714,
},
347: LanguageInfo{
Name: "ShellSession",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{
"bash session",
"console",
},
Extensions: []string{
".sh-session",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-sh",
TMScope: "text.shell-session",
AceMode: "sh",
CodeMirrorMode: "shell",
Wrap: false,
LanguageID: 347,
},
348: LanguageInfo{
Name: "Shen",
FSName: "",
Type: TypeForString("programming"),
Color: "#120F14",
Group: "",
Aliases: []string{},
Extensions: []string{
".shen",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.shen",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 348,
},
208976687: LanguageInfo{
Name: "Sieve",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".sieve",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "application/sieve",
TMScope: "source.sieve",
AceMode: "text",
CodeMirrorMode: "sieve",
Wrap: false,
LanguageID: 208976687,
},
987024632: LanguageInfo{
Name: "Singularity",
FSName: "",
Type: TypeForString("programming"),
Color: "#64E6AD",
Group: "",
Aliases: []string{},
Extensions: []string{},
Interpreters: []string{},
Filenames: []string{
"Singularity",
},
MimeType: "",
TMScope: "source.singularity",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 987024632,
},
349: LanguageInfo{
Name: "Slash",
FSName: "",
Type: TypeForString("programming"),
Color: "#007eff",
Group: "",
Aliases: []string{},
Extensions: []string{
".sl",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "text.html.slash",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 349,
},
894641667: LanguageInfo{
Name: "Slice",
FSName: "",
Type: TypeForString("programming"),
Color: "#003fa2",
Group: "",
Aliases: []string{},
Extensions: []string{
".ice",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.slice",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 894641667,
},
350: LanguageInfo{
Name: "Slim",
FSName: "",
Type: TypeForString("markup"),
Color: "#2b2b2b",
Group: "",
Aliases: []string{},
Extensions: []string{
".slim",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-slim",
TMScope: "text.slim",
AceMode: "text",
CodeMirrorMode: "slim",
Wrap: false,
LanguageID: 350,
},
164123055: LanguageInfo{
Name: "SmPL",
FSName: "",
Type: TypeForString("programming"),
Color: "#c94949",
Group: "",
Aliases: []string{
"coccinelle",
},
Extensions: []string{
".cocci",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.smpl",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 164123055,
},
351: LanguageInfo{
Name: "Smali",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".smali",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.smali",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 351,
},
352: LanguageInfo{
Name: "Smalltalk",
FSName: "",
Type: TypeForString("programming"),
Color: "#596706",
Group: "",
Aliases: []string{
"squeak",
},
Extensions: []string{
".st",
".cs",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-stsrc",
TMScope: "source.smalltalk",
AceMode: "text",
CodeMirrorMode: "smalltalk",
Wrap: false,
LanguageID: 352,
},
353: LanguageInfo{
Name: "Smarty",
FSName: "",
Type: TypeForString("programming"),
Color: "#f0c040",
Group: "",
Aliases: []string{},
Extensions: []string{
".tpl",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-smarty",
TMScope: "text.html.smarty",
AceMode: "smarty",
CodeMirrorMode: "smarty",
Wrap: false,
LanguageID: 353,
},
237469032: LanguageInfo{
Name: "Solidity",
FSName: "",
Type: TypeForString("programming"),
Color: "#AA6746",
Group: "",
Aliases: []string{},
Extensions: []string{
".sol",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.solidity",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 237469032,
},
222900098: LanguageInfo{
Name: "Soong",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{},
Interpreters: []string{},
Filenames: []string{
"Android.bp",
},
MimeType: "",
TMScope: "source.bp",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 222900098,
},
354: LanguageInfo{
Name: "SourcePawn",
FSName: "",
Type: TypeForString("programming"),
Color: "#f69e1d",
Group: "",
Aliases: []string{
"sourcemod",
},
Extensions: []string{
".sp",
".inc",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.sourcepawn",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 354,
},
767169629: LanguageInfo{
Name: "Spline Font Database",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".sfd",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "text.sfd",
AceMode: "yaml",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 767169629,
},
355: LanguageInfo{
Name: "Squirrel",
FSName: "",
Type: TypeForString("programming"),
Color: "#800000",
Group: "",
Aliases: []string{},
Extensions: []string{
".nut",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-c++src",
TMScope: "source.nut",
AceMode: "c_cpp",
CodeMirrorMode: "clike",
Wrap: false,
LanguageID: 355,
},
356: LanguageInfo{
Name: "Stan",
FSName: "",
Type: TypeForString("programming"),
Color: "#b2011d",
Group: "",
Aliases: []string{},
Extensions: []string{
".stan",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.stan",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 356,
},
357: LanguageInfo{
Name: "Standard ML",
FSName: "",
Type: TypeForString("programming"),
Color: "#dc566d",
Group: "",
Aliases: []string{
"sml",
},
Extensions: []string{
".ml",
".fun",
".sig",
".sml",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-ocaml",
TMScope: "source.ml",
AceMode: "text",
CodeMirrorMode: "mllike",
Wrap: false,
LanguageID: 357,
},
960266174: LanguageInfo{
Name: "Starlark",
FSName: "",
Type: TypeForString("programming"),
Color: "#76d275",
Group: "",
Aliases: []string{
"bazel",
"bzl",
},
Extensions: []string{
".bzl",
},
Interpreters: []string{},
Filenames: []string{
"BUCK",
"BUILD",
"BUILD.bazel",
"Tiltfile",
"WORKSPACE",
},
MimeType: "text/x-python",
TMScope: "source.python",
AceMode: "python",
CodeMirrorMode: "python",
Wrap: false,
LanguageID: 960266174,
},
358: LanguageInfo{
Name: "Stata",
FSName: "",
Type: TypeForString("programming"),
Color: "#1a5f91",
Group: "",
Aliases: []string{},
Extensions: []string{
".do",
".ado",
".doh",
".ihlp",
".mata",
".matah",
".sthlp",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.stata",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 358,
},
89855901: LanguageInfo{
Name: "StringTemplate",
FSName: "",
Type: TypeForString("markup"),
Color: "#3fb34f",
Group: "",
Aliases: []string{},
Extensions: []string{
".st",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/html",
TMScope: "source.string-template",
AceMode: "html",
CodeMirrorMode: "htmlmixed",
Wrap: false,
LanguageID: 89855901,
},
359: LanguageInfo{
Name: "Stylus",
FSName: "",
Type: TypeForString("markup"),
Color: "#ff6347",
Group: "",
Aliases: []string{},
Extensions: []string{
".styl",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.stylus",
AceMode: "stylus",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 359,
},
360: LanguageInfo{
Name: "SubRip Text",
FSName: "",
Type: TypeForString("data"),
Color: "#9e0101",
Group: "",
Aliases: []string{},
Extensions: []string{
".srt",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "text.srt",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 360,
},
826404698: LanguageInfo{
Name: "SugarSS",
FSName: "",
Type: TypeForString("markup"),
Color: "#2fcc9f",
Group: "CSS",
Aliases: []string{},
Extensions: []string{
".sss",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.css.postcss.sugarss",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 826404698,
},
361: LanguageInfo{
Name: "SuperCollider",
FSName: "",
Type: TypeForString("programming"),
Color: "#46390b",
Group: "",
Aliases: []string{},
Extensions: []string{
".sc",
".scd",
},
Interpreters: []string{
"sclang",
"scsynth",
},
Filenames: []string{},
MimeType: "",
TMScope: "source.supercollider",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 361,
},
928734530: LanguageInfo{
Name: "Svelte",
FSName: "",
Type: TypeForString("markup"),
Color: "#ff3e00",
Group: "",
Aliases: []string{},
Extensions: []string{
".svelte",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/html",
TMScope: "source.svelte",
AceMode: "html",
CodeMirrorMode: "htmlmixed",
Wrap: false,
LanguageID: 928734530,
},
362: LanguageInfo{
Name: "Swift",
FSName: "",
Type: TypeForString("programming"),
Color: "#F05138",
Group: "",
Aliases: []string{},
Extensions: []string{
".swift",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-swift",
TMScope: "source.swift",
AceMode: "text",
CodeMirrorMode: "swift",
Wrap: false,
LanguageID: 362,
},
363: LanguageInfo{
Name: "SystemVerilog",
FSName: "",
Type: TypeForString("programming"),
Color: "#DAE1C2",
Group: "",
Aliases: []string{},
Extensions: []string{
".sv",
".svh",
".vh",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-systemverilog",
TMScope: "source.systemverilog",
AceMode: "verilog",
CodeMirrorMode: "verilog",
Wrap: false,
LanguageID: 363,
},
422: LanguageInfo{
		Name:         "TI Program",
FSName: "",
Type: TypeForString("programming"),
Color: "#A0AA87",
Group: "",
Aliases: []string{},
Extensions: []string{
".8xp",
".8xk",
".8xk.txt",
".8xp.txt",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 422,
},
364: LanguageInfo{
Name: "TLA",
FSName: "",
Type: TypeForString("programming"),
Color: "#4b0079",
Group: "",
Aliases: []string{},
Extensions: []string{
".tla",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.tla",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 364,
},
365: LanguageInfo{
Name: "TOML",
FSName: "",
Type: TypeForString("data"),
Color: "#9c4221",
Group: "",
Aliases: []string{},
Extensions: []string{
".toml",
},
Interpreters: []string{},
Filenames: []string{
"Cargo.lock",
"Gopkg.lock",
"Pipfile",
"poetry.lock",
},
MimeType: "text/x-toml",
TMScope: "source.toml",
AceMode: "toml",
CodeMirrorMode: "toml",
Wrap: false,
LanguageID: 365,
},
918334941: LanguageInfo{
Name: "TSQL",
FSName: "",
Type: TypeForString("programming"),
Color: "#e38c00",
Group: "",
Aliases: []string{},
Extensions: []string{
".sql",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.tsql",
AceMode: "sql",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 918334941,
},
1035892117: LanguageInfo{
Name: "TSV",
FSName: "",
Type: TypeForString("data"),
Color: "#237346",
Group: "",
Aliases: []string{},
Extensions: []string{
".tsv",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.generic-db",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 1035892117,
},
94901924: LanguageInfo{
Name: "TSX",
FSName: "",
Type: TypeForString("programming"),
Color: "#2b7489",
Group: "TypeScript",
Aliases: []string{},
Extensions: []string{
".tsx",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/jsx",
TMScope: "source.tsx",
AceMode: "javascript",
CodeMirrorMode: "jsx",
Wrap: false,
LanguageID: 94901924,
},
366: LanguageInfo{
Name: "TXL",
FSName: "",
Type: TypeForString("programming"),
Color: "#0178b8",
Group: "",
Aliases: []string{},
Extensions: []string{
".txl",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.txl",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 366,
},
367: LanguageInfo{
Name: "Tcl",
FSName: "",
Type: TypeForString("programming"),
Color: "#e4cc98",
Group: "",
Aliases: []string{},
Extensions: []string{
".tcl",
".adp",
".tcl.in",
".tm",
},
Interpreters: []string{
"tclsh",
"wish",
},
Filenames: []string{
"owh",
"starfield",
},
MimeType: "text/x-tcl",
TMScope: "source.tcl",
AceMode: "tcl",
CodeMirrorMode: "tcl",
Wrap: false,
LanguageID: 367,
},
368: LanguageInfo{
Name: "Tcsh",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "Shell",
Aliases: []string{},
Extensions: []string{
".tcsh",
".csh",
},
Interpreters: []string{
"tcsh",
"csh",
},
Filenames: []string{},
MimeType: "text/x-sh",
TMScope: "source.shell",
AceMode: "sh",
CodeMirrorMode: "shell",
Wrap: false,
LanguageID: 368,
},
369: LanguageInfo{
Name: "TeX",
FSName: "",
Type: TypeForString("markup"),
Color: "#3D6117",
Group: "",
Aliases: []string{
"latex",
},
Extensions: []string{
".tex",
".aux",
".bbx",
".cbx",
".cls",
".dtx",
".ins",
".lbx",
".ltx",
".mkii",
".mkiv",
".mkvi",
".sty",
".toc",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-stex",
TMScope: "text.tex.latex",
AceMode: "tex",
CodeMirrorMode: "stex",
Wrap: true,
LanguageID: 369,
},
370: LanguageInfo{
Name: "Tea",
FSName: "",
Type: TypeForString("markup"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".tea",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.tea",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 370,
},
371: LanguageInfo{
Name: "Terra",
FSName: "",
Type: TypeForString("programming"),
Color: "#00004c",
Group: "",
Aliases: []string{},
Extensions: []string{
".t",
},
Interpreters: []string{
"lua",
},
Filenames: []string{},
MimeType: "text/x-lua",
TMScope: "source.terra",
AceMode: "lua",
CodeMirrorMode: "lua",
Wrap: false,
LanguageID: 371,
},
988020015: LanguageInfo{
Name: "Texinfo",
FSName: "",
Type: TypeForString("prose"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".texinfo",
".texi",
".txi",
},
Interpreters: []string{
"makeinfo",
},
Filenames: []string{},
MimeType: "",
TMScope: "text.texinfo",
AceMode: "text",
CodeMirrorMode: "",
Wrap: true,
LanguageID: 988020015,
},
372: LanguageInfo{
Name: "Text",
FSName: "",
Type: TypeForString("prose"),
Color: "",
Group: "",
Aliases: []string{
"fundamental",
"plain text",
},
Extensions: []string{
".txt",
".fr",
".nb",
".ncl",
".no",
},
Interpreters: []string{},
Filenames: []string{
"CITATION",
"CITATIONS",
"COPYING",
"COPYING.regex",
"COPYRIGHT.regex",
"FONTLOG",
"INSTALL",
"INSTALL.mysql",
"LICENSE",
"LICENSE.mysql",
"NEWS",
"README.me",
"README.mysql",
"README.nss",
"click.me",
"delete.me",
"keep.me",
"package.mask",
"package.use.mask",
"package.use.stable.mask",
"read.me",
"readme.1st",
"test.me",
"use.mask",
"use.stable.mask",
},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: true,
LanguageID: 372,
},
981795023: LanguageInfo{
Name: "TextMate Properties",
FSName: "",
Type: TypeForString("data"),
Color: "#df66e4",
Group: "",
Aliases: []string{
"tm-properties",
},
Extensions: []string{},
Interpreters: []string{},
Filenames: []string{
".tm_properties",
},
MimeType: "text/x-properties",
TMScope: "source.tm-properties",
AceMode: "properties",
CodeMirrorMode: "properties",
Wrap: false,
LanguageID: 981795023,
},
373: LanguageInfo{
Name: "Textile",
FSName: "",
Type: TypeForString("prose"),
Color: "#ffe7ac",
Group: "",
Aliases: []string{},
Extensions: []string{
".textile",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-textile",
TMScope: "none",
AceMode: "textile",
CodeMirrorMode: "textile",
Wrap: true,
LanguageID: 373,
},
374: LanguageInfo{
Name: "Thrift",
FSName: "",
Type: TypeForString("programming"),
Color: "#D12127",
Group: "",
Aliases: []string{},
Extensions: []string{
".thrift",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.thrift",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 374,
},
375: LanguageInfo{
Name: "Turing",
FSName: "",
Type: TypeForString("programming"),
Color: "#cf142b",
Group: "",
Aliases: []string{},
Extensions: []string{
".t",
".tu",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.turing",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 375,
},
376: LanguageInfo{
Name: "Turtle",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".ttl",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/turtle",
TMScope: "source.turtle",
AceMode: "text",
CodeMirrorMode: "turtle",
Wrap: false,
LanguageID: 376,
},
377: LanguageInfo{
Name: "Twig",
FSName: "",
Type: TypeForString("markup"),
Color: "#c1d026",
Group: "",
Aliases: []string{},
Extensions: []string{
".twig",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-twig",
TMScope: "text.html.twig",
AceMode: "twig",
CodeMirrorMode: "twig",
Wrap: false,
LanguageID: 377,
},
632765617: LanguageInfo{
Name: "Type Language",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{
"tl",
},
Extensions: []string{
".tl",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.tl",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 632765617,
},
378: LanguageInfo{
Name: "TypeScript",
FSName: "",
Type: TypeForString("programming"),
Color: "#2b7489",
Group: "",
Aliases: []string{
"ts",
},
Extensions: []string{
".ts",
},
Interpreters: []string{
"deno",
"ts-node",
},
Filenames: []string{},
MimeType: "application/typescript",
TMScope: "source.ts",
AceMode: "typescript",
CodeMirrorMode: "javascript",
Wrap: false,
LanguageID: 378,
},
379: LanguageInfo{
Name: "Unified Parallel C",
FSName: "",
Type: TypeForString("programming"),
Color: "#4e3617",
Group: "C",
Aliases: []string{},
Extensions: []string{
".upc",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-csrc",
TMScope: "source.c",
AceMode: "c_cpp",
CodeMirrorMode: "clike",
Wrap: false,
LanguageID: 379,
},
380: LanguageInfo{
Name: "Unity3D Asset",
FSName: "",
Type: TypeForString("data"),
Color: "#222c37",
Group: "",
Aliases: []string{},
Extensions: []string{
".anim",
".asset",
".mask",
".mat",
".meta",
".prefab",
".unity",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-yaml",
TMScope: "source.yaml",
AceMode: "yaml",
CodeMirrorMode: "yaml",
Wrap: false,
LanguageID: 380,
},
120: LanguageInfo{
Name: "Unix Assembly",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "Assembly",
Aliases: []string{},
Extensions: []string{
".s",
".ms",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.x86",
AceMode: "assembly_x86",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 120,
},
381: LanguageInfo{
Name: "Uno",
FSName: "",
Type: TypeForString("programming"),
Color: "#9933cc",
Group: "",
Aliases: []string{},
Extensions: []string{
".uno",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-csharp",
TMScope: "source.cs",
AceMode: "csharp",
CodeMirrorMode: "clike",
Wrap: false,
LanguageID: 381,
},
382: LanguageInfo{
Name: "UnrealScript",
FSName: "",
Type: TypeForString("programming"),
Color: "#a54c4d",
Group: "",
Aliases: []string{},
Extensions: []string{
".uc",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-java",
TMScope: "source.java",
AceMode: "java",
CodeMirrorMode: "clike",
Wrap: false,
LanguageID: 382,
},
383: LanguageInfo{
Name: "UrWeb",
FSName: "",
Type: TypeForString("programming"),
Color: "#ccccee",
Group: "",
Aliases: []string{
"Ur/Web",
"Ur",
},
Extensions: []string{
".ur",
".urs",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.ur",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 383,
},
603371597: LanguageInfo{
Name: "V",
FSName: "",
Type: TypeForString("programming"),
Color: "#4f87c4",
Group: "",
Aliases: []string{
"vlang",
},
Extensions: []string{
".v",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-go",
TMScope: "source.v",
AceMode: "golang",
CodeMirrorMode: "go",
Wrap: false,
LanguageID: 603371597,
},
399230729: LanguageInfo{
Name: "VBA",
FSName: "",
Type: TypeForString("programming"),
Color: "#867db1",
Group: "",
Aliases: []string{
"vb6",
"visual basic 6",
"visual basic for applications",
},
Extensions: []string{
".bas",
".cls",
".frm",
".frx",
".vba",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-vb",
TMScope: "source.vbnet",
AceMode: "text",
CodeMirrorMode: "vb",
Wrap: false,
LanguageID: 399230729,
},
408016005: LanguageInfo{
Name: "VBScript",
FSName: "",
Type: TypeForString("programming"),
Color: "#15dcdc",
Group: "",
Aliases: []string{},
Extensions: []string{
".vbs",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/vbscript",
TMScope: "source.vbnet",
AceMode: "text",
CodeMirrorMode: "vbscript",
Wrap: false,
LanguageID: 408016005,
},
384: LanguageInfo{
Name: "VCL",
FSName: "",
Type: TypeForString("programming"),
Color: "#148AA8",
Group: "",
Aliases: []string{},
Extensions: []string{
".vcl",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.varnish.vcl",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 384,
},
385: LanguageInfo{
Name: "VHDL",
FSName: "",
Type: TypeForString("programming"),
Color: "#adb2cb",
Group: "",
Aliases: []string{},
Extensions: []string{
".vhdl",
".vhd",
".vhf",
".vhi",
".vho",
".vhs",
".vht",
".vhw",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-vhdl",
TMScope: "source.vhdl",
AceMode: "vhdl",
CodeMirrorMode: "vhdl",
Wrap: false,
LanguageID: 385,
},
386: LanguageInfo{
Name: "Vala",
FSName: "",
Type: TypeForString("programming"),
Color: "#fbe5cd",
Group: "",
Aliases: []string{},
Extensions: []string{
".vala",
".vapi",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.vala",
AceMode: "vala",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 386,
},
544060961: LanguageInfo{
Name: "Valve Data Format",
FSName: "",
Type: TypeForString("data"),
Color: "#f26025",
Group: "",
Aliases: []string{
"keyvalues",
"vdf",
},
Extensions: []string{
".vdf",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.keyvalues",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 544060961,
},
387: LanguageInfo{
Name: "Verilog",
FSName: "",
Type: TypeForString("programming"),
Color: "#b2b7f8",
Group: "",
Aliases: []string{},
Extensions: []string{
".v",
".veo",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-verilog",
TMScope: "source.verilog",
AceMode: "verilog",
CodeMirrorMode: "verilog",
Wrap: false,
LanguageID: 387,
},
508563686: LanguageInfo{
Name: "Vim Help File",
FSName: "",
Type: TypeForString("prose"),
Color: "#199f4b",
Group: "",
Aliases: []string{
"help",
"vimhelp",
},
Extensions: []string{
".txt",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "text.vim-help",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 508563686,
},
388: LanguageInfo{
Name: "Vim Script",
FSName: "",
Type: TypeForString("programming"),
Color: "#199f4b",
Group: "",
Aliases: []string{
"vim",
"viml",
"nvim",
},
Extensions: []string{
".vim",
".vba",
".vimrc",
".vmb",
},
Interpreters: []string{},
Filenames: []string{
".exrc",
".gvimrc",
".nvimrc",
".vimrc",
"_vimrc",
"gvimrc",
"nvimrc",
"vimrc",
},
MimeType: "",
TMScope: "source.viml",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 388,
},
81265970: LanguageInfo{
Name: "Vim Snippet",
FSName: "",
Type: TypeForString("markup"),
Color: "#199f4b",
Group: "",
Aliases: []string{
"SnipMate",
"UltiSnip",
"UltiSnips",
"NeoSnippet",
},
Extensions: []string{
".snip",
".snippet",
".snippets",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.vim-snippet",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 81265970,
},
389: LanguageInfo{
Name: "Visual Basic .NET",
FSName: "",
Type: TypeForString("programming"),
Color: "#945db7",
Group: "",
Aliases: []string{
"visual basic",
"vbnet",
"vb .net",
"vb.net",
},
Extensions: []string{
".vb",
".vbhtml",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-vb",
TMScope: "source.vbnet",
AceMode: "text",
CodeMirrorMode: "vb",
Wrap: false,
LanguageID: 389,
},
390: LanguageInfo{
Name: "Volt",
FSName: "",
Type: TypeForString("programming"),
Color: "#1F1F1F",
Group: "",
Aliases: []string{},
Extensions: []string{
".volt",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-d",
TMScope: "source.d",
AceMode: "d",
CodeMirrorMode: "d",
Wrap: false,
LanguageID: 390,
},
391: LanguageInfo{
Name: "Vue",
FSName: "",
Type: TypeForString("markup"),
Color: "#41b883",
Group: "",
Aliases: []string{},
Extensions: []string{
".vue",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "text.html.vue",
AceMode: "html",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 391,
},
1055641948: LanguageInfo{
Name: "Vyper",
FSName: "",
Type: TypeForString("programming"),
Color: "#2980b9",
Group: "",
Aliases: []string{},
Extensions: []string{
".vy",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.vyper",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 1055641948,
},
392: LanguageInfo{
Name: "Wavefront Material",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".mtl",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.wavefront.mtl",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 392,
},
393: LanguageInfo{
Name: "Wavefront Object",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".obj",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.wavefront.obj",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 393,
},
394: LanguageInfo{
Name: "Web Ontology Language",
FSName: "",
Type: TypeForString("data"),
Color: "#5b70bd",
Group: "",
Aliases: []string{},
Extensions: []string{
".owl",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "text.xml",
AceMode: "xml",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 394,
},
956556503: LanguageInfo{
Name: "WebAssembly",
FSName: "",
Type: TypeForString("programming"),
Color: "#04133b",
Group: "",
Aliases: []string{
"wast",
"wasm",
},
Extensions: []string{
".wast",
".wat",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-common-lisp",
TMScope: "source.webassembly",
AceMode: "lisp",
CodeMirrorMode: "commonlisp",
Wrap: false,
LanguageID: 956556503,
},
395: LanguageInfo{
Name: "WebIDL",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".webidl",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-webidl",
TMScope: "source.webidl",
AceMode: "text",
CodeMirrorMode: "webidl",
Wrap: false,
LanguageID: 395,
},
658679714: LanguageInfo{
Name: "WebVTT",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".vtt",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.vtt",
AceMode: "text",
CodeMirrorMode: "",
Wrap: true,
LanguageID: 658679714,
},
668457123: LanguageInfo{
Name: "Wget Config",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "INI",
Aliases: []string{
"wgetrc",
},
Extensions: []string{},
Interpreters: []string{},
Filenames: []string{
".wgetrc",
},
MimeType: "",
TMScope: "source.wgetrc",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 668457123,
},
228: LanguageInfo{
Name: "Wikitext",
FSName: "",
Type: TypeForString("prose"),
Color: "#fc5757",
Group: "",
Aliases: []string{
"mediawiki",
"wiki",
},
Extensions: []string{
".mediawiki",
".wiki",
".wikitext",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "text.html.mediawiki",
AceMode: "text",
CodeMirrorMode: "",
Wrap: true,
LanguageID: 228,
},
969674868: LanguageInfo{
Name: "Windows Registry Entries",
FSName: "",
Type: TypeForString("data"),
Color: "#52d5ff",
Group: "",
Aliases: []string{},
Extensions: []string{
".reg",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-properties",
TMScope: "source.reg",
AceMode: "ini",
CodeMirrorMode: "properties",
Wrap: false,
LanguageID: 969674868,
},
686821385: LanguageInfo{
		Name:         "Witcher Script",
FSName: "",
Type: TypeForString("programming"),
Color: "#ff0000",
Group: "",
Aliases: []string{},
Extensions: []string{
".ws",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.witcherscript",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 686821385,
},
632745969: LanguageInfo{
Name: "Wollok",
FSName: "",
Type: TypeForString("programming"),
Color: "#a23738",
Group: "",
Aliases: []string{},
Extensions: []string{
".wlk",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.wollok",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 632745969,
},
396: LanguageInfo{
Name: "World of Warcraft Addon Data",
FSName: "",
Type: TypeForString("data"),
Color: "#f7e43f",
Group: "",
Aliases: []string{},
Extensions: []string{
".toc",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.toc",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 396,
},
782911107: LanguageInfo{
		Name:         "X BitMap",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "C",
Aliases: []string{
"xbm",
},
Extensions: []string{
".xbm",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-csrc",
TMScope: "source.c",
AceMode: "c_cpp",
CodeMirrorMode: "clike",
Wrap: false,
LanguageID: 782911107,
},
208700028: LanguageInfo{
Name: "X Font Directory Index",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{},
Interpreters: []string{},
Filenames: []string{
"encodings.dir",
"fonts.alias",
"fonts.dir",
"fonts.scale",
},
MimeType: "",
TMScope: "source.fontdir",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 208700028,
},
781846279: LanguageInfo{
Name: "X PixMap",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "C",
Aliases: []string{
"xpm",
},
Extensions: []string{
".xpm",
".pm",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-csrc",
TMScope: "source.c",
AceMode: "c_cpp",
CodeMirrorMode: "clike",
Wrap: false,
LanguageID: 781846279,
},
397: LanguageInfo{
Name: "X10",
FSName: "",
Type: TypeForString("programming"),
Color: "#4B6BEF",
Group: "",
Aliases: []string{
"xten",
},
Extensions: []string{
".x10",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.x10",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 397,
},
398: LanguageInfo{
Name: "XC",
FSName: "",
Type: TypeForString("programming"),
Color: "#99DA07",
Group: "",
Aliases: []string{},
Extensions: []string{
".xc",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-csrc",
TMScope: "source.xc",
AceMode: "c_cpp",
CodeMirrorMode: "clike",
Wrap: false,
LanguageID: 398,
},
225167241: LanguageInfo{
Name: "XCompose",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{},
Interpreters: []string{},
Filenames: []string{
".XCompose",
"XCompose",
"xcompose",
},
MimeType: "",
TMScope: "config.xcompose",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 225167241,
},
399: LanguageInfo{
Name: "XML",
FSName: "",
Type: TypeForString("data"),
Color: "#0060ac",
Group: "",
Aliases: []string{
"rss",
"xsd",
"wsdl",
},
Extensions: []string{
".xml",
".adml",
".admx",
".ant",
".axaml",
".axml",
".builds",
".ccproj",
".ccxml",
".clixml",
".cproject",
".cscfg",
".csdef",
".csl",
".csproj",
".ct",
".depproj",
".dita",
".ditamap",
".ditaval",
".dll.config",
".dotsettings",
".filters",
".fsproj",
".fxml",
".glade",
".gml",
".gmx",
".grxml",
".gst",
".hzp",
".iml",
".ivy",
".jelly",
".jsproj",
".kml",
".launch",
".mdpolicy",
".mjml",
".mm",
".mod",
".mxml",
".natvis",
".ncl",
".ndproj",
".nproj",
".nuspec",
".odd",
".osm",
".pkgproj",
".pluginspec",
".proj",
".props",
".ps1xml",
".psc1",
".pt",
".rdf",
".res",
".resx",
".rs",
".rss",
".sch",
".scxml",
".sfproj",
".shproj",
".srdf",
".storyboard",
".sublime-snippet",
".targets",
".tml",
".ts",
".tsx",
".ui",
".urdf",
".ux",
".vbproj",
".vcxproj",
".vsixmanifest",
".vssettings",
".vstemplate",
".vxml",
".wixproj",
".workflow",
".wsdl",
".wsf",
".wxi",
".wxl",
".wxs",
".x3d",
".xacro",
".xaml",
".xib",
".xlf",
".xliff",
".xmi",
".xml.dist",
".xmp",
".xproj",
".xsd",
".xspec",
".xul",
".zcml",
},
Interpreters: []string{},
Filenames: []string{
".classpath",
".cproject",
".project",
"App.config",
"NuGet.config",
"Settings.StyleCop",
"Web.Debug.config",
"Web.Release.config",
"Web.config",
"packages.config",
},
MimeType: "text/xml",
TMScope: "text.xml",
AceMode: "xml",
CodeMirrorMode: "xml",
Wrap: false,
LanguageID: 399,
},
75622871: LanguageInfo{
Name: "XML Property List",
FSName: "",
Type: TypeForString("data"),
Color: "#0060ac",
Group: "XML",
Aliases: []string{},
Extensions: []string{
".plist",
".stTheme",
".tmCommand",
".tmLanguage",
".tmPreferences",
".tmSnippet",
".tmTheme",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/xml",
TMScope: "text.xml.plist",
AceMode: "xml",
CodeMirrorMode: "xml",
Wrap: false,
LanguageID: 75622871,
},
400: LanguageInfo{
Name: "XPages",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".xsp-config",
".xsp.metadata",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/xml",
TMScope: "text.xml",
AceMode: "xml",
CodeMirrorMode: "xml",
Wrap: false,
LanguageID: 400,
},
401: LanguageInfo{
Name: "XProc",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".xpl",
".xproc",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/xml",
TMScope: "text.xml",
AceMode: "xml",
CodeMirrorMode: "xml",
Wrap: false,
LanguageID: 401,
},
402: LanguageInfo{
Name: "XQuery",
FSName: "",
Type: TypeForString("programming"),
Color: "#5232e7",
Group: "",
Aliases: []string{},
Extensions: []string{
".xquery",
".xq",
".xql",
".xqm",
".xqy",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "application/xquery",
TMScope: "source.xq",
AceMode: "xquery",
CodeMirrorMode: "xquery",
Wrap: false,
LanguageID: 402,
},
403: LanguageInfo{
Name: "XS",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".xs",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-csrc",
TMScope: "source.c",
AceMode: "c_cpp",
CodeMirrorMode: "clike",
Wrap: false,
LanguageID: 403,
},
404: LanguageInfo{
Name: "XSLT",
FSName: "",
Type: TypeForString("programming"),
Color: "#EB8CEB",
Group: "",
Aliases: []string{
"xsl",
},
Extensions: []string{
".xslt",
".xsl",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/xml",
TMScope: "text.xml.xsl",
AceMode: "xml",
CodeMirrorMode: "xml",
Wrap: false,
LanguageID: 404,
},
405: LanguageInfo{
Name: "Xojo",
FSName: "",
Type: TypeForString("programming"),
Color: "#81bd41",
Group: "",
Aliases: []string{},
Extensions: []string{
".xojo_code",
".xojo_menu",
".xojo_report",
".xojo_script",
".xojo_toolbar",
".xojo_window",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.xojo",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 405,
},
614078284: LanguageInfo{
Name: "Xonsh",
FSName: "",
Type: TypeForString("programming"),
Color: "#285EEF",
Group: "",
Aliases: []string{},
Extensions: []string{
".xsh",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-python",
TMScope: "source.python",
AceMode: "text",
CodeMirrorMode: "python",
Wrap: false,
LanguageID: 614078284,
},
406: LanguageInfo{
Name: "Xtend",
FSName: "",
Type: TypeForString("programming"),
Color: "#24255d",
Group: "",
Aliases: []string{},
Extensions: []string{
".xtend",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.xtend",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 406,
},
407: LanguageInfo{
Name: "YAML",
FSName: "",
Type: TypeForString("data"),
Color: "#cb171e",
Group: "",
Aliases: []string{
"yml",
},
Extensions: []string{
".yml",
".mir",
".reek",
".rviz",
".sublime-syntax",
".syntax",
".yaml",
".yaml-tmlanguage",
".yaml.sed",
".yml.mysql",
},
Interpreters: []string{},
Filenames: []string{
".clang-format",
".clang-tidy",
".gemrc",
"CITATION.cff",
"glide.lock",
"yarn.lock",
},
MimeType: "text/x-yaml",
TMScope: "source.yaml",
AceMode: "yaml",
CodeMirrorMode: "yaml",
Wrap: false,
LanguageID: 407,
},
408: LanguageInfo{
Name: "YANG",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".yang",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.yang",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 408,
},
805122868: LanguageInfo{
Name: "YARA",
FSName: "",
Type: TypeForString("programming"),
Color: "#220000",
Group: "",
Aliases: []string{},
Extensions: []string{
".yar",
".yara",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.yara",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 805122868,
},
378760102: LanguageInfo{
Name: "YASnippet",
FSName: "",
Type: TypeForString("markup"),
Color: "#32AB90",
Group: "",
Aliases: []string{
"snippet",
"yas",
},
Extensions: []string{
".yasnippet",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.yasnippet",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 378760102,
},
409: LanguageInfo{
Name: "Yacc",
FSName: "",
Type: TypeForString("programming"),
Color: "#4B6C4B",
Group: "",
Aliases: []string{},
Extensions: []string{
".y",
".yacc",
".yy",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.yacc",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 409,
},
952972794: LanguageInfo{
Name: "ZAP",
FSName: "",
Type: TypeForString("programming"),
Color: "#0d665e",
Group: "",
Aliases: []string{},
Extensions: []string{
".zap",
".xzap",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.zap",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 952972794,
},
973483626: LanguageInfo{
Name: "ZIL",
FSName: "",
Type: TypeForString("programming"),
Color: "#dc75e5",
Group: "",
Aliases: []string{},
Extensions: []string{
".zil",
".mud",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.zil",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 973483626,
},
40: LanguageInfo{
Name: "Zeek",
FSName: "",
Type: TypeForString("programming"),
Color: "",
Group: "",
Aliases: []string{
"bro",
},
Extensions: []string{
".zeek",
".bro",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.zeek",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 40,
},
494938890: LanguageInfo{
Name: "ZenScript",
FSName: "",
Type: TypeForString("programming"),
Color: "#00BCD1",
Group: "",
Aliases: []string{},
Extensions: []string{
".zs",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.zenscript",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 494938890,
},
410: LanguageInfo{
Name: "Zephir",
FSName: "",
Type: TypeForString("programming"),
Color: "#118f9e",
Group: "",
Aliases: []string{},
Extensions: []string{
".zep",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.php.zephir",
AceMode: "php",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 410,
},
646424281: LanguageInfo{
Name: "Zig",
FSName: "",
Type: TypeForString("programming"),
Color: "#ec915c",
Group: "",
Aliases: []string{},
Extensions: []string{
".zig",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.zig",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 646424281,
},
411: LanguageInfo{
Name: "Zimpl",
FSName: "",
Type: TypeForString("programming"),
Color: "#d67711",
Group: "",
Aliases: []string{},
Extensions: []string{
".zimpl",
".zmpl",
".zpl",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "none",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 411,
},
992375436: LanguageInfo{
Name: "cURL Config",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "INI",
Aliases: []string{
"curlrc",
},
Extensions: []string{},
Interpreters: []string{},
Filenames: []string{
".curlrc",
"_curlrc",
},
MimeType: "",
TMScope: "source.curlrc",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 992375436,
},
412: LanguageInfo{
Name: "desktop",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".desktop",
".desktop.in",
".service",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.desktop",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 412,
},
691605112: LanguageInfo{
Name: "dircolors",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".dircolors",
},
Interpreters: []string{},
Filenames: []string{
".dir_colors",
".dircolors",
"DIR_COLORS",
"_dir_colors",
"_dircolors",
"dir_colors",
},
MimeType: "",
TMScope: "source.dircolors",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 691605112,
},
413: LanguageInfo{
Name: "eC",
FSName: "",
Type: TypeForString("programming"),
Color: "#913960",
Group: "",
Aliases: []string{},
Extensions: []string{
".ec",
".eh",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.c.ec",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 413,
},
414: LanguageInfo{
Name: "edn",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{},
Extensions: []string{
".edn",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-clojure",
TMScope: "source.clojure",
AceMode: "clojure",
CodeMirrorMode: "clojure",
Wrap: false,
LanguageID: 414,
},
415: LanguageInfo{
Name: "fish",
FSName: "",
Type: TypeForString("programming"),
Color: "#4aae47",
Group: "Shell",
Aliases: []string{},
Extensions: []string{
".fish",
},
Interpreters: []string{
"fish",
},
Filenames: []string{},
MimeType: "",
TMScope: "source.fish",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 415,
},
560883276: LanguageInfo{
Name: "hoon",
FSName: "",
Type: TypeForString("programming"),
Color: "#00b171",
Group: "",
Aliases: []string{},
Extensions: []string{
".hoon",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.hoon",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 560883276,
},
905371884: LanguageInfo{
Name: "jq",
FSName: "",
Type: TypeForString("programming"),
Color: "#c7254e",
Group: "",
Aliases: []string{},
Extensions: []string{
".jq",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.jq",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 905371884,
},
970675279: LanguageInfo{
Name: "kvlang",
FSName: "",
Type: TypeForString("markup"),
Color: "#1da6e0",
Group: "",
Aliases: []string{},
Extensions: []string{
".kv",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.python.kivy",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 970675279,
},
517654727: LanguageInfo{
Name: "mIRC Script",
FSName: "",
Type: TypeForString("programming"),
Color: "#3d57c3",
Group: "",
Aliases: []string{},
Extensions: []string{
".mrc",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.msl",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 517654727,
},
462488745: LanguageInfo{
Name: "mcfunction",
FSName: "",
Type: TypeForString("programming"),
Color: "#E22837",
Group: "",
Aliases: []string{},
Extensions: []string{
".mcfunction",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.mcfunction",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 462488745,
},
416: LanguageInfo{
Name: "mupad",
FSName: "",
Type: TypeForString("programming"),
Color: "#244963",
Group: "",
Aliases: []string{},
Extensions: []string{
".mu",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.mupad",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 416,
},
775996197: LanguageInfo{
Name: "nanorc",
FSName: "",
Type: TypeForString("data"),
Color: "#2d004d",
Group: "INI",
Aliases: []string{},
Extensions: []string{
".nanorc",
},
Interpreters: []string{},
Filenames: []string{
".nanorc",
"nanorc",
},
MimeType: "",
TMScope: "source.nanorc",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 775996197,
},
417: LanguageInfo{
Name: "nesC",
FSName: "",
Type: TypeForString("programming"),
Color: "#94B0C7",
Group: "",
Aliases: []string{},
Extensions: []string{
".nc",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.nesc",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 417,
},
418: LanguageInfo{
Name: "ooc",
FSName: "",
Type: TypeForString("programming"),
Color: "#b0b77e",
Group: "",
Aliases: []string{},
Extensions: []string{
".ooc",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.ooc",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 418,
},
970539067: LanguageInfo{
Name: "q",
FSName: "",
Type: TypeForString("programming"),
Color: "#0040cd",
Group: "",
Aliases: []string{},
Extensions: []string{
".q",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.q",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 970539067,
},
419: LanguageInfo{
Name: "reStructuredText",
FSName: "",
Type: TypeForString("prose"),
Color: "#141414",
Group: "",
Aliases: []string{
"rst",
},
Extensions: []string{
".rst",
".rest",
".rest.txt",
".rst.txt",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-rst",
TMScope: "text.restructuredtext",
AceMode: "text",
CodeMirrorMode: "rst",
Wrap: true,
LanguageID: 419,
},
674736065: LanguageInfo{
Name: "robots.txt",
FSName: "",
Type: TypeForString("data"),
Color: "",
Group: "",
Aliases: []string{
"robots",
"robots txt",
},
Extensions: []string{},
Interpreters: []string{},
Filenames: []string{
"robots.txt",
},
MimeType: "",
TMScope: "text.robots-txt",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 674736065,
},
847830017: LanguageInfo{
Name: "sed",
FSName: "",
Type: TypeForString("programming"),
Color: "#64b970",
Group: "",
Aliases: []string{},
Extensions: []string{
".sed",
},
Interpreters: []string{
"gsed",
"minised",
"sed",
"ssed",
},
Filenames: []string{},
MimeType: "",
TMScope: "source.sed",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 847830017,
},
374521672: LanguageInfo{
Name: "wdl",
FSName: "",
Type: TypeForString("programming"),
Color: "#42f1f4",
Group: "",
Aliases: []string{},
Extensions: []string{
".wdl",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.wdl",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 374521672,
},
420: LanguageInfo{
Name: "wisp",
FSName: "",
Type: TypeForString("programming"),
Color: "#7582D1",
Group: "",
Aliases: []string{},
Extensions: []string{
".wisp",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "text/x-clojure",
TMScope: "source.clojure",
AceMode: "clojure",
CodeMirrorMode: "clojure",
Wrap: false,
LanguageID: 420,
},
421: LanguageInfo{
Name: "xBase",
FSName: "",
Type: TypeForString("programming"),
Color: "#403a40",
Group: "",
Aliases: []string{
"advpl",
"clipper",
"foxpro",
},
Extensions: []string{
".prg",
".ch",
".prw",
},
Interpreters: []string{},
Filenames: []string{},
MimeType: "",
TMScope: "source.harbour",
AceMode: "text",
CodeMirrorMode: "",
Wrap: false,
LanguageID: 421,
},
} | data/languageInfo.go | 0.727201 | 0.577555 | languageInfo.go | starcoder |
package api
// GetAllSubjects returns every subject that appears in the current policy.
func (e *Enforcer) GetAllSubjects() []string {
	subjects := e.model.GetValuesForFieldInPolicy("p", "p", 0)
	return subjects
}
// GetAllObjects returns every object that appears in the current policy.
func (e *Enforcer) GetAllObjects() []string {
	objects := e.model.GetValuesForFieldInPolicy("p", "p", 1)
	return objects
}
// GetAllActions returns every action that appears in the current policy.
func (e *Enforcer) GetAllActions() []string {
	actions := e.model.GetValuesForFieldInPolicy("p", "p", 2)
	return actions
}
// GetAllRoles returns every role that appears in the current policy.
func (e *Enforcer) GetAllRoles() []string {
	roles := e.model.GetValuesForFieldInPolicy("g", "g", 1)
	return roles
}
// GetPolicy returns all the authorization rules in the policy.
func (e *Enforcer) GetPolicy() [][]string {
	rules := e.model.GetPolicy("p", "p")
	return rules
}
// GetFilteredPolicy returns the authorization rules whose field at
// fieldIndex equals fieldValue.
func (e *Enforcer) GetFilteredPolicy(fieldIndex int, fieldValue string) [][]string {
	rules := e.model.GetFilteredPolicy("p", "p", fieldIndex, fieldValue)
	return rules
}
// GetGroupingPolicy returns all the role inheritance rules in the policy.
func (e *Enforcer) GetGroupingPolicy() [][]string {
	rules := e.model.GetPolicy("g", "g")
	return rules
}
// AddPolicy adds an authorization rule to the current policy.
func (e *Enforcer) AddPolicy(policy []string) {
	e.model.AddPolicy("p", "p", policy)
}
// RemovePolicy removes an authorization rule from the current policy.
func (e *Enforcer) RemovePolicy(policy []string) {
	e.model.RemovePolicy("p", "p", policy)
}
// RemoveFilteredPolicy removes every authorization rule whose field at
// fieldIndex equals fieldValue.
func (e *Enforcer) RemoveFilteredPolicy(fieldIndex int, fieldValue string) {
	e.model.RemoveFilteredPolicy("p", "p", fieldIndex, fieldValue)
}
// AddGroupingPolicy adds a role inheritance rule to the current policy and
// rebuilds the role inheritance links so the change takes effect.
func (e *Enforcer) AddGroupingPolicy(policy []string) {
	e.model.AddPolicy("g", "g", policy)
	e.model.BuildRoleLinks()
}
// RemoveGroupingPolicy removes a role inheritance rule from the current
// policy and rebuilds the role inheritance links.
func (e *Enforcer) RemoveGroupingPolicy(policy []string) {
	e.model.RemovePolicy("g", "g", policy)
	e.model.BuildRoleLinks()
}
// RemoveFilteredGroupingPolicy removes every role inheritance rule whose
// field at fieldIndex equals fieldValue, then rebuilds the role links.
func (e *Enforcer) RemoveFilteredGroupingPolicy(fieldIndex int, fieldValue string) {
	e.model.RemoveFilteredPolicy("g", "g", fieldIndex, fieldValue)
	e.model.BuildRoleLinks()
}
// AddSubjectAttributeFunction adds the function that gets attributes for a subject in ABAC.
func (e *Enforcer) AddSubjectAttributeFunction(function func(args ...interface{}) (interface{}, error)) {
	e.fm.AddFunction("subAttr", function)
}
// AddObjectAttributeFunction adds the function that gets attributes for an object in ABAC.
func (e *Enforcer) AddObjectAttributeFunction(function func(args ...interface{}) (interface{}, error)) {
	e.fm.AddFunction("objAttr", function)
}
// AddActionAttributeFunction adds the function that gets attributes for an action in ABAC.
func (e *Enforcer) AddActionAttributeFunction(function func(args ...interface{}) (interface{}, error)) {
	e.fm.AddFunction("actAttr", function)
}
package tree
import "github.com/pkg/errors"
var (
	// ErrCyclicDependencyEncountered is returned when a cyclic dependency
	// is detected in the tree.
	ErrCyclicDependencyEncountered = errors.New("a cycle dependency encountered in the tree")
)
// MultiRootTree - represents a data type which has multiple independent root nodes;
// each root node has its own independent tree based on the dependencies of its
// TreeNodes. It also maintains a map of nodes for faster lookups and for
// managing node data.
type MultiRootTree struct {
	rootNodes []string // names of the nodes marked as roots, in marking order
	dataMap map[string]*TreeNode // node name -> node, for O(1) lookup
}
// GetRootNodes returns the nodes previously marked as roots, in the order
// they were marked.
// NOTE(review): the lookup flag is ignored, so a root name that was never
// registered via AddNode yields a nil entry — confirm roots are always added.
func (t *MultiRootTree) GetRootNodes() []*TreeNode {
	roots := []*TreeNode{}
	for _, rootName := range t.rootNodes {
		root, _ := t.GetNodeByName(rootName)
		roots = append(roots, root)
	}
	return roots
}
// MarkRoot records node as one of the tree's independent roots.
func (t *MultiRootTree) MarkRoot(node *TreeNode) {
	name := node.GetName()
	t.rootNodes = append(t.rootNodes, name)
}

// AddNode registers node in the lookup map, overwriting any existing node
// with the same name.
func (t *MultiRootTree) AddNode(node *TreeNode) {
	name := node.GetName()
	t.dataMap[name] = node
}

// AddNodeIfNotExist registers node only when no node with the same name is
// present yet.
func (t *MultiRootTree) AddNodeIfNotExist(node *TreeNode) {
	if _, exists := t.GetNodeByName(node.GetName()); !exists {
		t.AddNode(node)
	}
}

// GetNodeByName looks up a node by its name; the second result reports
// whether the node was found.
func (t *MultiRootTree) GetNodeByName(dagName string) (*TreeNode, bool) {
	node, found := t.dataMap[dagName]
	return node, found
}
// IsCyclic reports whether any dependency cycle exists in the tree; it
// returns a wrapped ErrCyclicDependencyEncountered when one is found, or
// nil otherwise.
func (t *MultiRootTree) IsCyclic() error {
	visited := make(map[string]bool)
	for _, node := range t.dataMap {
		if _, seen := visited[node.GetName()]; seen {
			continue
		}
		// Each DFS run gets a fresh path map; visited is shared so subtrees
		// are explored only once.
		if err := t.hasCycle(node, visited, make(map[string]bool)); err != nil {
			return err
		}
	}
	return nil
}
// hasCycle runs a depth-first search from root and returns a wrapped
// ErrCyclicDependencyEncountered as soon as a back edge is found.
//
// visited marks nodes whose whole subtree has already been explored
// (shared across calls); pathMap marks nodes on the current DFS path:
// true while on the path, reset to false on the way back up.
func (t *MultiRootTree) hasCycle(root *TreeNode, visited, pathMap map[string]bool) error {
	rootName := root.GetName()
	// Idiomatic zero-value check: a missing key reads as false, so the
	// original's separate presence test was redundant.
	if visited[rootName] {
		return nil
	}
	pathMap[rootName] = true
	visited[rootName] = true
	for _, child := range root.Dependents {
		childName := child.GetName()
		// NOTE(review): the lookup flag is ignored; a dependent that was
		// never added to the tree yields a nil node here — confirm this
		// cannot happen.
		n, _ := t.GetNodeByName(childName)
		if !visited[childName] {
			if err := t.hasCycle(n, visited, pathMap); err != nil {
				return err
			}
		}
		// A child already on the current path means a back edge, e.g. 1 -> 2 -> 1.
		if pathMap[childName] {
			return errors.Wrap(ErrCyclicDependencyEncountered, rootName)
		}
	}
	pathMap[rootName] = false
	return nil
}
// NewMultiRootTree returns an instance of multi root dag tree
func NewMultiRootTree() *MultiRootTree {
return &MultiRootTree{
dataMap: map[string]*TreeNode{},
rootNodes: []string{},
}
} | core/tree/multi_root_tree.go | 0.680879 | 0.447098 | multi_root_tree.go | starcoder |
package encoding
import (
"encoding/binary"
"io"
"math"
"math/big"
"golang.org/x/text/transform"
)
// writeScratchSize is the size in bytes of the encoder's scratch buffer.
const writeScratchSize = 4096

// Encoder encodes hdb protocol datatypes on the basis of an io.Writer.
type Encoder struct {
	wr io.Writer // destination writer
	err error // sticky error; once set, subsequent writes are skipped
	b []byte // scratch buffer (min 15 Bytes - Decimal)
	tr transform.Transformer // UTF-8 -> CESU-8 transformer used by CESU8Bytes
}
// NewEncoder creates a new Encoder instance writing to wr.
// The encoder argument is a factory returning the transformer used to
// convert UTF-8 input to CESU-8 output (see CESU8Bytes).
func NewEncoder(wr io.Writer, encoder func() transform.Transformer) *Encoder {
	return &Encoder{
		wr: wr,
		b: make([]byte, writeScratchSize),
		tr: encoder(),
	}
}
// Zeroes writes cnt zero byte values.
// It zeroes at most a scratch-buffer-sized prefix once, then writes that
// prefix repeatedly in chunks until cnt bytes have been emitted.
func (e *Encoder) Zeroes(cnt int) {
	if e.err != nil {
		return
	}
	// zero out scratch area (only as much as will actually be written)
	l := cnt
	if l > len(e.b) {
		l = len(e.b)
	}
	for i := 0; i < l; i++ {
		e.b[i] = 0
	}
	// write the zeroed prefix in chunks of at most len(e.b) bytes
	for i := 0; i < cnt; {
		j := cnt - i
		if j > len(e.b) {
			j = len(e.b)
		}
		n, _ := e.wr.Write(e.b[:j])
		// NOTE(review): a short write aborts silently without recording the
		// error in e.err — confirm this is intentional.
		if n != j {
			return
		}
		i += n
	}
}
// Bytes writes the raw byte slice p, unless a previous error is pending.
func (e *Encoder) Bytes(p []byte) {
	if e.err == nil {
		e.wr.Write(p)
	}
}

// Byte writes a single byte.
func (e *Encoder) Byte(b byte) { // WriteB as sig differs from WriteByte (vet issues)
	if e.err == nil {
		e.b[0] = b
		e.Bytes(e.b[:1])
	}
}

// Bool writes a boolean as one byte: 1 for true, 0 for false.
func (e *Encoder) Bool(v bool) {
	if e.err == nil {
		var b byte
		if v {
			b = 1
		}
		e.Byte(b)
	}
}
// Int8 writes an int8 as a single byte.
func (e *Encoder) Int8(i int8) {
	if e.err != nil {
		return
	}
	e.Byte(byte(i))
}
// Int16 writes an int16 in little-endian byte order.
func (e *Encoder) Int16(i int16) {
	if e.err != nil {
		return
	}
	binary.LittleEndian.PutUint16(e.b[:2], uint16(i))
	e.wr.Write(e.b[:2])
}
// Uint16 writes an uint16 in little-endian byte order.
func (e *Encoder) Uint16(i uint16) {
	if e.err != nil {
		return
	}
	binary.LittleEndian.PutUint16(e.b[:2], i)
	e.wr.Write(e.b[:2])
}
// Int32 writes an int32 in little-endian byte order.
func (e *Encoder) Int32(i int32) {
	if e.err != nil {
		return
	}
	binary.LittleEndian.PutUint32(e.b[:4], uint32(i))
	e.wr.Write(e.b[:4])
}
// Uint32 writes an uint32 in little-endian byte order.
func (e *Encoder) Uint32(i uint32) {
	if e.err != nil {
		return
	}
	binary.LittleEndian.PutUint32(e.b[:4], i)
	e.wr.Write(e.b[:4])
}
// Int64 writes an int64 in little-endian byte order.
func (e *Encoder) Int64(i int64) {
	if e.err != nil {
		return
	}
	binary.LittleEndian.PutUint64(e.b[:8], uint64(i))
	e.wr.Write(e.b[:8])
}
// Uint64 writes an uint64 in little-endian byte order.
func (e *Encoder) Uint64(i uint64) {
	if e.err != nil {
		return
	}
	binary.LittleEndian.PutUint64(e.b[:8], i)
	e.wr.Write(e.b[:8])
}
// Float32 writes a float32 as its IEEE 754 bit pattern in little-endian
// byte order.
func (e *Encoder) Float32(f float32) {
	if e.err != nil {
		return
	}
	bits := math.Float32bits(f)
	binary.LittleEndian.PutUint32(e.b[:4], bits)
	e.wr.Write(e.b[:4])
}
// Float64 writes a float64 as its IEEE 754 bit pattern in little-endian
// byte order.
func (e *Encoder) Float64(f float64) {
	if e.err != nil {
		return
	}
	bits := math.Float64bits(f)
	binary.LittleEndian.PutUint64(e.b[:8], bits)
	e.wr.Write(e.b[:8])
}
// Decimal writes a decimal value given as significand m and exponent exp.
// The significand words are packed little-endian into the first bytes of a
// decSize-byte buffer; the exponent is biased by dec128Bias and spread over
// bytes 14 and 15, with the sign in the top bit of byte 15 (presumably the
// HDB decimal128 wire layout — confirm against the protocol docs).
// NOTE(review): unlike the other write methods, Decimal does not check
// e.err before writing — confirm this is intentional.
func (e *Encoder) Decimal(m *big.Int, exp int) {
	b := e.b[:decSize]
	// little endian bigint words (significand) -> little endian db decimal format
	j := 0
	for _, d := range m.Bits() {
		for i := 0; i < _S; i++ {
			b[j] = byte(d)
			d >>= 8
			j++
		}
	}
	// clear scratch buffer (zero the bytes not covered by the significand)
	for i := j; i < decSize; i++ {
		b[i] = 0
	}
	// bias the exponent and pack it into bytes 14 and 15
	exp += dec128Bias
	b[14] |= (byte(exp) << 1)
	b[15] = byte(uint16(exp) >> 7)
	// sign bit in the most significant bit of the last byte
	if m.Sign() == -1 {
		b[15] |= 0x80
	}
	e.wr.Write(b)
}
// Fixed writes a fixed decimal value: the significand m is encoded as a
// size-byte little-endian two's complement integer.
// NOTE(review): for negative values m is modified in place (negated,
// bit-inverted and incremented), so callers must not reuse m afterwards —
// confirm this is intentional. Like Decimal, this method does not check
// e.err before writing.
func (e *Encoder) Fixed(m *big.Int, size int) {
	b := e.b[:size]
	neg := m.Sign() == -1
	fill := byte(0)
	if neg {
		// make positive
		m.Neg(m)
		// 2s complement
		bits := m.Bits()
		// - invert all bits
		for i := 0; i < len(bits); i++ {
			bits[i] = ^bits[i]
		}
		// - add 1
		m.Add(m, natOne)
		fill = 0xff
	}
	// little endian bigint words (significand) -> little endian db decimal format
	j := 0
	for _, d := range m.Bits() {
		/*
			check j < size as number of bytes in m.Bits words can exceed number of fixed size bytes
			e.g. 64 bit architecture:
			- two words equals 16 bytes but fixed size might be 12 bytes
			- invariant: all 'skipped' bytes in most significant word are zero
		*/
		for i := 0; i < _S && j < size; i++ {
			b[j] = byte(d)
			d >>= 8
			j++
		}
	}
	// clear scratch buffer: pad with 0x00 (positive) or 0xff (negative, sign extension)
	for i := j; i < size; i++ {
		b[i] = fill
	}
	e.wr.Write(b)
}
// String writes the UTF-8 bytes of s.
func (e *Encoder) String(s string) {
	if e.err == nil {
		e.Bytes([]byte(s))
	}
}
// CESU8Bytes writes an UTF-8 byte slice as CESU-8 and returns the number of
// CESU-8 bytes written. A transformation error other than
// transform.ErrShortDst (which only signals a full scratch buffer) is stored
// in e.err, disabling subsequent writes, and the count written so far is
// returned.
func (e *Encoder) CESU8Bytes(p []byte) int {
	if e.err != nil {
		return 0
	}
	e.tr.Reset()
	cnt := 0
	for i := 0; i < len(p); {
		// transform as much of the remaining input as fits into the scratch buffer
		nDst, nSrc, err := e.tr.Transform(e.b, p[i:], true)
		if nDst != 0 {
			// NOTE(review): the write error is discarded; only the byte
			// count is accumulated — confirm this is intentional.
			n, _ := e.wr.Write(e.b[:nDst])
			cnt += n
		}
		if err != nil && err != transform.ErrShortDst {
			e.err = err
			return cnt
		}
		i += nSrc
	}
	return cnt
}
// CESU8String is like CESU8Bytes with an UTF-8 string as parameter.
func (e *Encoder) CESU8String(s string) int {
	return e.CESU8Bytes([]byte(s))
}
package main
// helpScriggo is the top-level help text printed by 'scriggo help'.
const helpScriggo = `
Scriggo is a template engine and Go interpreter. The scriggo command is a tool
that can be used to execute a template, initialize an interpreter, generate
the source files for a package importer and also provides a web server that
serves a template rooted at the current directory, useful to learn Scriggo
templates.
For more about executing a template, see 'scriggo help run'.
The commands are:
run run a template
serve run a web server and serve the template rooted at the current
directory
init initialize an interpreter for Go programs
import generate the source for an importer used by Scriggo to import
a package when an 'import' statement is executed
version print the scriggo command version
stdlib print the packages imported by the instruction
'IMPORT STANDARD LIBRARY' in the Scriggofile
Use 'scriggo help <command>' for more information about a command.
Additional help topics:
Scriggofile syntax of the Scriggofile
limitations limitations of the Scriggo compiler/runtime
`
const helpInit = `
usage: scriggo init [dir]
Init initializes an interpreter for Go programs creating the files in the
directory dir. If no argument is given, the files are created in the current
directory.
It creates in the directory:
* a go.mod file, if it does not already exist, with the directory name as
module path
* a packages.go file with the native packages that can be imported with the
import statement in the interpreted code
* a main.go file with the 'main' function
* a Scriggofile named 'Scriggofile' with the instructions to import the
packages of the standard library
then it calls the 'go mod tidy' command.
The interpreter can then be compiled with the 'go build' command or installed
with the 'go install' command.
For example:
scriggo init ./example
will initialize an interpreter in the directory 'example'.
In the directory 'example', executing the command
go install
an executable interpreter is installed in the 'bin' directory of the GOPATH with
name 'example' ('example.exe' on Windows).
To execute a Go source file called 'program.go' with the 'example' interpreter,
execute the command:
example program.go
You can change the Scriggofile to import other packages and than use the
scriggo import command to rebuild the packages.go file with the package
importer.
For more about the Scriggofile specific format, see 'scriggo help Scriggofile'.
See also: scriggo import.
`
const helpImport = `
usage: scriggo import [-f Scriggofile] [-v] [-x] [-o output] [module]
Import generate the code for a package importer. An importer is used by Scriggo
to import a package when an 'import' statement is executed.
The code for the importer is generated from the instructions in a Scriggofile.
The Scriggofile should be in a Go module.
Import prints the generated source to the standard output. Use the flag -o
to redirect the source to a named output file.
If an argument is given, it must be a local rooted path or must begin with
a . or .. element and it must be a module root directory. import looks for
a Scriggofile named 'Scriggofile' in that directory.
If no argument is given, the action applies to the current directory.
The -f flag forces import to read the given Scriggofile instead of the
Scriggofile of the module.
The importer in the generated Go file have type native.Importer and it is
assigned to a variable named 'packages'. The variable can be used as an
argument to the Build and BuildTemplate functions in the scriggo package.
To give a different name to the variable use the instruction SET VARIABLE in
the Scriggofile:
SET VARIABLE foo
The package name in the generated Go file is by default 'main', to give
a different name to the package use the instruction SET PACKAGE in the
Scriggofile:
SET PACKAGE boo
The -v flag prints the imported packages as defined in the Scriggofile.
The -x flag prints the executed commands.
The -o flag writes the generated Go file to the named output file, instead to
the standard output.
For more about the Scriggofile specific format, see 'scriggo help Scriggofile'.
`
const helpRun = `
usage: scriggo run [-o output] [run flags] file
Run runs a template file and its extended, imported and rendered files.
For example:
scriggo run article.html
runs the file 'article.html' as HTML and prints the result to the standard
output. Extended, imported and rendered file paths are relative to the
directory of the executed file.
The -o flag writes the result to the named output file or directory, instead to
the standard output.
Markdown is converted to HTML with the Goldmark parser with the options
html.WithUnsafe, parser.WithAutoHeadingID and extension.GFM.
The run flags are:
-root dir
set the root directory to dir instead of the file's directory.
-const name=value
run the template file with a global constant with the given name and
value. name should be a Go identifier and value should be a string
literal, a number literal, true or false. There can be multiple
name=value pairs.
-format format
use the named file format: Text, HTML, Markdown, CSS, JS or JSON.
-metrics
print metrics about execution time.
-S n
print the assembly code of the executed file to the standard error.
n determines the maximum length, in runes, of disassembled Text
instructions:
n > 0: at most n runes; leading and trailing white space are removed
n == 0: no text
n < 0: all text
Examples:
scriggo run index.html
scriggo run -const 'version=1.12 title="The ancient art of tea"' index.md
scriggo run -root . docs/article.html
scriggo run -format Markdown index
scriggo run -o ./public ./sources/index.html
`
const helpServe = `
usage: scriggo serve [-S n] [--metrics]
Serve runs a web server and serves the template rooted at the current
directory. It is useful to learn Scriggo templates.
It renders HTML and Markdown files based on file extension.
For example:
http://localhost:8080/article
it renders the file 'article.html' as HTML if exists, otherwise renders the
file 'article.md' as Markdown.
Serving a URL terminating with a slash:
http://localhost:8080/blog/
it renders 'blog/index.html' or 'blog/index.md'.
Markdown is converted to HTML with the Goldmark parser with the options
html.WithUnsafe, parser.WithAutoHeadingID and extension.GFM.
Templates are automatically rebuilt when a file changes.
The -S flag prints the assembly code of the served file and n determines the
maximum length, in runes, of disassembled Text instructions
n > 0: at most n runes; leading and trailing white space are removed
n == 0: no text
n < 0: all text
The --metrics flags prints metrics about execution time.
`
const helpScriggofile = `
A Scriggofile is a file with a specific format used by the scriggo command.
The scriggo command uses the instructions in a Scriggofile to initialize an
interpreter or a Go source file used in an application that embeds Scriggo.
A Scriggofile defines which packages an interpreted program and script can
import, what exported declarations in a package are accessible and so on.
The format of the Scriggofile is:
# A comment
INSTRUCTION arguments
A line starting with '#' is a comment, and the instructions are case
insensitive but for convention are written in uppercase (the syntax recalls
that used by Dockerfile).
A Scriggofile must be encoded as UTF-8 and it should be named 'Scriggofile'
or with the extension '.Scriggofile' as for 'example.Scriggofile'.
The instructions are:
IMPORT STANDARD LIBRARY
Makes the packages in the Go standard library (almost all) importable
in a program or script executed by the interpreter.
To view all packages imported run 'scriggo stdlib'.
IMPORT <package>
Make the package with path <package> importable.
IMPORT <package> INCLUDING <A> <B> <C>
As for 'IMPORT <package>' but only the exported names <A>, <B> and <C>
are imported.
IMPORT <package> EXCLUDING <A> <B> <C>
As for 'IMPORT <package>' but the exported names <A>, <B> and <C> are
not imported.
IMPORT <package> AS <as>
As for 'IMPORT <package>' but the path with which it can be imported
is named <as>. INCLUDING and EXCLUDING can be used as for the other
forms of IMPORT at the end of the instruction. Is not possible to use
a path <as> that would conflict with a Go standard library package path,
even if this latter is not imported in the Scriggofile.
IMPORT <package> AS main
Make the package with path <package> imported as the main package.
It is the same as writing 'import . "<package>"' in a Go program.
INCLUDING and EXCLUDING can be used as for the other forms of IMPORT at
the end of the instruction.
IMPORT <package> AS main NOT CAPITALIZED
As for 'IMPORT <package> AS main' but the exported names in the package
will be imported not capitalized. For example a name 'FooFoo' declared
in the package will be imported in the script as 'fooFoo'.
SET VARIABLE <name>
Set the name of the variable to witch is assigned the value of type
scriggo.PackageImporter with the packages to import. By default the
name is 'packages'. This instruction is only read by the 'import'
command.
SET PACKAGE <name>
Set the name of the package of the generated Go source file. By default
the name of the package is 'main'. This instruction is read only by the
command 'scriggo import'.
GOOS linux windows
Specifies the operating systems that will be supported by the built
interpreter. If the GOOS at the time the Scriggofile is parsed is not
listed in the GOOS instruction, the 'init' and 'import' commands
fail. If there is no GOOS instruction, all the operating systems are
supported.
To view possible GOOS values run 'go tool dist list'.
`
// helpLimitations is the help text for 'scriggo help limitations'.
const helpLimitations = `
Limitations
These limitations are features that Scriggo currently lacks but that are
under development. To check the state of a limitation please refer to the
Github issue linked in the list below.
* methods declarations (issue #458)
* interface types definition (issue #218)
* assigning to non-variables in 'for range' statements (issue #182)
* importing the "unsafe" package from Scriggo (issue #288)
* importing the "runtime" package from Scriggo (issue #524)
* labeled continue and break statements (issue #83)
* some kinds of pointer shorthands (issue #383)
* compilation of non-main packages without importing them (issue #521)
For a comprehensive list of not-yet-implemented features
see https://github.com/open2b/scriggo/labels/missing-feature.
Limitations due to maintain the interoperability with Go official compiler 'gc'
* types defined in Scriggo are not correctly seen by the 'reflect' package.
This manifests itself, for example, when calling the function
'fmt.Printf("%T", v)' where 'v' is a value with a Scriggo defined type.
The user expects to see the name of the type but 'fmt' (which internally
relies on the package 'reflect') prints the name of the type that wrapped
the value in 'v' before passing it to gc.
* unexported fields of struct types defined in Scriggo are still accessible
from native packages with the reflect methods. This is caused by the
reflect methods that does not allow, by design, to change the value of an
unexported field, so they are created with an empty package path. By the
way, such fields (that should be unexported) can not be changed without
the reflect and have a particular prefix to avoid accidental accessing.
* in a structure, types can be embedded but, apart from interfaces, if they
have methods then they must be the first field of the struct. This is a
limitation of the StructOf function of reflect.
See Go issue #15924 (https://github.com/golang/go/issues/15924).
* cannot define functions without a body (TODO)
* a select supports a maximum of 65536 cases.
* Native packages can be imported only if they have been precompiled into
the Scriggo interpreter/execution environment.
Also see the commands 'scriggo import' and 'scriggo init'.
* types are not garbage collected.
See Go issue #28783 (https://github.com/golang/go/issues/28783).
Arbitrary limitations
These limitations have been arbitrarily added to Scriggo to enhance
performances:
* 127 registers of a given type (integer, floating point, string or
general) per function
* 256 function literal declarations plus unique functions calls per
function
* 256 types available per function
* 256 unique native functions per function
* 16384 integer values per function
* 256 string values per function
* 16384 floating-point values per function
* 256 general values per function
`
package tree
import (
"bytes"
"errors"
"fmt"
"math"
"reflect"
"github.com/stefantds/go-epi-judge/data_structures/stack"
utils "github.com/stefantds/go-epi-judge/test_utils"
)
// TreeLike is the interface implemented by binary tree nodes: each node
// exposes its integer payload and its left and right children.
type TreeLike interface {
	GetData() int
	GetLeft() TreeLike
	GetRight() TreeLike
}
// isNil reports whether tree is nil, either as a nil interface value or as
// an interface holding a typed nil pointer.
func isNil(tree TreeLike) bool {
	if tree == nil {
		return true
	}
	return reflect.ValueOf(tree).IsNil()
}
// binaryTreeToString serializes tree into a level-order (BFS) textual form
// such as "[1, 2, 3, null, 4]". Nil children are buffered and only printed
// as "null" when a later non-nil node forces them to appear, so trailing
// nulls are omitted. It returns an error if the same node is reachable
// twice (a cycle).
func binaryTreeToString(tree TreeLike) (string, error) {
	var buf bytes.Buffer

	nodes := make([]TreeLike, 0)
	visited := make(map[TreeLike]bool)
	first := true
	nullNodesPending := 0

	fmt.Fprint(&buf, "[")
	nodes = append(nodes, tree)

	for currentIdx := 0; currentIdx < len(nodes); currentIdx++ {
		node := nodes[currentIdx]
		if _, found := visited[node]; found {
			return "", errors.New("detected a cycle in the tree")
		}
		// Guard against a nil interface value before calling reflect:
		// reflect.ValueOf(nil).IsNil() panics on the zero Value. A typed
		// nil pointer is still caught by IsNil.
		if node != nil && !reflect.ValueOf(node).IsNil() {
			if first {
				first = false
			} else {
				fmt.Fprint(&buf, ", ")
			}
			// Flush any nulls that precede this non-nil node.
			for nullNodesPending > 0 {
				fmt.Fprint(&buf, "null, ")
				nullNodesPending--
			}

			fmt.Fprintf(&buf, "%v", node.GetData())
			visited[node] = true

			nodes = append(nodes, node.GetLeft())
			nodes = append(nodes, node.GetRight())
		} else {
			nullNodesPending++
		}
	}

	fmt.Fprint(&buf, "]")
	return buf.String(), nil
}
// FindNode walks the tree with an explicit stack (preorder) and
// returns the first node whose data equals val, or nil when no such
// node exists.
func FindNode(startNode TreeLike, val int) TreeLike {
	pending := make(stack.Stack, 0)
	pending = pending.Push(startNode)
	for len(pending) > 0 {
		var popped interface{}
		pending, popped = pending.Pop()
		current := popped.(TreeLike)
		if isNil(current) {
			continue
		}
		if current.GetData() == val {
			return current
		}
		pending = pending.Push(current.GetLeft())
		pending = pending.Push(current.GetRight())
	}
	return nil
}
// MustFindNode behaves like FindNode but panics instead of returning
// nil when no node with the given value exists. Intended for test
// setup where a missing node is a programming error.
func MustFindNode(startNode TreeLike, val int) TreeLike {
	if n := FindNode(startNode, val); n != nil {
		return n
	}
	panic(fmt.Errorf("didn't find the node with value %d in the tree", val))
}
// GenerateInorder returns the values of tree in inorder (left, node,
// right) using an explicit stack instead of recursion. A nil tree
// yields an empty (non-nil) slice.
func GenerateInorder(tree TreeLike) []int {
	result := make([]int, 0)
	if tree == nil {
		return result
	}
	s := make(stack.Stack, 0)
	s = s.Push(tree)
	// initial marks the very first pop: the root was pushed only to
	// seed the left-descent below, so its value must not be emitted
	// on that first iteration.
	initial := true
	var node interface{}
	for !s.IsEmpty() {
		s, node = s.Pop()
		treeNode := node.(TreeLike)
		if initial {
			initial = false
		} else {
			// Every later pop is a node whose left subtree is fully
			// emitted: output it, then descend into its right subtree.
			result = append(result, treeNode.GetData())
			treeNode = treeNode.GetRight()
		}
		// Push the chain of left descendants; the deepest (smallest in
		// a BST) is popped and emitted first.
		for !isNil(treeNode) {
			s = s.Push(treeNode)
			treeNode = treeNode.GetLeft()
		}
	}
	return result
}
// TreePath records how a node was reached from the root, as a reversed
// linked list of left/right steps. The zero value represents the root.
type TreePath struct {
	prev *TreePath // path to the parent node; nil for the root
	toLeft bool // true if the final step went to the left child
}
// WithLeft returns a new path extending t by one step to the left child.
func (t *TreePath) WithLeft() *TreePath {
	return &TreePath{
		prev: t,
		toLeft: true,
	}
}
// WithRight returns a new path extending t by one step to the right child.
func (t *TreePath) WithRight() *TreePath {
	return &TreePath{
		prev: t,
		toLeft: false,
	}
}
// IntRange represents the inclusive interval [Low, High].
type IntRange struct {
	Low  int
	High int
}

// contains reports whether value falls inside the inclusive interval.
func (r *IntRange) contains(value int) bool {
	return value >= r.Low && value <= r.High
}

// limitFromBottom raises the lower bound to newLow when that narrows
// the interval; otherwise it returns r unchanged.
func (r *IntRange) limitFromBottom(newLow int) *IntRange {
	if newLow <= r.Low {
		return r
	}
	return &IntRange{Low: newLow, High: r.High}
}

// limitFromTop lowers the upper bound to newHigh when that narrows
// the interval; otherwise it returns r unchanged.
func (r *IntRange) limitFromTop(newHigh int) *IntRange {
	if newHigh >= r.High {
		return r
	}
	return &IntRange{Low: r.Low, High: newHigh}
}

// String implements fmt.Stringer for readable error messages.
func (r IntRange) String() string {
	return fmt.Sprintf("range between %d and %d", r.Low, r.High)
}
// AssertTreeIsBST verifies the binary-search-tree invariant on tree:
// every node's value lies within the bounds implied by its ancestors
// (the range check is inclusive, so duplicates are tolerated on either
// side). It returns nil when the invariant holds and a descriptive
// error for the first violating node found.
func AssertTreeIsBST(tree TreeLike) error {
	// treePathIntRange is one work item: a subtree, the path by which
	// it was reached (currently unused in the error message), and the
	// inclusive range its values must fall into.
	type treePathIntRange struct {
		Tree TreeLike
		Path *TreePath
		Range *IntRange
	}
	s := make(stack.Stack, 0)
	// NOTE(review): MinInt64/MaxInt64 stored in IntRange's int fields
	// assume a 64-bit platform — confirm if 32-bit builds matter.
	s = s.Push(treePathIntRange{
		Tree: tree,
		Path: &TreePath{},
		Range: &IntRange{
			math.MinInt64,
			math.MaxInt64,
		},
	})
	var n interface{}
	for !s.IsEmpty() {
		s, n = s.Pop()
		node := n.(treePathIntRange)
		if isNil(node.Tree) {
			continue
		}
		value := node.Tree.GetData()
		if !node.Range.contains(value) {
			return fmt.Errorf(
				"binary search tree constraints violation: expected value in %s; got %d",
				node.Range,
				value,
			)
		}
		// Left children may not exceed value; right children may not
		// fall below it.
		s = s.Push(treePathIntRange{
			Tree: node.Tree.GetLeft(),
			Path: node.Path.WithLeft(),
			Range: node.Range.limitFromTop(value),
		})
		s = s.Push(treePathIntRange{
			Tree: node.Tree.GetRight(),
			Path: node.Path.WithRight(),
			Range: node.Range.limitFromBottom(value),
		})
	}
	return nil
}
// BinaryTreeHeight returns the number of nodes on the longest
// root-to-leaf path of tree; an empty tree has height 0.
func BinaryTreeHeight(tree TreeLike) int {
	// frame pairs a subtree with its depth (root depth is 1).
	type frame struct {
		Tree   TreeLike
		Height int
	}
	pending := make(stack.Stack, 0)
	pending = pending.Push(frame{tree, 1})
	best := 0
	for !pending.IsEmpty() {
		var raw interface{}
		pending, raw = pending.Pop()
		f := raw.(frame)
		if isNil(f.Tree) {
			continue
		}
		best = utils.Max(best, f.Height)
		pending = pending.Push(frame{
			Tree:   f.Tree.GetLeft(),
			Height: f.Height + 1,
		})
		pending = pending.Push(frame{
			Tree:   f.Tree.GetRight(),
			Height: f.Height + 1,
		})
	}
	return best
}
// DeepCopy iterates through a TreeLike structure and creates a deep copy of it.
// It delegates the actual copy of a node to the given function. This way different types
// of (binary) trees can be copied.
func DeepCopy(src TreeLike, createNode func(data interface{}, left, right TreeLike) TreeLike) TreeLike {
if isNil(src) {
return nil
}
left := DeepCopy(src.GetLeft(), createNode)
right := DeepCopy(src.GetRight(), createNode)
return createNode(src.GetData(), left, right)
}
package datastructures
import "fmt"
// NewBinarySearchTree returns an empty binary search tree ready for use.
// NOTE(review): the return type is unexported, so external callers can
// only hold the value through inference — consider exporting the type.
func NewBinarySearchTree() binarySearchTree {
	return binarySearchTree{}
}
// Iterative Insert
//
// Insert adds entry to the tree without recursion. Duplicate entries
// are ignored, matching the behavior of the recursive InsertR.
func (bst *binarySearchTree) Insert(entry int) {
	tNode := bst.root
	var pNode *treeNode = nil
	for tNode != nil {
		if entry == tNode.entry {
			// BUG FIX: without this check a duplicate entry left tNode
			// unchanged (neither > nor < branch fired) and the loop
			// spun forever.
			return
		}
		pNode = tNode
		if entry > tNode.entry {
			tNode = tNode.rightNode
		} else {
			tNode = tNode.leftNode
		}
	}
	newTNode := &treeNode{entry: entry}
	if pNode == nil {
		// Empty tree: the new node becomes the root.
		bst.root = newTNode
	} else if entry < pNode.entry {
		pNode.leftNode = newTNode
	} else {
		pNode.rightNode = newTNode
	}
}
// Recursive Insert
//
// InsertR adds entry to the tree via the recursive helper insertR;
// duplicates are ignored.
func (bst *binarySearchTree) InsertR(entry int) {
	bst.root = bst.insertR(entry, bst.root)
}
// insertR inserts entry into the subtree rooted at ptreeNode and
// returns the (possibly newly created) subtree root. Entries already
// present are left untouched.
func (bst *binarySearchTree) insertR(entry int, ptreeNode *treeNode) *treeNode {
	if ptreeNode == nil {
		return &treeNode{entry: entry}
	}
	switch {
	case entry > ptreeNode.entry:
		ptreeNode.rightNode = bst.insertR(entry, ptreeNode.rightNode)
	case entry < ptreeNode.entry:
		ptreeNode.leftNode = bst.insertR(entry, ptreeNode.leftNode)
	}
	return ptreeNode
}
// Iterative Search
//
// Search reports whether entry is present, walking down from the root
// without recursion.
func (bst *binarySearchTree) Search(entry int) bool {
	for node := bst.root; node != nil; {
		switch {
		case entry > node.entry:
			node = node.rightNode
		case entry < node.entry:
			node = node.leftNode
		default:
			return true
		}
	}
	return false
}
// Recursive Search
//
// SearchR reports whether entry is present using the recursive helper
// searchR.
func (bst *binarySearchTree) SearchR(entry int) bool {
	return bst.searchR(entry, bst.root)
}
// searchR reports whether entry occurs in the subtree rooted at
// ptreeNode, descending right for larger and left for smaller values.
func (bst *binarySearchTree) searchR(entry int, ptreeNode *treeNode) bool {
	switch {
	case ptreeNode == nil:
		return false
	case entry > ptreeNode.entry:
		return bst.searchR(entry, ptreeNode.rightNode)
	case entry < ptreeNode.entry:
		return bst.searchR(entry, ptreeNode.leftNode)
	default:
		return true
	}
}
// Iterative Remove
//
// Remove deletes entry from the tree (if present) and rebinds the
// root, since removing the root node may change it.
func (bst *binarySearchTree) Remove(entry int) {
	bst.root = bst.remove(entry, bst.root)
}
// remove deletes entry from the subtree rooted at ptreeNode and
// returns the subtree's (possibly changed) root. When entry is absent
// the subtree is returned unchanged.
func (bst *binarySearchTree) remove(entry int, ptreeNode *treeNode) *treeNode {
	// Locate the node to delete, remembering its parent.
	currNode := ptreeNode
	var prevNode *treeNode = nil
	for currNode != nil && currNode.entry != entry {
		prevNode = currNode
		if entry > currNode.entry {
			currNode = currNode.rightNode
		} else {
			currNode = currNode.leftNode
		}
	}
	if currNode == nil {
		fmt.Println("\nEntry not in Tree")
		// BUG FIX: this previously returned nil, which made Remove
		// assign nil to bst.root and silently discard the entire tree
		// whenever a missing entry was removed. Return the unchanged
		// subtree root instead.
		return ptreeNode
	}
	if currNode.rightNode == nil || currNode.leftNode == nil {
		// Zero or one child: splice the (possibly nil) child into the
		// deleted node's place.
		var newCurr *treeNode
		if currNode.rightNode == nil {
			newCurr = currNode.leftNode
		} else {
			newCurr = currNode.rightNode
		}
		if prevNode == nil { // Node to be removed is the subtree root.
			return newCurr
		}
		if currNode == prevNode.leftNode {
			prevNode.leftNode = newCurr
		} else {
			prevNode.rightNode = newCurr
		}
	} else { // Node has 2 children
		// Replace currNode's entry with its in-order successor (the
		// leftmost node of the right subtree) and unlink the successor.
		var aux *treeNode = nil
		temp := currNode.rightNode
		for temp.leftNode != nil {
			aux = temp
			temp = temp.leftNode
		}
		if aux != nil {
			aux.leftNode = temp.rightNode
		} else {
			currNode.rightNode = temp.rightNode
		}
		currNode.entry = temp.entry
	}
	return ptreeNode
}
// Recursive Remove
//
// RemoveR deletes entry from the tree (if present) using the recursive
// helper removeR, rebinding the root which removal may change.
func (bst *binarySearchTree) RemoveR(entry int) {
	bst.root = bst.removeR(entry, bst.root)
}
// removeR deletes entry from the subtree rooted at ptreeNode and
// returns the new subtree root. A missing entry leaves the subtree
// unchanged.
func (bst *binarySearchTree) removeR(entry int, ptreeNode *treeNode) *treeNode {
	if ptreeNode == nil {
		return ptreeNode
	}
	if entry > ptreeNode.entry {
		ptreeNode.rightNode = bst.removeR(entry, ptreeNode.rightNode)
	} else if entry < ptreeNode.entry {
		ptreeNode.leftNode = bst.removeR(entry, ptreeNode.leftNode)
	} else {
		if ptreeNode.rightNode == nil {
			if ptreeNode.leftNode == nil { // Node has 0 children
				return nil
			}
			return ptreeNode.leftNode
		} else if ptreeNode.leftNode == nil {
			return ptreeNode.rightNode
		}
		// Node has 2 children: overwrite with the in-order successor
		// (minimum of the right subtree), then delete that successor
		// from the right subtree.
		ptreeNode.entry = bst.min(ptreeNode.rightNode)
		ptreeNode.rightNode = bst.removeR(ptreeNode.entry, ptreeNode.rightNode)
	}
	return ptreeNode
}
// Clear removes all entries by dropping the root; the detached nodes
// are reclaimed by the garbage collector.
func (bst *binarySearchTree) Clear() {
	bst.root = nil
}
// Empty reports whether the tree contains no entries.
func (bst *binarySearchTree) Empty() bool {
	return bst.root == nil
}
// Size returns the number of entries stored in the tree.
func (bst *binarySearchTree) Size() int {
	if bst.Empty() {
		return 0
	}
	return bst.size(bst.root)
}
// size counts the nodes of the non-nil subtree rooted at ptreeNode.
func (bst *binarySearchTree) size(ptreeNode *treeNode) int {
	total := 1 // count this node
	if ptreeNode.leftNode != nil {
		total += bst.size(ptreeNode.leftNode)
	}
	if ptreeNode.rightNode != nil {
		total += bst.size(ptreeNode.rightNode)
	}
	return total
}
// InOrder prints the tree's entries in ascending order to stdout, or a
// placeholder message when the tree is empty.
func (bst *binarySearchTree) InOrder() {
	if bst.Empty() {
		fmt.Println("\nEmpty Tree")
		return
	}
	fmt.Print("\nBinary Search Tree In Order : ")
	inOrder(bst.root)
}
// inOrder prints the subtree rooted at tNode via an in-order (left,
// node, right) traversal, one space after each entry.
func inOrder(tNode *treeNode) {
	if tNode == nil {
		return
	}
	inOrder(tNode.leftNode)
	fmt.Printf("%d ", tNode.entry)
	inOrder(tNode.rightNode)
}
// min returns the smallest entry of the non-nil subtree rooted at
// ptreeNode, i.e. the entry of its leftmost node.
func (bst *binarySearchTree) min(ptreeNode *treeNode) int {
	node := ptreeNode
	for node.leftNode != nil {
		node = node.leftNode
	}
	return node.entry
}
// treeNode is a single node of the binary search tree.
type treeNode struct {
	entry     int
	leftNode  *treeNode
	rightNode *treeNode
}

// binarySearchTree stores int entries with smaller values in the left
// subtree and larger values in the right subtree of each node. The
// zero value is an empty, ready-to-use tree.
// (The original closing brace had extraction-metadata junk fused onto
// it, which broke compilation; removed here.)
type binarySearchTree struct {
	root *treeNode
}
package clang
// #include "go-clang.h"
import "C"
import (
"fmt"
"reflect"
"unsafe"
)
/**
* \brief A semantic string that describes a code-completion result.
*
* A semantic string that describes the formatting of a code-completion
* result as a single "template" of text that should be inserted into the
* source buffer when a particular code-completion result is selected.
* Each semantic string is made up of some number of "chunks", each of which
* contains some text along with a description of what that text means, e.g.,
* the name of the entity being referenced, whether the text chunk is part of
* the template, or whether it is a "placeholder" that the user should replace
* with actual code,of a specific kind. See \c CXCompletionChunkKind for a
* description of the different kinds of chunks.
*/
type CompletionString struct {
c C.CXCompletionString
}
/**
* \brief Determine the priority of this code completion.
*
* The priority of a code completion indicates how likely it is that this
* particular completion is the completion that the user will select. The
* priority is selected by various internal heuristics.
*
* \param completion_string The completion string to query.
*
* \returns The priority of this completion string. Smaller values indicate
* higher-priority (more likely) completions.
*/
func (cs CompletionString) Priority() int {
return int(C.clang_getCompletionPriority(cs.c))
}
/**
* \brief Determine the availability of the entity that this code-completion
* string refers to.
*
* \param completion_string The completion string to query.
*
* \returns The availability of the completion string.
*/
func (cs CompletionString) Availability() AvailabilityKind {
return AvailabilityKind(C.clang_getCompletionAvailability(cs.c))
}
/**
* \brief Retrieve the number of annotations associated with the given
* completion string.
*
* \param completion_string the completion string to query.
*
* \returns the number of annotations associated with the given completion
* string.
*/
func (cs CompletionString) NumAnnotations() int {
return int(C.clang_getCompletionNumAnnotations(cs.c))
}
/**
* \brief Retrieve the annotation associated with the given completion string.
*
* \param completion_string the completion string to query.
*
* \param annotation_number the 0-based index of the annotation of the
* completion string.
*
* \returns annotation string associated with the completion at index
* \c annotation_number, or a NULL string if that annotation is not available.
*/
func (cs CompletionString) Annotation(i int) string {
cx := cxstring{C.clang_getCompletionAnnotation(cs.c, C.uint(i))}
defer cx.Dispose()
return cx.String()
}
/**
* \brief Retrieve the parent context of the given completion string.
*
* The parent context of a completion string is the semantic parent of
* the declaration (if any) that the code completion represents. For example,
* a code completion for an Objective-C method would have the method's class
* or protocol as its context.
*
* \param completion_string The code completion string whose parent is
* being queried.
*
* \param kind DEPRECATED: always set to CXCursor_NotImplemented if non-NULL.
*
* \returns The name of the completion parent, e.g., "NSObject" if
* the completion string represents a method in the NSObject class.
*/
func (cs CompletionString) CompletionParent() string {
o := cxstring{C.clang_getCompletionParent(cs.c, nil)}
defer o.Dispose()
return o.String()
}
/**
* \brief Retrieve the brief documentation comment attached to the declaration
* that corresponds to the given completion string.
*/
func (cs CompletionString) CompletionBriefComment() string {
o := cxstring{C.clang_getCompletionBriefComment(cs.c)}
defer o.Dispose()
return o.String()
}
/**
 * \brief Retrieve the chunks that make up the given completion string.
 *
 * \returns one CompletionChunk per chunk of the completion string, in
 * order. Each chunk only stores the string handle and its index; the
 * actual text and kind are queried from libclang lazily via the
 * chunk's Text and Kind methods.
 */
func (cs CompletionString) Chunks() (ret []CompletionChunk) {
	ret = make([]CompletionChunk, C.clang_getNumCompletionChunks(cs.c))
	for i := range ret {
		ret[i].cs = cs.c
		ret[i].number = C.uint(i)
	}
	return
}
type CompletionChunk struct {
cs C.CXCompletionString
number C.uint
}
func (cc CompletionChunk) String() string {
return fmt.Sprintf("%s %s", cc.Kind(), cc.Text())
}
/**
* \brief Retrieve the text associated with a particular chunk within a
* completion string.
*
* \param completion_string the completion string to query.
*
* \param chunk_number the 0-based index of the chunk in the completion string.
*
* \returns the text associated with the chunk at index \c chunk_number.
*/
func (cc CompletionChunk) Text() string {
cx := cxstring{C.clang_getCompletionChunkText(cc.cs, cc.number)}
defer cx.Dispose()
return cx.String()
}
/**
 * \brief Determine the kind of this chunk within its completion string.
 *
 * \returns the kind of the chunk at this chunk's index.
 */
// Receiver renamed cs -> cc for consistency with the other
// CompletionChunk methods (Text, String); "cs" is the receiver name
// used by CompletionString methods in this file.
func (cc CompletionChunk) Kind() CompletionChunkKind {
	return CompletionChunkKind(C.clang_getCompletionChunkKind(cc.cs, cc.number))
}
/**
* \brief A single result of code completion.
*/
type CompletionResult struct {
/**
* \brief The kind of entity that this completion refers to.
*
* The cursor kind will be a macro, keyword, or a declaration (one of the
* *Decl cursor kinds), describing the entity that the completion is
* referring to.
*
* \todo In the future, we would like to provide a full cursor, to allow
* the client to extract additional information from declaration.
*/
CursorKind CursorKind
/**
* \brief The code-completion string that describes how to insert this
* code-completion result into the editing buffer.
*/
CompletionString CompletionString
}
/**
* \brief Describes a single piece of text within a code-completion string.
*
* Each "chunk" within a code-completion string (\c CXCompletionString) is
* either a piece of text with a specific "kind" that describes how that text
* should be interpreted by the client or is another completion string.
*/
type CompletionChunkKind int
const (
/**
* \brief A code-completion string that describes "optional" text that
* could be a part of the template (but is not required).
*
* The Optional chunk is the only kind of chunk that has a code-completion
* string for its representation, which is accessible via
* \c clang_getCompletionChunkCompletionString(). The code-completion string
* describes an additional part of the template that is completely optional.
* For example, optional chunks can be used to describe the placeholders for
* arguments that match up with defaulted function parameters, e.g. given:
*
* \code
* void f(int x, float y = 3.14, double z = 2.71828);
* \endcode
*
* The code-completion string for this function would contain:
* - a TypedText chunk for "f".
* - a LeftParen chunk for "(".
* - a Placeholder chunk for "int x"
* - an Optional chunk containing the remaining defaulted arguments, e.g.,
* - a Comma chunk for ","
* - a Placeholder chunk for "float y"
* - an Optional chunk containing the last defaulted argument:
* - a Comma chunk for ","
* - a Placeholder chunk for "double z"
* - a RightParen chunk for ")"
*
* There are many ways to handle Optional chunks. Two simple approaches are:
* - Completely ignore optional chunks, in which case the template for the
* function "f" would only include the first parameter ("int x").
* - Fully expand all optional chunks, in which case the template for the
* function "f" would have all of the parameters.
*/
CompletionChunk_Optional CompletionChunkKind = C.CXCompletionChunk_Optional
/**
* \brief Text that a user would be expected to type to get this
* code-completion result.
*
* There will be exactly one "typed text" chunk in a semantic string, which
* will typically provide the spelling of a keyword or the name of a
* declaration that could be used at the current code point. Clients are
* expected to filter the code-completion results based on the text in this
* chunk.
*/
CompletionChunk_TypedText CompletionChunkKind = C.CXCompletionChunk_TypedText
/**
* \brief Text that should be inserted as part of a code-completion result.
*
* A "text" chunk represents text that is part of the template to be
* inserted into user code should this particular code-completion result
* be selected.
*/
CompletionChunk_Text CompletionChunkKind = C.CXCompletionChunk_Text
/**
* \brief Placeholder text that should be replaced by the user.
*
* A "placeholder" chunk marks a place where the user should insert text
* into the code-completion template. For example, placeholders might mark
* the function parameters for a function declaration, to indicate that the
* user should provide arguments for each of those parameters. The actual
* text in a placeholder is a suggestion for the text to display before
* the user replaces the placeholder with real code.
*/
CompletionChunk_Placeholder CompletionChunkKind = C.CXCompletionChunk_Placeholder
/**
* \brief Informative text that should be displayed but never inserted as
* part of the template.
*
* An "informative" chunk contains annotations that can be displayed to
* help the user decide whether a particular code-completion result is the
* right option, but which is not part of the actual template to be inserted
* by code completion.
*/
CompletionChunk_Informative CompletionChunkKind = C.CXCompletionChunk_Informative
/**
* \brief Text that describes the current parameter when code-completion is
* referring to function call, message send, or template specialization.
*
* A "current parameter" chunk occurs when code-completion is providing
* information about a parameter corresponding to the argument at the
* code-completion point. For example, given a function
*
* \code
* int add(int x, int y);
* \endcode
*
* and the source code \c add(, where the code-completion point is after the
* "(", the code-completion string will contain a "current parameter" chunk
* for "int x", indicating that the current argument will initialize that
* parameter. After typing further, to \c add(17, (where the code-completion
* point is after the ","), the code-completion string will contain a
* "current paremeter" chunk to "int y".
*/
CompletionChunk_CurrentParameter CompletionChunkKind = C.CXCompletionChunk_CurrentParameter
/**
* \brief A left parenthesis ('('), used to initiate a function call or
* signal the beginning of a function parameter list.
*/
CompletionChunk_LeftParen CompletionChunkKind = C.CXCompletionChunk_LeftParen
/**
* \brief A right parenthesis (')'), used to finish a function call or
* signal the end of a function parameter list.
*/
CompletionChunk_RightParen CompletionChunkKind = C.CXCompletionChunk_RightParen
/**
* \brief A left bracket ('[').
*/
CompletionChunk_LeftBracket CompletionChunkKind = C.CXCompletionChunk_LeftBracket
/**
* \brief A right bracket (']').
*/
CompletionChunk_RightBracket CompletionChunkKind = C.CXCompletionChunk_RightBracket
/**
* \brief A left brace ('{').
*/
CompletionChunk_LeftBrace CompletionChunkKind = C.CXCompletionChunk_LeftBrace
/**
* \brief A right brace ('}').
*/
CompletionChunk_RightBrace CompletionChunkKind = C.CXCompletionChunk_RightBrace
/**
* \brief A left angle bracket ('<').
*/
CompletionChunk_LeftAngle CompletionChunkKind = C.CXCompletionChunk_LeftAngle
/**
* \brief A right angle bracket ('>').
*/
CompletionChunk_RightAngle CompletionChunkKind = C.CXCompletionChunk_RightAngle
/**
* \brief A comma separator (',').
*/
CompletionChunk_Comma CompletionChunkKind = C.CXCompletionChunk_Comma
/**
* \brief Text that specifies the result type of a given result.
*
* This special kind of informative chunk is not meant to be inserted into
* the text buffer. Rather, it is meant to illustrate the type that an
* expression using the given completion string would have.
*/
CompletionChunk_ResultType CompletionChunkKind = C.CXCompletionChunk_ResultType
/**
* \brief A colon (':').
*/
CompletionChunk_Colon CompletionChunkKind = C.CXCompletionChunk_Colon
/**
* \brief A semicolon (';').
*/
CompletionChunk_SemiColon CompletionChunkKind = C.CXCompletionChunk_SemiColon
/**
* \brief An '=' sign.
*/
CompletionChunk_Equal CompletionChunkKind = C.CXCompletionChunk_Equal
/**
* Horizontal space (' ').
*/
CompletionChunk_HorizontalSpace CompletionChunkKind = C.CXCompletionChunk_HorizontalSpace
/**
* Vertical space ('\n'), after which it is generally a good idea to
* perform indentation.
*/
CompletionChunk_VerticalSpace CompletionChunkKind = C.CXCompletionChunk_VerticalSpace
)
// completionChunkKindNames maps each chunk kind to its display name.
var completionChunkKindNames = map[CompletionChunkKind]string{
	CompletionChunk_Optional:         "Optional",
	CompletionChunk_TypedText:        "TypedText",
	CompletionChunk_Text:             "Text",
	CompletionChunk_Placeholder:      "Placeholder",
	CompletionChunk_Informative:      "Informative",
	CompletionChunk_CurrentParameter: "CurrentParameter",
	CompletionChunk_LeftParen:        "LeftParen",
	CompletionChunk_RightParen:       "RightParen",
	CompletionChunk_LeftBracket:      "LeftBracket",
	CompletionChunk_RightBracket:     "RightBracket",
	CompletionChunk_LeftBrace:        "LeftBrace",
	CompletionChunk_RightBrace:       "RightBrace",
	CompletionChunk_LeftAngle:        "LeftAngle",
	CompletionChunk_RightAngle:       "RightAngle",
	CompletionChunk_Comma:            "Comma",
	CompletionChunk_ResultType:       "ResultType",
	CompletionChunk_Colon:            "Colon",
	CompletionChunk_SemiColon:        "SemiColon",
	CompletionChunk_Equal:            "Equal",
	CompletionChunk_HorizontalSpace:  "HorizontalSpace",
	CompletionChunk_VerticalSpace:    "VerticalSpace",
}

// String returns a human-readable name for the chunk kind, or
// "Invalid" for values outside the known enumeration.
func (cck CompletionChunkKind) String() string {
	if name, ok := completionChunkKindNames[cck]; ok {
		return name
	}
	return "Invalid"
}
/**
* \brief Contains the results of code-completion.
*
* This data structure contains the results of code completion, as
* produced by \c clang_codeCompleteAt(). Its contents must be freed by
* \c clang_disposeCodeCompleteResults.
*/
type CodeCompleteResults struct {
c *C.CXCodeCompleteResults
}
func (ccr CodeCompleteResults) IsValid() bool {
return ccr.c != nil
}
// TODO(): is there a better way to handle this?
//
// Results returns the completion results as a Go slice that aliases
// the C-owned array without copying. The slice stays valid only until
// Dispose is called on ccr.
//
// NOTE(review): constructing a slice through reflect.SliceHeader on a
// local variable is not a sanctioned use of package unsafe (the header
// fields do not keep the data reachable). On Go 1.17+ prefer
// unsafe.Slice — confirm the module's minimum Go version first.
func (ccr CodeCompleteResults) Results() (ret []CompletionResult) {
	header := (*reflect.SliceHeader)((unsafe.Pointer(&ret)))
	header.Cap = int(ccr.c.NumResults)
	header.Len = int(ccr.c.NumResults)
	header.Data = uintptr(unsafe.Pointer(ccr.c.Results))
	return
}
/**
* \brief Sort the code-completion results in case-insensitive alphabetical
* order.
*
* \param Results The set of results to sort.
* \param NumResults The number of results in \p Results.
*/
func (ccr CodeCompleteResults) Sort() {
C.clang_sortCodeCompletionResults(ccr.c.Results, ccr.c.NumResults)
}
/**
* \brief Free the given set of code-completion results.
*/
func (ccr CodeCompleteResults) Dispose() {
C.clang_disposeCodeCompleteResults(ccr.c)
}
/**
* \brief Retrieve a diagnostic associated with the given code completion.
*
* \param Results the code completion results to query.
* \param Index the zero-based diagnostic number to retrieve.
*
* \returns the requested diagnostic. This diagnostic must be freed
* via a call to \c clang_disposeDiagnostic().
*/
func (ccr CodeCompleteResults) Diagnostics() (ret Diagnostics) {
ret = make(Diagnostics, C.clang_codeCompleteGetNumDiagnostics(ccr.c))
for i := range ret {
ret[i].c = C.clang_codeCompleteGetDiagnostic(ccr.c, C.uint(i))
}
return
}
/**
 * \brief Flags that can be passed to \c clang_codeCompleteAt() to
 * modify its behavior.
 *
 * The enumerators in this enumeration can be bitwise-OR'd together to
 * provide multiple options to \c clang_codeCompleteAt().
 */
type CodeCompleteFlags int

// All three constants are explicitly typed as CodeCompleteFlags; the
// latter two previously omitted the type and therefore took the type
// of the underlying cgo constant, which made OR-combining them with
// the first flag awkward and inconsistent.
const (
	/**
	 * \brief Whether to include macros within the set of code
	 * completions returned.
	 */
	CodeCompleteFlags_IncludeMacros CodeCompleteFlags = C.CXCodeComplete_IncludeMacros

	/**
	 * \brief Whether to include code patterns for language constructs
	 * within the set of code completions, e.g., for loops.
	 */
	CodeCompleteFlags_IncludeCodePatterns CodeCompleteFlags = C.CXCodeComplete_IncludeCodePatterns

	/**
	 * \brief Whether to include brief documentation within the set of code
	 * completions returned.
	 */
	CodeCompleteFlags_IncludeBriefComments CodeCompleteFlags = C.CXCodeComplete_IncludeBriefComments
)
/**
* \brief Bits that represent the context under which completion is occurring.
*
* The enumerators in this enumeration may be bitwise-OR'd together if multiple
* contexts are occurring simultaneously.
*/
type CompletionContext int
const (
/**
* \brief The context for completions is unexposed, as only Clang results
* should be included. (This is equivalent to having no context bits set.)
*/
CompletionContext_Unexposed CompletionContext = C.CXCompletionContext_Unexposed
/**
* \brief Completions for any possible type should be included in the results.
*/
CompletionContext_AnyType CompletionContext = C.CXCompletionContext_AnyType
/**
* \brief Completions for any possible value (variables, function calls, etc.)
* should be included in the results.
*/
CompletionContext_AnyValue CompletionContext = C.CXCompletionContext_AnyValue
/**
* \brief Completions for values that resolve to an Objective-C object should
* be included in the results.
*/
CompletionContext_ObjCObjectValue CompletionContext = C.CXCompletionContext_ObjCObjectValue
/**
* \brief Completions for values that resolve to an Objective-C selector
* should be included in the results.
*/
CompletionContext_ObjCSelectorValue CompletionContext = C.CXCompletionContext_ObjCSelectorValue
/**
* \brief Completions for values that resolve to a C++ class type should be
* included in the results.
*/
CompletionContext_CXXClassTypeValue CompletionContext = C.CXCompletionContext_CXXClassTypeValue
/**
* \brief Completions for fields of the member being accessed using the dot
* operator should be included in the results.
*/
CompletionContext_DotMemberAccess CompletionContext = C.CXCompletionContext_DotMemberAccess
/**
* \brief Completions for fields of the member being accessed using the arrow
* operator should be included in the results.
*/
CompletionContext_ArrowMemberAccess CompletionContext = C.CXCompletionContext_ArrowMemberAccess
/**
* \brief Completions for properties of the Objective-C object being accessed
* using the dot operator should be included in the results.
*/
CompletionContext_ObjCPropertyAccess CompletionContext = C.CXCompletionContext_ObjCPropertyAccess
/**
* \brief Completions for enum tags should be included in the results.
*/
CompletionContext_EnumTag CompletionContext = C.CXCompletionContext_EnumTag
/**
* \brief Completions for union tags should be included in the results.
*/
CompletionContext_UnionTag CompletionContext = C.CXCompletionContext_UnionTag
/**
* \brief Completions for struct tags should be included in the results.
*/
CompletionContext_StructTag CompletionContext = C.CXCompletionContext_StructTag
/**
* \brief Completions for C++ class names should be included in the results.
*/
CompletionContext_ClassTag CompletionContext = C.CXCompletionContext_ClassTag
/**
* \brief Completions for C++ namespaces and namespace aliases should be
* included in the results.
*/
CompletionContext_Namespace CompletionContext = C.CXCompletionContext_Namespace
/**
* \brief Completions for C++ nested name specifiers should be included in
* the results.
*/
CompletionContext_NestedNameSpecifier CompletionContext = C.CXCompletionContext_NestedNameSpecifier
/**
* \brief Completions for Objective-C interfaces (classes) should be included
* in the results.
*/
CompletionContext_ObjCInterface CompletionContext = C.CXCompletionContext_ObjCInterface
/**
* \brief Completions for Objective-C protocols should be included in
* the results.
*/
CompletionContext_ObjCProtocol CompletionContext = C.CXCompletionContext_ObjCProtocol
/**
* \brief Completions for Objective-C categories should be included in
* the results.
*/
CompletionContext_ObjCCategory CompletionContext = C.CXCompletionContext_ObjCCategory
/**
* \brief Completions for Objective-C instance messages should be included
* in the results.
*/
CompletionContext_ObjCInstanceMessage CompletionContext = C.CXCompletionContext_ObjCInstanceMessage
/**
* \brief Completions for Objective-C class messages should be included in
* the results.
*/
CompletionContext_ObjCClassMessage CompletionContext = C.CXCompletionContext_ObjCClassMessage
/**
* \brief Completions for Objective-C selector names should be included in
* the results.
*/
CompletionContext_ObjCSelectorName CompletionContext = C.CXCompletionContext_ObjCSelectorName
/**
* \brief Completions for preprocessor macro names should be included in
* the results.
*/
CompletionContext_MacroName CompletionContext = C.CXCompletionContext_MacroName
/**
* \brief Natural language completions should be included in the results.
*/
CompletionContext_NaturalLanguage CompletionContext = C.CXCompletionContext_NaturalLanguage
/**
* \brief The current context is unknown, so set all contexts.
*/
CompletionContext_Unknown CompletionContext = C.CXCompletionContext_Unknown
)
package rule
import (
"errors"
"fmt"
"go/token"
"strconv"
"hash/fnv"
)
// An Expr is a logical expression that can be evaluated to a value.
type Expr interface {
	// Eval evaluates the expression against the given parameters and
	// returns the resulting value, or an error if evaluation fails.
	Eval(Params) (*Value, error)
}
// A Params is a set of parameters passed on rule evaluation.
// It provides type safe methods to query params.
type Params interface {
	GetString(key string) (string, error)
	GetBool(key string) (bool, error)
	GetInt64(key string) (int64, error)
	GetFloat64(key string) (float64, error)
	// Keys returns the names of all available parameters.
	Keys() []string
	// EncodeValue returns the string encoding of the parameter stored
	// under key.
	EncodeValue(key string) (string, error)
}
// exprNot is the logical NOT operator over a single boolean operand.
type exprNot struct {
	operator
}

// Not creates an expression that evaluates the given operand e and returns its opposite.
// e must evaluate to a boolean.
func Not(e Expr) Expr {
	return &exprNot{
		operator: operator{
			kind:     "not",
			operands: []Expr{e},
		},
	}
}

// Eval evaluates the single operand and returns its boolean negation.
// It fails when the operand is missing, errors out, or is not a boolean.
func (n *exprNot) Eval(params Params) (*Value, error) {
	if len(n.operands) < 1 {
		return nil, errors.New("invalid number of operands in Not func")
	}
	v, err := n.operands[0].Eval(params)
	if err != nil {
		return nil, err
	}
	if v.Type != "bool" {
		return nil, errors.New("invalid operand type for Not func")
	}
	return BoolValue(!v.Equal(BoolValue(true))), nil
}
// exprOr is the logical OR operator over two or more boolean operands.
type exprOr struct {
	operator
}

// Or creates an expression that takes at least two operands and evaluates to true if one of the operands evaluates to true.
// All the given operands must evaluate to a boolean.
func Or(v1, v2 Expr, vN ...Expr) Expr {
	return &exprOr{
		operator: operator{
			kind:     "or",
			operands: append([]Expr{v1, v2}, vN...),
		},
	}
}

// Eval evaluates operands left to right and short-circuits on the
// first true value; every operand that is evaluated must be a boolean.
// (The original handled operand 0 separately and operands 1..n in a
// loop; this folds both into one loop with identical semantics.)
func (n *exprOr) Eval(params Params) (*Value, error) {
	if len(n.operands) < 2 {
		return nil, errors.New("invalid number of operands in Or func")
	}
	for _, op := range n.operands {
		v, err := op.Eval(params)
		if err != nil {
			return nil, err
		}
		if v.Type != "bool" {
			return nil, errors.New("invalid operand type for Or func")
		}
		if v.Equal(BoolValue(true)) {
			return v, nil
		}
	}
	return BoolValue(false), nil
}
type exprAnd struct {
operator
}
// And creates an expression that takes at least two operands and evaluates to true if all the operands evaluate to true.
// All the given operands must evaluate to a boolean.
func And(v1, v2 Expr, vN ...Expr) Expr {
return &exprAnd{
operator: operator{
kind: "and",
operands: append([]Expr{v1, v2}, vN...),
},
}
}
func (n *exprAnd) Eval(params Params) (*Value, error) {
if len(n.operands) < 2 {
return nil, errors.New("invalid number of operands in And func")
}
opA := n.operands[0]
vA, err := opA.Eval(params)
if err != nil {
return nil, err
}
if vA.Type != "bool" {
return nil, errors.New("invalid operand type for And func")
}
if vA.Equal(BoolValue(false)) {
return vA, nil
}
for i := 1; i < len(n.operands); i++ {
vB, err := n.operands[i].Eval(params)
if err != nil {
return nil, err
}
if vB.Type != "bool" {
return nil, errors.New("invalid operand type for And func")
}
if vB.Equal(BoolValue(false)) {
return vB, nil
}
}
return BoolValue(true), nil
}
type exprEq struct {
operator
}
// Eq creates an expression that takes at least two operands and evaluates to true if all the operands are equal.
func Eq(v1, v2 Expr, vN ...Expr) Expr {
return &exprEq{
operator: operator{
kind: "eq",
operands: append([]Expr{v1, v2}, vN...),
},
}
}
func (n *exprEq) Eval(params Params) (*Value, error) {
if len(n.operands) < 2 {
return nil, errors.New("invalid number of operands in Eq func")
}
opA := n.operands[0]
vA, err := opA.Eval(params)
if err != nil {
return nil, err
}
for i := 1; i < len(n.operands); i++ {
vB, err := n.operands[i].Eval(params)
if err != nil {
return nil, err
}
if !vA.Equal(vB) {
return BoolValue(false), nil
}
}
return BoolValue(true), nil
}
type exprIn struct {
operator
}
// In creates an expression that takes at least two operands and evaluates to true if the first one is equal to one of the others.
func In(v, e1 Expr, eN ...Expr) Expr {
return &exprIn{
operator: operator{
kind: "in",
operands: append([]Expr{v, e1}, eN...),
},
}
}
func (n *exprIn) Eval(params Params) (*Value, error) {
if len(n.operands) < 2 {
return nil, errors.New("invalid number of operands in In func")
}
toFind := n.operands[0]
vA, err := toFind.Eval(params)
if err != nil {
return nil, err
}
for i := 1; i < len(n.operands); i++ {
vB, err := n.operands[i].Eval(params)
if err != nil {
return nil, err
}
if vA.Equal(vB) {
return BoolValue(true), nil
}
}
return BoolValue(false), nil
}
type exprGT struct {
operator
}
// GT creates an expression that takes at least two operands and
// evaluates to true if each successive operand has a higher value than
// the next.
func GT(v1, v2 Expr, vN ...Expr) Expr {
return &exprGT{
operator: operator{
kind: "gt",
operands: append([]Expr{v1, v2}, vN...),
},
}
}
func (n *exprGT) Eval(params Params) (*Value, error) {
if len(n.operands) < 2 {
return nil, errors.New("invalid number of operands in GT func")
}
vA, err := n.operands[0].Eval(params)
if err != nil {
return nil, err
}
for i := 1; i < len(n.operands); i++ {
vB, err := n.operands[i].Eval(params)
if err != nil {
return nil, err
}
res, err := vA.GT(vB)
if err != nil {
return nil, err
}
if !res {
return BoolValue(false), nil
}
}
return BoolValue(true), nil
}
type exprGTE struct {
operator
}
// GTE creates an expression that takes at least two operands and
// evaluates to true if each successive operand has a greater or equal value
// compared to the next.
func GTE(v1, v2 Expr, vN ...Expr) Expr {
return &exprGTE{
operator: operator{
kind: "gte",
operands: append([]Expr{v1, v2}, vN...),
},
}
}
func (n *exprGTE) Eval(params Params) (*Value, error) {
if len(n.operands) < 2 {
return nil, errors.New("invalid number of operands in GTE func")
}
vA, err := n.operands[0].Eval(params)
if err != nil {
return nil, err
}
for i := 1; i < len(n.operands); i++ {
vB, err := n.operands[i].Eval(params)
if err != nil {
return nil, err
}
res, err := vA.GTE(vB)
if err != nil {
return nil, err
}
if !res {
return BoolValue(false), nil
}
}
return BoolValue(true), nil
}
type exprLT struct {
operator
}
// LT creates an expression that takes at least two operands and
// evaluates to true if each successive operand has a lower value
// compared to the next.
func LT(v1, v2 Expr, vN ...Expr) Expr {
return &exprLT{
operator: operator{
kind: "lt",
operands: append([]Expr{v1, v2}, vN...),
},
}
}
func (n *exprLT) Eval(params Params) (*Value, error) {
if len(n.operands) < 2 {
return nil, errors.New("invalid number of operands in LT func")
}
vA, err := n.operands[0].Eval(params)
if err != nil {
return nil, err
}
for i := 1; i < len(n.operands); i++ {
vB, err := n.operands[i].Eval(params)
if err != nil {
return nil, err
}
res, err := vA.LT(vB)
if err != nil {
return nil, err
}
if !res {
return BoolValue(false), nil
}
}
return BoolValue(true), nil
}
type exprLTE struct {
operator
}
// LTE creates an expression that takes at least two operands and
// evaluates to true if each successive operand has a lower or equal value
// compared to the next.
func LTE(v1, v2 Expr, vN ...Expr) Expr {
return &exprLTE{
operator: operator{
kind: "lte",
operands: append([]Expr{v1, v2}, vN...),
},
}
}
func (n *exprLTE) Eval(params Params) (*Value, error) {
if len(n.operands) < 2 {
return nil, errors.New("invalid number of operands in LTE func")
}
vA, err := n.operands[0].Eval(params)
if err != nil {
return nil, err
}
for i := 1; i < len(n.operands); i++ {
vB, err := n.operands[i].Eval(params)
if err != nil {
return nil, err
}
res, err := vA.LTE(vB)
if err != nil {
return nil, err
}
if !res {
return BoolValue(false), nil
}
}
return BoolValue(true), nil
}
type exprFNV struct {
operator
}
// FNV returns an Integer hash of any value it is provided. It uses
// the Fowler-Noll-Vo non-cryptographic hash function.
func FNV(v Expr) Expr {
return &exprFNV{
operator: operator{
kind: "fnv",
operands: []Expr{v},
},
}
}
func (n *exprFNV) Eval(params Params) (*Value, error) {
if len(n.operands) != 1 {
return nil, errors.New("invalid number of operands in FNV func")
}
h32 := fnv.New32()
op := n.operands[0]
v, err := op.Eval(params)
if err != nil {
return nil, err
}
_, err = h32.Write([]byte(v.Data))
if err != nil {
return nil, err
}
return Int64Value(int64(h32.Sum32())), nil
}
type exprPercentile struct {
operator
}
// Percentile indicates whether the provided value is within a given
// percentile of the group of all such values. It is intended to be
// used to assign values to groups for experimentation.
func Percentile(v, p Expr) Expr {
return &exprPercentile{
operator: operator{
kind: "percentile",
operands: []Expr{v, p},
},
}
}
func (n *exprPercentile) Eval(params Params) (*Value, error) {
if len(n.operands) != 2 {
return nil, errors.New("invalid number of operands in Percentile func")
}
hash := FNV(n.operands[0])
v, err := exprToInt64(hash, params)
if err != nil {
return nil, err
}
p, err := exprToInt64(n.operands[1], params)
if err != nil {
return nil, err
}
if (v % 100) <= p {
return BoolValue(true), nil
}
return BoolValue(false), nil
}
// Param is an expression used to select a parameter passed during evaluation and return its corresponding value.
type Param struct {
Kind string `json:"kind"`
Type string `json:"type"`
Name string `json:"name"`
}
// StringParam creates a Param that looks up in the set of params passed during evaluation and returns the value
// of the variable that corresponds to the given name.
// The corresponding value must be a string. If not found it returns an error.
func StringParam(name string) *Param {
return &Param{
Kind: "param",
Type: "string",
Name: name,
}
}
// BoolParam creates a Param that looks up in the set of params passed during evaluation and returns the value
// of the variable that corresponds to the given name.
// The corresponding value must be a boolean. If not found it returns an error.
func BoolParam(name string) *Param {
return &Param{
Kind: "param",
Type: "bool",
Name: name,
}
}
// Int64Param creates a Param that looks up in the set of params passed during evaluation and returns the value
// of the variable that corresponds to the given name.
// The corresponding value must be an int64. If not found it returns an error.
func Int64Param(name string) *Param {
return &Param{
Kind: "param",
Type: "int64",
Name: name,
}
}
// Float64Param creates a Param that looks up in the set of params passed during evaluation and returns the value
// of the variable that corresponds to the given name.
// The corresponding value must be a float64. If not found it returns an error.
func Float64Param(name string) *Param {
return &Param{
Kind: "param",
Type: "float64",
Name: name,
}
}
// Eval extracts a value from the given parameters.
func (p *Param) Eval(params Params) (*Value, error) {
if params == nil {
return nil, errors.New("params is nil")
}
switch p.Type {
case "string":
v, err := params.GetString(p.Name)
if err != nil {
return nil, err
}
return StringValue(v), nil
case "bool":
v, err := params.GetBool(p.Name)
if err != nil {
return nil, err
}
return BoolValue(v), nil
case "int64":
v, err := params.GetInt64(p.Name)
if err != nil {
return nil, err
}
return Int64Value(v), nil
case "float64":
v, err := params.GetFloat64(p.Name)
if err != nil {
return nil, err
}
return Float64Value(v), nil
}
return nil, errors.New("unsupported param type")
}
// True creates an expression that always evaluates to true.
func True() Expr {
return BoolValue(true)
}
// A Value is the result of the evaluation of an expression.
type Value struct {
Kind string `json:"kind"`
Type string `json:"type"`
Data string `json:"data"`
}
func newValue(typ, data string) *Value {
return &Value{
Kind: "value",
Type: typ,
Data: data,
}
}
// BoolValue creates a bool type value.
func BoolValue(value bool) *Value {
return newValue("bool", strconv.FormatBool(value))
}
// StringValue creates a string type value.
func StringValue(value string) *Value {
return newValue("string", value)
}
// Int64Value creates an int64 type value.
func Int64Value(value int64) *Value {
return newValue("int64", strconv.FormatInt(value, 10))
}
// Float64Value creates a float64 type value.
func Float64Value(value float64) *Value {
return newValue("float64", strconv.FormatFloat(value, 'f', 6, 64))
}
// Eval evaluates the value to itself.
func (v *Value) Eval(Params) (*Value, error) {
return v, nil
}
func (v *Value) compare(op token.Token, other *Value) bool {
if op != token.EQL {
return false
}
return *v == *other
}
// Equal reports whether v and other represent the same value.
func (v *Value) Equal(other *Value) bool {
return v.compare(token.EQL, other)
}
// GT reports whether v is greater than other.
func (v *Value) GT(other *Value) (bool, error) {
switch v.Type {
case "bool":
v1, v2, err := parseBoolValues(v, other)
if err != nil {
return false, err
}
if !v1 {
// If v1 is False then it's not greater than v2, and we can be done already.
return false, nil
}
if v2 {
// If v2 is True then v1 can't be greater than it..
return false, nil
}
return true, nil
case "string":
if v.Data <= other.Data {
return false, nil
}
return true, nil
case "int64":
v1, v2, err := parseInt64Values(v, other)
if err != nil {
return false, err
}
if v1 <= v2 {
return false, nil
}
return true, nil
case "float64":
v1, v2, err := parseFloat64Values(v, other)
if err != nil {
return false, err
}
if v1 <= v2 {
return false, nil
}
return true, nil
}
return false, fmt.Errorf("unknown Value type: %s", v.Type)
}
// GTE reports whether v is greater or equal than other.
func (v *Value) GTE(other *Value) (bool, error) {
switch v.Type {
case "bool":
v1, v2, err := parseBoolValues(v, other)
if err != nil {
return false, err
}
if !v1 && v2 {
return false, nil
}
return true, nil
case "string":
if v.Data < other.Data {
return false, nil
}
return true, nil
case "int64":
v1, v2, err := parseInt64Values(v, other)
if err != nil {
return false, err
}
if v1 < v2 {
return false, nil
}
return true, nil
case "float64":
v1, v2, err := parseFloat64Values(v, other)
if err != nil {
return false, err
}
if v1 < v2 {
return false, nil
}
return true, nil
}
return false, fmt.Errorf("unknown Value type: %s", v.Type)
}
// LT reports whether v is less than other.
func (v *Value) LT(other *Value) (bool, error) {
switch v.Type {
case "bool":
v1, v2, err := parseBoolValues(v, other)
if err != nil {
return false, err
}
if v1 {
// If v1 is True then it's not less than v2, and we can be done already.
return false, nil
}
if !v2 {
// If v2 is False then v1 can't be less than it..
return false, nil
}
return true, nil
case "string":
if v.Data >= other.Data {
return false, nil
}
return true, nil
case "int64":
v1, v2, err := parseInt64Values(v, other)
if err != nil {
return false, err
}
if v1 >= v2 {
return false, nil
}
return true, nil
case "float64":
v1, v2, err := parseFloat64Values(v, other)
if err != nil {
return false, err
}
if v1 >= v2 {
return false, nil
}
return true, nil
}
return false, fmt.Errorf("unknown Value type: %s", v.Type)
}
// LTE reports whether v is less or equal than other.
func (v *Value) LTE(other *Value) (bool, error) {
switch v.Type {
case "bool":
v1, v2, err := parseBoolValues(v, other)
if err != nil {
return false, err
}
if v1 && !v2 {
return false, nil
}
return true, nil
case "string":
if v.Data > other.Data {
return false, nil
}
return true, nil
case "int64":
v1, v2, err := parseInt64Values(v, other)
if err != nil {
return false, nil
}
if v1 > v2 {
return false, nil
}
return true, nil
case "float64":
v1, v2, err := parseFloat64Values(v, other)
if err != nil {
return false, err
}
if v1 > v2 {
return false, nil
}
return true, nil
}
return false, fmt.Errorf("unknown Value type: %s", v.Type)
}
func parseBoolValues(v1, v2 *Value) (b1, b2 bool, err error) {
if b1, err = strconv.ParseBool(v1.Data); err != nil {
return
}
b2, err = strconv.ParseBool(v2.Data)
return
}
func parseInt64Values(v1, v2 *Value) (i1, i2 int64, err error) {
if i1, err = strconv.ParseInt(v1.Data, 10, 64); err != nil {
return
}
i2, err = strconv.ParseInt(v2.Data, 10, 64)
return
}
func parseFloat64Values(v1, v2 *Value) (f1, f2 float64, err error) {
if f1, err = strconv.ParseFloat(v1.Data, 64); err != nil {
return
}
f2, err = strconv.ParseFloat(v2.Data, 64)
return
}
type operander interface {
Operands() []Expr
}
func walk(expr Expr, fn func(Expr) error) error {
err := fn(expr)
if err != nil {
return err
}
if o, ok := expr.(operander); ok {
ops := o.Operands()
for _, op := range ops {
err := walk(op, fn)
if err != nil {
return err
}
}
}
return nil
}
// exprToInt64 returns the go-native int64 value of an expression
// evaluated with params.
func exprToInt64(e Expr, params Params) (int64, error) {
v, err := e.Eval(params)
if err != nil {
return 0, err
}
i, err := strconv.ParseInt(v.Data, 10, 64)
if err != nil {
return 0, err
}
return i, err
} | rule/expr.go | 0.740925 | 0.422922 | expr.go | starcoder |
package geom
import "math"
type Element = float32
type Vector2 struct {
X Element
Y Element
}
type Vector3 struct {
X Element
Y Element
Z Element
}
type Vector4 struct {
X Element
Y Element
Z Element
W Element
}
// column-major matrix
type Matrix4 [16]Element
func (v *Vector2) Add(v2 *Vector2) *Vector2 {
return &Vector2{X: v.X + v2.X, Y: v.Y + v2.Y}
}
func (v *Vector2) Sub(v2 *Vector2) *Vector2 {
return &Vector2{X: v.X - v2.X, Y: v.Y - v2.Y}
}
func (v *Vector2) Dot(v2 *Vector2) Element {
return v.X*v2.X + v.Y*v2.Y
}
func (v *Vector2) Cross(v2 *Vector2) Element {
return v.X*v2.Y - v.Y*v2.X
}
func (v *Vector2) Len() Element {
return Element(math.Sqrt(float64(v.X*v.X + v.Y*v.Y)))
}
func (v *Vector2) LenSqr() Element {
return v.X*v.X + v.Y*v.Y
}
func (v *Vector2) Normalize() *Vector2 {
l := v.Len()
if l > 0 {
v.X /= l
v.Y /= l
} else {
v.X = 1
}
return v
}
func NewVector3FromArray(arr [3]Element) *Vector3 {
return &Vector3{X: arr[0], Y: arr[1], Z: arr[2]}
}
func NewVector3FromSlice(arr []Element) *Vector3 {
return &Vector3{X: arr[0], Y: arr[1], Z: arr[2]}
}
func (v *Vector3) Add(v2 *Vector3) *Vector3 {
return &Vector3{X: v.X + v2.X, Y: v.Y + v2.Y, Z: v.Z + v2.Z}
}
func (v *Vector3) Sub(v2 *Vector3) *Vector3 {
return &Vector3{X: v.X - v2.X, Y: v.Y - v2.Y, Z: v.Z - v2.Z}
}
func (v *Vector3) Dot(v2 *Vector3) Element {
return v.X*v2.X + v.Y*v2.Y + v.Z*v2.Z
}
func (v *Vector3) Cross(v2 *Vector3) *Vector3 {
return &Vector3{
X: v.Y*v2.Z - v.Z*v2.Y,
Y: v.Z*v2.X - v.X*v2.Z,
Z: v.X*v2.Y - v.Y*v2.X,
}
}
func (v *Vector3) Scale(s Element) *Vector3 {
return &Vector3{X: v.X * s, Y: v.Y * s, Z: v.Z * s}
}
func (v *Vector3) Len() Element {
return Element(math.Sqrt(float64(v.X*v.X + v.Y*v.Y + v.Z*v.Z)))
}
func (v *Vector3) LenSqr() Element {
return v.X*v.X + v.Y*v.Y + v.Z*v.Z
}
func (v *Vector3) Normalize() *Vector3 {
l := v.Len()
if l > 0 {
v.X /= l
v.Y /= l
v.Z /= l
} else {
v.X = 1
}
return v
}
func (v *Vector3) ToArray(array []Element) {
array[0] = v.X
array[1] = v.Y
array[2] = v.Z
}
func (v *Vector4) Add(v2 *Vector4) *Vector4 {
return &Vector4{X: v.X + v2.X, Y: v.Y + v2.Y, Z: v.Z + v2.Z, W: v.W + v2.W}
}
func (v *Vector4) Sub(v2 *Vector4) *Vector4 {
return &Vector4{X: v.X - v2.X, Y: v.Y - v2.Y, Z: v.Z - v2.Z, W: v.W - v2.W}
}
func (v *Vector4) Dot(v2 *Vector4) Element {
return v.X*v2.X + v.Y*v2.Y + v.Z*v2.Z + v.W*v2.W
}
func (v *Vector4) Len() Element {
return Element(math.Sqrt(float64(v.X*v.X + v.Y*v.Y + v.Z*v.Z + v.W*v.W)))
}
func (v *Vector4) LenSqr() Element {
return v.X*v.X + v.Y*v.Y + v.Z*v.Z + v.W*v.W
}
func (v *Vector4) Normalize() *Vector4 {
l := v.Len()
if l > 0 {
v.X /= l
v.Y /= l
v.Z /= l
v.W /= l
} else {
v.W = 1
}
return v
}
func (v *Vector4) Inverse() *Vector4 {
return &Vector4{X: -v.X, Y: -v.Y, Z: -v.Z, W: v.W}
}
// Returns Hamilton product
func (a *Vector4) Mul(b *Vector4) *Vector4 {
return &Vector4{
W: a.W*b.W - a.X*b.X - a.Y*b.Y - a.Z*b.Z, // 1
X: a.W*b.X + a.X*b.W + a.Y*b.Z - a.Z*b.Y, // i
Y: a.W*b.Y - a.X*b.Z + a.Y*b.W + a.Z*b.X, // j
Z: a.W*b.Z + a.X*b.Y - a.Y*b.X + a.Z*b.W, // k
}
}
func (mat *Matrix4) ApplyTo(v *Vector3) *Vector3 {
return &Vector3{
mat[0]*v.X + mat[4]*v.Y + mat[8]*v.Z + mat[12],
mat[1]*v.X + mat[5]*v.Y + mat[9]*v.Z + mat[13],
mat[2]*v.X + mat[6]*v.Y + mat[10]*v.Z + mat[14],
}
}
func NewMatrix4() *Matrix4 {
return &Matrix4{
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1,
}
}
func NewMatrix4FromSlice(a []Element) *Matrix4 {
mat := &Matrix4{}
copy(mat[:], a[:])
return mat
}
func NewScaleMatrix4(x, y, z Element) *Matrix4 {
return &Matrix4{
x, 0, 0, 0,
0, y, 0, 0,
0, 0, z, 0,
0, 0, 0, 1,
}
}
func NewTranslateMatrix4(x, y, z Element) *Matrix4 {
return &Matrix4{
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
x, y, z, 1,
}
}
func NewRotationMatrix4FromQuaternion(q *Vector4) *Matrix4 {
var (
x = q.X
y = q.Y
z = q.Z
w = q.W
)
return &Matrix4{
1 - 2*y*y - 2*z*z, 2*x*y - 2*z*w, 2*x*z + 2*y*w, 0,
2*x*y + 2*z*w, 1 - 2*x*x - 2*z*z, 2*y*z - 2*x*w, 0,
2*x*z - 2*y*w, 2*y*z + 2*x*w, 1 - 2*x*x - 2*y*y, 0,
0, 0, 0, 1,
}
}
func NewEulerRotationMatrix4(x, y, z Element, rev int) *Matrix4 {
m := NewMatrix4()
cx := Element(math.Cos(float64(x)))
sx := Element(math.Sin(float64(x)))
cy := Element(math.Cos(float64(y)))
sy := Element(math.Sin(float64(y)))
cz := Element(math.Cos(float64(z)))
sz := Element(math.Sin(float64(z)))
if rev == 0 {
m[0] = cy * cz
m[4] = -cy * sz
m[8] = sy
m[1] = cx*sz + sx*cz*sy
m[5] = cx*cz - sx*sz*sy
m[9] = -sx * cy
m[2] = sx*sz - cx*cz*sy
m[6] = sx*cz + cx*sz*sy
m[10] = cx * cy
} else {
m[0] = cy * cz
m[4] = sx*cz*sy - cx*sz
m[8] = cx*cz*sy + sx*sz
m[1] = cy * sz
m[5] = sx*sz*sy + cx*cz
m[9] = cx*sz*sy - sx*cz
m[2] = -sy
m[6] = sx * cy
m[10] = cx * cy
}
return m
}
func (b *Matrix4) Mul(a *Matrix4) *Matrix4 {
r := &Matrix4{}
r[0] = a[0]*b[0] + a[1]*b[4] + a[2]*b[8] + a[3]*b[12]
r[1] = a[0]*b[1] + a[1]*b[5] + a[2]*b[9] + a[3]*b[13]
r[2] = a[0]*b[2] + a[1]*b[6] + a[2]*b[10] + a[3]*b[14]
r[3] = a[0]*b[3] + a[1]*b[7] + a[2]*b[11] + a[3]*b[15]
r[4] = a[4]*b[0] + a[5]*b[4] + a[6]*b[8] + a[7]*b[12]
r[5] = a[4]*b[1] + a[5]*b[5] + a[6]*b[9] + a[7]*b[13]
r[6] = a[4]*b[2] + a[5]*b[6] + a[6]*b[10] + a[7]*b[14]
r[7] = a[4]*b[3] + a[5]*b[7] + a[6]*b[11] + a[7]*b[15]
r[8] = a[8]*b[0] + a[9]*b[4] + a[10]*b[8] + a[11]*b[12]
r[9] = a[8]*b[1] + a[9]*b[5] + a[10]*b[9] + a[11]*b[13]
r[10] = a[8]*b[2] + a[9]*b[6] + a[10]*b[10] + a[11]*b[14]
r[11] = a[8]*b[3] + a[9]*b[7] + a[10]*b[11] + a[11]*b[15]
r[12] = a[12]*b[0] + a[13]*b[4] + a[14]*b[8] + a[15]*b[12]
r[13] = a[12]*b[1] + a[13]*b[5] + a[14]*b[9] + a[15]*b[13]
r[14] = a[12]*b[2] + a[13]*b[6] + a[14]*b[10] + a[15]*b[14]
r[15] = a[12]*b[3] + a[13]*b[7] + a[14]*b[11] + a[15]*b[15]
return r
}
func (m *Matrix4) Det() float32 {
var (
t11 = m[9]*m[14]*m[7] - m[13]*m[10]*m[7] + m[13]*m[6]*m[11] - m[5]*m[14]*m[11] - m[9]*m[6]*m[15] + m[5]*m[10]*m[15]
t12 = m[12]*m[10]*m[7] - m[8]*m[14]*m[7] - m[12]*m[6]*m[11] + m[4]*m[14]*m[11] + m[8]*m[6]*m[15] - m[4]*m[10]*m[15]
t13 = m[8]*m[13]*m[7] - m[12]*m[9]*m[7] + m[12]*m[5]*m[11] - m[4]*m[13]*m[11] - m[8]*m[5]*m[15] + m[4]*m[9]*m[15]
t14 = m[12]*m[9]*m[6] - m[8]*m[13]*m[6] - m[12]*m[5]*m[10] + m[4]*m[13]*m[10] + m[8]*m[5]*m[14] - m[4]*m[9]*m[14]
det = m[0]*t11 + m[1]*t12 + m[2]*t13 + m[3]*t14
)
return det
}
func (m *Matrix4) Inverse() *Matrix4 {
var (
t11 = m[9]*m[14]*m[7] - m[13]*m[10]*m[7] + m[13]*m[6]*m[11] - m[5]*m[14]*m[11] - m[9]*m[6]*m[15] + m[5]*m[10]*m[15]
t12 = m[12]*m[10]*m[7] - m[8]*m[14]*m[7] - m[12]*m[6]*m[11] + m[4]*m[14]*m[11] + m[8]*m[6]*m[15] - m[4]*m[10]*m[15]
t13 = m[8]*m[13]*m[7] - m[12]*m[9]*m[7] + m[12]*m[5]*m[11] - m[4]*m[13]*m[11] - m[8]*m[5]*m[15] + m[4]*m[9]*m[15]
t14 = m[12]*m[9]*m[6] - m[8]*m[13]*m[6] - m[12]*m[5]*m[10] + m[4]*m[13]*m[10] + m[8]*m[5]*m[14] - m[4]*m[9]*m[14]
det = m[0]*t11 + m[1]*t12 + m[2]*t13 + m[3]*t14
)
r := &Matrix4{}
if det == 0 {
return r
}
r[0] = t11 / det
r[1] = (m[13]*m[10]*m[3] - m[9]*m[14]*m[3] - m[13]*m[2]*m[11] + m[1]*m[14]*m[11] + m[9]*m[2]*m[15] - m[1]*m[10]*m[15]) / det
r[2] = (m[5]*m[14]*m[3] - m[13]*m[6]*m[3] + m[13]*m[2]*m[7] - m[1]*m[14]*m[7] - m[5]*m[2]*m[15] + m[1]*m[6]*m[15]) / det
r[3] = (m[9]*m[6]*m[3] - m[5]*m[10]*m[3] - m[9]*m[2]*m[7] + m[1]*m[10]*m[7] + m[5]*m[2]*m[11] - m[1]*m[6]*m[11]) / det
r[4] = t12 / det
r[5] = (m[8]*m[14]*m[3] - m[12]*m[10]*m[3] + m[12]*m[2]*m[11] - m[0]*m[14]*m[11] - m[8]*m[2]*m[15] + m[0]*m[10]*m[15]) / det
r[6] = (m[12]*m[6]*m[3] - m[4]*m[14]*m[3] - m[12]*m[2]*m[7] + m[0]*m[14]*m[7] + m[4]*m[2]*m[15] - m[0]*m[6]*m[15]) / det
r[7] = (m[4]*m[10]*m[3] - m[8]*m[6]*m[3] + m[8]*m[2]*m[7] - m[0]*m[10]*m[7] - m[4]*m[2]*m[11] + m[0]*m[6]*m[11]) / det
r[8] = t13 / det
r[9] = (m[12]*m[9]*m[3] - m[8]*m[13]*m[3] - m[12]*m[1]*m[11] + m[0]*m[13]*m[11] + m[8]*m[1]*m[15] - m[0]*m[9]*m[15]) / det
r[10] = (m[4]*m[13]*m[3] - m[12]*m[5]*m[3] + m[12]*m[1]*m[7] - m[0]*m[13]*m[7] - m[4]*m[1]*m[15] + m[0]*m[5]*m[15]) / det
r[11] = (m[8]*m[5]*m[3] - m[4]*m[9]*m[3] - m[8]*m[1]*m[7] + m[0]*m[9]*m[7] + m[4]*m[1]*m[11] - m[0]*m[5]*m[11]) / det
r[12] = t14 / det
r[13] = (m[8]*m[13]*m[2] - m[12]*m[9]*m[2] + m[12]*m[1]*m[10] - m[0]*m[13]*m[10] - m[8]*m[1]*m[14] + m[0]*m[9]*m[14]) / det
r[14] = (m[12]*m[5]*m[2] - m[4]*m[13]*m[2] - m[12]*m[1]*m[6] + m[0]*m[13]*m[6] + m[4]*m[1]*m[14] - m[0]*m[5]*m[14]) / det
r[15] = (m[4]*m[9]*m[2] - m[8]*m[5]*m[2] + m[8]*m[1]*m[6] - m[0]*m[9]*m[6] - m[4]*m[1]*m[10] + m[0]*m[5]*m[10]) / det
return r
}
func (m *Matrix4) Transposed() *Matrix4 {
return &Matrix4{
m[0], m[4], m[8], m[12],
m[1], m[5], m[9], m[13],
m[2], m[6], m[10], m[14],
m[3], m[7], m[11], m[15],
}
}
func (m *Matrix4) Clone() *Matrix4 {
r := *m
return &r
}
func (mat *Matrix4) ToArray(a []Element) {
copy(a, mat[:])
} | geom/geometry.go | 0.798462 | 0.679175 | geometry.go | starcoder |
package brotli
import "encoding/binary"
/* Copyright 2015 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Function for fast encoding of an input fragment, independently from the input
history. This function uses two-pass processing: in the first pass we save
the found backward matches and literal bytes into a buffer, and in the
second pass we emit them into the bit stream using prefix codes built based
on the actual command and literal byte histograms. */
const kCompressFragmentTwoPassBlockSize uint = 1 << 17
/* hash1 computes a hash-table index for the first `length` bytes at p
   (length must be <= 8). The low `length` bytes of a little-endian load
   are kept, multiplied by the hash constant, and the top bits taken. */
func hash1(p []byte, shift uint, length uint) uint32 {
	word := binary.LittleEndian.Uint64(p) << ((8 - length) * 8)
	return uint32((word * uint64(kHashMul32)) >> shift)
}
/* hashBytesAtOffset computes the same hash as hash1, but over `length`
   bytes taken from the already-loaded 64-bit word v, starting at the
   given byte offset. Requires offset <= 8-length. */
func hashBytesAtOffset(v uint64, offset uint, shift uint, length uint) uint32 {
	assert(offset <= 8-length)
	word := (v >> (8 * offset)) << ((8 - length) * 8)
	return uint32((word * uint64(kHashMul32)) >> shift)
}
/* isMatch1 reports whether the first `length` bytes of p1 and p2 are
   equal, where length is either 4 or 6. The first four bytes are
   compared as one 32-bit word; for length 6 the two trailing bytes are
   checked individually. */
func isMatch1(p1 []byte, p2 []byte, length uint) bool {
	switch {
	case binary.LittleEndian.Uint32(p1) != binary.LittleEndian.Uint32(p2):
		return false
	case length == 4:
		return true
	default:
		return p1[4] == p2[4] && p1[5] == p2[5]
	}
}
/* Builds a command and distance prefix code (each 64 symbols) into "depth" and
   "bits" based on "histogram" and stores it into the bit stream. */
func buildAndStoreCommandPrefixCode(histogram []uint32, depth []byte, bits []uint16, bw *bitWriter) {
	/* Tree size for building a tree over 64 symbols is 2 * 64 + 1. */
	var tree [129]huffmanTree
	var cmd_depth = [numCommandSymbols]byte{0}
	var cmd_bits [64]uint16
	/* Build code lengths separately for the 64 command symbols and the
	   64 distance symbols. */
	createHuffmanTree(histogram, 64, 15, tree[:], depth)
	createHuffmanTree(histogram[64:], 64, 14, tree[:], depth[64:])
	/* We have to jump through a few hoops here in order to compute
	   the command bits because the symbols are in a different order than in
	   the full alphabet. This looks complicated, but having the symbols
	   in this order in the command bits saves a few branches in the Emit*
	   functions. */
	copy(cmd_depth[:], depth[24:][:24])
	copy(cmd_depth[24:][:], depth[:8])
	copy(cmd_depth[32:][:], depth[48:][:8])
	copy(cmd_depth[40:][:], depth[8:][:8])
	copy(cmd_depth[48:][:], depth[56:][:8])
	copy(cmd_depth[56:][:], depth[16:][:8])
	convertBitDepthsToSymbols(cmd_depth[:], 64, cmd_bits[:])
	copy(bits, cmd_bits[24:][:8])
	copy(bits[8:], cmd_bits[40:][:8])
	copy(bits[16:], cmd_bits[56:][:8])
	copy(bits[24:], cmd_bits[:24])
	copy(bits[48:], cmd_bits[32:][:8])
	copy(bits[56:], cmd_bits[48:][:8])
	convertBitDepthsToSymbols(depth[64:], 64, bits[64:])
	{
		/* Create the bit length array for the full command alphabet. */
		for i := 0; i < 64; i++ {
			cmd_depth[i] = 0 /* only the first 64 values were used above */
		}
		copy(cmd_depth[:], depth[24:][:8])
		copy(cmd_depth[64:][:], depth[32:][:8])
		copy(cmd_depth[128:][:], depth[40:][:8])
		copy(cmd_depth[192:][:], depth[48:][:8])
		copy(cmd_depth[384:][:], depth[56:][:8])
		for i := uint(0); i < 8; i++ {
			cmd_depth[128+8*i] = depth[i]
			cmd_depth[256+8*i] = depth[8+i]
			cmd_depth[448+8*i] = depth[16+i]
		}
		storeHuffmanTree(cmd_depth[:], numCommandSymbols, tree[:], bw)
	}
	storeHuffmanTree(depth[64:], 64, tree[:], bw)
}
/* emitInsertLen appends one command word for an insert run of the given
   length and advances *commands. The command code occupies the low bits
   and any extra bits are packed starting at bit 8. */
func emitInsertLen(insertlen uint32, commands *[]uint32) {
	var cmd uint32
	switch {
	case insertlen < 6:
		cmd = insertlen
	case insertlen < 130:
		tail := insertlen - 2
		nbits := log2FloorNonZero(uint(tail)) - 1
		prefix := tail >> nbits
		cmd = ((nbits << 1) + prefix + 2) | (tail-(prefix<<nbits))<<8
	case insertlen < 2114:
		tail := insertlen - 66
		nbits := log2FloorNonZero(uint(tail))
		cmd = (nbits + 10) | (tail-(1<<nbits))<<8
	case insertlen < 6210:
		cmd = 21 | (insertlen-2114)<<8
	case insertlen < 22594:
		cmd = 22 | (insertlen-6210)<<8
	default:
		cmd = 23 | (insertlen-22594)<<8
	}
	(*commands)[0] = cmd
	*commands = (*commands)[1:]
}
/* emitCopyLen appends one command word for a copy of the given length
   and advances *commands. The command code occupies the low bits and
   any extra bits are packed starting at bit 8. */
func emitCopyLen(copylen uint, commands *[]uint32) {
	var cmd uint32
	switch {
	case copylen < 10:
		cmd = uint32(copylen + 38)
	case copylen < 134:
		tail := copylen - 6
		nbits := uint(log2FloorNonZero(tail) - 1)
		prefix := tail >> nbits
		cmd = uint32(((nbits << 1) + prefix + 44) | (tail-(prefix<<nbits))<<8)
	case copylen < 2118:
		tail := copylen - 70
		nbits := uint(log2FloorNonZero(tail))
		cmd = uint32((nbits + 52) | (tail-(uint(1)<<nbits))<<8)
	default:
		cmd = uint32(63 | (copylen-2118)<<8)
	}
	(*commands)[0] = cmd
	*commands = (*commands)[1:]
}
/* emitCopyLenLastDistance appends the command word(s) for a copy that
   reuses the last distance, advancing *commands. Short copies (< 12)
   and medium copies (< 72) use a single combined code; longer copies
   emit a copy command followed by the explicit last-distance code 64. */
func emitCopyLenLastDistance(copylen uint, commands *[]uint32) {
	switch {
	case copylen < 12:
		(*commands)[0] = uint32(copylen + 20)
		*commands = (*commands)[1:]
	case copylen < 72:
		tail := copylen - 8
		nbits := uint(log2FloorNonZero(tail) - 1)
		prefix := tail >> nbits
		extra := tail - (prefix << nbits)
		(*commands)[0] = uint32(((nbits << 1) + prefix + 28) | extra<<8)
		*commands = (*commands)[1:]
	case copylen < 136:
		tail := copylen - 8
		(*commands)[0] = uint32(((tail >> 5) + 54) | (tail&31)<<8)
		*commands = (*commands)[1:]
		(*commands)[0] = 64 /* explicit last-distance code */
		*commands = (*commands)[1:]
	case copylen < 2120:
		tail := copylen - 72
		nbits := uint(log2FloorNonZero(tail))
		extra := tail - (uint(1) << nbits)
		(*commands)[0] = uint32((nbits + 52) | extra<<8)
		*commands = (*commands)[1:]
		(*commands)[0] = 64 /* explicit last-distance code */
		*commands = (*commands)[1:]
	default:
		(*commands)[0] = uint32(63 | (copylen-2120)<<8)
		*commands = (*commands)[1:]
		(*commands)[0] = 64 /* explicit last-distance code */
		*commands = (*commands)[1:]
	}
}
/* emitDistance appends one distance command word for the given backward
   distance and advances *commands. The distance code occupies the low
   bits and the extra bits are packed starting at bit 8. */
func emitDistance(distance uint32, commands *[]uint32) {
	d := distance + 3
	nbits := log2FloorNonZero(uint(d)) - 1
	prefix := (d >> nbits) & 1
	extra := d - ((2 + prefix) << nbits)
	(*commands)[0] = (2*(nbits-1) + prefix + 80) | extra<<8
	*commands = (*commands)[1:]
}
/* storeMetaBlockHeader writes a meta-block header for a block of the
   given length to the bit stream. REQUIRES: length <= 1 << 24. */
func storeMetaBlockHeader(length uint, is_uncompressed bool, bw *bitWriter) {
	/* ISLAST */
	bw.writeBits(1, 0)
	/* Choose the smallest nibble count that can represent length-1. */
	var nibbles uint
	switch {
	case length <= 1<<16:
		nibbles = 4
	case length <= 1<<20:
		nibbles = 5
	default:
		nibbles = 6
	}
	bw.writeBits(2, uint64(nibbles)-4)
	bw.writeBits(nibbles*4, uint64(length)-1)
	/* ISUNCOMPRESSED */
	bw.writeSingleBit(is_uncompressed)
}
// createCommands is the first pass of the two-pass compressor: it scans one
// block of input, finds backward matches through the hash table, and fills
// the caller-supplied literal and command buffers for the second pass to
// entropy code. The literals/commands slices are advanced past what was
// written, so the caller can recover the counts from the remaining capacity.
func createCommands(input []byte, block_size uint, input_size uint, base_ip_ptr []byte, table []int, table_bits uint, min_match uint, literals *[]byte, commands *[]uint32) {
	var ip int = 0
	var shift uint = 64 - table_bits
	var ip_end int = int(block_size)
	var base_ip int = -cap(base_ip_ptr) + cap(input)
	var next_emit int = 0
	var last_distance int = -1
	/* "ip" is the input pointer. */

	const kInputMarginBytes uint = windowGap

	/* "next_emit" is a pointer to the first byte that is not covered by a
	   previous copy. Bytes between "next_emit" and the start of the next copy or
	   the end of the input will be emitted as literal bytes. */
	if block_size >= kInputMarginBytes {
		var len_limit uint = brotli_min_size_t(block_size-min_match, input_size-kInputMarginBytes)
		var ip_limit int = int(len_limit)
		/* For the last block, we need to keep a 16 bytes margin so that we can be
		   sure that all distances are at most window size - 16.
		   For all other blocks, we only need to keep a margin of 5 bytes so that
		   we don't go over the block size with a copy. */

		var next_hash uint32
		ip++
		for next_hash = hash1(input[ip:], shift, min_match); ; {
			var skip uint32 = 32
			var next_ip int = ip
			/* Step 1: Scan forward in the input looking for a 6-byte-long match.
			   If we get close to exhausting the input then goto emit_remainder.

			   Heuristic match skipping: If 32 bytes are scanned with no matches
			   found, start looking only at every other byte. If 32 more bytes are
			   scanned, look at every third byte, etc.. When a match is found,
			   immediately go back to looking at every byte. This is a small loss
			   (~5% performance, ~0.1% density) for compressible data due to more
			   bookkeeping, but for non-compressible data (such as JPEG) it's a huge
			   win since the compressor quickly "realizes" the data is incompressible
			   and doesn't bother looking for matches everywhere.

			   The "skip" variable keeps track of how many bytes there are since the
			   last match; dividing it by 32 (ie. right-shifting by five) gives the
			   number of bytes to move ahead for each iteration. */

			var candidate int
			assert(next_emit < ip)

		trawl:
			for {
				var hash uint32 = next_hash
				var bytes_between_hash_lookups uint32 = skip >> 5
				skip++
				ip = next_ip
				assert(hash == hash1(input[ip:], shift, min_match))
				next_ip = int(uint32(ip) + bytes_between_hash_lookups)
				if next_ip > ip_limit {
					goto emit_remainder
				}

				next_hash = hash1(input[next_ip:], shift, min_match)
				candidate = ip - last_distance
				if isMatch1(input[ip:], base_ip_ptr[candidate-base_ip:], min_match) {
					if candidate < ip {
						table[hash] = int(ip - base_ip)
						break
					}
				}

				candidate = base_ip + table[hash]
				assert(candidate >= base_ip)
				assert(candidate < ip)

				table[hash] = int(ip - base_ip)
				if isMatch1(input[ip:], base_ip_ptr[candidate-base_ip:], min_match) {
					break
				}
			}

			/* Check copy distance. If candidate is not feasible, continue search.
			   Checking is done outside of hot loop to reduce overhead. */
			if ip-candidate > maxDistance_compress_fragment {
				goto trawl
			}

			/* Step 2: Emit the found match together with the literal bytes from
			   "next_emit", and then see if we can find a next match immediately
			   afterwards. Repeat until we find no match for the input
			   without emitting some literal bytes. */
			{
				var base int = ip
				/* > 0 */
				var matched uint = min_match + findMatchLengthWithLimit(base_ip_ptr[uint(candidate-base_ip)+min_match:], input[uint(ip)+min_match:], uint(ip_end-ip)-min_match)
				var distance int = int(base - candidate)
				/* We have a 6-byte match at ip, and we need to emit bytes in
				   [next_emit, ip). */
				var insert int = int(base - next_emit)
				ip += int(matched)
				emitInsertLen(uint32(insert), commands)
				copy(*literals, input[next_emit:][:uint(insert)])
				*literals = (*literals)[insert:]
				if distance == last_distance {
					(*commands)[0] = 64
					*commands = (*commands)[1:]
				} else {
					emitDistance(uint32(distance), commands)
					last_distance = distance
				}

				emitCopyLenLastDistance(matched, commands)

				next_emit = ip
				if ip >= ip_limit {
					goto emit_remainder
				}
				{
					var input_bytes uint64
					var cur_hash uint32
					/* We could immediately start working at ip now, but to improve
					   compression we first update "table" with the hashes of some
					   positions within the last copy. */

					var prev_hash uint32
					if min_match == 4 {
						input_bytes = binary.LittleEndian.Uint64(input[ip-3:])
						cur_hash = hashBytesAtOffset(input_bytes, 3, shift, min_match)
						prev_hash = hashBytesAtOffset(input_bytes, 0, shift, min_match)
						table[prev_hash] = int(ip - base_ip - 3)
						prev_hash = hashBytesAtOffset(input_bytes, 1, shift, min_match)
						table[prev_hash] = int(ip - base_ip - 2)
						/* Bug fix: the hash for position ip-1 must be taken at byte
						   offset 2; the previous offset 0 re-hashed position ip-3
						   and left a stale table entry for ip-1 (cf. the identical
						   update block further below and the C reference). */
						prev_hash = hashBytesAtOffset(input_bytes, 2, shift, min_match)
						table[prev_hash] = int(ip - base_ip - 1)
					} else {
						input_bytes = binary.LittleEndian.Uint64(input[ip-5:])
						prev_hash = hashBytesAtOffset(input_bytes, 0, shift, min_match)
						table[prev_hash] = int(ip - base_ip - 5)
						prev_hash = hashBytesAtOffset(input_bytes, 1, shift, min_match)
						table[prev_hash] = int(ip - base_ip - 4)
						prev_hash = hashBytesAtOffset(input_bytes, 2, shift, min_match)
						table[prev_hash] = int(ip - base_ip - 3)
						input_bytes = binary.LittleEndian.Uint64(input[ip-2:])
						cur_hash = hashBytesAtOffset(input_bytes, 2, shift, min_match)
						prev_hash = hashBytesAtOffset(input_bytes, 0, shift, min_match)
						table[prev_hash] = int(ip - base_ip - 2)
						prev_hash = hashBytesAtOffset(input_bytes, 1, shift, min_match)
						table[prev_hash] = int(ip - base_ip - 1)
					}

					candidate = base_ip + table[cur_hash]
					table[cur_hash] = int(ip - base_ip)
				}
			}

			for ip-candidate <= maxDistance_compress_fragment && isMatch1(input[ip:], base_ip_ptr[candidate-base_ip:], min_match) {
				var base int = ip
				/* We have a 6-byte match at ip, and no need to emit any
				   literal bytes prior to ip. */
				var matched uint = min_match + findMatchLengthWithLimit(base_ip_ptr[uint(candidate-base_ip)+min_match:], input[uint(ip)+min_match:], uint(ip_end-ip)-min_match)
				ip += int(matched)
				last_distance = int(base - candidate) /* > 0 */
				emitCopyLen(matched, commands)
				emitDistance(uint32(last_distance), commands)

				next_emit = ip
				if ip >= ip_limit {
					goto emit_remainder
				}
				{
					var input_bytes uint64
					var cur_hash uint32
					/* We could immediately start working at ip now, but to improve
					   compression we first update "table" with the hashes of some
					   positions within the last copy. */

					var prev_hash uint32
					if min_match == 4 {
						input_bytes = binary.LittleEndian.Uint64(input[ip-3:])
						cur_hash = hashBytesAtOffset(input_bytes, 3, shift, min_match)
						prev_hash = hashBytesAtOffset(input_bytes, 0, shift, min_match)
						table[prev_hash] = int(ip - base_ip - 3)
						prev_hash = hashBytesAtOffset(input_bytes, 1, shift, min_match)
						table[prev_hash] = int(ip - base_ip - 2)
						prev_hash = hashBytesAtOffset(input_bytes, 2, shift, min_match)
						table[prev_hash] = int(ip - base_ip - 1)
					} else {
						input_bytes = binary.LittleEndian.Uint64(input[ip-5:])
						prev_hash = hashBytesAtOffset(input_bytes, 0, shift, min_match)
						table[prev_hash] = int(ip - base_ip - 5)
						prev_hash = hashBytesAtOffset(input_bytes, 1, shift, min_match)
						table[prev_hash] = int(ip - base_ip - 4)
						prev_hash = hashBytesAtOffset(input_bytes, 2, shift, min_match)
						table[prev_hash] = int(ip - base_ip - 3)
						input_bytes = binary.LittleEndian.Uint64(input[ip-2:])
						cur_hash = hashBytesAtOffset(input_bytes, 2, shift, min_match)
						prev_hash = hashBytesAtOffset(input_bytes, 0, shift, min_match)
						table[prev_hash] = int(ip - base_ip - 2)
						prev_hash = hashBytesAtOffset(input_bytes, 1, shift, min_match)
						table[prev_hash] = int(ip - base_ip - 1)
					}

					candidate = base_ip + table[cur_hash]
					table[cur_hash] = int(ip - base_ip)
				}
			}

			ip++
			next_hash = hash1(input[ip:], shift, min_match)
		}
	}

emit_remainder:
	assert(next_emit <= ip_end)

	/* Emit the remaining bytes as literals. */
	if next_emit < ip_end {
		var insert uint32 = uint32(ip_end - next_emit)
		emitInsertLen(insert, commands)
		copy(*literals, input[next_emit:][:insert])
		*literals = (*literals)[insert:]
	}
}
// storeCommands_kNumExtraBits maps each of the 128 command prefix codes to
// the number of extra bits that follow its Huffman code in the output stream.
var storeCommands_kNumExtraBits = [128]uint32{
	0, 0, 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 7, 8, 9, 10, 12, 14, 24,
	0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4,
	0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 7, 8, 9, 10, 24,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8,
	9, 9, 10, 10, 11, 11, 12, 12, 13, 13, 14, 14, 15, 15, 16, 16,
	17, 17, 18, 18, 19, 19, 20, 20, 21, 21, 22, 22, 23, 23, 24, 24,
}

// storeCommands_kInsertOffset gives the base insert length for each of the
// 24 insert-length codes; the actual length is this offset plus the value of
// the code's extra bits (see storeCommands).
var storeCommands_kInsertOffset = [24]uint32{
	0, 1, 2, 3, 4, 5, 6, 8, 10, 14, 18, 26, 34, 50, 66, 98, 130, 194, 322, 578,
	1090, 2114, 6210, 22594,
}
// storeCommands entropy codes one block: it builds Huffman codes for the
// literal bytes and for the command codes, stores both code tables to bw,
// then writes every command (prefix code plus extra bits) followed by the
// literals its insert part covers.
func storeCommands(literals []byte, num_literals uint, commands []uint32, num_commands uint, bw *bitWriter) {
	var lit_depths [256]byte
	var lit_bits [256]uint16
	var lit_histo = [256]uint32{0}
	var cmd_depths = [128]byte{0}
	var cmd_bits = [128]uint16{0}
	var cmd_histo = [128]uint32{0}
	var i uint
	for i = 0; i < num_literals; i++ {
		lit_histo[literals[i]]++
	}

	buildAndStoreHuffmanTreeFast(lit_histo[:], num_literals, /* max_bits = */
		8, lit_depths[:], lit_bits[:], bw)

	// Each command stores its prefix code in the low byte; the rest of the
	// word holds the extra bits.
	for i = 0; i < num_commands; i++ {
		var code uint32 = commands[i] & 0xFF
		assert(code < 128)
		cmd_histo[code]++
	}

	// Seed a few specific command codes so they are never zero-frequency —
	// presumably so they always receive a usable prefix code.
	cmd_histo[1] += 1
	cmd_histo[2] += 1
	cmd_histo[64] += 1
	cmd_histo[84] += 1

	buildAndStoreCommandPrefixCode(cmd_histo[:], cmd_depths[:], cmd_bits[:], bw)

	for i = 0; i < num_commands; i++ {
		var cmd uint32 = commands[i]
		var code uint32 = cmd & 0xFF
		var extra uint32 = cmd >> 8
		assert(code < 128)
		bw.writeBits(uint(cmd_depths[code]), uint64(cmd_bits[code]))
		bw.writeBits(uint(storeCommands_kNumExtraBits[code]), uint64(extra))

		// Codes below 24 are insert-length codes: emit that many literals
		// right after the command, consuming them from the literals slice.
		if code < 24 {
			var insert uint32 = storeCommands_kInsertOffset[code] + extra
			var j uint32
			for j = 0; j < insert; j++ {
				var lit byte = literals[0]
				bw.writeBits(uint(lit_depths[lit]), uint64(lit_bits[lit]))
				literals = literals[1:]
			}
		}
	}
}
/* Acceptable loss for uncompressible speedup is 2% */
const minRatio = 0.98

// sampleRate is the stride used when sampling literals for the entropy
// estimate in shouldCompress.
const sampleRate = 43
// shouldCompress decides whether entropy coding a block is worthwhile.
// It returns true when enough backward references were found (few literals
// relative to the corpus), or when a sampled entropy estimate of the
// literals stays under the acceptable bit budget.
func shouldCompress(input []byte, input_size uint, num_literals uint) bool {
	corpus := float64(input_size)
	if float64(num_literals) < minRatio*corpus {
		// Plenty of matches: compression will pay off.
		return true
	}
	// Few matches were found; sample every sampleRate-th byte and compare
	// the literal entropy against the acceptable total bit cost.
	var histo [256]uint32
	budget := corpus * 8 * minRatio / sampleRate
	for i := uint(0); i < input_size; i += sampleRate {
		histo[input[i]]++
	}
	return bitsEntropy(histo[:], 256) < budget
}
// emitUncompressedMetaBlock writes input_size bytes of input verbatim as one
// uncompressed meta-block: the meta-block header, padding to the next byte
// boundary, then the raw bytes.
func emitUncompressedMetaBlock(input []byte, input_size uint, bw *bitWriter) {
	storeMetaBlockHeader(input_size, true, bw)
	bw.jumpToByteBoundary()
	bw.writeBytes(input[:input_size])
}
// compressFragmentTwoPassImpl compresses input one block at a time (at most
// kCompressFragmentTwoPassBlockSize bytes per block). Pass one
// (createCommands) fills the literal/command buffers; pass two either
// entropy codes them or emits the block uncompressed when coding would not
// pay off. is_last is accepted for signature parity with the caller but is
// not used here.
func compressFragmentTwoPassImpl(input []byte, input_size uint, is_last bool, command_buf []uint32, literal_buf []byte, table []int, table_bits uint, min_match uint, bw *bitWriter) {
	/* Save the start of the first block for position and distance computations.
	 */
	var base_ip []byte = input

	for input_size > 0 {
		var block_size uint = brotli_min_size_t(input_size, kCompressFragmentTwoPassBlockSize)
		var commands []uint32 = command_buf
		var literals []byte = literal_buf
		var num_literals uint
		createCommands(input, block_size, input_size, base_ip, table, table_bits, min_match, &literals, &commands)

		// createCommands advances the slice headers as it writes; the number
		// of entries produced is recovered from the consumed capacity.
		num_literals = uint(-cap(literals) + cap(literal_buf))
		if shouldCompress(input, block_size, num_literals) {
			var num_commands uint = uint(-cap(commands) + cap(command_buf))
			storeMetaBlockHeader(block_size, false, bw)

			/* No block splits, no contexts. */
			bw.writeBits(13, 0)

			storeCommands(literal_buf, num_literals, command_buf, num_commands, bw)
		} else {
			/* Since we did not find many backward references and the entropy of
			   the data is close to 8 bits, we can simply emit an uncompressed block.
			   This makes compression speed of uncompressible data about 3x faster. */
			emitUncompressedMetaBlock(input, block_size, bw)
		}

		input = input[block_size:]
		input_size -= block_size
	}
}
/* Compresses "input" string to bw as one or more complete meta-blocks.
If "is_last" is 1, emits an additional empty last meta-block.
REQUIRES: "input_size" is greater than zero, or "is_last" is 1.
REQUIRES: "input_size" is less or equal to maximal metablock size (1 << 24).
REQUIRES: "command_buf" and "literal_buf" point to at least
kCompressFragmentTwoPassBlockSize long arrays.
REQUIRES: All elements in "table[0..table_size-1]" are initialized to zero.
REQUIRES: "table_size" is a power of two
OUTPUT: maximal copy distance <= |input_size|
OUTPUT: maximal copy distance <= BROTLI_MAX_BACKWARD_LIMIT(18) */
func compressFragmentTwoPass(input []byte, input_size uint, is_last bool, command_buf []uint32, literal_buf []byte, table []int, table_size uint, bw *bitWriter) {
var initial_storage_ix uint = bw.getPos()
var table_bits uint = uint(log2FloorNonZero(table_size))
var min_match uint
if table_bits <= 15 {
min_match = 4
} else {
min_match = 6
}
compressFragmentTwoPassImpl(input, input_size, is_last, command_buf, literal_buf, table, table_bits, min_match, bw)
/* If output is larger than single uncompressed block, rewrite it. */
if bw.getPos()-initial_storage_ix > 31+(input_size<<3) {
bw.rewind(initial_storage_ix)
emitUncompressedMetaBlock(input, input_size, bw)
}
if is_last {
bw.writeBits(1, 1) /* islast */
bw.writeBits(1, 1) /* isempty */
bw.jumpToByteBoundary()
}
} | vendor/github.com/andybalholm/brotli/compress_fragment_two_pass.go | 0.594787 | 0.442817 | compress_fragment_two_pass.go | starcoder |
package lingo
import (
"errors"
"fmt"
)
// Matrix represents a 2 dimensional matrix of float64.
type Matrix [][]float64
// Order returns the tensor order. For a matrix, this is always 2.
func (m Matrix) Order() int {
return 2
}
// Rows returns the number of rows the matrix contains.
func (m Matrix) Rows() int {
return len(m)
}
// Columns returns the number of columns the matrix contains.
func (m Matrix) Columns() int {
if len(m) == 0 {
return 0
}
return len(m[0])
}
// Value returns the value at a specific, zero-based position in the matrix.
func (m Matrix) Value(position ...int) (float64, error) {
if len(position) != 2 {
return 0, errors.New("matrix positions are 2 dimensional")
}
if len(m)-1 < position[0] {
return 0, errors.New("value does not exist")
}
if len(m[position[0]])-1 < position[1] {
return 0, errors.New("value does not exist")
}
return m[position[0]][position[1]], nil
}
// SetValue sets the value at a zero-based (row, column) position in the
// matrix and returns the updated tensor.
// An error is returned when the number of indices is not exactly two or the
// position lies outside the matrix (including negative indices).
func (m Matrix) SetValue(value float64, position ...int) (Tensor, error) {
	if len(position) != 2 {
		return nil, errors.New("matrix positions are 2 dimensional")
	}
	row, col := position[0], position[1]
	// Bug fix: the column bound must be exclusive (>=); the previous check
	// used > and allowed col == Columns(), which panicked on assignment.
	// Negative indices are rejected for the same reason.
	if row < 0 || col < 0 || row >= m.Rows() || col >= m.Columns() {
		return nil, errors.New("position is not in Matrix")
	}
	m[row][col] = value
	return m, nil
}
// String returns a human-readable, one-row-per-line representation of the
// matrix.
func (m Matrix) String() string {
	result := ""
	for i := range m {
		result += fmt.Sprintf("%v\n", m[i])
	}
	return result
}
// Reshape converts the matrix m into a new matrix with dimensions r,c.
func (m Matrix) Reshape(dims ...int) (Tensor, error) {
// check matching size
if len(dims) == 0 {
// scalar
if m.Rows() != 1 || m.Columns() != 1 {
return nil, errors.New("can only reshape a 1x1 matrix to a scalar")
}
}
if len(dims) == 1 {
// vector
if dims[0] != m.Rows()*m.Columns() {
return nil, errors.New("dimensions do not match")
}
}
if len(dims) == 2 {
// matrix
if dims[0]*dims[1] != m.Rows()*m.Columns() {
return nil, errors.New("dimensions do not match")
}
}
return nil, errors.New("more than 2 dimensions is not supported")
} | matrix.go | 0.870281 | 0.627438 | matrix.go | starcoder |
package gogm
import (
"constraints"
"fmt"
"math"
)
// number constrains Vec2's component type to any integer or floating-point
// type.
type number interface {
	constraints.Integer | constraints.Float
}

// Vec2 is a vector with 2 components, of type T.
type Vec2[T number] [2]T

// Vec2CopyVec2 copies the content of src to dst, converting each component
// from T2 to T1 (numeric conversions may truncate or round).
func Vec2CopyVec2[T1, T2 number](dst *Vec2[T1], src *Vec2[T2]) {
	dst[0] = T1(src[0])
	dst[1] = T1(src[1])
}

// Vec2CopyVec3 copies the first two components of src to dst, converting
// them from T2 to T1; the remaining component of src is ignored.
func Vec2CopyVec3[T1, T2 number](dst *Vec2[T1], src *Vec3[T2]) {
	dst[0] = T1(src[0])
	dst[1] = T1(src[1])
}

// Vec2CopyVec4 copies the first two components of src to dst, converting
// them from T2 to T1; the remaining components of src are ignored.
func Vec2CopyVec4[T1, T2 number](dst *Vec2[T1], src *Vec4[T2]) {
	dst[0] = T1(src[0])
	dst[1] = T1(src[1])
}
// String returns a string representation of the vector in the form "{x, y}".
func (v1 *Vec2[T]) String() string {
	return fmt.Sprintf("{%v, %v}", v1[0], v1[1])
}

// Len returns the Euclidean length of the vector. math.Hypot is used, which
// avoids spurious overflow/underflow in the intermediate squares.
func (v1 *Vec2[T]) Len() float64 {
	return math.Hypot(float64(v1[0]), float64(v1[1]))
}

// Normalize normalizes v2, and stores the result in v1.
// Note: the length is converted to T before dividing, which truncates for
// integer component types; a zero-length v2 divides by zero (NaN/Inf for
// floats, a run-time panic for integers).
func (v1 *Vec2[T]) Normalize(v2 *Vec2[T]) {
	l := T(v2.Len())
	v1[0] = v2[0] / l
	v1[1] = v2[1] / l
}

// Inverse sets v1 to the inverse of v2.
// v1 = -v2
func (v1 *Vec2[T]) Inverse(v2 *Vec2[T]) {
	v1[0] = -v2[0]
	v1[1] = -v2[1]
}

// Add adds v2 with v3 component-wise, and stores the result in v1.
// v1 = v2 + v3
func (v1 *Vec2[T]) Add(v2 *Vec2[T], v3 *Vec2[T]) {
	v1[0] = v2[0] + v3[0]
	v1[1] = v2[1] + v3[1]
}

// Sub subtracts v3 from v2 component-wise, and stores the result in v1.
// v1 = v2 - v3
func (v1 *Vec2[T]) Sub(v2 *Vec2[T], v3 *Vec2[T]) {
	v1[0] = v2[0] - v3[0]
	v1[1] = v2[1] - v3[1]
}

// Mul multiplies v2 with v3 component-wise (Hadamard product), and stores
// the result in v1.
// v1 = v2 * v3
func (v1 *Vec2[T]) Mul(v2 *Vec2[T], v3 *Vec2[T]) {
	v1[0] = v2[0] * v3[0]
	v1[1] = v2[1] * v3[1]
}

// Div divides v2 by v3 component-wise, and stores the result in v1.
// v1 = v2 / v3
func (v1 *Vec2[T]) Div(v2 *Vec2[T], v3 *Vec2[T]) {
	v1[0] = v2[0] / v3[0]
	v1[1] = v2[1] / v3[1]
}
// AddS adds s to each component of v2, and stores the result in v1.
// v1 = v2 + s
func (v1 *Vec2[T]) AddS(v2 *Vec2[T], s T) {
	v1[0] = v2[0] + s
	v1[1] = v2[1] + s
}

// SubS subtracts s from each component of v2, and stores the result in v1.
// v1 = v2 - s
func (v1 *Vec2[T]) SubS(v2 *Vec2[T], s T) {
	v1[0] = v2[0] - s
	v1[1] = v2[1] - s
}

// MulS multiplies each component of v2 with s, and stores the result in v1.
// v1 = v2 * s
func (v1 *Vec2[T]) MulS(v2 *Vec2[T], s T) {
	v1[0] = v2[0] * s
	v1[1] = v2[1] * s
}

// DivS divides each component of v2 by s, and stores the result in v1.
// v1 = v2 / s
func (v1 *Vec2[T]) DivS(v2 *Vec2[T], s T) {
	v1[0] = v2[0] / s
	v1[1] = v2[1] / s
}

// Cross takes the 2-D cross product of v1 and v2 (the scalar z component of
// the corresponding 3-D cross product), and returns the result.
func (v1 *Vec2[T]) Cross(v2 *Vec2[T]) T {
	return v1[0]*v2[1] - v1[1]*v2[0]
}

// Dot takes the dot product of v1 and v2, and returns the result.
func (v1 *Vec2[T]) Dot(v2 *Vec2[T]) T {
	return v1[0]*v2[0] + v1[1]*v2[1]
}
package segment
import (
"github.com/scionproto/scion/go/lib/addr"
)
// SrcDstPaths enumerates all possible end-to-end segments between a source
// and destination ISD-AS pair from a given set of segments. For
// constant-bounded segment length, the runtime complexity is linear in the
// number of enumeratable segments starting at the source ISD-AS.
func SrcDstPaths(segments []Segment, srcIA, dstIA addr.IA) []Segment {
	const maxSegLen = 3 // SCION-specific
	buckets := createSegmentBuckets(segments)
	return flattenSeglists(recursiveSrcDstSeglists(maxSegLen, srcIA, dstIA, buckets))
}
// createSegmentBuckets groups segments by their source ISD-AS, dropping
// degenerate segments whose source equals their destination.
func createSegmentBuckets(segments []Segment) map[addr.IA][]Segment {
	buckets := make(map[addr.IA][]Segment, len(segments))
	for _, seg := range segments {
		src, dst := seg.SrcIA(), seg.DstIA()
		if src == dst {
			continue // skip cyclic segments
		}
		buckets[src] = append(buckets[src], seg)
	}
	return buckets
}
// recursiveSrcDstSeglists enumerates every acyclic list of at most maxlen
// segments leading from srcIA to dstIA, expanding through buckets (segments
// grouped by their source ISD-AS).
func recursiveSrcDstSeglists(maxlen int, srcIA, dstIA addr.IA, buckets map[addr.IA][]Segment) [][]Segment {
	if srcIA == dstIA {
		// Base case: one valid, empty segment list.
		return [][]Segment{{}}
	}
	if maxlen <= 0 {
		// Length budget exhausted without reaching the destination.
		return [][]Segment{}
	}
	result := make([][]Segment, 0)
	for _, first := range buckets[srcIA] {
		tails := recursiveSrcDstSeglists(maxlen-1, first.DstIA(), dstIA, buckets)
		for _, tail := range tails {
			// Discard continuations that would revisit srcIA.
			cyclic := false
			for _, seg := range tail {
				if seg.DstIA() == srcIA {
					cyclic = true
					break
				}
			}
			if cyclic {
				continue
			}
			result = append(result, append([]Segment{first}, tail...))
		}
	}
	return result
}
func flattenSeglists(seglists [][]Segment) []Segment {
segments := make([]Segment, 0)
for _, seglist := range seglists {
switch len(seglist) {
case 0: // Skip if segment list is empty
case 1:
segments = append(segments, seglist[0])
default:
segments = append(segments, FromSegments(seglist...))
}
}
return segments
} | segment/enumerate.go | 0.699665 | 0.420897 | enumerate.go | starcoder |
package utils
import (
"github.com/bnert/mfr"
)
// Contains reports whether item occurs in array.
func Contains[T comparable](array []T, item T) bool {
	return mfr.Reduce[T, bool](array, false, func(ctx mfr.Ctx[T], found bool) bool {
		return found || ctx.Item == item
	})
}
// IsAll returns true if every item in the array matches the condition
// function.
func IsAll[T any](array []T, f func(T) bool) bool {
	matched := mfr.Filter(array, func(ctx mfr.Ctx[T]) bool { return f(ctx.Item) })
	return len(matched) == len(array)
}

// IsSome returns true if at least one item in the array matches the
// condition function.
func IsSome[T any](array []T, f func(T) bool) bool {
	matched := mfr.Filter(array, func(ctx mfr.Ctx[T]) bool { return f(ctx.Item) })
	return len(matched) != 0
}

// IsNone returns true if no item in the array matches the condition
// function.
func IsNone[T any](array []T, f func(T) bool) bool {
	matched := mfr.Filter(array, func(ctx mfr.Ctx[T]) bool { return f(ctx.Item) })
	return len(matched) == 0
}
// Reverse returns a new slice holding the elements of array in reverse
// order.
func Reverse[T any](array []T) []T {
	return mfr.Map[T, T](array, func(ctx mfr.Ctx[T]) T {
		last := len(ctx.Array) - 1
		return ctx.Array[last-ctx.Index]
	})
}
// Distinct returns the distinct items of array, keeping first occurrences
// in their original order. An empty or nil input is returned unchanged.
func Distinct[T comparable](array []T) []T {
	if len(array) == 0 {
		return array
	}
	seen := map[T]struct{}{}
	return mfr.Filter(array, func(ctx mfr.Ctx[T]) bool {
		if _, dup := seen[ctx.Item]; dup {
			return false
		}
		seen[ctx.Item] = struct{}{}
		return true
	})
}
// DistinctBy returns the items of array whose key (computed by f) has not
// been seen before, keeping first occurrences in their original order.
func DistinctBy[T any, K comparable](array []T, f func(T) K) []T {
	if len(array) == 0 {
		return array
	}
	seen := map[K]struct{}{}
	return mfr.Filter(array, func(ctx mfr.Ctx[T]) bool {
		key := f(ctx.Item)
		if _, dup := seen[key]; dup {
			return false
		}
		seen[key] = struct{}{}
		return true
	})
}
// IsDistinct returns true if all items in an array are distinct.
func IsDistinct[T comparable](array []T) bool {
if len(array) <= 0 {
return true
}
return len(Distinct(array)) == len(array)
} | utils/checks.go | 0.689515 | 0.57329 | checks.go | starcoder |
package activationfn
import (
"math"
"github.com/azuwey/gonetwork/matrix"
)
// ActivationFunction bundles a named activation function with its
// derivative. Both are factories: given the input matrix they return a
// matrix.ApplyFn, presumably applied per element by the matrix package —
// the indirection lets functions such as softmax precompute whole-matrix
// statistics (e.g. the normalizing sum) once.
type ActivationFunction struct {
	Name string
	// ActivationFn builds the forward function; DeactivationFn builds its
	// derivative.
	ActivationFn, DeactivationFn func(*matrix.Matrix) matrix.ApplyFn
}
// calculateApplySum applies aFn to every element of s and returns the sum
// of the results. An empty or nil slice yields 0.
func calculateApplySum(s []float64, aFn func(float64) float64) float64 {
	var total float64
	for _, x := range s {
		total += aFn(x)
	}
	return total
}
// calculateMax returns the largest value in s, or -Inf for an empty slice.
// math.Max is used deliberately so that NaN inputs propagate as NaN.
func calculateMax(s []float64) float64 {
	largest := math.Inf(-1)
	for _, x := range s {
		largest = math.Max(largest, x)
	}
	return largest
}
// logisticSigmoid is the standard logistic function 1 / (1 + e^-v); outputs
// lie in (0, 1).
var logisticSigmoid *ActivationFunction = &ActivationFunction{
	Name: "LogisticSigmoid",
	ActivationFn: func(_ *matrix.Matrix) matrix.ApplyFn {
		return func(v float64, _ int, _ []float64) float64 {
			return 1 / (1 + math.Exp(-v))
		}
	},
	DeactivationFn: func(_ *matrix.Matrix) matrix.ApplyFn {
		return func(v float64, _ int, _ []float64) float64 {
			// Derivative in terms of the raw input: s(v) * (1 - s(v)).
			v = (1 / (1 + math.Exp(-v)))
			return v * (1 - v)
		}
	},
}
// tanH is the hyperbolic tangent activation; outputs lie in (-1, 1).
var tanH *ActivationFunction = &ActivationFunction{
	Name: "TanH",
	ActivationFn: func(_ *matrix.Matrix) matrix.ApplyFn {
		return func(v float64, _ int, _ []float64) float64 {
			return math.Tanh(v)
		}
	},
	DeactivationFn: func(_ *matrix.Matrix) matrix.ApplyFn {
		return func(v float64, _ int, _ []float64) float64 {
			// d/dv tanh(v) = 1 - tanh^2(v).
			return 1 - math.Pow(math.Tanh(v), 2)
		}
	},
}
// reLU is the rectified linear unit: max(0, v).
var reLU *ActivationFunction = &ActivationFunction{
	Name: "ReLU",
	ActivationFn: func(_ *matrix.Matrix) matrix.ApplyFn {
		return func(v float64, _ int, _ []float64) float64 {
			return math.Max(0, v)
		}
	},
	DeactivationFn: func(_ *matrix.Matrix) matrix.ApplyFn {
		return func(v float64, _ int, _ []float64) float64 {
			// The derivative at v == 0 is mathematically undefined; this
			// implementation chooses 1 there.
			if v >= 0 {
				return 1
			} else {
				return 0
			}
		}
	},
}
// leakyReLU is a ReLU variant that keeps a small slope (0.01) for negative
// inputs instead of clamping them to zero.
var leakyReLU *ActivationFunction = &ActivationFunction{
	Name: "LeakyReLU",
	ActivationFn: func(_ *matrix.Matrix) matrix.ApplyFn {
		return func(v float64, _ int, _ []float64) float64 {
			if v >= 0 {
				return v
			} else {
				return 0.01 * v
			}
		}
	},
	DeactivationFn: func(_ *matrix.Matrix) matrix.ApplyFn {
		return func(v float64, _ int, _ []float64) float64 {
			// Slope of the forward function: 1 for v >= 0, 0.01 otherwise.
			if v >= 0 {
				return 1
			} else {
				return 0.01
			}
		}
	},
}
// softmax maps the matrix values to a probability distribution:
// exp(v) / sum(exp(values)).
var softmax *ActivationFunction = &ActivationFunction{
	Name: "Softmax",
	ActivationFn: func(m *matrix.Matrix) matrix.ApplyFn {
		// The normalizer is computed once over all values of the input.
		sum := calculateApplySum(m.Values, func(v float64) float64 {
			return math.Exp(v)
		})
		return func(v float64, _ int, _ []float64) float64 {
			return math.Exp(v) / sum
		}
	},
	DeactivationFn: func(m *matrix.Matrix) matrix.ApplyFn {
		sum := calculateApplySum(m.Values, func(v float64) float64 {
			return math.Exp(v)
		})
		// NOTE(review): this closure is stateful — vF caches the softmax
		// output of index 0, so it assumes elements are visited in order
		// starting at index 0 (confirm against matrix.Apply). It yields
		// s0*(1-s0) at index 0 and -s0*si elsewhere, i.e. the first row of
		// the softmax Jacobian rather than its diagonal.
		var vF float64
		return func(v float64, idx int, _ []float64) float64 {
			v = math.Exp(v) / sum
			if idx == 0 {
				vF = v
				p := v * (1 - v)
				return p
			} else {
				return -vF * v
			}
		}
	},
}
// stableSoftmax is a numerically stable softmax: values are shifted by the
// maximum before exponentiation so math.Exp cannot overflow. Its derivative
// mirrors softmax's (first row of the Jacobian, via the stateful vF cache —
// see the NOTE on softmax's DeactivationFn).
var stableSoftmax *ActivationFunction = &ActivationFunction{
	Name: "StableSoftmax",
	ActivationFn: func(m *matrix.Matrix) matrix.ApplyFn {
		max := calculateMax(m.Values)
		sum := calculateApplySum(m.Values, func(v float64) float64 {
			return math.Exp(v - max)
		})
		return func(v float64, _ int, _ []float64) float64 {
			return math.Exp(v-max) / sum
		}
	},
	DeactivationFn: func(m *matrix.Matrix) matrix.ApplyFn {
		// The negated maximum is added, which equals subtracting the maximum.
		max := -calculateMax(m.Values)
		sum := calculateApplySum(m.Values, func(v float64) float64 {
			return math.Exp(v + max)
		})
		var vF float64
		return func(v float64, idx int, _ []float64) float64 {
			// Bug fix: exp(v+max)/sum already is the softmax output; the
			// previous version divided by sum a second time, making the
			// derivative inconsistent with ActivationFn and with the plain
			// Softmax variant.
			v = math.Exp(v+max) / sum
			if idx == 0 {
				vF = v
				return v * (1 - v)
			}
			return -vF * v
		}
	},
}
// ActivationFunctions indexes every available activation by its Name for
// lookup.
var ActivationFunctions = map[string]*ActivationFunction{
	logisticSigmoid.Name: logisticSigmoid,
	tanH.Name:            tanH,
	reLU.Name:            reLU,
	leakyReLU.Name:       leakyReLU,
	softmax.Name:         softmax,
	stableSoftmax.Name:   stableSoftmax,
}
package serialization
import (
i "io"
"time"
"github.com/google/uuid"
)
// SerializationWriter defines an interface for serialization of models to a byte array.
type SerializationWriter interface {
	i.Closer
	// WriteStringValue writes a String value to the underlying byte array.
	WriteStringValue(key string, value *string) error
	// WriteBoolValue writes a Bool value to the underlying byte array.
	WriteBoolValue(key string, value *bool) error
	// WriteInt32Value writes an Int32 value to the underlying byte array.
	WriteInt32Value(key string, value *int32) error
	// WriteInt64Value writes an Int64 value to the underlying byte array.
	WriteInt64Value(key string, value *int64) error
	// WriteFloat32Value writes a Float32 value to the underlying byte array.
	WriteFloat32Value(key string, value *float32) error
	// WriteFloat64Value writes a Float64 value to the underlying byte array.
	WriteFloat64Value(key string, value *float64) error
	// WriteByteArrayValue writes a ByteArray value to the underlying byte array.
	WriteByteArrayValue(key string, value []byte) error
	// WriteTimeValue writes a Time value to the underlying byte array.
	WriteTimeValue(key string, value *time.Time) error
	// WriteUUIDValue writes a UUID value to the underlying byte array.
	WriteUUIDValue(key string, value *uuid.UUID) error
	// WriteObjectValue writes a Parsable value to the underlying byte array.
	WriteObjectValue(key string, item Parsable) error
	// WriteCollectionOfObjectValues writes a collection of Parsable values to the underlying byte array.
	WriteCollectionOfObjectValues(key string, collection []Parsable) error
	// WriteCollectionOfStringValues writes a collection of String values to the underlying byte array.
	WriteCollectionOfStringValues(key string, collection []string) error
	// WriteCollectionOfBoolValues writes a collection of Bool values to the underlying byte array.
	WriteCollectionOfBoolValues(key string, collection []bool) error
	// WriteCollectionOfInt32Values writes a collection of Int32 values to the underlying byte array.
	WriteCollectionOfInt32Values(key string, collection []int32) error
	// WriteCollectionOfInt64Values writes a collection of Int64 values to the underlying byte array.
	WriteCollectionOfInt64Values(key string, collection []int64) error
	// WriteCollectionOfFloat32Values writes a collection of Float32 values to the underlying byte array.
	WriteCollectionOfFloat32Values(key string, collection []float32) error
	// WriteCollectionOfFloat64Values writes a collection of Float64 values to the underlying byte array.
	WriteCollectionOfFloat64Values(key string, collection []float64) error
	// WriteCollectionOfTimeValues writes a collection of Time values to the underlying byte array.
	WriteCollectionOfTimeValues(key string, collection []time.Time) error
	// WriteCollectionOfUUIDValues writes a collection of UUID values to the underlying byte array.
	WriteCollectionOfUUIDValues(key string, collection []uuid.UUID) error
	// GetSerializedContent returns the resulting byte array from the serialization writer.
	GetSerializedContent() ([]byte, error)
	// WriteAdditionalData writes additional data to the underlying byte array.
	WriteAdditionalData(value map[string]interface{}) error
}
package graph
//DenseGraph is a data structure representing a simple undirected labelled graph.
//DenseGraph stores the number of vertices, the number of edges, the degree sequence of the graph and stores the edges in a []byte array which has an indicator of an edge being present. The edges are in the order 01, 02, 12, 03, 13, 23... so the edge ij with i < j is in the (j*(j-1))/2 + i place.
//Adding or removing edges are quick operations. Adding a vertex may be quick if the backing array doesn't have to grow but may require copying the entire adjacency matrix. Removing a vertex is generally slow.
//*DenseGraph implements the Graph interface.
type DenseGraph struct {
	NumberOfVertices int
	NumberOfEdges    int
	DegreeSequence   []int
	Edges            []byte
}

//NewDense returns a pointer to a DenseGraph representation of the graph with n vertices and the edges as given in edges.
//The edges are in the order 01, 02, 12, 03, 13, 23... so the edge ij with i < j is in the (j*(j-1))/2 + i place. The *DenseGraph uses its own copy of edges and modifications to edges won't change the current graph.
//*DenseGraph implements the Graph interface.
func NewDense(n int, edges []byte) *DenseGraph {
	if edges == nil {
		edges = make([]byte, (n*(n-1))/2)
		return &DenseGraph{NumberOfVertices: n, NumberOfEdges: 0, DegreeSequence: make([]int, n), Edges: edges}
	}

	if len(edges) != (n*(n-1))/2 {
		panic("Wrong number of edges")
	}

	degrees := make([]int, n)
	m := 0
	// Work on a private copy so later mutations of the caller's slice cannot
	// corrupt the graph, as promised by the doc comment above.
	copyOfEdges := make([]byte, len(edges))
	copy(copyOfEdges, edges)
	index := 0
	for j := 0; j < n; j++ {
		for i := 0; i < j; i++ {
			if copyOfEdges[index] > 0 {
				degrees[i]++
				degrees[j]++
				m++
			}
			index++
		}
	}
	// Bug fix: previously the caller's slice was stored here instead of the
	// defensive copy, breaking the documented ownership contract.
	return &DenseGraph{NumberOfVertices: n, NumberOfEdges: m, DegreeSequence: degrees, Edges: copyOfEdges}
}
//N returns the number of vertices in the graph.
func (g DenseGraph) N() int {
	return g.NumberOfVertices
}

//M returns the number of edges in the graph.
func (g DenseGraph) M() int {
	return g.NumberOfEdges
}
//IsEdge returns true if the undirected edge (i, j) is present in the graph and false otherwise.
//Out-of-range indices (including negative ones) and equal endpoints always yield false.
func (g DenseGraph) IsEdge(i, j int) bool {
	n := g.NumberOfVertices
	if i < 0 || j < 0 || i >= n || j >= n || i == j {
		return false
	}
	if i > j {
		i, j = j, i
	}
	// Packed triangular layout: edge (i, j) with i < j lives at (j*(j-1))/2 + i.
	return g.Edges[(j*(j-1))/2+i] > 0
}
//Neighbours returns the neighbours of v i.e. the vertices u such that (u,v) is an edge.
//In the packed triangular layout, edge (i, j) with i < j sits at (j*(j-1))/2 + i,
//so neighbours u < v come from v's own run of entries and neighbours u > v from
//the column-v entry of each later vertex's run.
func (g DenseGraph) Neighbours(v int) []int {
	degrees := g.DegreeSequence
	// Pre-size the result with v's degree.
	r := make([]int, 0, degrees[v])
	tmp := (v * (v - 1)) / 2
	for i := 0; i < v; i++ {
		index := tmp + i
		if g.Edges[index] > 0 {
			r = append(r, i)
		}
	}
	for i := v + 1; i < g.N(); i++ {
		index := (i*(i-1))/2 + v
		if g.Edges[index] > 0 {
			r = append(r, i)
		}
	}
	return r
}
//Degrees returns a copy of the slice containing the degrees (number of edges incident with the vertex) of each vertex.
func (g DenseGraph) Degrees() []int {
	out := make([]int, len(g.DegreeSequence))
	copy(out, g.DegreeSequence)
	return out
}
//AddEdge modifies the graph by adding the edge (i, j) if it is not already present.
//If the edge is already present (or i == j), this does nothing.
func (g *DenseGraph) AddEdge(i, j int) {
	if i == j || g.IsEdge(i, j) {
		return
	}
	g.DegreeSequence[i]++
	g.DegreeSequence[j]++
	g.NumberOfEdges++
	// Store the indicator at the packed triangular position of (min, max).
	switch {
	case i < j:
		g.Edges[(j*(j-1))/2+i] = 1
	case i > j:
		g.Edges[(i*(i-1))/2+j] = 1
	}
}
//RemoveEdge modifies the graph by removing the edge (i, j) if it is present.
//If the edge is not already present, this does nothing.
func (g *DenseGraph) RemoveEdge(i, j int) {
	if !g.IsEdge(i, j) {
		return
	}
	// Clear the indicator at the packed triangular position of (min, max).
	switch {
	case i < j:
		g.Edges[(j*(j-1))/2+i] = 0
	case i > j:
		g.Edges[(i*(i-1))/2+j] = 0
	}
	g.DegreeSequence[i]--
	g.DegreeSequence[j]--
	g.NumberOfEdges--
}
//AddVertex modifies the graph by appending one new vertex with edges from the new vertex to the vertices in neighbours.
//The new vertex's run of packed triangular entries is appended to the backing
//array, which is reused (and the new entries zeroed) when its capacity
//suffices. NOTE(review): entries in neighbours are assumed to be distinct and
//in range — duplicates or out-of-range values are not rejected here.
func (g *DenseGraph) AddVertex(neighbours []int) {
	oldSize := (g.NumberOfVertices * (g.NumberOfVertices - 1)) / 2
	newSize := oldSize + g.NumberOfVertices
	if cap(g.Edges) >= newSize {
		// Reuse the backing array; zero the newly exposed entries.
		g.Edges = g.Edges[:newSize]
		for i := oldSize; i < newSize; i++ {
			g.Edges[i] = 0
		}
	} else {
		tmp := make([]byte, newSize)
		copy(tmp, g.Edges)
		g.Edges = tmp
	}

	for _, v := range neighbours {
		g.Edges[oldSize+v] = 1
		g.DegreeSequence[v]++
	}
	g.DegreeSequence = append(g.DegreeSequence, len(neighbours))
	g.NumberOfVertices++
	g.NumberOfEdges += len(neighbours)
}
//RemoveVertex modifies the graph by removing the specified vertex. The index of a vertex u > v becomes u - 1 while the index of u < v is unchanged.
//Panics if v >= NumberOfVertices. NOTE(review): a negative v is not rejected
//and would panic on the slice index below.
func (g *DenseGraph) RemoveVertex(v int) {
	if v >= g.NumberOfVertices {
		panic("No such vertex")
	}

	// Update the degree sequence and edge count: every neighbour of v loses
	// one degree and all of v's incident edges disappear.
	g.NumberOfEdges -= g.DegreeSequence[v]
	tmp := (v * (v - 1)) / 2
	for i := 0; i < v; i++ {
		index := tmp + i
		if g.Edges[index] > 0 {
			g.DegreeSequence[i]--
		}
	}
	for i := v + 1; i < g.N(); i++ {
		index := (i*(i-1))/2 + v
		if g.Edges[index] > 0 {
			g.DegreeSequence[i]--
		}
	}
	copy(g.DegreeSequence[v:], g.DegreeSequence[v+1:])
	g.DegreeSequence = g.DegreeSequence[:len(g.DegreeSequence)-1]

	// Compact the packed triangular array in place, skipping every entry that
	// involves v: v's own run and the column-v entry of each later vertex.
	oldIndex := (v*(v+1))/2 - 1
	newIndex := (v * (v - 1)) / 2
	for j := v + 1; j < g.NumberOfVertices; j++ {
		tmp := (j*(j-1))/2 + v
		newIndex += copy(g.Edges[newIndex:], g.Edges[oldIndex+1:tmp])
		oldIndex = tmp
	}
	copy(g.Edges[newIndex:], g.Edges[oldIndex+1:])
	g.NumberOfVertices--
	g.Edges = g.Edges[:(g.NumberOfVertices*(g.NumberOfVertices-1))/2]
}
//InducedSubgraph returns a deep copy of the induced subgraph of g with vertices given in order by V.
//This can also be used to return relabellings of the graph if len(V) = g.N().
func (g *DenseGraph) InducedSubgraph(V []int) EditableGraph {
	n := len(V)
	degrees := make([]int, n)
	edges := make([]byte, (n*(n-1))/2)
	m := 0
	for j := 1; j < n; j++ {
		// Row j of the packed lower triangle starts at (j*(j-1))/2.
		base := (j * (j - 1)) / 2
		for i := 0; i < j; i++ {
			if g.IsEdge(V[i], V[j]) {
				edges[base+i] = 1
				m++
				degrees[i]++
				degrees[j]++
			}
		}
	}
	return &DenseGraph{NumberOfVertices: n, NumberOfEdges: m, DegreeSequence: degrees, Edges: edges}
}
//Copy returns a deep copy of the graph g.
func (g *DenseGraph) Copy() EditableGraph {
	return &DenseGraph{
		NumberOfVertices: g.NumberOfVertices,
		NumberOfEdges:    g.NumberOfEdges,
		DegreeSequence:   append([]int(nil), g.DegreeSequence...),
		Edges:            append([]byte(nil), g.Edges...),
	}
}
//Helper functions for implementing the required functions
//String returns a human readable representation of the graph.
// func (g DenseGraph) String() string {
// var buffer bytes.Buffer
// buffer.WriteString(fmt.Sprintf("Degree: %v \n", g.NumberOfVertices))
// for i := 0; i < g.NumberOfVertices; i++ {
// for j := 0; j < g.NumberOfVertices; j++ {
// if j < i {
// buffer.WriteString(" ")
// } else if j == i {
// buffer.WriteString("0 ")
// } else {
// buffer.WriteString(fmt.Sprintf("%v ", g.Edges[(j*(j-1))/2+i]))
// }
// }
// buffer.WriteString("\n")
// }
// return buffer.String()
// } | graph/graph_dense.go | 0.805211 | 0.875628 | graph_dense.go | starcoder |
package history
import (
"sort"
)
// targetsList is an ascending, duplicate-free list of tree versions.
type targetsList []uint64

// InsertSorted inserts version into the list, keeping it sorted and
// free of duplicates, and returns the updated slice.
func (t targetsList) InsertSorted(version uint64) targetsList {
	// First position whose element is >= version (lower bound).
	i := sort.Search(len(t), func(k int) bool {
		return t[k] >= version
	})
	if i < len(t) && t[i] == version {
		return t // already present
	}
	// Grow by one and shift the tail right to open a slot at i.
	t = append(t, 0)
	copy(t[i+1:], t[i:])
	t[i] = version
	return t
}

// Split partitions the list around version: left holds every element
// strictly below version, right holds the rest. Both halves alias the
// original backing array.
func (t targetsList) Split(version uint64) (left, right targetsList) {
	i := sort.Search(len(t), func(k int) bool {
		return t[k] >= version
	})
	return t[:i], t[i:]
}
// pruneToFindConsistent builds the operation tree that walks the
// version tree of size `version` and gathers what is needed for a
// consistency proof between `index` and `version`. Subtrees containing
// no target resolve from the cache; the leaf for `index` is re-hashed;
// cache reads are wrapped in a collect op once per audit path unless
// the `shortcut` flag says a parent already collected.
func pruneToFindConsistent(index, version uint64) operation {
	var traverse func(pos *position, targets targetsList, shortcut bool) operation
	traverse = func(pos *position, targets targetsList, shortcut bool) operation {
		// No targets below this node: its hash comes straight from cache.
		if len(targets) == 0 {
			if !shortcut {
				return newCollectOp(newGetCacheOp(pos))
			}
			return newGetCacheOp(pos)
		}
		if pos.IsLeaf() {
			// The leaf for index is recomputed rather than read from cache.
			if pos.Index == index {
				return newLeafHashOp(pos, nil)
			}
			if !shortcut {
				return newCollectOp(newGetCacheOp(pos))
			}
			return newGetCacheOp(pos)
		}
		// Exactly one target below, and it is not index: collect this
		// subtree once and stop collecting deeper down (shortcut).
		if len(targets) == 1 && targets[0] != index {
			if !shortcut {
				return newCollectOp(traverse(pos, targets, true))
			}
		}
		rightPos := pos.Right()
		// Targets below rightPos.Index belong to the left child.
		leftTargets, rightTargets := targets.Split(rightPos.Index)
		left := traverse(pos.Left(), leftTargets, shortcut)
		// The right child does not exist yet at this version.
		if version < rightPos.Index {
			return newPartialInnerHashOp(pos, left)
		}
		right := traverse(rightPos, rightTargets, shortcut)
		return newInnerHashOp(pos, left, right)
	}
	targets := make(targetsList, 0)
	targets = targets.InsertSorted(index)
	targets = targets.InsertSorted(version)
	return traverse(newRootPosition(version), targets, false)
}
// pruneToCheckConsistency builds the operation tree used to verify a
// consistency proof between versions start and end. It descends from
// the root of the version-end tree towards both targets; every subtree
// containing no target, and every target leaf, is resolved from the
// cache and collected. Branches beyond `end` are pruned as partial
// inner hashes.
func pruneToCheckConsistency(start, end uint64) operation {
	var traverse func(pos *position, targets targetsList) operation
	traverse = func(pos *position, targets targetsList) operation {
		// No targets below this node: collect its cached hash as-is.
		if len(targets) == 0 {
			return newCollectOp(newGetCacheOp(pos))
		}
		// A target leaf is also read from cache and collected.
		if pos.IsLeaf() {
			return newCollectOp(newGetCacheOp(pos))
		}
		rightPos := pos.Right()
		// Targets below rightPos.Index belong to the left child.
		leftTargets, rightTargets := targets.Split(rightPos.Index)
		left := traverse(pos.Left(), leftTargets)
		// The right child does not exist yet at this version.
		if end < rightPos.Index {
			return newPartialInnerHashOp(pos, left)
		}
		right := traverse(rightPos, rightTargets)
		return newInnerHashOp(pos, left, right)
	}
	targets := make(targetsList, 0)
	targets = targets.InsertSorted(start)
	targets = targets.InsertSorted(end)
	return traverse(newRootPosition(end), targets)
}
package pairings
import (
"fmt"
"math/big"
)
//BN256G2CURVE holds the bn256 base-field modulus, the Fq2 twist
//constant b' = TWISTBX + TWISTBY*i, and the fixed slot indexes used to
//address the six Fq components of a Jacobian G2 point stored in a
//[6]*big.Int (x, y, z each as an Fq2 pair).
type BN256G2CURVE struct {
	FieldModulus *big.Int // prime modulus of the base field Fq
	TWISTBX *big.Int // real part of the twist constant b'
	TWISTBY *big.Int // imaginary part of the twist constant b'
	PTXX uint // slot of the real part of x
	PTXY uint // slot of the imaginary part of x
	PTYX uint // slot of the real part of y
	PTYY uint // slot of the imaginary part of y
	PTZX uint // slot of the real part of z
	PTZY uint // slot of the imaginary part of z
}
// Init returns a BN256G2CURVE initialised with the bn256 field modulus
// and the twist curve constant b' = TWISTBX + TWISTBY*i.
func Init() *BN256G2CURVE {
	// The literals below are known-valid hexadecimal, so the SetString
	// ok-flag is deliberately ignored.
	hex := func(s string) *big.Int {
		v, _ := new(big.Int).SetString(s, 16)
		return v
	}
	return &BN256G2CURVE{
		FieldModulus: hex("30644e72e131a029b85045b68181585d97816a916871ca8d3c208c16d87cfd47"),
		TWISTBX:      hex("2b149d40ceb8aaae81be18991be06ac3b5b4c5e559dbefa33267e6dc24a138e5"),
		TWISTBY:      hex("9713b03af0fed4cd2cafadeed8fdf4a74fa084e52d1852e4a2bd0685c315d2"),
		PTXX:         0,
		PTXY:         1,
		PTYX:         2,
		PTYY:         3,
		PTZX:         4,
		PTZY:         5,
	}
}
// ECTwistAdd adds two G2 points given by their affine Fq2 coordinates
// (x = xx + xy*i, y = yx + yy*i). The all-zero tuple represents the
// point at infinity. It returns the affine coordinates of the sum, or
// an error if an input point is not on the twist.
//
// Fixes vs the previous version: *big.Int values were compared with ==
// against freshly allocated zeros, which compares pointers and is never
// true; and the first infinity test read pt2yx/pt2yy instead of
// pt1yx/pt1yy.
func (crv *BN256G2CURVE) ECTwistAdd(pt1xx *big.Int, pt1xy *big.Int,
	pt1yx *big.Int, pt1yy *big.Int,
	pt2xx *big.Int, pt2xy *big.Int,
	pt2yx *big.Int, pt2yy *big.Int) (*big.Int, *big.Int, *big.Int, *big.Int, error) {
	// isZero reports whether all four Fq2 coordinates are numerically zero.
	isZero := func(a, b, c, d *big.Int) bool {
		return a.Sign() == 0 && b.Sign() == 0 && c.Sign() == 0 && d.Sign() == 0
	}
	if isZero(pt1xx, pt1xy, pt1yx, pt1yy) {
		// pt1 is the point at infinity: the sum is pt2 (validated first).
		if !isZero(pt2xx, pt2xy, pt2yx, pt2yy) {
			if !crv.isOnCurve(pt2xx, pt2xy, pt2yx, pt2yy) {
				return nil, nil, nil, nil, fmt.Errorf("The point is not in the curve")
			}
		}
		return pt2xx, pt2xy, pt2yx, pt2yy, nil
	} else if isZero(pt2xx, pt2xy, pt2yx, pt2yy) {
		// pt2 is the point at infinity: the sum is pt1 (validated first).
		if !crv.isOnCurve(pt1xx, pt1xy, pt1yx, pt1yy) {
			return nil, nil, nil, nil, fmt.Errorf("The point is not in the curve")
		}
		return pt1xx, pt1xy, pt1yx, pt1yy, nil
	}
	if !crv.isOnCurve(pt2xx, pt2xy, pt2yx, pt2yy) {
		return nil, nil, nil, nil, fmt.Errorf("The point is not in the curve")
	}
	if !crv.isOnCurve(pt1xx, pt1xy, pt1yx, pt1yy) {
		return nil, nil, nil, nil, fmt.Errorf("The point is not in the curve")
	}
	// Lift both points to Jacobian coordinates (z = 1 + 0i), add, and
	// project the result back to affine.
	pt3 := crv.ecTwistAddJacobian(pt1xx, pt1xy, pt1yx, pt1yy, big.NewInt(1), big.NewInt(0), pt2xx, pt2xy, pt2yx, pt2yy, big.NewInt(1), big.NewInt(0))
	j1, j2, j3, j4 := crv.fromJacobian(pt3[crv.PTXX], pt3[crv.PTXY], pt3[crv.PTYX], pt3[crv.PTYY], pt3[crv.PTZX], pt3[crv.PTZY])
	return j1, j2, j3, j4, nil
}
// isOnCurve reports whether the affine Fq2 point with x = xx + xy*i and
// y = yx + yy*i satisfies the twist equation y^2 = x^3 + b'.
//
// Fixes vs the previous version: the signature listed the y components
// as (yy, yx) while every caller passes (yx, yy) positionally, so the
// real/imaginary parts of y were swapped; and the final zero test
// compared *big.Int pointers against fresh allocations, which is never
// true, so the function always reported false.
func (crv *BN256G2CURVE) isOnCurve(xx *big.Int, xy *big.Int, yx *big.Int, yy *big.Int) bool {
	yyx, yyy := crv.fq2mul(yx, yy, yx, yy)      // y^2
	xxxx, xxxy := crv.fq2mul(xx, xy, xx, xy)    // x^2
	xxxx, xxxy = crv.fq2mul(xxxx, xxxy, xx, xy) // x^3
	yyx, yyy = crv.fq2sub(yyx, yyy, xxxx, xxxy) // y^2 - x^3
	yyx, yyy = crv.fq2sub(yyx, yyy, crv.TWISTBX, crv.TWISTBY)
	// Compare values, not pointers.
	return yyx.Sign() == 0 && yyy.Sign() == 0
}
// fq2mul multiplies two Fq2 elements, (xx + xy*i)*(yx + yy*i), with
// i^2 = -1:
//
//	real: xx*yx - xy*yy
//	imag: xx*yy + xy*yx
//
// The previous version used submod for the imaginary part, which is
// incorrect (the reference BN256G2 implementation uses addmod).
func (crv *BN256G2CURVE) fq2mul(xx *big.Int, xy *big.Int, yx *big.Int, yy *big.Int) (out1 *big.Int, out2 *big.Int) {
	out1 = submod(mulmod(xx, yx, crv.FieldModulus), mulmod(xy, yy, crv.FieldModulus), crv.FieldModulus)
	out2 = addmod(mulmod(xx, yy, crv.FieldModulus), mulmod(xy, yx, crv.FieldModulus), crv.FieldModulus)
	return
}
// fq2sub subtracts two Fq2 elements component-wise:
// (xx + xy*i) - (yx + yy*i) = (xx-yx) + (xy-yy)*i.
//
// The previous version assigned the real difference to ry and the
// imaginary difference to rx, swapping the components of the result.
func (crv *BN256G2CURVE) fq2sub(xx *big.Int, xy *big.Int, yx *big.Int, yy *big.Int) (rx *big.Int, ry *big.Int) {
	rx = submod(xx, yx, crv.FieldModulus)
	ry = submod(xy, yy, crv.FieldModulus)
	return
}
// submod returns (a - b) mod n, computed as (a + (n - b)) mod n so the
// intermediate stays non-negative for 0 <= b <= n.
func submod(a *big.Int, b *big.Int, n *big.Int) *big.Int {
	return addmod(a, new(big.Int).Sub(n, b), n)
}

// addmod returns (x + y) mod k.
// The previous version called Add on a nil named return value, which
// panics on first use.
func addmod(x *big.Int, y *big.Int, k *big.Int) *big.Int {
	return new(big.Int).Mod(new(big.Int).Add(x, y), k)
}

// mulmod returns (x * y) mod k.
// The previous version called Mul on a nil named return value, which
// panics on first use.
func mulmod(x *big.Int, y *big.Int, k *big.Int) *big.Int {
	return new(big.Int).Mod(new(big.Int).Mul(x, y), k)
}
// fq2muc multiplies the Fq2 element (xx + xy*i) by the base-field
// scalar c, component-wise.
func (crv *BN256G2CURVE) fq2muc(xx *big.Int, xy *big.Int, c *big.Int) (*big.Int, *big.Int) {
	rx := mulmod(xx, c, crv.FieldModulus)
	ry := mulmod(xy, c, crv.FieldModulus)
	return rx, ry
}
// ecTwistDoubleJacobian doubles the Jacobian-projective Fq2 point
// (x, y, z), where each coordinate is an Fq2 pair (…x real, …y
// imaginary). The per-line comments below track the standard projective
// doubling formulas: W = 3x^2, S = yz, B = xyS, H = W^2 - 8B, with
// newx = 2HS, newy = W(4B - H) - 8y^2 S^2, newz = 8S^3.
// The parameter variables are rebound, not mutated, so the caller's
// big.Ints are left untouched.
func (crv *BN256G2CURVE) ecTwistDoubleJacobian(pt1xx *big.Int, pt1xy *big.Int,
	pt1yx *big.Int, pt1yy *big.Int,
	pt1zx *big.Int, pt1zy *big.Int) (pt2xx, pt2xy,
	pt2yx *big.Int, pt2yy *big.Int,
	pt2zx *big.Int, pt2zy *big.Int) {
	pt2xx, pt2xy = crv.fq2muc(pt1xx, pt1xy, big.NewInt(3)) // 3 * x
	pt2xx, pt2xy = crv.fq2mul(pt2xx, pt2xy, pt1xx, pt1xy) // W = 3 * x * x
	pt1zx, pt1zy = crv.fq2mul(pt1yx, pt1yy, pt1zx, pt1zy) // S = y * z
	pt2yx, pt2yy = crv.fq2mul(pt1xx, pt1xy, pt1yx, pt1yy) // x * y
	pt2yx, pt2yy = crv.fq2mul(pt2yx, pt2yy, pt1zx, pt1zy) // B = x * y * S
	pt1xx, pt1xy = crv.fq2mul(pt2xx, pt2xy, pt2xx, pt2xy) // W * W
	pt2zx, pt2zy = crv.fq2muc(pt2yx, pt2yy, big.NewInt(8)) // 8 * B
	pt1xx, pt1xy = crv.fq2sub(pt1xx, pt1xy, pt2zx, pt2zy) // H = W * W - 8 * B
	pt2zx, pt2zy = crv.fq2mul(pt1zx, pt1zy, pt1zx, pt1zy) // S_squared = S * S
	pt2yx, pt2yy = crv.fq2muc(pt2yx, pt2yy, big.NewInt(4)) // 4 * B
	pt2yx, pt2yy = crv.fq2sub(pt2yx, pt2yy, pt1xx, pt1xy) // 4 * B - H
	pt2yx, pt2yy = crv.fq2mul(pt2yx, pt2yy, pt2xx, pt2xy) // W * (4 * B - H)
	pt2xx, pt2xy = crv.fq2muc(pt1yx, pt1yy, big.NewInt(8)) // 8 * y
	pt2xx, pt2xy = crv.fq2mul(pt2xx, pt2xy, pt1yx, pt1yy) // 8 * y * y
	pt2xx, pt2xy = crv.fq2mul(pt2xx, pt2xy, pt2zx, pt2zy) // 8 * y * y * S_squared
	pt2yx, pt2yy = crv.fq2sub(pt2yx, pt2yy, pt2xx, pt2xy) // newy = W * (4 * B - H) - 8 * y * y * S_squared
	pt2xx, pt2xy = crv.fq2muc(pt1xx, pt1xy, big.NewInt(2)) // 2 * H
	pt2xx, pt2xy = crv.fq2mul(pt2xx, pt2xy, pt1zx, pt1zy) // newx = 2 * H * S
	pt2zx, pt2zy = crv.fq2mul(pt1zx, pt1zy, pt2zx, pt2zy) // S * S_squared
	pt2zx, pt2zy = crv.fq2muc(pt2zx, pt2zy, big.NewInt(8)) // newz = 8 * S * S_squared
	return
}
// ecTwistAddJacobian adds two Jacobian-projective Fq2 points and
// returns the result as the six Fq components indexed by the PT*
// constants. A zero z coordinate marks the point at infinity; equal x
// with equal y triggers doubling, equal x with different y yields the
// degenerate (1+0i, 1+0i, 0) result.
//
// Fixes vs the previous version: zero tests and coordinate-equality
// tests compared *big.Int pointers with ==, which never compares
// numeric values; they now use Sign and Cmp.
func (crv *BN256G2CURVE) ecTwistAddJacobian(pt1xx *big.Int, pt1xy *big.Int,
	pt1yx *big.Int, pt1yy *big.Int,
	pt1zx *big.Int, pt1zy *big.Int,
	pt2xx *big.Int, pt2xy *big.Int,
	pt2yx *big.Int, pt2yy *big.Int,
	pt2zx *big.Int, pt2zy *big.Int) (pt3 [6]*big.Int) {
	zero := big.NewInt(0)
	one := big.NewInt(1)
	if pt1zx.Sign() == 0 && pt1zy.Sign() == 0 {
		// pt1 is the point at infinity: the sum is pt2.
		pt3[crv.PTXX], pt3[crv.PTXY], pt3[crv.PTYX], pt3[crv.PTYY], pt3[crv.PTZX], pt3[crv.PTZY] = pt2xx, pt2xy, pt2yx, pt2yy, pt2zx, pt2zy
		return
	} else if pt2zx.Sign() == 0 && pt2zy.Sign() == 0 {
		// pt2 is the point at infinity: the sum is pt1.
		pt3[crv.PTXX], pt3[crv.PTXY], pt3[crv.PTYX], pt3[crv.PTYY], pt3[crv.PTZX], pt3[crv.PTZY] = pt1xx, pt1xy, pt1yx, pt1yy, pt1zx, pt1zy
		return
	}
	pt2yx, pt2yy = crv.fq2mul(pt2yx, pt2yy, pt1zx, pt1zy) // U1 = y2 * z1
	pt3[crv.PTYX], pt3[crv.PTYY] = crv.fq2mul(pt1yx, pt1yy, pt2zx, pt2zy) // U2 = y1 * z2
	pt2xx, pt2xy = crv.fq2mul(pt2xx, pt2xy, pt1zx, pt1zy) // V1 = x2 * z1
	pt3[crv.PTZX], pt3[crv.PTZY] = crv.fq2mul(pt1xx, pt1xy, pt2zx, pt2zy) // V2 = x1 * z2
	if pt2xx.Cmp(pt3[crv.PTZX]) == 0 && pt2xy.Cmp(pt3[crv.PTZY]) == 0 {
		if pt2yx.Cmp(pt3[crv.PTYX]) == 0 && pt2yy.Cmp(pt3[crv.PTYY]) == 0 {
			// Same affine point: fall back to doubling.
			pt3[crv.PTXX], pt3[crv.PTXY], pt3[crv.PTYX], pt3[crv.PTYY], pt3[crv.PTZX], pt3[crv.PTZY] = crv.ecTwistDoubleJacobian(pt1xx, pt1xy, pt1yx, pt1yy, pt1zx, pt1zy)
			return
		}
		// Same x, different y: the sum is the point at infinity (z = 0).
		pt3[crv.PTXX], pt3[crv.PTXY], pt3[crv.PTYX], pt3[crv.PTYY], pt3[crv.PTZX], pt3[crv.PTZY] = one, zero, one, zero, zero, zero
		return
	}
	pt2zx, pt2zy = crv.fq2mul(pt1zx, pt1zy, pt2zx, pt2zy) // W = z1 * z2
	pt1xx, pt1xy = crv.fq2sub(pt2yx, pt2yy, pt3[crv.PTYX], pt3[crv.PTYY]) // U = U1 - U2
	pt1yx, pt1yy = crv.fq2sub(pt2xx, pt2xy, pt3[crv.PTZX], pt3[crv.PTZY]) // V = V1 - V2
	pt1zx, pt1zy = crv.fq2mul(pt1yx, pt1yy, pt1yx, pt1yy) // V_squared = V * V
	pt2yx, pt2yy = crv.fq2mul(pt1zx, pt1zy, pt3[crv.PTZX], pt3[crv.PTZY]) // V_squared_times_V2 = V_squared * V2
	pt1zx, pt1zy = crv.fq2mul(pt1zx, pt1zy, pt1yx, pt1yy) // V_cubed = V * V_squared
	pt3[crv.PTZX], pt3[crv.PTZY] = crv.fq2mul(pt1zx, pt1zy, pt2zx, pt2zy) // newz = V_cubed * W
	pt2xx, pt2xy = crv.fq2mul(pt1xx, pt1xy, pt1xx, pt1xy) // U * U
	pt2xx, pt2xy = crv.fq2mul(pt2xx, pt2xy, pt2zx, pt2zy) // U * U * W
	pt2xx, pt2xy = crv.fq2sub(pt2xx, pt2xy, pt1zx, pt1zy) // U * U * W - V_cubed
	pt2zx, pt2zy = crv.fq2muc(pt2yx, pt2yy, big.NewInt(2)) // 2 * V_squared_times_V2
	pt2xx, pt2xy = crv.fq2sub(pt2xx, pt2xy, pt2zx, pt2zy) // A = U * U * W - V_cubed - 2 * V_squared_times_V2
	pt3[crv.PTXX], pt3[crv.PTXY] = crv.fq2mul(pt1yx, pt1yy, pt2xx, pt2xy) // newx = V * A
	pt1yx, pt1yy = crv.fq2sub(pt2yx, pt2yy, pt2xx, pt2xy) // V_squared_times_V2 - A
	pt1yx, pt1yy = crv.fq2mul(pt1xx, pt1xy, pt1yx, pt1yy) // U * (V_squared_times_V2 - A)
	pt1xx, pt1xy = crv.fq2mul(pt1zx, pt1zy, pt3[crv.PTYX], pt3[crv.PTYY]) // V_cubed * U2
	pt3[crv.PTYX], pt3[crv.PTYY] = crv.fq2sub(pt1yx, pt1yy, pt1xx, pt1xy) // newy = U * (V_squared_times_V2 - A) - V_cubed * U2
	return
}
// fq2inv returns the multiplicative inverse of the Fq2 element
// (x + y*i): the conjugate (x - y*i) divided by the norm x^2 + y^2,
// with the division realised via modInv of the norm. The negated
// imaginary component is reduced by subtracting from the modulus.
func (crv *BN256G2CURVE) fq2inv(x *big.Int, y *big.Int) (*big.Int, *big.Int) {
	inv := modInv(addmod(mulmod(y, y, crv.FieldModulus), mulmod(x, x, crv.FieldModulus), crv.FieldModulus), crv.FieldModulus)
	return mulmod(x, inv, crv.FieldModulus), big.NewInt(0).Sub(crv.FieldModulus, mulmod(y, inv, crv.FieldModulus))
}
// modInv returns the multiplicative inverse of a modulo n, or 0 if no
// inverse exists (gcd(a, n) != 1).
//
// The previous hand-rolled extended-Euclid loop compared *big.Int
// pointers ("newR != big.NewInt(0)"), which is always true, so it never
// terminated normally and panicked on division by zero. The standard
// library implementation replaces it.
func modInv(a *big.Int, n *big.Int) (t *big.Int) {
	t = new(big.Int).ModInverse(a, n)
	if t == nil {
		// a and n are not coprime; report 0 rather than returning nil.
		t = big.NewInt(0)
	}
	return
}
func (crv *BN256G2CURVE) fromJacobian(
pt1xx *big.Int, pt1xy *big.Int,
pt1yx *big.Int, pt1yy *big.Int,
pt1zx *big.Int, pt1zy *big.Int) (pt2xx *big.Int, pt2xy *big.Int,
pt2yx *big.Int, pt2yy *big.Int) {
invzx, invzy := big.NewInt(0), big.NewInt(0)
invzx, invzy = crv.fq2inv(pt1zx, pt1zy)
pt2xx, pt2xy = crv.fq2mul(pt1xx, pt1xy, invzx, invzy)
pt2yx, pt2yy = crv.fq2mul(pt1yx, pt1yy, invzx, invzy)
return
} | utils/pairings/bn256g.go | 0.719384 | 0.453141 | bn256g.go | starcoder |
package data
import (
"fmt"
"github.com/axiom-org/axiom/util"
)
// Money is measured in "microaxioms".
// A million microaxioms = Ax$1.
// The target is for Ax$1 to be worth roughly $1 in USD.
// The price of decentralized storage is currently pegged at $3 per gigabyte per month.
// The goal is 1/3 goes to the file host, 1/3 to the app developer, 1/3 to the protocol
// developers. So that means that if you host a single kilobyte for a month you make
// one microaxiom.
// Currently TotalMoney is approximately the size of hosting 333G of files on the initial
// seed servers. Once payment is in place, the total amount of money can float.
// TotalMoney is the fixed total money supply, in microaxioms.
const TotalMoney = 1e9

// CostPerMegabyteMonth is the price, in microaxioms, of storing one
// megabyte for one month (i.e. $3 per gigabyte per month at the target
// exchange rate).
const CostPerMegabyteMonth = 3000

// Account is the per-owner ledger state.
type Account struct {
	Owner string `json:"owner"`
	// The sequence id of the last operation authorized by this account.
	// 0 means there have never been any authorized operations.
	// Used to prevent replay attacks.
	Sequence uint32 `json:"sequence"`
	// The current balance of this account.
	Balance uint64 `json:"balance"`
	// How much total bucket size, in megabytes, this account is currently storing.
	Storage uint32 `json:"storage"`
}
// StringifyAccount renders an account as "owner:s<sequence>:b<balance>"
// with a shortened owner key, or "nil" for a nil account. For debugging.
func StringifyAccount(a *Account) string {
	if a == nil {
		return "nil"
	}
	return fmt.Sprintf("%s:s%d:b%d", util.Shorten(a.Owner), a.Sequence, a.Balance)
}
// CheckEqual returns nil when both accounts are nil or when their
// owner, sequence and balance all match; otherwise it returns a
// descriptive error.
// NOTE(review): the Storage field is not compared — confirm this is
// intentional.
func (a *Account) CheckEqual(other *Account) error {
	if a == nil && other == nil {
		return nil
	}
	// Exactly one of the two is nil.
	if a == nil || other == nil {
		return fmt.Errorf("a != other. a is %+v, other is %+v", a, other)
	}
	if a.Owner != other.Owner {
		return fmt.Errorf("owner %s != owner %s", a.Owner, other.Owner)
	}
	if a.Sequence != other.Sequence {
		return fmt.Errorf("data mismatch for owner %s: seq %d != seq %d",
			a.Owner, a.Sequence, other.Sequence)
	}
	if a.Balance != other.Balance {
		return fmt.Errorf("data mismatch for owner %s: balance %d != balance %d",
			a.Owner, a.Balance, other.Balance)
	}
	return nil
}
// Bytes returns a canonical "owner:sequence:balance" byte encoding of
// the account. NOTE(review): Storage is not included — presumably
// deliberate; keep in sync with CheckEqual.
func (a *Account) Bytes() []byte {
	return []byte(fmt.Sprintf("%s:%d:%d", a.Owner, a.Sequence, a.Balance))
}
// ValidateSendOperation reports whether the account balance can cover
// the operation's amount plus fee. The uint64 sum is checked for
// overflow so a crafted amount+fee that wraps around cannot validate
// against a small balance.
func (a *Account) ValidateSendOperation(op *SendOperation) bool {
	cost := op.Amount + op.Fee
	if cost < op.Amount {
		// Overflow: the true cost exceeds any representable balance.
		return false
	}
	return cost <= a.Balance
}
// CanAddStorage reports whether the account balance can cover the
// monthly cost of its current storage plus amount additional megabytes.
// The addition is widened to uint64 first so a large amount cannot wrap
// the uint32 total and pass the check; the subsequent product cannot
// overflow uint64 (at most 2^33 * 3000).
func (a *Account) CanAddStorage(amount uint32) bool {
	possible := uint64(a.Storage) + uint64(amount)
	return CostPerMegabyteMonth*possible <= a.Balance
}
func (a *Account) Copy() *Account {
copy := *a
return ©
} | data/account.go | 0.666605 | 0.467393 | account.go | starcoder |
package util
import (
"errors"
"image"
"image/color"
)
// Matrix2Image converts an RGBA matrix (rows x cols x 4, channel order
// R, G, B, A) into an image.Image, or returns an error when the matrix
// has no rows or no columns.
//
// The height check now happens before indexing imgMatrix[0], so an
// empty matrix returns the error instead of panicking.
func Matrix2Image(imgMatrix [][][]uint8) (image.Image, error) {
	height := len(imgMatrix)
	if height == 0 {
		return nil, errors.New("the input of matrix is illegal")
	}
	width := len(imgMatrix[0])
	if width == 0 {
		return nil, errors.New("the input of matrix is illegal")
	}
	nrgba := image.NewNRGBA(image.Rect(0, 0, width, height))
	for i := 0; i < height; i++ {
		for j := 0; j < width; j++ {
			// assumes every row has width columns and 4 channels — TODO confirm
			nrgba.SetNRGBA(j, i, color.NRGBA{R: imgMatrix[i][j][0], G: imgMatrix[i][j][1], B: imgMatrix[i][j][2], A: imgMatrix[i][j][3]})
		}
	}
	return nrgba, nil
}
// Image2Matrix converts an image into a rows x cols x 4 matrix holding
// the non-premultiplied R, G, B, A channels of each pixel.
//
// Pixels are read directly from the NRGBA conversion instead of via
// Color.RGBA(): RGBA() returns 16-bit alpha-premultiplied values, and
// truncating those with uint8() corrupted every translucent pixel.
func Image2Matrix(origin image.Image) [][][]uint8 {
	// convertToNRGBA always yields an image whose bounds start at (0, 0).
	nrgba := convertToNRGBA(origin)
	width := nrgba.Bounds().Max.X
	height := nrgba.Bounds().Max.Y
	imgMatrix := NewRGBAMatrix(height, width)
	for i := 0; i < height; i++ {
		for j := 0; j < width; j++ {
			c := nrgba.NRGBAAt(j, i)
			imgMatrix[i][j][0] = c.R
			imgMatrix[i][j][1] = c.G
			imgMatrix[i][j][2] = c.B
			imgMatrix[i][j][3] = c.A
		}
	}
	return imgMatrix
}
// NewRGBAMatrix allocates an x-by-y matrix with 4 channels (R, G, B, A)
// per cell, all zero-initialised.
func NewRGBAMatrix(x int, y int) [][][]uint8 {
	return new3DSlice(x, y, 4)
}

// new3DSlice allocates a zero-initialised x-by-y-by-z slice.
func new3DSlice(x int, y int, z int) [][][]uint8 {
	s := make([][][]uint8, x)
	for i := range s {
		s[i] = make([][]uint8, y)
		for j := range s[i] {
			s[i][j] = make([]uint8, z)
		}
	}
	return s
}
// convertToNRGBA converts any image.Image into a freshly allocated
// *image.NRGBA whose bounds start at (0, 0). Known source formats are
// handled with direct pixel-buffer access for speed; anything else
// falls back to the generic color-model conversion.
func convertToNRGBA(origin image.Image) *image.NRGBA {
	srcBounds := origin.Bounds()
	// Translate so the destination starts at the origin.
	dstBounds := srcBounds.Sub(srcBounds.Min)
	dst := image.NewNRGBA(dstBounds)
	dstMinX := dstBounds.Min.X
	dstMinY := dstBounds.Min.Y
	srcMinX := srcBounds.Min.X
	srcMinY := srcBounds.Min.Y
	srcMaxX := srcBounds.Max.X
	srcMaxY := srcBounds.Max.Y
	switch src0 := origin.(type) {
	case *image.NRGBA:
		// Identical layout: copy whole rows at a time.
		rowSize := srcBounds.Dx() * 4
		numRows := srcBounds.Dy()
		i0 := dst.PixOffset(dstMinX, dstMinY)
		j0 := src0.PixOffset(srcMinX, srcMinY)
		di := dst.Stride
		dj := src0.Stride
		for row := 0; row < numRows; row++ {
			copy(dst.Pix[i0:i0+rowSize], src0.Pix[j0:j0+rowSize])
			i0 += di
			j0 += dj
		}
	case *image.NRGBA64:
		// Keep the high byte of each 16-bit channel (offsets 0/2/4/6).
		i0 := dst.PixOffset(dstMinX, dstMinY)
		for y := srcMinY; y < srcMaxY; y, i0 = y+1, i0+dst.Stride {
			for x, i := srcMinX, i0; x < srcMaxX; x, i = x+1, i+4 {
				j := src0.PixOffset(x, y)
				dst.Pix[i+0] = src0.Pix[j+0]
				dst.Pix[i+1] = src0.Pix[j+2]
				dst.Pix[i+2] = src0.Pix[j+4]
				dst.Pix[i+3] = src0.Pix[j+6]
			}
		}
	case *image.RGBA:
		// Un-premultiply the alpha channel; the a == 0 and a == 0xff
		// cases avoid the division.
		i0 := dst.PixOffset(dstMinX, dstMinY)
		for y := srcMinY; y < srcMaxY; y, i0 = y+1, i0+dst.Stride {
			for x, i := srcMinX, i0; x < srcMaxX; x, i = x+1, i+4 {
				j := src0.PixOffset(x, y)
				a := src0.Pix[j+3]
				dst.Pix[i+3] = a
				switch a {
				case 0:
					dst.Pix[i+0] = 0
					dst.Pix[i+1] = 0
					dst.Pix[i+2] = 0
				case 0xff:
					dst.Pix[i+0] = src0.Pix[j+0]
					dst.Pix[i+1] = src0.Pix[j+1]
					dst.Pix[i+2] = src0.Pix[j+2]
				default:
					dst.Pix[i+0] = uint8(uint16(src0.Pix[j+0]) * 0xff / uint16(a))
					dst.Pix[i+1] = uint8(uint16(src0.Pix[j+1]) * 0xff / uint16(a))
					dst.Pix[i+2] = uint8(uint16(src0.Pix[j+2]) * 0xff / uint16(a))
				}
			}
		}
	case *image.RGBA64:
		// Un-premultiply using the high byte of each 16-bit channel.
		i0 := dst.PixOffset(dstMinX, dstMinY)
		for y := srcMinY; y < srcMaxY; y, i0 = y+1, i0+dst.Stride {
			for x, i := srcMinX, i0; x < srcMaxX; x, i = x+1, i+4 {
				j := src0.PixOffset(x, y)
				a := src0.Pix[j+6]
				dst.Pix[i+3] = a
				switch a {
				case 0:
					dst.Pix[i+0] = 0
					dst.Pix[i+1] = 0
					dst.Pix[i+2] = 0
				case 0xff:
					dst.Pix[i+0] = src0.Pix[j+0]
					dst.Pix[i+1] = src0.Pix[j+2]
					dst.Pix[i+2] = src0.Pix[j+4]
				default:
					dst.Pix[i+0] = uint8(uint16(src0.Pix[j+0]) * 0xff / uint16(a))
					dst.Pix[i+1] = uint8(uint16(src0.Pix[j+2]) * 0xff / uint16(a))
					dst.Pix[i+2] = uint8(uint16(src0.Pix[j+4]) * 0xff / uint16(a))
				}
			}
		}
	case *image.Gray:
		// Replicate the grey value into R, G and B; fully opaque.
		i0 := dst.PixOffset(dstMinX, dstMinY)
		for y := srcMinY; y < srcMaxY; y, i0 = y+1, i0+dst.Stride {
			for x, i := srcMinX, i0; x < srcMaxX; x, i = x+1, i+4 {
				j := src0.PixOffset(x, y)
				c := src0.Pix[j]
				dst.Pix[i+0] = c
				dst.Pix[i+1] = c
				dst.Pix[i+2] = c
				dst.Pix[i+3] = 0xff
			}
		}
	case *image.Gray16:
		// As Gray, keeping the high byte of the 16-bit value.
		i0 := dst.PixOffset(dstMinX, dstMinY)
		for y := srcMinY; y < srcMaxY; y, i0 = y+1, i0+dst.Stride {
			for x, i := srcMinX, i0; x < srcMaxX; x, i = x+1, i+4 {
				j := src0.PixOffset(x, y)
				c := src0.Pix[j]
				dst.Pix[i+0] = c
				dst.Pix[i+1] = c
				dst.Pix[i+2] = c
				dst.Pix[i+3] = 0xff
			}
		}
	case *image.YCbCr:
		// Convert each luma/chroma sample to RGB; fully opaque.
		i0 := dst.PixOffset(dstMinX, dstMinY)
		for y := srcMinY; y < srcMaxY; y, i0 = y+1, i0+dst.Stride {
			for x, i := srcMinX, i0; x < srcMaxX; x, i = x+1, i+4 {
				yj := src0.YOffset(x, y)
				cj := src0.COffset(x, y)
				r, g, b := color.YCbCrToRGB(src0.Y[yj], src0.Cb[cj], src0.Cr[cj])
				dst.Pix[i+0] = r
				dst.Pix[i+1] = g
				dst.Pix[i+2] = b
				dst.Pix[i+3] = 0xff
			}
		}
	default:
		// Generic fallback via the color model (slow path).
		i0 := dst.PixOffset(dstMinX, dstMinY)
		for y := srcMinY; y < srcMaxY; y, i0 = y+1, i0+dst.Stride {
			for x, i := srcMinX, i0; x < srcMaxX; x, i = x+1, i+4 {
				c := color.NRGBAModel.Convert(origin.At(x, y)).(color.NRGBA)
				dst.Pix[i+0] = c.R
				dst.Pix[i+1] = c.G
				dst.Pix[i+2] = c.B
				dst.Pix[i+3] = c.A
			}
		}
	}
	return dst
}
package remotewrite
import (
"strings"
"unicode"
)
// table pairs the rune set allowed for the first character of a name
// with the (typically larger) set allowed for all remaining characters.
type table struct {
	First *unicode.RangeTable
	Rest *unicode.RangeTable
}

// metricNameTable matches the Prometheus metric-name grammar
// [a-zA-Z_:][a-zA-Z0-9_:]*.
var metricNameTable = table{
	First: &unicode.RangeTable{
		R16: []unicode.Range16{
			{0x003A, 0x003A, 1}, // :
			{0x0041, 0x005A, 1}, // A-Z
			{0x005F, 0x005F, 1}, // _
			{0x0061, 0x007A, 1}, // a-z
		},
		LatinOffset: 4,
	},
	Rest: &unicode.RangeTable{
		R16: []unicode.Range16{
			{0x0030, 0x003A, 1}, // 0-:
			{0x0041, 0x005A, 1}, // A-Z
			{0x005F, 0x005F, 1}, // _
			{0x0061, 0x007A, 1}, // a-z
		},
		LatinOffset: 4,
	},
}

// labelNameTable matches the Prometheus label-name grammar
// [a-zA-Z_][a-zA-Z0-9_]* (no colon, unlike metric names).
var labelNameTable = table{
	First: &unicode.RangeTable{
		R16: []unicode.Range16{
			{0x0041, 0x005A, 1}, // A-Z
			{0x005F, 0x005F, 1}, // _
			{0x0061, 0x007A, 1}, // a-z
		},
		LatinOffset: 3,
	},
	Rest: &unicode.RangeTable{
		R16: []unicode.Range16{
			{0x0030, 0x0039, 1}, // 0-9
			{0x0041, 0x005A, 1}, // A-Z
			{0x005F, 0x005F, 1}, // _
			{0x0061, 0x007A, 1}, // a-z
		},
		LatinOffset: 4,
	},
}
// isValid reports whether name is non-empty, its first rune is in
// table.First, and every following rune is in table.Rest.
func isValid(name string, table table) bool {
	if name == "" {
		return false
	}
	for i, r := range name {
		allowed := table.Rest
		if i == 0 {
			allowed = table.First
		}
		if !unicode.In(r, allowed) {
			return false
		}
	}
	return true
}
// sanitize checks if the name is valid according to the table. If not,
// it builds a valid name: runes invalid in the Rest position are
// replaced with an underscore, an invalid first rune is dropped, and
// leading/trailing underscores are trimmed. Returns false when nothing
// valid remains.
//
// Fix: dropping an invalid first rune could promote a rune that is only
// valid in the Rest table (e.g. "-9abc" -> "9abc" for metric names),
// returning an invalid name as valid. An underscore is now prepended in
// that case; "_" is in the First set of both tables, so the result is
// always valid.
func sanitize(name string, table table) (string, bool) {
	if isValid(name, table) {
		return name, true
	}
	var b strings.Builder
	for i, r := range name {
		switch {
		case i == 0:
			if unicode.In(r, table.First) {
				b.WriteRune(r)
			}
		default:
			if unicode.In(r, table.Rest) {
				b.WriteRune(r)
			} else {
				b.WriteString("_")
			}
		}
	}
	name = strings.Trim(b.String(), "_")
	if name == "" {
		return "", false
	}
	if !isValid(name, table) {
		// Every remaining rune is Rest-valid; only the first position can
		// still be wrong, and "_" satisfies First in both tables.
		name = "_" + name
	}
	return name, true
}
// sanitizeMetricName checks if the name is a valid Prometheus metric
// name ([a-zA-Z_:][a-zA-Z0-9_:]*). If not, it attempts to replace
// invalid runes with an underscore to create a valid name; the second
// result is false when no valid name can be derived.
func sanitizeMetricName(name string) (string, bool) {
	return sanitize(name, metricNameTable)
}
// sanitizeLabelName checks if the name is a valid Prometheus label
// name ([a-zA-Z_][a-zA-Z0-9_]*). If not, it attempts to replace invalid
// runes with an underscore to create a valid name; the second result is
// false when no valid name can be derived.
func sanitizeLabelName(name string) (string, bool) {
	return sanitize(name, labelNameTable)
}
// sampleValue converts a field value into a float64 sample value.
// Supported dynamic types are float64, int64, uint64 and bool (true
// maps to 1, false to 0); anything else reports ok == false.
func sampleValue(value interface{}) (float64, bool) {
	switch v := value.(type) {
	case float64:
		return v, true
	case int64:
		return float64(v), true
	case uint64:
		return float64(v), true
	case bool:
		if v {
			return 1.0, true
		}
		return 0.0, true
	}
	return 0, false
}
package solid
import (
"math"
"github.com/cpmech/gosl/chk"
"github.com/cpmech/gosl/tsr"
)
// Mmatch computes M=q/p and qy0 from c and φ corresponding to the strength that would
// be modelled by the Mohr-Coulomb model matching one of the following cones:
//  typ == 0 : compression cone (outer)
//      == 1 : extension cone (inner)
//      == 2 : plane-strain
// φ is given in degrees; the yield stress is qy0 = ξ*c.
func Mmatch(c, φ float64, typ int) (M, qy0 float64, err error) {
	φrad := φ * math.Pi / 180.0
	si := math.Sin(φrad)
	co := math.Cos(φrad)
	var ξ float64
	switch typ {
	case 0: // compression cone (outer)
		den := 3.0 - si
		M = 6.0 * si / den
		ξ = 6.0 * co / den
	case 1: // extension cone (inner)
		den := 3.0 + si
		M = 6.0 * si / den
		ξ = 6.0 * co / den
	case 2: // plane-strain
		tanφ := si / co
		d := math.Sqrt(3.0 + 4.0*tanφ*tanφ)
		M = 3.0 * tanφ / d
		ξ = 3.0 / d
	default:
		return 0, 0, chk.Err("typ=%d is invalid", typ)
	}
	qy0 = ξ * c
	return
}
// SpectralCompose recreates tensor m from its spectral decomposition
//  m   -- 2nd order tensor in Mandel basis
//  λ   -- eigenvalues
//  n   -- eigenvectors [ncp][nvecs]
//  tmp -- temporary matrix [3][3] holding the full 3x3 tensor
func SpectralCompose(m, λ []float64, n, tmp [][]float64) {
	for i := 0; i < 3; i++ {
		for j := 0; j < 3; j++ {
			// tmp[i][j] = Σ_k λ_k n_ik n_jk, accumulated in the same order
			// as the original expression so floats match bit-for-bit.
			sum := λ[0] * n[i][0] * n[j][0]
			for k := 1; k < 3; k++ {
				sum += λ[k] * n[i][k] * n[j][k]
			}
			tmp[i][j] = sum
		}
	}
	tsr.Ten2Man(m, tmp)
}
// Eigenprojectors computes the Mandel eigenprojectors for given eigenvectors
//  n -- eigenvectors [ncp][nvecs]; P[k] receives the projector of
//  eigenvector k. Rows of length 6 additionally get the two off-plane
//  Mandel components.
func Eigenprojectors(P [][]float64, n [][]float64) {
	full := len(P[0]) == 6
	for k := 0; k < 3; k++ {
		P[k][0] = n[0][k] * n[0][k]
		P[k][1] = n[1][k] * n[1][k]
		P[k][2] = n[2][k] * n[2][k]
		P[k][3] = n[0][k] * n[1][k] * tsr.SQ2
		if full {
			P[k][4] = n[1][k] * n[2][k] * tsr.SQ2
			P[k][5] = n[2][k] * n[0][k] * tsr.SQ2
		}
	}
}
/*
func Eigenprojectors(P0, P1, P2 []float64, n [][]float64) {
P0[0] = n[0][0] * n[0][0]
P0[1] = n[1][0] * n[1][0]
P0[2] = n[2][0] * n[2][0]
P0[3] = n[0][0] * n[1][0] * tsr.SQ2
P1[0] = n[0][1] * n[0][1]
P1[1] = n[1][1] * n[1][1]
P1[2] = n[2][1] * n[2][1]
P1[3] = n[0][1] * n[1][1] * tsr.SQ2
P2[0] = n[0][2] * n[0][2]
P2[1] = n[1][2] * n[1][2]
P2[2] = n[2][2] * n[2][2]
P2[3] = n[0][2] * n[1][2] * tsr.SQ2
if len(P0) == 6 {
P0[4] = n[1][0] * n[2][0] * tsr.SQ2
P0[5] = n[2][0] * n[0][0] * tsr.SQ2
P1[4] = n[1][1] * n[2][1] * tsr.SQ2
P1[5] = n[2][1] * n[0][1] * tsr.SQ2
P2[4] = n[1][2] * n[2][2] * tsr.SQ2
P2[5] = n[2][2] * n[0][2] * tsr.SQ2
}
}
*/ | mdl/solid/auxiliary.go | 0.564098 | 0.534005 | auxiliary.go | starcoder |
package conversions
import (
"errors"
"math"
"github.com/tomchavakis/turf-go/constants"
)
// factors maps a length unit to the number of that unit spanned by one
// Earth radius; dividing a length by its factor yields radians of
// great-circle arc, multiplying converts back.
var factors = map[string]float64{
	constants.UnitMiles: constants.EarthRadius / 1609.344,
	constants.UnitNauticalMiles: constants.EarthRadius / 1852.0,
	constants.UnitDegrees: constants.EarthRadius / 111325.0,
	constants.UnitRadians: 1.0,
	constants.UnitInches: constants.EarthRadius * 39.37,
	constants.UnitYards: constants.EarthRadius / 1.0936,
	constants.UnitMeters: constants.EarthRadius,
	constants.UnitCentimeters: constants.EarthRadius * 100.0,
	constants.UnitKilometers: constants.EarthRadius / 1000.0,
	constants.UnitFeet: constants.EarthRadius * 3.28084,
	constants.UnitCentimetres: constants.EarthRadius * 100.0,
	constants.UnitMetres: constants.EarthRadius,
	constants.UnitKimometres: constants.EarthRadius / 1000.0,
}

// areaFactors maps an area unit to the number of that unit per square
// meter (square meters are the canonical unit with factor 1).
var areaFactors = map[string]float64{
	constants.UnitAcres: 0.000247105,
	constants.UnitCentimeters: 10000.0,
	constants.UnitCentimetres: 10000.0,
	constants.UnitFeet: 10.763910417,
	constants.UnitHectares: 0.0001,
	constants.UnitInches: 1550.003100006,
	constants.UnitKilometers: 0.000001,
	constants.UnitKimometres: 0.000001,
	constants.UnitMeters: 1.0,
	constants.UnitMetres: 1.0,
	constants.UnitMiles: 3.86e-7,
	constants.UnitMillimeters: 1000000.0,
	constants.UnitMillimetres: 1000000.0,
	constants.UnitYards: 1.195990046,
}
// DegreesToRadians converts an angle given in degrees (0-360) to
// radians.
func DegreesToRadians(degrees float64) float64 {
	radians := degrees * math.Pi / 180
	return radians
}
// RadiansToDegrees converts an angle given in radians to degrees.
func RadiansToDegrees(radians float64) float64 {
	degrees := radians * 180 / math.Pi
	return degrees
}
// ToKilometersPerHour converts a speed in knots to km/h
// (1 knot = 1.852 km/h by definition).
func ToKilometersPerHour(knots float64) float64 {
	const kmhPerKnot = 1.852
	return knots * kmhPerKnot
}
// LengthToDegrees converts a distance measurement (assuming a spherical
// Earth) from a real-world unit into degrees of great-circle arc.
// An empty units string falls back to the package default unit.
// Valid units: miles, nauticalmiles, inches, yards, meters, metres,
// centimeters, kilometres, feet.
func LengthToDegrees(distance float64, units string) (float64, error) {
	if units == "" {
		units = constants.UnitDefault
	}
	ltr, err := LengthToRadians(distance, units)
	if err != nil {
		return 0.0, err
	}
	return RadiansToDegrees(ltr), nil
}
// LengthToRadians converts a distance measurement (assuming a spherical
// Earth) from a real-world unit into radians of great-circle arc.
// An empty units string falls back to the package default unit; an
// unknown unit yields an error.
func LengthToRadians(distance float64, units string) (float64, error) {
	if units == "" {
		units = constants.UnitDefault
	}
	factor, ok := factors[units]
	if !ok {
		return 0.0, errors.New("invalid units")
	}
	return distance / factor, nil
}
// RadiansToLength converts a distance measurement (assuming a spherical
// Earth) from radians of great-circle arc to a more friendly unit.
// An empty units string falls back to the package default unit; an
// unknown unit yields an error.
func RadiansToLength(radians float64, units string) (float64, error) {
	if units == "" {
		units = constants.UnitDefault
	}
	factor, ok := factors[units]
	if !ok {
		return 0.0, errors.New("invalid unit")
	}
	return radians * factor, nil
}
// ConvertLength converts a distance to a different unit, going through
// radians of great-circle arc. An empty originalUnits defaults to
// meters; an empty finalUnits defaults to the package default unit.
func ConvertLength(distance float64, originalUnits string, finalUnits string) (float64, error) {
	if originalUnits == "" {
		originalUnits = constants.UnitMeters
	}
	if finalUnits == "" {
		finalUnits = constants.UnitDefault
	}
	ltr, err := LengthToRadians(distance, originalUnits)
	if err != nil {
		return 0, err
	}
	return RadiansToLength(ltr, finalUnits)
}
// ConvertArea converts an area to the requested unit, going through
// square meters. Empty originalUnits defaults to meters and empty
// finalUnits to kilometers; a negative area or an unknown unit yields
// an error.
func ConvertArea(area float64, originalUnits string, finalUnits string) (float64, error) {
	if originalUnits == "" {
		originalUnits = constants.UnitMeters
	}
	if finalUnits == "" {
		finalUnits = constants.UnitKilometers
	}
	if area < 0 {
		return 0.0, errors.New("area must be a positive number")
	}
	startFactor, ok := areaFactors[originalUnits]
	if !ok {
		return 0.0, errors.New("invalid original units")
	}
	finalFactor, ok := areaFactors[finalUnits]
	if !ok {
		return 0.0, errors.New("invalid finalUnits units")
	}
	return (area / startFactor) * finalFactor, nil
}
// validateAreaUnit reports whether units is a known area unit.
func validateAreaUnit(units string) bool {
	_, ok := areaFactors[units]
	return ok
}

// validateUnit reports whether units is a known length unit.
func validateUnit(units string) bool {
	_, ok := factors[units]
	return ok
}
package gfx
import (
"image"
"image/color"
)
// Layer represents a layer of paletted tiles: tile indexes in Data are
// laid out row-major, Width indexes per row, and each index refers to a
// tile in Tileset.
type Layer struct {
	Tileset *Tileset
	Width int // Width of the layer in number of tiles.
	Data LayerData
}
// LayerData is the data for a layer: a row-major list of tile indexes.
type LayerData []int

// Size returns the size of the layer data in tiles (cols x rows) given
// the number of columns; a partially filled final row counts as a full
// row.
//
// Fix: the partial-row test used rows%cols instead of len%cols, which
// under-counted rows whenever the row count happened to be a multiple
// of cols (e.g. 17 entries in 4 columns reported 4 rows instead of 5).
func (ld LayerData) Size(cols int) image.Point {
	l := len(ld)
	if l < cols {
		// Includes empty data: a single (possibly short) row.
		return image.Pt(cols, 1)
	}
	rows := l / cols
	if l%cols != 0 {
		rows++
	}
	return image.Pt(cols, rows)
}
// NewLayer creates a new layer of width tiles per row, backed by the
// given tileset and tile-index data.
func NewLayer(tileset *Tileset, width int, data LayerData) *Layer {
	return &Layer{Tileset: tileset, Width: width, Data: data}
}
// At returns the color at (x, y), satisfying image.Image; it delegates
// to NRGBAAt.
func (l *Layer) At(x, y int) color.Color {
	return l.NRGBAAt(x, y)
}
// NRGBAAt returns the color.NRGBA at (x, y).
// Pixels that fall outside the layer data (negative tile index)
// are fully transparent.
func (l *Layer) NRGBAAt(x, y int) color.NRGBA {
	if i := l.TileIndexAt(x, y); i > -1 {
		s := l.Tileset.Size
		// (x, y) are pixel coordinates; wrap them into the tile.
		return l.Tileset.Tiles[i].NRGBAAt(x%s.X, y%s.Y)
	}
	return ColorTransparent
}
// AlphaAt returns the alpha value at (x, y).
// Pixels outside the layer data are fully transparent (0).
func (l *Layer) AlphaAt(x, y int) uint8 {
	i := l.TileIndexAt(x, y)
	if i <= -1 {
		return 0
	}
	size := l.Tileset.Size
	return l.Tileset.Tiles[i].AlphaAt(x%size.X, y%size.Y)
}
// Bounds returns the bounds of the paletted layer.
// A zero rectangle is returned for degenerate layers (non-positive
// width, no data, or a missing/zero-sized tileset).
// NOTE(review): the lpix < l.Width branch returns dimensions in tiles
// while the general path returns pixels — confirm this is intended.
func (l *Layer) Bounds() image.Rectangle {
	lpix := len(l.Data)
	switch {
	case l.Width < 1, lpix == 0,
		l.Tileset == nil,
		l.Tileset.Size.X < 1, l.Tileset.Size.Y < 1:
		return ZR
	case lpix < l.Width:
		return IR(0, 0, l.Width, 1)
	}
	// Pixel dimensions = tile counts × tile size along each axis.
	s := l.Data.Size(l.Width)
	w := s.X * l.Tileset.Size.X
	h := s.Y * l.Tileset.Size.Y
	return IR(0, 0, w, h)
}
// ColorModel returns the color model for the paletted layer.
// NOTE(review): At/NRGBAAt produce color.NRGBA values, yet the model
// reported here is color.RGBAModel — confirm the mismatch is intended.
func (l *Layer) ColorModel() color.Model {
	return color.RGBAModel
}
// ColorIndexAt returns the palette index of the pixel at (x, y),
// or 0 when no tile covers that pixel.
func (l *Layer) ColorIndexAt(x, y int) uint8 {
	t := l.TileAt(x, y)
	if t == nil {
		return 0
	}
	ts := l.Tileset.Size
	return t.ColorIndexAt(x%ts.X, y%ts.Y)
}
// TileAt returns the tile image at (x, y), or nil when the tile index
// is out of range for the tileset.
func (l *Layer) TileAt(x, y int) image.PalettedImage {
	i := l.TileIndexAt(x, y)
	if 0 <= i && i < len(l.Tileset.Tiles) {
		return l.Tileset.Tiles[i]
	}
	return nil
}
// TileSize returns the tileset tile size in pixels.
func (l *Layer) TileSize() image.Point {
	size := l.Tileset.Size
	return size
}
// GfxPalette retrieves the layer palette from the tileset.
func (l *Layer) GfxPalette() Palette {
	p := l.Tileset.Palette
	return p
}
// ColorPalette retrieves the layer palette as a color.Palette.
func (l *Layer) ColorPalette() color.Palette {
	p := l.Tileset.Palette
	return p.AsColorPalette()
}
// Index returns the tile index at (x, y). (Short for TileIndexAt)
func (l *Layer) Index(x, y int) int {
	i := l.TileIndexAt(x, y)
	return i
}
// TileIndexAt returns the tile index at pixel coordinate (x, y),
// or -1 when the coordinate falls outside the layer data.
func (l *Layer) TileIndexAt(x, y int) int {
	s := l.Tileset.Size
	col, row := x/s.X, y/s.Y
	o := row*l.Width + col
	if o < 0 || o >= len(l.Data) {
		return -1
	}
	return l.Data[o]
}
// DataAt returns the tile index stored at tile coordinate (dx, dy).
// Out-of-range coordinates panic on the slice access.
func (l *Layer) DataAt(dx, dy int) int {
	o := l.dataOffset(dx, dy)
	return l.Data[o]
}
// Put changes the tile index at (dx, dy). (Short for SetTileIndex)
// Out-of-range coordinates are silently ignored by SetTileIndex.
func (l *Layer) Put(dx, dy, index int) {
	l.SetTileIndex(dx, dy, index)
}
// SetTileIndex changes the tile index at tile coordinate (dx, dy).
// Out-of-range coordinates are ignored.
func (l *Layer) SetTileIndex(dx, dy, index int) {
	o := l.dataOffset(dx, dy)
	if o < 0 || o >= len(l.Data) {
		return
	}
	l.Data[o] = index
}
func (l *Layer) dataOffset(dx, dy int) int {
return dy*l.Width + dx
} | vendor/github.com/peterhellberg/gfx/layer.go | 0.891227 | 0.660446 | layer.go | starcoder |
package core
import (
"bytes"
"fmt"
"log"
"math"
"math/rand"
"strconv"
"strings"
)
// BhattacharyyaEstimator is the similarity estimator that quantifies the similarity
// of the distribution between the datasets.
type BhattacharyyaEstimator struct {
	AbstractDatasetSimilarityEstimator
	// maps dataset paths to their index in the slices below
	inverseIndex map[string]int
	// maximum number of partitions (regions) used for the estimation
	maxPartitions int
	// portion of each dataset sampled for constructing the partitioner
	datasetSR float64
	// partitioner that splits tuples into regions (kd-tree or k-means)
	partitioner DataPartitioner
	// holds the number of points for each dataset region
	pointsPerRegion [][]int
	// holds the total number of points for each dataset
	datasetsSize []int
}
// Compute method constructs the Similarity Matrix
func (e *BhattacharyyaEstimator) Compute() error {
	err := datasetSimilarityEstimatorCompute(e)
	return err
}
// Similarity returns the similarity between two datasets: the
// Bhattacharyya coefficient of their per-region point histograms.
func (e *BhattacharyyaEstimator) Similarity(a, b *Dataset) float64 {
	indexA, countA := e.regionHistogram(a)
	indexB, countB := e.regionHistogram(b)
	return e.getValue(indexA, indexB, countA, countB)
}

// regionHistogram returns the number of points per partitioner region and
// the total point count for d. Known datasets reuse the histograms
// precomputed by init; unknown ones are read and partitioned on the fly.
// Errors are logged and processing continues, as in the original flow.
func (e *BhattacharyyaEstimator) regionHistogram(d *Dataset) ([]int, int) {
	if idx, ok := e.inverseIndex[d.Path()]; ok {
		return e.pointsPerRegion[idx], e.datasetsSize[idx]
	}
	if err := d.ReadFromFile(); err != nil {
		log.Println(err)
	}
	clusters, err := e.partitioner.Partition(d.Data())
	if err != nil {
		log.Println(err)
	}
	hist := make([]int, 0, len(clusters))
	for _, c := range clusters {
		hist = append(hist, len(c))
	}
	return hist, len(d.Data())
}
// Configure sets the configuration parameters of the estimator.
// Recognized keys: concurrency (int), partitions (int), dataset.sr
// (float), partitioner.type (kmeans|kdtree) and any partitioner.*-prefixed
// option, which is forwarded (minus the prefix) to the partitioner.
// Malformed values are logged and the documented default is kept
// (previously a parse error silently left the field at 0).
func (e *BhattacharyyaEstimator) Configure(conf map[string]string) {
	e.concurrency = 1
	if val, ok := conf["concurrency"]; ok {
		if conv, err := strconv.ParseInt(val, 10, 32); err != nil {
			log.Println(err)
		} else {
			e.concurrency = int(conv)
		}
	}
	e.maxPartitions = 32
	if val, ok := conf["partitions"]; ok {
		if conv, err := strconv.ParseInt(val, 10, 32); err != nil {
			log.Println(err)
		} else {
			e.maxPartitions = int(conv)
		}
	}
	e.datasetSR = 0.1
	if val, ok := conf["dataset.sr"]; ok {
		if conv, err := strconv.ParseFloat(val, 64); err != nil {
			log.Println(err)
		} else {
			e.datasetSR = conv
		}
	}
	partitionerType := DataPartitionerKDTree
	if val, ok := conf["partitioner.type"]; ok {
		switch strings.ToLower(val) {
		case "kmeans":
			partitionerType = DataPartitionerKMeans
		case "kdtree":
			partitionerType = DataPartitionerKDTree
		default:
			log.Println("Unknown partitioner type, using default (kdtree)")
		}
	}
	partitionerConf := make(map[string]string)
	partitionerConf["partitions"] = fmt.Sprintf("%d", e.maxPartitions)
	// forward any partitioner.* option to the partitioner instance
	for k, v := range conf {
		if strings.HasPrefix(k, "partitioner.") {
			partitionerConf[strings.TrimPrefix(k, "partitioner.")] = v
		}
	}
	log.Println("Providing the following conf to the partitioner", partitionerConf)
	e.init(partitionerType, partitionerConf)
}
// init reads all datasets from disk, builds the partitioner over a merged
// random sample of their tuples, and precomputes each dataset's per-region
// point histogram and total size for later Similarity calls.
func (e *BhattacharyyaEstimator) init(partitionerType DataPartitionerType, partitionerConf map[string]string) {
	// map dataset paths to their position so Similarity can reuse the
	// precomputed histograms
	e.inverseIndex = make(map[string]int)
	for i, d := range e.datasets {
		e.inverseIndex[d.Path()] = i
	}
	for _, d := range e.datasets {
		// NOTE(review): ReadFromFile error is ignored here — confirm the
		// datasets are guaranteed readable at this point
		d.ReadFromFile()
	}
	s := e.sampledDataset()
	e.partitioner = NewDataPartitioner(partitionerType, partitionerConf)
	e.partitioner.Construct(s)
	e.pointsPerRegion = make([][]int, len(e.datasets))
	e.datasetsSize = make([]int, len(e.datasets))
	for i, d := range e.datasets {
		clusters, err := e.partitioner.Partition(d.Data())
		if err != nil {
			// on failure this dataset keeps a nil histogram and zero size
			log.Println(err)
		} else {
			for _, c := range clusters {
				e.pointsPerRegion[i] = append(e.pointsPerRegion[i], len(c))
			}
			e.datasetsSize[i] = len(d.Data())
		}
	}
}
// Options returns a list of parameters that can be set by the user.
func (e *BhattacharyyaEstimator) Options() map[string]string {
	opts := make(map[string]string)
	opts["concurrency"] = "max num of threads used (int)"
	opts["partitions"] = "max number of partitions to be used for the estimation (default is 32)"
	opts["partitioner.type"] = "the partitioner type (one of kmeans, kdtree - default is kdtree) "
	opts["partitioner.*"] = "provide any argument to the partitioner instance using the partitioner.* prefix (e.g.: partitioner.weights=0.1,0.2 for kmeans)"
	opts["dataset.sr"] = "determines the portion of datasets to sample for the partitioner construction"
	return opts
}
// getValue computes the Bhattacharyya coefficient between two per-region
// point histograms, normalized by the geometric mean of the total counts.
// The histograms are iterated up to the shorter length so mismatched
// region counts cannot cause an index-out-of-range panic (previously only
// indA's length was used to index both slices).
func (e *BhattacharyyaEstimator) getValue(indA, indB []int, countA, countB int) float64 {
	n := len(indA)
	if len(indB) < n {
		n = len(indB)
	}
	sum := 0.0
	for k := 0; k < n; k++ {
		sum += math.Sqrt(float64(indA[k] * indB[k]))
	}
	return sum / math.Sqrt(float64(countA*countB))
}
// Serialize returns a byte array containing a serialized form of the
// estimator: the estimator type tag, the length-prefixed abstract
// estimator, maxPartitions, the per-dataset region histograms, the
// dataset sizes and finally the length-prefixed partitioner.
// Deserialize reads the same fields back in the same order.
func (e *BhattacharyyaEstimator) Serialize() []byte {
	buffer := new(bytes.Buffer)
	buffer.Write(getBytesInt(int(SimilarityTypeBhattacharyya)))
	bytes := datasetSimilarityEstimatorSerialize(e.AbstractDatasetSimilarityEstimator)
	buffer.Write(bytes)
	buffer.Write(getBytesInt(e.maxPartitions))
	// write points per region; the region count is taken from the first
	// dataset's histogram
	// NOTE(review): panics if pointsPerRegion is empty — confirm callers
	// only serialize initialized estimators
	buffer.Write(getBytesInt(len(e.pointsPerRegion[0])))
	for i := range e.pointsPerRegion {
		for j := range e.pointsPerRegion[i] {
			buffer.Write(getBytesInt(e.pointsPerRegion[i][j]))
		}
	}
	// write datasets size
	for _, s := range e.datasetsSize {
		buffer.Write(getBytesInt(s))
	}
	// write the partitioner, prefixed by its byte length
	serializedPartitioner := e.partitioner.Serialize()
	buffer.Write(getBytesInt(len(serializedPartitioner)))
	buffer.Write(serializedPartitioner)
	return buffer.Bytes()
}
// Deserialize constructs a similarity object based on the byte stream
// produced by Serialize (same field order: type tag, abstract estimator,
// maxPartitions, region histograms, dataset sizes, partitioner).
func (e *BhattacharyyaEstimator) Deserialize(b []byte) {
	buffer := bytes.NewBuffer(b)
	tempInt := make([]byte, 4)
	buffer.Read(tempInt) // contains estimator type
	var count int
	buffer.Read(tempInt)
	absEstBytes := make([]byte, getIntBytes(tempInt))
	buffer.Read(absEstBytes)
	e.AbstractDatasetSimilarityEstimator =
		*datasetSimilarityEstimatorDeserialize(absEstBytes)
	buffer.Read(tempInt)
	// fixed: maxPartitions was previously decoded from an uninitialized
	// 8-byte scratch buffer (tempFloat) instead of the 4 bytes just read
	e.maxPartitions = getIntBytes(tempInt)
	e.inverseIndex = make(map[string]int)
	for i := range e.datasets {
		e.inverseIndex[e.datasets[i].Path()] = i
	}
	// region count per dataset, then the histograms themselves
	buffer.Read(tempInt)
	count = getIntBytes(tempInt)
	e.pointsPerRegion = make([][]int, len(e.datasets))
	for i := range e.pointsPerRegion {
		e.pointsPerRegion[i] = make([]int, count)
		for j := range e.pointsPerRegion[i] {
			buffer.Read(tempInt)
			e.pointsPerRegion[i][j] = getIntBytes(tempInt)
		}
	}
	e.datasetsSize = make([]int, len(e.datasets))
	for i := range e.datasetsSize {
		buffer.Read(tempInt)
		e.datasetsSize[i] = getIntBytes(tempInt)
	}
	// length-prefixed serialized partitioner
	buffer.Read(tempInt)
	count = getIntBytes(tempInt)
	tempCustom := make([]byte, count)
	buffer.Read(tempCustom)
	e.partitioner = DeserializePartitioner(tempCustom)
}
// sampledDataset returns a custom dataset that consist of the tuples of the
// previous
func (e *BhattacharyyaEstimator) sampledDataset() []DatasetTuple {
log.Println("Generating a sampled and merged dataset with all tuples")
var result []DatasetTuple
for _, d := range e.datasets {
tuplesToChoose := int(math.Floor(float64(len(d.Data())) * e.datasetSR))
log.Printf("%d/%d tuples chosen for %s\n", tuplesToChoose, len(d.Data()), d.path)
tuplesIdx := make(map[int]bool)
for len(tuplesIdx) < tuplesToChoose {
tuplesIdx[rand.Int()%len(d.Data())] = true
}
for k := range tuplesIdx {
result = append(result, d.Data()[k])
}
}
return result
} | core/similaritybhattacharyya.go | 0.665411 | 0.641478 | similaritybhattacharyya.go | starcoder |
package parquet
import "unsafe"
// lookup writes, for each position i in indexes, the dictionary value at
// indexes[i] into the int32 field located at rows.index(i, size, offset).
func (d *int32Dictionary) lookup(indexes []int32, rows array, size, offset uintptr) {
	checkLookupIndexBounds(indexes, rows)
	for i, j := range indexes {
		*(*int32)(rows.index(i, size, offset)) = d.index(j)
	}
}
// lookup writes, for each position i in indexes, the dictionary value at
// indexes[i] into the int64 field located at rows.index(i, size, offset).
func (d *int64Dictionary) lookup(indexes []int32, rows array, size, offset uintptr) {
	checkLookupIndexBounds(indexes, rows)
	for i, j := range indexes {
		*(*int64)(rows.index(i, size, offset)) = d.index(j)
	}
}
// lookup writes, for each position i in indexes, the dictionary value at
// indexes[i] into the float32 field located at rows.index(i, size, offset).
func (d *floatDictionary) lookup(indexes []int32, rows array, size, offset uintptr) {
	checkLookupIndexBounds(indexes, rows)
	for i, j := range indexes {
		*(*float32)(rows.index(i, size, offset)) = d.index(j)
	}
}
// lookup writes, for each position i in indexes, the dictionary value at
// indexes[i] into the float64 field located at rows.index(i, size, offset).
func (d *doubleDictionary) lookup(indexes []int32, rows array, size, offset uintptr) {
	checkLookupIndexBounds(indexes, rows)
	for i, j := range indexes {
		*(*float64)(rows.index(i, size, offset)) = d.index(j)
	}
}
// lookupString writes, for each position i in indexes, the dictionary
// entry at indexes[i] reinterpreted as a string (header cast, no copy)
// into the field located at rows.index(i, size, offset).
func (d *byteArrayDictionary) lookupString(indexes []int32, rows array, size, offset uintptr) {
	checkLookupIndexBounds(indexes, rows)
	for i, j := range indexes {
		v := d.index(j)
		// reinterpret the slice header as a string header without copying
		*(*string)(rows.index(i, size, offset)) = *(*string)(unsafe.Pointer(&v))
	}
}
// lookupString writes, for each position i in indexes, the dictionary
// entry at indexes[i] reinterpreted as a string (header cast, no copy)
// into the field located at rows.index(i, size, offset).
func (d *fixedLenByteArrayDictionary) lookupString(indexes []int32, rows array, size, offset uintptr) {
	checkLookupIndexBounds(indexes, rows)
	for i, j := range indexes {
		v := d.index(j)
		// reinterpret the slice header as a string header without copying
		*(*string)(rows.index(i, size, offset)) = *(*string)(unsafe.Pointer(&v))
	}
}
// lookup writes, for each position i in indexes, the dictionary value at
// indexes[i] into the uint32 field located at rows.index(i, size, offset).
func (d *uint32Dictionary) lookup(indexes []int32, rows array, size, offset uintptr) {
	checkLookupIndexBounds(indexes, rows)
	for i, j := range indexes {
		*(*uint32)(rows.index(i, size, offset)) = d.index(j)
	}
}
// lookup writes, for each position i in indexes, the dictionary value at
// indexes[i] into the uint64 field located at rows.index(i, size, offset).
func (d *uint64Dictionary) lookup(indexes []int32, rows array, size, offset uintptr) {
	checkLookupIndexBounds(indexes, rows)
	for i, j := range indexes {
		*(*uint64)(rows.index(i, size, offset)) = d.index(j)
	}
}
// lookupString writes each selected dictionary entry into rows as a
// 16-byte string. The local s only supplies a valid string header of
// length 16; its data pointer is rewritten in place to each entry's
// address, so the produced strings alias dictionary memory (no copies).
func (d *be128Dictionary) lookupString(indexes []int32, rows array, size, offset uintptr) {
	checkLookupIndexBounds(indexes, rows)
	s := "0123456789ABCDEF"
	for i, j := range indexes {
		// overwrite the string header's data pointer with the entry address
		*(**[16]byte)(unsafe.Pointer(&s)) = d.index(j)
		*(*string)(rows.index(i, size, offset)) = s
	}
}
// lookupPointer writes, for each position i in indexes, a pointer to the
// dictionary entry at indexes[i] into the field at rows.index(i, size, offset).
func (d *be128Dictionary) lookupPointer(indexes []int32, rows array, size, offset uintptr) {
	checkLookupIndexBounds(indexes, rows)
	for i, j := range indexes {
		*(**[16]byte)(rows.index(i, size, offset)) = d.index(j)
	}
}
// bounds returns the minimum and maximum dictionary values referenced
// by indexes. indexes must not be empty.
func (d *int32Dictionary) bounds(indexes []int32) (min, max int32) {
	min = d.index(indexes[0])
	max = min
	for _, i := range indexes[1:] {
		v := d.index(i)
		// a value below min cannot also be a new max, so the cases are exclusive
		switch {
		case v < min:
			min = v
		case v > max:
			max = v
		}
	}
	return min, max
}
// bounds returns the minimum and maximum dictionary values referenced
// by indexes. indexes must not be empty.
func (d *int64Dictionary) bounds(indexes []int32) (min, max int64) {
	min = d.index(indexes[0])
	max = min
	for _, i := range indexes[1:] {
		v := d.index(i)
		// a value below min cannot also be a new max, so the cases are exclusive
		switch {
		case v < min:
			min = v
		case v > max:
			max = v
		}
	}
	return min, max
}
// bounds returns the minimum and maximum dictionary values referenced
// by indexes. indexes must not be empty. NaN values compare false on
// both branches and so never replace min or max, as before.
func (d *floatDictionary) bounds(indexes []int32) (min, max float32) {
	min = d.index(indexes[0])
	max = min
	for _, i := range indexes[1:] {
		v := d.index(i)
		switch {
		case v < min:
			min = v
		case v > max:
			max = v
		}
	}
	return min, max
}
// bounds returns the minimum and maximum dictionary values referenced
// by indexes. indexes must not be empty. NaN values compare false on
// both branches and so never replace min or max, as before.
func (d *doubleDictionary) bounds(indexes []int32) (min, max float64) {
	min = d.index(indexes[0])
	max = min
	for _, i := range indexes[1:] {
		v := d.index(i)
		switch {
		case v < min:
			min = v
		case v > max:
			max = v
		}
	}
	return min, max
}
// bounds returns the minimum and maximum dictionary values referenced
// by indexes. indexes must not be empty.
func (d *uint32Dictionary) bounds(indexes []int32) (min, max uint32) {
	min = d.index(indexes[0])
	max = min
	for _, i := range indexes[1:] {
		v := d.index(i)
		// a value below min cannot also be a new max, so the cases are exclusive
		switch {
		case v < min:
			min = v
		case v > max:
			max = v
		}
	}
	return min, max
}
// bounds returns the minimum and maximum dictionary values referenced
// by indexes. indexes must not be empty.
func (d *uint64Dictionary) bounds(indexes []int32) (min, max uint64) {
	min = d.index(indexes[0])
	max = min
	for _, i := range indexes[1:] {
		v := d.index(i)
		// a value below min cannot also be a new max, so the cases are exclusive
		switch {
		case v < min:
			min = v
		case v > max:
			max = v
		}
	}
	return min, max
}
// bounds returns pointers to the minimum and maximum 16-byte values
// referenced by indexes, ordered by lessBE128. Entries are resolved in
// batches of up to 64 through lookupPointer into a scratch pointer array
// to amortize the per-entry resolution cost. indexes must not be empty.
func (d *be128Dictionary) bounds(indexes []int32) (min, max *[16]byte) {
	values := [64]*[16]byte{}
	// indexes[0] seeds both extremes; the loop starts at 1
	min = d.index(indexes[0])
	max = min
	for i := 1; i < len(indexes); i += len(values) {
		// n = size of this batch (last batch may be short)
		n := len(indexes) - i
		if n > len(values) {
			n = len(values)
		}
		j := i + n
		// resolve the batch of entry pointers into values[:n]
		d.lookupPointer(indexes[i:j:j], makeArrayBE128(values[:n:n]), unsafe.Sizeof(values[0]), 0)
		for _, value := range values[:n:n] {
			switch {
			case lessBE128(value, min):
				min = value
			case lessBE128(max, value):
				max = value
			}
		}
	}
	return min, max
}
package tezerrors
var errorsDescrs = `{
"Bad deserialized counter": {
"title": "Deserialized counter does not match the stored one",
"descr": "The byte sequence references a multisig counter that does not match the one currently stored in the given multisig contract"
},
"Bad_hash": {
"title": "Bad hash",
"descr": "Wrong hash given"
},
"Context_not_found": {
"title": "Context not found",
"descr": "Cannot find context corresponding to hash"
},
"InconsistentImportedBlock": {
"title": "Inconsistent imported block",
"descr": "The imported block is not the expected one."
},
"InconsistentOperationHashes": {
"title": "Inconsistent operation hashes",
"descr": "The operations given do not match their hashes."
},
"InconsistentTypesTypeError": {
"title": "Inconsistent types (typechecking error)",
"descr": "This is the basic type clash error, that appears in several places where the equality of two types have to be proven, it is always accompanied with another error that provides more context."
},
"Inconsistent_snapshot_data": {
"title": "Inconsistent snapshot data",
"descr": "The data provided by the snapshot is inconsistent"
},
"Inconsistent_snapshot_file": {
"title": "Inconsistent snapshot file",
"descr": "Error while opening snapshot file"
},
"Invalid_snapshot_version": {
"title": "Invalid snapshot version",
"descr": "The version of the snapshot to import is not valid"
},
"Missing_snapshot_data": {
"title": "Missing data in imported snapshot",
"descr": "Mandatory data missing while reaching end of snapshot file."
},
"RPC_context.Not_found": {
"title": "RPC lookup failed",
"descr": "RPC lookup failed. No RPC exists at the URL or the RPC tried to access non-existent data."
},
"Restore_context_failure": {
"title": "Failed to restore context",
"descr": "Internal error while restoring the context"
},
"SnapshotImportFailure": {
"title": "Snapshot import failure",
"descr": "The imported snapshot is malformed."
},
"System_read_error": {
"title": "System read error",
"descr": "Failed to read file"
},
"Validator_process.failed_to_checkout_context": {
"title": "Fail during checkout context",
"descr": "The context checkout failed using a given hash"
},
"Validator_process.system_error_while_validating": {
"title": "Failed to validate block because of a system error",
"descr": "The validator failed because of a system error"
},
"Writing_error": {
"title": "Writing error",
"descr": "Cannot write in file for context dump"
},
"WrongBlockExport": {
"title": "Wrong block export",
"descr": "The block to export in the snapshot is not valid."
},
"WrongProtocolHash": {
"title": "Wrong protocol hash",
"descr": "Wrong protocol hash"
},
"WrongReconstructMode": {
"title": "Wrong reconstruct mode",
"descr": "Reconstruction of contexts while importing is comptible with full mode snapshots only"
},
"WrongSnapshotExport": {
"title": "Wrong snapshot export",
"descr": "Snapshot exports is not compatible with the current configuration."
},
"actionDeserialisation": {
"title": "The expression is not a valid multisig action",
"descr": "When trying to deserialise an action from a sequence of bytes, we got an expression that does not correspond to a known multisig action"
},
"badBlockArgument": {
"title": "Bad Block Argument",
"descr": "Block argument could not be parsed"
},
"badChainArgument": {
"title": "Bad Chain Argument",
"descr": "Chain argument could not be parsed"
},
"badContractParameter": {
"title": "Contract supplied an invalid parameter",
"descr": "Either no parameter was supplied to a contract with a non-unit parameter type, a non-unit parameter was passed to an account, or a parameter was supplied of the wrong type"
},
"badDeserializedContract": {
"title": "The byte sequence is not for the given multisig contract",
"descr": "When trying to deserialise an action from a sequence of bytes, we got an action for another multisig contract"
},
"badEndorsementDelayArg": {
"title": "Bad -endorsement-delay arg",
"descr": "invalid duration in -endorsement-delay"
},
"badMaxPriorityArg": {
"title": "Bad -max-priority arg",
"descr": "invalid priority in -max-priority"
},
"badMaxWaitingTimeArg": {
"title": "Bad -max-waiting-time arg",
"descr": "invalid duration in -max-waiting-time"
},
"badMinimalFeesArg": {
"title": "Bad -minimal-fees arg",
"descr": "invalid fee threshold in -fee-threshold"
},
"badPreservedLevelsArg": {
"title": "Bad -preserved-levels arg",
"descr": "invalid number of levels in -preserved-levels"
},
"badProtocolArgument": {
"title": "Bad Protocol Argument",
"descr": "Protocol argument could not be parsed"
},
"badReturnTypeError": {
"title": "Bad return (typechecking error)",
"descr": "Unexpected stack at the end of a lambda or script."
},
"badStackItemTypeError": {
"title": "Bad stack item (typechecking error)",
"descr": "The type of a stack item is unexpected (this error is always accompanied by a more precise one)."
},
"badStackTypeError": {
"title": "Bad stack (typechecking error)",
"descr": "The stack has an unexpected length or contents."
},
"badTezArg": {
"title": "Bad Tez Arg",
"descr": "Invalid ꜩ notation in parameter."
},
"baking.insufficient_proof_of_work": {
"title": "Insufficient block proof-of-work stamp",
"descr": "The block's proof-of-work stamp is insufficient"
},
"baking.invalid_block_signature": {
"title": "Invalid block signature",
"descr": "A block was not signed with the expected private key."
},
"baking.invalid_fitness_gap": {
"title": "Invalid fitness gap",
"descr": "The gap of fitness is out of bounds"
},
"baking.invalid_signature": {
"title": "Invalid block signature",
"descr": "The block's signature is invalid"
},
"baking.timestamp_too_early": {
"title": "Block forged too early",
"descr": "The block timestamp is before the first slot for this baker at this level"
},
"baking.unexpected_endorsement": {
"title": "Endorsement from unexpected delegate",
"descr": "The operation is signed by a delegate without endorsement rights."
},
"block.inconsistent_double_baking_evidence": {
"title": "Inconsistent double baking evidence",
"descr": "A double-baking evidence is inconsistent (two distinct delegates)"
},
"block.inconsistent_double_endorsement_evidence": {
"title": "Inconsistent double endorsement evidence",
"descr": "A double-endorsement evidence is inconsistent (two distinct delegates)"
},
"block.invalid_commitment": {
"title": "Invalid commitment in block header",
"descr": "The block header has invalid commitment."
},
"block.invalid_double_baking_evidence": {
"title": "Invalid double baking evidence",
"descr": "A double-baking evidence is inconsistent (two distinct level)"
},
"block.invalid_double_endorsement_evidence": {
"title": "Invalid double endorsement evidence",
"descr": "A double-endorsement evidence is malformed"
},
"block.multiple_revelation": {
"title": "Multiple revelations were included in a manager operation",
"descr": "A manager operation should not contain more than one revelation"
},
"block.outdated_double_baking_evidence": {
"title": "Outdated double baking evidence",
"descr": "A double-baking evidence is outdated."
},
"block.outdated_double_endorsement_evidence": {
"title": "Outdated double endorsement evidence",
"descr": "A double-endorsement evidence is outdated."
},
"block.too_early_double_baking_evidence": {
"title": "Too early double baking evidence",
"descr": "A double-baking evidence is in the future"
},
"block.too_early_double_endorsement_evidence": {
"title": "Too early double endorsement evidence",
"descr": "A double-endorsement evidence is in the future"
},
"block.unrequired_double_baking_evidence": {
"title": "Unrequired double baking evidence",
"descr": "A double-baking evidence is unrequired"
},
"block.unrequired_double_endorsement_evidence": {
"title": "Unrequired double endorsement evidence",
"descr": "A double-endorsement evidence is unrequired"
},
"bytesDeserialisation": {
"title": "The byte sequence is not a valid multisig action",
"descr": "When trying to deserialise an action from a sequence of bytes, we got an error"
},
"canceled": {
"title": "Canceled",
"descr": "A promise was unexpectedly canceled"
},
"cannotSerializeError": {
"title": "Not enough gas to serialize error",
"descr": "The error was too big to be serialized with the provided gas"
},
"cannotSerializeFailure": {
"title": "Not enough gas to serialize argument of FAILWITH",
"descr": "Argument of FAILWITH was too big to be serialized with the provided gas"
},
"cannotSerializeLog": {
"title": "Not enough gas to serialize execution trace",
"descr": "Execution trace with stacks was to big to be serialized with the provided gas"
},
"cannotSerializeStorage": {
"title": "Not enough gas to serialize execution storage",
"descr": "The returned storage was too big to be serialized with the provided gas"
},
"cannot_originate_non_spendable_account": {
"title": "Cannot originate non spendable account",
"descr": "An origination was attempted that would create a non spendable, non scripted contract"
},
"cannot_originate_spendable_smart_contract": {
"title": "Cannot originate spendable smart contract",
"descr": "An origination was attempted that would create a spendable scripted contract"
},
"cli.key.invalid_uri": {
"title": "Invalid key uri",
"descr": "A key has been provided with an invalid uri."
},
"cli.signature_mismatch": {
"title": "Signature mismatch",
"descr": "The signer produced an invalid signature"
},
"cli.unregistered_key_scheme": {
"title": "Unregistered key scheme",
"descr": "A key has been provided with an unregistered scheme (no corresponding plugin)"
},
"comparableTypeExpectedTypeError": {
"title": "Comparable type expected (typechecking error)",
"descr": "A non comparable type was used in a place where only comparable types are accepted."
},
"context.failed_to_decode_parameter": {
"title": "Failed to decode parameter",
"descr": "Unexpected JSON object."
},
"context.failed_to_parse_parameter": {
"title": "Failed to parse parameter",
"descr": "The protocol parameters are not valid JSON."
},
"context.storage_error": {
"title": "Storage error (fatal internal error)",
"descr": "An error that should never happen unless something has been deleted or corrupted in the database."
},
"context_dump.read.bad_hash": {
"title": "Wrong hash given",
"descr": ""
},
"context_dump.read.bad_read": {
"title": "Cannot read file",
"descr": ""
},
"context_dump.read.cannot_open": {
"title": "Cannot open file for context restoring",
"descr": ""
},
"context_dump.read.suspicious": {
"title": "Suspicious file: data after end",
"descr": ""
},
"context_dump.write.cannot_open": {
"title": "Cannot open file for context dump",
"descr": ""
},
"context_dump.write.context_not_found": {
"title": "Cannot find context corresponding to hash",
"descr": ""
},
"context_dump.write.missing_space": {
"title": "Cannot write in file for context dump",
"descr": ""
},
"contract.balance_too_low": {
"title": "Balance too low",
"descr": "An operation tried to spend more tokens than the contract has"
},
"contract.cannot_pay_storage_fee": {
"title": "Cannot pay storage fee",
"descr": "The storage fee is higher than the contract balance"
},
"contract.counter_in_the_future": {
"title": "Invalid counter (not yet reached) in a manager operation",
"descr": "An operation assumed a contract counter in the future"
},
"contract.counter_in_the_past": {
"title": "Invalid counter (already used) in a manager operation",
"descr": "An operation assumed a contract counter in the past"
},
"contract.empty_transaction": {
"title": "Empty transaction",
"descr": "Forbidden to credit 0ꜩ to a contract without code."
},
"contract.failure": {
"title": "Contract storage failure",
"descr": "Unexpected contract storage error"
},
"contract.invalid_contract_notation": {
"title": "Invalid contract notation",
"descr": "A malformed contract notation was given to an RPC or in a script."
},
"contract.manager.consume_roll_change": {
"title": "Consume roll change",
"descr": "Change is not enough to consume a roll."
},
"contract.manager.inconsistent_hash": {
"title": "Inconsistent public key hash",
"descr": "A revealed manager public key is inconsistent with the announced hash"
},
"contract.manager.inconsistent_public_key": {
"title": "Inconsistent public key",
"descr": "A provided manager public key is different with the public key stored in the contract"
},
"contract.manager.no_roll_for_delegate": {
"title": "No roll for delegate",
"descr": "Delegate has no roll."
},
"contract.manager.no_roll_snapshot_for_cycle": {
"title": "No roll snapshot for cycle",
"descr": "A snapshot of the rolls distribution does not exist for this cycle."
},
"contract.manager.unregistered_delegate": {
"title": "Unregistered delegate",
"descr": "A contract cannot be delegated to an unregistered delegate"
},
"contract.non_existing_contract": {
"title": "Non existing contract",
"descr": "A contract handle is not present in the context (either it never was or it has been destroyed)"
},
"contract.previously_revealed_key": {
"title": "Manager operation already revealed",
"descr": "One tried to revealed twice a manager public key"
},
"contract.undelegatable_contract": {
"title": "Non delegatable contract",
"descr": "Tried to delegate an implicit contract or a non delegatable originated contract"
},
"contract.unrevealed_key": {
"title": "Manager operation precedes key revelation",
"descr": "One tried to apply a manager operation without revealing the manager public key"
},
"contract.unspendable_contract": {
"title": "Unspendable contract",
"descr": "An operation tried to spend tokens from an unspendable contract"
},
"contractHasNoScript": {
"title": "The given contract is not a multisig contract because it has no script",
"descr": "A multisig command has referenced a scriptless smart contract instead of a multisig smart contract."
},
"contractHasNoStorage": {
"title": "The given contract is not a multisig contract because it has no storage",
"descr": "A multisig command has referenced a smart contract without storage instead of a multisig smart contract."
},
"contractHasUnexpectedStorage": {
"title": "The storage of the given contract is not of the shape expected for a multisig contract",
"descr": "A multisig command has referenced a smart contract whose storage is of a different shape than the expected one."
},
"contractWithoutCode": {
"title": "The given contract has no code",
"descr": "Attempt to get the code of a contract failed because it has nocode. No scriptless contract should remain."
},
"delegate.already_active": {
"title": "Delegate already active",
"descr": "Useless delegate reactivation"
},
"delegate.balance_too_low_for_deposit": {
"title": "Balance too low for deposit",
"descr": "Cannot freeze deposit when the balance is too low"
},
"delegate.empty_delegate_account": {
"title": "Empty delegate account",
"descr": "Cannot register a delegate when its implicit account is empty"
},
"delegate.no_deletion": {
"title": "Forbidden delegate deletion",
"descr": "Tried to unregister a delegate"
},
"delegate.unchanged": {
"title": "Unchanged delegated",
"descr": "Contract already delegated to the given delegate"
},
"distributed_db.Operation_hash.fetch_canceled": {
"title": "Canceled fetch of a Operation_hash",
"descr": "The fetch of a Operation_hash has been canceled"
},
"distributed_db.Operation_hash.fetch_timeout": {
"title": "Timed out fetch of a Operation_hash",
"descr": "The fetch of a Operation_hash has timed out"
},
"distributed_db.Operation_hash.missing": {
"title": "Missing Operation_hash",
"descr": "Some Operation_hash is missing from the distributed db"
},
"distributed_db.Protocol_hash.fetch_canceled": {
"title": "Canceled fetch of a Protocol_hash",
"descr": "The fetch of a Protocol_hash has been canceled"
},
"distributed_db.Protocol_hash.fetch_timeout": {
"title": "Timed out fetch of a Protocol_hash",
"descr": "The fetch of a Protocol_hash has timed out"
},
"distributed_db.Protocol_hash.missing": {
"title": "Missing Protocol_hash",
"descr": "Some Protocol_hash is missing from the distributed db"
},
"distributed_db.block_hash.fetch_canceled": {
"title": "Canceled fetch of a block_hash",
"descr": "The fetch of a block_hash has been canceled"
},
"distributed_db.block_hash.fetch_timeout": {
"title": "Timed out fetch of a block_hash",
"descr": "The fetch of a block_hash has timed out"
},
"distributed_db.block_hash.missing": {
"title": "Missing block_hash",
"descr": "Some block_hash is missing from the distributed db"
},
"distributed_db.operation_hashes.fetch_canceled": {
"title": "Canceled fetch of a operation_hashes",
"descr": "The fetch of a operation_hashes has been canceled"
},
"distributed_db.operation_hashes.fetch_timeout": {
"title": "Timed out fetch of a operation_hashes",
"descr": "The fetch of a operation_hashes has timed out"
},
"distributed_db.operation_hashes.missing": {
"title": "Missing operation_hashes",
"descr": "Some operation_hashes is missing from the distributed db"
},
"distributed_db.operations.fetch_canceled": {
"title": "Canceled fetch of a operations",
"descr": "The fetch of a operations has been canceled"
},
"distributed_db.operations.fetch_timeout": {
"title": "Timed out fetch of a operations",
"descr": "The fetch of a operations has timed out"
},
"distributed_db.operations.missing": {
"title": "Missing operations",
"descr": "Some operations is missing from the distributed db"
},
"duplicateMapKeys": {
"title": "Duplicate map keys",
"descr": "Map literals cannot contain duplicated keys"
},
"duplicateScriptField": {
"title": "Script has a duplicated field (parse error)",
"descr": "When parsing script, a field was found more than once"
},
"duplicateSetValuesInLiteral": {
"title": "Sets literals cannot contain duplicate elements",
"descr": "Set literals cannot contain duplicate elements, but a duplicate was found while parsing."
},
"empty_proposal": {
"title": "Empty proposal",
"descr": "Proposal lists cannot be empty."
},
"failNotInTailPositionTypeError": {
"title": "FAIL not in tail position (typechecking error)",
"descr": "There is non trivial garbage code after a FAIL instruction."
},
"failure": {
"title": "Generic error",
"descr": "Unclassified error"
},
"gas_exhausted.block": {
"title": "Gas quota exceeded for the block",
"descr": "The sum of gas consumed by all the operations in the block exceeds the hard gas limit per block"
},
"gas_exhausted.init_deserialize": {
"title": "Not enough gas for initial deserialization of script expressions",
"descr": "Gas limit was not high enough to deserialize the transaction parameters or origination script code or initial storage, making the operation impossible to parse within the provided gas bounds."
},
"gas_exhausted.operation": {
"title": "Gas quota exceeded for the operation",
"descr": "A script or one of its callee took more time than the operation said it would"
},
"gas_limit_too_high": {
"title": "Gas limit out of protocol hard bounds",
"descr": "A transaction tried to exceed the hard limit on gas"
},
"illFormedTypeTypeError": {
"title": "Ill formed type (typechecking error)",
"descr": "The toplevel error thrown when trying to parse a type expression (always followed by more precise errors)."
},
"illTypedContractTypeError": {
"title": "Ill typed contract (typechecking error)",
"descr": "The toplevel error thrown when trying to typecheck a contract code against given input, output and storage types (always followed by more precise errors)."
},
"illTypedDataTypeError": {
"title": "Ill typed data (typechecking error)",
"descr": "The toplevel error thrown when trying to typecheck a data expression against a given type (always followed by more precise errors)."
},
"implicit.empty_implicit_contract": {
"title": "Empty implicit contract",
"descr": "No manager operations are allowed on an empty implicit contract."
},
"inconsistentAnnotations": {
"title": "Annotations inconsistent between branches",
"descr": "The annotations on two types could not be merged"
},
"inconsistentFieldAnnotations": {
"title": "Annotations for field accesses is inconsistent",
"descr": "The specified field does not match the field annotation in the type"
},
"inconsistentStackLengthsTypeError": {
"title": "Inconsistent stack lengths (typechecking error)",
"descr": "A stack was of an unexpected length (this error is always in the context of a located error)."
},
"inconsistentTypeAnnotations": {
"title": "Types contain inconsistent annotations",
"descr": "The two types contain annotations that do not match"
},
"incorrect_number_of_endorsements": {
"title": "Incorrect number of endorsements",
"descr": "The number of endorsements must be non-negative and at most the endorsers_per_block constant."
},
"incorrect_priority": {
"title": "Incorrect priority",
"descr": "Block priority must be non-negative."
},
"internal-event-activation-error": {
"title": "Internal Event Sink: Wrong Activation URI",
"descr": "Activation of an Internal Event SINK with an URI failed"
},
"internal_operation_replay": {
"title": "Internal operation replay",
"descr": "An internal operation was emitted twice by a script"
},
"invalidArityTypeError": {
"title": "Invalid arity (typechecking error)",
"descr": "In a script or data expression, a primitive was applied to an unsupported number of arguments."
},
"invalidConstantTypeError": {
"title": "Invalid constant (typechecking error)",
"descr": "A data expression was invalid for its expected type."
},
"invalidContractTypeError": {
"title": "Invalid contract (typechecking error)",
"descr": "A script or data expression references a contract that does not exist or assumes a wrong type for an existing contract."
},
"invalidExpressionKindTypeError": {
"title": "Invalid expression kind (typechecking error)",
"descr": "In a script or data expression, an expression was of the wrong kind (for instance a string where only a primitive applications can appear)."
},
"invalidIterBody": {
"title": "ITER body returned wrong stack type",
"descr": "The body of an ITER instruction must result in the same stack type as before the ITER."
},
"invalidMapBlockFail": {
"title": "FAIL instruction occurred as body of map block",
"descr": "FAIL cannot be the only instruction in the body. The proper type of the return list cannot be inferred."
},
"invalidMapBody": {
"title": "Invalid map body",
"descr": "The body of a map block did not match the expected type"
},
"invalidPortArgument": {
"title": "Bad Port Argument",
"descr": "Port argument could not be parsed"
},
"invalidPrimitiveNameCaseTypeError": {
"title": "Invalid primitive name case (typechecking error)",
"descr": "In a script or data expression, a primitive name is neither uppercase, lowercase or capitalized."
},
"invalidPrimitiveNameTypeErro": {
"title": "Invalid primitive name (typechecking error)",
"descr": "In a script or data expression, a primitive name is unknown or has a wrong case."
},
"invalidPrimitiveNamespaceTypeError": {
"title": "Invalid primitive namespace (typechecking error)",
"descr": "In a script or data expression, a primitive was of the wrong namespace."
},
"invalidPrimitiveTypeError": {
"title": "Invalid primitive (typechecking error)",
"descr": "In a script or data expression, a primitive was unknown."
},
"invalidSignature": {
"title": "The following signature did not match a public key in the given multisig contract",
"descr": "A signature was given for a multisig contract that matched none of the public keys of the contract signers"
},
"invalidSyntacticConstantError": {
"title": "Invalid constant (parse error)",
"descr": "A compile-time constant was invalid for its expected form."
},
"invalidWaitArgument": {
"title": "Bad Wait Argument",
"descr": "Wait argument could not be parsed"
},
"invalid_arg": {
"title": "Invalid arg",
"descr": "Negative multiple of periods are not allowed."
},
"invalid_binary_format": {
"title": "Invalid binary format",
"descr": "Could not deserialize some piece of data from its binary representation"
},
"invalid_fitness": {
"title": "Invalid fitness",
"descr": "Fitness representation should be exactly 8 bytes long."
},
"invalid_proposal": {
"title": "Invalid proposal",
"descr": "Ballot provided for a proposal that is not the current one."
},
"invalid_remote_signer": {
"title": "Unexpected URI for remote signer",
"descr": "The provided remote signer is invalid."
},
"invalid_remote_signer_argument": {
"title": "Unexpected URI of remote signer",
"descr": "The remote signer argument could not be parsed"
},
"malformed_period": {
"title": "Malformed period",
"descr": "Period is negative."
},
"micheline.parse_error.annotation_exceeds_max_length": {
"title": "Micheline parser error: annotation exceeds max length",
"descr": "While parsing a piece of Micheline source, an annotation exceeded the maximum length (255)."
},
"micheline.parse_error.empty_expression": {
"title": "Micheline parser error: empty_expression",
"descr": "Tried to interpret an empty piece or Micheline source as a single expression."
},
"micheline.parse_error.extra_token": {
"title": "Micheline parser error: extra token",
"descr": "While parsing a piece of Micheline source, an extra semi colon or parenthesis was encountered."
},
"micheline.parse_error.invalid_utf8_sequence": {
"title": "Micheline parser error: invalid UTF-8 sequence",
"descr": "While parsing a piece of Micheline source, a sequence of bytes that is not valid UTF-8 was encountered."
},
"micheline.parse_error.misaligned_node": {
"title": "Micheline parser error: misaligned node",
"descr": "While parsing a piece of Micheline source, an expression was not aligned with its siblings of the same mother application or sequence."
},
"micheline.parse_error.missing_break_after_number": {
"title": "Micheline parser error: missing break after number",
"descr": "While parsing a piece of Micheline source, a number was not visually separated from its follower token, leading to misreadability."
},
"micheline.parse_error.odd_lengthed_bytes": {
"title": "Micheline parser error: odd lengthed bytes",
"descr": "While parsing a piece of Micheline source, the length of a byte sequence (0x...) was not a multiple of two, leaving a trailing half byte."
},
"micheline.parse_error.unclosed_token": {
"title": "Micheline parser error: unclosed token",
"descr": "While parsing a piece of Micheline source, a parenthesis or a brace was unclosed."
},
"micheline.parse_error.undefined_escape_sequence": {
"title": "Micheline parser error: undefined escape sequence",
"descr": "While parsing a piece of Micheline source, an unexpected escape sequence was encountered in a string."
},
"micheline.parse_error.unexpected_character": {
"title": "Micheline parser error: unexpected character",
"descr": "While parsing a piece of Micheline source, an unexpected character was encountered."
},
"micheline.parse_error.unexpected_token": {
"title": "Micheline parser error: unexpected token",
"descr": "While parsing a piece of Micheline source, an unexpected token was encountered."
},
"micheline.parse_error.unterminated_comment": {
"title": "Micheline parser error: unterminated comment",
"descr": "While parsing a piece of Micheline source, a comment was not terminated."
},
"micheline.parse_error.unterminated_integer": {
"title": "Micheline parser error: unterminated integer",
"descr": "While parsing a piece of Micheline source, an integer was not terminated."
},
"micheline.parse_error.unterminated_string": {
"title": "Micheline parser error: unterminated string",
"descr": "While parsing a piece of Micheline source, a string was not terminated."
},
"michelson.macros.bas_arity": {
"title": "Wrong number of arguments to macro",
"descr": "A wrong number of arguments was provided to a macro"
},
"michelson.macros.sequence_expected": {
"title": "Macro expects a sequence",
"descr": "A macro expects a sequence, but a sequence was not provided"
},
"michelson.macros.unexpected_annotation": {
"title": "Unexpected annotation",
"descr": "A macro had an annotation, but no annotation was permitted on this macro."
},
"michelson_v1.bad_contract_parameter": {
"title": "Contract supplied an invalid parameter",
"descr": "Either no parameter was supplied to a contract with a non-unit parameter type, a non-unit parameter was passed to an account, or a parameter was supplied of the wrong type"
},
"michelson_v1.bad_return": {
"title": "Bad return",
"descr": "Unexpected stack at the end of a lambda or script."
},
"michelson_v1.bad_stack": {
"title": "Bad stack",
"descr": "The stack has an unexpected length or contents."
},
"michelson_v1.bad_stack_item": {
"title": "Bad stack item",
"descr": "The type of a stack item is unexpected (this error is always accompanied by a more precise one)."
},
"michelson_v1.cannot_serialize_error": {
"title": "Not enough gas to serialize error",
"descr": "The error was too big to be serialized with the provided gas"
},
"michelson_v1.cannot_serialize_failure": {
"title": "Not enough gas to serialize argument of FAILWITH",
"descr": "Argument of FAILWITH was too big to be serialized with the provided gas"
},
"michelson_v1.cannot_serialize_log": {
"title": "Not enough gas to serialize execution trace",
"descr": "Execution trace with stacks was to big to be serialized with the provided gas"
},
"michelson_v1.cannot_serialize_storage": {
"title": "Not enough gas to serialize execution storage",
"descr": "The returned storage was too big to be serialized with the provided gas"
},
"michelson_v1.comparable_type_expected": {
"title": "Comparable type expected",
"descr": "A non comparable type was used in a place where only comparable types are accepted."
},
"michelson_v1.deprecated_instruction": {
"title": "Script is using a deprecated instruction",
"descr": "A deprecated instruction usage is disallowed in newly created contracts"
},
"michelson_v1.duplicate_entrypoint": {
"title": "Duplicate entrypoint (type error)",
"descr": "Two entrypoints have the same name."
},
"michelson_v1.duplicate_map_keys": {
"title": "Duplicate map keys",
"descr": "Map literals cannot contain duplicated keys"
},
"michelson_v1.duplicate_script_field": {
"title": "Script has a duplicated field (parse error)",
"descr": "When parsing script, a field was found more than once"
},
"michelson_v1.duplicate_set_values_in_literal": {
"title": "Sets literals cannot contain duplicate elements",
"descr": "Set literals cannot contain duplicate elements, but a duplicate was found while parsing."
},
"michelson_v1.entrypoint_name_too_long": {
"title": "Entrypoint name too long (type error)",
"descr": "An entrypoint name exceeds the maximum length of 31 characters."
},
"michelson_v1.fail_not_in_tail_position": {
"title": "FAIL not in tail position",
"descr": "There is non trivial garbage code after a FAIL instruction."
},
"michelson_v1.ill_formed_type": {
"title": "Ill formed type",
"descr": "The toplevel error thrown when trying to parse a type expression (always followed by more precise errors)."
},
"michelson_v1.ill_typed_contract": {
"title": "Ill typed contract",
"descr": "The toplevel error thrown when trying to typecheck a contract code against given input, output and storage types (always followed by more precise errors)."
},
"michelson_v1.ill_typed_data": {
"title": "Ill typed data",
"descr": "The toplevel error thrown when trying to typecheck a data expression against a given type (always followed by more precise errors)."
},
"michelson_v1.inconsistent_annotations": {
"title": "Annotations inconsistent between branches",
"descr": "The annotations on two types could not be merged"
},
"michelson_v1.inconsistent_field_annotations": {
"title": "Annotations for field accesses is inconsistent",
"descr": "The specified field does not match the field annotation in the type"
},
"michelson_v1.inconsistent_stack_lengths": {
"title": "Inconsistent stack lengths",
"descr": "A stack was of an unexpected length (this error is always in the context of a located error)."
},
"michelson_v1.inconsistent_type_annotations": {
"title": "Types contain inconsistent annotations",
"descr": "The two types contain annotations that do not match"
},
"michelson_v1.inconsistent_types": {
"title": "Inconsistent types",
"descr": "This is the basic type clash error, that appears in several places where the equality of two types have to be proven, it is always accompanied with another error that provides more context."
},
"michelson_v1.invalid_arity": {
"title": "Invalid arity",
"descr": "In a script or data expression, a primitive was applied to an unsupported number of arguments."
},
"michelson_v1.invalid_big_map": {
"title": "Invalid big_map",
"descr": "A script or data expression references a big_map that does not exist or assumes a wrong type for an existing big_map."
},
"michelson_v1.invalid_constant": {
"title": "Invalid constant",
"descr": "A data expression was invalid for its expected type."
},
"michelson_v1.invalid_contract": {
"title": "Invalid contract",
"descr": "A script or data expression references a contract that does not exist or assumes a wrong type for an existing contract."
},
"michelson_v1.invalid_expression_kind": {
"title": "Invalid expression kind",
"descr": "In a script or data expression, an expression was of the wrong kind (for instance a string where only a primitive applications can appear)."
},
"michelson_v1.invalid_iter_body": {
"title": "ITER body returned wrong stack type",
"descr": "The body of an ITER instruction must result in the same stack type as before the ITER."
},
"michelson_v1.invalid_map_block_fail": {
"title": "FAIL instruction occurred as body of map block",
"descr": "FAIL cannot be the only instruction in the body. The proper type of the return list cannot be inferred."
},
"michelson_v1.invalid_map_body": {
"title": "Invalid map body",
"descr": "The body of a map block did not match the expected type"
},
"michelson_v1.invalid_primitive": {
"title": "Invalid primitive",
"descr": "In a script or data expression, a primitive was unknown."
},
"michelson_v1.invalid_primitive_name": {
"title": "Invalid primitive name",
"descr": "In a script or data expression, a primitive name is unknown or has a wrong case."
},
"michelson_v1.invalid_primitive_name_case": {
"title": "Invalid primitive name case",
"descr": "In a script or data expression, a primitive name is neither uppercase, lowercase or capitalized."
},
"michelson_v1.invalid_primitive_namespace": {
"title": "Invalid primitive namespace",
"descr": "In a script or data expression, a primitive was of the wrong namespace."
},
"michelson_v1.missing_script_field": {
"title": "Script is missing a field (parse error)",
"descr": "When parsing script, a field was expected, but not provided"
},
"michelson_v1.no_such_entrypoint": {
"title": "No such entrypoint (type error)",
"descr": "An entrypoint was not found when calling a contract."
},
"michelson_v1.runtime_error": {
"title": "Script runtime error",
"descr": "Toplevel error for all runtime script errors"
},
"michelson_v1.script_overflow": {
"title": "Script failed (overflow error)",
"descr": "A FAIL instruction was reached due to the detection of an overflow"
},
"michelson_v1.script_rejected": {
"title": "Script failed",
"descr": "A FAILWITH instruction was reached"
},
"michelson_v1.self_in_lambda": {
"title": "SELF instruction in lambda",
"descr": "A SELF instruction was encountered in a lambda expression."
},
"michelson_v1.type_too_large": {
"title": "Stack item type too large",
"descr": "An instruction generated a type larger than the limit."
},
"michelson_v1.undefined_binop": {
"title": "Undefined binop",
"descr": "A binary operation is called on operands of types over which it is not defined."
},
"michelson_v1.undefined_unop": {
"title": "Undefined unop",
"descr": "A unary operation is called on an operand of type over which it is not defined."
},
"michelson_v1.unexpected_annotation": {
"title": "An annotation was encountered where no annotation is expected",
"descr": "A node in the syntax tree was improperly annotated"
},
"michelson_v1.unexpected_bigmap": {
"title": "Big map in unauthorized position (type error)",
"descr": "When parsing script, a big_map type was found in a position where it could end up stored inside a big_map, which is forbidden for now."
},
"michelson_v1.unexpected_contract": {
"title": "Contract in unauthorized position (type error)",
"descr": "When parsing script, a contract type was found in the storage or parameter field."
},
"michelson_v1.unexpected_operation": {
"title": "Operation in unauthorized position (type error)",
"descr": "When parsing script, an operation type was found in the storage or parameter field."
},
"michelson_v1.ungrouped_annotations": {
"title": "Annotations of the same kind were found spread apart",
"descr": "Annotations of the same kind must be grouped"
},
"michelson_v1.unknown_primitive_name": {
"title": "Unknown primitive name",
"descr": "In a script or data expression, a primitive was unknown."
},
"michelson_v1.unmatched_branches": {
"title": "Unmatched branches",
"descr": "At the join point at the end of two code branches the stacks have inconsistent lengths or contents."
},
"michelson_v1.unordered_map_literal": {
"title": "Invalid map key order",
"descr": "Map keys must be in strictly increasing order"
},
"michelson_v1.unordered_set_literal": {
"title": "Invalid set value order",
"descr": "Set values must be in strictly increasing order"
},
"michelson_v1.unreachable_entrypoint": {
"title": "Unreachable entrypoint (type error)",
"descr": "An entrypoint in the contract is not reachable."
},
"missingScriptField": {
"title": "Script is missing a field (parse error)",
"descr": "When parsing script, a field was expected, but not provided"
},
"node.bootstrap_pipeline.invalid_locator": {
"title": "Invalid block locator",
"descr": "Block locator is invalid."
},
"node.bootstrap_pipeline.too_short_locator": {
"title": "Too short locator",
"descr": "Block locator is too short."
},
"node.p2p_io_scheduler.connection_closed": {
"title": "Connection closed",
"descr": "IO error: connection with a peer is closed."
},
"node.p2p_pool.connected": {
"title": "Connected",
"descr": "Fail to connect with a peer: a connection is already established."
},
"node.p2p_pool.connection_refused": {
"title": "Connection refused",
"descr": "Connection was refused."
},
"node.p2p_pool.peer_banned": {
"title": "Peer Banned",
"descr": "The peer identity you tried to connect is banned."
},
"node.p2p_pool.pending_connection": {
"title": "Pending connection",
"descr": "Fail to connect with a peer: a connection is already pending."
},
"node.p2p_pool.point_banned": {
"title": "Point Banned",
"descr": "The address you tried to connect is banned."
},
"node.p2p_pool.private_mode": {
"title": "Private mode",
"descr": "Node is in private mode."
},
"node.p2p_pool.rejected": {
"title": "Rejected peer",
"descr": "Connection to peer was rejected."
},
"node.p2p_pool.too_many_connections": {
"title": "Too many connections",
"descr": "Too many connections."
},
"node.p2p_socket.decipher_error": {
"title": "Decipher error",
"descr": "An error occurred while deciphering."
},
"node.p2p_socket.decoding_error": {
"title": "Decoding error",
"descr": "An error occurred while decoding."
},
"node.p2p_socket.encoding_error": {
"title": "Encoding error",
"descr": "An error occurred while encoding."
},
"node.p2p_socket.invalid_auth": {
"title": "Invalid authentication",
"descr": "Rejected peer connection: invalid authentication."
},
"node.p2p_socket.invalid_chunks_size": {
"title": "Invalid chunks size",
"descr": "Size of chunks is not valid."
},
"node.p2p_socket.invalid_message_size": {
"title": "Invalid message size",
"descr": "The size of the message to be written is invalid."
},
"node.p2p_socket.myself": {
"title": "Myself",
"descr": "Remote peer is actually yourself."
},
"node.p2p_socket.not_enough_proof_of_work": {
"title": "Not enough proof of work",
"descr": "Remote peer cannot be authenticated: not enough proof of work."
},
"node.p2p_socket.rejected_no_common_protocol": {
"title": "Rejected socket connection - no common network protocol",
"descr": "Rejected peer connection: rejected socket connection as we have no common network protocol with the peer."
},
"node.p2p_socket.rejected_socket_connection": {
"title": "Rejected socket connection",
"descr": "Rejected peer connection: rejected socket connection."
},
"node.peer_validator.known_invalid": {
"title": "Known invalid",
"descr": "Known invalid block found in the peer's chain"
},
"node.peer_validator.unknown_ancestor": {
"title": "Unknown ancestor",
"descr": "Unknown ancestor block found in the peer's chain"
},
"node.prevalidation.future_block_header": {
"title": "Future block header",
"descr": "The block was annotated with a time too far in the future."
},
"node.prevalidation.oversized_operation": {
"title": "Oversized operation",
"descr": "The operation size is bigger than allowed."
},
"node.prevalidation.parse_error": {
"title": "Parsing error in prevalidation",
"descr": "Raised when an operation has not been parsed correctly during prevalidation."
},
"node.prevalidation.too_many_operations": {
"title": "Too many pending operations in prevalidation",
"descr": "The prevalidation context is full."
},
"node.protocol_validator.invalid_protocol": {
"title": "Invalid protocol",
"descr": "Invalid protocol."
},
"node.state.bad_data_dir": {
"title": "Bad data directory",
"descr": "The data directory could not be read. This could be because it was generated with an old version of the tezos-node program. Deleting and regenerating this directory may fix the problem."
},
"node.state.block.inconsistent_context_hash": {
"title": "Inconsistent commit hash",
"descr": "When commiting the context of a block, the announced context hash was not the one computed at commit time."
},
"node.state.block_not_invalid": {
"title": "Block not invalid",
"descr": "The invalid block to be unmarked was not actually invalid."
},
"node.state.unknown_chain": {
"title": "Unknown chain",
"descr": "The chain identifier could not be found in the chain identifiers table."
},
"node.validator.checkpoint_error": {
"title": "Block incompatible with the current checkpoint.",
"descr": "The block belongs to a branch that is not compatible with the current checkpoint."
},
"node.validator.inactive_chain": {
"title": "Inactive chain",
"descr": "Attempted validation of a block from an inactive chain."
},
"node_config_file.incorrect_history_mode_switch": {
"title": "Incorrect history mode switch",
"descr": "Incorrect history mode switch."
},
"nonPositiveThreshold": {
"title": "Given threshold is not positive",
"descr": "A multisig threshold should be a positive number"
},
"nonce.previously_revealed": {
"title": "Previously revealed nonce",
"descr": "Duplicated revelation for a nonce."
},
"nonce.too_early_revelation": {
"title": "Too early nonce revelation",
"descr": "Nonce revelation happens before cycle end"
},
"nonce.too_late_revelation": {
"title": "Too late nonce revelation",
"descr": "Nonce revelation happens too late"
},
"nonce.unexpected": {
"title": "Unexpected nonce",
"descr": "The provided nonce is inconsistent with the committed nonce hash."
},
"notASupportedMultisigContract": {
"title": "The given contract is not one of the supported contracts",
"descr": "A multisig command has referenced a smart contract whose script is not one of the known multisig contract scripts."
},
"notEnoughSignatures": {
"title": "Not enough signatures were provided for this multisig action",
"descr": "To run an action on a multisig contract, you should provide at least as many signatures as indicated by the threshold stored in the multisig contract."
},
"operation.cannot_parse": {
"title": "Cannot parse operation",
"descr": "The operation is ill-formed or for another protocol version"
},
"operation.duplicate_endorsement": {
"title": "Duplicate endorsement",
"descr": "Two endorsements received from same delegate"
},
"operation.invalid_activation": {
"title": "Invalid activation",
"descr": "The given key and secret do not correspond to any existing preallocated contract"
},
"operation.invalid_endorsement_level": {
"title": "Unexpected level in endorsement",
"descr": "The level of an endorsement is inconsistent with the provided block hash."
},
"operation.invalid_signature": {
"title": "Invalid operation signature",
"descr": "The operation signature is ill-formed or has been made with the wrong public key"
},
"operation.missing_signature": {
"title": "Missing operation signature",
"descr": "The operation is of a kind that must be signed, but the signature is missing"
},
"operation.not_enought_endorsements_for_priority": {
"title": "Not enough endorsements for priority",
"descr": "The block being validated does not include the required minimum number of endorsements for this priority."
},
"operation.wrong_endorsement_predecessor": {
"title": "Wrong endorsement predecessor",
"descr": "Trying to include an endorsement in a block that is not the successor of the endorsed one"
},
"operation.wrong_voting_period": {
"title": "Wrong voting period",
"descr": "Trying to include a proposal or ballot meant for another voting period"
},
"raw_context.invalid_depth": {
"title": "Invalid depth argument",
"descr": "The raw context extraction depth argument must be positive."
},
"raw_store.unknown": {
"title": "Missing key in store",
"descr": "Missing key in store"
},
"rpc_client.request_failed": {
"title": "<Untitled>",
"descr": ""
},
"scriptOverflowRuntimeError": {
"title": "Script failed (overflow error)",
"descr": "A FAIL instruction was reached due to the detection of an overflow"
},
"scriptRejectedRuntimeError": {
"title": "Script failed (runtime script error)",
"descr": "A FAILWITH instruction was reached"
},
"scriptRuntimeError": {
"title": "Script runtime error",
"descr": "Toplevel error for all runtime script errors"
},
"seed.unknown_seed": {
"title": "Unknown seed",
"descr": "The requested seed is not available"
},
"selfInLambda": {
"title": "SELF instruction in lambda (typechecking error)",
"descr": "A SELF instruction was encountered in a lambda expression."
},
"signer.decoding_error": {
"title": "Decoding_error",
"descr": "Error while decoding a remote signer message"
},
"signer.encoding_error": {
"title": "Encoding_error",
"descr": "Error while encoding a remote signer message"
},
"signer.ledger": {
"title": "Ledger error",
"descr": "Error when communicating with a Ledger Nano S device"
},
"signer.ledger.deterministic_nonce_not_implemented": {
"title": "Ledger deterministic_nonce(_hash) not implemented",
"descr": "The deterministic_nonce(_hash) functionality is not implemented by the ledger"
},
"state.block.contents_not_found": {
"title": "Block_contents_not_found",
"descr": "Block not found"
},
"state.block.not_found": {
"title": "Block_not_found",
"descr": "Block not found"
},
"storage_exhausted.operation": {
"title": "Storage quota exceeded for the operation",
"descr": "A script or one of its callee wrote more bytes than the operation said it would"
},
"storage_limit_too_high": {
"title": "Storage limit out of protocol hard bounds",
"descr": "A transaction tried to exceed the hard limit on storage"
},
"tez.addition_overflow": {
"title": "Overflowing tez addition",
"descr": "An addition of two tez amounts overflowed"
},
"tez.invalid_divisor": {
"title": "Invalid tez divisor",
"descr": "Multiplication of a tez amount by a non positive integer"
},
"tez.multiplication_overflow": {
"title": "Overflowing tez multiplication",
"descr": "A multiplication of a tez amount by an integer overflowed"
},
"tez.negative_multiplicator": {
"title": "Negative tez multiplicator",
"descr": "Multiplication of a tez amount by a negative integer"
},
"tez.subtraction_underflow": {
"title": "Underflowing tez subtraction",
"descr": "A subtraction of two tez amounts underflowed"
},
"thresholdTooHigh": {
"title": "Given threshold is too high",
"descr": "The given threshold is higher than the number of keys, this would lead to a frozen multisig contract"
},
"timestamp_add": {
"title": "Timestamp add",
"descr": "Overflow when adding timestamps."
},
"timestamp_sub": {
"title": "Timestamp sub",
"descr": "Subtracting timestamps resulted in negative period."
},
"too_many_internal_operations": {
"title": "Too many internal operations",
"descr": "A transaction exceeded the hard limit of internal operations it can emit"
},
"too_many_proposals": {
"title": "Too many proposals",
"descr": "The delegate reached the maximum number of allowed proposals."
},
"typeTooLarge": {
"title": "Stack item type too large",
"descr": "An instruction generated a type larger than the limit."
},
"unauthorized_ballot": {
"title": "Unauthorized ballot",
"descr": "The delegate provided for the ballot is not in the voting listings."
},
"unauthorized_proposal": {
"title": "Unauthorized proposal",
"descr": "The delegate provided for the proposal is not in the voting listings."
},
"undefinedBinopTypeError": {
"title": "Undefined binop (typechecking error)",
"descr": "A binary operation is called on operands of types over which it is not defined."
},
"undefinedUnopTypeError": {
"title": "Undefined unop (typechecking error)",
"descr": "A unary operation is called on an operand of type over which it is not defined."
},
"undefined_operation_nonce": {
"title": "Ill timed access to the origination nonce",
"descr": "An origination was attempted out of the scope of a manager operation"
},
"unexpectedAnnotation": {
"title": "An annotation was encountered where no annotation is expected",
"descr": "A node in the syntax tree was improperly annotated"
},
"unexpectedBigMap": {
"title": "Big map in unauthorized position (type error)",
"descr": "When parsing script, a big_map type was found somewhere else than in the left component of the toplevel storage pair."
},
"unexpectedOperation": {
"title": "Operation in unauthorized position (type error)",
"descr": "When parsing script, an operation type was found in the storage or parameter field."
},
"unexpected_ballot": {
"title": "Unexpected ballot",
"descr": "Ballot recorded outside of a voting period."
},
"unexpected_level": {
"title": "Unexpected level",
"descr": "Level must be non-negative."
},
"unexpected_nonce_length": {
"title": "Unexpected nonce length",
"descr": "Nonce length is incorrect."
},
"unexpected_proposal": {
"title": "Unexpected proposal",
"descr": "Proposal recorded outside of a proposal period."
},
"ungroupedAnnotations": {
"title": "Annotations of the same kind were found spread apart",
"descr": "Annotations of the same kind must be grouped"
},
"unix.system_info": {
"title": "Unix System_info failure",
"descr": "Unix System_info failure"
},
"unix_error": {
"title": "Unix error",
"descr": "An unhandled unix exception"
},
"unknownPrimitiveNameTypeError": {
"title": "Unknown primitive name (typechecking error)",
"descr": "In a script or data expression, a primitive was unknown."
},
"unmatchedBranchesTypeError": {
"title": "Unmatched branches (typechecking error)",
"descr": "At the join point at the end of two code branches the stacks have inconsistent lengths or contents."
},
"unorderedMapLiteral": {
"title": "Invalid map key order",
"descr": "Map keys must be in strictly increasing order"
},
"unorderedSetLiteral": {
"title": "Invalid set value order",
"descr": "Set values must be in strictly increasing order"
},
"utils.Timeout": {
"title": "Timeout",
"descr": "Timeout"
},
"validator.inconsistent_operations_hash": {
"title": "Invalid merkle tree",
"descr": "The provided list of operations is inconsistent with the block header."
},
"validator.invalid_block": {
"title": "Invalid block",
"descr": "Invalid block."
},
"validator.missing_test_protocol": {
"title": "Missing test protocol",
"descr": "Missing test protocol when forking the test chain"
},
"validator.unavailable_protocol": {
"title": "Missing protocol",
"descr": "The protocol required for validating a block is missing."
},
"worker.closed": {
"title": "Worker closed",
"descr": "An operation on a worker could not complete before it was shut down."
}
}` | internal/bcd/tezerrors/errors.go | 0.838316 | 0.599573 | errors.go | starcoder |
package mandel
import (
"math"
"github.com/karlek/wasabi/fractal"
)
// OrbitTrap returns the smallest distance and it's point from a distance
// function calculated on each point in the orbit.
//
// z is iterated under frac.Func (with parameter c and coefficient frac.Coef)
// for up to frac.Iterations steps; every orbit point is fed to trap and the
// minimum trap value, together with the point that produced it, is kept.
// The loop exits early on cycle detection or bailout. The result is
// math.Sqrt of the minimum trap value — presumably trap functions return
// squared distances by convention; confirm against the trap constructors.
func OrbitTrap(z, c complex128, frac *fractal.Fractal, trap func(complex128) float64) (dist float64, closest complex128) {
	// Arbitrarily chosen high number.
	dist = math.MaxFloat64
	// We can't assume bulb convergence since we're interested in the orbit
	// trap functions value.
	// Saved value for cycle-detection. IsCycle mutates it via pointer on
	// every call.
	var bfract complex128
	// See if the complex function diverges before we reach our iteration
	// count.
	var i int64
	for i = 0; i < frac.Iterations; i++ {
		z = frac.Func(z, c, frac.Coef)
		// Calculate and maybe save the distance of our new point.
		if newDist := trap(z); dist > newDist {
			dist = newDist
			closest = z
		}
		// Periodic orbit detected: the point will never diverge, so the
		// minimum found so far is final.
		if IsCycle(z, &bfract, i) {
			return math.Sqrt(dist), closest
		}
		// This point diverges, so we return the smallest distance and the
		// point that was closest to the trap.
		if IsOutside(z, frac.Bailout) {
			return math.Sqrt(dist), closest
		}
	}
	// This point converges; assumed under the number of iterations.
	return math.Sqrt(dist), closest
}
// Pickover builds an orbit-trap distance function anchored at p: for a point
// z it yields the smaller of its offset distances to the two coordinate axes.
//
// NOTE(review): the offsets are added (real(z)+real(p)) rather than
// subtracted; presumably p is meant as a negated offset — confirm against
// callers.
func Pickover(p complex128) func(complex128) float64 {
	return func(z complex128) float64 {
		// Offset distance to the y-axis.
		distX := math.Abs(real(z) + real(p))
		// Offset distance to the x-axis.
		distY := math.Abs(imag(z) + imag(p))
		return math.Min(distX, distY)
	}
}
// Line builds a trap that measures the distance from a point to the line
// passing through p0 with direction dir (see DistToLine).
func Line(p0, dir complex128) func(complex128) float64 {
	measure := func(z complex128) float64 {
		return DistToLine(z, p0, dir)
	}
	return measure
}
// Point builds a trap that measures the distance (as computed by abs)
// between a point z and the fixed trap location.
func Point(trap complex128) func(complex128) float64 {
	return func(z complex128) float64 {
		delta := z - trap
		return abs(delta)
	}
}
// DistToLine returns the distance between the point z and a line function
// specified by the direction of the line and a point on the line.
func DistToLine(z, p0, dir complex128) float64 {
// Dot product.
projLen := real(z)*real(dir) + imag(z)*imag(dir)
// Parameter on line.
t := sign(real(dir)) * sign(imag(dir)) * projLen / (math.Abs(real(dir)) + math.Abs(imag(dir)))
// Point on line closest to our point z.
p := p0 + complex(real(dir)*t, imag(dir)*t)
// Vector between the closest point on the line and the point.
n := z - p
return abs(n)
}
// sign reports the sign of f: -1 for strictly negative inputs and 1
// otherwise (zero and NaN both map to 1, matching the `<` comparison).
func sign(f float64) float64 {
	result := 1.0
	if f < 0 {
		result = -1.0
	}
	return result
}
package gotree
import "strconv"
// BinaryTree https://en.wikipedia.org/wiki/Binary_tree
//
// BinaryTree is a simple binary tree addressed solely through its Root node.
type BinaryTree struct {
	Root *Node
}

// TODO: interfaces to interact with BinarySearchTrees

// Node contains data (and usually a value or a pointer to a value) and pointers to the child nodes
type Node struct {
	// Left and Right are nil when the corresponding child is absent.
	Left  *Node
	Right *Node
	Data  int
}
// CreateBinarySubtree returns the root after recursively creating a tree
// given a slice of strings that are either an integer or "nil".
//
// The slice is interpreted in level order: the children of the node at
// index i live at indexes 2i+1 and 2i+2. An out-of-range index, the
// sentinel "nil", or a string that does not parse as an integer all yield
// a nil subtree.
func CreateBinarySubtree(a []string, index int) *Node {
	// The bounds check also covers the empty-slice case; negative indexes
	// are rejected instead of panicking.
	if index < 0 || index >= len(a) {
		return nil
	}
	if a[index] == "nil" {
		return nil
	}
	value, err := strconv.Atoi(a[index])
	if err != nil {
		// TODO: logging?
		return nil
	}
	// The recursive calls bounds-check their own indexes, so no extra
	// length guards are needed before descending.
	return &Node{
		Data:  value,
		Left:  CreateBinarySubtree(a, index*2+1),
		Right: CreateBinarySubtree(a, index*2+2),
	}
}
// Height is the longest distance from the root to a leaf in a binary tree,
// effectively counting the number of edges.
// https://www.cs.cmu.edu/~adamchik/15-121/lectures/Trees/trees.html
func (tree *BinaryTree) Height() int {
	root := tree.Root
	if root == nil {
		return 0
	}
	if root.Left == nil && root.Right == nil {
		return 0
	}
	// SubtreeHeight counts nodes on the longest path; subtract one to
	// report edges instead.
	return SubtreeHeight(root) - 1
}
// SubtreeHeight recursively calculates the number of nodes on the longest
// path from n down to a leaf; a nil node contributes 0, so a leaf yields 1.
func SubtreeHeight(n *Node) int {
	if n == nil {
		return 0
	}
	// The recursion handles nil children, making the previous explicit
	// leaf and nil-child guards redundant.
	left := SubtreeHeight(n.Left)
	right := SubtreeHeight(n.Right)
	if left > right {
		return left + 1
	}
	return right + 1
}
// MinimumDepth is a convenience wrapper for the number of nodes on the
// shortest path from a tree root to a leaf. An empty tree has depth 0.
func (tree *BinaryTree) MinimumDepth() int {
	if tree.Root != nil {
		return subtreeMinimumDepth(tree.Root, 1)
	}
	return 0
}
// subtreeMinimumDepth is a depth first recursive algorithm to find the shortest path from root to leaf
// the leaf node triggers the return
// every intermediate node needs to add one
func subtreeMinimumDepth(n *Node, depth int) int {
leftMax := 0
rightMax := 0
if n.Left == nil && n.Right == nil {
return depth
}
if n.Right == nil {
return subtreeMinimumDepth(n.Left, depth+1)
}
if n.Left == nil {
return subtreeMinimumDepth(n.Right, depth+1)
}
leftMax = subtreeMinimumDepth(n.Left, depth+1)
rightMax = subtreeMinimumDepth(n.Right, depth+1)
if leftMax < rightMax {
return leftMax
}
return rightMax
} | binary-tree.go | 0.603815 | 0.677829 | binary-tree.go | starcoder |
package mandelbrot
import (
"image"
"math"
"github.com/lucasb-eyer/go-colorful"
)
// Render renders the Mandelbrot set using the complex coordinates
// (centerX, centerY) and zoom factor. The colors parameter is a slice of
// color hex strings that will be used to color the image. The last color in
// the slice is used to color the inside of the fractal. The colorSpacing
// parameter determines how often the color scheme repeats: a low value
// causes the colors to repeat often while a high value causes the colors to
// be stretched out.
//
// One goroutine is spawned per pixel row; rows are collected from an
// unbuffered channel in arbitrary order and placed by their Row index.
func Render(dimensions *image.Point, centerX float64, centerY float64, zoom float64, colors []string, colorSpacing int) *image.NRGBA {
	var (
		minCx, minCy, widthCx, heightCy float64 = calculateExtents(centerX, centerY, zoom)
	)
	m := image.NewNRGBA(image.Rect(0, 0, dimensions.X, dimensions.Y))
	insideColor, _ := colorful.Hex(colors[len(colors)-1])
	palette := makePalette(colors, colorSpacing)
	ch := make(chan pixelRow)
	// compute the mandelbrot set
	for py := 0; py < dimensions.Y; py++ {
		// Map the pixel row to its imaginary coordinate.
		y0 := (float64(py) / float64(dimensions.Y)) * heightCy + minCy
		// use concurrency to take advantage of multiple cores
		go renderRow(py, y0, dimensions.X, minCx, minCy, widthCx, heightCy, palette, insideColor, ch)
	}
	// receive rendered rows and write to image
	for py := 0; py < dimensions.Y; py++ {
		// receive result
		pixelRow := <- ch
		// write color values to image; colorful channels are in [0, 1]
		for px := 0; px < dimensions.X; px++ {
			index := pixelRow.Row*m.Stride + px*4
			color := pixelRow.Pixels[px]
			m.Pix[index] = uint8(color.R * 255)
			m.Pix[index+1] = uint8(color.G * 255)
			m.Pix[index+2] = uint8(color.B * 255)
			m.Pix[index+3] = 255
		}
	}
	return m
}
// pixelRow carries one finished row of pixels from a render goroutine back
// to Render; Row is the y-index the pixels belong to.
type pixelRow struct {
	Row    int
	Pixels []colorful.Color
}
// renderRow renders one pixel row of the mandelbrot set and sends the
// result back on ch so that rows can be computed concurrently.
//
// NOTE(review): minCy and heightCy are accepted but never used here (the
// caller precomputes y0); consider dropping them from the signature.
func renderRow(py int, y0 float64, width int, minCx float64, minCy float64, widthCx float64, heightCy float64, palette []colorful.Color, insideColor colorful.Color, ch chan pixelRow) {
	pixels := make([]colorful.Color, width)
	for px := 0; px < width; px++ {
		// Map the pixel column to its real coordinate.
		x0 := (float64(px) / float64(width)) * widthCx + minCx
		pixels[px]= renderPixel(px, py, x0, y0, palette, insideColor)
	}
	// send result back using channel
	ch <- pixelRow{py, pixels}
}
// renderPixel computes the smoothed escape-time color for the complex
// coordinate (x0, y0), blending between adjacent palette entries for a
// continuous gradient. Points that never escape within maxIteration are
// painted with insideColor.
func renderPixel(px int, py int, x0 float64, y0 float64, palette []colorful.Color, insideColor colorful.Color) (colorful.Color) {
	var x, y, xSquare, ySquare, iteration float64
	var maxIteration float64 = 4096
	// Escape-time loop for z -> z^2 + c, reusing the squared components.
	for ; (xSquare + ySquare) < 4 && iteration < maxIteration; iteration++ {
		// (x+y)^2 - x^2 - y^2 == 2xy, the imaginary part of z^2.
		y = (x + y) * (x + y) - xSquare - ySquare + y0
		x = xSquare - ySquare + x0
		xSquare = x * x
		ySquare = y * y
	}
	if (iteration < maxIteration) {
		// Fractional iteration count for smooth (continuous) coloring.
		zn := xSquare + ySquare
		nu := math.Log( math.Log(zn) / 2 / math.Log(2) ) / math.Log(2)
		iteration = iteration + 1 - nu
	}
	// Wrap the (fractional) iteration count around the palette and blend
	// between the two neighbouring entries.
	color1 := palette[ int(math.Floor(math.Mod( iteration, float64(len(palette))))) ]
	color2 := palette[ int(math.Floor(math.Mod( iteration + 1, float64(len(palette))))) ]
	color := color1.BlendRgb(color2, math.Mod(iteration, 1))
	if (iteration >= maxIteration) {
		color = insideColor
	}
	return color
}
// makePalette expands the base colors into a cyclic gradient palette with
// colorSpacing blended steps between consecutive colors. The final entry of
// colors is reserved for the fractal interior and excluded from the cycle.
func makePalette(colors []string, colorSpacing int) ([]colorful.Color) {
	cycle := len(colors) - 1
	palette := make([]colorful.Color, cycle*colorSpacing)
	for c := 0; c < cycle; c++ {
		from, _ := colorful.Hex(colors[c])
		to, _ := colorful.Hex(colors[(c+1)%cycle])
		for step := 0; step < colorSpacing; step++ {
			fraction := float64(step) / float64(colorSpacing)
			palette[c*colorSpacing+step] = from.BlendRgb(to, fraction)
		}
	}
	return palette
}
// calculateExtents converts a center point and zoom factor into the minimum
// corner and size of the viewport in the complex plane. Zoom 1 corresponds
// to a 3x2 region.
func calculateExtents(centerX float64, centerY float64, zoom float64) (float64, float64, float64, float64) {
	const (
		baseWidth  = 3.0
		baseHeight = 2.0
	)
	widthCx := baseWidth / zoom
	heightCy := baseHeight / zoom
	return centerX - widthCx/2, centerY - heightCy/2, widthCx, heightCy
}
package Math
import (
"errors"
"math"
)
// a vector is constructed having row and columns of the input
// data contains our points
//
// Vector is a dense vector: Data holds the components, while the two
// dimension fields record its shape for compatibility checks in Dot/Sum.
type Vector struct {
	Row_dimension int       `json:"row_dimension"`
	Col_dimension int       `json:"col_dimension"`
	Data          []float64 `json:"data"`
}
// Dot computes the inner product of a row vector with a column vector. The
// receiver's column dimension must equal the argument's row dimension;
// otherwise an error is returned.
func (row_vec Vector) Dot(col_vec Vector) (sum float64, err error) {
	if row_vec.Col_dimension != col_vec.Row_dimension {
		return 0, errors.New("Incompatible dimensions")
	}
	for i, v := range row_vec.Data {
		sum += v * col_vec.Data[i]
	}
	return sum, nil
}
// Sum adds two equally-shaped vectors element-wise, returning an error on
// any dimension mismatch. A fresh backing slice is allocated for the result.
func (row_vecA Vector) Sum(row_vecB Vector) (sum Vector, err error) {
	if row_vecA.Row_dimension != row_vecB.Row_dimension || row_vecA.Col_dimension != row_vecB.Col_dimension {
		return Vector{}, errors.New("Incompatible dimensions")
	}
	total := make([]float64, len(row_vecA.Data))
	for i, v := range row_vecA.Data {
		total[i] = v + row_vecB.Data[i]
	}
	sum = Vector{
		Row_dimension: row_vecA.Row_dimension,
		Col_dimension: row_vecB.Col_dimension,
		Data:          total,
	}
	return sum, nil
}
// Subtract computes the element-wise difference of two equally-shaped
// vectors, returning an error on any dimension mismatch.
func (row_vecA Vector) Subtract(row_vecB Vector) (sum Vector, err error) {
	if row_vecA.Row_dimension != row_vecB.Row_dimension || row_vecA.Col_dimension != row_vecB.Col_dimension {
		return Vector{}, errors.New("Incompatible dimensions")
	}
	diff := make([]float64, len(row_vecA.Data))
	for i, v := range row_vecA.Data {
		diff[i] = v - row_vecB.Data[i]
	}
	sum = Vector{
		Row_dimension: row_vecA.Row_dimension,
		Col_dimension: row_vecB.Col_dimension,
		Data:          diff,
	}
	return sum, nil
}
// T transposes the vector by swapping its row and column dimensions. The
// receiver is a value, so the caller's vector is left untouched.
func (vec Vector) T() Vector {
	vec.Row_dimension, vec.Col_dimension = vec.Col_dimension, vec.Row_dimension
	return vec
}
// L2Norm returns the Euclidean norm: the square root of the sum of the
// squared components.
func (vec Vector) L2Norm() float64 {
	total := 0.0
	for _, v := range vec.Data {
		total += math.Pow(v, 2)
	}
	return math.Sqrt(total)
}
// Zeros creates a zero vector with the given dimensions.
//
// make already zero-initialises the slice, so the previous explicit fill
// loop was redundant and has been dropped.
//
// NOTE(review): the backing slice is sized by row_dim only, mirroring the
// original implementation — confirm whether row_dim*col_dim was intended.
func Zeros(row_dim, col_dim int) Vector {
	return Vector{
		Row_dimension: row_dim,
		Col_dimension: col_dim,
		Data:          make([]float64, row_dim),
	}
}
/*
 * scalar-vector ops
 */

// Add returns a copy of the vector with scalar added to every component;
// the receiver's backing slice is left untouched.
func (vec Vector) Add(scalar float64) Vector {
	shifted := make([]float64, len(vec.Data))
	for i, v := range vec.Data {
		shifted[i] = v + scalar
	}
	vec.Data = shifted
	return vec
}
// Diff returns a copy of the vector with scalar subtracted from every
// component; the receiver's backing slice is left untouched.
func (vec Vector) Diff(scalar float64) Vector {
	shifted := make([]float64, len(vec.Data))
	for i, v := range vec.Data {
		shifted[i] = v - scalar
	}
	vec.Data = shifted
	return vec
}
// Prod returns a copy of the vector with every component multiplied by
// scalar; the receiver's backing slice is left untouched.
func (vec Vector) Prod(scalar float64) Vector {
	scaled := make([]float64, len(vec.Data))
	for i, v := range vec.Data {
		scaled[i] = v * scalar
	}
	vec.Data = scaled
	return vec
}
func (vec Vector) Div (scalar float64) Vector {
temp := make([]float64,len(vec.Data))
for i:=0;i<len(temp) ; i++ {
temp[i] = vec.Data[i] / scalar
}
vec.Data = temp
return vec
} | Math/vector.go | 0.646795 | 0.652144 | vector.go | starcoder |
package graph
import (
"fmt"
"github.com/fogleman/delaunay"
)
// Graph is an undirected graph over 2D integer vertices.
type Graph struct {
	Vertices []*Vertex
	Edges    []*Edge
	// We using an Adjacency List boys. |E|/|V|^2 is typically > 1/64, at least in the graphs I like seeing it make
	Adjacency map[*Vertex][]*connection
}

// connection pairs a neighbouring vertex with the edge that reaches it;
// every edge is recorded once in each endpoint's adjacency list.
type connection struct {
	edge   *Edge
	vertex *Vertex
}
// New_Graph allocates an empty graph with an initialised adjacency map.
func New_Graph() *Graph {
	return &Graph{Adjacency: make(map[*Vertex][]*connection)}
}
// String renders the graph as an indexed vertex list followed by a
// semicolon-separated edge list.
func (g Graph) String() string {
	out := ""
	for i, vertex := range g.Vertices {
		out += fmt.Sprintf("[%d]: %s\n", i, vertex.String())
	}
	for _, edge := range g.Edges {
		out += edge.String() + "; "
	}
	return out
}
// has reports whether v (compared by pointer identity) belongs to the graph.
func (g *Graph) has(v *Vertex) bool {
	for _, member := range g.Vertices {
		if member == v {
			return true
		}
	}
	return false
}
// Add_Vertex inserts a vertex at (x, y). If an equivalent vertex already
// exists, the existing vertex is returned along with a
// VertexAlreadyExistsError.
func (g *Graph) Add_Vertex(x, y int) (*Vertex, error) {
	candidate := &Vertex{X: x, Y: y}
	for _, existing := range g.Vertices {
		if candidate.Same_As(existing) {
			return existing, &VertexAlreadyExistsError{vertex: existing}
		}
	}
	g.Vertices = append(g.Vertices, candidate)
	return candidate, nil
}
// Add_Edge connects v1 and v2. Both vertices must already belong to the
// graph. If the edge already exists, the existing edge is returned along
// with an EdgeAlreadyExistsError.
func (g *Graph) Add_Edge(v1 *Vertex, v2 *Vertex) (*Edge, error) {
	if !g.has(v1) {
		return nil, &MissingVertexError{vertex: v1}
	}
	if !g.has(v2) {
		return nil, &MissingVertexError{vertex: v2}
	}
	candidate := NewEdge(v1, v2)
	// Checking v1's adjacency list is sufficient: every edge is recorded
	// on both of its endpoints.
	for _, conn := range g.Adjacency[v1] {
		if candidate.same_as(conn.edge) {
			return conn.edge, &EdgeAlreadyExistsError{edge: conn.edge}
		}
	}
	g.Edges = append(g.Edges, candidate)
	g.Adjacency[v1] = append(g.Adjacency[v1], &connection{vertex: v2, edge: candidate})
	g.Adjacency[v2] = append(g.Adjacency[v2], &connection{vertex: v1, edge: candidate})
	return candidate, nil
}
// Remove_Edge deletes e from the graph's edge list and from the adjacency
// lists of both endpoints. It returns an EdgeNotFoundError when no matching
// edge exists; only the first matching entry in each list is removed.
func (g *Graph) Remove_Edge(e *Edge) error {
	for it, edge := range g.Edges {
		if e.same_as(edge) {
			// Remove from graph Edge list
			g.Edges = append(g.Edges[:it], g.Edges[it+1:]...)
			// Remove from the two Adjacency slices (the inner `it`
			// shadows the outer loop index on purpose).
			for it, conn := range g.Adjacency[e.v1] {
				if e.same_as(conn.edge) {
					g.Adjacency[e.v1] = append(g.Adjacency[e.v1][:it], g.Adjacency[e.v1][it+1:]...)
					break
				}
			}
			for it, conn := range g.Adjacency[e.v2] {
				if e.same_as(conn.edge) {
					g.Adjacency[e.v2] = append(g.Adjacency[e.v2][:it], g.Adjacency[e.v2][it+1:]...)
					break
				}
			}
			return nil
		}
	}
	return &EdgeNotFoundError{edge: e}
}
// Delaunay_Triangulate computes the Delaunay triangulation of the graph's
// vertices.
func (g *Graph) Delaunay_Triangulate() (*delaunay.Triangulation, error) {
	// Pre-size the point slice: exactly one point per vertex.
	points := make([]delaunay.Point, 0, len(g.Vertices))
	for _, v := range g.Vertices {
		points = append(points, delaunay.Point{X: float64(v.X), Y: float64(v.Y)})
	}
	return delaunay.Triangulate(points)
}
func (g *Graph) Connect_Delaunay() error { // https://mapbox.github.io/delaunator/
triangulation, err := g.Delaunay_Triangulate()
for it := 0; it < len(triangulation.Triangles)/3; it++ {
for jt := 0; jt < 3; jt++ {
g.Add_Edge(g.Vertices[triangulation.Triangles[3*it+jt]], g.Vertices[triangulation.Triangles[3*it+(jt+1)%3]])
}
}
return err
} | game/util/graph/graph.go | 0.706494 | 0.425605 | graph.go | starcoder |
package mocks
import (
"encoding/json"
"github.com/taxjar/taxjar-go"
)
// SummaryRates - mock response
//
// SummaryRates is the decoded form of SummaryRatesJSON, populated during
// package initialisation by the blank-identifier Unmarshal below.
var SummaryRates = new(taxjar.SummaryRatesResponse)

// The blank assignment forces the unmarshal to run at init time; the error
// is intentionally discarded because the JSON below is a fixed test fixture.
var _ = json.Unmarshal([]byte(SummaryRatesJSON), &SummaryRates)

// SummaryRatesJSON - mock SummaryRates JSON
var SummaryRatesJSON = `{
  "summary_rates": [
    {
      "country_code": "AT",
      "country": "Austria",
      "region_code": null,
      "region": null,
      "minimum_rate": {
        "label": "VAT",
        "rate": 0.2
      },
      "average_rate": {
        "label": "VAT",
        "rate": 0.2
      }
    },
    {
      "country_code": "BE",
      "country": "Belgium",
      "region_code": null,
      "region": null,
      "minimum_rate": {
        "label": "VAT",
        "rate": 0.21
      },
      "average_rate": {
        "label": "VAT",
        "rate": 0.21
      }
    },
    {
      "country_code": "BG",
      "country": "Bulgaria",
      "region_code": null,
      "region": null,
      "minimum_rate": {
        "label": "VAT",
        "rate": 0.2
      },
      "average_rate": {
        "label": "VAT",
        "rate": 0.2
      }
    },
    {
      "country_code": "CA",
      "country": "Canada",
      "region_code": "AB",
      "region": "Alberta",
      "minimum_rate": {
        "label": "GST",
        "rate": 0.05
      },
      "average_rate": {
        "label": "GST",
        "rate": 0.05
      }
    },
    {
      "country_code": "CA",
      "country": "Canada",
      "region_code": "BC",
      "region": "British Columbia",
      "minimum_rate": {
        "label": "GST",
        "rate": 0.05
      },
      "average_rate": {
        "label": "GST/PST",
        "rate": 0.12
      }
    }
  ]
}`
package value
import "strings"
// compareStringFunc is the comparison primitive shared by the
// Greater/Less family of helpers at the bottom of this file.
type compareStringFunc func(a, b string) bool

// StringSlice holds a slice of string values
type StringSlice struct {
	// valsPtr points at the backing slice so that externally owned
	// storage can be read and updated in place.
	valsPtr *[]string
}
// NewStringSlice makes a new StringSlice with the given string values. The
// input is copied, so later mutation of vals does not affect the result.
func NewStringSlice(vals ...string) *StringSlice {
	backing := make([]string, len(vals))
	copy(backing, vals)
	return &StringSlice{valsPtr: &backing}
}
// NewStringSliceFromPtr makes a new StringSlice with the given pointer to string values.
// The pointed-to slice is shared, not copied: Set and Parse mutate it in place.
func NewStringSliceFromPtr(valsPtr *[]string) *StringSlice {
	return &StringSlice{valsPtr: valsPtr}
}

// Set changes the string values.
func (v *StringSlice) Set(vals []string) { *v.valsPtr = vals }

// Type returns TypeString.
func (v *StringSlice) Type() Type { return TypeString }

// IsSlice returns true.
func (v *StringSlice) IsSlice() bool { return true }

// Clone produces a clone that is identical except for the backing pointer.
func (v *StringSlice) Clone() Value { return NewStringSlice(*v.valsPtr...) }
// Parse sets the values from the given comma-separated string, trimming
// surrounding whitespace from each element. It always returns nil.
func (v *StringSlice) Parse(str string) error {
	parts := strings.Split(str, ",")
	for i, part := range parts {
		parts[i] = strings.TrimSpace(part)
	}
	*v.valsPtr = parts
	return nil
}
// SlicePointer returns the pointer for storage of slice values.
func (v *StringSlice) SlicePointer() interface{} { return v.valsPtr }

// Slice returns the string slice values (the live backing slice, not a copy).
func (v *StringSlice) Slice() interface{} { return *v.valsPtr }

// Len returns the number of slice elements.
func (v *StringSlice) Len() int { return len(*v.valsPtr) }
// Equal checks if length and values of the given slice equal the current
// one, element by element. Returns a non-nil error if types do not match.
func (v *StringSlice) Equal(v2 Slice) (bool, error) {
	if err := CheckType(TypeString, v2.Type()); err != nil {
		return false, err
	}
	ours := *v.valsPtr
	theirs := v2.Slice().([]string)
	if len(ours) != len(theirs) {
		return false, nil
	}
	for i := range ours {
		if ours[i] != theirs[i] {
			return false, nil
		}
	}
	return true, nil
}
// Greater checks if all values of the current slice are greater than that of
// the given single. An empty slice yields false.
// Returns a non-nil error if types do not match.
func (v *StringSlice) Greater(v2 Single) (bool, error) {
	return compareStrings(*v.valsPtr, v2, stringGreater)
}

// GreaterEqual checks if all values of the current slice are greater or equal
// to the given single. An empty slice yields false.
// Returns a non-nil error if types do not match.
func (v *StringSlice) GreaterEqual(v2 Single) (bool, error) {
	return compareStrings(*v.valsPtr, v2, stringGreaterEqual)
}

// Less checks if all values of the current slice are less than that of
// the given single. An empty slice yields false.
// Returns a non-nil error if types do not match.
func (v *StringSlice) Less(v2 Single) (bool, error) {
	return compareStrings(*v.valsPtr, v2, stringLess)
}

// LessEqual checks if all values of the current slice are less or equal
// to the given single. An empty slice yields false.
// Returns a non-nil error if types do not match.
func (v *StringSlice) LessEqual(v2 Single) (bool, error) {
	return compareStrings(*v.valsPtr, v2, stringLessEqual)
}
// Contains checks if the given single value is equal to one of the current
// slice values. Returns a non-nil error if types do not match.
func (v *StringSlice) Contains(v2 Single) (bool, error) {
	if err := CheckType(TypeString, v2.Type()); err != nil {
		return false, err
	}
	needle := v2.Value().(string)
	for _, candidate := range *v.valsPtr {
		if candidate == needle {
			return true, nil
		}
	}
	return false, nil
}
// compareStrings reports whether f holds between every element of vals and
// the single value v2. An empty slice yields false; a type mismatch yields
// a non-nil error.
func compareStrings(vals []string, v2 Single, f compareStringFunc) (bool, error) {
	if err := CheckType(TypeString, v2.Type()); err != nil {
		return false, err
	}
	if len(vals) == 0 {
		return false, nil
	}
	ref := v2.Value().(string)
	for _, val := range vals {
		if !f(val, ref) {
			return false, nil
		}
	}
	return true, nil
}
// stringGreater reports a > b under lexicographic byte order.
func stringGreater(a, b string) bool {
	return b < a
}

// stringGreaterEqual reports a >= b under lexicographic byte order.
func stringGreaterEqual(a, b string) bool {
	return b <= a
}

// stringLess reports a < b under lexicographic byte order.
func stringLess(a, b string) bool {
	return b > a
}

// stringLessEqual reports a <= b under lexicographic byte order.
func stringLessEqual(a, b string) bool {
	return b >= a
}
package ternarytree
// Search tests whether a string is present in the ternary search tree. The
// empty string is present only when the tree records it (hasEmpty).
func (tree *TernaryTree) Search(s string) bool {
	// Guard clause instead of the previous if/else-if/else chain.
	if len(s) == 0 {
		return tree.hasEmpty
	}
	return searchVisitor(tree.head, s[0], s[1:])
}
// searchVisitor recursively follows the ternary tree: smaller bytes branch
// low, larger bytes branch high, and an exact match consumes one character
// and descends through the equal child. A word is present only when the
// final node stores data.
func searchVisitor(node *treeNode, head byte, tail string) bool {
	if node == nil {
		return false
	}
	switch {
	case head < node.char:
		return searchVisitor(node.loKid, head, tail)
	case head > node.char:
		return searchVisitor(node.hiKid, head, tail)
	default:
		if len(tail) > 0 {
			return searchVisitor(node.eqKid, tail[0], tail[1:])
		}
		return node.data != nil
	}
}
// PartialMatchSearch searches the ternary search tree for contained strings
// that match the supplied target string, treating every occurrence of the
// wildcard byte as "any character".
func (tree *TernaryTree) PartialMatchSearch(s string, wildcard byte) []string {
	var matches []string
	if len(s) == 0 {
		if tree.hasEmpty {
			matches = append(matches, "")
		}
		return matches
	}
	pmSearchVisitor(tree.head, wildcard, s[0], s[1:], &matches)
	return matches
}
// pmSearchVisitor walks the tree collecting stored words that match the
// remaining pattern (head + tail), where a byte equal to wildcard matches
// any stored character. Results arrive in sorted order because the
// traversal is in-order (lo, eq, hi).
//
// Fix: only terminal nodes (non-nil data) are reported. Previously a
// pattern ending on an interior node dereferenced a nil data pointer and
// panicked; the guard now matches searchVisitor and nnSearchVisitor.
func pmSearchVisitor(node *treeNode, wildcard byte, head byte, tail string, result *[]string) {
	if node == nil {
		return
	}
	if head == wildcard || head < node.char {
		pmSearchVisitor(node.loKid, wildcard, head, tail, result)
	}
	if head == wildcard || head == node.char {
		if len(tail) > 0 {
			pmSearchVisitor(node.eqKid, wildcard, tail[0], tail[1:], result)
		} else if node.data != nil {
			*result = append(*result, *node.data)
		}
	}
	if head == wildcard || head > node.char {
		pmSearchVisitor(node.hiKid, wildcard, head, tail, result)
	}
}
// NearNeighborSearch finds stored strings that differ by no more than the
// given Hamming distance from the target string.
func (tree *TernaryTree) NearNeighborSearch(s string, distance int) []string {
	var matches []string
	if len(s) == 0 {
		if tree.hasEmpty {
			matches = append(matches, "")
		}
		return matches
	}
	nnSearchVisitor(tree.head, distance, s[0], s[1:], &matches)
	return matches
}
// nnSearchVisitor collects stored words whose Hamming-style distance from
// the remaining target (head + tail) fits the given budget. A character
// mismatch costs one unit; lo/hi branches may be explored while any budget
// remains.
//
// NOTE(review): the `node.data != nil` branch never descends into eqKid, so
// this assumes a terminal node has no equal child (no stored word is a
// prefix of another) — confirm against the insertion routine.
func nnSearchVisitor(node *treeNode, distance int, head byte, tail string, result *[]string) {
	if node == nil || distance < 0 {
		return
	}
	if distance > 0 || head < node.char {
		nnSearchVisitor(node.loKid, distance, head, tail, result)
	}
	// Spend one unit of distance on a character mismatch at this node.
	d := distance
	if head != node.char {
		d = distance - 1
	}
	if node.data != nil {
		// The stored word ends here; each leftover target character costs
		// one unit, so accept when they fit in the remaining budget.
		if len(tail) <= d {
			*result = append(*result, *node.data)
		}
	} else {
		if len(tail) > 0 {
			nnSearchVisitor(node.eqKid, d, tail[0], tail[1:], result)
		}
	}
	if distance > 0 || head > node.char {
		nnSearchVisitor(node.hiKid, distance, head, tail, result)
	}
}
package av
import (
"encoding/csv"
"io"
"sort"
"time"
"github.com/pkg/errors"
)
const (
	// digitalCurrencySeriesDateFormat is the format that digital currency time series data comes in
	digitalCurrencySeriesDateFormat = "2006-01-02 15:04:05"
)

// DigitalCurrencySeriesValue is a piece of data for a given time about digital currency prices
type DigitalCurrencySeriesValue struct {
	Time time.Time
	// Price is the recorded in the physical currency specified
	Price     float64
	Volume    float64
	MarketCap float64
}

// sortDigitalCurrencySeriesValuesByDate allows DigitalCurrencySeriesValue
// slices to be sorted by date in ascending order (implements sort.Interface)
type sortDigitalCurrencySeriesValuesByDate []*DigitalCurrencySeriesValue

// Len reports the number of values.
func (b sortDigitalCurrencySeriesValuesByDate) Len() int { return len(b) }

// Less orders values by ascending timestamp.
func (b sortDigitalCurrencySeriesValuesByDate) Less(i, j int) bool { return b[i].Time.Before(b[j].Time) }

// Swap exchanges two values.
func (b sortDigitalCurrencySeriesValuesByDate) Swap(i, j int) { b[i], b[j] = b[j], b[i] }
// parseDigitalCurrencySeriesData will parse csv data from a reader into a
// date-ascending slice of series values. The first row is treated as a
// header and discarded; an input holding only the header (or nothing)
// yields a nil slice and no error.
func parseDigitalCurrencySeriesData(r io.Reader) ([]*DigitalCurrencySeriesValue, error) {
	reader := csv.NewReader(r)
	reader.ReuseRecord = true // optimization
	reader.LazyQuotes = true
	reader.TrimLeadingSpace = true
	// The deprecated TrailingComma field was dropped: it is ignored by
	// encoding/csv and setting it has no effect.

	// strip header
	if _, err := reader.Read(); err != nil {
		if err == io.EOF {
			return nil, nil
		}
		return nil, err
	}

	values := make([]*DigitalCurrencySeriesValue, 0, 64)
	for {
		record, err := reader.Read()
		if err != nil {
			if err == io.EOF {
				break
			}
			return nil, err
		}
		value, err := parseDigitalCurrencySeriesRecord(record)
		if err != nil {
			return nil, err
		}
		values = append(values, value)
	}

	// sort values by date
	sort.Sort(sortDigitalCurrencySeriesValuesByDate(values))
	return values, nil
}
// parseDigitalCurrencySeriesRecord will parse an individual csv record
func parseDigitalCurrencySeriesRecord(s []string) (*DigitalCurrencySeriesValue, error) {
// these are the expected columns in the csv record
const (
timestamp = iota
price
_ // price 2 ?? seems to be a duplicate of the first price
volume
marketCap
)
value := &DigitalCurrencySeriesValue{}
d, err := parseDate(s[timestamp], digitalCurrencySeriesDateFormat)
if err != nil {
return nil, errors.Wrapf(err, "error parsing timestamp %s", s[timestamp])
}
value.Time = d
f, err := parseFloat(s[price])
if err != nil {
return nil, errors.Wrapf(err, "error parsing price %s", s[price])
}
value.Price = f
f, err = parseFloat(s[volume])
if err != nil {
return nil, errors.Wrapf(err, "error parsing volume %s", s[volume])
}
value.Volume = f
f, err = parseFloat(s[marketCap])
if err != nil {
return nil, errors.Wrapf(err, "error parsing market cap %s", s[marketCap])
}
value.MarketCap = f
return value, nil
} | vendor/github.com/cmckee-dev/go-alpha-vantage/digital_currency.go | 0.717903 | 0.523968 | digital_currency.go | starcoder |
package rf
import r "reflect"
// Flags constituting the return value of `rf.Filter`.
// Unknown bits will be ignored.
const (
	// Don't visit self or descendants. Being zero, this is the default.
	VisNone = 0b_0000_0000

	// Visit self.
	VisSelf = 0b_0000_0001

	// Visit descendants.
	VisDesc = 0b_0000_0010

	// Visit both self and descendants.
	VisBoth = VisSelf | VisDesc

	// Same effect as `rf.VisBoth`. Provided for arithmetic: every bit is
	// set, so masking with any visit flag yields that flag.
	VisAll = 0b_1111_1111
)
/*
Tool for implementing efficient reflect-based deep walking. Determines if a
particular node should be visited during a walk, and how. This package provides
several filter implementations, such as filtering by type, by struct tag, or
combining other filters.

The return value of `rf.Filter.Visit` is a combination of two optional flags:
`rf.VisSelf` and `rf.VisDesc`. Flags are combined with bitwise OR. The
following combinations are known:

	return VisNone // Zero value / default.
	return VisSelf
	return VisDesc
	return VisSelf | VisDesc
	return VisBoth // Shortcut for the above.

If the flag `rf.VisDesc` is set, we attempt to generate an inner walker that
visits the descendants of the current node, such as the elements of a slice,
the fields of a struct, the value behind a pointer, or the value referenced by
an interface. Otherwise, we don't attempt to generate an inner walker.

If the flag `rf.VisSelf` is set, we generate a walker that invokes
`Visitor.Visit` on the current node. Otherwise the resulting walker will not
visit the current node, and may possibly be nil.

For technical reasons, all implementations of this interface must be values
rather than references. For example, filters provided by this package must be
used as values rather than pointers. The following is the CORRECT way to
construct filters:

	var filter rf.Filter = rf.And{
		rf.TypeFilter[string]{},
		rf.TagFilter{`json`, `fieldName`},
	}

The following is the INCORRECT way to construct filters. Due to internal
validation, this will cause panics at runtime:

	var filter rf.Filter = &rf.And{
		&rf.TypeFilter[string]{},
		&rf.TagFilter{`json`, `fieldName`},
	}

See also:

	rf.Walker
	rf.Visitor
	rf.GetWalker
	rf.Walk
*/
type Filter interface {
	Visit(r.Type, r.StructField) byte
}
/*
Tool for implementing efficient reflect-based deep walking. The function
`rf.GetWalker` generates a walker for a SPECIFIC combination of parent type and
`rf.Filter`. The resulting walker is specialized for that combination, and
walks its input precisely and efficiently.

For simplicity and efficiency reasons, walkers generated by this package don't
additionally assert that the provided `reflect.Value` has the same type for
which the walker is generated. When using `rf.Walk` or `rf.WalkFunc`, this is
handled for you. Otherwise, it's your responsibility to pass a value of the
same type. For simplicity, walkers also assume that the visitor is non-nil.

This package currently does NOT support walking into maps, for two reasons:
unclear semantics and inefficiency. It's unclear if we should walk keys,
values, or key-value pairs, and how that affects the rest of the walking API.
Currently in Go 1.17, reflect-based map walking has horrible inefficiencies
which can't be amortized by 3rd party code. It would be a massive performance
footgun.

This package does support walking into interface values included into other
structures, but at an efficiency loss. In general, our walking mechanism relies
on statically determining what we should and shouldn't visit, which is possible
only with static types. Using interfaces as dynamically-typed containers of
unknown values defeats this design by forcing us to always visit each of them,
and may produce significant slowdowns. However, while visiting each interface
value is an unfortunate inefficiency, walking the value REFERENCED by an
interface is as precise and efficient as with static types.
*/
type Walker interface {
	Walk(r.Value, Visitor)
}
/*
Used by `rf.Walker` and `rf.Walk` to visit certain nodes of the given value. A
visitor can be an arbitrary value or a function; see `rf.VisitorFunc`. Walkers
assume the visitor is non-nil; the entry points in this package check that.
*/
type Visitor interface {
	Visit(r.Value, r.StructField)
}
/*
Function type that implements `rf.Visitor`. Used by `rf.WalkFunc`. Converting a
func to an interface value is alloc-free.
*/
type VisitorFunc func(r.Value, r.StructField)
// Implement `rf.Visitor` by calling itself.
func (self VisitorFunc) Visit(val r.Value, field r.StructField) {
if self == nil {
return
}
self(val, field)
}
// WalkFunc is shorthand for `rf.Walk` with a function visitor.
func WalkFunc(val r.Value, fil Filter, vis VisitorFunc) {
	// Walk cannot see through the VisitorFunc-to-interface conversion to
	// detect a nil function, so check it here before delegating.
	if vis != nil {
		Walk(val, fil, vis)
	}
}
/*
Takes an arbitrary value and performs deep traversal, invoking the visitor for
each node allowed by the filter. Internally, uses `rf.GetWalker` to get or
create a walker specialized for this combination of type and filter. For each
type+filter combination, `rf.GetWalker` generates a specialized walker, caching
it for future calls. This approach allows MUCH more efficient walking.

If the input is zero/invalid/nil or the visitor is nil, this is a nop. For
slightly better performance, pass a pointer to reduce copying.

See also:

	rf.Walker
	rf.Filter
	rf.Visitor
	rf.GetWalker
*/
func Walk(val r.Value, fil Filter, vis Visitor) {
	if vis == nil {
		return
	}

	// A nil walker means the filter matches nothing for this type.
	wal := GetWalker(ValueType(val), fil)
	if wal == nil {
		return
	}

	wal.Walk(val, vis)
}
/*
Shortcut for `rf.Walk` on the given value, which must be either a valid pointer
or nil. If the value is nil, this is a nop. Requiring a pointer is useful for
both efficiency and correctness: even if the walker doesn't modify anything,
passing a pointer reduces copying, and walking a non-pointer with a mutating
walker would otherwise fail with uninformative panics from the "reflect"
package. Validating the input early makes such bugs easier to catch.
*/
func WalkPtr(val any, fil Filter, vis Visitor) {
	if val != nil {
		ptr := ValidateValueKind(r.ValueOf(val), r.Ptr)
		Walk(ValueDeref(ptr), fil, vis)
	}
}
// Shortcut for calling `rf.WalkPtr` with a visitor func.
func WalkPtrFunc(val any, fil Filter, vis VisitorFunc) {
if val == nil {
return
}
// Validate before early return.
tar := ValueDeref(ValidateValueKind(r.ValueOf(val), r.Ptr))
// `Walk` can't detect this case. We have to check it here.
if vis == nil {
return
}
Walk(tar, fil, vis)
}
/*
Returns an `rf.Walker` for the given type with the given filter. Uses caching to
avoid generating a walker more than once. Future calls with the same inputs
will return the same walker instance. Returns nil if for this combination of
type and filter, nothing will be visited. A nil filter is equivalent to a
filter that always returns false, resulting in a nil walker.
*/
func GetWalker(typ r.Type, fil Filter) Walker {
if typ == nil || fil == nil {
return nil
}
validateFilter(fil)
return walkerCacheStatic.get(typ, nil, r.StructField{}, fil)
}
/*
Shortcut for `rf.TrawlWith` without an additional filter. Takes an arbitrary
source value and a pointer to an output slice. Walks the source value,
appending all non-zero values of the matching type to the given slice.
*/
func Trawl[Src any, Out ~[]Elem, Elem any](src *Src, out *Out) {
TrawlWith(src, out, nil)
}
/*
Shortcut for using `rf.Appender` and `rf.Walk` to trawl the provided "source"
value to collect all non-zero values of a specific type into an "output" slice.
The source value may be of arbitrary type. The output must be a non-nil pointer
to a slice. The additional filter is optional.
*/
func TrawlWith[Src any, Out ~[]Elem, Elem any](src *Src, out *Out, fil Filter) {
if src == nil || out == nil {
return
}
/**
The unsafe cast is correct and safe. Workaround for Go limitations.
The following is equivalent and should work, but does not compile:
appender := (*Appender[Elem])(out)
*/
appender := cast[*Appender[Elem]](out)
filter := MaybeAnd(appender.Filter(), fil)
Walk(r.ValueOf(src), filter, appender)
}
// Implementation of `rf.Filter` that always returns `rf.VisSelf`.
type Self struct{}

// Implement `rf.Filter`: visit every node itself, never its descendants.
func (Self) Visit(r.Type, r.StructField) byte { return VisSelf }

// Implementation of `rf.Filter` that always returns `rf.VisDesc`.
type Desc struct{}

// Implement `rf.Filter`: descend into every node, never visiting it.
func (Desc) Visit(r.Type, r.StructField) byte { return VisDesc }

// Implementation of `rf.Filter` that always returns `rf.VisBoth`.
type Both struct{}

// Implement `rf.Filter`: visit every node and its descendants.
func (Both) Visit(r.Type, r.StructField) byte { return VisBoth }

// Implementation of `rf.Filter` that always returns `rf.VisAll`.
type All struct{}

// Implement `rf.Filter`.
func (All) Visit(r.Type, r.StructField) byte { return VisAll }
/*
Implementation of `rf.Filter` that allows to visit values of this specific type.
If the type is nil, this won't visit anything. The type may be either concrete
or an interface. It also allows to visit descendants.
*/
type TypeFilter[_ any] struct{}

// Implement `rf.Filter`: an exact type match visits self and descendants;
// everything else only descends.
func (TypeFilter[A]) Visit(typ r.Type, _ r.StructField) byte {
	if typ == Type[A]() {
		return VisBoth
	}
	return VisDesc
}

/*
Implementation of `rf.Filter` that allows to visit values of the given
`reflect.Kind`. If the kind is `reflect.Invalid`, this won't visit anything.
Untested.
*/
type KindFilter r.Kind

// Implement `rf.Filter`: a kind match visits self and descendants.
func (self KindFilter) Visit(typ r.Type, _ r.StructField) byte {
	if r.Kind(self) == typ.Kind() {
		return VisBoth
	}
	return VisDesc
}

/*
Implementation of `rf.Filter` that allows to visit values whose types implement
the given interface BY POINTER. If the type is nil, this won't visit anything.
The type must represent an interface, otherwise this will panic. The visitor
must explicitly take value address:

	func visit(val r.Value, _ r.StructField) {
		val.Addr().Interface().(SomeInterface).SomeMethod()
	}
*/
type IfaceFilter[_ any] struct{}

// Implement `rf.Filter` via the shared interface-match helper, allowing
// both the matching node and its descendants.
func (IfaceFilter[A]) Visit(typ r.Type, _ r.StructField) byte {
	return ifaceVisit(typ, Type[A](), VisBoth)
}

/*
Like `rf.IfaceFilter`, but visits either self or descendants, not both. In other
words, once it finds a node that implements the given interface (by pointer),
it allows to visit that node and stops there, without walking its descendants.
*/
type ShallowIfaceFilter[_ any] struct{}

// Implement `rf.Filter`; `VisSelf` on a match stops descent there.
func (ShallowIfaceFilter[A]) Visit(typ r.Type, _ r.StructField) byte {
	return ifaceVisit(typ, Type[A](), VisSelf)
}
/*
Implementation of `rf.Filter` that allows to visit values whose struct tag has a
specific tag with a specific value, such as tag "json" with value "-". It also
allows to visit descendants.
Known limitation: can't differentiate empty tag from missing tag.
*/
type TagFilter [2]string
// Implement `rf.Filter`.
func (self TagFilter) Visit(_ r.Type, field r.StructField) byte {
key, val := self[0], self[1]
if key != `` && field.Tag.Get(key) == val {
return VisBoth
}
return VisDesc
}
/*
Implementation of `rf.Filter` that inverts the "self" bit of the inner filter,
without changing the other flags. If the inner filter is nil, this always
returns `rf.VisNone`.
*/
type InvertSelf [1]Filter

// Implement `rf.Filter`: XOR with `VisSelf` flips exactly that bit.
func (self InvertSelf) Visit(typ r.Type, field r.StructField) byte {
	if self[0] == nil {
		return VisNone
	}
	return self[0].Visit(typ, field) ^ VisSelf
}

/*
Micro-optimization for `rf.And`. If the input has NO non-nil filters, returns
nil. If the input has ONE non-nil filter, returns that filter, avoiding an
allocation of `rf.And{}`. Otherwise combines the filters via `rf.And`.
*/
func MaybeAnd(vals ...Filter) Filter {
	// `out[:0]` lets `maybeCombineFilters` fill the fixed-size array in place.
	var out And
	slice := maybeCombineFilters(vals, out[:0])
	switch len(slice) {
	case 0:
		return nil
	case 1:
		return slice[0]
	default:
		return out
	}
}
/*
Implementation of `rf.Filter` that combines other filters, AND-ing their outputs
via `&`. Nil elements are ignored. With no non-nil elements the result is the
zero byte (`VisNone`).
*/
type And [8]Filter

// Implement `rf.Filter`. The first non-nil filter seeds the accumulator;
// every further non-nil verdict is AND-ed in.
func (self And) Visit(typ r.Type, field r.StructField) byte {
	var out byte
	seeded := false
	for _, fil := range self {
		if fil == nil {
			continue
		}
		if !seeded {
			out, seeded = fil.Visit(typ, field), true
			continue
		}
		out &= fil.Visit(typ, field)
	}
	return out
}
/*
Micro-optimization for `rf.Or`. If the input has NO non-nil filters, returns
nil. If the input has ONE non-nil filter, returns that filter, avoiding an
allocation of `rf.Or{}`. Otherwise combines the filters via `rf.Or`.
*/
func MaybeOr(vals ...Filter) Filter {
	// `out[:0]` lets `maybeCombineFilters` fill the fixed-size array in place.
	var out Or
	slice := maybeCombineFilters(vals, out[:0])
	switch len(slice) {
	case 0:
		return nil
	case 1:
		return slice[0]
	default:
		return out
	}
}
/*
Implementation of `rf.Filter` that combines other filters, OR-ing their outputs
via `|`. Nil elements are ignored. With no non-nil elements the result is the
zero byte (`VisNone`).
*/
type Or [8]Filter

// Implement `rf.Filter` by OR-ing the verdicts of all non-nil inner filters.
func (self Or) Visit(typ r.Type, field r.StructField) byte {
	var out byte
	for _, fil := range self {
		if fil == nil {
			continue
		}
		out |= fil.Visit(typ, field)
	}
	return out
}
// No-op implementation of `rf.Visitor` that does nothing upon visit.
type Nop struct{}

// Implement `rf.Visitor`.
func (Nop) Visit(r.Value, r.StructField) {}

// Implements `rf.Visitor` by appending visited non-zero elements.
type Appender[A any] []A

/*
Implement `rf.Visitor` by appending the input value to the inner slice, if the
value is non-zero. Addressable values are read out through their address via a
typed pointer assertion; non-addressable ones are copied via `Interface`
(presumably the pointer path avoids an extra boxing of the value — confirm
against "reflect" semantics).
*/
func (self *Appender[A]) Visit(val r.Value, _ r.StructField) {
	if self != nil && !val.IsZero() {
		if val.CanAddr() {
			*self = append(*self, *val.Addr().Interface().(*A))
		} else {
			*self = append(*self, val.Interface().(A))
		}
	}
}

/*
Returns a filter that allows to visit only values suitable to be elements of the
slice held by the appender.
*/
func (self Appender[A]) Filter() Filter { return TypeFilter[A]{} }
package cron
import (
"errors"
"fmt"
"time"
)
// Scheduler yields successive firing times for a job: Next returns the
// firing time derived from the reference time t, and Done reports whether
// the schedule has been exhausted (one-shot schedules only).
type Scheduler interface {
	Next(t time.Time) time.Time
	Done() bool
}
type PeriodScheduler struct {
period time.Duration
}
func (p *PeriodScheduler) Next(t time.Time) time.Time {
return t.Truncate(time.Second).Add(p.period)
}
func (p *PeriodScheduler) Done() bool {
return false
}
// At upgrades the periodic schedule to fire at a fixed "HH:MM" wall-clock
// time. The period must be at least one day (a time-of-day only makes
// sense for daily-or-longer periods); shorter periods panic, as does a
// malformed time string.
//
// Receiver renamed from `a` to `p` for consistency with the other
// *PeriodScheduler methods (staticcheck ST1016).
func (p *PeriodScheduler) At(t string) Scheduler {
	if p.period < time.Hour*24 {
		panic("Period Must be at least 1 Day")
	}
	h, m, err := parse(t)
	if err != nil {
		panic(err.Error())
	}
	return &AtScheduler{
		period: p.period,
		tH:     h,
		tM:     m,
	}
}
type AtScheduler struct {
period time.Duration
tH int
tM int
}
func (a *AtScheduler) Next(t time.Time) time.Time {
next := time.Date(t.Year(), t.Month(), t.Day(), a.tH, a.tM, 0, 0, time.UTC)
if t.After(next) {
return next.Add(a.period)
}
return next
}
func (a *AtScheduler) Done() bool {
return false
}
type AtOnceScheduler struct {
tH int
tM int
done bool
}
func (a *AtOnceScheduler) Next(t time.Time) time.Time {
next := time.Date(t.Year(), t.Month(), t.Day(), a.tH, a.tM, 0, 0, t.Location())
fmt.Println(next)
if t.After(next) && a.done == false {
next = next.Add(time.Hour * 24)
}
fmt.Println("again", next)
a.done = true
return next
}
func (a *AtOnceScheduler) Done() bool {
return a.done
}
// AtOnce builds a one-shot scheduler firing once at the given "HH:MM"
// wall-clock time; a malformed time string panics.
// NOTE(review): the fmt.Println below looks like leftover debug output —
// consider removing it (a behavior change, since it writes to stdout).
func AtOnce(t string) Scheduler {
	h, m, err := parse(t)
	fmt.Println("House", h, m)
	if err != nil {
		panic(err.Error())
	}
	return &AtOnceScheduler{
		tH:   h,
		tM:   m,
		done: false,
	}
}
// Every builds a scheduler firing on a fixed period. Periods shorter than
// one second are clamped to one second, and the period is truncated to
// whole seconds.
func Every(t time.Duration) Scheduler {
	if t < time.Second {
		t = time.Second
	}
	return &PeriodScheduler{
		period: t.Truncate(time.Second),
	}
}
// parse extracts hour and minute from a clock string in strict "HH:MM"
// form. It returns zero values and an error for malformed input.
//
// Fixes over the previous version: strings shorter than five bytes used
// to panic on indexing (now rejected with an error), non-digit characters
// are rejected explicitly, and the out-of-range checks no longer clobber
// each other's error. The historical upper bound of 24 for the hour is
// kept for behavior compatibility.
func parse(t string) (int, int, error) {
	if len(t) != 5 || t[2] != ':' {
		return 0, 0, errors.New("invalid time format, want HH:MM")
	}
	for _, i := range [...]int{0, 1, 3, 4} {
		if t[i] < '0' || t[i] > '9' {
			return 0, 0, errors.New("invalid time format, want HH:MM")
		}
	}
	h := int(t[0]-'0')*10 + int(t[1]-'0')
	m := int(t[3]-'0')*10 + int(t[4]-'0')
	if h > 24 {
		return 0, 0, errors.New("invalid h format")
	}
	if m > 59 {
		return 0, 0, errors.New("invalid m format")
	}
	return h, m, nil
}
package lshensemble
import (
"math"
)
// LshForestArray represents a MinHash LSH implemented using an array of LshForest.
// It allows a wider range for the K and L parameters.
type LshForestArray struct {
	maxK    int            // largest K any forest in the array supports
	numHash int            // total number of MinHash functions available
	array   []*LshForest   // array[k-1] is the forest built with parameter k
}

// NewLshForestArray initializes with parameters:
// maxK is the maximum value for the MinHash parameter K - the number of hash functions per "band".
// numHash is the number of hash functions in MinHash.
// initSize is the initial size of underlying hash tables to allocate.
func NewLshForestArray(maxK, numHash, initSize int) *LshForestArray {
	array := make([]*LshForest, maxK)
	// One forest per candidate K, each using as many bands (L) as fit into
	// the hash budget: L = numHash / k.
	for k := 1; k <= maxK; k++ {
		array[k-1] = NewLshForest(k, numHash/k, initSize)
	}
	return &LshForestArray{
		maxK:    maxK,
		numHash: numHash,
		array:   array,
	}
}

// Add a key with MinHash signature into the index.
// The key won't be searchable until Index() is called.
// The key/signature pair is inserted into every per-K forest.
func (a *LshForestArray) Add(key interface{}, sig []uint64) {
	for i := range a.array {
		a.array[i].Add(key, sig)
	}
}

// Index makes all the keys added searchable, in every per-K forest.
func (a *LshForestArray) Index() {
	for i := range a.array {
		a.array[i].Index()
	}
}

// Query returns candidate keys given the query signature and parameters.
// K selects which forest to probe (array[K-1], i.e. the forest built with
// that K); the -1 is forwarded as the inner forest's own K parameter —
// presumably meaning "use the forest's configured K"; confirm against
// LshForest.Query.
func (a *LshForestArray) Query(sig []uint64, K, L int, out chan<- interface{}, done <-chan struct{}) {
	a.array[K-1].Query(sig, -1, L, out, done)
}

// OptimalKL returns the optimal K and L for containment search,
// and the false positive and negative probabilities.
// where x is the indexed domain size, q is the query domain size,
// and t is the containment threshold.
// It grid-searches all (k, l) pairs with k*l within the hash budget and
// picks the pair minimizing the sum of error probabilities.
func (a *LshForestArray) OptimalKL(x, q int, t float64) (optK, optL int, fp, fn float64) {
	minError := math.MaxFloat64
	for l := 1; l <= a.numHash; l++ {
		for k := 1; k <= a.maxK; k++ {
			if k*l > a.numHash {
				continue
			}
			currFp := probFalsePositive(x, q, l, k, t, integrationPrecision)
			currFn := probFalseNegative(x, q, l, k, t, integrationPrecision)
			currErr := currFn + currFp
			if minError > currErr {
				minError = currErr
				optK = k
				optL = l
				fp = currFp
				fn = currFn
			}
		}
	}
	return
}
package scene
import (
"strings"
"github.com/Laughs-In-Flowers/shiva/lib/lua"
"github.com/Laughs-In-Flowers/shiva/lib/math"
"github.com/Laughs-In-Flowers/shiva/lib/render"
"github.com/Laughs-In-Flowers/shiva/lib/xrror"
l "github.com/yuin/gopher-lua"
)
// CamT enumerates the supported camera projection kinds.
type CamT int

const (
	NOT_A_CAMERA_TYPE CamT = iota
	PERSPECTIVE
	ORTHOGRAPHIC
	CUSTOM
)

// StringToCamT maps a case-insensitive camera-type name to its CamT,
// returning NOT_A_CAMERA_TYPE for anything unrecognized.
func StringToCamT(s string) CamT {
	switch strings.ToLower(s) {
	case "custom":
		return CUSTOM
	case "orthographic":
		return ORTHOGRAPHIC
	case "perspective":
		return PERSPECTIVE
	default:
		return NOT_A_CAMERA_TYPE
	}
}
// Plane names a camera clipping/projection parameter.
type Plane int

const (
	FOV Plane = iota
	ASPECT
	NEAR
	FAR
	ZOOM
	LEFT
	RIGHT
	TOP
	BOTTOM
	UNKNOWN_PLANE
)

// StringToPlane maps a case-insensitive plane name to its Plane value,
// returning UNKNOWN_PLANE for anything unrecognized.
func StringToPlane(s string) Plane {
	switch strings.ToLower(s) {
	case "aspect":
		return ASPECT
	case "bottom":
		return BOTTOM
	case "far":
		return FAR
	case "fov":
		return FOV
	case "left":
		return LEFT
	case "near":
		return NEAR
	case "right":
		return RIGHT
	case "top":
		return TOP
	case "zoom":
		return ZOOM
	default:
		return UNKNOWN_PLANE
	}
}
// Cam is the projection-defining part of a camera: named plane parameters
// plus a lazily recomputed projection matrix.
type Cam interface {
	Planes() []float32
	GetPlane(Plane) float32
	SetPlane(Plane, float32)
	ProjectionMatrix() math.Matrice
}

// planesFunc reports, for a given cam, the plane values relevant to its
// projection kind and the order they are consumed in (see the
// perspective/orthographic constructors).
type planesFunc func(*cam) []float32

// projectionMatrixFunc recomputes a cam's projection matrix in place.
type projectionMatrixFunc func(*cam)

// cam is the shared implementation behind every camera kind; which plane
// fields are meaningful depends on camt (fov/aspect for perspective,
// left/right/top/bottom for orthographic; near/far/zoom are shared).
type cam struct {
	camt CamT
	plfn planesFunc
	fov, aspect, near, far, zoom float32
	left, right, top, bottom     float32
	pmfn             projectionMatrixFunc
	projectionChange bool
	projectionMatrix math.Matrice
}

// newCam wires a cam with its kind-specific plane and matrix functions.
// Zoom defaults to 1 and the projection matrix starts dirty so it is
// computed on first use.
func newCam(t CamT, p planesFunc, m projectionMatrixFunc) *cam {
	return &cam{
		camt:             t,
		plfn:             p,
		zoom:             1.0,
		pmfn:             m,
		projectionChange: true,
		projectionMatrix: math.Mat4(),
	}
}
// Planes returns this camera kind's plane values in the order defined by
// its kind-specific planesFunc.
func (c *cam) Planes() []float32 {
	return c.plfn(c)
}

// GetPlane returns the current value of the named plane; an unknown plane
// yields 0.
func (c *cam) GetPlane(p Plane) float32 {
	var ret float32
	switch p {
	case FOV:
		ret = c.fov
	case ASPECT:
		ret = c.aspect
	case NEAR:
		ret = c.near
	case FAR:
		ret = c.far
	case ZOOM:
		ret = c.zoom
	case LEFT:
		ret = c.left
	case RIGHT:
		ret = c.right
	case TOP:
		ret = c.top
	case BOTTOM:
		ret = c.bottom
	}
	return ret
}

// SetPlane updates the named plane. FOV, ASPECT and ZOOM also mark the
// projection matrix dirty so it is rebuilt on the next ProjectionMatrix
// call; ZOOM is stored as an absolute value.
// NOTE(review): NEAR, FAR and the orthographic bounds do NOT set
// projectionChange even though the projection functions read them —
// confirm whether that is intentional or a missed invalidation.
func (c *cam) SetPlane(p Plane, v float32) {
	switch p {
	case FOV:
		c.projectionChange = true
		c.fov = v
	case ASPECT:
		c.projectionChange = true
		c.aspect = v
	case NEAR:
		c.near = v
	case FAR:
		c.far = v
	case ZOOM:
		c.projectionChange = true
		c.zoom = math.Abs(v)
	case LEFT:
		c.left = v
	case RIGHT:
		c.right = v
	case TOP:
		c.top = v
	case BOTTOM:
		c.bottom = v
	}
}

// ProjectionMatrix returns the cached projection matrix, recomputing it
// first if an invalidating plane changed since the last call.
func (c *cam) ProjectionMatrix() math.Matrice {
	if c.projectionChange {
		c.pmfn(c)
		c.projectionChange = false
	}
	return c.projectionMatrix
}
// perspective builds a cam whose projection is a perspective frustum.
// Planes() reports fov, aspect, near, far — in the order consumed by
// Matrice.Perspective.
func perspective(fov, aspect, near, far float32) *cam {
	c := newCam(
		PERSPECTIVE,
		func(c *cam) []float32 {
			return []float32{
				c.GetPlane(FOV),
				c.GetPlane(ASPECT),
				c.GetPlane(NEAR),
				c.GetPlane(FAR),
			}
		},
		func(c *cam) {
			p := c.Planes()
			c.projectionMatrix.Perspective(p[0], p[1], p[2], p[3])
		},
	)
	c.fov = fov
	c.aspect = aspect
	c.near = near
	c.far = far
	return c
}

// orthographic builds a cam with an orthographic projection. Planes()
// reports left, right, top, bottom, near, far, zoom; the four bounds are
// divided by zoom before being handed to Matrice.Orthographic, so larger
// zoom values narrow the visible volume.
func orthographic(left, right, top, bottom, near, far float32) *cam {
	c := newCam(
		ORTHOGRAPHIC,
		func(c *cam) []float32 {
			return []float32{
				c.GetPlane(LEFT),
				c.GetPlane(RIGHT),
				c.GetPlane(TOP),
				c.GetPlane(BOTTOM),
				c.GetPlane(NEAR),
				c.GetPlane(FAR),
				c.GetPlane(ZOOM),
			}
		},
		func(c *cam) {
			p := c.Planes()
			z := p[6]
			c.projectionMatrix.Orthographic(
				p[0]/z,
				p[1]/z,
				p[2]/z,
				p[3]/z,
				p[4],
				p[5],
			)
		},
	)
	c.left = left
	c.right = right
	c.top = top
	c.bottom = bottom
	c.near = near
	c.far = far
	return c
}
// camera couples a projection (cam) with a position/orientation and a
// look-at target, from which the view matrix is derived.
type camera struct {
	*cam
	*position
	target math.Vector
	up     math.Vector
	viewMatrix math.Matrice
}

// newCamera creates a camera targeting the origin with +Y up, initially
// facing -Z.
func newCamera(ck *cam) *camera {
	c := &camera{
		ck,
		newPosition(),
		math.Vec3(0, 0, 0),
		math.Vec3(0, 1, 0),
		math.Mat4(),
	}
	c.Update(DIRECTION, 0, 0, -1)
	return c
}

// update camera quat on changes: derive the quaternion rotating the
// default -Z facing onto the camera's current direction and apply it.
func (c *camera) update() {
	dr := c.direct(math.Vec3(0, 0, 0))
	q := math.Quat(0, 0, 0, 0)
	math.SetQuatFromUnitVectors(q, math.Vec3(0, 0, -1), dr)
	c.Update(ROTATE, q.Raw()...)
}

// Direction returns the unit vector pointing from the camera's current
// translation toward its target.
func (c *camera) Direction() math.Vector {
	tr := c.translate(math.Vec3(0, 0, 0))
	res := c.target
	res.Sub(tr).Normalize()
	return res
}

// LookAt aims the camera at the given target point.
func (c *camera) LookAt(t math.Vector) {
	c.target = t
}

// ViewMatrix recomputes and returns the look-at view matrix from the
// camera's current translation, target and up vector.
func (c *camera) ViewMatrix() math.Matrice {
	tr := c.translate(math.Vec3(0, 0, 0))
	c.viewMatrix.LookAt(tr, c.target, c.up)
	return c.viewMatrix
}
// cameraNode exposes a camera as a scene-graph node.
type cameraNode struct {
	*camera
	*node
}

// lCameraNodeClass is the Lua class name registered for camera nodes.
const lCameraNodeClass = "NCAMERA"

// Camera wraps the given projection in a scene node whose render step
// updates the world matrix and pushes the camera's view and projection
// matrices into the renderer.
func Camera(tag string, c *cam) Node {
	cc := newCamera(c)
	nn := newNode(tag, func(r render.Renderer, n Node) {
		cc.updateMatrixWorld(r)
		r.SetViewMatrice(cc.ViewMatrix())
		r.SetProjectionMatrice(cc.ProjectionMatrix())
	}, defaultRemovalFn, defaultReplaceFn, lCameraNodeClass, lNodeClass)
	return &cameraNode{
		cc,
		nn,
	}
}
var (
	// cameraTag extracts the camera node's tag from Lua argument 1.
	cameraTag TagFunc = tagFnFor("camera", 1)
	// CameraFromStringError reports a camera kind that cannot be built
	// from a bare string.
	CameraFromStringError = xrror.Xrror("%s is not a camera that can be specified from a single string").Out
)
// buildCam constructs a cam from the Lua value at stack position `from`:
// a string selects a default perspective/orthographic camera; table-based
// configuration is not implemented yet and yields (nil, nil).
func buildCam(L *l.LState, from int) (*cam, error) {
	var c *cam
	var err error
	stringFn := func(L *l.LState, from int) (*cam, error) {
		var ret *cam
		var serr error
		s := L.CheckString(from)
		switch strings.ToLower(s) {
		case "perspective":
			ret = perspective(65, Aspect(nativeWindow), 0.01, 1000)
		case "orthographic":
			ret = orthographic(-2, 2, 2, -2, 0.01, 100)
		default:
			// Bug fix: the error format expects the offending string, but
			// the stack index `from` was being passed, producing messages
			// like "%!s(int=2) is not a camera ...". Pass the string.
			serr = CameraFromStringError(s)
		}
		return ret, serr
	}
	tableFn := func(L *l.LState, from int) (*cam, error) {
		// TODO: table-driven camera configuration is unimplemented.
		//t := L.CheckTable(from)
		//spew.Dump(t)
		return nil, nil
	}
	v := L.Get(from)
	switch v.Type() {
	case l.LTString:
		c, err = stringFn(L, from)
	case l.LTTable:
		c, err = tableFn(L, from)
	default:
		L.RaiseError("A string denoting a camera type or table with camera configuration expected. %s is neither", v)
	}
	return c, err
}
// lcamera is the Lua constructor for camera nodes: argument 1 is the tag,
// argument 2 the camera spec (string or table); a build failure raises a
// Lua error. Returns the number of values pushed onto the Lua stack.
func lcamera(L *l.LState) int {
	tag := cameraTag(L)
	cm, err := buildCam(L, 2)
	if err != nil {
		L.RaiseError("error building camera: %s", err)
		return 0
	}
	c := Camera(tag, cm)
	return pushNode(L, c)
}
// cameraMemberFunc is a Lua member implementation receiving the userdata
// wrapper together with the unwrapped camera node.
type cameraMemberFunc func(*l.LState, *l.LUserData, *cameraNode) int

// checkCameraNodeWithUD unwraps the userdata at the given stack position
// into a *cameraNode, raising a Lua argument error on a type mismatch.
func checkCameraNodeWithUD(L *l.LState, pos int) (*l.LUserData, *cameraNode) {
	ud := L.CheckUserData(pos)
	if n, ok := ud.Value.(*cameraNode); ok {
		return ud, n
	}
	L.ArgError(pos, "camera node expected")
	return nil, nil
}

// cameraMember adapts a cameraMemberFunc into a plain Lua function that
// performs the userdata check on argument 1 before dispatching.
func cameraMember(fn cameraMemberFunc) l.LGFunction {
	return func(L *l.LState) int {
		if u, n := checkCameraNodeWithUD(L, 1); n != nil {
			return fn(L, u, n)
		}
		return 0
	}
}

// TODO: the five member accessors below are unimplemented stubs; each
// currently pushes nothing and returns 0.
func cameraFov(L *l.LState, u *l.LUserData, n *cameraNode) int {
	return 0
}

func cameraAspect(L *l.LState, u *l.LUserData, n *cameraNode) int {
	return 0
}

func cameraNear(L *l.LState, u *l.LUserData, n *cameraNode) int {
	return 0
}

func cameraFar(L *l.LState, u *l.LUserData, n *cameraNode) int {
	return 0
}

func cameraZoom(L *l.LState, u *l.LUserData, n *cameraNode) int {
	return 0
}

// lCameraNodeTable registers the camera node class for Lua: it inherits
// from the generic node table and exposes the plane accessors above.
var lCameraNodeTable = &lua.Table{
	lCameraNodeClass,
	[]*lua.Table{nodeTable},
	defaultIdxMetaFuncs(),
	map[string]l.LGFunction{},
	map[string]l.LGFunction{
		"fov":    cameraMember(cameraFov),
		"aspect": cameraMember(cameraAspect),
		"near":   cameraMember(cameraNear),
		"far":    cameraMember(cameraFar),
		"zoom":   cameraMember(cameraZoom),
		//view matrix
		//projection matrix
	},
}
// A default perspective camera.
// func Perspective() {}
// A default orthographic camera.
// func Orthographic() {} | lib/scene/camera.go | 0.633637 | 0.44077 | camera.go | starcoder |
package draw2dAnimation
import (
"image/color"
"math"
)
// An android figure. Change width and height for adjusting the figure to the desired ratio.
type Android struct {
	*ComposedFigure
	// BodyWidth is the torso width; all limb/head proportions derive from it.
	BodyWidth float64
	// BodyHeight is the torso height; limb lengths derive from it.
	BodyHeight float64
}

// Constructor setting current struct's fields and default values for the base struct.
func NewAndroid(bodyWidth float64, bodyHeight float64, lineWidth float64, fillColor color.RGBA) *Android {
	android := &Android{NewComposedFigure(), bodyWidth, bodyHeight}
	android.SetLineWidth(lineWidth)
	android.SetSubClass(android)
	android.InitializeFigures(fillColor)
	return android
}

// Constructor setting both base struct's and current struct's fields.
func NewAndroid7(
	bodyWidth float64, bodyHeight float64, fillColor color.RGBA,
	depth int, startPoint Point, rotationDegrees float64, lineWidth float64) *Android {
	android := &Android{NewComposedFigure3(depth, startPoint, rotationDegrees), bodyWidth, bodyHeight}
	android.SetLineWidth(lineWidth)
	android.SetSubClass(android)
	android.InitializeFigures(fillColor)
	return android
}
// Called by constructors to set initial state of the figure. Can also be used for reset.
func (this *Android) InitializeFigures(fillColor color.RGBA) {
radius := 5.0
lineWidth := this.GetLineWidth()
downBodyPart := NewRoundRectangle(radius, this.BodyWidth, 9.0/8*this.BodyHeight, lineWidth)
this.AddFigure("DownBodyPart", downBodyPart)
upBodyPart := NewRectangle(this.BodyWidth, this.BodyHeight, lineWidth)
this.AddFigure("upBodyPart", upBodyPart)
leftArm := NewRoundRectangle(radius, 0.2*this.BodyWidth, this.BodyHeight-2*lineWidth, lineWidth)
leftArm.SetStartPoint(Point{-0.2*this.BodyWidth - lineWidth, lineWidth})
this.AddFigure("leftArm", leftArm)
rightArm := NewRoundRectangle(radius, 0.2*this.BodyWidth, this.BodyHeight-2*lineWidth, lineWidth)
rightArm.SetStartPoint(Point{this.BodyWidth + lineWidth, lineWidth})
this.AddFigure("rightArm", rightArm)
leftLeg := NewRoundRectangle(radius, 0.2*this.BodyWidth, 5.0/8*this.BodyHeight, lineWidth)
leftLeg.SetStartPoint(Point{0.2 * this.BodyWidth, this.BodyHeight - lineWidth})
this.AddFigure("leftLeg", leftLeg)
rightLeg := NewRoundRectangle(radius, 0.2*this.BodyWidth, 5.0/8*this.BodyHeight, lineWidth)
rightLeg.SetStartPoint(Point{0.6 * this.BodyWidth, this.BodyHeight - lineWidth})
this.AddFigure("rightLeg", rightLeg)
head := NewEllipsis5(
0.5*this.BodyWidth, 5.0/8*this.BodyHeight, -1, Point{0.5 * this.BodyWidth, -lineWidth}, 0.0, lineWidth)
this.AddFigure("head", head)
leftEye := NewCircle4(radius, 0, Point{0.3 * this.BodyWidth, -0.3 * this.BodyWidth}, lineWidth)
this.AddFigure("leftEye", leftEye)
rightEye := NewCircle4(radius, 0, Point{0.7 * this.BodyWidth, -0.3 * this.BodyWidth}, lineWidth)
this.AddFigure("rightEye", rightEye)
antennaDeltaX := 0.1 * this.BodyWidth
antennaDeltaY := 1.5 / 8 * this.BodyHeight
antennaLength := math.Sqrt(antennaDeltaX*antennaDeltaX + antennaDeltaY*antennaDeltaY)
antennaAngle := -math.Asin(antennaDeltaY/antennaLength) * 180 / math.Pi
leftAntenna := NewLine5(
antennaLength, 0, Point{0.3 * this.BodyWidth, -5.0 / 8 * this.BodyHeight}, 180-antennaAngle, lineWidth)
this.AddFigure("leftAntenna", leftAntenna)
rightAntenna := NewLine5(
antennaLength, 0, Point{0.7 * this.BodyWidth, -5.0 / 8 * this.BodyHeight}, antennaAngle, lineWidth)
this.AddFigure("rightAntenna", rightAntenna)
this.figures.traverse(func(figure Figurer) {
figure.SetFillColor(fillColor)
})
} | draw2dAnimation/android.go | 0.878308 | 0.681462 | android.go | starcoder |
package regression
import (
"go.skia.org/infra/go/sklog"
"go.skia.org/infra/go/vec32"
"go.skia.org/infra/perf/go/clustering2"
"go.skia.org/infra/perf/go/config"
"go.skia.org/infra/perf/go/dataframe"
"go.skia.org/infra/perf/go/stepfit"
"go.skia.org/infra/perf/go/types"
)
// StepFit finds regressions by looking at each trace individually and seeing if that looks like a regression.
//
// Each trace in df.TraceSet is run through stepfit.GetStepFitAtMid; traces
// whose status is LOW are pooled into a single "low" cluster and HIGH ones
// into a single "high" cluster. The first trace matching a direction
// donates its step fit, step point and centroid for that cluster; per
// cluster at most config.MaxSampleTracesPerCluster keys are kept as
// samples. k and stddevThreshold are recorded verbatim in the result;
// the progress callback is accepted but never invoked here.
func StepFit(df *dataframe.DataFrame, k int, stddevThreshold float32, progress clustering2.Progress, interesting float32, stepDetection types.StepDetection) (*clustering2.ClusterSummaries, error) {
	low := clustering2.NewClusterSummary()
	high := clustering2.NewClusterSummary()
	// Run every trace through step detection and pool it into the matching
	// cluster. (Any normalization happens inside GetStepFitAtMid, if at
	// all — TODO confirm; the old comment claimed normalization here.)
	count := 0
	for key, trace := range df.TraceSet {
		count++
		if count%10000 == 0 {
			sklog.Infof("stepfit count: %d", count)
		}
		var sf *stepfit.StepFit
		sf = stepfit.GetStepFitAtMid(trace, stddevThreshold, interesting, stepDetection)
		isLow := sf.Status == stepfit.LOW
		isHigh := sf.Status == stepfit.HIGH
		// If stepfit is at the middle and if it is a step up or down.
		if isLow {
			// First LOW trace defines the cluster's step point and centroid.
			if low.StepFit.Status == "" {
				low.StepFit = sf
				low.StepFit.Status = stepfit.LOW
				low.StepPoint = df.Header[sf.TurningPoint]
				low.Centroid = vec32.Dup(trace)
			}
			low.Num++
			if low.Num < config.MaxSampleTracesPerCluster {
				low.Keys = append(low.Keys, key)
			}
		} else if isHigh {
			// First HIGH trace defines the cluster's step point and centroid.
			if high.StepFit.Status == "" {
				high.StepFit = sf
				high.StepFit.Status = stepfit.HIGH
				high.StepPoint = df.Header[sf.TurningPoint]
				high.Centroid = vec32.Dup(trace)
			}
			high.Num++
			if high.Num < config.MaxSampleTracesPerCluster {
				high.Keys = append(high.Keys, key)
			}
		}
	}
	sklog.Infof("Found LOW: %d HIGH: %d", low.Num, high.Num)
	ret := &clustering2.ClusterSummaries{
		Clusters:        []*clustering2.ClusterSummary{},
		K:               k,
		StdDevThreshold: stddevThreshold,
	}
	// Only non-empty clusters are reported.
	if low.Num > 0 {
		low.ParamSummaries = clustering2.GetParamSummariesForKeys(low.Keys)
		ret.Clusters = append(ret.Clusters, low)
	}
	if high.Num > 0 {
		high.ParamSummaries = clustering2.GetParamSummariesForKeys(high.Keys)
		ret.Clusters = append(ret.Clusters, high)
	}
	return ret, nil
}
package uncategorized
/*
Given an array of integers and an integer k,
you need to find the total number of continuous subarrays whose sum equals to k.
Example 1:
Input:nums = [1,1,1], k = 2
Output: 2
Note:
The length of the array is in range [1, 20,000].
The range of numbers in the array is [-1000, 1000]
and the range of the integer k is [-1e7, 1e7].
Example explanation: [1,1] and [1,1]
*/
/* Solution 1: Brute force
Use two nested loops: the outer index i runs from 0 to len(nums), and the
inner index j runs from i to len(nums), extending a running sum of
nums[i..j]. Whenever the running sum equals k, increase the count.
Return the count.
Time complexity: O(n^2) — every possible (start, end) pair is considered,
with the running sum extended in O(1) per step.
Space complexity: O(1). Constant extra space is used.
*/
// SubarraySumBrute counts the contiguous subarrays of nums summing to k
// by enumerating every (start, end) pair. O(n^2) time, O(1) space.
func SubarraySumBrute(nums []int, k int) int {
	total := 0
	for start := range nums {
		sum := 0
		for end := start; end < len(nums); end++ {
			sum += nums[end]
			if sum == k {
				total++
			}
		}
	}
	return total
}
/* Solution 2: Using a hash map
We use a hash map to store each cumulative sum seen so far together with
the number of times it has occurred, as entries of the form
(sum_i, number of occurrences of sum_i).
We traverse nums while maintaining the cumulative sum. Every time we
encounter a new sum we add an entry for it to the map; if the same sum
occurs again we increment its count. Further, for every cumulative sum
encountered, we look up how many times the value (sum - k) has already
occurred: each such earlier prefix marks the start of a subarray summing
to k that ends at the current index, so we add that count to the answer.
Return the count.
*/
// SubarraySum counts the contiguous subarrays of nums summing to k in one
// pass over prefix sums: a subarray ending at index i sums to k exactly
// when some earlier prefix equals (running - k). O(n) time and space.
func SubarraySum(nums []int, k int) int {
	// The empty prefix occurs once, so subarrays starting at index 0 count.
	prefixCounts := map[int]int{0: 1}
	total, running := 0, 0
	for _, v := range nums {
		running += v
		total += prefixCounts[running-k]
		prefixCounts[running]++
	}
	return total
}
package blockchain
import (
"math/big"
"github.com/elastos/Elastos.ELA.SPV/util"
"github.com/elastos/Elastos.ELA/common"
)
// PowLimit is the highest allowed proof-of-work target: 2^255 - 1.
var PowLimit = new(big.Int).Sub(new(big.Int).Lsh(big.NewInt(1), 255), big.NewInt(1))
// CalcWork returns the amount of work represented by the given compact
// difficulty bits, computed as (1 << 256) / (target + 1). Bits decoding
// to a zero or negative target yield zero work; such bits never occur in
// valid blocks but an invalid block could carry them.
func CalcWork(bits uint32) *big.Int {
	target := CompactToBig(bits)
	if target.Sign() <= 0 {
		return big.NewInt(0)
	}
	numerator := new(big.Int).Lsh(big.NewInt(1), 256)
	return numerator.Div(numerator, new(big.Int).Add(target, big.NewInt(1)))
}
// checkProofOfWork verifies that the header's claimed difficulty target
// is positive and within PowLimit, and that the header's proof-of-work
// hash, read as a big integer, does not exceed the target.
func checkProofOfWork(header util.Header) bool {
	// The target difficulty must be larger than zero.
	target := CompactToBig(header.Bits())
	if target.Sign() <= 0 {
		return false
	}
	// The target difficulty must be less than the maximum allowed.
	if target.Cmp(PowLimit) > 0 {
		return false
	}
	// The block hash must be less than the claimed target.
	hash := header.PowHash()
	hashNum := HashToBig(&hash)
	if hashNum.Cmp(target) > 0 {
		return false
	}
	return true
}
// HashToBig converts a little-endian hash into a big.Int. The big package
// expects big-endian bytes, so a copy of the hash is reversed in place
// before conversion.
func HashToBig(hash *common.Uint256) *big.Int {
	rev := *hash
	for i, j := 0, len(rev)-1; i < j; i, j = i+1, j-1 {
		rev[i], rev[j] = rev[j], rev[i]
	}
	return new(big.Int).SetBytes(rev[:])
}
func CompactToBig(compact uint32) *big.Int {
// Extract the mantissa, sign bit, and exponent.
mantissa := compact & 0x007fffff
isNegative := compact&0x00800000 != 0
exponent := uint(compact >> 24)
// Since the base for the exponent is 256, the exponent can be treated
// as the number of bytes to represent the full 256-bit number. So,
// treat the exponent as the number of bytes and shift the mantissa
// right or left accordingly. This is equivalent to:
// N = mantissa * 256^(exponent-3)
var bn *big.Int
if exponent <= 3 {
mantissa >>= 8 * (3 - exponent)
bn = big.NewInt(int64(mantissa))
} else {
bn = big.NewInt(int64(mantissa))
bn.Lsh(bn, 8*(exponent-3))
}
// Make it negative if the sign bit is set.
if isNegative {
bn = bn.Neg(bn)
}
return bn
} | blockchain/difficulty.go | 0.774583 | 0.416322 | difficulty.go | starcoder |
package assert
import (
"bufio"
"bytes"
"fmt"
"io"
"strings"
)
// min returns the smaller of two ints.
func min(a, b int) int {
	if b < a {
		return b
	}
	return a
}
// max returns the larger of two ints.
func max(a, b int) int {
	if b > a {
		return b
	}
	return a
}
// calculateRatio returns 2*matches/length, the similarity measure used by
// the sequence matcher; a non-positive length counts as a perfect match.
func calculateRatio(matches, length int) float64 {
	if length <= 0 {
		return 1.0
	}
	return 2.0 * float64(matches) / float64(length)
}
// Match describes a maximal matching run: a[A:A+Size] == b[B:B+Size].
type Match struct {
	A    int
	B    int
	Size int
}

// OpCode describes how to turn a[I1:I2] into b[J1:J2]. Tag is one of
// 'r' (replace), 'd' (delete), 'i' (insert) or 'e' (equal) — see
// GetOpCodes.
type OpCode struct {
	Tag byte
	I1  int
	I2  int
	J1  int
	J2  int
}

// SequenceMatcher compares two sequences of strings after the fashion of
// Python's difflib: junk and over-popular b-elements are excluded from
// matching, and derived results are cached until a sequence changes.
type SequenceMatcher struct {
	a              []string
	b              []string
	b2j            map[string][]int    // element of b -> indices of occurrence (junk/popular removed)
	IsJunk         func(string) bool   // optional junk classifier
	autoJunk       bool                // drop "popular" elements on long sequences
	bJunk          map[string]struct{} // elements IsJunk flagged
	matchingBlocks []Match             // cache for GetMatchingBlocks
	fullBCount     map[string]int
	bPopular       map[string]struct{} // elements dropped by autojunk
	opCodes        []OpCode            // cache for GetOpCodes
}

// NewMatcher returns a matcher over a and b with autojunk enabled and no
// junk classifier.
func NewMatcher(a, b []string) *SequenceMatcher {
	m := SequenceMatcher{autoJunk: true}
	m.SetSeqs(a, b)
	return &m
}

// NewMatcherWithJunk returns a matcher with an explicit junk classifier
// and autojunk setting.
func NewMatcherWithJunk(a, b []string, autoJunk bool,
	isJunk func(string) bool) *SequenceMatcher {
	m := SequenceMatcher{IsJunk: isJunk, autoJunk: autoJunk}
	m.SetSeqs(a, b)
	return &m
}

// SetSeqs replaces both sequences.
func (m *SequenceMatcher) SetSeqs(a, b []string) {
	m.SetSeq1(a)
	m.SetSeq2(b)
}

// SetSeq1 replaces the first sequence and drops caches derived from it.
func (m *SequenceMatcher) SetSeq1(a []string) {
	// NOTE(review): this compares the address of the local parameter with
	// the address of the struct field, which can never be equal, so the
	// short-circuit never fires (the Python original compared `a is self.a`).
	if &a == &m.a {
		return
	}
	m.a = a
	m.matchingBlocks = nil
	m.opCodes = nil
}

// SetSeq2 replaces the second sequence, drops derived caches and rebuilds
// the b-side index via chainB.
func (m *SequenceMatcher) SetSeq2(b []string) {
	// NOTE(review): same always-false address comparison as in SetSeq1.
	if &b == &m.b {
		return
	}
	m.b = b
	m.matchingBlocks = nil
	m.opCodes = nil
	m.fullBCount = nil
	m.chainB()
}
// chainB (re)builds the b-side indexes used by findLongestMatch:
//   - b2j maps each element of b to the list of indices where it occurs;
//   - bJunk holds elements the IsJunk classifier flags (removed from b2j);
//   - bPopular holds "popular" elements (appearing in more than ~1% of a
//     sequence of at least 200 elements) that autojunk removes from b2j.
//
// Must be called whenever m.b changes.
//
// Idiom cleanups: `for s, _ := range` shortened to `for s := range`
// (staticcheck S1005) and the three-line index append collapsed to the
// standard `m[k] = append(m[k], v)` form; behavior is unchanged.
func (m *SequenceMatcher) chainB() {
	// Index every element of b by the positions at which it appears.
	b2j := map[string][]int{}
	for i, s := range m.b {
		b2j[s] = append(b2j[s], i)
	}

	// Purge junk elements so core matches never include them.
	m.bJunk = map[string]struct{}{}
	if m.IsJunk != nil {
		junk := m.bJunk
		for s := range b2j {
			if m.IsJunk(s) {
				junk[s] = struct{}{}
			}
		}
		for s := range junk {
			delete(b2j, s)
		}
	}

	// Autojunk: on long sequences, drop elements occurring in more than
	// n/100+1 positions; they add noise rather than signal.
	popular := map[string]struct{}{}
	n := len(m.b)
	if m.autoJunk && n >= 200 {
		ntest := n/100 + 1
		for s, indices := range b2j {
			if len(indices) > ntest {
				popular[s] = struct{}{}
			}
		}
		for s := range popular {
			delete(b2j, s)
		}
	}
	m.bPopular = popular
	m.b2j = b2j
}
// isBJunk reports whether s was classified as junk on the b side.
func (m *SequenceMatcher) isBJunk(s string) bool {
	_, ok := m.bJunk[s]
	return ok
}
func (m *SequenceMatcher) findLongestMatch(alo, ahi, blo, bhi int) Match {
besti, bestj, bestsize := alo, blo, 0
j2len := map[int]int{}
for i := alo; i != ahi; i++ {
newj2len := map[int]int{}
for _, j := range m.b2j[m.a[i]] {
if j < blo {
continue
}
if j >= bhi {
break
}
k := j2len[j-1] + 1
newj2len[j] = k
if k > bestsize {
besti, bestj, bestsize = i-k+1, j-k+1, k
}
}
j2len = newj2len
}
for besti > alo && bestj > blo && !m.isBJunk(m.b[bestj-1]) &&
m.a[besti-1] == m.b[bestj-1] {
besti, bestj, bestsize = besti-1, bestj-1, bestsize+1
}
for besti+bestsize < ahi && bestj+bestsize < bhi &&
!m.isBJunk(m.b[bestj+bestsize]) &&
m.a[besti+bestsize] == m.b[bestj+bestsize] {
bestsize += 1
}
for besti > alo && bestj > blo && m.isBJunk(m.b[bestj-1]) &&
m.a[besti-1] == m.b[bestj-1] {
besti, bestj, bestsize = besti-1, bestj-1, bestsize+1
}
for besti+bestsize < ahi && bestj+bestsize < bhi &&
m.isBJunk(m.b[bestj+bestsize]) &&
m.a[besti+bestsize] == m.b[bestj+bestsize] {
bestsize += 1
}
return Match{A: besti, B: bestj, Size: bestsize}
}
// GetMatchingBlocks returns the list of matching subsequences in
// increasing order of A/B index, terminated by the zero-size sentinel
// Match{len(a), len(b), 0}. Adjacent blocks found by the recursive search
// are merged, so no two returned blocks touch. The result is cached.
func (m *SequenceMatcher) GetMatchingBlocks() []Match {
	if m.matchingBlocks != nil {
		return m.matchingBlocks
	}
	// Divide and conquer: find the longest match in a region, then recurse
	// into the sub-regions on its left and on its right.
	var matchBlocks func(alo, ahi, blo, bhi int, matched []Match) []Match
	matchBlocks = func(alo, ahi, blo, bhi int, matched []Match) []Match {
		match := m.findLongestMatch(alo, ahi, blo, bhi)
		i, j, k := match.A, match.B, match.Size
		if match.Size > 0 {
			if alo < i && blo < j {
				matched = matchBlocks(alo, i, blo, j, matched)
			}
			matched = append(matched, match)
			if i+k < ahi && j+k < bhi {
				matched = matchBlocks(i+k, ahi, j+k, bhi, matched)
			}
		}
		return matched
	}
	matched := matchBlocks(0, len(m.a), 0, len(m.b), nil)
	// Merge blocks that abut each other into single larger blocks.
	nonAdjacent := []Match{}
	i1, j1, k1 := 0, 0, 0
	for _, b := range matched {
		i2, j2, k2 := b.A, b.B, b.Size
		if i1+k1 == i2 && j1+k1 == j2 {
			// This block starts exactly where the previous one ends.
			k1 += k2
		} else {
			if k1 > 0 {
				nonAdjacent = append(nonAdjacent, Match{i1, j1, k1})
			}
			i1, j1, k1 = i2, j2, k2
		}
	}
	if k1 > 0 {
		nonAdjacent = append(nonAdjacent, Match{i1, j1, k1})
	}
	// Sentinel: zero-length match at the end of both sequences.
	nonAdjacent = append(nonAdjacent, Match{len(m.a), len(m.b), 0})
	m.matchingBlocks = nonAdjacent
	return m.matchingBlocks
}
// GetOpCodes returns a list of opcodes describing how to turn a into b.
// Tags: 'r' replace, 'd' delete (from a), 'i' insert (from b), 'e' equal.
// The result is cached.
func (m *SequenceMatcher) GetOpCodes() []OpCode {
	if m.opCodes != nil {
		return m.opCodes
	}
	i, j := 0, 0
	matching := m.GetMatchingBlocks()
	opCodes := make([]OpCode, 0, len(matching))
	// Loop variable renamed from "m" to "blk": it shadowed the method
	// receiver, which was confusing and error-prone.
	for _, blk := range matching {
		ai, bj, size := blk.A, blk.B, blk.Size
		// The gap between the previous block's end and this block's start
		// determines what kind of edit happened.
		tag := byte(0)
		if i < ai && j < bj {
			tag = 'r'
		} else if i < ai {
			tag = 'd'
		} else if j < bj {
			tag = 'i'
		}
		if tag > 0 {
			opCodes = append(opCodes, OpCode{tag, i, ai, j, bj})
		}
		i, j = ai+size, bj+size
		// The matching block itself is an 'e' (equal) opcode.
		if size > 0 {
			opCodes = append(opCodes, OpCode{'e', ai, i, bj, j})
		}
	}
	m.opCodes = opCodes
	return m.opCodes
}
// GetGroupedOpCodes splits the opcodes into groups ("hunks") with up to n
// lines of surrounding context, as used by unified/context diff output.
// n < 0 selects the default of 3 context lines.
func (m *SequenceMatcher) GetGroupedOpCodes(n int) [][]OpCode {
	if n < 0 {
		n = 3
	}
	codes := m.GetOpCodes()
	if len(codes) == 0 {
		codes = []OpCode{OpCode{'e', 0, 1, 0, 1}}
	}
	// Trim a leading/trailing equal run down to n lines of context.
	if codes[0].Tag == 'e' {
		c := codes[0]
		i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2
		codes[0] = OpCode{c.Tag, max(i1, i2-n), i2, max(j1, j2-n), j2}
	}
	if codes[len(codes)-1].Tag == 'e' {
		c := codes[len(codes)-1]
		i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2
		codes[len(codes)-1] = OpCode{c.Tag, i1, min(i2, i1+n), j1, min(j2, j1+n)}
	}
	nn := n + n
	groups := [][]OpCode{}
	group := []OpCode{}
	for _, c := range codes {
		i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2
		// An equal run longer than 2n lines ends the current group (keeping
		// n trailing context lines) and opens a new group that keeps only n
		// leading context lines of the same run.
		if c.Tag == 'e' && i2-i1 > nn {
			group = append(group, OpCode{c.Tag, i1, min(i2, i1+n),
				j1, min(j2, j1+n)})
			groups = append(groups, group)
			group = []OpCode{}
			i1, j1 = max(i1, i2-n), max(j1, j2-n)
		}
		group = append(group, OpCode{c.Tag, i1, i2, j1, j2})
	}
	// Do not emit a final group that is pure context.
	if len(group) > 0 && !(len(group) == 1 && group[0].Tag == 'e') {
		groups = append(groups, group)
	}
	return groups
}
// Ratio returns a measure of the sequences' similarity in [0, 1], based on
// the total size of the matching blocks. This is expensive on first call;
// QuickRatio and RealQuickRatio give cheaper upper bounds.
func (m *SequenceMatcher) Ratio() float64 {
	matches := 0
	// Loop variable renamed from "m": it shadowed the method receiver.
	for _, match := range m.GetMatchingBlocks() {
		matches += match.Size
	}
	return calculateRatio(matches, len(m.a)+len(m.b))
}
// QuickRatio returns an upper bound on Ratio relatively quickly: it counts
// how many elements of a also occur in b with multiplicity, ignoring
// ordering. The histogram of b is cached in m.fullBCount.
func (m *SequenceMatcher) QuickRatio() float64 {
	// viewing a and b as multisets, matches is the cardinality of their
	// intersection, which bounds the number of lines Ratio can match.
	if m.fullBCount == nil {
		m.fullBCount = map[string]int{}
		for _, s := range m.b {
			m.fullBCount[s] = m.fullBCount[s] + 1
		}
	}
	// avail[s] tracks how many copies of s in b remain unclaimed; entries
	// may go negative, and n > 0 gates whether this occurrence counts.
	avail := map[string]int{}
	matches := 0
	for _, s := range m.a {
		n, ok := avail[s]
		if !ok {
			n = m.fullBCount[s]
		}
		avail[s] = n - 1
		if n > 0 {
			matches += 1
		}
	}
	return calculateRatio(matches, len(m.a)+len(m.b))
}
// RealQuickRatio returns an upper bound on Ratio very quickly: even a
// perfect match cannot exceed 2*min(len(a), len(b)) / (len(a)+len(b)).
func (m *SequenceMatcher) RealQuickRatio() float64 {
	la, lb := len(m.a), len(m.b)
	return calculateRatio(min(la, lb), la+lb)
}
// formatRangeUnified renders a line range for a unified-diff hunk header.
// The range is "start,length" with start 1-based, collapsed to a lone
// "start" when length is 1; an empty range points at the line before the
// gap ("start,0").
func formatRangeUnified(start, stop int) string {
	length := stop - start
	if length == 1 {
		return fmt.Sprintf("%d", start+1)
	}
	beginning := start + 1
	if length == 0 {
		beginning--
	}
	return fmt.Sprintf("%d,%d", beginning, length)
}
// UnifiedDiff holds the parameters for producing a unified diff: the two
// line sequences, optional header metadata, the header end-of-line and the
// number of context lines around each hunk.
type UnifiedDiff struct {
	A        []string // First sequence lines
	FromFile string   // First file name
	FromDate string   // First file time
	B        []string // Second sequence lines
	ToFile   string   // Second file name
	ToDate   string   // Second file time
	Eol      string   // Headers end of line, defaults to LF
	Context  int      // Number of context lines
}
// WriteUnifiedDiff writes diff to writer in unified-diff format: a
// "---"/"+++" file header (only when file names are given) followed by one
// "@@" hunk per group of changes with diff.Context lines of context.
// Output is buffered and flushed on return. Lines in diff.A/diff.B are
// expected to carry their own trailing newlines (see SplitLines).
func WriteUnifiedDiff(writer io.Writer, diff UnifiedDiff) error {
	buf := bufio.NewWriter(writer)
	defer buf.Flush()
	// wf writes a formatted string, ws a plain string, to the buffer.
	wf := func(format string, args ...interface{}) error {
		_, err := buf.WriteString(fmt.Sprintf(format, args...))
		return err
	}
	ws := func(s string) error {
		_, err := buf.WriteString(s)
		return err
	}
	if len(diff.Eol) == 0 {
		diff.Eol = "\n"
	}
	started := false
	m := NewMatcher(diff.A, diff.B)
	for _, g := range m.GetGroupedOpCodes(diff.Context) {
		// Emit the file header once, before the first hunk.
		if !started {
			started = true
			fromDate := ""
			if len(diff.FromDate) > 0 {
				fromDate = "\t" + diff.FromDate
			}
			toDate := ""
			if len(diff.ToDate) > 0 {
				toDate = "\t" + diff.ToDate
			}
			if diff.FromFile != "" || diff.ToFile != "" {
				err := wf("--- %s%s%s", diff.FromFile, fromDate, diff.Eol)
				if err != nil {
					return err
				}
				err = wf("+++ %s%s%s", diff.ToFile, toDate, diff.Eol)
				if err != nil {
					return err
				}
			}
		}
		// Hunk header: @@ -aRange +bRange @@
		first, last := g[0], g[len(g)-1]
		range1 := formatRangeUnified(first.I1, last.I2)
		range2 := formatRangeUnified(first.J1, last.J2)
		if err := wf("@@ -%s +%s @@%s", range1, range2, diff.Eol); err != nil {
			return err
		}
		for _, c := range g {
			i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2
			// Equal lines: context, prefixed with a space.
			if c.Tag == 'e' {
				for _, line := range diff.A[i1:i2] {
					if err := ws(" " + line); err != nil {
						return err
					}
				}
				continue
			}
			// Replaced or deleted lines come from A with a "-" prefix.
			if c.Tag == 'r' || c.Tag == 'd' {
				for _, line := range diff.A[i1:i2] {
					if err := ws("-" + line); err != nil {
						return err
					}
				}
			}
			// Replaced or inserted lines come from B with a "+" prefix.
			if c.Tag == 'r' || c.Tag == 'i' {
				for _, line := range diff.B[j1:j2] {
					if err := ws("+" + line); err != nil {
						return err
					}
				}
			}
		}
	}
	return nil
}
// GetUnifiedDiffString renders diff in unified format and returns it as a
// string, together with any write error from WriteUnifiedDiff.
func GetUnifiedDiffString(diff UnifiedDiff) (string, error) {
	w := &bytes.Buffer{}
	err := WriteUnifiedDiff(w, diff)
	// w.String() instead of string(w.Bytes()): same result without the
	// redundant byte-slice round trip.
	return w.String(), err
}
// formatRangeContext renders a line range for a context-diff hunk header:
// "first,last" (both 1-based, inclusive), collapsed to a lone number for
// empty or single-line ranges; an empty range points at the line before
// the gap.
func formatRangeContext(start, stop int) string {
	length := stop - start
	beginning := start + 1
	if length == 0 {
		beginning--
	}
	if length > 1 {
		return fmt.Sprintf("%d,%d", beginning, beginning+length-1)
	}
	return fmt.Sprintf("%d", beginning)
}
// ContextDiff holds the inputs for WriteContextDiff; it has the same
// fields and semantics as UnifiedDiff.
type ContextDiff UnifiedDiff
// WriteContextDiff writes diff to writer in context-diff format: a
// "***"/"---" file header (only when file names are given), then one
// "***************" hunk per group with diff.Context lines of context.
// Write errors are collected and the first one is returned; output is
// buffered and flushed on return. Lines in diff.A/diff.B are expected to
// carry their own trailing newlines.
func WriteContextDiff(writer io.Writer, diff ContextDiff) error {
	buf := bufio.NewWriter(writer)
	defer buf.Flush()
	// wf/ws write formatted/plain strings, remembering only the first error.
	var diffErr error
	wf := func(format string, args ...interface{}) {
		_, err := buf.WriteString(fmt.Sprintf(format, args...))
		if diffErr == nil && err != nil {
			diffErr = err
		}
	}
	ws := func(s string) {
		_, err := buf.WriteString(s)
		if diffErr == nil && err != nil {
			diffErr = err
		}
	}
	if len(diff.Eol) == 0 {
		diff.Eol = "\n"
	}
	// Per-line prefixes by opcode tag: insert, delete, replace, equal.
	prefix := map[byte]string{
		'i': "+ ",
		'd': "- ",
		'r': "! ",
		'e': " ",
	}
	started := false
	m := NewMatcher(diff.A, diff.B)
	for _, g := range m.GetGroupedOpCodes(diff.Context) {
		// Emit the file header once, before the first hunk.
		if !started {
			started = true
			fromDate := ""
			if len(diff.FromDate) > 0 {
				fromDate = "\t" + diff.FromDate
			}
			toDate := ""
			if len(diff.ToDate) > 0 {
				toDate = "\t" + diff.ToDate
			}
			if diff.FromFile != "" || diff.ToFile != "" {
				wf("*** %s%s%s", diff.FromFile, fromDate, diff.Eol)
				wf("--- %s%s%s", diff.ToFile, toDate, diff.Eol)
			}
		}
		first, last := g[0], g[len(g)-1]
		ws("***************" + diff.Eol)
		range1 := formatRangeContext(first.I1, last.I2)
		wf("*** %s ****%s", range1, diff.Eol)
		// The A side of the hunk is printed only when the group changes A
		// ('r' or 'd'); pure insertions have no A lines. The inner loop then
		// prints every non-insert opcode of the group from A.
		for _, c := range g {
			if c.Tag == 'r' || c.Tag == 'd' {
				for _, cc := range g {
					if cc.Tag == 'i' {
						continue
					}
					for _, line := range diff.A[cc.I1:cc.I2] {
						ws(prefix[cc.Tag] + line)
					}
				}
				break
			}
		}
		range2 := formatRangeContext(first.J1, last.J2)
		wf("--- %s ----%s", range2, diff.Eol)
		// Symmetrically, the B side is printed only when the group changes B
		// ('r' or 'i'), skipping delete opcodes which have no B lines.
		for _, c := range g {
			if c.Tag == 'r' || c.Tag == 'i' {
				for _, cc := range g {
					if cc.Tag == 'd' {
						continue
					}
					for _, line := range diff.B[cc.J1:cc.J2] {
						ws(prefix[cc.Tag] + line)
					}
				}
				break
			}
		}
	}
	return diffErr
}
// GetContextDiffString renders diff in context format and returns it as a
// string, together with any write error from WriteContextDiff.
func GetContextDiffString(diff ContextDiff) (string, error) {
	w := &bytes.Buffer{}
	err := WriteContextDiff(w, diff)
	// w.String() instead of string(w.Bytes()): same result without the
	// redundant byte-slice round trip.
	return w.String(), err
}
// SplitLines splits s after each "\n", keeping the separators, and always
// appends "\n" to the final element so that every returned line is
// newline-terminated (as WriteUnifiedDiff/WriteContextDiff expect).
func SplitLines(s string) []string {
	lines := strings.SplitAfter(s, "\n")
	lines[len(lines)-1] += "\n"
	return lines
} | difflib.go | 0.592667 | 0.407363 | difflib.go | starcoder |
package assert
// T reports when failures occur.
// testing.T implements this interface.
type T interface {
	// Fail indicates that the test has failed but
	// allows execution to continue.
	Fail()
	// FailNow indicates that the test has failed and
	// aborts the test.
	// FailNow is called in strict mode (via New).
	FailNow()
}
// New creates an Assertion bound to t. Optional Options are compiled once
// and applied to every assertion made through the returned value.
func New(t T, opts ...Option) *Assertion {
	a := &Assertion{t: t}
	if len(opts) > 0 {
		a.opts = newOptions(opts...)
	}
	return a
}

// Assertion provides assertion methods around the T interface, forwarding
// each call to the matching package-level function with its stored options.
type Assertion struct {
	t    T        // failure reporter
	opts *Options // nil means package defaults (see wrapArgs)
}
// Positive forwards to the package-level Positive with a's options.
func (a *Assertion) Positive(expected interface{}, argAndOpts ...interface{}) bool {
	return Positive(a.t, expected, a.wrapArgs(argAndOpts)...)
}

// Negative forwards to the package-level Negative with a's options.
func (a *Assertion) Negative(expected interface{}, argAndOpts ...interface{}) bool {
	return Negative(a.t, expected, a.wrapArgs(argAndOpts)...)
}

// Greater forwards to the package-level Greater with a's options.
func (a *Assertion) Greater(expected, actual interface{}, argAndOpts ...interface{}) bool {
	return Greater(a.t, expected, actual, a.wrapArgs(argAndOpts)...)
}

// GreaterEqual forwards to the package-level GreaterEqual with a's options.
func (a *Assertion) GreaterEqual(expected, actual interface{}, argAndOpts ...interface{}) bool {
	return GreaterEqual(a.t, expected, actual, a.wrapArgs(argAndOpts)...)
}

// Less forwards to the package-level Less with a's options.
func (a *Assertion) Less(expected, actual interface{}, argAndOpts ...interface{}) bool {
	return Less(a.t, expected, actual, a.wrapArgs(argAndOpts)...)
}

// LessEqual forwards to the package-level LessEqual with a's options.
func (a *Assertion) LessEqual(expected, actual interface{}, argAndOpts ...interface{}) bool {
	return LessEqual(a.t, expected, actual, a.wrapArgs(argAndOpts)...)
}

// IsIncreasing forwards to the package-level IsIncreasing with a's options.
func (a *Assertion) IsIncreasing(expected interface{}, argAndOpts ...interface{}) bool {
	return IsIncreasing(a.t, expected, a.wrapArgs(argAndOpts)...)
}

// IsNonIncreasing forwards to the package-level IsNonIncreasing with a's options.
func (a *Assertion) IsNonIncreasing(expected interface{}, argAndOpts ...interface{}) bool {
	return IsNonIncreasing(a.t, expected, a.wrapArgs(argAndOpts)...)
}

// IsDecreasing forwards to the package-level IsDecreasing with a's options.
func (a *Assertion) IsDecreasing(expected interface{}, argAndOpts ...interface{}) bool {
	return IsDecreasing(a.t, expected, a.wrapArgs(argAndOpts)...)
}

// IsNonDecreasing forwards to the package-level IsNonDecreasing with a's options.
func (a *Assertion) IsNonDecreasing(expected interface{}, argAndOpts ...interface{}) bool {
	return IsNonDecreasing(a.t, expected, a.wrapArgs(argAndOpts)...)
}

// IsType forwards to the package-level IsType with a's options.
func (a *Assertion) IsType(expected, object interface{}, argAndOpts ...interface{}) bool {
	return IsType(a.t, expected, object, a.wrapArgs(argAndOpts)...)
}

// Equal forwards to the package-level Equal with a's options.
func (a *Assertion) Equal(expected, actual interface{}, argAndOpts ...interface{}) bool {
	return Equal(a.t, expected, actual, a.wrapArgs(argAndOpts)...)
}

// NotEqual forwards to the package-level NotEqual with a's options.
func (a *Assertion) NotEqual(expected, actual interface{}, argAndOpts ...interface{}) bool {
	return NotEqual(a.t, expected, actual, a.wrapArgs(argAndOpts)...)
}

// Same forwards to the package-level Same with a's options.
func (a *Assertion) Same(expected, object interface{}, argAndOpts ...interface{}) bool {
	return Same(a.t, expected, object, a.wrapArgs(argAndOpts)...)
}

// NotSame forwards to the package-level NotSame with a's options.
func (a *Assertion) NotSame(expected, object interface{}, argAndOpts ...interface{}) bool {
	return NotSame(a.t, expected, object, a.wrapArgs(argAndOpts)...)
}

// Nil forwards to the package-level Nil with a's options.
func (a *Assertion) Nil(object interface{}, argAndOpts ...interface{}) bool {
	return Nil(a.t, object, a.wrapArgs(argAndOpts)...)
}

// NotNil forwards to the package-level NotNil with a's options.
func (a *Assertion) NotNil(object interface{}, argAndOpts ...interface{}) bool {
	return NotNil(a.t, object, a.wrapArgs(argAndOpts)...)
}

// Zero forwards to the package-level Nil.
// NOTE(review): Zero/NotZero alias Nil/NotNil, unlike every other method
// which forwards to its namesake. Verify this is intentional — zero values
// such as 0 or "" are not nil, so these may not behave as their names
// suggest.
func (a *Assertion) Zero(object interface{}, argAndOpts ...interface{}) bool {
	return Nil(a.t, object, a.wrapArgs(argAndOpts)...)
}

// NotZero forwards to the package-level NotNil (see the note on Zero).
func (a *Assertion) NotZero(object interface{}, argAndOpts ...interface{}) bool {
	return NotNil(a.t, object, a.wrapArgs(argAndOpts)...)
}

// Empty forwards to the package-level Empty with a's options.
func (a *Assertion) Empty(object interface{}, argAndOpts ...interface{}) bool {
	return Empty(a.t, object, a.wrapArgs(argAndOpts)...)
}

// NotEmpty forwards to the package-level NotEmpty with a's options.
func (a *Assertion) NotEmpty(object interface{}, argAndOpts ...interface{}) bool {
	return NotEmpty(a.t, object, a.wrapArgs(argAndOpts)...)
}

// Len forwards to the package-level Len with a's options.
func (a *Assertion) Len(object interface{}, length int, argAndOpts ...interface{}) bool {
	return Len(a.t, object, length, a.wrapArgs(argAndOpts)...)
}

// True forwards to the package-level True with a's options.
func (a *Assertion) True(value bool, argAndOpts ...interface{}) bool {
	return True(a.t, value, a.wrapArgs(argAndOpts)...)
}

// False forwards to the package-level False with a's options.
func (a *Assertion) False(value bool, argAndOpts ...interface{}) bool {
	return False(a.t, value, a.wrapArgs(argAndOpts)...)
}

// Contains forwards to the package-level Contains with a's options.
func (a *Assertion) Contains(s interface{}, contains interface{}, argAndOpts ...interface{}) bool {
	return Contains(a.t, s, contains, a.wrapArgs(argAndOpts)...)
}

// DirExists forwards to the package-level DirExists with a's options.
func (a *Assertion) DirExists(path string, argAndOpts ...interface{}) bool {
	return DirExists(a.t, path, a.wrapArgs(argAndOpts)...)
}

// NoDirExists forwards to the package-level NoDirExists with a's options.
func (a *Assertion) NoDirExists(path string, argAndOpts ...interface{}) bool {
	return NoDirExists(a.t, path, a.wrapArgs(argAndOpts)...)
}

// FileExists forwards to the package-level FileExists with a's options.
func (a *Assertion) FileExists(path string, argAndOpts ...interface{}) bool {
	return FileExists(a.t, path, a.wrapArgs(argAndOpts)...)
}

// NoFileExists forwards to the package-level NoFileExists with a's options.
func (a *Assertion) NoFileExists(path string, argAndOpts ...interface{}) bool {
	return NoFileExists(a.t, path, a.wrapArgs(argAndOpts)...)
}
// wrapArgs appends the Assertion's stored options (if any) to the caller's
// args so the package-level functions pick them up.
func (a *Assertion) wrapArgs(argAndOpts []interface{}) []interface{} {
	if a.opts == nil {
		return argAndOpts
	}
	return append(argAndOpts, withOptions(a.opts))
} | pkg/assert/assertion.go | 0.808029 | 0.545649 | assertion.go | starcoder |
package gdsp
import (
"math"
"math/cmplx"
)
// WindowType values represent a window function and its inverse.
type WindowType int

// Types of windows. Values start at 1 so the zero value is not a valid
// window type.
const (
	WindowTypeHann WindowType = iota + 1
	WindowTypeHamming
	WindowTypeNuttal
)
// Window applies the window function selected by windowType to the input
// signal and returns the windowed copy, or nil for an unknown windowType.
func Window(windowType WindowType, input VectorComplex) VectorComplex {
	var apply func(VectorComplex) VectorComplex
	switch windowType {
	case WindowTypeHann:
		apply = Hann
	case WindowTypeHamming:
		apply = Hamming
	case WindowTypeNuttal:
		apply = Nuttal
	default:
		return nil
	}
	return apply(input)
}
// InverseWindow applies the inverse of the window function selected by
// windowType to the input signal, undoing a previous Window call.
// Returns nil for an unrecognized windowType.
//
// Bug fix: this previously dispatched to the forward window functions
// (Hann, Hamming, Nuttal), so "inverting" applied the window a second
// time. It now calls the Inverse* variants defined in this file.
func InverseWindow(windowType WindowType, input VectorComplex) VectorComplex {
	switch windowType {
	case WindowTypeHann:
		return InverseHann(input)
	case WindowTypeHamming:
		return InverseHamming(input)
	case WindowTypeNuttal:
		return InverseNuttal(input)
	}
	return nil
}
// Hann performs Hann windowing on the input vector.
// The input is not modified; a windowed copy is returned.
func Hann(input VectorComplex) VectorComplex {
	out := input.Copy()
	step := 2.0 * math.Pi / float64(len(out)-1)
	for i := range out {
		phase := complex(step*float64(i), 0)
		out[i] *= (1.0 - cmplx.Cos(phase)) / 2.0
	}
	return out
}
// InverseHann performs inverse Hann windowing on the input vector,
// dividing each sample by the coefficient Hann multiplied by. The Hann
// window is exactly 0 at the endpoints (cos(0) == 1), so the endpoint
// samples divide by zero and the result can contain Inf/NaN values.
func InverseHann(input VectorComplex) VectorComplex {
	vih := input.Copy()
	theta := 2.0 * math.Pi / float64(len(vih)-1)
	for i := 0; i < len(vih); i++ {
		x := complex(theta*float64(i), 0)
		vih[i] /= (1.0 - cmplx.Cos(x)) / 2.0
	}
	return vih
}
// Hamming performs Hamming windowing on the input vector, using the
// classic coefficient a = 25/46. The input is not modified; a windowed
// copy is returned.
func Hamming(input VectorComplex) VectorComplex {
	out := input.Copy()
	step := 2.0 * math.Pi / float64(len(out)-1)
	a := complex(25.0/46.0, 0.0)
	for i := range out {
		phase := complex(step*float64(i), 0)
		out[i] *= a - (1.0-a)*cmplx.Cos(phase)
	}
	return out
}
// InverseHamming performs inverse Hamming windowing on the input vector,
// dividing each sample by the Hamming coefficient. Unlike the Hann window,
// the Hamming window never reaches zero (its minimum is a-(1-a) = 4/46),
// so the division is always well-defined.
func InverseHamming(input VectorComplex) VectorComplex {
	vih := input.Copy()
	theta := 2.0 * math.Pi / float64(len(vih)-1)
	a := complex(25.0/46.0, 0.0)
	for i := 0; i < len(vih); i++ {
		x := complex(theta*float64(i), 0)
		vih[i] /= a - (1.0-a)*cmplx.Cos(x)
	}
	return vih
}
// Nuttal performs Nuttal windowing on the input vector using a four-term
// cosine-sum window with coefficients a0..a3. The input is not modified;
// a windowed copy is returned.
func Nuttal(input VectorComplex) VectorComplex {
	vh := input.Copy()
	theta := 2.0 * math.Pi / float64(len(vh)-1)
	a0 := complex(0.355768, 0.0)
	a1 := complex(0.487396, 0.0)
	a2 := complex(0.144232, 0.0)
	a3 := complex(0.012604, 0.0)
	for i := 0; i < len(vh); i++ {
		x := complex(theta*float64(i), 0)
		vh[i] *= a0 - a1*cmplx.Cos(x) + a2*cmplx.Cos(2.0*x) + a3*cmplx.Cos(3.0*x)
	}
	return vh
}
// InverseNuttal performs inverse Nuttal windowing on the input vector,
// dividing each sample by the window coefficient. The coefficients sum to
// zero at the endpoints (a0-a1+a2-a3 == 0), so endpoint samples divide by
// zero and the result can contain Inf/NaN values.
func InverseNuttal(input VectorComplex) VectorComplex {
	vih := input.Copy()
	theta := 2.0 * math.Pi / float64(len(vih)-1)
	a0 := complex(0.355768, 0.0)
	a1 := complex(0.487396, 0.0)
	a2 := complex(0.144232, 0.0)
	a3 := complex(0.012604, 0.0)
	for i := 0; i < len(vih); i++ {
		x := complex(theta*float64(i), 0)
		vih[i] /= a0 - a1*cmplx.Cos(x) + a2*cmplx.Cos(2.0*x) + a3*cmplx.Cos(3.0*x)
	}
	return vih
} | window.go | 0.782455 | 0.446434 | window.go | starcoder |
package cmatrix
import (
"bytes"
"fmt"
)
// CMatrix represents a matrix storing complex128 values.
// Follows the Matrix interface of gonum, which is found at:
// https://github.com/gonum/matrix/blob/master/mat64/matrix.go
type CMatrix interface {
	// Dims returns the dimensions of a CMatrix.
	Dims() (r, c int)
	// At returns the value of the matrix element at (r, c). It will
	// panic if r or c are out of bounds for the matrix.
	At(r, c int) complex128
	// String returns a human-readable rendering of the matrix.
	String() string
}
// SliceCMatrix is a CMatrix backed by a slice of row slices.
type SliceCMatrix [][]complex128

// InitSliceCMatrix creates a SliceCMatrix of dimensions r x c with all
// values initialized to 0.
func InitSliceCMatrix(r, c int) SliceCMatrix {
	M := make([][]complex128, r)
	for i := range M {
		M[i] = make([]complex128, c)
	}
	return M
}

// Dims returns the matrix dimensions.
// Assumes all inner slices have the same length.
func (M SliceCMatrix) Dims() (r, c int) {
	return len(M), len(M[0])
}

// At returns the element at row r, column c.
func (M SliceCMatrix) At(r, c int) complex128 {
	return M[r][c]
}

// String renders the matrix one row per line, with the elements of a row
// separated by single spaces:
//
//	"row1_col1 row1_col2 ... row1_colN\nrow2_col1 ..."
//
// Bug fix: elements were previously concatenated with no separator at
// all, contradicting the documented space-separated format and making the
// output unreadable.
func (M SliceCMatrix) String() string {
	r, c := M.Dims()
	var buffer bytes.Buffer
	for i := 0; i < r; i++ {
		for j := 0; j < c; j++ {
			if j > 0 {
				buffer.WriteByte(' ')
			}
			buffer.WriteString(fmt.Sprint(M.At(i, j)))
		}
		buffer.WriteString("\n")
	}
	return buffer.String()
}

// AddTo adds M to A in place (A <- A + M).
// Panics if the dimensions differ.
func (M SliceCMatrix) AddTo(A *SliceCMatrix) {
	mr, mc := M.Dims()
	ar, ac := A.Dims()
	if mr != ar || mc != ac {
		panic("Adding incompatible matrices")
	}
	for i := 0; i < mr; i++ {
		for j := 0; j < mc; j++ {
			(*A)[i][j] += M[i][j]
		}
	}
}

// AddMulTo adds val*M to A in place (A <- A + val*M).
// Panics if the dimensions differ.
func (M SliceCMatrix) AddMulTo(A *SliceCMatrix, val complex128) {
	mr, mc := M.Dims()
	ar, ac := A.Dims()
	if mr != ar || mc != ac {
		panic("Adding incompatible matrices")
	}
	for i := 0; i < mr; i++ {
		for j := 0; j < mc; j++ {
			(*A)[i][j] += val * M[i][j]
		}
	}
}
// MulBy multiplies M by the given scalar in place (M <- val*M).
func (M *SliceCMatrix) MulBy(val complex128) {
	mr, mc := M.Dims()
	for i := 0; i < mr; i++ {
		for j := 0; j < mc; j++ {
			(*M)[i][j] *= val
		}
	}
} | CMatrix.go | 0.85446 | 0.500793 | CMatrix.go | starcoder |
package csg
import (
"fmt"
)
// Box is an axis-aligned bounding box, represented by its componentwise
// minimum and maximum corners.
type Box struct {
	Min Vector
	Max Vector
}

// Center returns the center of the bounding box (Min + (Max-Min)/2).
func (b *Box) Center() *Vector {
	return b.Max.Minus(&b.Min).DividedBy(2.0).Plus(&b.Min)
}

// AddVector grows the bounding box to include the vector (as a point).
func (b *Box) AddVector(v *Vector) {
	b.Min.Min(v)
	b.Max.Max(v)
}

// String returns a string representation of the bounding box.
func (b *Box) String() string {
	return fmt.Sprintf("box [ %v %v ]", b.Min, b.Max)
}

// AddVertex grows the bounding box to include the vertex's position.
func (b *Box) AddVertex(v *Vertex) {
	b.Min.Min(v.Position)
	b.Max.Max(v.Position)
}

// AddPolygon grows the bounding box to include every vertex of the polygon.
func (b *Box) AddPolygon(p *Polygon) {
	for _, v := range p.Vertices {
		b.Min.Min(v.Position)
		b.Max.Max(v.Position)
	}
}
// Divide2x2x2 divides the bounding box into a 2x2x2 grid of equally sized
// sub-boxes, for use in an OctTree. The returned slice is indexed by
// x*4 + y*2 + z.
func (b *Box) Divide2x2x2() []*Box {
	dSize := b.Size().DividedBy(2)
	bs := make([]*Box, 8)
	for x := 0; x < 2; x++ {
		for y := 0; y < 2; y++ {
			for z := 0; z < 2; z++ {
				bc := &Box{}
				// Offset Min by one sub-box size per axis index; Max is one
				// sub-box size beyond that.
				min := b.Min.Clone()
				min.X += dSize.X * float64(x)
				min.Y += dSize.Y * float64(y)
				min.Z += dSize.Z * float64(z)
				bc.Min.CopyFrom(min)
				max := min.Plus(dSize)
				bc.Max.CopyFrom(max)
				i := x*4 + y*2 + z
				bs[i] = bc
			}
		}
	}
	return bs
}
// CountContainedPolygonVertices returns how many of the polygon's vertices
// lie strictly inside the bounding box.
func (b *Box) CountContainedPolygonVertices(p *Polygon) int {
	count := 0
	for _, vert := range p.Vertices {
		if b.Contains(vert.Position) {
			count++
		}
	}
	return count
}

// Contains reports whether the vector (treated as a point) lies strictly
// inside the bounding box; points exactly on a face count as outside.
func (b *Box) Contains(v *Vector) bool {
	return v.X > b.Min.X && v.X < b.Max.X &&
		v.Y > b.Min.Y && v.Y < b.Max.Y &&
		v.Z > b.Min.Z && v.Z < b.Max.Z
}
// Corners returns the eight corner points of the bounding box: Min, the
// three corners one edge away from Min, Max, and the three corners one
// edge away from Max.
func (b *Box) Corners() []*Vector {
	size := b.Size()
	return []*Vector{
		b.Min.Clone(),
		b.Min.Plus(&Vector{X: size.X}),
		b.Min.Plus(&Vector{Y: size.Y}),
		b.Min.Plus(&Vector{Z: size.Z}),
		b.Max.Clone(),
		b.Max.Minus(&Vector{X: size.X}),
		b.Max.Minus(&Vector{Y: size.Y}),
		b.Max.Minus(&Vector{Z: size.Z}),
	}
}
// RelationToPlane classifies the bounding box against the plane by OR-ing
// the per-corner classification: FRONT, BACK, COPLANAR, or a combination
// when corners fall on both sides of the plane.
func (b *Box) RelationToPlane(p *Plane) PlaneRelationship {
	var boxType PlaneRelationship
	corners := b.Corners()
	for _, corner := range corners {
		// Signed offset of the corner from the plane; within EPSILON of
		// zero counts as coplanar.
		t := p.Normal.Dot(corner) - p.W
		var pType PlaneRelationship
		if t < (-EPSILON) {
			pType = BACK
		} else if t > EPSILON {
			pType = FRONT
		} else {
			pType = COPLANAR
		}
		boxType |= pType
	}
	return boxType
}
// Size returns the dimensions of the bounding box (Max - Min) as a vector.
func (b *Box) Size() *Vector {
	return &Vector{
		X: b.Max.X - b.Min.X,
		Y: b.Max.Y - b.Min.Y,
		Z: b.Max.Z - b.Min.Z,
	}
} | csg/box.go | 0.893559 | 0.704503 | box.go | starcoder |
package aggregate
import (
"math"
"squirreldb/types"
"github.com/prometheus/prometheus/pkg/value"
)
// AggregatedPoint is one pre-aggregated sample: the min, max, average and
// count of the non-stale raw points that fell into its time bucket.
type AggregatedPoint struct {
	Timestamp int64   // start of the aggregation bucket
	Min       float64 // smallest non-stale value in the bucket
	Max       float64 // largest non-stale value in the bucket
	Average   float64 // mean of the non-stale values
	Count     float64 // number of non-stale values aggregated
}

// AggregatedData is the aggregated series for one metric.
type AggregatedData struct {
	Points     []AggregatedPoint
	ID         types.MetricID
	TimeToLive int64
}
// Aggregate buckets data.Points into windows of the given resolution (same
// unit as the point timestamps) and emits one AggregatedPoint per
// non-empty bucket. A bucket is flushed as soon as a point belonging to a
// different bucket is seen, so the input is expected to be ordered by
// timestamp; out-of-order input would emit duplicate buckets. Returns the
// zero AggregatedData when there are no points.
func Aggregate(data types.MetricData, resolution int64) AggregatedData {
	if len(data.Points) == 0 {
		return AggregatedData{}
	}
	// Points accumulated for the bucket currently being built.
	workingPoints := make([]types.MetricPoint, 0)
	var currentAggregatedTimestamp int64
	aggregatedData := AggregatedData{
		TimeToLive: data.TimeToLive,
		ID:         data.ID,
	}
	for i, point := range data.Points {
		// Bucket start: timestamp rounded down to a multiple of resolution.
		aggregatedTimestamp := point.Timestamp - (point.Timestamp % resolution)
		if i == 0 {
			currentAggregatedTimestamp = aggregatedTimestamp
		} else if currentAggregatedTimestamp != aggregatedTimestamp {
			// Bucket boundary crossed: flush the current bucket.
			aggregatedPoint, ok := aggregatePoints(workingPoints, currentAggregatedTimestamp)
			if ok {
				aggregatedData.Points = append(aggregatedData.Points, aggregatedPoint)
			}
			workingPoints = workingPoints[:0]
			currentAggregatedTimestamp = aggregatedTimestamp
		}
		workingPoints = append(workingPoints, point)
	}
	// Flush the final bucket.
	if len(workingPoints) > 0 {
		aggregatedPoint, ok := aggregatePoints(workingPoints, currentAggregatedTimestamp)
		if ok {
			aggregatedData.Points = append(aggregatedData.Points, aggregatedPoint)
		}
	}
	return aggregatedData
}
// Returns an aggregated point (min, max, average, count of non-stale
// values) for the given bucket timestamp. The boolean is false when every
// point carried the Prometheus staleness marker (value.StaleNaN), in which
// case the bucket should be dropped.
func aggregatePoints(points []types.MetricPoint, timestamp int64) (AggregatedPoint, bool) {
	aggregatedPoint := AggregatedPoint{
		Timestamp: timestamp,
		// Note: this initial Count is provisional; it is overwritten with
		// the non-stale count below before any successful return.
		Count: float64(len(points)),
	}
	count := 0
	for _, point := range points {
		// Skip staleness markers; they carry no sample value.
		if math.Float64bits(point.Value) == value.StaleNaN {
			continue
		}
		count++
		// Seed min/max from the first usable value.
		if count == 1 {
			aggregatedPoint.Min = point.Value
			aggregatedPoint.Max = point.Value
		}
		// The IsNaN checks re-seed min/max when a NaN value was seen first.
		if point.Value < aggregatedPoint.Min || math.IsNaN(aggregatedPoint.Min) {
			aggregatedPoint.Min = point.Value
		}
		if point.Value > aggregatedPoint.Max || math.IsNaN(aggregatedPoint.Max) {
			aggregatedPoint.Max = point.Value
		}
		aggregatedPoint.Average += point.Value
	}
	if count == 0 {
		return aggregatedPoint, false
	}
	aggregatedPoint.Count = float64(count)
	aggregatedPoint.Average /= aggregatedPoint.Count
	// Normalize NaN results to the canonical NormalNaN bit pattern so they
	// are not mistaken for staleness markers downstream.
	if math.IsNaN(aggregatedPoint.Average) {
		aggregatedPoint.Average = math.Float64frombits(value.NormalNaN)
	}
	if math.IsNaN(aggregatedPoint.Min) {
		aggregatedPoint.Min = math.Float64frombits(value.NormalNaN)
	}
	if math.IsNaN(aggregatedPoint.Max) {
		aggregatedPoint.Max = math.Float64frombits(value.NormalNaN)
	}
	return aggregatedPoint, true
} | aggregate/aggregate.go | 0.818011 | 0.448004 | aggregate.go | starcoder |
package mat3
import (
"math"
"github.com/jakubDoka/mlok/mat"
)
// Vec is a three-dimensional vector.
type Vec struct {
	X, Y, Z float64
}

// V is a shorthand Vec constructor.
func V(x, y, z float64) Vec {
	return Vec{x, y, z}
}

// Rotated rotates v around pivot by angle with the right hand rule: with
// the thumb pointing along pivot, the curved fingers give the direction
// of rotation.
//
// Bug fixes relative to the previous implementation:
//   - the projection of v onto pivot divided by v.Dot(v) instead of
//     pivot.Dot(pivot), which is wrong whenever |v| != |pivot|;
//   - the in-plane basis was built from the broken Cross below as
//     v x pivot, making the rotation's handedness depend on the input;
//     pivot.Cross(v) yields a consistent right-handed rotation.
//
// Note: when v is parallel to pivot the orthogonal component is zero and
// the result contains NaNs (division by zero), as in the original code.
func (v Vec) Rotated(angle float64, pivot Vec) Vec {
	// Split v into the component parallel to pivot and the remainder.
	colinear := pivot.Scaled(v.Dot(pivot) / pivot.Dot(pivot))
	orthogonal := v.Sub(colinear)
	length := orthogonal.Len()
	// x and y are orthonormal axes spanning the plane of rotation.
	y := pivot.Cross(v).Normalized()
	x := orthogonal.Divided(length)
	sin, cos := math.Sincos(angle)
	// Rotate in the local frame, restore the original in-plane length and
	// re-add the component parallel to pivot.
	return x.Scaled(cos).Add(y.Scaled(sin)).Scaled(length).Add(colinear)
}

// Add returns the componentwise sum v + o.
func (v Vec) Add(o Vec) Vec {
	return Vec{v.X + o.X, v.Y + o.Y, v.Z + o.Z}
}

// Sub returns the componentwise difference v - o.
func (v Vec) Sub(o Vec) Vec {
	return Vec{v.X - o.X, v.Y - o.Y, v.Z - o.Z}
}

// Mul returns the componentwise (Hadamard) product v * o.
func (v Vec) Mul(o Vec) Vec {
	return Vec{v.X * o.X, v.Y * o.Y, v.Z * o.Z}
}

// AddE adds o to v in place.
func (v *Vec) AddE(o Vec) {
	v.X += o.X
	v.Y += o.Y
	v.Z += o.Z
}

// MulE multiplies v by o componentwise in place.
func (v *Vec) MulE(o Vec) {
	v.X *= o.X
	v.Y *= o.Y
	v.Z *= o.Z
}

// Scaled returns v multiplied by scalar.
func (v Vec) Scaled(scalar float64) Vec {
	return Vec{v.X * scalar, v.Y * scalar, v.Z * scalar}
}

// Divided returns v divided by scalar.
func (v Vec) Divided(scalar float64) Vec {
	return Vec{v.X / scalar, v.Y / scalar, v.Z / scalar}
}

// Cross returns the standard right-handed cross product v x o.
//
// Bug fix: the Y and Z components previously had flipped signs
// (v.X*o.Z-v.Z*o.X and v.Y*o.X-v.X*o.Y), which is not a cross product of
// any handedness — the result was not even orthogonal to the operands
// (e.g. (1,1,0) x (0,0,1) came back as (1,1,0)).
func (v Vec) Cross(o Vec) Vec {
	return Vec{
		v.Y*o.Z - v.Z*o.Y,
		v.Z*o.X - v.X*o.Z,
		v.X*o.Y - v.Y*o.X,
	}
}

// Dot returns the dot product of v and o.
func (v Vec) Dot(o Vec) float64 {
	return v.X*o.X + v.Y*o.Y + v.Z*o.Z
}

// Normalized returns v scaled to unit length.
// Returns NaNs for the zero vector (division by zero).
func (v Vec) Normalized() Vec {
	len := v.Len()
	return Vec{
		v.X / len,
		v.Y / len,
		v.Z / len,
	}
}

// Len returns the Euclidean length of v.
func (v Vec) Len() float64 {
	return math.Sqrt(v.X*v.X + v.Y*v.Y + v.Z*v.Z)
}

// Inv returns v with every component negated.
func (v Vec) Inv() Vec {
	return Vec{-v.X, -v.Y, -v.Z}
}
// Approx reports whether v and o are equal componentwise within the
// tolerance defined by mat.Approx for the given precision.
func (v Vec) Approx(o Vec, precision int) bool {
	return mat.Approx(v.X, o.X, precision) && mat.Approx(v.Y, o.Y, precision) && mat.Approx(v.Z, o.Z, precision)
} | mat/mat3/vec.go | 0.912668 | 0.614481 | vec.go | starcoder |
package ch02
import (
"bytes"
"fmt"
"reflect"
)
// Interpreter holds the global evaluation environment.
type Interpreter struct {
	// sym_table is part of the environment and holds the list of currently
	// defined symbols, stored as a cons list terminated by NIL.
	sym_table Atom
}

// New returns an Interpreter with an empty symbol table.
func New() *Interpreter {
	return &Interpreter{
		sym_table: NIL{},
	}
}

// Version returns the interpreter version string.
func (i *Interpreter) Version() string {
	return "chapter-02"
}
// Atom is our basic unit of storage.
type Atom interface{}

// Pair is a pair of two atoms.
type Pair struct {
	car Atom
	cdr Atom
}

// NIL is the empty list / nil atom.
type NIL struct{}

// ToExpr renders NIL as the text "NIL".
func (n NIL) ToExpr() []byte {
	return []byte("NIL")
}

// SExpr is anything that can render itself as s-expression text.
type SExpr interface {
	ToExpr() []byte
}

// Symbol is a named symbol atom.
type Symbol struct {
	name []byte
}

// ToExpr renders the symbol as its name.
func (s *Symbol) ToExpr() []byte {
	return s.name
}

// car returns the car of a pair; panics if a is not a pair.
func car(a Atom) Atom {
	if p, ok := a.(*Pair); ok {
		return p.car
	}
	panic("assert(car arg is a pair)")
}

// cdr returns the cdr of a pair; panics if a is not a pair.
func cdr(a Atom) Atom {
	if p, ok := a.(*Pair); ok {
		return p.cdr
	}
	panic("assert(cdr arg is a pair)")
}

// pairp reports whether a is a pair.
func pairp(a Atom) bool {
	switch a.(type) {
	case *Pair:
		return true
	}
	return false
}

// nilp reports whether a is NIL.
func nilp(a Atom) bool {
	switch a.(type) {
	case NIL:
		return true
	}
	return false
}

// cons allocates a fresh pair holding car and cdr.
func cons(car, cdr Atom) Atom {
	return &Pair{car: car, cdr: cdr}
}

// make_int returns a new Atom with the given integer value.
func make_int(x int) Atom {
	return x
}
// make_sym returns an Atom with the given symbol name. Symbols are
// interned: if a symbol with this name is already in the symbol table the
// existing Atom itself is returned (not a copy), so symbols can be
// compared by identity.
func (i *Interpreter) make_sym(name []byte) Atom {
	// Linear scan of the cons list of known symbols.
	for p := i.sym_table; !nilp(p); p = cdr(p) {
		a := car(p)
		if st, ok := a.(*Symbol); ok && bytes.Equal(name, st.name) {
			return a
		}
	}
	// Not found: create the symbol and push it onto the symbol table.
	a := &Symbol{name: name}
	i.sym_table = cons(a, i.sym_table)
	return a
}
// print_expr writes the textual form of a (as produced by atob) to
// stdout, with no trailing newline.
func print_expr(a Atom) {
	fmt.Printf("%s", string(atob(a)))
}
// atob renders an Atom as s-expression text: SExpr values via their own
// ToExpr, integers in decimal, and pairs as a parenthesized list using
// dotted notation for improper tails. Panics on any other dynamic type.
func atob(a Atom) []byte {
	if s, ok := a.(SExpr); ok {
		return s.ToExpr()
	}
	switch a.(type) {
	case int:
		integer, _ := a.(int)
		return []byte(fmt.Sprintf("%d", integer))
	case *Symbol:
		// Unreachable in practice: *Symbol satisfies SExpr and is handled
		// by the branch above.
		symbol, _ := a.(*Symbol)
		return symbol.name
	case *Pair:
		b := append([]byte{'('}, atob(car(a))...)
		a = cdr(a)
		for !nilp(a) {
			if !pairp(a) {
				// Improper list: render the tail as " . tail".
				b = append(b, ' ', '.', ' ')
				b = append(b, atob(a)...)
				break
			}
			b = append(b, ' ')
			b = append(b, atob(car(a))...)
			a = cdr(a)
		}
		return append(b, ')')
	}
	panic(fmt.Sprintf("assert(atom.type != %v)", reflect.TypeOf(a)))
} | ch02/lisp.go | 0.669205 | 0.477615 | lisp.go | starcoder |
package geom
import "fmt"
// operand identifies which geometry of a binary operation (such as Union
// or Covers) is being referred to: the first (A) or the second (B).
type operand int

const (
	operandA operand = 0
	operandB operand = 1
)

// label records whether membership is known, and what it is.
type label struct {
	// populated is true once inSet holds a valid value.
	populated bool
	// inSet reports whether the labelled item is in the set (context
	// specific to how the label is used).
	inSet bool
}

// newHalfPopulatedLabels returns a label pair in which only the given
// operand's entry is populated, with the given membership.
func newHalfPopulatedLabels(operand operand, inSet bool) [2]label {
	var labels [2]label
	labels[operand] = label{populated: true, inSet: inSet}
	return labels
}

// newPopulatedLabels returns a label pair with both entries populated and
// carrying the same membership.
func newPopulatedLabels(inSet bool) [2]label {
	var labels [2]label
	for i := range labels {
		labels[i] = label{populated: true, inSet: inSet}
	}
	return labels
}

// mergeLabels ORs src into dst, field by field.
func mergeLabels(dst *[2]label, src [2]label) {
	for i := range dst {
		dst[i].populated = dst[i].populated || src[i].populated
		dst[i].inSet = dst[i].inSet || src[i].inSet
	}
}

// location records whether a position lies on an operand's interior
// and/or boundary.
type location struct {
	interior bool
	boundary bool
}

// mergeLocations ORs src into dst, field by field.
func mergeLocations(dst *[2]location, src [2]location) {
	for i := range dst {
		dst[i].interior = dst[i].interior || src[i].interior
		dst[i].boundary = dst[i].boundary || src[i].boundary
	}
}

// newLocationsOnBoundary returns a location pair in which only the given
// operand is marked as being on the boundary.
func newLocationsOnBoundary(operand operand) [2]location {
	var locs [2]location
	locs[operand].boundary = true
	return locs
}
// assertPresence panics unless both labels are populated; the select*
// predicates below are only meaningful on fully populated labels.
func assertPresence(labels [2]label) {
	if !labels[0].populated || !labels[1].populated {
		panic(fmt.Sprintf("all presence flags in labels not set: %v", labels))
	}
}

// selectUnion reports membership in the union (in A or in B).
func selectUnion(labels [2]label) bool {
	assertPresence(labels)
	return labels[0].inSet || labels[1].inSet
}

// selectIntersection reports membership in the intersection (in A and B).
func selectIntersection(labels [2]label) bool {
	assertPresence(labels)
	return labels[0].inSet && labels[1].inSet
}

// selectDifference reports membership in the difference (in A, not in B).
func selectDifference(labels [2]label) bool {
	assertPresence(labels)
	return labels[0].inSet && !labels[1].inSet
}

// selectSymmetricDifference reports membership in the symmetric
// difference (in exactly one of A, B).
func selectSymmetricDifference(labels [2]label) bool {
	assertPresence(labels)
	return labels[0].inSet != labels[1].inSet
} | geom/dcel_label.go | 0.734215 | 0.603056 | dcel_label.go | starcoder |
package jsoncodec
import (
"encoding/hex"
"github.com/orbs-network/crypto-lib-go/crypto/encoding"
"github.com/pkg/errors"
"math/big"
"reflect"
"strconv"
"strings"
)
// supported enumerates the accepted argument type names; it is appended to
// error messages when an unknown type is encountered.
const supported = "Supported types are: uint32 uint64 uint256 bool string bytes bytes20 bytes32 uint32Array uint64Array uint256Array boolArray stringArray bytesArray bytes20Array bytes32Array gamma:address gamma:keys-file-address"

// Arg is one method argument from the JSON input: a type name plus its
// value — a string for scalar types, a []interface{} of strings for
// *Array types.
type Arg struct {
	Type  string
	Value interface{}
}

// isArgsInputStructureValid validates only the JSON shape of each
// argument (string value for scalars, array-of-interface for *Array
// types); the contents themselves are validated later when unmarshalled.
func isArgsInputStructureValid(args []*Arg) error {
	for i, arg := range args {
		rValue := reflect.TypeOf(arg.Value).String()
		if strings.HasSuffix(arg.Type, "Array") {
			if rValue != "[]interface {}" {
				return errors.Errorf("Argument %d's Type is marked as an Array and it's Value should contain an array of string\nCurrently %s\n", i+1, rValue)
			}
		} else if rValue != "string" {
			return errors.Errorf("Argument %d's Type is marked as a Scalar and it's Value should contain a string", i+1)
		}
	}
	return nil
}
// unmarshalScalar converts one scalar argument value (always a string in
// the JSON input) into the Go value matching argType. Hex-encoded types
// (bytes, bytes20, bytes32, uint256) are decoded with simpleDecodeHex and
// size-checked where fixed-width. The error messages are sentence
// fragments — presumably the caller prefixes them with argument context
// such as "Argument N should be "; verify against the caller.
func unmarshalScalar(argType, value string) (interface{}, error) {
	switch argType {
	case "uint32":
		val, err := strconv.ParseUint(value, 10, 32)
		if err != nil {
			return nil, errors.Errorf("a numeric value\nCurrent value: '%s'", value)
		}
		return uint32(val), nil
	case "uint64":
		val, err := strconv.ParseUint(value, 10, 64)
		if err != nil {
			return nil, errors.Errorf("a numeric value\nCurrent value: '%s'", value)
		}
		return val, nil
	case "string":
		// Strings pass through untouched.
		return value, nil
	case "bytes":
		// Variable-length byte string, hex encoded.
		val, err := simpleDecodeHex(value)
		if err != nil {
			return nil, errors.Errorf("bytes in hex format\nHex decoder returned error: %s\nCurrent value: '%s'", err.Error(), value)
		}
		return val, nil
	case "bool":
		// Booleans are encoded as the strings "1" and "0".
		if value == "1" {
			return true, nil
		} else if value == "0" {
			return false, nil
		} else {
			return nil, errors.Errorf("1 or 0\nCurrent value: '%s'", value)
		}
	case "uint256":
		// 32 bytes of big-endian hex, returned as a *big.Int.
		valBytes, err := simpleDecodeHex(value)
		if err != nil {
			return nil, errors.Errorf("uint256 value in bytes in a hex format (64 hexes)\nHex decoder returned error: %s\nCurrent value: '%s'", err.Error(), value)
		}
		if len(valBytes) != 32 {
			return nil, errors.Errorf("uint256 value in bytes in a hex format (64 hexes)\n Actual size : %d", len(valBytes))
		}
		val := big.NewInt(0)
		val.SetBytes(valBytes)
		return val, nil
	case "bytes20":
		// Exactly 20 bytes of hex, returned as a [20]byte array.
		valBytes, err := simpleDecodeHex(value)
		if err != nil {
			return nil, errors.Errorf("bytes20 in a hex format (40 hexes)\nHex decoder returned error: %s\nCurrent value: '%s'", err.Error(), value)
		}
		if len(valBytes) != 20 {
			return nil, errors.Errorf("bytes20 in a hex format (40 hexes)\n Actual size : %d", len(valBytes))
		}
		var val [20]byte
		copy(val[:], valBytes)
		return val, nil
	case "bytes32":
		// Exactly 32 bytes of hex, returned as a [32]byte array.
		valBytes, err := simpleDecodeHex(value)
		if err != nil {
			return nil, errors.Errorf("bytes32 in a hex format (64 hexes)\nHex decoder returned error: %s\nCurrent value: '%s'", err.Error(), value)
		}
		if len(valBytes) != 32 {
			return nil, errors.Errorf("bytes32 in a hex format (64 hexes)\n Actual size : %d", len(valBytes))
		}
		var val [32]byte
		copy(val[:], valBytes)
		return val, nil
	default:
		return nil, errors.Errorf("a known type. '%s' is unsupported\n%s", argType, supported)
	}
}
// unmarshalArray converts a decoded JSON array into the typed Go slice
// named by argType (e.g. "uint32Array" -> []uint32). Element encodings
// mirror unmarshalScalar.
//
// Bug fix: elements are now extracted with a checked type assertion; a
// non-string element previously caused a type-assertion panic (the input
// validator only checks that the value is an array, not what it holds).
func unmarshalArray(argType string, argValues []interface{}) (interface{}, error) {
	// str safely extracts element j, which must be a JSON string.
	str := func(j int) (string, error) {
		s, ok := argValues[j].(string)
		if !ok {
			return "", errors.Errorf("element %d should be a string\nCurrent value: '%v'", j+1, argValues[j])
		}
		return s, nil
	}
	switch argType {
	case "uint32Array":
		var res []uint32
		for j := range argValues {
			s, err := str(j)
			if err != nil {
				return nil, err
			}
			val, err := strconv.ParseUint(s, 10, 32)
			if err != nil {
				return nil, errors.Errorf("element %d should be a string containing a numeric value\nCurrent value: '%s'", j+1, s)
			}
			res = append(res, uint32(val))
		}
		return res, nil
	case "uint64Array":
		var res []uint64
		for j := range argValues {
			s, err := str(j)
			if err != nil {
				return nil, err
			}
			val, err := strconv.ParseUint(s, 10, 64)
			if err != nil {
				return nil, errors.Errorf("element %d should be a string containing a numeric value\nCurrent value: '%s'", j+1, s)
			}
			res = append(res, val)
		}
		return res, nil
	case "stringArray":
		var res []string
		for j := range argValues {
			s, err := str(j)
			if err != nil {
				return nil, err
			}
			res = append(res, s)
		}
		return res, nil
	case "bytesArray":
		var res [][]byte
		for j := range argValues {
			s, err := str(j)
			if err != nil {
				return nil, err
			}
			val, err := simpleDecodeHex(s)
			if err != nil {
				return nil, errors.Errorf("element %d should be a string containing bytes in hex format\nHex decoder returned error: %s\nCurrent value: '%s'", j+1, err.Error(), s)
			}
			res = append(res, val)
		}
		return res, nil
	case "boolArray":
		var res []bool
		for j := range argValues {
			s, err := str(j)
			if err != nil {
				return nil, err
			}
			switch s {
			case "1":
				res = append(res, true)
			case "0":
				res = append(res, false)
			default:
				return nil, errors.Errorf("element %d should be a string containing 1 or 0\nCurrent value: '%s'", j+1, s)
			}
		}
		return res, nil
	case "uint256Array":
		var res []*big.Int
		for j := range argValues {
			s, err := str(j)
			if err != nil {
				return nil, err
			}
			valBytes, err := simpleDecodeHex(s)
			if err != nil {
				return nil, errors.Errorf("element %d should be a string containing uint256 in hex format (64 hexes)\nHex decoder returned error: %s\nCurrent value: '%s'", j+1, err.Error(), s)
			}
			if len(valBytes) != 32 {
				return nil, errors.Errorf("element %d should be a string containing uint256 in a hex format (64 hexes)\n Actual size : %d", j+1, len(valBytes))
			}
			res = append(res, new(big.Int).SetBytes(valBytes))
		}
		return res, nil
	case "bytes20Array":
		var res [][20]byte
		for j := range argValues {
			s, err := str(j)
			if err != nil {
				return nil, err
			}
			valBytes, err := simpleDecodeHex(s)
			if err != nil {
				return nil, errors.Errorf("element %d should be a string containing bytes20 in hex format (40 hexes)\nHex decoder returned error: %s\nCurrent value: '%s'", j+1, err.Error(), s)
			}
			if len(valBytes) != 20 {
				return nil, errors.Errorf("element %d should be a string containing bytes20 in a hex format (40 hexes)\n Actual size : %d", j+1, len(valBytes))
			}
			var val [20]byte
			copy(val[:], valBytes)
			res = append(res, val)
		}
		return res, nil
	case "bytes32Array":
		var res [][32]byte
		for j := range argValues {
			s, err := str(j)
			if err != nil {
				return nil, err
			}
			valBytes, err := simpleDecodeHex(s)
			if err != nil {
				return nil, errors.Errorf("element %d should be a string containing bytes32 in hex format (64 hexes)\nHex decoder returned error: %s\nCurrent value: '%s'", j+1, err.Error(), s)
			}
			if len(valBytes) != 32 {
				return nil, errors.Errorf("element %d should be a string containing bytes32 in a hex format (64 hexes)\n Actual size : %d", j+1, len(valBytes))
			}
			var val [32]byte
			copy(val[:], valBytes)
			res = append(res, val)
		}
		return res, nil
	default:
		return nil, errors.Errorf("a known type. '%s' is unsupported\n%s", argType, supported)
	}
}
// UnmarshalArgs converts wire arguments (already shape-checked by
// isArgsInputStructureValid) into native Go values. "gamma:address"
// decodes hex bytes, "gamma:keys-file-address" resolves an address via
// the supplied key lookup, "*Array" types decode element-wise, and
// everything else is handled as a scalar.
func UnmarshalArgs(args []*Arg, getTestKeyFromFile func(string) *RawKey) ([]interface{}, error) {
	if err := isArgsInputStructureValid(args); err != nil {
		return nil, err
	}
	var res []interface{}
	for i, arg := range args {
		switch {
		case arg.Type == "gamma:address":
			decoded, err := encoding.DecodeHex(arg.Value.(string))
			if err != nil {
				return nil, errors.Errorf("Value of argument %d should be a string containing the bytes in hex\nHex decoder returned error: %s\n\nCurrent value: '%s'", i+1, err.Error(), arg.Value)
			}
			res = append(res, decoded)
		case arg.Type == "gamma:keys-file-address":
			// resolve a named test key to its on-chain address
			res = append(res, getTestKeyFromFile(arg.Value.(string)).Address)
		case strings.HasSuffix(arg.Type, "Array"):
			decoded, err := unmarshalArray(arg.Type, arg.Value.([]interface{}))
			if err != nil {
				return nil, errors.Errorf("Value of array argument %d, %s", i+1, err.Error())
			}
			res = append(res, decoded)
		default:
			decoded, err := unmarshalScalar(arg.Type, arg.Value.(string))
			if err != nil {
				return nil, errors.Errorf("Value of argument %d should be a string containing %s", i+1, err.Error())
			}
			res = append(res, decoded)
		}
	}
	return res, nil
}
// MarshalArgs converts native Go argument values into their wire form
// (*Arg with a type tag and string-encoded value). Supported scalars:
// uint32, uint64, string, bool, *big.Int, [20]byte, [32]byte; plus slices
// of each ([]byte is encoded as a single "bytes" value).
//
// Bug fix: in the []*big.Int case the 32-byte scratch buffer was declared
// once outside the loop and never cleared, so high bytes of a larger
// earlier value leaked into the encoding of smaller later values. The
// buffer is now reset per element. A nil argument is also reported as
// unsupported instead of panicking inside reflect.TypeOf.
func MarshalArgs(arguments []interface{}) ([]*Arg, error) {
	var res []*Arg
	for i, arg := range arguments {
		if arg == nil {
			// reflect.TypeOf(nil) returns nil and Kind() would panic
			return nil, errors.Errorf("Type of argument %d '%T' is unsupported\n\n%s", i+1, arg, supported)
		}
		if reflect.TypeOf(arg).Kind() == reflect.Slice { // all []Type including []byte
			var arrArguments []string
			switch arg := arg.(type) {
			case []byte:
				res = append(res, &Arg{"bytes", "0x" + hex.EncodeToString(arg)})
			case []uint32:
				for _, v := range arg {
					arrArguments = append(arrArguments, strconv.FormatUint(uint64(v), 10))
				}
				res = append(res, &Arg{"uint32Array", arrArguments})
			case []uint64:
				for _, v := range arg {
					arrArguments = append(arrArguments, strconv.FormatUint(v, 10))
				}
				res = append(res, &Arg{"uint64Array", arrArguments})
			case []string:
				res = append(res, &Arg{"stringArray", arg})
			case [][]byte:
				for _, v := range arg {
					arrArguments = append(arrArguments, "0x"+hex.EncodeToString(v))
				}
				res = append(res, &Arg{"bytesArray", arrArguments})
			case []bool:
				for _, v := range arg {
					if v {
						arrArguments = append(arrArguments, "1")
					} else {
						arrArguments = append(arrArguments, "0")
					}
				}
				res = append(res, &Arg{"boolArray", arrArguments})
			case []*big.Int:
				for _, v := range arg {
					// fresh buffer per element: big.Int.Bytes() is
					// variable-length and must be left-padded with zeros
					var val [32]byte
					b := v.Bytes()
					copy(val[32-len(b):], b)
					arrArguments = append(arrArguments, "0x"+hex.EncodeToString(val[:]))
				}
				res = append(res, &Arg{"uint256Array", arrArguments})
			case [][20]byte:
				for _, v := range arg {
					arrArguments = append(arrArguments, "0x"+hex.EncodeToString(v[:]))
				}
				res = append(res, &Arg{"bytes20Array", arrArguments})
			case [][32]byte:
				for _, v := range arg {
					arrArguments = append(arrArguments, "0x"+hex.EncodeToString(v[:]))
				}
				res = append(res, &Arg{"bytes32Array", arrArguments})
			default:
				return nil, errors.Errorf("Type of argument %d '%T' is unsupported\n\n%s", i+1, arg, supported)
			}
		} else {
			switch arg := arg.(type) {
			case uint32:
				res = append(res, &Arg{"uint32", strconv.FormatUint(uint64(arg), 10)})
			case uint64:
				res = append(res, &Arg{"uint64", strconv.FormatUint(arg, 10)})
			case string:
				res = append(res, &Arg{"string", arg})
			case bool:
				if arg {
					res = append(res, &Arg{"bool", "1"})
				} else {
					res = append(res, &Arg{"bool", "0"})
				}
			case *big.Int:
				// left-pad the magnitude to a full 32-byte word
				val := [32]byte{}
				b := arg.Bytes()
				copy(val[32-len(b):], b)
				res = append(res, &Arg{"uint256", "0x" + hex.EncodeToString(val[:])})
			case [20]byte:
				res = append(res, &Arg{"bytes20", "0x" + hex.EncodeToString(arg[:])})
			case [32]byte:
				res = append(res, &Arg{"bytes32", "0x" + hex.EncodeToString(arg[:])})
			default:
				return nil, errors.Errorf("Type of argument %d '%T' is unsupported\n\n%s", i+1, arg, supported)
			}
		}
	}
	return res, nil
}
// simpleDecodeHex decodes a hexadecimal string, tolerating an optional
// "0x" prefix.
func simpleDecodeHex(value string) ([]byte, error) {
	return hex.DecodeString(strings.TrimPrefix(value, "0x"))
}
package cmd
import (
	"bufio"
	"fmt"
	"io"
	"os"
	"strings"

	"github.com/seanhagen/jane-coding-challenge/games"
	"github.com/spf13/cobra"
)
// matchData is the input file; opened in parseCmd's PreRunE and closed in
// its PersistentPostRunE.
var matchData *os.File

// ranking accumulates match results while parsing; created fresh in PreRunE.
var ranking *games.Ranking
// parseCmd represents the parse command.
//
// Fixes over the previous version:
//   - reading uses ReadString instead of ReadLine, so a line longer than
//     the bufio buffer is no longer silently split into several matches;
//   - parse errors report conventional 1-based line numbers (the first
//     line used to be reported as "line 0");
//   - a stat failure other than "not exist" (e.g. permission denied) is
//     reported instead of panicking on a nil FileInfo.
var parseCmd = &cobra.Command{
	Use:   "parse path/to/match-data.txt",
	Short: "Read and parse match data to produce rankings",
	Long: `A win is worth 3 points for the winner, a loss is worth no points to the
loser, and a tie is worth 1 point for each team.
The only expected argument is a path to a file that contains match results.
The file format is as follows:
1. Each line represents a match:
- Each match is on a single line
- Each match is defined as "<team name> <score>, <team name> <score>"
- <team name> is a string of any length
- <score> is the score that team had at the end of the game, as a number
2. The lines should be in date order -- when the program finds a team that has
already played in a match day it considers that the end of the day and starts
a new match day to begin tracking.
An input file with the following contents:
Team A 1, Team B 2
Team C 2, Team D 2
Team A 2, Team D 1
Team C 1, Team B 0
Would produce two days, each with two matches.
On the other hand, a file with the contents:
Team A 1, Team B 2
Team A 2, Team D 1
Team C 1, Team B 0
Would produce two days, but day one would have just a single match ( A vs B ).`,
	Args:                  cobra.ExactArgs(1),
	DisableFlagsInUseLine: true,
	// PreRunE validates the path argument, opens the data file, and
	// prepares a fresh Ranking for RunE.
	PreRunE: func(cmd *cobra.Command, args []string) error {
		fName := args[0]
		info, err := os.Stat(fName)
		if err != nil {
			if os.IsNotExist(err) {
				return fmt.Errorf("file %v does not exist", fName)
			}
			return fmt.Errorf("unable to stat file: %w", err)
		}
		if info.IsDir() {
			return fmt.Errorf("given path is a directory, need a file")
		}
		matchData, err = os.OpenFile(fName, os.O_RDONLY, 0644)
		if err != nil {
			return fmt.Errorf("unable to open file: %w", err)
		}
		ranking = games.NewRanking()
		return nil
	},
	// RunE feeds every line of the file to the ranking and prints the
	// final results.
	RunE: func(cmd *cobra.Command, args []string) error {
		r := bufio.NewReader(matchData)
		for ln := 1; ; ln++ {
			line, err := r.ReadString('\n')
			if err != nil && err != io.EOF {
				return fmt.Errorf("error processing match data: %w", err)
			}
			if line == "" && err == io.EOF {
				// no trailing data after the last newline
				break
			}
			if ex := ranking.AddMatch(strings.TrimRight(line, "\r\n")); ex != nil {
				return fmt.Errorf("error parsing line %v of match data: %w", ln, ex)
			}
			if err == io.EOF {
				break
			}
		}
		fmt.Printf("%v", ranking.Results())
		return nil
	},
	// PersistentPostRunE releases the data file opened in PreRunE.
	PersistentPostRunE: func(cmd *cobra.Command, args []string) error {
		if err := matchData.Close(); err != nil {
			return fmt.Errorf("unable to close match data file: %w", err)
		}
		return nil
	},
}
// init registers the parse command with the root command tree.
func init() {
	rootCmd.AddCommand(parseCmd)
}
package eval
import (
"errors"
"github.com/eliquious/aechbar/calculator/ast"
"github.com/eliquious/lexer"
"math/big"
)
// Evaluate reduces expr to its simplest expression and returns the
// string representation of the result.
func Evaluate(expr ast.Expression) (string, error) {
	result, err := evalExpression(expr)
	if err != nil {
		return "", err
	}
	return result.String(), nil
}
// evalExpression dispatches on the node type: literals evaluate to
// themselves, unary and binary expressions are reduced recursively.
func evalExpression(expr ast.Expression) (ast.Expression, error) {
	switch expr.Type() {
	case ast.UnaryExpressionType:
		return evalUnaryExpression(expr.(*ast.UnaryExpression))
	case ast.BinaryExpressionType:
		return evalBinaryExpression(expr.(*ast.BinaryExpression))
	case ast.IntegerLiteralType, ast.DecimalLiteralType,
		ast.StringLiteralType, ast.DurationLiteralType,
		ast.TimestampLiteralType, ast.BooleanLiteralType:
		// literals are already fully reduced
		return expr, nil
	default:
		return nil, errors.New("Unsupported expression")
	}
}
// evalUnaryExpression applies a unary operator to its operand, which
// must be an integer or decimal literal.
func evalUnaryExpression(expr *ast.UnaryExpression) (ast.Expression, error) {
	operand := expr.Expr
	switch operand.Type() {
	case ast.IntegerLiteralType:
		return evalUnaryIntegerExpression(expr.Op, operand.(*ast.IntegerLiteral))
	case ast.DecimalLiteralType:
		return evalUnaryDecimalExpression(expr.Op, operand.(*ast.DecimalLiteral))
	}
	return nil, errors.New("Unsupported unary expression")
}
// evalUnaryIntegerExpression applies ++ or -- to an integer literal.
// Note: big.Int.Add writes into its receiver, so (as before) the
// literal's value is updated in place.
func evalUnaryIntegerExpression(op lexer.Token, expr *ast.IntegerLiteral) (ast.Expression, error) {
	switch op {
	case lexer.PLUSPLUS:
		return &ast.IntegerLiteral{expr.Value.Add(expr.Value, big.NewInt(1))}, nil
	case lexer.MINUSMINUS:
		return &ast.IntegerLiteral{expr.Value.Add(expr.Value, big.NewInt(-1))}, nil
	}
	return nil, errors.New("Unsupported integer unary expression")
}

// evalUnaryDecimalExpression applies ++ or -- to a decimal literal.
// Note: big.Float.Add also writes into its receiver (in-place update).
func evalUnaryDecimalExpression(op lexer.Token, expr *ast.DecimalLiteral) (ast.Expression, error) {
	switch op {
	case lexer.PLUSPLUS:
		return &ast.DecimalLiteral{expr.Value.Add(expr.Value, big.NewFloat(1))}, nil
	case lexer.MINUSMINUS:
		return &ast.DecimalLiteral{expr.Value.Add(expr.Value, big.NewFloat(-1))}, nil
	}
	return nil, errors.New("Unsupported decimal unary expression")
}
// evalBinaryExpression first reduces both operands, then dispatches on
// the operator class: arithmetic, bitwise, or boolean.
func evalBinaryExpression(expr *ast.BinaryExpression) (ast.Expression, error) {
	reduced, err := reduceBinaryExpression(expr)
	if err != nil {
		return nil, err
	}
	switch expr.Op {
	case lexer.PLUS, lexer.MINUS, lexer.MUL, lexer.DIV, lexer.POW:
		return evalBinaryMathExpression(reduced)
	case lexer.AMPERSAND, lexer.XOR, lexer.PIPE, lexer.LSHIFT, lexer.RSHIFT:
		return evalBinaryBitwiseExpression(reduced)
	case lexer.AND, lexer.OR, lexer.EQEQ, lexer.NEQ, lexer.LT, lexer.LTE, lexer.GT, lexer.GTE:
		return evalBinaryBooleanExpression(reduced)
	}
	return nil, errors.New("Unsupported binary expression")
}
// evalBinaryMathExpression routes an arithmetic operator to its
// dedicated evaluator.
func evalBinaryMathExpression(expr *ast.BinaryExpression) (ast.Expression, error) {
	switch expr.Op {
	case lexer.POW:
		return evalPowExpression(expr)
	case lexer.MUL:
		return evalMultExpression(expr)
	case lexer.DIV:
		return evalDivExpression(expr)
	case lexer.PLUS:
		return evalPlusExpression(expr)
	case lexer.MINUS:
		return evalMinusExpression(expr)
	}
	return nil, errors.New("Unsupported binary expression")
}
// evalBinaryBooleanExpression is an unimplemented stub: the comparison
// and logical operators are recognised (the empty switch cases keep the
// operator inventory in place for future work), but every path currently
// returns "Unsupported boolean expression".
// TODO: implement the listed operators.
func evalBinaryBooleanExpression(expr *ast.BinaryExpression) (ast.Expression, error) {
	switch expr.Op {
	case lexer.AND:
	case lexer.OR:
	case lexer.EQEQ:
	case lexer.NEQ:
	case lexer.LT:
	case lexer.LTE:
	case lexer.GT:
	case lexer.GTE:
	default:
		return nil, errors.New("Unsupported boolean expression")
	}
	return nil, errors.New("Unsupported boolean expression")
}
// evalBinaryBitwiseExpression is an unimplemented stub: the bitwise
// operators are recognised but every path currently returns an error.
// (Note: the error text says "boolean"; presumably a copy-paste of the
// stub above — revisit when implementing.)
// TODO: implement the listed operators.
func evalBinaryBitwiseExpression(expr *ast.BinaryExpression) (ast.Expression, error) {
	switch expr.Op {
	case lexer.AMPERSAND:
	case lexer.XOR:
	case lexer.PIPE:
	case lexer.LSHIFT:
	case lexer.RSHIFT:
	default:
		return nil, errors.New("Unsupported boolean expression")
	}
	return nil, errors.New("Unsupported boolean expression")
}
func reduceBinaryExpression(expr *ast.BinaryExpression) (*ast.BinaryExpression, error) {
// Eval left hand side
lh, err := evalExpression(expr.LExpr)
if err != nil {
return nil, err
}
// Eval right hand side
rh, err := evalExpression(expr.RExpr)
if err != nil {
return nil, err
}
return &ast.BinaryExpression{expr.Op, lh, rh}, nil
} | calculator/eval/evaluator.go | 0.651577 | 0.468487 | evaluator.go | starcoder |
package toml
import (
"bufio"
"bytes"
"fmt"
"io"
"io/ioutil"
"strconv"
"strings"
"time"
"unicode/utf8"
)
// FormatRule customises a Formatter; rules are applied in order by
// NewFormatter.
type FormatRule func(*Formatter) error

// WithTab selects the indentation unit: tab >= 1 means that many spaces,
// anything else keeps the default tab character.
func WithTab(tab int) FormatRule {
	return func(ft *Formatter) error {
		if tab < 1 {
			return nil
		}
		ft.withTab = strings.Repeat(" ", tab)
		return nil
	}
}

// WithEmpty controls whether empty tables are kept when rewriting.
func WithEmpty(with bool) FormatRule {
	return func(ft *Formatter) error {
		ft.withEmpty = with
		return nil
	}
}

// WithNest controls whether nested sub-tables are indented; when
// disabled, all tables are left-aligned.
func WithNest(with bool) FormatRule {
	return func(ft *Formatter) error {
		ft.withNest = with
		return nil
	}
}

// WithComment controls whether comments from the source document are
// kept when rewriting.
func WithComment(with bool) FormatRule {
	return func(ft *Formatter) error {
		ft.withComment = with
		return nil
	}
}

// WithRaw keeps values exactly as written in the source document,
// effectively disabling the value-formatting options.
func WithRaw(with bool) FormatRule {
	return func(ft *Formatter) error {
		ft.withRaw = with
		return nil
	}
}

// WithInline controls whether (arrays of) inline tables are rewritten
// as (arrays of) regular tables.
func WithInline(inline bool) FormatRule {
	return func(ft *Formatter) error {
		ft.withInline = inline
		return nil
	}
}
// WithArray selects array layout: "single" forces one line, "multi" one
// element per line, "" or "mixed" keeps short arrays inline. Arrays with
// 0 or 1 element are always written on one line regardless.
func WithArray(format string) FormatRule {
	return func(ft *Formatter) error {
		mode, ok := map[string]int{
			"":       arrayMixed,
			"mixed":  arrayMixed,
			"multi":  arrayMulti,
			"single": arraySingle,
		}[strings.ToLower(format)]
		if !ok {
			return fmt.Errorf("%s: unsupported array format", format)
		}
		ft.withArray = mode
		return nil
	}
}
// WithTime configures datetime output: millis fractional-second digits
// (capped at 9, the nanosecond limit) and optional conversion to UTC.
func WithTime(millis int, utc bool) FormatRule {
	return func(ft *Formatter) error {
		if millis > 9 {
			millis = 9
		}
		layout := "2006-01-02 15:04:05"
		if millis > 0 {
			// "0" digits always print, even when trailing zeros
			layout += "." + strings.Repeat("0", millis)
		}
		ft.timeconv = formatTime(layout+"-07:00", utc)
		return nil
	}
}
// WithFloat selects the float notation ("f"/"float", "e"/"scientific",
// "g"/"auto") and the digit-grouping interval for underscores.
func WithFloat(format string, underscore int) FormatRule {
	return func(ft *Formatter) error {
		var verb byte
		switch strings.ToLower(format) {
		case "", "f", "float":
			verb = 'f'
		case "e", "scientific":
			verb = 'e'
		case "g", "auto":
			verb = 'g'
		default:
			return fmt.Errorf("%s: unsupported specifier", format)
		}
		ft.floatconv = formatFloat(verb, underscore)
		return nil
	}
}
// WithNumber selects the integer base (hex, octal, binary, or decimal)
// and the digit-grouping interval for underscores.
func WithNumber(format string, underscore int) FormatRule {
	return func(ft *Formatter) error {
		var (
			base   int
			prefix string
		)
		switch strings.ToLower(format) {
		case "", "d", "decimal", "dec":
			base = 10
		case "x", "hexa", "hex":
			base, prefix = 16, "0x"
		case "o", "octal", "oct":
			base, prefix = 8, "0o"
		case "b", "binary", "bin":
			base, prefix = 2, "0b"
		default:
			return fmt.Errorf("%s: unsupported base", format)
		}
		ft.intconv = formatInteger(base, underscore, prefix)
		return nil
	}
}
// WithEOL selects the line terminator: ""/"lf"/"linux" for \n,
// "crlf"/"windows" for \r\n.
//
// Bug fix: the error message for an unknown value said "unsupported eof";
// this option is about line endings, so it now says "unsupported eol".
func WithEOL(format string) FormatRule {
	return func(ft *Formatter) error {
		switch strings.ToLower(format) {
		case "crlf", "windows":
			ft.withEOL = "\r\n"
		case "", "lf", "linux":
			ft.withEOL = "\n"
		default:
			return fmt.Errorf("%s: unsupported eol", format)
		}
		return nil
	}
}
// Array layout modes selected by WithArray.
const (
	arrayMixed int = iota // short arrays inline, multiline ones kept multiline
	arraySingle           // always a single line
	arrayMulti            // always one element per line
)

// Formatter is responsible to rewrite a TOML document according to the settings
// given by user.
type Formatter struct {
	doc    Node          // parsed document; Format expects a *Table root
	writer *bufio.Writer // destination, set by Format

	// value converters; all default to the identity function
	floatconv func(string) (string, error)
	intconv   func(string) (string, error)
	timeconv  func(string) (string, error)

	withArray   int    // one of arrayMixed / arraySingle / arrayMulti
	withInline  bool   // expand inline tables into regular tables
	withTab     string // indentation unit (tab or spaces)
	withEOL     string // line terminator ("\n" or "\r\n")
	withEmpty   bool   // keep empty tables
	withComment bool   // keep source comments
	withNest    bool   // indent nested sub-tables
	currLevel   int    // current indentation depth
	withRaw     bool   // emit values exactly as found in the source
}
// NewFormatter parses the TOML document at path doc and returns a
// Formatter configured with the given rules. Rules are applied in order;
// the first failing rule aborts construction.
func NewFormatter(doc string, rules ...FormatRule) (*Formatter, error) {
	keep := func(str string) (string, error) { return str, nil }
	ft := &Formatter{
		floatconv:   keep,
		intconv:     keep,
		timeconv:    keep,
		withArray:   arrayMixed,
		withComment: true,
		withTab:     "\t",
		withEOL:     "\n",
	}
	buf, err := ioutil.ReadFile(doc)
	if err != nil {
		return nil, err
	}
	if ft.doc, err = Parse(bytes.NewReader(buf)); err != nil {
		return nil, err
	}
	for _, rule := range rules {
		if err := rule(ft); err != nil {
			return nil, err
		}
	}
	return ft, nil
}
// Format writes the reformatted document to w.
func (f *Formatter) Format(w io.Writer) error {
	f.writer = bufio.NewWriter(w)
	root, ok := f.doc.(*Table)
	if !ok {
		return fmt.Errorf("document not parsed properly")
	}
	if err := f.formatTable(root, nil); err != nil {
		return err
	}
	return f.writer.Flush()
}
// formatTable writes curr (header plus its options) and recurses into
// its sub-tables; paths holds the dotted key path of the enclosing
// tables.
//
// Bug fix: a formatOptions failure was previously swallowed
// ("return nil"); the error is now propagated.
func (f *Formatter) formatTable(curr *Table, paths []string) error {
	options := curr.listOptions()
	if f.withEmpty || len(options) > 0 {
		f.formatHeader(curr, paths)
		if err := f.formatOptions(options, append(paths, curr.key.Literal)); err != nil {
			return err
		}
		f.endLine()
	}
	if !curr.isRoot() && curr.kind.isContainer() {
		paths = append(paths, curr.key.Literal)
	}
	if f.canNest(curr) {
		f.enterLevel(false)
		defer f.leaveLevel(false)
	}
	for _, next := range curr.listTables() {
		if err := f.formatTable(next, paths); err != nil {
			return err
		}
	}
	return nil
}
// formatOptions writes every option of a table, aligning their "=" signs
// to the longest key. When inline-table expansion is enabled
// (withInline), inline tables and the table elements of mixed arrays are
// collected and re-emitted afterwards as regular (sub-)tables.
func (f *Formatter) formatOptions(options []*Option, paths []string) error {
	// table pairs a collected sub-table with an optional synthetic path
	// segment (used for tables pulled out of mixed arrays).
	type table struct {
		prefix string
		*Table
	}
	var (
		length = longestKey(options)
		array  int
		inlines []table
	)
	for _, o := range options {
		if i, ok := o.value.(*Table); ok && f.withInline {
			// Promote an inline table to a regular table, keeping its key
			// and comments; emitted after the plain options below.
			i.kind = tableRegular
			i.key = o.key
			i.comment = o.comment
			inlines = append(inlines, table{Table: i})
			continue
		}
		if i, ok := o.value.(*Array); ok && f.withInline {
			// Split a mixed array: table elements become an array of
			// tables, everything else stays behind in a plain array.
			var (
				a Array
				t = Table{kind: tableArray, key: o.key}
			)
			for _, n := range i.nodes {
				if nod, ok := n.(*Table); ok {
					nod.key = o.key
					nod.kind = tableItem
					t.nodes = append(t.nodes, nod)
				} else {
					a.nodes = append(a.nodes, n)
				}
			}
			if !t.isEmpty() {
				sub := table{Table: &t}
				if !a.isEmpty() {
					// truly mixed content: disambiguate the table part
					// with a synthetic quoted "#N" path segment
					sub.prefix = fmt.Sprintf("\"#%d\"", array)
					array++
				}
				inlines = append(inlines, sub)
			}
			if a.isEmpty() {
				continue
			}
			o.value = &a
		}
		f.formatComment(o.comment.pre, true)
		f.beginLine()
		f.writeKey(o.key.Literal, length)
		if err := f.formatValue(o.value); err != nil {
			return err
		}
		f.formatComment(o.comment.post, false)
		f.endLine()
	}
	if len(inlines) > 0 {
		// emit the collected tables one indent level deeper
		f.endLine()
		f.enterLevel(false)
		defer f.leaveLevel(false)
		for _, i := range inlines {
			parents := append([]string{}, paths...)
			if i.prefix != "" {
				parents = append(parents, i.prefix)
			}
			if err := f.formatTable(i.Table, parents); err != nil {
				return err
			}
		}
	}
	return nil
}
// formatValue renders a literal, array, or inline table value; a nil
// node is a no-op.
func (f *Formatter) formatValue(n Node) error {
	switch v := n.(type) {
	case nil:
		return nil
	case *Literal:
		if f.withRaw {
			// raw mode: emit the token exactly as it appeared in the source
			f.writer.WriteString(v.token.Raw)
			return nil
		}
		return f.formatLiteral(v)
	case *Array:
		return f.formatArray(v)
	case *Table:
		return f.formatInline(v)
	default:
		return fmt.Errorf("unexpected value type %T", n)
	}
}
// formatLiteral writes a single literal: strings are re-quoted, other
// tokens pass through the configured numeric/datetime converters.
func (f *Formatter) formatLiteral(i *Literal) error {
	if i.token.isString() {
		f.formatString(i.token)
		return nil
	}
	converted, err := f.convertValue(i.token)
	if err != nil {
		return err
	}
	f.writer.WriteString(converted)
	return nil
}
// formatString writes a string token with the quoting style it had in
// the source (basic/literal, single-/multi-line), re-escaping content as
// needed. Unknown token types are silently skipped.
func (f *Formatter) formatString(tok Token) {
	var (
		isMulti bool
		quoting string
		escape func(rune) (rune, bool)
	)
	switch tok.Type {
	case TokBasic:
		escape = escapeBasic
		quoting = "\""
	case TokBasicMulti:
		escape = escapeMulti
		quoting, isMulti = "\"\"\"", true
	case TokLiteral:
		// literal strings take no escaping (escape stays nil)
		quoting = "'"
	case TokLiteralMulti:
		quoting, isMulti = "'''", true
	default:
		return
	}
	f.writer.WriteString(quoting)
	if isMulti {
		// multi-line strings open with a newline right after the quotes
		f.endLine()
	}
	str := escapeString(tok.Literal, isMulti, escape)
	if isMulti && strings.IndexByte(str, newline) < 0 {
		// single-line content inside a multi-line string gets soft-wrapped
		str = textWrap(str)
	}
	f.writer.WriteString(str)
	f.writer.WriteString(quoting)
}
// textWrap soft-wraps str for multi-line TOML strings: lines are cut at
// word boundaries near 72 characters and joined with a line-ending
// backslash, which TOML strips when the string is read back.
func textWrap(str string) string {
	var (
		scan = bufio.NewScanner(strings.NewReader(str))
		buf strings.Builder
	)
	const (
		length = 72 // target line width
		limit  = 8  // tolerated overshoot before backtracking to the previous boundary
	)
	scan.Split(func(data []byte, ateof bool) (int, []byte, error) {
		if ateof {
			// flush whatever remains as the final token
			return len(data), data, bufio.ErrFinalToken
		}
		var i, j int
		for i < length {
			// advance boundary by boundary (space/tab/punctuation);
			// j remembers the previous boundary for backtracking
			x := bytes.IndexAny(data[i:], " \t.?,!;")
			if x < 0 {
				// no boundary in the buffered data: ask for more
				return 0, nil, nil
			}
			j, i = i, i+x+1
		}
		if i >= length+limit {
			// overshot too far past the target width; cut at the
			// previous boundary instead
			i = j
		}
		return i, data[:i], nil
	})
	scan.Scan()
	for {
		buf.WriteString(scan.Text())
		if !scan.Scan() {
			break
		}
		// escaped newline between wrapped segments
		buf.WriteRune(backslash)
		buf.WriteRune(newline)
	}
	return buf.String()
}
// escapeBasic reports how r must be written inside a basic
// (single-line) string: the replacement rune and whether a backslash
// escape is required.
func escapeBasic(r rune) (rune, bool) {
	switch r {
	case backslash, dquote:
		return r, true
	case newline:
		return 'n', true
	case tab:
		return 't', true
	case formfeed:
		return 'f', true
	case backspace:
		return 'b', true
	case carriage:
		return 'r', true
	}
	return r, false
}

// escapeMulti is escapeBasic for multi-line strings, where a literal
// newline is allowed and therefore not escaped.
func escapeMulti(r rune) (rune, bool) {
	switch r {
	case backslash, dquote:
		return r, true
	case tab:
		return 't', true
	case formfeed:
		return 'f', true
	case backspace:
		return 'b', true
	case carriage:
		return 'r', true
	}
	return r, false
}
// escapeString re-escapes str for output. multi enables the special
// handling of '"' runs allowed inside multi-line basic strings; escape
// maps a rune to its escape letter (nil means no escaping, i.e. literal
// strings). Decoding stops silently at the first invalid UTF-8 sequence.
func escapeString(str string, multi bool, escape func(r rune) (rune, bool)) string {
	if escape == nil {
		return str
	}
	var (
		i int
		b strings.Builder
		ok bool
	)
	for i < len(str) {
		char, z := utf8.DecodeRuneInString(str[i:])
		if char == utf8.RuneError {
			break
		}
		if multi && char == dquote {
			// Up to two consecutive quotes may stay unescaped in a
			// multi-line basic string; a third falls through to the
			// escape call below.
			// NOTE(review): the quotes written here bypass the escape
			// function — confirm this asymmetry is intended.
			i += z
			b.WriteRune(char)
			if char, z = utf8.DecodeRuneInString(str[i:]); char == dquote {
				i += z
				b.WriteRune(char)
				char, z = utf8.DecodeRuneInString(str[i:])
			}
		}
		if char, ok = escape(char); ok {
			b.WriteRune(backslash)
		}
		b.WriteRune(char)
		i += z
	}
	return b.String()
}
// convertValue runs the configured converter for the token's type;
// tokens of any other type pass through unchanged.
func (f *Formatter) convertValue(tok Token) (string, error) {
	switch tok.Type {
	case TokDatetime:
		return f.timeconv(tok.Literal)
	case TokInteger:
		return f.intconv(tok.Literal)
	case TokFloat:
		return f.floatconv(tok.Literal)
	default:
		return tok.Literal, nil
	}
}
// formatArray chooses single-line or multi-line layout per the
// configured mode; arrays of at most one element always stay inline.
func (f *Formatter) formatArray(a *Array) error {
	switch {
	case len(a.nodes) <= 1 || f.withArray == arraySingle:
		return f.formatArrayLine(a)
	case f.withArray == arrayMulti || a.isMultiline():
		return f.formatArrayMultiline(a)
	default:
		return f.formatArrayLine(a)
	}
}
// formatArrayMultiline writes one array element per line, indented one
// level, each followed by a trailing comma and its comments.
func (f *Formatter) formatArrayMultiline(a *Array) error {
	// retr extracts the comment attached to any element kind.
	retr := func(n Node) comment {
		var c comment
		switch n := n.(type) {
		case *Literal:
			c = n.comment
		case *Array:
			c = n.comment
		case *Table:
			c = n.comment
		}
		return c
	}
	f.enterArray()
	defer func() {
		// close the bracket on the way out, dedented to the opening level
		f.leaveArray()
		f.beginLine()
		f.writer.WriteString("]")
	}()
	f.writer.WriteString("[")
	f.endLine()
	for _, n := range a.nodes {
		com := retr(n)
		f.formatComment(com.pre, true)
		f.beginLine()
		if err := f.formatValue(n); err != nil {
			return err
		}
		f.writer.WriteString(",")
		f.formatComment(com.post, false)
		f.endLine()
	}
	return nil
}
// formatArrayLine writes the whole array on a single line: [a, b, c].
func (f *Formatter) formatArrayLine(a *Array) error {
	f.writer.WriteString("[")
	last := len(a.nodes) - 1
	for i, n := range a.nodes {
		if err := f.formatValue(n); err != nil {
			return err
		}
		if i != last {
			f.writer.WriteString(", ")
		}
	}
	f.writer.WriteString("]")
	return nil
}
// formatInline writes t as an inline table: {k = v, ...}. Array layout
// is forced to single-line for the duration and restored on exit.
func (f *Formatter) formatInline(t *Table) error {
	saved := f.withArray
	defer func() { f.withArray = saved }()
	f.withArray = arraySingle

	f.writer.WriteString("{")
	for i, o := range t.listOptions() {
		if i > 0 {
			f.writer.WriteString(", ")
		}
		f.writeKey(o.key.Literal, 0)
		if err := f.formatValue(o.value); err != nil {
			return err
		}
	}
	f.writer.WriteString("}")
	return nil
}
// formatHeader writes the [table] or [[array-of-tables]] header for
// curr, surrounded by its comments. The root table has no header.
func (f *Formatter) formatHeader(curr *Table, paths []string) error {
	if curr.isRoot() {
		return nil
	}
	if curr.kind != tableItem {
		paths = append(paths, curr.key.Literal)
	}
	f.formatComment(curr.comment.pre, true)
	name := strings.Join(paths, ".")
	switch curr.kind {
	case tableRegular, tableImplicit:
		f.writeRegularHeader(name)
	case tableItem:
		f.writeArrayHeader(name)
	default:
		return fmt.Errorf("%s: can not write header for %s", curr.kind, name)
	}
	f.formatComment(curr.comment.post, false)
	f.endLine()
	return nil
}
// formatComment writes each line of comment; pre selects leading
// (own-line) versus trailing placement. No-op when comments are
// disabled or the text is empty.
func (f *Formatter) formatComment(comment string, pre bool) error {
	if comment == "" || !f.withComment {
		return nil
	}
	scan := bufio.NewScanner(strings.NewReader(comment))
	for scan.Scan() {
		f.writeComment(scan.Text(), pre)
	}
	return scan.Err()
}
// enterArray and leaveArray always adjust the indent level: arrays
// indent even when table nesting is disabled.
func (f *Formatter) enterArray() { f.enterLevel(true) }

func (f *Formatter) leaveArray() { f.leaveLevel(true) }

// enterLevel increases the indent depth when nesting is enabled or forced.
func (f *Formatter) enterLevel(force bool) {
	if force || f.withNest {
		f.currLevel++
	}
}

// leaveLevel decreases the indent depth when nesting is enabled or forced.
func (f *Formatter) leaveLevel(force bool) {
	if force || f.withNest {
		f.currLevel--
	}
}

// canNest reports whether curr's sub-tables should be indented one level.
func (f *Formatter) canNest(curr *Table) bool {
	switch {
	case curr.isRoot():
		return false
	case curr.kind == tableImplicit && !f.withEmpty:
		return false
	}
	return curr.kind.canNest()
}
// writeKey writes "key = ", space-padding the key to length bytes so
// sibling values line up; length == 0 disables padding.
func (f *Formatter) writeKey(str string, length int) {
	written, _ := f.writer.WriteString(str)
	if length > 0 {
		f.writer.WriteString(strings.Repeat(" ", length-written))
	}
	f.writer.WriteString(" = ")
}
// writeComment writes one comment line; pre puts it on its own
// (indented) line, otherwise it trails the current line after a space.
func (f *Formatter) writeComment(str string, pre bool) {
	if !pre {
		f.writer.WriteString(" ")
	} else {
		f.beginLine()
	}
	f.writer.WriteString("# ")
	f.writer.WriteString(str)
	if pre {
		f.endLine()
	}
}
// writeRegularHeader writes a standard table header: [name].
func (f *Formatter) writeRegularHeader(str string) {
	f.beginLine()
	f.writer.WriteString("[" + str + "]")
}

// writeArrayHeader writes an array-of-tables header: [[name]].
func (f *Formatter) writeArrayHeader(str string) {
	f.beginLine()
	f.writer.WriteString("[[" + str + "]]")
}
// endLine terminates the current line with the configured EOL sequence.
func (f *Formatter) endLine() {
	f.writer.WriteString(f.withEOL)
}

// beginLine writes the indentation for the current nesting level.
func (f *Formatter) beginLine() {
	if f.currLevel > 0 {
		f.writer.WriteString(strings.Repeat(f.withTab, f.currLevel))
	}
}
// longestKey returns the byte length of the longest option key; used to
// align the "=" signs of sibling options.
func longestKey(options []*Option) int {
	longest := 0
	for _, o := range options {
		if n := len(o.key.Literal); n > longest {
			longest = n
		}
	}
	return longest
}
// formatString returns str unchanged. It appears to be an unused
// identity placeholder — NOTE(review): candidate for removal once
// confirmed it has no callers elsewhere in the package.
func formatString(str string) string {
	return str
}
// formatTime returns a converter that re-parses a datetime literal
// (trying every known layout) and reformats it with pattern, optionally
// converted to UTC first. An empty pattern falls back to RFC 3339.
func formatTime(pattern string, utc bool) func(string) (string, error) {
	if pattern == "" {
		pattern = time.RFC3339
	}
	return func(str string) (string, error) {
		var (
			when time.Time
			err  error
		)
		for _, layout := range makeAllPatterns() {
			if when, err = time.Parse(layout, str); err == nil {
				break
			}
		}
		if err != nil {
			return "", err
		}
		if utc {
			when = when.UTC()
		}
		return when.Format(pattern), nil
	}
}
// formatFloat returns a converter that normalises a float literal with
// the given strconv verb and inserts grouping underscores.
func formatFloat(specifier byte, underscore int) func(string) (string, error) {
	return func(str string) (string, error) {
		v, err := strconv.ParseFloat(str, 64)
		if err != nil {
			return "", err
		}
		return withUnderscore(strconv.FormatFloat(v, specifier, -1, 64), underscore), nil
	}
}
// formatInteger returns a converter that rewrites an integer literal in
// the requested base, with the base prefix (0x/0o/0b) and grouping
// underscores.
//
// Bug fix: for negative inputs the sign is now placed before the prefix
// and outside the underscore grouping; previously strconv.FormatInt
// produced malformed output such as "0x-ff" for -255 in hexadecimal.
func formatInteger(base, underscore int, prefix string) func(string) (string, error) {
	return func(str string) (string, error) {
		n, err := strconv.ParseInt(str, 0, 64)
		if err != nil {
			return "", err
		}
		sign := ""
		mag := uint64(n)
		if n < 0 {
			sign = "-"
			mag = -mag // two's-complement negation; safe even for MinInt64
		}
		digits := strconv.FormatUint(mag, base)
		return sign + prefix + withUnderscore(digits, underscore), nil
	}
}
// withUnderscore inserts grouping underscores every `every` characters
// into a numeric literal, handling the integer part, fractional part,
// and exponent of a float independently. every == 0 disables grouping.
// NOTE(review): an upper-case 'E' exponent is re-emitted as lower-case
// 'e' — confirm this normalisation is intended.
func withUnderscore(str string, every int) string {
	if every == 0 || len(str) < every {
		return str
	}
	x := strings.Index(str, ".")
	if x < 0 {
		// plain integer
		return insertUnderscore(str, every)
	}
	// integer part, then the fraction
	part := insertUnderscore(str[:x], every) + "."
	str = str[x+1:]
	x = strings.IndexAny(str, "eE")
	if x < 0 {
		// fraction but no exponent
		return part + insertUnderscore(str, every)
	}
	part += insertUnderscore(str[:x], every)
	part += "e"
	if str[x+1] == '+' || str[x+1] == '-' {
		// keep the explicit exponent sign
		x++
		part += string(str[x])
	}
	return part + insertUnderscore(str[x+1:], every)
}
// insertUnderscore inserts an underscore every `every` characters,
// counting from the right: ("1234567", 3) -> "1_234_567".
func insertUnderscore(str string, every int) string {
	if len(str) <= every {
		return str
	}
	var b strings.Builder
	head := len(str) % every
	if head > 0 {
		// leading group shorter than `every`
		b.WriteString(str[:head])
		b.WriteString("_")
	}
	i := head
	for i < len(str) && i+every < len(str) {
		b.WriteString(str[i : i+every])
		b.WriteString("_")
		i += every
	}
	b.WriteString(str[i:])
	return b.String()
}
package nurbs
import "fmt"
/* findSpan determines the knot span index i such that U[i] <= u < U[i+1],
   using the binary search of algorithm A2.1 from The NURBS Book
   (Piegl & Tiller).
   Fixes over the previous version:
   - the per-iteration debug fmt.Println calls are gone;
   - parameters at or beyond either end of the valid range [U[p], U[n+1]]
     are clamped to the first/last span, so the search can no longer spin
     forever (the "BUG: this is not robust" note in the original).
   input:
   --n : last index in control points vector
   --p : degree
   --u : the parameter value to locate
   --U : the knot vector
*/
func findSpan(n, p int, u float64, U []int) int {
	if u >= float64(U[n+1]) {
		return n
	}
	if u <= float64(U[p]) {
		return p
	}
	low, high := p, n+1
	mid := (low + high) / 2
	for u < float64(U[mid]) || u >= float64(U[mid+1]) {
		if u < float64(U[mid]) {
			high = mid
		} else {
			low = mid
		}
		next := (low + high) / 2
		if next == mid {
			// No progress is only possible with a malformed knot vector.
			panic(fmt.Sprintf("nurbs: findSpan cannot locate u=%v in knot vector %v", u, U))
		}
		mid = next
	}
	return mid
}
/* basisFuns evaluates the p+1 non-vanishing basis functions at u
   (algorithm A2.2 of The NURBS Book). The triangular table is built
   bottom-up, reusing previously computed values through the left/right
   knot-difference vectors.
   input :
   --i : the span index (from findSpan)
   --u : the parameter value inside span i
   --p : the degree of the polynomial
   --U : the knot vector
*/
func basisFuns(i int, u float64, p int, U []int) []float64 {
	var (
		funcs = make([]float64, p+1)
		left  = make([]float64, p+1)
		right = make([]float64, p+1)
	)
	funcs[0] = 1.
	for j := 1; j <= p; j++ {
		left[j] = u - float64(U[i+1-j])
		right[j] = float64(U[i+j]) - u
		saved := 0.
		for r := 0; r < j; r++ {
			tmp := funcs[r] / (right[r+1] + left[j-r])
			funcs[r] = saved + right[r+1]*tmp
			saved = left[j-r] * tmp
		}
		funcs[j] = saved
	}
	return funcs
}
/* dersBasisFuns computes the nonzero basis functions and their first n
   derivatives at u (algorithm A2.3 of The NURBS Book). ders[k][j] is the
   kth derivative of the jth nonzero basis function; row 0 holds the
   functions themselves. Requires n <= p (only p+1 rows are allocated;
   higher derivatives of a degree-p basis vanish anyway).
   Bug fixes versus the previous version, both restoring A2.3:
   - the right knot difference is U[i+j]-u (was U[i+1]-u), matching
     basisFuns;
   - the inner accumulation uses ndu[rk+j][pk] (was ndu[r][pk]).
   input:
   -- i : the span index (from findSpan)
   -- u : the parameter value (integral here, as in the rest of the file)
   -- p : the degree of the polynomial
   -- n : the highest derivative order to compute (n <= p)
   -- U : the knot vector
   output:
   -- ders : functions (row 0) and derivatives (rows 1..n)
*/
func dersBasisFuns(i, u, p, n int, U []int) [][]float64 {
	// ndu stores basis functions (upper triangle) and knot differences
	// (lower triangle); a holds the two most recent rows of derivative
	// coefficients.
	ndu := make([][]float64, p+1)
	ders := make([][]float64, p+1)
	a := make([][]float64, p+1)
	for entry := range ndu {
		ndu[entry] = make([]float64, p+1)
		ders[entry] = make([]float64, p+1)
		a[entry] = make([]float64, p+1)
	}
	left := make([]float64, p+1)
	right := make([]float64, p+1)

	/* basis functions and knot differences (A2.2 with storage) */
	ndu[0][0] = 1.
	for j := 1; j <= p; j++ {
		left[j] = float64(u - U[i+1-j])
		right[j] = float64(U[i+j] - u) // fix: was U[i+1]-u
		saved := 0.
		for r := 0; r < j; r++ {
			/* lower triangle */
			ndu[j][r] = right[r+1] + left[j-r]
			temp := ndu[r][j-1] / ndu[j][r]
			/* upper triangle */
			ndu[r][j] = saved + right[r+1]*temp
			saved = left[j-r] * temp
		}
		ndu[j][j] = saved
	}
	/* load the basis functions */
	for j := 0; j <= p; j++ {
		ders[0][j] = ndu[j][p]
	}

	/* compute the derivatives, looping over function index r */
	for r := 0; r <= p; r++ {
		s1, s2 := 0, 1 // alternate rows in a
		a[0][0] = 1.
		/* kth derivative */
		for k := 1; k <= n; k++ {
			d := 0.
			rk := r - k
			pk := p - k
			if r >= k {
				a[s2][0] = a[s1][0] / ndu[pk+1][rk]
				d = a[s2][0] * ndu[rk][pk]
			}
			var j1, j2 int
			if rk >= -1 {
				j1 = 1
			} else {
				j1 = -rk
			}
			if r-1 <= pk {
				j2 = k - 1
			} else {
				j2 = p - r
			}
			for j := j1; j <= j2; j++ {
				a[s2][j] = (a[s1][j] - a[s1][j-1]) / ndu[pk+1][rk+j]
				d += a[s2][j] * ndu[rk+j][pk] // fix: was ndu[r][pk]
			}
			if r <= pk {
				a[s2][k] = -a[s1][k-1] / ndu[pk+1][r]
				d += a[s2][k] * ndu[r][pk]
			}
			ders[k][r] = d
			s1, s2 = s2, s1 // switch rows
		}
	}

	/* multiply through by the correct factors p!/(p-k)! */
	factor := p
	for k := 1; k <= n; k++ {
		for j := 0; j <= p; j++ {
			ders[k][j] *= float64(factor)
		}
		factor *= p - k
	}
	return ders
}
package search
// BinarySearch returns the index of target in the ascending-sorted
// slice, or -1 when target is absent (or the slice is empty).
func BinarySearch(array []int, target int) int {
	lo, hi := 0, len(array)-1
	for lo <= hi {
		mid := lo + (hi-lo)/2
		switch {
		case array[mid] == target:
			return mid
		case array[mid] < target:
			lo = mid + 1
		default:
			hi = mid - 1
		}
	}
	return -1
}
// BinarySearchFirstLargeOrEqual returns the index of the first element
// that is not smaller than target; the target itself need not appear in
// the slice. It returns -1 when the slice is empty or every element is
// smaller than target.
func BinarySearchFirstLargeOrEqual(array []int, target int) int {
	if len(array) == 0 {
		return -1
	}
	lo, hi := 0, len(array)
	for lo < hi {
		mid := lo + (hi-lo)/2
		// elements >= target (equal or greater) shrink the upper bound
		if array[mid] < target {
			lo = mid + 1
		} else {
			hi = mid
		}
	}
	// lo == len(array) means target exceeds every element
	if lo == len(array) {
		return -1
	}
	return lo
}
// BinarySearchLastLargeOrEqual returns, for an ascending-sorted slice:
// the index of the last occurrence of target if it is present; otherwise
// the index of the first element greater than target; 0 when every
// element is greater than target; and -1 when the slice is empty or
// target is greater than every element.
//
// Fix: the previous revision returned len(array) — an out-of-range
// index — when target exceeded every element; it now returns -1, which
// is consistent with the other search helpers in this package.
func BinarySearchLastLargeOrEqual(array []int, target int) int {
	n := len(array)
	if n <= 0 {
		return -1
	}
	// left becomes the index of the first element strictly greater
	// than target (or n if there is none).
	left, right := 0, n
	for left < right {
		mid := left + (right-left)/2
		if array[mid] <= target {
			left = mid + 1
		} else {
			right = mid
		}
	}
	if left == 0 {
		// every element is already greater than target
		return 0
	}
	if array[left-1] == target {
		// last occurrence of target itself
		return left - 1
	}
	if left == n {
		// target is greater than every element (was: returned n)
		return -1
	}
	return left
}
// BinarySearchFirstLarge returns the index of the first element strictly
// greater than target (target need not be present). It returns -1 when
// the slice is empty or no element exceeds target.
func BinarySearchFirstLarge(array []int, target int) int {
	n := len(array)
	// empty slice, or even the largest element does not exceed target
	if n == 0 || array[n-1] <= target {
		return -1
	}
	lo, hi := 0, n
	for lo < hi {
		m := lo + (hi-lo)/2
		if array[m] > target {
			hi = m
		} else {
			lo = m + 1
		}
	}
	return lo
}
// pow computes x raised to the n-th power by binary (fast)
// exponentiation, O(log n) multiplications. Non-positive n yields 1
// (matching the previous behavior; overflow is not checked).
func pow(x, n int) int {
	result := 1
	base := x
	for exp := n; exp > 0; exp >>= 1 {
		if exp&1 == 1 {
			result *= base
		}
		base *= base
	}
	return result
}
// RotatedBinarySearch searches an ascending-sorted array that has been
// rotated at an unknown pivot (e.g. [0,1,2,4,5,6,7] -> [4,5,6,7,0,1,2])
// and returns the index of target, or -1 when absent.
// The array is assumed to contain no duplicates; runtime is O(log n).
//
// Examples:
//	RotatedBinarySearch([]int{4,5,6,7,0,1,2}, 0) == 4
//	RotatedBinarySearch([]int{4,5,6,7,0,1,2}, 3) == -1
func RotatedBinarySearch(array []int, target int) int {
	lo, hi := 0, len(array)-1
	for lo <= hi {
		mid := lo + (hi-lo)/2
		v := array[mid]
		if v == target {
			return mid
		}
		if v >= array[lo] {
			// the left half [lo..mid] is sorted
			if array[lo] <= target && target < v {
				hi = mid - 1
			} else {
				lo = mid + 1
			}
		} else {
			// the right half [mid..hi] is sorted
			if v < target && target <= array[hi] {
				lo = mid + 1
			} else {
				hi = mid - 1
			}
		}
	}
	return -1
}
package value
import (
"fmt"
)
type valueType int
// ToNative converts a value from the jqp type system
// to the native go type system. Arrays and maps are converted
// recursively; a Func is wrapped so its native arguments and result are
// converted on every call. Unknown value types panic.
func ToNative(v Value) interface{} {
	switch val := v.(type) {
	case Int:
		return int(val)
	case String:
		return string(val)
	case Float:
		return float64(val)
	case Array:
		out := make([]interface{}, len(val))
		for idx, elem := range val {
			out[idx] = ToNative(elem)
		}
		return out
	case Map:
		out := make(map[string]interface{}, len(val))
		for key, elem := range val {
			out[key] = ToNative(elem)
		}
		return out
	case Func:
		// adapt the jqp function to a native-typed closure
		return func(args ...interface{}) interface{} {
			wrapped := make([]Value, len(args))
			for idx, arg := range args {
				wrapped[idx] = FromNative(arg, false)
			}
			return ToNative(val(wrapped...))
		}
	default:
		panic("jqp/value: cant convert value type '" + v.whichType().String() + "' to a native type")
	}
}
// FromNative transforms any supported native go value into a jqp Value.
// When port is true, slices and maps become Port values; otherwise they
// are converted (recursively) to Array and Map values. Unsupported
// native types panic.
func FromNative(v interface{}, port bool) Value {
	// The Value case stays first so anything already in the jqp type
	// system is returned unchanged before the concrete cases match.
	switch val := v.(type) {
	case Value:
		return val
	case int:
		return Int(val)
	case float64:
		return Float(val)
	case string:
		return String(val)
	case func(...interface{}) interface{}:
		// adapt the native closure to a jqp Func
		return Func(func(args ...Value) Value {
			native := make([]interface{}, len(args))
			for idx, arg := range args {
				native[idx] = ToNative(arg)
			}
			return FromNative(val(native...), port)
		})
	case []interface{}:
		if port {
			return Port{slicePortCargo(val)}
		}
		arr := make(Array, len(val))
		for idx := range arr {
			arr[idx] = FromNative(val[idx], port)
		}
		return arr
	case map[string]interface{}:
		if port {
			return Port{mapPortCargo(val)}
		}
		m := make(Map, len(val))
		for key, elem := range val {
			m[key] = FromNative(elem, port)
		}
		return m
	default:
		panic("jqp/value: cant convert this type from native:" + fmt.Sprintf("%T", val))
	}
}
const (
	// Ordinal order matters: binaryArithType picks the larger ordinal
	// as the "wider" type, and valueType.String indexes by it.
	intType valueType = iota
	floatType
	stringType
	arrayType
	mapType
	portType
	funcType
	_numTypes //number of types
)
// String returns the human-readable name of the value type.
// The table order must match the constant declaration order above;
// an out-of-range vt panics with an index error (programming bug).
func (vt valueType) String() string {
	var typeName = [_numTypes]string{"int", "float", "string", "array", "map", "port", "func"}
	return typeName[vt]
}
// Context carries the evaluation environment for expressions:
// Decl maps declared variables to their current values.
type Context struct {
	Decl map[Var]Value
}

// Expr is anything that can be evaluated to a Value under a Context.
type Expr interface {
	Eval(ctx Context) Value
}

// Value is an evaluated expression in the jqp type system; it knows its
// own type (whichType) and how to convert itself to another (toType).
type Value interface {
	Expr
	String() string
	whichType() valueType
	toType(valueType) Value
}
func binaryArithType(t1, t2 valueType) valueType {
if t1 > t2 {
return t1
}
return t2
} | value/value.go | 0.732305 | 0.437343 | value.go | starcoder |
package proc
import (
"reflect"
"github.com/gyuho/linux-inspect/schema"
)
// NetDevSchema represents '/proc/net/dev'.
// Reference http://man7.org/linux/man-pages/man5/proc.5.html
// and http://www.onlamp.com/pub/a/linux/2000/11/16/LinuxAdmin.html.
var NetDevSchema = schema.RawData{
	IsYAML: false,
	Columns: []schema.Column{
		{Name: "interface", Godoc: "network interface", Kind: reflect.String},
		{Name: "receive_bytes", Godoc: "total number of bytes of data received by the interface", Kind: reflect.Uint64},
		{Name: "receive_packets", Godoc: "total number of packets of data received by the interface", Kind: reflect.Uint64},
		{Name: "receive_errs", Godoc: "total number of receive errors detected by the device driver", Kind: reflect.Uint64},
		{Name: "receive_drop", Godoc: "total number of packets dropped by the device driver", Kind: reflect.Uint64},
		{Name: "receive_fifo", Godoc: "number of FIFO buffer errors", Kind: reflect.Uint64},
		{Name: "receive_frame", Godoc: "number of packet framing errors", Kind: reflect.Uint64},
		{Name: "receive_compressed", Godoc: "number of compressed packets received by the device driver", Kind: reflect.Uint64},
		{Name: "receive_multicast", Godoc: "number of multicast frames received by the device driver", Kind: reflect.Uint64},
		{Name: "transmit_bytes", Godoc: "total number of bytes of data transmitted by the interface", Kind: reflect.Uint64},
		{Name: "transmit_packets", Godoc: "total number of packets of data transmitted by the interface", Kind: reflect.Uint64},
		// Godoc previously said "receive errors" (copy-paste from receive_errs).
		{Name: "transmit_errs", Godoc: "total number of transmit errors detected by the device driver", Kind: reflect.Uint64},
		{Name: "transmit_drop", Godoc: "total number of packets dropped by the device driver", Kind: reflect.Uint64},
		{Name: "transmit_fifo", Godoc: "number of FIFO buffer errors", Kind: reflect.Uint64},
		{Name: "transmit_colls", Godoc: "number of collisions detected on the interface", Kind: reflect.Uint64},
		{Name: "transmit_carrier", Godoc: "number of carrier losses detected by the device driver", Kind: reflect.Uint64},
	},
	ColumnsToParse: map[string]schema.RawDataType{
		"receive_bytes":  schema.TypeBytes,
		"transmit_bytes": schema.TypeBytes,
	},
}
// NetTCPSchema represents '/proc/net/tcp' and '/proc/net/tcp6'.
// Reference http://man7.org/linux/man-pages/man5/proc.5.html
// and http://www.onlamp.com/pub/a/linux/2000/11/16/LinuxAdmin.html.
var NetTCPSchema = schema.RawData{
	IsYAML: false,
	Columns: []schema.Column{
		{Name: "sl", Godoc: "kernel hash slot", Kind: reflect.Uint64},
		{Name: "local_address", Godoc: "local-address:port", Kind: reflect.String},
		{Name: "rem_address", Godoc: "remote-address:port", Kind: reflect.String},
		{Name: "st", Godoc: "internal status of socket", Kind: reflect.String},
		{Name: "tx_queue", Godoc: "outgoing data queue in terms of kernel memory usage", Kind: reflect.String},
		{Name: "rx_queue", Godoc: "incoming data queue in terms of kernel memory usage", Kind: reflect.String},
		{Name: "tr", Godoc: "internal information of the kernel socket state", Kind: reflect.String},
		{Name: "tm->when", Godoc: "internal information of the kernel socket state", Kind: reflect.String},
		{Name: "retrnsmt", Godoc: "internal information of the kernel socket state", Kind: reflect.String},
		{Name: "uid", Godoc: "effective UID of the creator of the socket", Kind: reflect.Uint64},
		{Name: "timeout", Godoc: "timeout", Kind: reflect.Uint64},
		{Name: "inode", Godoc: "inode raw data", Kind: reflect.String},
	},
	// Raw columns listed here are additionally decoded according to the
	// given RawDataType (addresses and socket state get parsed forms).
	ColumnsToParse: map[string]schema.RawDataType{
		"local_address": schema.TypeIPAddress,
		"rem_address": schema.TypeIPAddress,
		"st": schema.TypeStatus,
	},
}
// LoadAvgSchema represents '/proc/loadavg'.
// Reference http://man7.org/linux/man-pages/man5/proc.5.html.
var LoadAvgSchema = schema.RawData{
	IsYAML: false,
	Columns: []schema.Column{
		// The first three Godoc strings previously read "total uptime in
		// seconds" (copy-pasted from UptimeSchema); per proc(5) these
		// fields are the 1/5/15-minute load averages.
		{Name: "load-avg-1-minute", Godoc: "load average of the system over the last 1 minute", Kind: reflect.Float64},
		{Name: "load-avg-5-minute", Godoc: "load average of the system over the last 5 minutes", Kind: reflect.Float64},
		{Name: "load-avg-15-minute", Godoc: "load average of the system over the last 15 minutes", Kind: reflect.Float64},
		{Name: "runnable-kernel-scheduling-entities", Godoc: "number of currently runnable kernel scheduling entities (processes, threads)", Kind: reflect.Int64},
		{Name: "current-kernel-scheduling-entities", Godoc: "number of kernel scheduling entities that currently exist on the system", Kind: reflect.Int64},
		{Name: "pid", Godoc: "PID of the process that was most recently created on the system", Kind: reflect.Int64},
	},
	ColumnsToParse: map[string]schema.RawDataType{},
}
// UptimeSchema represents '/proc/uptime'.
// Reference http://man7.org/linux/man-pages/man5/proc.5.html.
var UptimeSchema = schema.RawData{
	IsYAML: false,
	Columns: []schema.Column{
		{Name: "uptime-total", Godoc: "total uptime in seconds", Kind: reflect.Float64},
		{Name: "uptime-idle", Godoc: "total amount of time in seconds spent in idle process", Kind: reflect.Float64},
	},
	// Both columns are additionally parsed as second-valued durations.
	ColumnsToParse: map[string]schema.RawDataType{
		"uptime-total": schema.TypeTimeSeconds,
		"uptime-idle": schema.TypeTimeSeconds,
	},
}
// DiskStatSchema represents '/proc/diskstats'.
// Reference https://www.kernel.org/doc/Documentation/ABI/testing/procfs-diskstats
// and https://www.kernel.org/doc/Documentation/iostats.txt.
var DiskStatSchema = schema.RawData{
	IsYAML: false,
	Columns: []schema.Column{
		{Name: "major-number", Godoc: "major device number", Kind: reflect.Uint64},
		{Name: "minor-number", Godoc: "minor device number", Kind: reflect.Uint64},
		{Name: "device-name", Godoc: "device name", Kind: reflect.String},
		{Name: "reads-completed", Godoc: "total number of reads completed successfully", Kind: reflect.Uint64},
		{Name: "reads-merged", Godoc: "total number of reads merged when adjacent to each other", Kind: reflect.Uint64},
		{Name: "sectors-read", Godoc: "total number of sectors read successfully", Kind: reflect.Uint64},
		{Name: "time-spent-on-reading-ms", Godoc: "total number of milliseconds spent by all reads", Kind: reflect.Uint64},
		{Name: "writes-completed", Godoc: "total number of writes completed successfully", Kind: reflect.Uint64},
		{Name: "writes-merged", Godoc: "total number of writes merged when adjacent to each other", Kind: reflect.Uint64},
		{Name: "sectors-written", Godoc: "total number of sectors written successfully", Kind: reflect.Uint64},
		{Name: "time-spent-on-writing-ms", Godoc: "total number of milliseconds spent by all writes", Kind: reflect.Uint64},
		{Name: "I/Os-in-progress", Godoc: "only field that should go to zero (incremented as requests are on request_queue)", Kind: reflect.Uint64},
		{Name: "time-spent-on-I/Os-ms", Godoc: "milliseconds spent doing I/Os", Kind: reflect.Uint64},
		{Name: "weighted-time-spent-on-I/Os-ms", Godoc: "weighted milliseconds spent doing I/Os (incremented at each I/O start, I/O completion, I/O merge)", Kind: reflect.Uint64},
	},
	// NOTE(review): the *-ms columns are documented as milliseconds but
	// are mapped to TypeTimeMicroseconds below — confirm the intended
	// unit against the schema package's parser.
	ColumnsToParse: map[string]schema.RawDataType{
		"time-spent-on-reading-ms": schema.TypeTimeMicroseconds,
		"time-spent-on-writing-ms": schema.TypeTimeMicroseconds,
		"time-spent-on-I/Os-ms": schema.TypeTimeMicroseconds,
		"weighted-time-spent-on-I/Os-ms": schema.TypeTimeMicroseconds,
	},
}
// IOSchema represents 'proc/$PID/io'.
// Reference http://man7.org/linux/man-pages/man5/proc.5.html.
var IOSchema = schema.RawData{
	IsYAML: true,
	Columns: []schema.Column{
		{Name: "rchar", Godoc: "number of bytes which this task has caused to be read from storage (sum of bytes which this process passed to read)", Kind: reflect.Uint64},
		{Name: "wchar", Godoc: "number of bytes which this task has caused, or shall cause to be written to disk", Kind: reflect.Uint64},
		{Name: "syscr", Godoc: "number of read I/O operations", Kind: reflect.Uint64},
		{Name: "syscw", Godoc: "number of write I/O operations", Kind: reflect.Uint64},
		{Name: "read_bytes", Godoc: "number of bytes which this process really did cause to be fetched from the storage layer", Kind: reflect.Uint64},
		{Name: "write_bytes", Godoc: "number of bytes which this process caused to be sent to the storage layer", Kind: reflect.Uint64},
		{Name: "cancelled_write_bytes", Godoc: "number of bytes which this process caused to not happen by truncating pagecache", Kind: reflect.Uint64},
	},
	// All byte-valued columns are additionally parsed as sizes (TypeBytes).
	ColumnsToParse: map[string]schema.RawDataType{
		"rchar": schema.TypeBytes,
		"wchar": schema.TypeBytes,
		"read_bytes": schema.TypeBytes,
		"write_bytes": schema.TypeBytes,
		"cancelled_write_bytes": schema.TypeBytes,
	},
}
// StatSchema represents '/proc/$PID/stat'.
// Reference http://man7.org/linux/man-pages/man5/proc.5.html.
// Columns are listed in the exact field order /proc/$PID/stat emits
// (see proc(5)); do not reorder.
var StatSchema = schema.RawData{
	IsYAML: false,
	Columns: []schema.Column{
		{Name: "pid", Godoc: "process ID", Kind: reflect.Int64},
		{Name: "comm", Godoc: "filename of the executable (originally in parentheses, automatically removed by this package)", Kind: reflect.String},
		{Name: "state", Godoc: "one character that represents the state of the process", Kind: reflect.String},
		{Name: "ppid", Godoc: "PID of the parent process", Kind: reflect.Int64},
		{Name: "pgrp", Godoc: "group ID of the process", Kind: reflect.Int64},
		{Name: "session", Godoc: "session ID of the process", Kind: reflect.Int64},
		{Name: "tty_nr", Godoc: "controlling terminal of the process", Kind: reflect.Int64},
		{Name: "tpgid", Godoc: "ID of the foreground process group of the controlling terminal of the process", Kind: reflect.Int64},
		{Name: "flags", Godoc: "kernel flags word of the process", Kind: reflect.Int64},
		{Name: "minflt", Godoc: "number of minor faults the process has made which have not required loading a memory page from disk", Kind: reflect.Uint64},
		{Name: "cminflt", Godoc: "number of minor faults that the process's waited-for children have made", Kind: reflect.Uint64},
		{Name: "majflt", Godoc: "number of major faults the process has made which have required loading a memory page from disk", Kind: reflect.Uint64},
		{Name: "cmajflt", Godoc: "number of major faults that the process's waited-for children have made", Kind: reflect.Uint64},
		{Name: "utime", Godoc: "number of clock ticks that this process has been scheduled in user mode (includes guest_time)", Kind: reflect.Uint64},
		{Name: "stime", Godoc: "number of clock ticks that this process has been scheduled in kernel mode", Kind: reflect.Uint64},
		{Name: "cutime", Godoc: "number of clock ticks that this process's waited-for children have been scheduled in user mode", Kind: reflect.Uint64},
		{Name: "cstime", Godoc: "number of clock ticks that this process's waited-for children have been scheduled in kernel mode", Kind: reflect.Uint64},
		{Name: "priority", Godoc: "for processes running a real-time scheduling policy, the negated scheduling priority, minus one; that is, a number in the range -2 to -100, corresponding to real-time priorities 1 to 99. For processes running under a non-real-time scheduling policy, this is the raw nice value. The kernel stores nice values as numbers in the range 0 (high) to 39 (low)", Kind: reflect.Int64},
		{Name: "nice", Godoc: "nice value, a value in the range 19 (low priority) to -20 (high priority)", Kind: reflect.Int64},
		{Name: "num_threads", Godoc: "number of threads in this process", Kind: reflect.Int64},
		{Name: "itrealvalue", Godoc: "no longer maintained", Kind: reflect.Int64},
		{Name: "starttime", Godoc: "time(number of clock ticks) the process started after system boot", Kind: reflect.Uint64},
		{Name: "vsize", Godoc: "virtual memory size in bytes", Kind: reflect.Uint64},
		{Name: "rss", Godoc: "resident set size: number of pages the process has in real memory (text, data, or stack space but does not include pages which have not been demand-loaded in, or which are swapped out)", Kind: reflect.Int64},
		{Name: "rsslim", Godoc: "current soft limit in bytes on the rss of the process", Kind: reflect.Uint64},
		{Name: "startcode", Godoc: "address above which program text can run", Kind: reflect.Uint64},
		{Name: "endcode", Godoc: "address below which program text can run", Kind: reflect.Uint64},
		{Name: "startstack", Godoc: "address of the start (i.e., bottom) of the stack", Kind: reflect.Uint64},
		{Name: "kstkesp", Godoc: "current value of ESP (stack pointer), as found in the kernel stack page for the process", Kind: reflect.Uint64},
		{Name: "kstkeip", Godoc: "current EIP (instruction pointer)", Kind: reflect.Uint64},
		{Name: "signal", Godoc: "obsolete, because it does not provide information on real-time signals (use /proc/$PID/status)", Kind: reflect.Uint64},
		{Name: "blocked", Godoc: "obsolete, because it does not provide information on real-time signals (use /proc/$PID/status)", Kind: reflect.Uint64},
		{Name: "sigignore", Godoc: "obsolete, because it does not provide information on real-time signals (use /proc/$PID/status)", Kind: reflect.Uint64},
		{Name: "sigcatch", Godoc: "obsolete, because it does not provide information on real-time signals (use /proc/$PID/status)", Kind: reflect.Uint64},
		{Name: "wchan", Godoc: "channel in which the process is waiting (address of a location in the kernel where the process is sleeping)", Kind: reflect.Uint64},
		{Name: "nswap", Godoc: "not maintained (number of pages swapped)", Kind: reflect.Uint64},
		{Name: "cnswap", Godoc: "not maintained (cumulative nswap for child processes)", Kind: reflect.Uint64},
		{Name: "exit_signal", Godoc: "signal to be sent to parent when we die", Kind: reflect.Int64},
		{Name: "processor", Godoc: "CPU number last executed on", Kind: reflect.Int64},
		{Name: "rt_priority", Godoc: "real-time scheduling priority, a number in the range 1 to 99 for processes scheduled under a real-time policy, or 0, for non-real-time processes", Kind: reflect.Uint64},
		{Name: "policy", Godoc: "scheduling policy", Kind: reflect.Uint64},
		{Name: "delayacct_blkio_ticks", Godoc: "aggregated block I/O delays, measured in clock ticks", Kind: reflect.Uint64},
		{Name: "guest_time", Godoc: "number of clock ticks spent running a virtual CPU for a guest operating system", Kind: reflect.Uint64},
		{Name: "cguest_time", Godoc: "number of clock ticks (guest_time of the process's children)", Kind: reflect.Uint64},
		{Name: "start_data", Godoc: "address above which program initialized and uninitialized (BSS) data are placed", Kind: reflect.Uint64},
		{Name: "end_data", Godoc: "address below which program initialized and uninitialized (BSS) data are placed", Kind: reflect.Uint64},
		{Name: "start_brk", Godoc: "address above which program heap can be expanded with brk", Kind: reflect.Uint64},
		{Name: "arg_start", Godoc: "address above which program command-line arguments are placed", Kind: reflect.Uint64},
		{Name: "arg_end", Godoc: "address below program command-line arguments are placed", Kind: reflect.Uint64},
		{Name: "env_start", Godoc: "address above which program environment is placed", Kind: reflect.Uint64},
		{Name: "env_end", Godoc: "address below which program environment is placed", Kind: reflect.Uint64},
		{Name: "exit_code", Godoc: "thread's exit status in the form reported by waitpid(2)", Kind: reflect.Int64},
	},
	// Raw columns listed here get additional parsed representations.
	ColumnsToParse: map[string]schema.RawDataType{
		"state": schema.TypeStatus,
		"vsize": schema.TypeBytes,
		"rss": schema.TypeBytes,
		"rsslim": schema.TypeBytes,
	},
}
// StatusSchema represents 'proc/$PID/status'.
// Reference http://man7.org/linux/man-pages/man5/proc.5.html.
var StatusSchema = schema.RawData{
IsYAML: true,
Columns: []schema.Column{
{Name: "Name", Godoc: "command run by this process", Kind: reflect.String},
{Name: "Umask", Godoc: "process umask, expressed in octal with a leading", Kind: reflect.String},
{Name: "State", Godoc: "current state of the process: R (running), S (sleeping), D (disk sleep), T (stopped), T (tracing stop), Z (zombie), or X (dead)", Kind: reflect.String},
{Name: "Tgid", Godoc: "thread group ID", Kind: reflect.Int64},
{Name: "Ngid", Godoc: "NUMA group ID", Kind: reflect.Int64},
{Name: "Pid", Godoc: "process ID", Kind: reflect.Int64},
{Name: "PPid", Godoc: "parent process ID, which launches the Pid", Kind: reflect.Int64},
{Name: "TracerPid", Godoc: "PID of process tracing this process (0 if not being traced)", Kind: reflect.Int64},
{Name: "Uid", Godoc: "real, effective, saved set, and filesystem UIDs", Kind: reflect.String},
{Name: "Gid", Godoc: "real, effective, saved set, and filesystem UIDs", Kind: reflect.String},
{Name: "FDSize", Godoc: "number of file descriptor slots currently allocated", Kind: reflect.Uint64},
{Name: "Groups", Godoc: "supplementary group list", Kind: reflect.String},
{Name: "NStgid", Godoc: "thread group ID (i.e., PID) in each of the PID namespaces of which [pid] is a member", Kind: reflect.String},
{Name: "NSpid", Godoc: "thread ID (i.e., PID) in each of the PID namespaces of which [pid] is a member", Kind: reflect.String},
{Name: "NSpgid", Godoc: "process group ID (i.e., PID) in each of the PID namespaces of which [pid] is a member", Kind: reflect.String},
{Name: "NSsid", Godoc: "descendant namespace session ID hierarchy Session ID in each of the PID namespaces of which [pid] is a member", Kind: reflect.String},
{Name: "VmPeak", Godoc: "peak virtual memory usage. Vm includes physical memory and swap", Kind: reflect.String},
{Name: "VmSize", Godoc: "current virtual memory usage. VmSize is the total amount of memory required for this process", Kind: reflect.String},
{Name: "VmLck", Godoc: "locked memory size", Kind: reflect.String},
{Name: "VmPin", Godoc: "pinned memory size (pages can't be moved, requires direct-access to physical memory)", Kind: reflect.String},
{Name: "VmHWM", Godoc: `peak resident set size ("high water mark")`, Kind: reflect.String},
{Name: "VmRSS", Godoc: "resident set size. VmRSS is the actual amount in memory. Some memory can be swapped out to physical disk. So this is the real memory usage of the process", Kind: reflect.String},
{Name: "VmData", Godoc: "size of data segment", Kind: reflect.String},
{Name: "VmStk", Godoc: "size of stack", Kind: reflect.String},
{Name: "VmExe", Godoc: "size of text segments", Kind: reflect.String},
{Name: "VmLib", Godoc: "shared library code size", Kind: reflect.String},
{Name: "VmPTE", Godoc: "page table entries size", Kind: reflect.String},
{Name: "VmPMD", Godoc: "size of second-level page tables", Kind: reflect.String},
{Name: "VmSwap", Godoc: "swapped-out virtual memory size by anonymous private", Kind: reflect.String},
{Name: "HugetlbPages", Godoc: "size of hugetlb memory portions", Kind: reflect.String},
{Name: "Threads", Godoc: "number of threads in process containing this thread (process)", Kind: reflect.Uint64},
{Name: "SigQ", Godoc: "queued signals for the real user ID of this process (queued signals / limits)", Kind: reflect.String},
{Name: "SigPnd", Godoc: "number of signals pending for thread", Kind: reflect.String},
{Name: "ShdPnd", Godoc: "number of signals pending for process as a whole", Kind: reflect.String},
{Name: "SigBlk", Godoc: "masks indicating signals being blocked", Kind: reflect.String},
{Name: "SigIgn", Godoc: "masks indicating signals being ignored", Kind: reflect.String},
{Name: "SigCgt", Godoc: "masks indicating signals being caught", Kind: reflect.String},
{Name: "CapInh", Godoc: "masks of capabilities enabled in inheritable sets", Kind: reflect.String},
{Name: "CapPrm", Godoc: "masks of capabilities enabled in permitted sets", Kind: reflect.String},
{Name: "CapEff", Godoc: "masks of capabilities enabled in effective sets", Kind: reflect.String},
{Name: "CapBnd", Godoc: "capability Bounding set", Kind: reflect.String},
{Name: "CapAmb", Godoc: "ambient capability set", Kind: reflect.String},
{Name: "Seccomp", Godoc: "seccomp mode of the process (0 means SECCOMP_MODE_DISABLED; 1 means SECCOMP_MODE_STRICT; 2 means SECCOMP_MODE_FILTER)", Kind: reflect.Uint64},
{Name: "Cpus_allowed", Godoc: "mask of CPUs on which this process may run", Kind: reflect.String},
{Name: "Cpus_allowed_list", Godoc: "list of CPUs on which this process may run", Kind: reflect.String},
{Name: "Mems_allowed", Godoc: "mask of memory nodes allowed to this process", Kind: reflect.String},
{Name: "Mems_allowed_list", Godoc: "list of memory nodes allowed to this process", Kind: reflect.String},
{Name: "voluntary_ctxt_switches", Godoc: "number of voluntary context switches", Kind: reflect.Uint64},
{Name: "nonvoluntary_ctxt_switches", Godoc: "number of involuntary context switches", Kind: reflect.Uint64},
},
ColumnsToParse: map[string]schema.RawDataType{
"State": schema.TypeStatus,
"VmPeak": schema.TypeBytes,
"VmSize": schema.TypeBytes,
"VmLck": schema.TypeBytes,
"VmPin": schema.TypeBytes,
"VmHWM": schema.TypeBytes,
"VmRSS": schema.TypeBytes,
"VmData": schema.TypeBytes,
"VmStk": schema.TypeBytes,
"VmExe": schema.TypeBytes,
"VmLib": schema.TypeBytes,
"VmPTE": schema.TypeBytes,
"VmPMD": schema.TypeBytes,
"VmSwap": schema.TypeBytes,
"HugetlbPages": schema.TypeBytes,
},
} | proc/schema.go | 0.615088 | 0.449574 | schema.go | starcoder |
package check
import "math"
// IsValueInMapStringString checks if a string (X) is among the map's values.
// It returns the (unordered) slice of keys whose value equals X, and a
// bool that is true when at least one key matched. A nil or empty map
// yields an empty slice and false.
func IsValueInMapStringString(X string, Map map[string]string) ([]string, bool) {
	matches := []string{}
	for k, v := range Map {
		if v == X {
			matches = append(matches, k)
		}
	}
	return matches, len(matches) > 0
}
// IsValueInMapStringInt checks if an int (X) is among the map's values.
// It returns the (unordered) slice of keys whose value equals X, and a
// bool that is true when at least one key matched. A nil or empty map
// yields an empty slice and false.
func IsValueInMapStringInt(X int, Map map[string]int) ([]string, bool) {
	matches := []string{}
	for k, v := range Map {
		if v == X {
			matches = append(matches, k)
		}
	}
	return matches, len(matches) > 0
}
// IsValueInMapStringFloat64 checks if a float64 (X) is among the map's
// values, comparing with tolerance Epsilon (|X - value| <= Epsilon).
// It returns the (unordered) slice of matching keys and a bool that is
// true when at least one key matched. A nil or empty map yields an
// empty slice and false.
func IsValueInMapStringFloat64(X float64, Map map[string]float64, Epsilon float64) ([]string, bool) {
	matches := []string{}
	for k, v := range Map {
		if math.Abs(X-v) <= Epsilon {
			matches = append(matches, k)
		}
	}
	return matches, len(matches) > 0
}
// IsValueInMapIntString checks if a string (X) is among the map's values
// (the previous doc comment incorrectly said "an int"). It returns the
// (unordered) slice of int keys whose value equals X, and a bool that is
// true when at least one key matched. A nil or empty map yields an
// empty slice and false.
func IsValueInMapIntString(X string, Map map[int]string) ([]int, bool) {
	matches := []int{}
	for k, v := range Map {
		if v == X {
			matches = append(matches, k)
		}
	}
	return matches, len(matches) > 0
}
// IsValueInMapIntInt checks if an int (X) is among the map's values.
// It returns the (unordered) slice of int keys whose value equals X,
// and a bool that is true when at least one key matched. A nil or
// empty map yields an empty slice and false.
func IsValueInMapIntInt(X int, Map map[int]int) ([]int, bool) {
	matches := []int{}
	for k, v := range Map {
		if v == X {
			matches = append(matches, k)
		}
	}
	return matches, len(matches) > 0
}
// IsValueInMapIntFloat64 checks if a float64 (X) is among the map's
// values, comparing with tolerance Epsilon (|X - value| <= Epsilon).
// It returns the (unordered) slice of matching int keys and a bool that
// is true when at least one key matched. A nil or empty map yields an
// empty slice and false.
func IsValueInMapIntFloat64(X float64, Map map[int]float64, Epsilon float64) ([]int, bool) {
	matches := []int{}
	for k, v := range Map {
		if math.Abs(X-v) <= Epsilon {
			matches = append(matches, k)
		}
	}
	return matches, len(matches) > 0
}
package export
import "github.com/prometheus/client_golang/prometheus"
// QueryCacheExporter contains all the Prometheus metrics that are possible to gather from the tranquillity service.
//
// The `description` struct tags are informational only: they are not read by
// the Prometheus client library (Help text is set when each metric is built).
//
// NOTE(review): HitRate and AverageBytes are not monotonically increasing
// quantities, yet they are modelled as prometheus.Counter (Add-only) — confirm
// a Gauge was not intended for those fields.
type QueryCacheExporter struct {
DeltaNumEntries *prometheus.GaugeVec `description:"number of cache entries (since last emission)"`
DeltaSizeBytes *prometheus.GaugeVec `description:"size in bytes of cache entries (since last emission)"`
DeltaHits *prometheus.GaugeVec `description:"number of cache hits (since last emission)"`
DeltaMisses *prometheus.GaugeVec `description:"number of cache misses (since last emission)"`
DeltaEvictions *prometheus.GaugeVec `description:"number of cache evictions (since last emission)"`
DeltaHitRate prometheus.Counter `description:"cache hit rate (since last emission)"`
DeltaAverageBytes prometheus.Counter `description:"average cache entry byte size (since last emission)"`
DeltaTimeouts *prometheus.GaugeVec `description:"number of cache timeouts (since last emission)"`
DeltaErrors *prometheus.GaugeVec `description:"number of cache errors (since last emission)"`
DeltaPutOK prometheus.Counter `description:"number of new cache entries successfully cached (since last emission)"`
DeltaPutError prometheus.Counter `description:"number of new cache entries that could not be cached due to errors (since last emission)"`
DeltaPutOversized prometheus.Counter `description:"number of potential new cache entries that were skipped due to being too large (based on druid.{broker,historical,realtime}.cache.maxEntrySize properties) (since last emission)"`
TotalNumEntries *prometheus.GaugeVec `description:"number of cache entries"`
TotalSizeBytes *prometheus.GaugeVec `description:"size in bytes of cache entries"`
TotalHits *prometheus.GaugeVec `description:"number of cache hits"`
TotalMisses *prometheus.GaugeVec `description:"number of cache misses"`
TotalEvictions *prometheus.GaugeVec `description:"number of cache evictions"`
TotalHitRate prometheus.Counter `description:"cache hit rate"`
TotalAverageBytes prometheus.Counter `description:"average cache entry byte size"`
TotalTimeouts *prometheus.GaugeVec `description:"number of cache timeouts"`
TotalErrors *prometheus.GaugeVec `description:"number of cache errors"`
TotalPutOK prometheus.Counter `description:"number of new cache entries successfully cached"`
TotalPutError prometheus.Counter `description:"number of new cache entries that could not be cached due to errors"`
TotalPutOversized prometheus.Counter `description:"number of potential new cache entries that were skipped due to being too large (based on druid.{broker,historical,realtime}.cache.maxEntrySize properties)"`
}
// NewQueryCacheExporter returns a new Cache exporter object with every metric
// registered on the default Prometheus registry.
//
// All metrics live in the druid/cache namespace. GaugeVec metrics are created
// with an empty variable-label set; Counter metrics carry a constant "cache"
// label whose value is the metric name with underscores replaced by dashes
// (matching the original literal configuration).
//
// NOTE(review): prometheus.MustRegister panics on duplicate registration, so
// this constructor must only be called once per process.
func NewQueryCacheExporter() *QueryCacheExporter {
	qc := &QueryCacheExporter{
		DeltaNumEntries:   newCacheGaugeVec("delta_num_entries", "number of cache entries (since last emission)"),
		DeltaSizeBytes:    newCacheGaugeVec("delta_size_bytes", "size in bytes of cache entries (since last emission)"),
		DeltaHits:         newCacheGaugeVec("delta_hits", "number of cache hits (since last emission)"),
		DeltaMisses:       newCacheGaugeVec("delta_misses", "number of cache misses (since last emission)"),
		DeltaEvictions:    newCacheGaugeVec("delta_evictions", "number of cache evictions (since last emission)"),
		DeltaTimeouts:     newCacheGaugeVec("delta_timeouts", "number of cache timeouts (since last emission)"),
		DeltaErrors:       newCacheGaugeVec("delta_errors", "number of cache errors (since last emission)"),
		DeltaHitRate:      newCacheCounter("delta_hitrate", "delta-hitrate", "cache hit rate (since last emission)"),
		DeltaAverageBytes: newCacheCounter("delta_average_bytes", "delta-average-bytes", "average cache entry byte size (since last emission)"),
		DeltaPutOK:        newCacheCounter("delta_put_ok", "delta-put-ok", "number of new cache entries successfully cached (since last emission)"),
		DeltaPutError:     newCacheCounter("delta_put_error", "delta-put-error", "number of new cache entries that could not be cached due to errors (since last emission)"),
		DeltaPutOversized: newCacheCounter("delta_put_oversized", "delta-put-oversized", "number of potential new cache entries that were skipped due to being too large (based on druid.{broker,historical,realtime}.cache.maxEntrySize properties) (since last emission)"),
		TotalNumEntries:   newCacheGaugeVec("total_num_entries", "number of cache entries"),
		TotalSizeBytes:    newCacheGaugeVec("total_size_bytes", "size in bytes of cache entries"),
		TotalHits:         newCacheGaugeVec("total_hits", "number of cache hits"),
		TotalMisses:       newCacheGaugeVec("total_misses", "number of cache misses"),
		TotalEvictions:    newCacheGaugeVec("total_evictions", "number of cache evictions"),
		TotalTimeouts:     newCacheGaugeVec("total_timeouts", "number of cache timeouts"),
		TotalErrors:       newCacheGaugeVec("total_errors", "number of cache errors"),
		TotalHitRate:      newCacheCounter("total_hitrate", "total-hitrate", "cache hit rate"),
		TotalAverageBytes: newCacheCounter("total_average_bytes", "total-average-bytes", "average cache entry byte size"),
		TotalPutOK:        newCacheCounter("total_put_ok", "total-put-ok", "number of new cache entries successfully cached"),
		TotalPutError:     newCacheCounter("total_put_error", "total-put-error", "number of new cache entries that could not be cached due to errors"),
		TotalPutOversized: newCacheCounter("total_put_oversized", "total-put-oversized", "number of potential new cache entries that were skipped due to being too large (based on druid.{broker,historical,realtime}.cache.maxEntrySize properties)"),
	}
	// Register all the prometheus metrics, in the same order as before.
	for _, c := range []prometheus.Collector{
		qc.DeltaNumEntries, qc.DeltaSizeBytes, qc.DeltaHits, qc.DeltaMisses,
		qc.DeltaEvictions, qc.DeltaHitRate, qc.DeltaAverageBytes, qc.DeltaTimeouts,
		qc.DeltaErrors, qc.DeltaPutOK, qc.DeltaPutError, qc.DeltaPutOversized,
		qc.TotalNumEntries, qc.TotalSizeBytes, qc.TotalHits, qc.TotalMisses,
		qc.TotalEvictions, qc.TotalHitRate, qc.TotalAverageBytes, qc.TotalTimeouts,
		qc.TotalErrors, qc.TotalPutOK, qc.TotalPutError, qc.TotalPutOversized,
	} {
		prometheus.MustRegister(c)
	}
	return qc
}

// newCacheGaugeVec builds a GaugeVec in the druid/cache namespace with no
// variable labels.
func newCacheGaugeVec(name, help string) *prometheus.GaugeVec {
	return prometheus.NewGaugeVec(prometheus.GaugeOpts{
		Namespace: "druid",
		Subsystem: "cache",
		Name:      name,
		Help:      help,
	}, []string{})
}

// newCacheCounter builds a Counter in the druid/cache namespace carrying a
// constant "cache" label.
func newCacheCounter(name, label, help string) prometheus.Counter {
	return prometheus.NewCounter(prometheus.CounterOpts{
		Namespace: "druid",
		Subsystem: "cache",
		Name:      name,
		Help:      help,
		ConstLabels: prometheus.Labels{
			"cache": label,
		},
	})
}
// SetDeltaNumEntries records the delta number of cache entries.
// NOTE(review): despite the Set* name this calls Add on the gauge, so repeated
// calls accumulate rather than overwrite — confirm Gauge.Set was not intended.
func (qc *QueryCacheExporter) SetDeltaNumEntries(val float64) {
qc.DeltaNumEntries.WithLabelValues().Add(val)
}
// SetDeltaSizeBytes records the delta byte size of cache entries.
// NOTE(review): calls Add on the gauge, so values accumulate — confirm
// Gauge.Set was not intended.
func (qc *QueryCacheExporter) SetDeltaSizeBytes(val float64) {
qc.DeltaSizeBytes.WithLabelValues().Add(val)
}
// SetDeltaHits records the delta number of cache hits.
// NOTE(review): calls Add on the gauge, so values accumulate — confirm
// Gauge.Set was not intended.
func (qc *QueryCacheExporter) SetDeltaHits(val float64) {
qc.DeltaHits.WithLabelValues().Add(val)
}
// SetDeltaMisses records the delta number of cache misses.
// NOTE(review): calls Add on the gauge, so values accumulate — confirm
// Gauge.Set was not intended.
func (qc *QueryCacheExporter) SetDeltaMisses(val float64) {
qc.DeltaMisses.WithLabelValues().Add(val)
}
// SetDeltaEvictions records the delta number of cache evictions.
// NOTE(review): calls Add on the gauge, so values accumulate — confirm
// Gauge.Set was not intended.
func (qc *QueryCacheExporter) SetDeltaEvictions(val float64) {
qc.DeltaEvictions.WithLabelValues().Add(val)
}
// SetDeltaHitRate adds val to the delta hit-rate counter.
// NOTE(review): a hit rate is not monotonic; modelling it as a Counter
// (Add-only) looks wrong — confirm a Gauge was not intended.
func (qc *QueryCacheExporter) SetDeltaHitRate(val float64) {
qc.DeltaHitRate.Add(val)
}
// SetDeltaAverageBytes adds val to the delta average-entry-size counter.
// NOTE(review): an average is not monotonic; modelling it as a Counter
// (Add-only) looks wrong — confirm a Gauge was not intended.
func (qc *QueryCacheExporter) SetDeltaAverageBytes(val float64) {
qc.DeltaAverageBytes.Add(val)
}
// SetDeltaTimeouts records the delta number of cache timeouts.
// NOTE(review): calls Add on the gauge, so values accumulate — confirm
// Gauge.Set was not intended.
func (qc *QueryCacheExporter) SetDeltaTimeouts(val float64) {
qc.DeltaTimeouts.WithLabelValues().Add(val)
}
// SetDeltaErrors records the delta number of cache errors.
// NOTE(review): calls Add on the gauge, so values accumulate — confirm
// Gauge.Set was not intended.
func (qc *QueryCacheExporter) SetDeltaErrors(val float64) {
qc.DeltaErrors.WithLabelValues().Add(val)
}
// SetDeltaPutOK adds val to the counter of entries successfully cached
// since the last emission.
func (qc *QueryCacheExporter) SetDeltaPutOK(val float64) {
qc.DeltaPutOK.Add(val)
}
// SetDeltaPutError adds val to the counter of entries that could not be
// cached due to errors since the last emission.
func (qc *QueryCacheExporter) SetDeltaPutError(val float64) {
qc.DeltaPutError.Add(val)
}
// SetDeltaPutOversized adds val to the counter of entries skipped for being
// too large since the last emission.
func (qc *QueryCacheExporter) SetDeltaPutOversized(val float64) {
qc.DeltaPutOversized.Add(val)
}
// SetTotalNumEntries records the total number of cache entries.
// NOTE(review): despite the Set* name this calls Add on the gauge, so repeated
// calls accumulate rather than overwrite — confirm Gauge.Set was not intended.
func (qc *QueryCacheExporter) SetTotalNumEntries(val float64) {
qc.TotalNumEntries.WithLabelValues().Add(val)
}
// SetTotalSizeBytes records the total byte size of cache entries.
// NOTE(review): calls Add on the gauge, so values accumulate — confirm
// Gauge.Set was not intended.
func (qc *QueryCacheExporter) SetTotalSizeBytes(val float64) {
qc.TotalSizeBytes.WithLabelValues().Add(val)
}
// SetTotalHits records the total number of cache hits.
// NOTE(review): calls Add on the gauge, so values accumulate — confirm
// Gauge.Set was not intended.
func (qc *QueryCacheExporter) SetTotalHits(val float64) {
qc.TotalHits.WithLabelValues().Add(val)
}
// SetTotalMisses records the total number of cache misses.
// NOTE(review): calls Add on the gauge, so values accumulate — confirm
// Gauge.Set was not intended.
func (qc *QueryCacheExporter) SetTotalMisses(val float64) {
qc.TotalMisses.WithLabelValues().Add(val)
}
// SetTotalEvictions records the total number of cache evictions.
// NOTE(review): calls Add on the gauge, so values accumulate — confirm
// Gauge.Set was not intended.
func (qc *QueryCacheExporter) SetTotalEvictions(val float64) {
qc.TotalEvictions.WithLabelValues().Add(val)
}
// SetTotalHitRate adds val to the total hit-rate counter.
// NOTE(review): a hit rate is not monotonic; modelling it as a Counter
// (Add-only) looks wrong — confirm a Gauge was not intended.
func (qc *QueryCacheExporter) SetTotalHitRate(val float64) {
qc.TotalHitRate.Add(val)
}
// SetTotalAverageBytes adds val to the total average-entry-size counter.
// NOTE(review): an average is not monotonic; modelling it as a Counter
// (Add-only) looks wrong — confirm a Gauge was not intended.
func (qc *QueryCacheExporter) SetTotalAverageBytes(val float64) {
qc.TotalAverageBytes.Add(val)
}
// SetTotalTimeouts records the total number of cache timeouts.
// NOTE(review): calls Add on the gauge, so values accumulate — confirm
// Gauge.Set was not intended.
func (qc *QueryCacheExporter) SetTotalTimeouts(val float64) {
qc.TotalTimeouts.WithLabelValues().Add(val)
}
// SetTotalErrors records the total number of cache errors.
// NOTE(review): calls Add on the gauge, so values accumulate — confirm
// Gauge.Set was not intended.
func (qc *QueryCacheExporter) SetTotalErrors(val float64) {
qc.TotalErrors.WithLabelValues().Add(val)
}
// SetTotalPutOK adds val to the counter of entries successfully cached.
func (qc *QueryCacheExporter) SetTotalPutOK(val float64) {
qc.TotalPutOK.Add(val)
}
// SetTotalPutError adds val to the counter of entries that could not be
// cached due to errors.
func (qc *QueryCacheExporter) SetTotalPutError(val float64) {
qc.TotalPutError.Add(val)
}
// SetTotalPutOversized adds val to the counter of entries skipped for being
// too large.
func (qc *QueryCacheExporter) SetTotalPutOversized(val float64) {
qc.TotalPutOversized.Add(val)
}
package cloudwatcher
import (
"gonum.org/v1/gonum/floats" // Float math tools.
"github.com/aws/aws-sdk-go-v2/aws"
"github.com/aws/aws-sdk-go-v2/service/cloudwatch"
"go.uber.org/zap" // Logging
)
// AggregationData represents a container for some data series
// about Elasticsearch nodes, that can be converted
// into a CloudWatch Metrics data point.
type AggregationData interface {
// AddSample adds the given node to the data series.
// Implementations in this file skip nodes for which their selector
// function returns nil.
AddSample(*NodeStats)
// Datum returns the aggregated data as a CloudWatch
// Metrics data point, or nil when no samples were collected.
Datum() *cloudwatch.MetricDatum
}
// SelectorFn returns some aspect of a NodeStats to
// be aggregated. If the node shouldn't count towards
// a data set, the selector function should return nil.
type SelectorFn func(*NodeStats) *float64
// StatsData aggregates count, min, max, and sum of the samples.
type StatsData struct {
Template cloudwatch.MetricDatum // copied and filled in by Datum
Selector SelectorFn // picks the value to aggregate from each node
data []float64 // collected samples
}
var _ AggregationData = (*StatsData)(nil) // Assert StatsData implements the AggregationData interface.
// AddSample records the selected aspect of the node; nodes for which the
// selector returns nil are skipped.
func (d *StatsData) AddSample(ns *NodeStats) {
if f := d.Selector(ns); f != nil {
d.data = append(d.data, *f)
}
}
// Datum returns the aggregated statistics (count/min/max/sum) as a CloudWatch
// data point based on the Template, or nil when no samples were collected.
//
// Fixes: the inner `if len(d.data) != 0` check was dead code — it was always
// true after the early return above.
func (d *StatsData) Datum() *cloudwatch.MetricDatum {
	if len(d.data) == 0 {
		return nil
	}
	m := d.Template // copy so the template stays pristine
	m.StatisticValues = &cloudwatch.StatisticSet{
		SampleCount: aws.Float64(float64(len(d.data))),
		Minimum:     aws.Float64(floats.Min(d.data)),
		Maximum:     aws.Float64(floats.Max(d.data)),
		Sum:         aws.Float64(floats.Sum(d.data)),
	}
	return &m
}
// SumData aggregates a simple sum of samples.
type SumData struct {
Template cloudwatch.MetricDatum // copied and filled in by Datum
Selector SelectorFn // picks the value to aggregate from each node
data []float64 // collected samples
}
var _ AggregationData = (*SumData)(nil) // Assert SumData implements the AggregationData interface.
// AddSample records the selected aspect of the node; nodes for which the
// selector returns nil are skipped.
func (d *SumData) AddSample(ns *NodeStats) {
if f := d.Selector(ns); f != nil {
d.data = append(d.data, *f)
}
}
// Datum returns the sum of all samples as a CloudWatch data point based on
// the Template, or nil when no samples were collected.
//
// Fixes: the inner `if len(d.data) != 0` check was dead code — it was always
// true after the early return above.
func (d *SumData) Datum() *cloudwatch.MetricDatum {
	if len(d.data) == 0 {
		return nil
	}
	m := d.Template // copy so the template stays pristine
	m.Value = aws.Float64(floats.Sum(d.data))
	return &m
}
// UtilizationData aggregates a percentage representing the
// utilization of some resource based on numerator and denominator.
// For example, disk utilization == bytes used / bytes total.
type UtilizationData struct {
Template cloudwatch.MetricDatum // copied and filled in by Datum
Numerator SelectorFn // e.g. bytes used
Denominator SelectorFn // e.g. bytes total
num []float64 // collected numerator samples
denom []float64 // collected denominator samples (kept in lockstep with num)
}
var _ AggregationData = (*UtilizationData)(nil) // Assert UtilizationData implements the AggregationData interface.
// AddSample records the node's numerator/denominator pair; the node is
// skipped unless both selectors return non-nil (keeping num and denom in
// lockstep).
func (d *UtilizationData) AddSample(ns *NodeStats) {
num, denom := d.Numerator(ns), d.Denominator(ns)
if num != nil && denom != nil {
d.num = append(d.num, *num)
d.denom = append(d.denom, *denom)
}
}
// Datum returns the utilization percentage (sum(num)/sum(denom) * 100) as a
// CloudWatch data point based on the Template, or nil when no samples were
// collected.
//
// NOTE(review): a zero denominator panics via zap.L().Panic, crashing the
// process on e.g. a resource reported with zero capacity — consider whether
// skipping the datum (returning nil) would be safer; confirm with the owners.
func (d *UtilizationData) Datum() *cloudwatch.MetricDatum {
if len(d.num) == 0 {
return nil
}
m := d.Template
num, denom := floats.Sum(d.num), floats.Sum(d.denom)
if denom != 0 {
m.Value = aws.Float64((num / denom) * 100) // CloudWatch percents are int 0-100.
} else {
zap.L().Panic("Denominator is zero: cannot calculate UtilizationData Value")
}
return &m
}
package value
import (
"fmt"
"time"
)
// From creates a new Value variable from a given base value.
// Both plain values and pointers to them are accepted; the input is always
// copied, so later mutation of the caller's variable cannot affect the
// returned Value. It panics when given an unsupported type, and (like the
// previous implementation) when given a nil pointer.
//
// Cleanup: the repeated `p := new(T); *p = x; return BindT(p)` boilerplate
// was replaced by taking the address of the (already copied) case variable,
// which has identical semantics.
func From(v interface{}) (ret Value) {
	switch x := v.(type) {
	case bool:
		return BindBool(&x)
	case *bool:
		c := *x
		return BindBool(&c)
	case byte:
		return BindByte(&x)
	case *byte:
		c := *x
		return BindByte(&c)
	case complex64:
		return BindComplex64(&x)
	case *complex64:
		c := *x
		return BindComplex64(&c)
	case complex128:
		return BindComplex128(&x)
	case *complex128:
		c := *x
		return BindComplex128(&c)
	case float32:
		return BindFloat32(&x)
	case *float32:
		c := *x
		return BindFloat32(&c)
	case float64:
		return BindFloat64(&x)
	case *float64:
		c := *x
		return BindFloat64(&c)
	case int:
		return BindInt(&x)
	case *int:
		c := *x
		return BindInt(&c)
	case int8:
		return BindInt8(&x)
	case *int8:
		c := *x
		return BindInt8(&c)
	case int16:
		return BindInt16(&x)
	case *int16:
		c := *x
		return BindInt16(&c)
	case int32:
		return BindInt32(&x)
	case *int32:
		c := *x
		return BindInt32(&c)
	case int64:
		return BindInt64(&x)
	case *int64:
		c := *x
		return BindInt64(&c)
	case string:
		return BindString(&x)
	case *string:
		c := *x
		return BindString(&c)
	case time.Duration:
		return BindDuration(&x)
	case *time.Duration:
		c := *x
		return BindDuration(&c)
	case time.Time:
		return BindTime(&x)
	case *time.Time:
		c := *x
		return BindTime(&c)
	case uint:
		return BindUint(&x)
	case *uint:
		c := *x
		return BindUint(&c)
	case uint16:
		return BindUint16(&x)
	case *uint16:
		c := *x
		return BindUint16(&c)
	case uint32:
		return BindUint32(&x)
	case *uint32:
		c := *x
		return BindUint32(&c)
	case uint64:
		return BindUint64(&x)
	case *uint64:
		c := *x
		return BindUint64(&c)
	default:
		err := fmt.Errorf("cannot convert object type %T to a Value", x)
		panic(err)
	}
}
// Bind binds a base value to a Value variable. The argument must be a
// pointer to one of the supported primitive types; unlike From, the pointee
// is NOT copied, so later changes through the pointer remain visible.
// It panics when given a non-pointer or an unsupported pointer type.
func Bind(v interface{}) (ret Value) {
switch x := v.(type) {
case *bool:
return BindBool(x)
case *byte:
return BindByte(x)
case *complex64:
return BindComplex64(x)
case *complex128:
return BindComplex128(x)
case *float32:
return BindFloat32(x)
case *float64:
return BindFloat64(x)
case *int:
return BindInt(x)
case *int8:
return BindInt8(x)
case *int16:
return BindInt16(x)
case *int32:
return BindInt32(x)
case *int64:
return BindInt64(x)
case *string:
return BindString(x)
case *time.Duration:
return BindDuration(x)
case *time.Time:
return BindTime(x)
case *uint:
return BindUint(x)
case *uint16:
return BindUint16(x)
case *uint32:
return BindUint32(x)
case *uint64:
return BindUint64(x)
default:
err := fmt.Errorf("cannot bind object type %T to a Value (is it a pointer?)", x)
panic(err)
}
}
package rfc5424
import (
"fmt"
"strings"
)
// StructuredData holds the structured data of a log record, if any.
// An empty collection renders as "-", the RFC 5424 NILVALUE.
type StructuredData []StructuredDataElement
// String returns the RFC 5424 representation of the structured data:
// every element rendered back-to-back, or "-" when there are none.
func (sd StructuredData) String() string {
	if len(sd) == 0 {
		return "-"
	}
	var b strings.Builder
	for _, elem := range sd {
		b.WriteString(structuredDataElementString(elem))
	}
	return b.String()
}
// Validate ensures that the structured data is correct.
// It returns the first element error found, annotated with the element index.
func (sd StructuredData) Validate() error {
for i, elem := range sd {
if err := structuredDataElementValidate(elem); err != nil {
return fmt.Errorf("element %d not valid: %v", i, err)
}
}
return nil
}
// StructuredDataElement, AKA "SD-ELEMENT", provides the functionality
// that StructuredData needs from each of its elements.
type StructuredDataElement interface {
// ID returns the "SD-ID" for the element.
ID() StructuredDataName
// Params returns all the elements items (if any), in order.
Params() []StructuredDataParam
// Validate ensures that the element is correct.
Validate() error
}
// structuredDataElementString renders one SD-ELEMENT as "[id]" when it has
// no params, or "[id p1 p2 ...]" with space-separated params otherwise.
func structuredDataElementString(sde StructuredDataElement) string {
	var b strings.Builder
	b.WriteByte('[')
	b.WriteString(string(sde.ID()))
	for _, param := range sde.Params() {
		b.WriteByte(' ')
		b.WriteString(param.String())
	}
	b.WriteByte(']')
	return b.String()
}
// structuredDataElementValidate checks one SD-ELEMENT: the element's own
// Validate hook first, then its ID (non-empty and well-formed), then each of
// its params in order, returning the first failure annotated with context.
func structuredDataElementValidate(sde StructuredDataElement) error {
if err := sde.Validate(); err != nil {
return err
}
id := sde.ID()
if id == "" {
return fmt.Errorf("empty ID")
}
if err := id.Validate(); err != nil {
return fmt.Errorf("invalid ID %q: %v", id, err)
}
for i, param := range sde.Params() {
if err := param.Validate(); err != nil {
return fmt.Errorf("param %d not valid: %v", i, err)
}
}
return nil
}
// StructuredDataName is a single name used in an element or its params
// (the RFC 5424 "SD-NAME").
type StructuredDataName string
// Validate ensures that the name is correct: non-empty, free of the
// characters '=', ' ', ']' and '"', and printable US-ASCII
// (the 32 presumably being the RFC 5424 SD-NAME length limit — confirm
// against validatePrintUSASCII's signature).
func (sdn StructuredDataName) Validate() error {
if sdn == "" {
return fmt.Errorf("empty name")
}
if strings.ContainsAny(string(sdn), `= ]"`) {
return fmt.Errorf(`invalid character`)
}
return validatePrintUSASCII(string(sdn), 32)
}
// StructuredDataParam, AKA "SD-PARAM", is a single item in an element's list.
type StructuredDataParam struct {
// Name identifies the item relative to an element. Note that an
// element may have more than one item with the same name.
Name StructuredDataName
// Value is the value associated with the item.
Value StructuredDataParamValue
}
// String returns the RFC 5424 representation of the item as name="value".
// Because StructuredDataParamValue implements Stringer, %q quotes the
// already-escaped output of Value.String().
func (sdp StructuredDataParam) String() string {
return fmt.Sprintf("%s=%q", sdp.Name, sdp.Value)
}
// Validate ensures that the item is correct: a non-empty, well-formed Name
// and a valid Value.
func (sdp StructuredDataParam) Validate() error {
if sdp.Name == "" {
return fmt.Errorf("empty Name")
}
if err := sdp.Name.Validate(); err != nil {
return fmt.Errorf("bad Name %q: %v", sdp.Name, err)
}
if err := sdp.Value.Validate(); err != nil {
return fmt.Errorf("bad Value for %q (%s): %v", sdp.Name, sdp.Value, err)
}
return nil
}
// StructuredDataParamValue is the value of a single element item,
// encoded as UTF-8 text (RFC 3629).
type StructuredDataParamValue string

// String returns the RFC 5424 representation of the value: each occurrence
// of \, ", or ] is prefixed with a backslash, in a single pass.
func (sdv StructuredDataParamValue) String() string {
	var b strings.Builder
	for _, r := range string(sdv) {
		switch r {
		case '\\', '"', ']':
			b.WriteByte('\\')
		}
		b.WriteRune(r)
	}
	return b.String()
}
// Validate ensures that the value is correct, i.e. well-formed UTF-8.
func (sdv StructuredDataParamValue) Validate() error {
return validateUTF8(string(sdv))
}
package velocypack
import (
"reflect"
"time"
)
// Value is a helper structure used to build VPack structures.
// It holds a single data value with a specific type.
type Value struct {
vt ValueType // wire type the value will be encoded as
data interface{} // Go payload; concrete type depends on vt
unindexed bool // for Array/Object: set via NewArrayValue/NewObjectValue
}
// NewValue creates a new Value with type derived from the Go type of the
// given value (via reflection; see NewReflectValue).
// If the given value is not a supported type, a Value of type Illegal is returned.
func NewValue(value interface{}) Value {
v := reflect.ValueOf(value)
return NewReflectValue(v)
}
// NewReflectValue creates a new Value with type derived from the Go type of
// the given reflect value. Simple kinds (bool, the integer/float families,
// string) map directly; everything else falls through to a dynamic-type check
// for []byte, Slice, time.Time and Value itself.
// If the given value is not a supported type, a Value of type Illegal is returned.
//
// Fixes: removed the dead `case reflect.Slice: if vt.Elem().Kind() ==
// reflect.Uint8 { }` branch, which had an empty body and no effect.
func NewReflectValue(v reflect.Value) Value {
	switch v.Kind() {
	case reflect.Bool:
		return NewBoolValue(v.Bool())
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return NewIntValue(v.Int())
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
		return NewUIntValue(v.Uint())
	case reflect.Float32, reflect.Float64:
		return NewDoubleValue(v.Float())
	case reflect.String:
		return NewStringValue(v.String())
	}
	// Not a simple kind: inspect the dynamic type, in the same order as the
	// original sequential type assertions.
	if v.CanInterface() {
		switch x := v.Interface().(type) {
		case []byte:
			return NewBinaryValue(x)
		case Slice:
			return NewSliceValue(x)
		case time.Time:
			return NewUTCDateValue(x)
		case Value:
			return x
		}
	}
	return Value{Illegal, nil, false}
}
// NewBoolValue creates a new Value of type Bool with given value.
func NewBoolValue(value bool) Value {
return Value{Bool, value, false}
}
// NewIntValue creates a new Value of type Int with given value.
// Values in the range [-6, 9] are stored as the compact SmallInt type instead.
func NewIntValue(value int64) Value {
if value >= -6 && value <= 9 {
return Value{SmallInt, value, false}
}
return Value{Int, value, false}
}
// NewUIntValue creates a new Value of type UInt with given value.
func NewUIntValue(value uint64) Value {
return Value{UInt, value, false}
}
// NewDoubleValue creates a new Value of type Double with given value.
func NewDoubleValue(value float64) Value {
return Value{Double, value, false}
}
// NewStringValue creates a new Value of type String with given value.
func NewStringValue(value string) Value {
return Value{String, value, false}
}
// NewBinaryValue creates a new Value of type Binary with given value.
// The byte slice is stored by reference, not copied.
func NewBinaryValue(value []byte) Value {
return Value{Binary, value, false}
}
// NewUTCDateValue creates a new Value of type UTCDate with given value.
func NewUTCDateValue(value time.Time) Value {
return Value{UTCDate, value, false}
}
// NewSliceValue creates a new Value from the given slice, taking its
// ValueType from the slice itself.
func NewSliceValue(value Slice) Value {
return Value{value.Type(), value, false}
}
// NewObjectValue creates a new Value that opens a new object.
// The optional unindexed flag defaults to false when omitted.
func NewObjectValue(unindexed ...bool) Value {
return Value{Object, nil, optionalBool(unindexed, false)}
}
// NewArrayValue creates a new Value that opens a new array.
// The optional unindexed flag defaults to false when omitted.
func NewArrayValue(unindexed ...bool) Value {
return Value{Array, nil, optionalBool(unindexed, false)}
}
// NewNullValue creates a new Value of type Null.
func NewNullValue() Value {
return Value{Null, nil, false}
}
// NewMinKeyValue creates a new Value of type MinKey.
func NewMinKeyValue() Value {
return Value{MinKey, nil, false}
}
// NewMaxKeyValue creates a new Value of type MaxKey.
func NewMaxKeyValue() Value {
return Value{MaxKey, nil, false}
}
// Type returns the ValueType of this value.
func (v Value) Type() ValueType {
return v.vt
}
// IsSlice returns true when the value already contains a pre-built Slice.
func (v Value) IsSlice() bool {
_, ok := v.data.(Slice)
return ok
}
// IsIllegal returns true if the type of value is Illegal.
func (v Value) IsIllegal() bool {
return v.vt == Illegal
}
// boolValue returns the payload as a bool; the type assertion panics if the
// payload has a different dynamic type.
func (v Value) boolValue() bool {
return v.data.(bool)
}
// intValue returns the payload as an int64; panics on a type mismatch.
func (v Value) intValue() int64 {
return v.data.(int64)
}
// uintValue returns the payload as a uint64; panics on a type mismatch.
func (v Value) uintValue() uint64 {
return v.data.(uint64)
}
// doubleValue returns the payload as a float64; panics on a type mismatch.
func (v Value) doubleValue() float64 {
return v.data.(float64)
}
// stringValue returns the payload as a string; panics on a type mismatch.
func (v Value) stringValue() string {
return v.data.(string)
}
// binaryValue returns the payload as a byte slice; panics on a type mismatch.
func (v Value) binaryValue() []byte {
return v.data.([]byte)
}
// utcDateValue returns the time.Time payload as milliseconds since the Unix
// epoch; the type assertion panics if the payload is not a time.Time.
//
// Fixes: the local variable was named "time", shadowing the time package.
func (v Value) utcDateValue() int64 {
	t := v.data.(time.Time)
	sec := t.Unix()
	nsec := int64(t.Nanosecond())
	return sec*1000 + nsec/1000000
}
// sliceValue returns the payload as a Slice; panics on a type mismatch.
func (v Value) sliceValue() Slice {
return v.data.(Slice)
}
package rule
import (
"regexp"
"github.com/insidersec/insider/engine"
)
var AndroidRules []engine.Rule = []engine.Rule{
Rule{
And: []*regexp.Regexp{regexp.MustCompile(`\.crypto\.Cipher`), regexp.MustCompile(`Cipher\.getInstance\(\s*"RSA/.+/NoPadding`)}, NotAnd: []*regexp.Regexp{regexp.MustCompile(`\.crypto\.Cipher`)},
CWE: "CWE-780",
AverageCVSS: 5.9,
Description: "This application uses RSA encryption without OAEP (Optimal Asymmetric Encryption Padding), OAEP has been standardized as PKCS # 1 v2 and RFC 2437. The padding scheme makes the operation 'semantically secure' and prevents some types of attacks, which would use the lack of padding as an attack vector.",
Recomendation: "It is recommended to use RSA in conjunction with OAEP, the RSA-OAEP method makes padding attacks much more complex and often unviable.",
},
Rule{
And: []*regexp.Regexp{regexp.MustCompile(`\.crypto\.Cipher`), regexp.MustCompile(`Cipher\.getInstance\(`), regexp.MustCompile(`"DES"`)}, NotAnd: []*regexp.Regexp{regexp.MustCompile(`\.crypto\.Cipher`)},
CWE: "CWE-327",
AverageCVSS: 5.9,
Description: "DES (Data Encryption Standard) is a symmetric key cryptographic algorithm. Its 56-bit key makes it insecure for modern applications, it was developed in 1970, approved as a standard in 1976 and in 1977 the first vulnerability was discovered. Today it can be broken in about 2 days with a modern graphics card.",
Recomendation: "Whenever possible, the use of DES encryption should be avoided, the recommended encryption is AES (Advanced Encryption Standard) with 256 bits, which has been approved by the American security agency (NSA) for encrypting top secret information.",
},
Rule{
Or: []*regexp.Regexp{regexp.MustCompile(`MODE_WORLD_WRITABLE|Context\.MODE_WORLD_WRITABLE`), regexp.MustCompile(`openFileOutput\(\s*".+"\s*,\s*2\s*\)`)},
CWE: "CWE-276",
AverageCVSS: 6,
Description: "The file is 'World Readable'. Any application can read the file.",
Recomendation: "According to official Google documentation, MODE_WORLD_WRITABLE mode is deprecated. It is recommended to use MODE_PRIVATE.",
},
Rule{
ExactMatch: regexp.MustCompile(`openFileOutput\(\s*".+"\s*,\s*3\s*\)`),
CWE: "CWE-276",
AverageCVSS: 6,
Description: "The file is 'World Readable' and 'World Writable'. Any application can read the file.",
Recomendation: "According to official Google documentation, both MODE_WORLD_WRITABLE and MODE_WORLD_READABLE modes are depreciated. It is recommended to use MODE_PRIVATE.",
},
Rule{
ExactMatch: regexp.MustCompile(`MessageDigest\.getInstance\("*MD5"*\)|MessageDigest\.getInstance\("*md5"*\)|DigestUtils\.md5\(`),
CWE: "CWE-327",
AverageCVSS: 7.4,
Description: "MD5 is a hash algorithm considered weak and can return the same result for two different contents, which can cause collisions and in extreme cases it can cause a security breach. https://en.wikipedia.org/wiki/Collision_resistance",
Recomendation: "It is recommended to use some CHF (Cryptographic Hash Function), which is mathematically strong and not reversible. SHA512 would be the most recommended hash for storing the password and it is also important to adopt some type of Salt, so that the Hash is more secure.",
},
Rule{
ExactMatch: regexp.MustCompile(`getInstance("md4")|getInstance("rc2")|getInstance("rc4")|getInstance("RC4")|getInstance("RC2")|getInstance("MD4")`),
CWE: "CWE-327",
AverageCVSS: 7.4,
Description: "MD4 is a hash algorithm considered weak and can return the same result for two different contents, which can cause collisions and in extreme cases it can cause a security breach. https://en.wikipedia.org/wiki/Collision_resistance",
Recomendation: "It is recommended to use some CHF (Cryptographic Hash Function), which is mathematically strong and not reversible. SHA512 would be the most recommended hash for storing the password and it is also important to adopt some type of Salt, so that the Hash is more secure.",
},
Rule{
Or: []*regexp.Regexp{regexp.MustCompile(`.getExternalStorage`), regexp.MustCompile(`.getExternalFilesDir\(`)},
CWE: "CWE-276",
AverageCVSS: 5.5,
Description: "The application can read / write to external storage. External storage files can be modified by any application.",
Recomendation: "",
},
Rule{
ExactMatch: regexp.MustCompile(`\.createTempFile\(`),
CWE: "CWE-276",
AverageCVSS: 5.5,
Description: "The application creates a temporary file. Sensitive information should not be stored in temporary files.",
Recomendation: "",
},
Rule{
Or: []*regexp.Regexp{regexp.MustCompile(`0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00`), regexp.MustCompile(`0x01,0x02,0x03,0x04,0x05,0x06,0x07`)},
CWE: "CWE-329",
AverageCVSS: 9.8,
Description: "The Application uses weak Initialization Vectors (weak IVs in encryption), such as '0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 ' or '0x01,0x02,0x03,0x04,0x05,0x06 , 0x07 '. Failure to use random IVs makes the application vulnerable to dictionary attacks.",
Recomendation: "",
},
Rule{
And: []*regexp.Regexp{regexp.MustCompile(`onReceivedSslError\(WebView`), regexp.MustCompile(`\.proceed\(\);`), regexp.MustCompile(`webkit\.WebView`)}, NotAnd: []*regexp.Regexp{regexp.MustCompile(`webkit\.WebView`)},
CWE: "CWE-295",
AverageCVSS: 7.4,
Description: "Insecure WebView implementation. WebView ignores SSL Certificate errors and accepts SSL. This application is vulnerable to MITM attacks.",
Recomendation: "",
},
Rule{
And: []*regexp.Regexp{regexp.MustCompile(`android\.database\.sqlite`), regexp.MustCompile(`execSQL\(|rawQuery\(`)}, NotAnd: []*regexp.Regexp{regexp.MustCompile(`android\.database\.sqlite`)},
CWE: "CWE-89",
AverageCVSS: 5.9,
Description: "User input without validation can cause SQL Injection. All user input must be sanitized before performing the operation on the database.",
Recomendation: "Always validate user inputs before the server executes the query and reject requests that contain characters that are not strictly necessary.",
},
Rule{
ExactMatch: regexp.MustCompile(`SQLiteOpenHelper\.getWritableDatabase\(`),
CWE: "CWE-312",
AverageCVSS: 1,
Description: "The application uses SQL Cipher, but the key may be contained in the source code (hardcoded).",
Recomendation: "",
},
Rule{
And: []*regexp.Regexp{regexp.MustCompile(`android\.app\.DownloadManager`), regexp.MustCompile(`getSystemService\(DOWNLOAD_SERVICE\)`)}, NotAnd: []*regexp.Regexp{regexp.MustCompile(`android\.app\.DownloadManager`)},
CWE: "CWE-494",
AverageCVSS: 7.5,
Description: "The application downloads files using the Android Download Manager.",
Recomendation: "",
},
Rule{
And: []*regexp.Regexp{regexp.MustCompile(`\.setWebContentsDebuggingEnabled\(true\)`), regexp.MustCompile(`webkit\.WebView`)}, NotAnd: []*regexp.Regexp{regexp.MustCompile(`webkit\.WebView`)},
CWE: "CWE-215",
AverageCVSS: 5.4,
Description: "Remote WebView debugging is enabled.",
Recomendation: "",
},
Rule{
And: []*regexp.Regexp{regexp.MustCompile(`webkit\.WebView`), regexp.MustCompile(`(setJavaScriptEnabled\(true\))|(.addJavascriptInterface\()`)}, NotAnd: []*regexp.Regexp{regexp.MustCompile(`webkit\.WebView`)},
CWE: "CWE-749",
AverageCVSS: 8.8,
Description: "Insecure WebView implementation. User-controlled code execution is a security hole.",
Recomendation: "",
},
Rule{
Or: []*regexp.Regexp{regexp.MustCompile(`content\.ClipboardManager`), regexp.MustCompile(`CLIPBOARD_SERVICE`), regexp.MustCompile(`ClipboardManager`)},
CWE: "CWE-200",
AverageCVSS: 5,
Description: "The classes in this file write or read data on the Clipboard. The transfer area is shared between all apps so attention is needed to data that is placed in this resource.",
Recomendation: "",
},
Rule{
Or: []*regexp.Regexp{regexp.MustCompile(`net\.JarURLConnection`), regexp.MustCompile(`JarURLConnection`), regexp.MustCompile(`jar:((?:http|https)://(?:[\w_-]+(?:(?:\.[\w_-]+)+))(?:[\w.,@?^=%&:/~+#-]*[\w@?^=%&/~+#-])?)`)},
CWE: "CWE-611",
AverageCVSS: 5,
Description: "This code uses jar url, this functionality could generate an XML vulnerability External Entities (XXE), XXE is listed as fourth place (A4) in the Top 10 OWASP 2017",
Recomendation: "It is recommended to avoid using url jar when possible, there are other safer methods that can be used to consult the jar file.",
},
Rule{
And: []*regexp.Regexp{regexp.MustCompile(`\.crypto\.Cipher`), regexp.MustCompile(`Cipher\.getInstance\(`), regexp.MustCompile(`"AES"`)}, NotAnd: []*regexp.Regexp{regexp.MustCompile(`\.crypto\.Cipher`)},
CWE: "CWE-326",
AverageCVSS: 5,
Description: "DES is a hash algorithm that is considered weak and can cause hash collisions.",
Recomendation: "It is always recommended to use some CHF (Cryptographic Hash Function), which is mathematically strong and not reversible. SHA512 would be the most recommended hash for storing the password and it is also important to adopt some type of Salt, so that the Hash is more secure.",
},
Rule{
And: []*regexp.Regexp{regexp.MustCompile(`\.net\.ssl`), regexp.MustCompile(`TrustAllSSLSocket-Factory|AllTrustSSLSocketFactory|NonValidatingSSLSocketFactory|net\.SSLCertificateSocketFactory|ALLOW_ALL_HOSTNAME_VERIFIER|\.setDefaultHostnameVerifier\(|NullHostnameVerifier\(`)}, NotAnd: []*regexp.Regexp{regexp.MustCompile(`\.net\.ssl`)},
CWE: "CWE-295",
AverageCVSS: 7.4,
Description: "Insecure implementation of SSL. Trusting any certificate or accepting self-signed certificates can cause a serious security breach, making the application vulnerable to MITM (Man In The Middle) attacks.",
Recomendation: "",
},
Rule{
And: []*regexp.Regexp{regexp.MustCompile(`\.loadUrl\(`), regexp.MustCompile(`webkit\.WebView`)}, NotAnd: []*regexp.Regexp{regexp.MustCompile(`webkit\.WebView`)},
CWE: "CWE-919",
AverageCVSS: 5,
Description: "The WebView loads files from external storage. External storage files can be modified by any application.",
Recomendation: "",
},
Rule{
Or: []*regexp.Regexp{regexp.MustCompile(`MODE_WORLD_READABLE|Context\.MODE_WORLD_READABLE`), regexp.MustCompile(`openFileOutput\(\s*".+"\s*,\s*1\s*\)`)},
CWE: "CWE-276",
AverageCVSS: 4,
Description: "The file is 'World Readable'. Any application can read the file.",
Recomendation: "According to official Google documentation, MODE_WORLD_READABLE mode is deprecated. It is recommended to use MODE_PRIVATE. In case your application needs to share private files with other applications, you must use a FileProvider with the attribute FLAG_GRANT_READ_URI_PERMISSION.",
},
Rule{
And: []*regexp.Regexp{regexp.MustCompile(`\.crypto\.Cipher`), regexp.MustCompile(`Cipher\.getInstance\(`), regexp.MustCompile(`"ECB"`)}, NotAnd: []*regexp.Regexp{regexp.MustCompile(`\.crypto\.Cipher`)},
CWE: "CWE-327",
AverageCVSS: 5.9,
Description: "The application uses ECB as the 'block mode' of the encryption algorithm. ECB is considered a weak cryptographic algorithm, as it results in the same cipher for identical blocks of plain text.",
Recomendation: "Whenever possible, avoid using the ECB mode, as it is predictable and can be broken with attacks such as frequency analysis. We recommend the use of Authenticated Encription (AE) and Authenticated Encryption with Associated Data (AEAD), which ensure both the confidentiality and authenticity of the data. Some recommended modes are GCM, EAX and OCB, CBC mode can also be used in combination with HMAC message authentication.",
},
Rule{
ExactMatch: regexp.MustCompile(`Runtime\.getRuntime\(`),
CWE: "CWE-78",
AverageCVSS: 9,
Description: "The application executes Commands directly on the Operating System. If using any user input, it must be sanitized to the maximum, cleaning any unnecessary characters. In general, it is recommended to never use calls to native commands, being recommended the JNI (Java Native Interface) for such low level operations.",
Recomendation: "",
},
Rule{
ExactMatch: regexp.MustCompile(`setVisibility\(View\.GONE\)|setVisibility\(View\.INVISIBLE\)`),
CWE: "CWE-919",
AverageCVSS: 4.3,
Description: "Invisible elements in the view can be used to hide data from the user, but can still be leaked.",
Recomendation: "",
},
Rule{
And: []*regexp.Regexp{regexp.MustCompile(`dalvik\.system\.PathClassLoader|dalvik\.system\.DexFile|dalvik\.system\.DexPathList|dalvik\.system\.DexClassLoader|java\.security\.ClassLoader|java\.net\.URLClassLoader|java\.security\.SecureClassLoader`), regexp.MustCompile(`loadDex|loadClass|DexClassLoader|loadDexFile`)}, NotAnd: []*regexp.Regexp{regexp.MustCompile(`dalvik\.system\.PathClassLoader|dalvik\.system\.DexFile|dalvik\.system\.DexPathList|dalvik\.system\.DexClassLoader|java\.security\.ClassLoader|java\.net\.URLClassLoader|java\.security\.SecureClassLoader`)},
CWE: "CWE-695",
AverageCVSS: 4,
Description: "The application loads and / or manipulates Dex files (Dexloading and dynamic classes). ",
Recomendation: "It is not recommended to use APIs with low level of manipulation as this can facilitate the injection of code within the application. If the intention is to obfuscate the code, it is always important to take care of your obfuscator settings so as not to allow the loading of static files, which can easily be replaced by a forged file.",
},
Rule{
And: []*regexp.Regexp{regexp.MustCompile(`com\.fasterxml\.jackson\.databind\.ObjectMapper`), regexp.MustCompile(`\.enableDefaultTyping\(`)}, NotAnd: []*regexp.Regexp{regexp.MustCompile(`com\.fasterxml\.jackson\.databind\.ObjectMapper`)},
CWE: "CWE-502",
AverageCVSS: 7.5,
Description: "The app uses jackson deserialization libraryDeserialization of untrusted input can result inarbitary code execution.",
Recomendation: "",
},
Rule{
ExactMatch: regexp.MustCompile(`MODE_PRIVATE|Context\.MODE_PRIVATE`),
CWE: "CWE-919",
AverageCVSS: 5,
Description: "App can write to App Directory. Sensitive Information should be encrypted.",
Recomendation: "",
},
Rule{
Or: []*regexp.Regexp{regexp.MustCompile(`com\.noshufou\.android\.su`), regexp.MustCompile(`com\.thirdparty\.superuser`), regexp.MustCompile(`eu\.chainfire\.supersu`), regexp.MustCompile(`com\.koushikdutta\.superuser`), regexp.MustCompile(`eu\.chainfire\.`)},
CWE: "CWE-250",
AverageCVSS: 7.5,
Description: "This application has packages to access root privileges (Super User). A Super User on the user's device can do absolutely anything, making the application very powerful and possibly facilitating malicious actions. etc.",
Recomendation: "",
},
Rule{
And: []*regexp.Regexp{regexp.MustCompile(`WebView`), regexp.MustCompile(`loadData\(`), regexp.MustCompile(`android\.webkit`)},
NotOr: []*regexp.Regexp{regexp.MustCompile(`WebView`), regexp.MustCompile(`android\.webkit`)},
CWE: "CWE-749",
AverageCVSS: 8,
Description: "WebView request via GET. The Android WebViews API is very sensitive because it allows resources coming from the network to access data only available in the context of the application, making it easier for an attacker to execute Remote Code Execution, it is always necessary to ensure that the content sources presented in WebViews are encrypted and protected and user inputs are always sanitized.",
Recomendation: "",
},
Rule{
ExactMatch: regexp.MustCompile(`Log\.(v|d|i|w|e|f|s)|System\.out\.print|System\.err\.print`),
CWE: "CWE-532",
AverageCVSS: 3.2,
Description: "The App logs information. Sensitive information should not be logged.",
Recomendation: "",
},
Rule{
And: []*regexp.Regexp{regexp.MustCompile(`java\.util\.Random`), regexp.MustCompile(`Random\(\)`)}, NotAnd: []*regexp.Regexp{regexp.MustCompile(`java\.util\.Random`)},
CWE: "CWE-330",
AverageCVSS: 1,
Description: "The application uses a predictable, therefore insecure, random number generator.",
Recomendation: "Instances of java.util.Random are not cryptographically secure. Consider instead using SecureRandom to get a cryptographically secure pseudo-random number generator for use by security-sensitive applications. https://docs.oracle.com/javase/8/docs/api/java/util/Random.html",
},
Rule{
ExactMatch: regexp.MustCompile(`Random\..*\(.*\)|Random\(.*\)|\(.*\).random\(\)`),
CWE: "CWE-330",
AverageCVSS: 1,
Description: "The application uses a predictable, therefore insecure, random number generator.",
Recomendation: "Instances of java.util.Random are not cryptographically secure. Consider instead using SecureRandom to get a cryptographically secure pseudo-random number generator for use by security-sensitive applications. https://docs.oracle.com/javase/8/docs/api/java/util/Random.html",
},
Rule{
And: []*regexp.Regexp{regexp.MustCompile(`\.crypto\.Cipher`), regexp.MustCompile(`Cipher\.getInstance\(`), regexp.MustCompile(`"GCM"`)}, NotAnd: []*regexp.Regexp{regexp.MustCompile(`\.crypto\.Cipher`)},
CWE: "CWE-326",
AverageCVSS: 2,
Description: "The application uses GCM as the 'block mode' of the encryption algorithm. GCM is considered a secure cryptographic algorithm as long as each encrypted block is indistinguishable from a random permutation, otherwise, it would only increase the value of IV (Initialization Vector) by 1, so the keys become predictable, when analyzing a block with a sufficient number of messages encrypted with the same key and / or IV. It may be vulnerable to 'Stream Cipher Attack'.",
Recomendation: "Security depends on choosing a unique initialization vector (IV) for each encryption performed with the same key.",
},
Rule{
ExactMatch: regexp.MustCompile(`MessageDigest\.getInstance\("*SHA-1"*\)|MessageDigest\.getInstance\("*sha-1"*\)|DigestUtils\.sha\(`),
CWE: "CWE-327",
AverageCVSS: 5.9,
Description: "SHA-1 is a hash algorithm considered weak and can return the same result for two different contents, which can cause collisions and in extreme cases it can cause a security breach. https://en.wikipedia.org/wiki/Collision_resistance",
Recomendation: "It is recommended to use some CHF (Cryptographic Hash Function), which is mathematically strong and not reversible. SHA512 would be the most recommended hash for storing the password and it is also important to adopt some type of Salt, so that the Hash is more secure.",
},
} | rule/android.go | 0.604749 | 0.456834 | android.go | starcoder |
package graph
import (
"fmt"
)
const errInvaliVertexNumber = "graph: invalid vertex number %v"
// UndirectedGraph represents an undirected graph backed by adjacency lists.
type UndirectedGraph struct {
	bags [][]*vertex // bags[v] holds one entry per edge incident to vertex v
}
// vertex is an adjacency-list entry (one per incident edge), not a
// standalone graph node.
type vertex struct {
	i         int  // index of the neighboring vertex
	isVisited bool // traversal flag used by pathTo; note it is per entry, not per vertex
}
// New constructs an undirected graph with v vertices, numbered 0 through v-1,
// and no edges.
func New(v int) *UndirectedGraph {
	g := UndirectedGraph{bags: make([][]*vertex, v)}
	return &g
}
// Vertexs returns the number of vertices in the graph.
// (The misspelled name is kept for API compatibility.)
func (g UndirectedGraph) Vertexs() int {
	return len(g.bags)
}
// addEdge adds an undirected edge between vertices v and w.
// It panics if either vertex number is out of range, or if the edge already
// exists (the edge is stored symmetrically in both adjacency lists).
func (g *UndirectedGraph) addEdge(v, w int) {
	g.checkVertexs(v, w)
	// FIX: duplicate edges previously panicked with the "invalid vertex
	// number" message, which misreported the actual problem.
	for _, b := range g.bags[v] {
		if b.i == w {
			panic(fmt.Errorf("graph: edge %v-%v already exists", v, w))
		}
	}
	for _, b := range g.bags[w] {
		if b.i == v {
			panic(fmt.Errorf("graph: edge %v-%v already exists", w, v))
		}
	}
	g.bags[v] = append(g.bags[v], &vertex{i: w})
	g.bags[w] = append(g.bags[w], &vertex{i: v})
}
// GetAdjacentVertexs returns the vertex numbers adjacent to v.
// It panics if v is out of range.
func (g *UndirectedGraph) GetAdjacentVertexs(v int) []int {
	if v < 0 || v >= len(g.bags) {
		panic(fmt.Errorf(errInvaliVertexNumber, v))
	}
	neighbors := make([]int, 0, len(g.bags[v]))
	for _, entry := range g.bags[v] {
		neighbors = append(neighbors, entry.i)
	}
	return neighbors
}
// Degree returns the number of edges incident to vertex v.
// It panics (via GetAdjacentVertexs) if v is out of range.
func (g *UndirectedGraph) Degree(v int) int {
	adjacent := g.GetAdjacentVertexs(v)
	return len(adjacent)
}
// MaxDegree returns the largest degree of any vertex in the graph
// (0 for an empty or edgeless graph).
func (g *UndirectedGraph) MaxDegree() int {
	best := 0
	for _, adjacency := range g.bags {
		if d := len(adjacency); d > best {
			best = d
		}
	}
	return best
}
// PathTo returns a path of vertex numbers from v to w (both inclusive) using
// a depth-first search, and true on success. If v == w the trivial path
// []int{v} is returned. It panics if either vertex is out of range.
//
// FIX: the previous implementation (a) documented breadth-first search while
// actually recursing depth-first, and (b) relied on the vertex structs'
// isVisited flags, which are per-adjacency-entry and never reset, so any
// second call on the same graph returned wrong results. This version keeps a
// per-call visited set indexed by vertex number instead.
func (g *UndirectedGraph) PathTo(v, w int) ([]int, bool) {
	g.checkVertexs(v, w)
	visited := make([]bool, len(g.bags))
	var dfs func(cur int, path []int) ([]int, bool)
	dfs = func(cur int, path []int) ([]int, bool) {
		if cur == w {
			return path, true
		}
		visited[cur] = true
		for _, n := range g.bags[cur] {
			if visited[n.i] {
				continue
			}
			// Safe to share path's backing array: we return up the stack
			// immediately on success, so no later append can clobber it.
			if p, ok := dfs(n.i, append(path, n.i)); ok {
				return p, true
			}
		}
		return nil, false
	}
	return dfs(v, []int{v})
}
// pathTo recursively extends the partial path s from vertex v toward w.
//
// NOTE(review): this helper has two known problems. First, `break` on a
// visited entry abandons ALL remaining neighbors of v instead of skipping
// just that one (`continue` was probably intended). Second, isVisited lives
// on each adjacency-list entry rather than per vertex and is never reset,
// so state leaks between calls on the same graph.
func (g *UndirectedGraph) pathTo(v, w int, s []int) ([]int, bool) {
	for _, n := range g.bags[v] {
		if n.isVisited {
			break
		}
		if n.i == w {
			return append(s, w), true
		}
		n.isVisited = true
		if p, ok := g.pathTo(n.i, w, append(s, n.i)); ok {
			return p, true
		}
	}
	return nil, false
}
// HasPathTo reports whether any path exists from vertex v to w.
// It panics (via PathTo) if either vertex is out of range.
func (g *UndirectedGraph) HasPathTo(v, w int) bool {
	_, found := g.PathTo(v, w)
	return found
}
func (g UndirectedGraph) checkVertexs(v, w int) {
if v < 0 || v >= len(g.bags) {
panic(fmt.Errorf(errInvaliVertexNumber, v))
}
if w < 0 || w >= len(g.bags) {
panic(fmt.Errorf(errInvaliVertexNumber, w))
}
} | graph/undirected_graph.go | 0.822118 | 0.505737 | undirected_graph.go | starcoder |
package mapperz
import (
"github.com/modfin/henry/compare"
"github.com/modfin/henry/exp/numberz"
"github.com/modfin/henry/slicez"
)
// IElm pairs an element with its index in the originating slice.
type IElm[E any] struct {
	Element E
	Index   int
}
// Indexed wraps each element in an IElm struct carrying the element's index
// in the slice.
// eg.
//	slice := []int{1,2,3,4,5}
//	everyOtherIndexed := slicez.Filter(slices.Indexed(slice), func(e slicez.IElm[int]) bool { return e.Index % 2 == 0})
//	everyOther := slicez.Map(everyOtherIndexed, func(e slicez.IElm[int]) int { return e.Element})
//	// [1 3 5]
//
// FIX: the previous implementation fed a closure with a mutable captured
// counter into slicez.Map, which silently depends on Map invoking the
// callback exactly once per element, in order. A plain loop carries no such
// hidden contract.
func Indexed[A any](slice []A) []IElm[A] {
	out := make([]IElm[A], len(slice))
	for i, a := range slice {
		out[i] = IElm[A]{Element: a, Index: i}
	}
	return out
}
// Elements unwraps a slice of IElm back into the bare elements, dropping the
// indices.
func Elements[A any](slice []IElm[A]) []A {
	return slicez.Map(slice, func(a IElm[A]) A {
		return a.Element
	})
}
// Negate returns a new slice in which every element n is replaced by -1*n.
func Negate[N compare.Number](slice []N) []N {
	return slicez.Map(slice, numberz.Negate[N])
}
// Abs returns a new slice in which every element is replaced by its absolute
// value.
func Abs[N compare.Number](slice []N) []N {
	return slicez.Map(slice, numberz.Abs[N])
}
// CastFloat64 returns a new slice with every element cast to float64.
func CastFloat64[N compare.Number](slice []N) []float64 {
	return slicez.Map(slice, numberz.CastFloat64[N])
}
// CastFloat32 returns a new slice with every element cast to float32.
func CastFloat32[N compare.Number](slice []N) []float32 {
	return slicez.Map(slice, numberz.CastFloat32[N])
}
// CastInt returns a new slice with every element cast to int.
func CastInt[N compare.Number](slice []N) []int {
	return slicez.Map(slice, numberz.CastInt[N])
}
// CastInt8 returns a new slice with every element cast to int8.
func CastInt8[N compare.Number](slice []N) []int8 {
	return slicez.Map(slice, numberz.CastInt8[N])
}
// CastInt16 returns a new slice with every element cast to int16.
func CastInt16[N compare.Number](slice []N) []int16 {
	return slicez.Map(slice, numberz.CastInt16[N])
}
// CastInt32 returns a new slice with every element cast to int32.
func CastInt32[N compare.Number](slice []N) []int32 {
	return slicez.Map(slice, numberz.CastInt32[N])
}
// CastInt64 returns a new slice with every element cast to int64.
func CastInt64[N compare.Number](slice []N) []int64 {
	return slicez.Map(slice, numberz.CastInt64[N])
}
// CastUInt returns a new slice with every element cast to uint.
func CastUInt[N compare.Number](slice []N) []uint {
	return slicez.Map(slice, numberz.CastUInt[N])
}
// CastUInt8 returns a new slice with every element cast to uint8.
func CastUInt8[N compare.Number](slice []N) []uint8 {
	return slicez.Map(slice, numberz.CastUInt8[N])
}
// CastUInt16 returns a new slice with every element cast to uint16.
func CastUInt16[N compare.Number](slice []N) []uint16 {
	return slicez.Map(slice, numberz.CastUInt16[N])
}
// CastUInt32 returns a new slice with every element cast to uint32.
func CastUInt32[N compare.Number](slice []N) []uint32 {
	return slicez.Map(slice, numberz.CastUInt32[N])
}
// CastUInt64 returns a new slice with every element cast to uint64.
func CastUInt64[N compare.Number](slice []N) []uint64 {
	return slicez.Map(slice, numberz.CastUInt64[N])
}
// CastByte returns a new slice with every element cast to byte.
func CastByte[N compare.Number](slice []N) []byte {
	return slicez.Map(slice, numberz.CastByte[N])
}
package retry
// Retrier is how a task should be retried.
type Retrier interface {
	// This attempts to retry the function passed into it.
	// It returns nil if no error occurred, or an Errorer if at least one
	// failure occurred. A failure occurs when the function passed as a
	// parameter returns a non-nil error.
	// @param test the function to test for failure. Return an error to indicate a failure, nil to indicate success
	// @return nil if the function (eventually) succeeded, an Errorer containing a reference to the errors if not
	This(test func(controller ServiceController) error) Errorer
}
// Service keeps the state of what to do when retrying things.
type Service interface {
	// ShouldTry returns an indication that another retry should be attempted.
	// Calling this method should NOT be interpreted as though a retry has occurred. Use NotifyRetry for that.
	ShouldTry() bool
	// Yield should trigger some sort of wait state (unless ShouldTry returns false) that causes the CPU to
	// become free for a duration of time. Do NOT use this to count retries.
	Yield()
	// Controller returns the object that changes the service.
	Controller() ServiceController
	// NotifyRetry indicates to the service that a retry has occurred. This is used to track how many attempts have
	// occurred. Do not use any other method as a proxy for this.
	NotifyRetry()
	// NewErrorList creates a new error list to which to add the errors encountered when performing the test.
	NewErrorList() ErrorAppender
}
// ServiceController controls the retry service.
type ServiceController interface {
	// Abort informs the service to no longer perform retries. Calling multiple times should have no additional effects.
	// When this is called by developers, your service should return false when ShouldTry is called. This is useful if
	// the error is not retryable.
	Abort()
}
// Errorer exposes the errors accumulated across retry attempts.
type Errorer interface {
	// Errors gets every error returned by each try.
	Errors() []error
	// Error returns this error and all nested errors as a string, for easy debugging.
	Error() string
	// Last gets the very last error message generated by a failure, or nil if no error was encountered.
	Last() error
}
// ErrorAppender allows the Errorer to be a facade so that callers of the
// retry methods don't see the Append method. This is only used internally,
// but if you wish to provide your own custom Errorer, you'll need to satisfy
// this interface.
type ErrorAppender interface {
	Errorer
	// Append adds an error to the list of errors.
	Append(err error)
}
package types
import (
"fmt"
xo "github.com/xo/xo/types"
"golang.org/x/exp/maps"
)
// sqlTypeConverter is an internal struct that contains data related to current
// conversion.
type sqlTypeConverter struct {
// Enums is a map of the enums' name to the underlying enum.
Enums map[string]*xo.Enum
// Target is the database driver the converter is targeting.
Target string
// idUsedCache stores the number of types the identifier has been used.
idUsedCache map[string]int
// idMappedCache stores the current mapped identifier.
idMappedCache map[string]string
}
// sqlType converts the provided Type to a xo.Type. It panics on an array
// type for any target other than Postgres, and on an unrecognized
// non-enum Go type.
func (s sqlTypeConverter) sqlType(goType Type) xo.Type {
	if goType.IsArray {
		if s.Target != "postgres" {
			panic("unsupported database type for arrays: " + s.Target)
		}
		// Postgres supports arrays natively: resolve the element type by
		// re-running the conversion as a scalar, then flag it as an array.
		goType.IsArray = false
		singleTyp := s.sqlType(goType)
		singleTyp.IsArray = true
		return singleTyp
	}
	// FIX: removed a dead, empty `if s.Target == "sqlserver" && goType.Type
	// == "string"` branch whose only content was the comment "Check unique.".
	// TODO(review): implement the intended sqlserver string uniqueness check
	// or confirm it is not needed.
	t, ok := typeMap[s.Target][goType.Type]
	if ok {
		return t
	}
	if goType.Type != "enum" {
		panic("unexpected go type: " + goType.Type)
	}
	return s.buildEnum(goType)
}
// buildEnum creates an enum type by assigning an appropriate type for the
// database and setting the Enum field. It panics if the Type provided is of
// unknown enum type, or if the target driver does not support enums here.
func (s sqlTypeConverter) buildEnum(goType Type) xo.Type {
	enumName := s.id(goType.EnumName)
	var typ xo.Type
	switch s.Target {
	case "postgres", "mysql":
		// These drivers get a named enum column type.
		typ = buildType(enumName)
	case "sqlite3", "sqlserver":
		typ = buildType("int") // enum index
	default:
		panic("unsupported database type for enum creation: " + s.Target)
	}
	var ok bool
	// Attach the enum definition; its absence indicates a bug upstream.
	typ.Enum, ok = s.Enums[enumName]
	if !ok {
		panic(fmt.Sprintf(
			"missing enum type for %q\n\tEnums present: %v",
			enumName, maps.Keys(s.Enums),
		))
	}
	return typ
}
// Named booleans to use instead of bare true/false for readability when an
// API takes an "is array" flag.
// NOTE(review): neither identifier is referenced in the code visible here;
// they may be used elsewhere in the package — confirm before removing.
var (
	single = false
	array  = true
)
// typeMap maps each supported target driver to a table translating Go scalar
// type names into the SQL column type used for that driver.
var typeMap = map[string]map[string]xo.Type{
	"postgres": {
		"int64":    buildType("bigint"),
		"int32":    buildType("integer"),
		"float64":  buildType("double precision"),
		"float32":  buildType("real"),
		"bool":     buildType("boolean"),
		"string":   buildType("text"),
		"time":     buildType("timestamp with time zone"),
		"duration": buildType("bigint"),
		"[]byte":   buildType("json"),
	},
	"mysql": {
		"int64":    buildType("bigint"),
		"int32":    buildType("int"),
		"float64":  buildType("double"),
		"float32":  buildType("real"),
		"bool":     buildType("boolean"),
		"string":   buildType("text"),
		"time":     buildType("timestamp"),
		"duration": buildType("bigint"),
		"[]byte":   buildType("json"),
	},
	"sqlite3": {
		"int64":    buildType("bigint"),
		"int32":    buildType("integer"),
		"float64":  buildType("double"),
		"float32":  buildType("real"),
		"bool":     buildType("boolean"),
		"string":   buildType("text"),
		"time":     buildType("datetime"),
		"duration": buildType("bigint"),
		"[]byte":   buildType("blob"),
	},
	"sqlserver": {
		"int64":    buildType("bigint"),
		"int32":    buildType("int"),
		"float64":  buildType("decimal"),
		"float32":  buildType("real"),
		"bool":     buildType("tinyint"),
		"string":   buildType("text"),
		"time":     buildType("datetime2"),
		"duration": buildType("bigint"),
		"[]byte":   buildType("binary"),
	},
}
// indexTypes maps column types that cannot be indexed directly to an
// indexable substitute (e.g. unbounded text becomes varchar(255)).
var indexTypes = map[string]xo.Type{
	"text": buildType("varchar", 255),
}
// buildType is a helper to create a type with the provided type string.
func buildType(typ string, opts ...int) xo.Type {
t := xo.Type{
Type: typ,
}
switch len(opts) {
case 2:
t.Scale = opts[1]
fallthrough
case 1:
t.Prec = opts[0]
}
return t
} | types/sqlType.go | 0.509032 | 0.424054 | sqlType.go | starcoder |
package main
import (
"go-guide/datastruct/binaryTree/traversal/levelorder"
. "go-guide/datastruct/binaryTree/treeNode"
"log"
"math/rand"
)
/**
Problem: https://leetcode-cn.com/problems/convert-sorted-array-to-binary-search-tree/
Given an integer array nums whose elements are sorted in ascending order,
convert it into a height-balanced binary search tree.
A height-balanced binary tree is one in which the heights of the two subtrees
of every node differ by at most 1.
Notes: 1. The in-order traversal of the result is exactly the ascending array.
2. "Sorted + height-balanced" does not determine a unique tree; the root is not fixed.
3. By analogy with binary search on an array, the tree formed by repeatedly bisecting the interval is height-balanced.
*/
// main demonstrates the three construction strategies on a sample sorted
// array and prints the level-order traversal of each resulting tree.
func main() {
	// Sample input from the problem statement.
	nums := []int{-10, -3, 0, 5, 9}
	// Log labels (left as-is) read: "build balanced BST — recursive —
	// middle-left / middle-right / middle-arbitrary".
	log.Println("构建平衡二叉树-递归-中左:", levelorder.TraversalRecursive(sortedArrayToBST(nums)))
	log.Println("构建平衡二叉树-递归-中右:", levelorder.TraversalRecursive(sortedArrayToBST1(nums)))
	log.Println("构建平衡二叉树-递归-中随意:", levelorder.TraversalRecursive(sortedArrayToBST2(nums)))
}
// sortedArrayToBST builds a height-balanced BST from the sorted slice nums,
// always choosing the element just left of center as each subtree's root.
func sortedArrayToBST(nums []int) *TreeNode {
	var build func(lo, hi int) *TreeNode
	build = func(lo, hi int) *TreeNode {
		if lo > hi {
			return nil
		}
		// Left-of-center midpoint for even-length intervals.
		mid := lo + (hi-lo)/2
		return &TreeNode{
			Val:   nums[mid],
			Left:  build(lo, mid-1),
			Right: build(mid+1, hi),
		}
	}
	return build(0, len(nums)-1)
}
// sortedArrayToBST1 builds a height-balanced BST from the sorted slice nums,
// always choosing the element just right of center as each subtree's root.
func sortedArrayToBST1(nums []int) *TreeNode {
	var build func(lo, hi int) *TreeNode
	build = func(lo, hi int) *TreeNode {
		if lo > hi {
			return nil
		}
		// Right-of-center midpoint for even-length intervals.
		mid := (lo + hi + 1) / 2
		node := &TreeNode{Val: nums[mid]}
		node.Left = build(lo, mid-1)
		node.Right = build(mid+1, hi)
		return node
	}
	return build(0, len(nums)-1)
}
// sortedArrayToBST2 中序遍历,选择任意一个中间位置数字作为根节点
func sortedArrayToBST2(nums []int) *TreeNode {
var helper func(left, right int) *TreeNode
helper = func(left, right int) *TreeNode {
if left > right {
return nil
}
// 选择任意一个中间位置数字作为根节点
mid := (left + right + rand.Intn(2)) / 2
root := &TreeNode{Val: nums[mid]}
root.Left = helper(left, mid-1)
root.Right = helper(mid+1, right)
return root
}
return helper(0, len(nums)-1)
} | datastruct/binaryTree/leetcodeQuestion/sortedArrayToBST/sortedArrayToBST.go | 0.678007 | 0.403537 | sortedArrayToBST.go | starcoder |
package geometry
import (
"math"
)
// Triangle is a triangle defined by its three corner points and three edges.
// The edge fields presumably connect the like-named corners (AB joins A and
// B, etc.) — note they are separate fields, not derived from A/B/C.
type Triangle struct {
	A  Point
	B  Point
	C  Point
	AB Edge
	BC Edge
	CA Edge
}
// TriangleMesh is a mesh of triangles together with derived geometry:
// the triangles themselves, their centroids (Nodes), the unique vertices,
// and the unique edges.
type TriangleMesh struct {
	Triangles []Triangle
	Nodes     []Point // the centroids of each triangle
	Vertices  []Point
	Edges     []Edge
}
// BowyerWatson returns a Delaunay triangulation of the given points using the
// Bowyer-Watson incremental algorithm: start from a super-triangle enclosing
// all points, insert points one at a time, carve out the triangles whose
// circumcircles contain the new point, and re-triangulate the resulting
// polygonal hole. Runs in roughly O(n^2) due to the per-point scans below.
func BowyerWatson(points []Point) TriangleMesh {
	var circumcircle Circle
	var mesh TriangleMesh
	var badTriangles []Triangle
	var finalTriangles []Triangle
	var otherTriangles []Triangle
	var newTrianglePoints []Point
	var newTriangle Triangle
	var polygon Polygon
	var vertices []Point
	// Seed the mesh with a triangle large enough to contain every point.
	superTriangle := SuperTriangle(points)
	mesh.Triangles = append(mesh.Triangles, superTriangle)
	for _, p := range points {
		mesh.Vertices = append(mesh.Vertices, p)
		// Collect every triangle whose circumcircle contains p; these
		// violate the Delaunay condition and must be removed.
		badTriangles = []Triangle{}
		for _, t := range mesh.Triangles {
			circumcircle = Circumcircle(t)
			if p.InCircle(circumcircle) {
				badTriangles = append(badTriangles, t)
			}
		}
		// The boundary of the hole: edges of bad triangles that are NOT
		// shared with another bad triangle.
		polygon = Polygon{}
		for _, t := range badTriangles {
			for _, e := range t.Edges() {
				otherTriangles = t.RemoveFrom(badTriangles)
				if !e.IsSharedByTriangles(otherTriangles) {
					polygon.Edges = append(polygon.Edges, e)
				}
			}
		}
		// Remove the bad triangles from the mesh.
		for _, t := range badTriangles {
			mesh.Triangles = t.RemoveFrom(mesh.Triangles)
		}
		// Re-triangulate the hole by connecting p to each boundary edge.
		for _, e := range polygon.Edges {
			newTrianglePoints = []Point{
				{
					X: e.A.X,
					Y: e.A.Y,
				},
				{
					X: e.B.X,
					Y: e.B.Y,
				},
				{
					X: p.X,
					Y: p.Y,
				},
			}
			newTriangle = TriangleFromPoints(newTrianglePoints)
			mesh.Triangles = append(mesh.Triangles, newTriangle)
		}
	}
	// Remove any triangles that have a vertex from the super-triangle from the triangulation
	superVertices := superTriangle.Vertices()
	match := false
	for _, t := range mesh.Triangles {
		vertices = t.Vertices()
		for _, v := range vertices {
			for _, s := range superVertices {
				if v.Equals(s) {
					match = true
				}
			}
		}
		if !match {
			finalTriangles = append(finalTriangles, t)
		} else {
			match = false // reset the flag for the next triangle
		}
	}
	mesh.Triangles = finalTriangles
	// Derive the edge list, deduplicate, and compute per-triangle centroids.
	for _, t := range mesh.Triangles {
		mesh.Edges = append(mesh.Edges, t.Edges()...)
	}
	mesh.Vertices = GetUniquePoints(mesh.Vertices)
	mesh.Edges = GetUniqueEdges(mesh.Edges)
	for _, t := range mesh.Triangles {
		mesh.Nodes = append(mesh.Nodes, Centroid(t.Vertices()))
	}
	return mesh
}
// Circumcenter finds the circumcenter for a given triangle.
//
// The previous implementation intersected perpendicular bisectors using
// side slopes, which divides by zero whenever side AB or BC is vertical
// (slope infinite) or horizontal (perpendicular slope infinite),
// producing Inf/NaN coordinates for axis-aligned triangles. This version
// uses the closed-form Cartesian circumcenter formula, which only fails
// for genuinely degenerate (collinear) triangles, where d == 0.
func Circumcenter(triangle Triangle) Point {
	a := triangle.A
	b := triangle.B
	c := triangle.C
	// Twice the signed area of the triangle; zero iff the points are collinear.
	d := 2 * (a.X*(b.Y-c.Y) + b.X*(c.Y-a.Y) + c.X*(a.Y-b.Y))
	// Squared distances from the origin for each vertex.
	aSq := a.X*a.X + a.Y*a.Y
	bSq := b.X*b.X + b.Y*b.Y
	cSq := c.X*c.X + c.Y*c.Y
	return Point{
		X: (aSq*(b.Y-c.Y) + bSq*(c.Y-a.Y) + cSq*(a.Y-b.Y)) / d,
		Y: (aSq*(c.X-b.X) + bSq*(a.X-c.X) + cSq*(b.X-a.X)) / d,
	}
}
// Circumcircle calculates the circumcircle for a given triangle.
//
// The radius uses R = abc / sqrt((a+b+c)(b+c-a)(c+a-b)(a+b-c)), i.e.
// abc / (4 * area) with the area expanded via Heron's formula; the
// center comes from Circumcenter. A degenerate (collinear) triangle
// makes the square root zero and the radius Inf/NaN.
func Circumcircle(triangle Triangle) Circle {
	a := triangle.AB.Length()
	b := triangle.BC.Length()
	c := triangle.CA.Length()
	radius := (a * b * c) / math.Sqrt((a+b+c)*(b+c-a)*(c+a-b)*(a+b-c))
	center := Circumcenter(triangle)
	circle := Circle{
		Center: center,
		Radius: radius,
	}
	return circle
}
// Edges returns the triangle's three edges in AB, BC, CA order.
func (t Triangle) Edges() []Edge {
	return []Edge{t.AB, t.BC, t.CA}
}
// Equals reports whether t and other have pairwise-equal vertices
// (A with A, B with B, C with C). Note it does not recognize rotated or
// permuted descriptions of the same geometric triangle.
func (t Triangle) Equals(other Triangle) bool {
	return t.A.Equals(other.A) && t.B.Equals(other.B) && t.C.Equals(other.C)
}
// RemoveFrom returns a copy of triangles with every occurrence of t filtered out.
func (t Triangle) RemoveFrom(triangles []Triangle) []Triangle {
	var kept []Triangle
	for _, candidate := range triangles {
		if t.Equals(candidate) {
			continue
		}
		kept = append(kept, candidate)
	}
	return kept
}
// IsSharedByTriangles reports whether edge e occurs in any of the given triangles.
func (e Edge) IsSharedByTriangles(triangles []Triangle) bool {
	for _, t := range triangles {
		for _, candidate := range t.Edges() {
			if e.Equals(candidate) {
				return true
			}
		}
	}
	return false
}
// PointInTriangle determines if the given point lies inside (or on the
// boundary of) the given triangle, using the sign of the cross product
// with each directed side: the point is inside iff the three signs do
// not disagree.
func PointInTriangle(pt Point, t Triangle) bool {
	d1 := sign(pt, t.A, t.B)
	d2 := sign(pt, t.B, t.C)
	d3 := sign(pt, t.C, t.A)
	hasNeg := d1 < 0 || d2 < 0 || d3 < 0
	hasPos := d1 > 0 || d2 > 0 || d3 > 0
	return !(hasNeg && hasPos)
}
// SuperTriangle returns a triangle big enough to contain all of the given points
//
// NOTE(review): the min/max trackers start at 0, so the computed bounding
// box is implicitly anchored at the origin (all-positive or all-negative
// inputs still produce a box touching 0). The -5000 offset and the *50
// scale are heuristics; together with the corner pinned at Y:0 they
// assume input coordinates well inside that range — confirm against the
// expected coordinate domain before reusing elsewhere.
func SuperTriangle(points []Point) Triangle {
	minY := 0.0
	maxY := 0.0
	minX := 0.0
	maxX := 0.0
	// Track the axis-aligned bounding box of the inputs (origin-anchored, see note).
	for _, p := range points {
		if p.X < minX {
			minX = p.X
		}
		if p.X > maxX {
			maxX = p.X
		}
		if p.Y < minY {
			minY = p.Y
		}
		if p.Y > maxY {
			maxY = p.Y
		}
	}
	leftBound := Point{
		X: minX - 5000,
		Y: minY - 5000,
	}
	rightBound := Point{
		X: maxX * 50,
		Y: maxY * 50,
	}
	corner := Point{
		X: leftBound.X,
		Y: 0,
	}
	trianglePoints := []Point{
		leftBound,
		rightBound,
		corner,
	}
	triangle := TriangleFromPoints(trianglePoints)
	return triangle
}
// ToPolygon turns the triangle into a polygon carrying its three edges
// (AB, BC, CA) and three corners (A, B, C).
func (t Triangle) ToPolygon() Polygon {
	return Polygon{
		Edges:  []Edge{t.AB, t.BC, t.CA},
		Points: []Point{t.A, t.B, t.C},
	}
}
// Vertices returns the triangle's corner points in A, B, C order.
func (t Triangle) Vertices() []Point {
	return []Point{t.A, t.B, t.C}
}
// TriangleFromPoints builds a Triangle from a set of points by first
// assembling them into a polygon and mapping its corners and edges onto
// the triangle fields.
func TriangleFromPoints(points []Point) Triangle {
	p := PolygonFromPoints(points)
	return Triangle{
		A:  p.Points[0],
		B:  p.Points[1],
		C:  p.Points[2],
		AB: p.Edges[0],
		BC: p.Edges[1],
		CA: p.Edges[2],
	}
}
// sign returns the z-component of the 2D cross product of (p1-p3) and
// (p2-p3); its sign indicates which side of the directed line p3->p2
// the point p1 lies on (0 when the three points are collinear).
func sign(p1 Point, p2 Point, p3 Point) float64 {
	return (p1.X-p3.X)*(p2.Y-p3.Y) - (p2.X-p3.X)*(p1.Y-p3.Y)
}
package genworldvoronoi
import (
"math"
)
// rErode erodes all region by the given amount.
//
// Erosion rates are normalized by their maximum, so `amount` is the
// elevation loss applied to the most eroded region and all others lose
// a proportional fraction. The new elevations are returned as a fresh
// slice; m.r_elevation itself is not modified.
// NOTE: This is based on mewo2's erosion code
// See: https://github.com/mewo2/terrain
func (m *Map) rErode(amount float64) []float64 {
	er := m.rErosionRate()
	newh := make([]float64, m.mesh.numRegions)
	_, maxr := minMax(er)
	for i := 0; i < m.mesh.numRegions; i++ {
		newh[i] = m.r_elevation[i] - amount*(er[i]/maxr)
	}
	return newh
}
// rErosionRate returns the erosion rate per region.
//
// Each region's rate combines river erosion (flux-weighted slope) and
// soil creep (slope squared), capped at 200. The rate is then spread to
// neighboring regions up to erodeNeighborDepth hops away, attenuated by
// nbErosionFactor per hop; a neighbor keeps the maximum value it
// receives from any source.
// NOTE: This is based on mewo2's erosion code
// See: https://github.com/mewo2/terrain
func (m *Map) rErosionRate() []float64 {
	const nbErosionFactor = 0.015
	erodeNeighbors := true
	erodeNeighborDepth := 2
	m.assignFlux()
	flux := m.r_flux
	_, maxFlux := minMax(m.r_flux)
	slope := m.getRSlope()
	newh := make([]float64, m.mesh.numRegions)
	// erodeRegion applies toErode to region r (keeping the larger value),
	// then recurses into neighbors with a reduced amount until rem hops
	// are exhausted.
	var erodeRegion func(r, rem int, toErode float64)
	erodeRegion = func(r, rem int, toErode float64) {
		if toErode > newh[r] {
			newh[r] = toErode
		}
		rem--
		// Check erosion depth.
		if rem < 0 || !erodeNeighbors {
			return
		}
		// Additionally erode all neighbors by a certain fraction.
		toErode *= nbErosionFactor
		nbs := m.rNeighbors(r)
		for _, nb := range nbs {
			erodeRegion(nb, rem, toErode)
		}
	}
	for i := 0; i < m.mesh.numRegions; i++ {
		river := math.Sqrt(flux[i]/maxFlux) * slope[i] //flux[i] * slope[i] / maxFlux
		creep := slope[i] * slope[i]
		total := 1000*river + creep
		if total > 200 {
			total = 200
		}
		erodeRegion(i, erodeNeighborDepth, total)
	}
	return newh
}
// getRSlope returns the slope magnitude for every region, i.e. the
// Euclidean norm of the averaged (x, y) gradient reported by rSlope.
// NOTE: This is based on mewo2's erosion code
// See: https://github.com/mewo2/terrain
func (m *Map) getRSlope() []float64 {
	slopes := make([]float64, m.mesh.numRegions)
	for r := range slopes {
		g := m.rSlope(r)
		slopes[r] = math.Sqrt(g[0]*g[0] + g[1]*g[1])
	}
	return slopes
}
// rSlope returns the average (x, y) elevation gradient of region i,
// computed over all triangles circulating the region.
// NOTE(review): if a region had no surrounding triangles, count would
// stay 0 and the divisions would yield NaN — confirm the mesh excludes
// that case.
func (m *Map) rSlope(i int) [2]float64 {
	var res [2]float64
	var count int
	for _, t := range m.mesh.r_circulate_t(nil, i) {
		slope := m.rTriSlope(m.mesh.t_circulate_r(nil, t))
		res[0] += slope[0]
		res[1] += slope[1]
		count++
	}
	res[0] /= float64(count)
	res[1] /= float64(count)
	return res
}
// rTriSlope computes the elevation gradient (dh/dx, dh/dy) of the plane
// through the three (x, y, elevation) samples given by the regions in
// nbs, using the first region as the local origin. Returns a zero
// gradient if nbs does not contain exactly three regions.
// NOTE(review): det is 0 for a degenerate (collinear) projected
// triangle, which would divide by zero — confirm the mesh excludes that.
// NOTE: This is based on mewo2's erosion code
// See: https://github.com/mewo2/terrain
func (m *Map) rTriSlope(nbs []int) [2]float64 {
	if len(nbs) != 3 {
		return [2]float64{0, 0}
	}
	p0 := convToVec3(m.r_xyz[nbs[0]*3:])
	p1 := convToVec3(m.r_xyz[nbs[1]*3:])
	p2 := convToVec3(m.r_xyz[nbs[2]*3:])
	// Edge vectors relative to the first corner (projected to the x/y plane).
	x1 := p1.X - p0.X
	x2 := p2.X - p0.X
	y1 := p1.Y - p0.Y
	y2 := p2.Y - p0.Y
	det := x1*y2 - x2*y1
	// Elevation differences along the two edges.
	h1 := m.r_elevation[nbs[1]] - m.r_elevation[nbs[0]]
	h2 := m.r_elevation[nbs[2]] - m.r_elevation[nbs[0]]
	return [2]float64{
		(y2*h1 - y1*h2) / det,
		(-x2*h1 + x1*h2) / det,
	}
}
package maze
import (
"fmt"
"github.com/KludgePub/TheMazeRunner/maze/asset"
)
// Graph with nodes and paths. Nodes are keyed by the string produced by
// Point.GetId, so a cell can be looked up by its coordinates.
type Graph struct {
	// Nodes with their relation
	Nodes map[string]*Node `json:"maze_nodes"`
}
// Node represents a single maze cell: its content, its location, and
// links to the up-to-four reachable neighbor cells. The Is*Neighbor
// flags mirror the pointer fields so neighbor presence survives JSON
// marshaling, where the pointers themselves are excluded to avoid
// recursive serialization.
type Node struct {
	// Visited if node was traversed
	Visited bool `json:"-"`
	// Entity represent an value
	Entity byte `json:"entity,omitempty"`
	// Point holds location
	Point Point `json:"point"`
	// IsLeftNeighbor exist
	IsLeftNeighbor bool `json:"is_left_neighbor"`
	// IsRightNeighbor exist
	IsRightNeighbor bool `json:"is_right_neighbor"`
	// IsTopNeighbor exist
	IsTopNeighbor bool `json:"is_top_neighbor"`
	// IsBottomNeighbor exist
	IsBottomNeighbor bool `json:"is_bottom_neighbor"`
	// Do not marshal nodes, it will be recursive
	// LeftNeighbor edged nodes
	LeftNeighbor *Node `json:"-"`
	// RightNeighbor edged nodes
	RightNeighbor *Node `json:"-"`
	// TopNeighbor edged nodes
	TopNeighbor *Node `json:"-"`
	// BottomNeighbor edged nodes
	BottomNeighbor *Node `json:"-"`
}
// GetId creates unique point hash in the form "x:%d,y:%d".
// The result doubles as the key into Graph.Nodes, so two Points map to
// the same node exactly when their coordinates match.
func (p Point) GetId() string {
	return fmt.Sprintf("x:%d,y:%d", p.X, p.Y)
}
// DispatchToGraph assemble graph to provide it to player
func DispatchToGraph(m *Map) *Graph {
graph := Graph{
Nodes: make(map[string]*Node, m.Size),
}
for x := 0; x < m.Width; x++ {
for y := 0; y < m.Width; y++ {
var cNode *Node
cPoint := Point{X: x, Y: y}
if n, exist := graph.Nodes[cPoint.GetId()]; exist {
cNode = n
} else {
cNode = &Node{
Entity: m.Container[x][y],
Point: cPoint,
}
}
// Check left neighbor
if y-1 >= 0 && m.Walls.Vertical[x][y] != asset.VerticalWall {
var lNode *Node
lnp := Point{X: x, Y: y - 1}
if n, exist := graph.Nodes[lnp.GetId()]; exist {
lNode = n
} else {
lNode = &Node{
Entity: m.Container[x][y-1],
Point: lnp,
}
}
cNode.IsLeftNeighbor = true
graph.Nodes[lNode.Point.GetId()], cNode.LeftNeighbor = lNode, lNode
}
// Check right neighbor
if m.Height > y+1 && m.Walls.Vertical[x][y+1] != asset.VerticalWall {
var rNode *Node
rnp := Point{X: x, Y: y + 1}
if n, exist := graph.Nodes[rnp.GetId()]; exist {
rNode = n
} else {
rNode = &Node{
Entity: m.Container[x][y+1],
Point: rnp,
}
}
cNode.IsRightNeighbor = true
graph.Nodes[rNode.Point.GetId()], cNode.RightNeighbor = rNode, rNode
}
// Check top neighbor
if x-1 >= 0 && m.Walls.Horizontal[x][y] != asset.HorizontalWall {
var topNode *Node
tnp := Point{X: x - 1, Y: y}
if n, exist := graph.Nodes[tnp.GetId()]; exist {
topNode = n
} else {
topNode = &Node{
Entity: m.Container[x-1][y],
Point: tnp,
}
}
cNode.IsTopNeighbor = true
graph.Nodes[topNode.Point.GetId()], cNode.TopNeighbor = topNode, topNode
}
// Check bottom neighbor
if m.Width > x+1 && m.Walls.Horizontal[x+1][y] != asset.HorizontalWall {
var bNode *Node
bnp := Point{X: x + 1, Y: y}
if n, exist := graph.Nodes[bnp.GetId()]; exist {
bNode = n
} else {
bNode = &Node{
Entity: m.Container[x+1][y],
Point: bnp,
}
}
cNode.IsBottomNeighbor = true
graph.Nodes[bNode.Point.GetId()], cNode.BottomNeighbor = bNode, bNode
}
// Update graph
graph.Nodes[cNode.Point.GetId()] = cNode
}
}
return &graph
} | maze/dispatcher.go | 0.717408 | 0.482856 | dispatcher.go | starcoder |
package radolan
import (
"bufio"
)
// encoding types of the composite data section; the numeric value is
// also the index into the parse method table below.
type encoding int
const (
	runlength encoding = iota // run-length encoded values (level header present)
	littleEndian // two bytes per value, little endian
	singleByte // one byte per value
	unknown // unrecognized layout; parsing fails
)
// parsing methods
// parse is indexed by the encoding constants above; init fills it with
// the matching (*Composite) parser.
var parse = [4]func(c *Composite, rd *bufio.Reader) error{}
// init maps the parsing methods to the encoding type.
// A table is used (rather than a switch) so parseData can dispatch by
// indexing with identifyEncoding's result.
func init() {
	parse[runlength] = (*Composite).parseRunlength
	parse[littleEndian] = (*Composite).parseLittleEndian
	parse[singleByte] = (*Composite).parseSingleByte
	parse[unknown] = (*Composite).parseUnknown
}
// identifyEncoding identifies the encoding type of the data section by
// only comparing header characteristics.
// This method requires header data to be already written.
func (c *Composite) identifyEncoding() encoding {
	values := c.Px * c.Py
	switch {
	case c.level != nil:
		return runlength
	case c.dataLength == values*2:
		return littleEndian
	case c.dataLength == values:
		return singleByte
	default:
		return unknown
	}
}
// parseData parses the composite data and writes the related fields.
// This method requires header data to be already written: the plain
// data grid is allocated as Py rows of Px values, then filled by the
// parser matching the detected encoding.
func (c *Composite) parseData(reader *bufio.Reader) error {
	if c.Px == 0 || c.Py == 0 {
		return newError("parseData", "parsed header data required")
	}
	// create Data fields
	c.PlainData = make([][]float32, c.Py)
	for i := range c.PlainData {
		c.PlainData[i] = make([]float32, c.Px)
	}
	return parse[c.identifyEncoding()](c, reader)
}
// arrangeData slices plain data into its data layers or strips preceeding
// vertical projection. DataZ shares backing storage with PlainData (the
// rows are re-sliced, not copied), Dz is the layer count, and Data
// aliases the bottom-most layer.
func (c *Composite) arrangeData() {
	if c.Py%c.Dy == 0 { // multiple layers are linked downwards
		c.DataZ = make([][][]float32, c.Py/c.Dy)
		for i := range c.DataZ {
			c.DataZ[i] = c.PlainData[c.Dy*i : c.Dy*(i+1)] // split layers
		}
	} else { // only use bottom most part of plain data
		c.DataZ = [][][]float32{c.PlainData[c.Py-c.Dy:]} // strip elevation
	}
	c.Dz = len(c.DataZ)
	c.Data = c.DataZ[0] // alias
}
// parseUnknown performs no action and always returns an error.
// It is the parse-table fallback for composites whose data length
// matches none of the known layouts.
func (c *Composite) parseUnknown(rd *bufio.Reader) error {
	return newError("parseUnknown", "unknown encoding")
}
package models
import (
i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e "time"
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization"
)
// AttackSimulationSimulationUserCoverage models one user's coverage
// statistics across attack simulation and training campaigns. Fields are
// unexported and accessed via the getter/setter methods, in the style of
// generated Microsoft Graph SDK models.
type AttackSimulationSimulationUserCoverage struct {
    // Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
    additionalData map[string]interface{}
    // User in an attack simulation and training campaign.
    attackSimulationUser AttackSimulationUserable
    // Number of link clicks in the received payloads by the user in attack simulation and training campaigns.
    clickCount *int32
    // Number of compromising actions by the user in attack simulation and training campaigns.
    compromisedCount *int32
    // Date and time of latest attack simulation and training campaign that the user was included in.
    latestSimulationDateTime *i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time
    // Number of attack simulation and training campaigns that the user was included in.
    simulationCount *int32
}
// NewAttackSimulationSimulationUserCoverage instantiates a new
// attackSimulationSimulationUserCoverage and sets the default values
// (an empty additional-data map).
func NewAttackSimulationSimulationUserCoverage() *AttackSimulationSimulationUserCoverage {
    m := &AttackSimulationSimulationUserCoverage{}
    m.SetAdditionalData(make(map[string]interface{}))
    return m
}
// CreateAttackSimulationSimulationUserCoverageFromDiscriminatorValue creates a new instance of the appropriate class based on discriminator value.
// This type has no subtypes, so the parse node is ignored and the base
// type is always returned.
func CreateAttackSimulationSimulationUserCoverageFromDiscriminatorValue(parseNode i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, error) {
    return NewAttackSimulationSimulationUserCoverage(), nil
}
// GetAdditionalData gets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
// Nil-safe: returns nil when called on a nil receiver.
func (m *AttackSimulationSimulationUserCoverage) GetAdditionalData() map[string]interface{} {
    if m == nil {
        return nil
    }
    return m.additionalData
}
// GetAttackSimulationUser gets the attackSimulationUser property value. User in an attack simulation and training campaign.
// Nil-safe: returns nil when called on a nil receiver.
func (m *AttackSimulationSimulationUserCoverage) GetAttackSimulationUser() AttackSimulationUserable {
    if m == nil {
        return nil
    }
    return m.attackSimulationUser
}
// GetClickCount gets the clickCount property value. Number of link clicks in the received payloads by the user in attack simulation and training campaigns.
// Nil-safe: returns nil when called on a nil receiver.
func (m *AttackSimulationSimulationUserCoverage) GetClickCount() *int32 {
    if m == nil {
        return nil
    }
    return m.clickCount
}
// GetCompromisedCount gets the compromisedCount property value. Number of compromising actions by the user in attack simulation and training campaigns.
// Nil-safe: returns nil when called on a nil receiver.
func (m *AttackSimulationSimulationUserCoverage) GetCompromisedCount() *int32 {
    if m == nil {
        return nil
    }
    return m.compromisedCount
}
// GetFieldDeserializers the deserialization information for the current model.
// Returns a map from JSON field name to a closure that reads the field's
// value from a parse node and stores it on the receiver; values absent
// from the payload leave the field untouched.
func (m *AttackSimulationSimulationUserCoverage) GetFieldDeserializers()(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error)) {
    res := make(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error))
    res["attackSimulationUser"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetObjectValue(CreateAttackSimulationUserFromDiscriminatorValue)
        if err != nil {
            return err
        }
        if val != nil {
            m.SetAttackSimulationUser(val.(AttackSimulationUserable))
        }
        return nil
    }
    res["clickCount"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetInt32Value()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetClickCount(val)
        }
        return nil
    }
    res["compromisedCount"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetInt32Value()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetCompromisedCount(val)
        }
        return nil
    }
    res["latestSimulationDateTime"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetTimeValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetLatestSimulationDateTime(val)
        }
        return nil
    }
    res["simulationCount"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetInt32Value()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetSimulationCount(val)
        }
        return nil
    }
    return res
}
// GetLatestSimulationDateTime gets the latestSimulationDateTime property value. Date and time of latest attack simulation and training campaign that the user was included in.
// Nil-safe: returns nil when called on a nil receiver.
func (m *AttackSimulationSimulationUserCoverage) GetLatestSimulationDateTime() *i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time {
    if m == nil {
        return nil
    }
    return m.latestSimulationDateTime
}
// GetSimulationCount gets the simulationCount property value. Number of attack simulation and training campaigns that the user was included in.
// Nil-safe: returns nil when called on a nil receiver.
func (m *AttackSimulationSimulationUserCoverage) GetSimulationCount() *int32 {
    if m == nil {
        return nil
    }
    return m.simulationCount
}
// Serialize serializes information the current object.
// Each property is written in turn, followed by the catch-all
// additional-data map; the first writer error aborts serialization.
func (m *AttackSimulationSimulationUserCoverage) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {
    {
        err := writer.WriteObjectValue("attackSimulationUser", m.GetAttackSimulationUser())
        if err != nil {
            return err
        }
    }
    {
        err := writer.WriteInt32Value("clickCount", m.GetClickCount())
        if err != nil {
            return err
        }
    }
    {
        err := writer.WriteInt32Value("compromisedCount", m.GetCompromisedCount())
        if err != nil {
            return err
        }
    }
    {
        err := writer.WriteTimeValue("latestSimulationDateTime", m.GetLatestSimulationDateTime())
        if err != nil {
            return err
        }
    }
    {
        err := writer.WriteInt32Value("simulationCount", m.GetSimulationCount())
        if err != nil {
            return err
        }
    }
    {
        err := writer.WriteAdditionalData(m.GetAdditionalData())
        if err != nil {
            return err
        }
    }
    return nil
}
// SetAdditionalData sets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
// Nil-safe: a nil receiver is a no-op.
func (m *AttackSimulationSimulationUserCoverage) SetAdditionalData(value map[string]interface{}) {
    if m == nil {
        return
    }
    m.additionalData = value
}
// SetAttackSimulationUser sets the attackSimulationUser property value. User in an attack simulation and training campaign.
// Nil-safe: a nil receiver is a no-op.
func (m *AttackSimulationSimulationUserCoverage) SetAttackSimulationUser(value AttackSimulationUserable) {
    if m == nil {
        return
    }
    m.attackSimulationUser = value
}
// SetClickCount sets the clickCount property value. Number of link clicks in the received payloads by the user in attack simulation and training campaigns.
// Nil-safe: a nil receiver is a no-op.
func (m *AttackSimulationSimulationUserCoverage) SetClickCount(value *int32) {
    if m == nil {
        return
    }
    m.clickCount = value
}
// SetCompromisedCount sets the compromisedCount property value. Number of compromising actions by the user in attack simulation and training campaigns.
// Nil-safe: a nil receiver is a no-op.
func (m *AttackSimulationSimulationUserCoverage) SetCompromisedCount(value *int32) {
    if m == nil {
        return
    }
    m.compromisedCount = value
}
// SetLatestSimulationDateTime sets the latestSimulationDateTime property value. Date and time of latest attack simulation and training campaign that the user was included in.
// Nil-safe: a nil receiver is a no-op.
func (m *AttackSimulationSimulationUserCoverage) SetLatestSimulationDateTime(value *i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time) {
    if m == nil {
        return
    }
    m.latestSimulationDateTime = value
}
// SetSimulationCount sets the simulationCount property value. Number of attack simulation and training campaigns that the user was included in.
func (m *AttackSimulationSimulationUserCoverage) SetSimulationCount(value *int32)() {
if m != nil {
m.simulationCount = value
}
} | models/attack_simulation_simulation_user_coverage.go | 0.609757 | 0.468973 | attack_simulation_simulation_user_coverage.go | starcoder |
package shortestPath
import (
"context"
"fmt"
"github.com/RyanCarrier/dijkstra"
"github.com/romanornr/autodealer/internal/singleton"
"github.com/sirupsen/logrus"
"github.com/thrasher-corp/gocryptotrader/currency"
exchange "github.com/thrasher-corp/gocryptotrader/exchanges"
"github.com/thrasher-corp/gocryptotrader/exchanges/asset"
)
// MatchPairsForCurrency returns a list of pairs that match the given currency.
// A pair matches when its base equals code.
// NOTE(review): an error from GetAvailablePairs is swallowed and nil is
// returned, which is indistinguishable from "no matching pairs" — confirm
// this best-effort behavior is intended by callers.
func MatchPairsForCurrency(e exchange.IBotExchange, code currency.Code, assetType asset.Item) currency.Pairs {
	availablePairs, err := e.GetAvailablePairs(assetType)
	if err != nil {
		return nil
	}
	matchingPairs := currency.Pairs{}
	for _, pair := range availablePairs {
		if pair.Base == code {
			matchingPairs = append(matchingPairs, pair)
		}
	}
	return matchingPairs
}
// Nodes is the collection of graph nodes built while expanding pairs
// during PathToAsset.
type Nodes struct {
	Nodes []*Node
}
// Node groups the currency pairs discovered at one expansion step.
// ID is a sequence number; PathToAsset also uses it as the arc weight
// when building the Dijkstra graph.
type Node struct {
	Pairs currency.Pairs
	ID int
}
// PathToAsset returns the shortest path of currency codes from code to
// destination, computed with Dijkstra over a graph of tradable pairs.
//
// It expands one level of pairs from the source code, then one level
// from each quote currency, assigns integer vertex ids to every base and
// quote, adds an arc per pair weighted by the originating node's ID, and
// runs a shortest-path query.
// NOTE(review): vertex ids are reassigned on every occurrence of a code,
// so later pairs overwrite earlier ids for the same currency; also the
// arc weight is the expansion-step ID rather than a market-derived cost.
// Confirm both are intended before relying on optimality of the result.
func PathToAsset(e exchange.IBotExchange, code currency.Code, destination currency.Code, assetType asset.Item) ([]currency.Code, error) {
	availablePairs := currency.Pairs{}
	// chosen asset is "ANC"
	// INFO[0121]: relatablePairs [ANC-BTC ANC-BUSD ANC-USDT]
	relatablePairs := MatchPairsForCurrency(e, code, assetType)
	availablePairs = append(availablePairs, relatablePairs...)
	nodes := Nodes{}
	nodes.Nodes = append(nodes.Nodes, &Node{Pairs: availablePairs, ID: 0})
	graph := dijkstra.NewGraph()
	// Expand one more level: pairs reachable from each quote currency.
	for i, p := range availablePairs {
		m := MatchPairsForCurrency(e, p.Quote, assetType)
		nodes.Nodes = append(nodes.Nodes, &Node{Pairs: m, ID: i + 1})
		availablePairs = append(availablePairs, m...)
	}
	// Assign an integer vertex id to every base and quote currency.
	vertices := make(map[currency.Code]int)
	count := 0
	for _, n := range nodes.Nodes {
		// each n.Pairs.Quote is a currency.Code and each n.Pairs.Base is a currency.Code
		for _, p := range n.Pairs {
			vertices[p.Quote] = int(count)
			count++
			vertices[p.Base] = int(count)
			count++
			graph.AddVertex(vertices[p.Quote])
			graph.AddVertex(vertices[p.Base])
		}
		//INFO[0121] 1: [BTC-USDT BTC-TUSD BTC-USDC BTC-BUSD BTC-NGN BTC-RUB BTC-TRY BTC-EUR BTC-GBP BTC-UAH BTC-BIDR BTC-AUD BTC-DAI BTC-BRL BTC-USDP]
		//INFO[0121] 2: [BUSD-USDT BUSD-RUB BUSD-TRY BUSD-BIDR BUSD-DAI BUSD-BRL BUSD-VAI BUSD-UAH]
		//INFO[0121] 3: [USDT-TRY USDT-RUB USDT-IDRT USDT-UAH USDT-BIDR USDT-DAI USDT-NGN USDT-BRL]
		//logrus.Printf("%d: %s", n.ID, n.Pairs)
	}
	// Add one directed arc per tradable pair (base -> quote).
	for _, n := range nodes.Nodes {
		// each n.Pairs.Quote is a currency.Code and each n.Pairs.Base is a currency.Code
		for _, p := range n.Pairs {
			err := graph.AddArc(vertices[p.Base], vertices[p.Quote], int64(n.ID))
			if err != nil {
				return nil, fmt.Errorf("error adding arc: %s", err)
			}
		}
	}
	// logrus.Printf("vertices %v\n", vertices)
	// add the edges to the graph
	best, err := graph.Shortest(vertices[code], vertices[destination])
	if err != nil {
		logrus.Errorf("error finding best path: %s", err)
	}
	fmt.Println("Shortest distance ", best.Distance, " following path ", best.Path)
	var codesToPath []currency.Code
	// convert the path to currency codes
	for _, n := range best.Path {
		key, ok := mapkeyVertices(vertices, n)
		if !ok {
			return nil, fmt.Errorf("error finding key for %d", n)
		}
		codesToPath = append(codesToPath, key)
	}
	return codesToPath, nil
}
// mapkeyVertices performs a reverse lookup: it returns the currency code
// mapped to the given vertex id, and whether such a code exists.
func mapkeyVertices(m map[currency.Code]int, value int) (key currency.Code, ok bool) {
	for candidate, id := range m {
		if id != value {
			continue
		}
		return candidate, true
	}
	return key, false
}
// fetchTickerPrice multiplies the last ticker prices along a chain of
// currencies, turning N codes into N-1 consecutive pairs (each code
// paired with its successor) and accumulating the product of their last
// prices. Returns the accumulated conversion price, or the first ticker
// error encountered.
func fetchTickerPrice(e exchange.IBotExchange, currencies []currency.Code, assetType asset.Item) (float64, error) {
	// currencies is a slice
	// currencies VIA, BTC, USD (3 currency codes)
	// which should become VIA-BTC, BTC-USD (2 pairs created from 3 currency codes)
	d := singleton.GetDealer()
	// Resolve the dealer-managed instance of the same exchange.
	e, err := d.ExchangeManager.GetExchangeByName(e.GetName())
	if err != nil {
		return 0, err
	}
	for _, c := range currencies {
		logrus.Printf("currency %s\n", c)
	}
	pairs := currency.Pairs{}
	// create pairs from the currencies Slice
	for i, c := range currencies {
		if i == len(currencies)-1 {
			break
		}
		pair := currency.NewPair(c, currencies[i+1]) // the quote is the next currency code
		pairs = append(pairs, pair)
	}
	var accumulatedPrice float64
	for _, p := range pairs {
		price, err := e.FetchTicker(context.Background(), p, assetType)
		if err != nil {
			return 0, err
		}
		// First pair seeds the accumulator; later pairs multiply into it.
		if accumulatedPrice == 0.0 {
			accumulatedPrice = price.Last
			continue
		}
		accumulatedPrice = accumulatedPrice * price.Last
	}
	return accumulatedPrice, nil
}
// WriteBellmanFordAlgorithm is an unimplemented stub. Per the trailing
// comments in this file, the intent is a Bellman-Ford search for the
// shortest conversion path to a dollar pair. TODO: implement or remove.
func WriteBellmanFordAlgorithm(e exchange.IBotExchange, code currency.Code, assetType asset.Item) {
}
// Get the dollar value of the given asset. However, there might not be a direct conversion to USD so we need to use the exchange's conversion rate
// to get the value in USD. Possibly use an intermediate currency pair to convert to USD. | internal/algo/shortestPath/pairs.go | 0.797044 | 0.400339 | pairs.go | starcoder |
package main
import (
"bufio"
"fmt"
"io"
"log"
"os"
"strings"
)
// Body is a "small Solar System body", i.e. an object in space. A Body orbits
// another Body and they are represented as a tree. The root of the tree is the
// COM (Center of Mass).
// See https://english.stackexchange.com/a/281983
type Body struct {
	desc string // the body's name, e.g. "COM", "YOU", "SAN"
	orbits *Body // the body this one directly orbits; nil for the COM
	orbiting []*Body // bodies that directly orbit this one
}
// UniversalOrbitMap maps bodies names to their entry in the tree, giving
// O(1) lookup of any body by name.
type UniversalOrbitMap map[string]*Body
// IsCenterOfMass reports whether b is the Center of Mass, which is the
// only body that orbits nothing.
func (b *Body) IsCenterOfMass() bool {
	return b.orbits == nil
}
// Depth returns the number of orbit hops between b and the Center of Mass.
func (b *Body) Depth() int {
	depth := 0
	cur := b
	for !cur.IsCenterOfMass() {
		cur = cur.orbits
		depth++
	}
	return depth
}
// OrbitCount compute and return the b's total of direct and indirect orbits,
// summed recursively over b's entire subtree.
// The depth argument is the distance between b and the Center of Mass:
// a non-COM body at depth d contributes one direct orbit and d-1
// indirect ones.
func (b *Body) OrbitCount(depth int) (direct, indirect int) {
	// we have a direct orbit iff we're not the COM.
	if !b.IsCenterOfMass() {
		direct = 1
		indirect = depth - 1
	}
	// Compute recursively the direct and indirect orbits.
	for _, o := range b.orbiting {
		d, i := o.OrbitCount(depth + 1)
		direct += d
		indirect += i
	}
	return
}
// OrbitalTransfers compute and returns the minimum of orbital transfers
// required to move from the object b is orbiting to the object o is orbiting,
// i.e. the tree distance between their parents (via the lowest common
// ancestor).
// It returns -1 when either b or o is the Center of Mass, or when they are not
// part of the same UniversalOrbitMap.
func (b *Body) OrbitalTransfers(o *Body) int {
	if b.IsCenterOfMass() || o.IsCenterOfMass() {
		return -1
	}
	// let n0, n1 be the objects b (respectively o) is orbiting.
	n0, n1 := b.orbits, o.orbits
	// let n0 be the deepest node.
	d0, d1 := n0.Depth(), n1.Depth()
	diff := d0 - d1
	if diff < 0 {
		n0, n1 = n1, n0
		diff = -diff
	}
	// Bring n0 "up" to the same level as n1.
	for i := 0; i < diff; i++ {
		n0 = n0.orbits
	}
	// Travel "up" from both sides until we get a match: i ancestor steps
	// on each side plus the initial depth difference is the distance.
	for i := 0; n0 != nil && n1 != nil; i++ {
		if n0 == n1 {
			return i*2 + diff
		}
		n0, n1 = n0.orbits, n1.orbits
	}
	return -1
}
// OrbitCount computes the Center of Mass's total of direct and indirect
// orbits. When the map has no Center of Mass, (-1, -1) is returned.
func (uom UniversalOrbitMap) OrbitCount() (direct, indirect int) {
	com, ok := uom["COM"]
	if !ok {
		return -1, -1
	}
	return com.OrbitCount(0)
}
// main parse the universal orbit map from stdin, then compute and display
// the total of direct and indirect orbits plus the minimum number of
// orbital transfers between YOU's and SAN's orbited bodies.
// Missing input bodies or unreadable input abort with a fatal log.
func main() {
	uom, err := Parse(os.Stdin)
	if err != nil {
		log.Fatalf("input error: %s\n", err)
	}
	you, ok := uom["YOU"]
	if !ok {
		log.Fatalf("YOU not found\n")
	}
	san, ok := uom["SAN"]
	if !ok {
		log.Fatalf("SAN not found\n")
	}
	direct, indirect := uom.OrbitCount()
	distance := you.OrbitalTransfers(san)
	fmt.Printf("the total number of direct and indirect orbits is %v,\n", direct+indirect)
	fmt.Printf("and the minimum of orbital transfers required is %v.\n", distance)
}
// Parse a map of the local orbits.
// It returns the UniversalOrbitMap and any read or parsing error encountered.
func Parse(r io.Reader) (UniversalOrbitMap, error) {
uom := make(UniversalOrbitMap)
scanner := bufio.NewScanner(r)
for scanner.Scan() {
line := scanner.Text()
desc := strings.Split(line, ")")
if len(desc) != 2 {
return nil, fmt.Errorf("invalid orbit line: %s", line)
}
// c)o means o directly orbits c.
cdesc := desc[0]
odesc := desc[1]
c, ok := uom[cdesc]
if !ok {
c = &Body{desc: cdesc}
}
o, ok := uom[odesc]
if !ok {
o = &Body{desc: odesc, orbits: c}
}
if o.orbits == nil {
o.orbits = c
}
if o.orbits != c {
return nil, fmt.Errorf("%v orbits %v; expected %v instead", o.desc, o.orbits.desc, c.desc)
}
o.orbits = c
c.orbiting = append(c.orbiting, o)
uom[c.desc] = c
uom[o.desc] = o
}
if err := scanner.Err(); err != nil {
return nil, err
}
return uom, nil
} | day06/main.go | 0.702224 | 0.475118 | main.go | starcoder |
package raytracer
import (
"gonum.org/v1/gonum/spatial/r3"
"math"
)
// Light is a light source in the scene. Implementations in this file:
// AmbientLight, PointLight, SpotLight.
type Light interface {
	// hasPosition reports whether the light has a location in space.
	hasPosition() bool
	// getPosition returns the light's location (meaningful only when hasPosition is true).
	getPosition() *r3.Vec
	// getColorFrac returns the light's color as fractional RGB components.
	getColorFrac() r3.Vec
	// getLightIntensity returns the diffuse intensity.
	getLightIntensity() float64
	// getSpecularLightIntensity returns the specular intensity.
	getSpecularLightIntensity() float64
	// getInverseSquareLawDecayFactor returns the distance-decay factor.
	getInverseSquareLawDecayFactor() float64
	// isPointVisible reports whether the given point receives this light,
	// using traceFunction for occlusion and monteCarloVariance to jitter
	// the light position for soft shadows.
	isPointVisible(point *r3.Vec, traceFunction func(r *ray, tMin float64) (hit bool, record *hitRecord), monteCarloVariance *r3.Vec) bool
}
// AmbientLight illuminates every point uniformly; it has no position,
// no specular term, and no distance decay.
type AmbientLight struct {
	ColorFrac r3.Vec // fractional RGB color
	LightIntensity float64 // diffuse intensity
}
// PointLight emits light from a single position in all directions.
type PointLight struct {
	ColorFrac r3.Vec // fractional RGB color
	Position r3.Vec // location in world space
	LightIntensity float64 // diffuse intensity
	SpecularLightIntensity float64 // specular intensity
	InverseSquareLawDecayFactor float64 // distance-decay factor
}
// SpotLight emits light from a position toward LookAt, restricted to a cone.
type SpotLight struct {
	ColorFrac r3.Vec // fractional RGB color
	Position r3.Vec // location in world space
	LightIntensity float64 // diffuse intensity
	SpecularLightIntensity float64 // specular intensity
	LookAt r3.Vec // point the cone is aimed at
	Angle float64 // specified in degrees
	InverseSquareLawDecayFactor float64 // distance-decay factor
}
// hasPosition reports false: ambient light has no location.
func (a AmbientLight) hasPosition() bool {
	return false
}

// getPosition returns a zero vector; ambient light has no position.
func (a AmbientLight) getPosition() *r3.Vec {
	return &r3.Vec{}
}

// getColorFrac returns the ambient color fractions.
func (a AmbientLight) getColorFrac() r3.Vec {
	return a.ColorFrac
}

// getLightIntensity returns the ambient intensity.
func (a AmbientLight) getLightIntensity() float64 {
	return a.LightIntensity
}

// getSpecularLightIntensity is zero: ambient light produces no highlights.
func (a AmbientLight) getSpecularLightIntensity() float64 {
	return 0
}

// getInverseSquareLawDecayFactor is zero: ambient light does not decay.
func (a AmbientLight) getInverseSquareLawDecayFactor() float64 {
	return 0
}

// isPointVisible always reports true: ambient light cannot be occluded.
func (a AmbientLight) isPointVisible(point *r3.Vec, traceFunction func(r *ray, tMin float64) (hit bool, record *hitRecord), monteCarloVariance *r3.Vec) bool {
	return true
}
// hasPosition reports true: a point light occupies a location in space.
func (p PointLight) hasPosition() bool {
	return true
}

// getPosition returns the light's position.
func (p PointLight) getPosition() *r3.Vec {
	return &p.Position
}

// getColorFrac returns the light color fractions.
func (p PointLight) getColorFrac() r3.Vec {
	return p.ColorFrac
}

// getLightIntensity returns the base intensity.
func (p PointLight) getLightIntensity() float64 {
	return p.LightIntensity
}

// getSpecularLightIntensity returns the specular-highlight intensity.
func (p PointLight) getSpecularLightIntensity() float64 {
	return p.SpecularLightIntensity
}

// getInverseSquareLawDecayFactor returns the distance-falloff factor.
func (p PointLight) getInverseSquareLawDecayFactor() float64 {
	return p.InverseSquareLawDecayFactor
}

// isPointVisible jitters the light position by monteCarloVariance and
// reports whether a shadow ray from point reaches the jittered position.
func (p PointLight) isPointVisible(point *r3.Vec, traceFunction func(r *ray, tMin float64) (hit bool, record *hitRecord), monteCarloVariance *r3.Vec) bool {
	shiftedPosition := r3.Add(p.Position, *monteCarloVariance)
	return doesReachLight(point, &shiftedPosition, traceFunction)
}
// hasPosition reports true: a spot light occupies a location in space.
func (s SpotLight) hasPosition() bool {
	return true
}

// getPosition returns the light's position.
func (s SpotLight) getPosition() *r3.Vec {
	return &s.Position
}

// getColorFrac returns the light color fractions.
func (s SpotLight) getColorFrac() r3.Vec {
	return s.ColorFrac
}

// getLightIntensity returns the base intensity.
func (s SpotLight) getLightIntensity() float64 {
	return s.LightIntensity
}

// getSpecularLightIntensity returns the specular-highlight intensity.
func (s SpotLight) getSpecularLightIntensity() float64 {
	return s.SpecularLightIntensity
}

// getInverseSquareLawDecayFactor returns the distance-falloff factor.
func (s SpotLight) getInverseSquareLawDecayFactor() float64 {
	return s.InverseSquareLawDecayFactor
}

// isPointVisible reports whether point is lit by the spot light: a shadow
// ray from point must reach the jittered light position, and point must
// lie within the light's cone (at most Angle degrees off the LookAt axis).
func (s SpotLight) isPointVisible(point *r3.Vec, traceFunction func(r *ray, tMin float64) (hit bool, record *hitRecord), monteCarloVariance *r3.Vec) bool {
	shiftedPosition := r3.Add(s.Position, *monteCarloVariance)
	reachesLight := doesReachLight(point, &shiftedPosition, traceFunction)
	// get angle between light direction vector and vector of light to point
	lightDirection := r3.Unit(r3.Sub(s.LookAt, s.Position))
	lightPositionToShape := r3.Unit(r3.Sub(*point, shiftedPosition))
	angle := angleBetweenVectors(&lightDirection, &lightPositionToShape)
	return reachesLight && angle <= s.Angle
}
// angleBetweenVectors returns the angle between a and b in degrees.
// The vectors need not be normalized.
func angleBetweenVectors(a, b *r3.Vec) float64 {
	// Fixed misleading names: these are lengths, not squared lengths.
	aLen := math.Sqrt(a.X*a.X + a.Y*a.Y + a.Z*a.Z)
	bLen := math.Sqrt(b.X*b.X + b.Y*b.Y + b.Z*b.Z)
	// Clamp the cosine into [-1, 1]: floating-point error can push it
	// slightly outside for (anti)parallel vectors, which would make
	// math.Acos return NaN.
	cos := r3.Dot(*a, *b) / (aLen * bLen)
	cos = math.Max(-1, math.Min(1, cos))
	return math.Acos(cos) * 180 / math.Pi
}
func doesReachLight(origin *r3.Vec, lightPosition *r3.Vec, traceFunction func(r *ray, tMin float64) (hit bool, record *hitRecord)) bool {
lightDirection := r3.Sub(*lightPosition, *origin)
unitLightDirection := r3.Unit(lightDirection)
r := ray{
p: *origin,
normalizedDirection: unitLightDirection,
}
hit, hitRecord := traceFunction(
&r,
0.01, // don't let the shadow ray hit the same object
)
if !hit {
return true
}
lengthFromPointToLight := r3.Dot(lightDirection, lightDirection)
hitPointDirection := r3.Sub(hitRecord.p, *origin)
lengthFromPointToHitObject := r3.Dot(hitPointDirection, hitPointDirection)
return lengthFromPointToLight < lengthFromPointToHitObject
} | raytracer/light.go | 0.890764 | 0.443962 | light.go | starcoder |
/*
Package spec provides a flexible behavior-driven development (BDD) framework.
It wraps the functionality of the standard Go package "testing" to provide
descriptive and maintainable behavior specifications. Because it wraps
"testing", it can be used with command Gotest. Or, it can be used with the
companion command Gospec.
Specifications (or Specs) are defined by nesting them in a Describe call.
Spec and Describe are methods of the SpecTest type, the primary type of "spec".
The Describe method has aliases They and It. A new SpecTest is created with the
NewSpecTest function.
import (
"import"
. "spec"
)
func TestFunction(T *testing.T) {
s := NewSpecTest(T)
s.Describe(`The "strconv" package`, func() {
s.Describe("integer conversion", func(){
s.Describe("with Itoa", func() {
s.It("makes strings from integers", func() {
s.Spec(
func()string{ return strconv.Itoa(123) },
Should, Equal, "123")
})
})
s.It("with Atoi", func() {
s.It("converts decimal character strings to integers", func() {
decconv := func()(int,error){ return strconv.Atoi("123") }
s.Spec(decconv, Should, Not, HaveError)
s.Spec(decconv, Should, Satisfy, func(x int) bool { return x == 123 })
})
s.It("can't convert hex character strings to integers", func() {
hexconv := func()(int,error){ return strconv.Atoi("0x123") }
s.Spec(hexconv , Should, Not, Equal, 0x123)
})
})
})
})
}
The above example defines the following tests
The "strconv" package integer conversion with ItoA makes strings from integers
The "strconv" package integer conversion with Atoi converts decimal character strings to integers
The "strconv" package integer conversion with Atoi can't convert hex character strings to integers
The spec package makes use of runtime reflection, predicate functions, and deep
equality checking to be a flexible and lightweight test framework.
The Spec argument sequence has the following grammar
VALUE [INDEX] Should [Not] FUNCTION [ARGUMENT]
The general thinking is that (element INDEX of) VALUE is an object and FUNCTION
acts as a method of VALUE with a boolean return type. The "Not" keyword
obviously negates the returned value of FUNCTION.
VALUE can be either a normal Go value (int, string, float64, struct, interface,
...). It can also be a nil-adic function with at least one return value. If
VALUE is a nil-adic function, it is called before the spec is evaluated. If
INDEX is given, that nil-adic function return value is used as the object.
The first nil-adic function return value is used an the object when no INDEX
is given.
FUNCTION can be any of the keywords
Equal
Satisfy
HaveError
Equal and Satisfy both require a single argument while HaveError requires none.
Equal performs a deep equality test of the object against an argument object.
Satisfy requires a predicate function (boolean function of one argument) and
is true if the predicate is true for the object. HaveError requires the object
to be nil-adic which returns an error in its last return value. It returns true
if the function returned an error.
*/
package spec
import (
"strings"
"regexp"
"fmt"
"os"
)
// SpecPattern optionally restricts which specs run; it is read from the
// GOSPECPATTERN environment variable at package load time.
var SpecPattern = os.Getenv("GOSPECPATTERN")

// specregexp is the compiled form of SpecPattern, built lazily by
// getSpecRegexp.
var specregexp *regexp.Regexp

// getSpecRegexp lazily compiles SpecPattern into specregexp.
// It aborts the test run if the pattern does not compile.
func (t SpecTest) getSpecRegexp() {
	if len(SpecPattern) > 0 && specregexp == nil {
		var err error
		specregexp, err = regexp.Compile(SpecPattern)
		if err != nil {
			t.Fatalf("Can't compile GOSPECPATTERN %s", SpecPattern)
		}
	}
}
// An abstraction of the type *testing.T with identical exported methods.
// *testing.T satisfies this interface, and it lets a SpecTest be driven
// by any compatible test harness.
type Test interface {
	Log(...interface{})
	Logf(string, ...interface{})
	Error(...interface{})
	Errorf(string, ...interface{})
	Fatal(...interface{})
	Fatalf(string, ...interface{})
	Fail()
	FailNow()
	Failed() bool
}
// stringer is the subset of fmt.Stringer used when pretty-printing spec
// values.
type stringer interface {
	String() string
}

// Returns a human-readable interpretation of a Spec sequence.
// Values implementing String() are rendered with it; everything else is
// rendered with the %#v verb. The rendered values are space-joined.
func specString(spec sequence) string {
	s := make([]string, len(spec))
	for i, v := range spec {
		switch v.value.(type) {
		case stringer:
			s[i] = v.value.(stringer).String()
		default:
			s[i] = fmt.Sprintf("%#v", v.value)
		}
	}
	return strings.Join(s, " ")
}
// pos is an unexported position type.
// NOTE(review): pos appears unused in this file (errTrigger takes a
// string); confirm it is referenced elsewhere in the package before
// removing it.
type pos uint8

// Quantifier selects how often a Before/After trigger fires.
type Quantifier uint8

const (
	// All fires the trigger for every Spec.
	All Quantifier = iota
	// First fires the trigger only once, for the first Spec.
	First
	// Last fires the trigger only once, after the last Spec.
	Last
)

// quantStr maps Quantifier values to their display names.
var quantStr = []string{
	All:   "All",
	First: "First",
	Last:  "Last",
}

// String returns the display name of q.
func (q Quantifier) String() string { return quantStr[q] }

// errTrigger reports an invalid position/quantifier combination.
func errTrigger(pos string, q Quantifier) error {
	return fmt.Errorf("Bad trigger %s %s", pos, q.String())
}
// trigger is a Before/After hook function plus the Quantifier controlling
// how often it fires.
type trigger struct {
	Quantifier
	fn func()
}

// popTrigger removes and returns the last trigger on the stack, zeroing
// the vacated slot. It returns the zero trigger when the stack is empty.
func popTrigger(ptr *[]trigger) (back trigger) {
	stack := *ptr
	n := len(stack)
	if n == 0 {
		return
	}
	back = stack[n-1]
	// Zero the slot so the backing array does not retain fn.
	stack[n-1] = trigger{}
	*ptr = stack[:n-1]
	return
}
// popTriggerAt removes and returns the trigger at index i, preserving the
// order of the remaining triggers. It returns the zero trigger when the
// stack is empty or i is out of range.
func popTriggerAt(i int, ptr *[]trigger) (tr trigger) {
	n := len(*ptr)
	if n == 0 {
		return
	} else if i >= n {
		return
	}
	// Pop from a sub-slice ending at i so popTrigger both fetches element
	// i and zeroes its slot.
	tstack := (*ptr)[:i+1]
	tr = popTrigger(&(tstack))
	// Shift the tail left over the hole, then drop the now-duplicate last
	// slot.
	copy((*ptr)[i:], (*ptr)[i+1:])
	popTrigger(ptr)
	return
}
// Before registers fn to run before Specs at the current Describe depth,
// as often as q dictates. Last is rejected for Before triggers.
func (t SpecTest) Before(q Quantifier, fn func()) error {
	t.doDebug(func() {
		t.Logf("Before maketrigger")
	})
	return t.makeTrigger(&t.beforestack[t.depth-1], "Before", q, fn)
}

// After registers fn to run after Specs at the current Describe depth,
// as often as q dictates.
func (t SpecTest) After(q Quantifier, fn func()) error {
	t.doDebug(func() {
		t.Logf("After maketrigger")
	})
	return t.makeTrigger(&t.deferstack[t.depth-1], "After", q, fn)
}

// makeTrigger validates and appends a trigger onto the given stack.
// The only rejected combination is a Last-quantified Before trigger.
func (t SpecTest) makeTrigger(stack *[]trigger, pos string, q Quantifier, fn func()) error {
	if q == Last && pos == "Before" {
		return errTrigger(pos, q)
	}
	s := *stack
	s = append(s, trigger{q, fn})
	t.doDebug(func() {
		t.Logf("triggers %#v", s)
	})
	*stack = s
	return nil
}
// The primary object of the spec package. Describe tests using the Describe,
// It, and They methods. Write individual tests using the Spec methods.
type SpecTest struct {
	Test
	// depth is the current Describe nesting depth.
	depth int
	// spec holds the scanned sequence of the most recent failing Spec,
	// for the failure message.
	spec bool // placeholder comment; see field list below
	// (fields as declared)
	runspec bool
	passed  bool
	ranspec bool
	err     error
	// beforestack and deferstack hold Before/After triggers, one slice
	// per Describe depth.
	beforestack [][]trigger
	deferstack  [][]trigger
	// descstack holds the nested Describe descriptions.
	descstack []string
	// debug enables verbose doDebug logging.
	debug bool
}

// Create a new SpecTest. Call this function at the begining of your test functions.
//	import (
//		"testing"
//		. "spec"
//	)
//	func TestObject(T *testing.T) {
//		s := NewSpecTest(T)
//		s.Describe("My object", func() {
//			...
//		})
//	}
func NewSpecTest(T Test) *SpecTest {
	return &SpecTest{Test: T, descstack: nil, debug: false}
}
// Execute a function if t.debug is true; used to guard verbose logging.
func (t *SpecTest) doDebug(fn func()) {
	if t.debug {
		fn()
	}
}

// Return a string describing the current tests being executed by t:
// the nested Describe descriptions joined with spaces.
func (t *SpecTest) String() string { return strings.Join(t.descstack, " ") }
// Begin a block that describes a given thing. Can be called again from the
// does function to describe more specific elements of that thing.
// It pushes the description and fresh trigger levels, runs does, and — if
// any Spec executed — logs a PASS/FAIL/ERROR summary through the wrapped
// Test. Spec execution is suppressed when GOSPECPATTERN is set and the
// nested description does not match it.
func (t *SpecTest) Describe(thing string, does func()) {
	t.getSpecRegexp()
	t.descstack = append(t.descstack, thing)
	t.beforestack = append(t.beforestack, nil)
	t.deferstack = append(t.deferstack, nil)
	t.depth++
	oldrunspec := t.runspec
	if specregexp != nil && !specregexp.MatchString(t.String()) {
		t.runspec = false
	} else if specregexp != nil {
		t.runspec = true
	}
	defer func() {
		// Clear the SpecTest when the description's scope is left.
		t.depth--
		t.descstack = t.descstack[:t.depth]
		// NOTE(review): this pops a single trigger from the slice at
		// index t.depth but never removes the level appended on entry,
		// so beforestack/deferstack grow by one level per Describe call;
		// confirm whether sibling Describes are expected to share levels.
		popTrigger(&t.beforestack[t.depth])
		popTrigger(&t.deferstack[t.depth])
		t.spec = nil
		t.passed = true
		t.ranspec = false
		t.err = nil
		t.runspec = oldrunspec
	}()
	// Schedule Last-quantified After triggers of this level to fire when
	// the description's scope is left.
	after := t.deferstack
	if k := len(after[t.depth-1]); k > 0 {
		for j := 0; j < k; j++ {
			tr := after[t.depth-1][j]
			if tr.Quantifier == Last {
				defer tr.fn()
			}
		}
	}
	t.deferstack = after
	// Do the described tests.
	does()
	if t.ranspec {
		// Compute the result of executed Spec calls.
		ok := t.passed && t.err == nil
		var result string
		switch {
		case ok:
			result = "PASS"
		case t.err != nil:
			result = "ERROR"
		case !t.passed:
			result = "FAIL"
		default:
			// Unreachable: !ok implies t.err != nil or !t.passed.
			panic("unexpected outcome")
		}
		// Write a message summarizing Spec calls.
		msg := fmt.Sprintf("%s: %s", t.String(), result)
		if !ok {
			msg += fmt.Sprintf("\n\t%s", specString(t.spec))
		}
		if t.err != nil {
			msg += fmt.Sprintf("\n\tError: %s", t.err.Error())
		}
		// Write the message as an error if there was a problem.
		if ok {
			t.Log(msg)
		} else {
			t.Error(msg)
		}
	}
}
// A synonym of Describe. Its check function is meant to contain calls to Spec.
func (t *SpecTest) It(specification string, check func()) { t.Describe(specification, check) }

// A synonym of Describe. Its check function is meant to contain calls to Spec.
func (t *SpecTest) They(specification string, check func()) { t.Describe(specification, check) }
// Specify a relation between two objects.
//	Spec("abc", Should, Equal, "abc")
//	Spec("abc", Should, Satisfy, func(x string)bool{ return "abc" })
//	v := Value(func() (string, os.Error) { return "abc", os.NewError("Oops!"))}
//	Spec( v, Should, HaveError)
//	Spec( v, Should, Equal, "abc")
// Before/After triggers of every enclosing depth fire around the spec
// (First-quantified triggers are consumed after their first firing).
// The sequence is then scanned, parsed, and executed; failures and errors
// are recorded on t for the enclosing Describe to report.
func (t *SpecTest) Spec(spec ...interface{}) {
	// Run triggers regardless of context's matching status
	before := t.beforestack
	for i := 0; i < t.depth; i++ {
		if k := len(before[i]); k > 0 {
			for j := 0; j < k; j++ {
				tr := before[i][j]
				t.doDebug(func() {
					t.Logf("firing %#v", tr)
				})
				tr.fn()
				if tr.Quantifier == First {
					// One-shot trigger: remove it and re-examine the
					// element shifted into slot j.
					popTriggerAt(j, &before[i])
					k--
					j--
				}
			}
		}
	}
	t.beforestack = before
	after := t.deferstack
	for i := 0; i < t.depth; i++ {
		if k := len(after[i]); k > 0 {
			for j := 0; j < k; j++ {
				tr := after[i][j]
				t.doDebug(func() {
					t.Logf("defering %#v", tr)
				})
				// After triggers fire when Spec returns.
				defer tr.fn()
				if tr.Quantifier == First {
					popTriggerAt(j, &after[i])
					k--
					j--
				}
			}
		}
	}
	t.deferstack = after
	// Don't run the spec if we are not in a matching context.
	if !t.runspec {
		return
	}
	var (
		seq     sequence
		m       Matcher
		negated bool
		args    []interface{}
	)
	t.doDebug(func() {
		t.Logf("Executing")
	})
	t.ranspec = true
	// Scan the raw arguments, parse them into a matcher, then execute;
	// keep the sequence around for the failure message on any error.
	seq, t.err = t.scan(spec)
	if t.err != nil {
		t.spec = seq
		return
	}
	m, negated, args, t.err = t.parse(seq)
	if t.err != nil {
		t.spec = seq
		return
	}
	t.exec(m, negated, args)
	if !t.passed || t.err != nil {
		t.spec = seq
	}
}
package gfx
import (
"github.com/go-gl/gl/v4.5-core/gl"
"github.com/go-gl/mathgl/mgl32"
)
// Vertex is a Vertex.
// Layout: position (3 floats), normal (3 floats), texture coordinates
// (2 floats) — 32 bytes per vertex, matching the 8*4-byte stride below.
type Vertex struct {
	Vert, Norm mgl32.Vec3
	UV         mgl32.Vec2
}

// BindVertexAttributes binds the attributes per vertex.
// s is the shader program handle queried for attribute locations.
func BindVertexAttributes(s uint32) {
	vertAttrib := uint32(gl.GetAttribLocation(s, gl.Str("vert\x00")))
	gl.EnableVertexAttribArray(vertAttrib)
	gl.VertexAttribPointer(vertAttrib, 3, gl.FLOAT, false, 8*4, gl.PtrOffset(0))
	normAttrib := uint32(gl.GetAttribLocation(s, gl.Str("norm\x00")))
	gl.EnableVertexAttribArray(normAttrib)
	// Normal starts after the 12-byte position.
	gl.VertexAttribPointer(normAttrib, 3, gl.FLOAT, false, 8*4, gl.PtrOffset(12))
	uvAttrib := uint32(gl.GetAttribLocation(s, gl.Str("uv\x00")))
	gl.EnableVertexAttribArray(uvAttrib)
	// UV starts after position + normal (24 bytes).
	gl.VertexAttribPointer(uvAttrib, 2, gl.FLOAT, false, 8*4, gl.PtrOffset(24))
}
// SkyVertex is a Vertex for the sky.
// NOTE(review): Vert is declared as a Vec2 (8 bytes), but
// BindSkyVertexAttributes binds 3 float components with a 3*4-byte
// stride; one of the two looks wrong — confirm against the sky shader's
// vertex layout before changing either.
type SkyVertex struct {
	Vert mgl32.Vec2
}

// BindSkyVertexAttributes binds the attributes per vertex.
func BindSkyVertexAttributes(s uint32) {
	vertAttrib := uint32(gl.GetAttribLocation(s, gl.Str("vert\x00")))
	gl.EnableVertexAttribArray(vertAttrib)
	// 3 components / 12-byte stride: mismatched with SkyVertex's Vec2
	// field (see NOTE above).
	gl.VertexAttribPointer(vertAttrib, 3, gl.FLOAT, false, 3*4, gl.PtrOffset(0))
}
// LineVertex is a Vertex.
// Layout: position (3 floats) and color (3 floats) — 24 bytes per vertex,
// matching the 6*4-byte stride below.
type LineVertex struct {
	Vert, Color mgl32.Vec3
}

// BindLineVertexAttributes binds the attributes per vertex.
func BindLineVertexAttributes(s uint32) {
	vertAttrib := uint32(gl.GetAttribLocation(s, gl.Str("vert\x00")))
	gl.EnableVertexAttribArray(vertAttrib)
	gl.VertexAttribPointer(vertAttrib, 3, gl.FLOAT, false, 6*4, gl.PtrOffset(0))
	colorAttrib := uint32(gl.GetAttribLocation(s, gl.Str("color\x00")))
	gl.EnableVertexAttribArray(colorAttrib)
	// Color starts after the 12-byte position.
	gl.VertexAttribPointer(colorAttrib, 3, gl.FLOAT, false, 6*4, gl.PtrOffset(12))
}
// PipVertex is a Vertex for the PIP (picture-in-picture).
// Layout: 2D position and UV — 16 bytes per vertex, matching the
// 4*4-byte stride below.
type PipVertex struct {
	Vert, UV mgl32.Vec2
}

// BindPipVertexAttributes binds the attributes per vertex.
func BindPipVertexAttributes(s uint32) {
	posAttrib := uint32(gl.GetAttribLocation(s, gl.Str("pos\x00")))
	gl.EnableVertexAttribArray(posAttrib)
	gl.VertexAttribPointer(posAttrib, 2, gl.FLOAT, false, 4*4, gl.PtrOffset(0))
	uvAttrib := uint32(gl.GetAttribLocation(s, gl.Str("uv\x00")))
	gl.EnableVertexAttribArray(uvAttrib)
	// UV starts after the 8-byte position.
	gl.VertexAttribPointer(uvAttrib, 2, gl.FLOAT, false, 4*4, gl.PtrOffset(8))
}
package gmeasure
import "time"
/*
Stopwatch provides a convenient abstraction for recording durations. There are two ways to make a Stopwatch:

You can make a Stopwatch from an Experiment via experiment.NewStopwatch(). This is how you first get a hold of a Stopwatch.

You can subsequently call stopwatch.NewStopwatch() to get a fresh Stopwatch.
This is only necessary if you need to record durations on a different goroutine as a single Stopwatch is not considered thread-safe.

The Stopwatch starts as soon as it is created. You can Pause() the stopwatch and Reset() it as needed.

Stopwatches refer back to their parent Experiment. They use this reference to record any measured durations back with the Experiment.
*/
type Stopwatch struct {
	Experiment *Experiment
	// t is the start of the current timing window (set by Reset).
	t time.Time
	// pauseT is when the current pause began (valid while !running).
	pauseT time.Time
	// pauseDuration accumulates total paused time since the last Reset.
	pauseDuration time.Duration
	// running is false while the stopwatch is paused.
	running bool
}
// newStopwatch builds a running Stopwatch bound to experiment, started at
// the moment of creation.
func newStopwatch(experiment *Experiment) *Stopwatch {
	return &Stopwatch{
		Experiment: experiment,
		t:          time.Now(),
		running:    true,
	}
}
/*
NewStopwatch returns a new Stopwatch pointing to the same Experiment as this Stopwatch.
The new Stopwatch starts running immediately.
*/
func (s *Stopwatch) NewStopwatch() *Stopwatch {
	return newStopwatch(s.Experiment)
}
/*
Record captures the amount of time that has passed since the Stopwatch was created or most recently Reset(). It records the duration on it's associated Experiment in a Measurement with the passed-in name.

Record takes all the decorators that experiment.RecordDuration takes (e.g. Annotation("...") can be used to annotate this duration)

Note that Record does not Reset the Stopwatch.  It does, however, return the Stopwatch so the following pattern is common:

    stopwatch := experiment.NewStopwatch()
    // first expensive operation
    stopwatch.Record("first operation").Reset() //records the duration of the first operation and resets the stopwatch.
    // second expensive operation
    stopwatch.Record("second operation").Reset() //records the duration of the second operation and resets the stopwatch.

omitting the Reset() after the first operation would cause the duration recorded for the second operation to include the time elapsed by both the first _and_ second operations.

The Stopwatch must be running (i.e. not paused) when Record is called.
*/
func (s *Stopwatch) Record(name string, args ...interface{}) *Stopwatch {
	if !s.running {
		panic("stopwatch is not running - call Resume or Reset before calling Record")
	}
	// Elapsed wall time minus any accumulated paused time.
	duration := time.Since(s.t) - s.pauseDuration
	s.Experiment.RecordDuration(name, duration, args...)
	return s
}
/*
Reset resets the Stopwatch.  Subsequent recorded durations will measure the time elapsed from the moment Reset was called.
If the Stopwatch was Paused it is unpaused after calling Reset.
*/
func (s *Stopwatch) Reset() *Stopwatch {
	s.running = true
	s.t = time.Now()
	s.pauseDuration = 0
	return s
}
/*
Pause pauses the Stopwatch.  While paused the Stopwatch does not accumulate elapsed time.  This is useful for ignoring expensive operations that are incidental to the behavior you are attempting to characterize.
Note: You must call Resume() before you can Record() subsequent measurements.

For example:

    stopwatch := experiment.NewStopwatch()
    // first expensive operation
    stopwatch.Record("first operation").Reset()
    // second expensive operation - part 1
    stopwatch.Pause()
    // something expensive that we don't care about
    stopwatch.Resume()
    // second expensive operation - part 2
    stopwatch.Record("second operation").Reset() // the recorded duration captures the time elapsed during parts 1 and 2 of the second expensive operation, but not the bit in between

The Stopwatch must be running when Pause is called.
*/
func (s *Stopwatch) Pause() *Stopwatch {
	if !s.running {
		panic("stopwatch is not running - call Resume or Reset before calling Pause")
	}
	s.running = false
	s.pauseT = time.Now()
	return s
}
/*
Resume resumes a paused Stopwatch.  Any time that elapses after Resume is called will be accumulated as elapsed time when a subsequent duration is Recorded.

The Stopwatch must be Paused when Resume is called.
*/
func (s *Stopwatch) Resume() *Stopwatch {
	if s.running {
		panic("stopwatch is running - call Pause before calling Resume")
	}
	s.running = true
	// Fold the just-finished pause into the running pause total.
	s.pauseDuration = s.pauseDuration + time.Since(s.pauseT)
	return s
}
package parser
import (
"bufio"
"io"
"log"
"github.com/jessejohnston/ProductIngester/product"
"github.com/pkg/errors"
"github.com/shopspring/decimal"
)
const (
	// RecordLength is the expected length of each flat-file record
	RecordLength = 142
	// TaxRate is the product tax rate.
	TaxRate = 0.07775
	// NumberFieldLength is the expected length of all number fields.
	NumberFieldLength = 8
	// CurrencyFieldLength is the expected length of all currency fields.
	CurrencyFieldLength = 8
	// FlagsFieldLength is the expected length of all flag fields.
	FlagsFieldLength = 9
)

var (
	// ErrBadParameter is the error returned when invalid input is provided.
	ErrBadParameter = errors.New("Invalid parameter")
)
// Converter is the behavior of a type that converts fixed-length text
// values (raw record fragments) to numbers, strings, currency amounts,
// and flag sets.
//go:generate mockery -name Converter
type Converter interface {
	ToNumber(text []byte) (int, error)
	ToString(text []byte) string
	ToCurrency(text []byte) (decimal.Decimal, error)
	ToFlags(text []byte) (product.Flags, error)
}
// Parser reads from an input source, producing parsed records in its
// records channel and per-line failures in its errors channel.
type Parser struct {
	src     io.Reader
	convert Converter
	records chan *product.Record
	errors  chan error
	done    chan bool
}

// New creates a new product parser reading from input and converting
// field fragments with c. It returns ErrBadParameter when either
// argument is nil.
func New(input io.Reader, c Converter) (*Parser, error) {
	if input == nil || c == nil {
		return nil, ErrBadParameter
	}
	return &Parser{
		src:     input,
		convert: c,
		records: make(chan *product.Record),
		errors:  make(chan error),
		done:    make(chan bool),
	}, nil
}
// Parse reads each line from the input and sends parsed records to the
// output channel. It returns the records, errors, and done channels;
// parsing happens asynchronously and all three channels are closed when
// the input is exhausted.
func (p *Parser) Parse() (<-chan *product.Record, <-chan error, <-chan bool) {
	// "go" runs p.execute() asynchronously so that the caller can start reading
	// records and errors off the returned channels.
	go p.execute()
	return p.records, p.errors, p.done
}
// execute drives the parse loop: it reads the input line by line, emits
// one record or one error per line, then signals completion and closes
// all channels.
func (p *Parser) execute() {
	defer func() {
		close(p.done)
		close(p.records)
		close(p.errors)
	}()

	scanner := bufio.NewScanner(p.src)
	for row := 0; scanner.Scan(); row++ {
		data := scanner.Bytes()
		record, err := p.ParseRecord(row, data)
		if err != nil {
			log.Println(errors.WithStack(err))
			p.errors <- err
		} else {
			p.records <- record
		}
	}
	// Surface read failures (previously dropped): a scanner error would
	// otherwise end the stream silently as if the input were exhausted.
	if err := scanner.Err(); err != nil {
		log.Println(errors.WithStack(err))
		p.errors <- err
	}
	p.done <- true
}
// ParseRecord parses one fixed-width flat-file record (exactly
// RecordLength bytes) into a product.Record. Field offsets below are the
// record's fixed layout; adjacent fields are separated by one byte.
// It returns a ParserError identifying the row, column, and fragment on
// any field that fails to convert.
func (p *Parser) ParseRecord(row int, text []byte) (*product.Record, error) {
	if len(text) != RecordLength {
		return nil, errors.WithStack(ErrBadParameter)
	}
	record := &product.Record{}
	var err error

	// Product ID: columns 0-7.
	fragment := text[0:8]
	record.ID, err = p.convert.ToNumber(fragment)
	if err != nil {
		return nil, NewParserError(row, 0, fragment, "Error parsing ID", err)
	}
	// Description: columns 9-67.
	fragment = text[9:68]
	record.Description = p.convert.ToString(fragment)
	// Singular price: columns 69-76.
	fragment = text[69:77]
	singularPrice, err := p.convert.ToCurrency(fragment)
	if err != nil {
		return nil, NewParserError(row, 69, fragment, "Error parsing singular price", err)
	}
	// If singular price is zero, read the split price and use it instead.
	if singularPrice.Equal(decimal.Zero) {
		fragment = text[87:95]
		splitPrice, err := p.convert.ToCurrency(fragment)
		if err != nil {
			return nil, NewParserError(row, 87, fragment, "Error parsing split price", err)
		}
		fragment = text[105:113]
		forX, err := p.convert.ToNumber(fragment)
		if err != nil {
			return nil, NewParserError(row, 105, fragment, "Error parsing for X", err)
		}
		if forX == 0 {
			// NOTE(review): err is nil here (checked just above), so the
			// wrapped cause of this ParserError is nil — confirm intended.
			return nil, NewParserError(row, 105, fragment, "Error calculating split price (zero for X)", err)
		}
		// Round to 4 decimal places, half down
		record.Price = splitPrice.Div(decimal.New(int64(forX), 0)).RoundBank(4)
	} else {
		record.Price = singularPrice
	}
	// Singular promotional price: columns 78-85.
	fragment = text[78:86]
	singularPromoPrice, err := p.convert.ToCurrency(fragment)
	if err != nil {
		return nil, NewParserError(row, 78, fragment, "Error parsing singular promotional price", err)
	}
	// If singular promo price is zero, read the split promo price and use it instead.
	if singularPromoPrice.Equal(decimal.Zero) {
		fragment = text[96:104]
		splitPromoPrice, err := p.convert.ToCurrency(fragment)
		if err != nil {
			return nil, NewParserError(row, 96, text[96:104], "Error parsing split promo price", err)
		}
		if splitPromoPrice.GreaterThan(decimal.Zero) {
			fragment = text[114:122]
			promoForX, err := p.convert.ToNumber(fragment)
			if err != nil {
				return nil, NewParserError(row, 114, fragment, "Error parsing promo for X", err)
			}
			if promoForX == 0 {
				// NOTE(review): err is nil here as well — see note above.
				return nil, NewParserError(row, 114, fragment, "Error calculating promo split price (zero for X)", err)
			}
			// Round to 4 decimal places, half down
			record.PromoPrice = splitPromoPrice.Div(decimal.New(int64(promoForX), 0)).RoundBank(4)
		}
	} else {
		record.PromoPrice = singularPromoPrice
	}
	// Display prices are formatted to two decimal places.
	record.DisplayPrice = "$" + record.Price.StringFixed(2)
	record.PromoDisplayPrice = "$" + record.PromoPrice.StringFixed(2)
	// Flags: columns 123-131 control unit of sale and taxability.
	fragment = text[123:132]
	flags, err := p.convert.ToFlags(fragment)
	if err != nil {
		return nil, NewParserError(row, 123, fragment, "Error parsing flags", err)
	}
	if flags.PerWeight() {
		record.Unit = product.UnitPound
	} else {
		record.Unit = product.UnitEach
	}
	if flags.Taxable() {
		record.TaxRate = decimal.NewFromFloat32(TaxRate)
	} else {
		record.TaxRate = decimal.Zero
	}
	// Size: columns 133-141.
	record.Size = p.convert.ToString(text[133:142])
	return record, nil
}
package artmatlang
import (
// "errors"
"fmt"
"math"
)
// syntaxTreeNode is one node of the arithmetic syntax tree. A node either
// holds a literal value (oper 'V') or an operator applied to its children.
type syntaxTreeNode struct {
	oper  byte
	val   float64
	nodes []syntaxTreeNode
}

// validate verifies that the node makes sense: it must carry an acceptable
// number of children for its operation (e.g. division takes exactly two
// operands). Value nodes and unrecognized operators always validate.
func (n *syntaxTreeNode) validate() error {
	switch n.oper {
	case '+', '-', '*':
		return validNodeCount(n.nodes, 2, false)
	case '/', '^':
		return validNodeCount(n.nodes, 2, true)
	}
	return nil
}

// validNodeCount checks that nodes holds the required number of children:
// exactly count when exact is set, at least count otherwise.
func validNodeCount(nodes []syntaxTreeNode, count int, exact bool) error {
	got := len(nodes)
	if exact && got != count {
		return fmt.Errorf("Invalid number of nodes: found %v, expected %v", got, count)
	}
	if !exact && got < count {
		return fmt.Errorf("Invalid number of nodes: found %v, expected at least %v", got, count)
	}
	return nil
}

// evaluate computes this node's value from its (already evaluated)
// children. Value nodes and unrecognized operators are left untouched.
func (n *syntaxTreeNode) evaluate() {
	switch n.oper {
	case '+':
		n.add()
	case '-':
		n.subtract()
	case '*':
		n.multiply()
	case '/':
		n.divide()
	case '^':
		n.exp()
	}
}

// traverse evaluates the tree bottom-up: children first (value leaves are
// skipped since they need no work), then this node.
func (n *syntaxTreeNode) traverse() {
	for i := range n.nodes {
		if n.nodes[i].oper != 'V' {
			n.nodes[i].traverse()
		}
	}
	n.evaluate()
}

// add stores the sum of all child values.
func (n *syntaxTreeNode) add() {
	total := 0.0
	for i := range n.nodes {
		total += n.nodes[i].val
	}
	n.val = total
}

// subtract stores the first child's value minus all remaining children.
func (n *syntaxTreeNode) subtract() {
	result := n.nodes[0].val
	for _, child := range n.nodes[1:] {
		result -= child.val
	}
	n.val = result
}

// multiply stores the product of all child values.
func (n *syntaxTreeNode) multiply() {
	result := n.nodes[0].val
	for _, child := range n.nodes[1:] {
		result *= child.val
	}
	n.val = result
}

// divide stores the quotient of the first child over the second.
func (n *syntaxTreeNode) divide() {
	n.val = n.nodes[0].val / n.nodes[1].val
}

// exp stores the first child raised to the power of the second.
func (n *syntaxTreeNode) exp() {
	n.val = math.Pow(n.nodes[0].val, n.nodes[1].val)
}
package containers
import "github.com/matrixorigin/matrixone/pkg/vm/engine/tae/types"
// MakeVector constructs a Vector whose element Go type matches the given
// column type's Oid (e.g. Type_INT32 -> int32, CHAR/VARCHAR/JSON ->
// []byte). nullable and opts are forwarded to the generic constructor.
// It panics on an unsupported Oid.
func MakeVector(typ types.Type, nullable bool, opts ...*Options) (vec Vector) {
	switch typ.Oid {
	case types.Type_BOOL:
		vec = NewVector[bool](typ, nullable, opts...)
	case types.Type_INT8:
		vec = NewVector[int8](typ, nullable, opts...)
	case types.Type_INT16:
		vec = NewVector[int16](typ, nullable, opts...)
	case types.Type_INT32:
		vec = NewVector[int32](typ, nullable, opts...)
	case types.Type_INT64:
		vec = NewVector[int64](typ, nullable, opts...)
	case types.Type_UINT8:
		vec = NewVector[uint8](typ, nullable, opts...)
	case types.Type_UINT16:
		vec = NewVector[uint16](typ, nullable, opts...)
	case types.Type_UINT32:
		vec = NewVector[uint32](typ, nullable, opts...)
	case types.Type_UINT64:
		vec = NewVector[uint64](typ, nullable, opts...)
	case types.Type_DECIMAL64:
		vec = NewVector[types.Decimal64](typ, nullable, opts...)
	case types.Type_DECIMAL128:
		vec = NewVector[types.Decimal128](typ, nullable, opts...)
	case types.Type_FLOAT32:
		vec = NewVector[float32](typ, nullable, opts...)
	case types.Type_FLOAT64:
		vec = NewVector[float64](typ, nullable, opts...)
	case types.Type_DATE:
		vec = NewVector[types.Date](typ, nullable, opts...)
	case types.Type_TIMESTAMP:
		vec = NewVector[types.Timestamp](typ, nullable, opts...)
	case types.Type_DATETIME:
		vec = NewVector[types.Datetime](typ, nullable, opts...)
	case types.Type_CHAR, types.Type_VARCHAR, types.Type_JSON:
		// Variable-length types share a []byte element representation.
		vec = NewVector[[]byte](typ, nullable, opts...)
	default:
		panic("not support")
	}
	return
}
// BuildBatch creates a Batch with one vector per attribute. attrs,
// colTypes, and nullables are parallel slices; capacity pre-sizes each
// vector via Options.
func BuildBatch(
	attrs []string,
	colTypes []types.Type,
	nullables []bool,
	capacity int) *Batch {
	opts := new(Options)
	opts.Capacity = capacity
	bat := NewBatch()
	for i, attr := range attrs {
		vec := MakeVector(colTypes[i], nullables[i], opts)
		bat.AddVector(attr, vec)
	}
	return bat
}
// NewEmptyBatch returns a Batch with no columns, ready for AddVector.
func NewEmptyBatch() *Batch {
	return &Batch{
		Attrs:   make([]string, 0),
		Vecs:    make([]Vector, 0),
		nameidx: make(map[string]int),
	}
}
package hfile
import (
"bytes"
)
/*
vintAndLen decodes one of Hadoop's variable-length signed ints from the
front of b, returning the decoded value and the number of bytes consumed.

Encoding summary (first byte, unsigned view):
  0x00-0x7f: the value itself (positive), length 1.
  0x90-0xff: the value minus 0x100 (negative), length 1.
  0x80-0x87: negative multi-byte value; total length is (0x88-first)+1.
  0x88-0x8f: positive multi-byte value; total length is (0x90-first)+1.
Multi-byte payloads are big-endian; negative values store the bitwise
complement of the magnitude.

See hadoop's WritableUtils.readVInt for the reference implementation.
*/
func vintAndLen(b []byte) (int, int) {
	first := b[0]
	switch {
	case first < 0x80:
		return int(first), 1
	case first >= 0x90:
		return int(first) - 0x100, 1
	}
	negative := first < 0x88
	var size int
	if negative {
		size = int(0x88-first) + 1
	} else {
		size = int(0x90-first) + 1
	}
	// Accumulate the big-endian payload bytes following the length byte.
	value := 0
	for _, payload := range b[1:size] {
		value = value<<8 | int(payload)
	}
	if negative {
		value ^= -1
	}
	return value, size
}
func vint(r *bytes.Reader) (int, error) {
if first, err := r.ReadByte(); err != nil {
return -1, err
} else {
if first < 0x80 {
return int(first), nil
}
if first >= 0x90 {
return int(first) - 0x100, nil
}
count := 0
neg := false
if first < 0x88 {
neg = true
count = int(0x88 - first)
} else {
count = int(0x90 - first)
}
ret := 0
for i := 0; i < count; i++ {
if b, err := r.ReadByte(); err != nil {
return -1, err
} else {
ret = (ret << 8) | int(b)
}
}
if neg {
ret = (ret ^ -1)
}
return ret, nil
}
} | hfile/vint.go | 0.55929 | 0.486941 | vint.go | starcoder |
package service
// Stability is a type that represents the relative stability of a service
// module
type Stability int

const (
	// StabilityExperimental represents relative stability of the most immature
	// service modules. At this level of stability, we're not even certain we've
	// built the right thing!
	StabilityExperimental Stability = iota
	// StabilityPreview represents relative stability of modules we believe are
	// approaching a stable state.
	StabilityPreview
	// StabilityStable represents relative stability of the mature, production-
	// ready service modules.
	StabilityStable
)

// ProvisioningParameters is an interface to be implemented by module-specific
// types that represent provisioning parameters. This interface doesn't require
// any functions to be implemented. It exists to improve the clarity of function
// signatures and documentation.
type ProvisioningParameters interface{}

// InstanceDetails is an interface to be implemented by service-specific
// types that represent the non-sensitive details of a service instance.
type InstanceDetails interface{}

// SecureInstanceDetails is an interface to be implemented by service-specific
// types that represent the secure (sensitive) details of a service instance.
type SecureInstanceDetails interface{}

// UpdatingParameters is an interface to be implemented by module-specific
// types that represent updating parameters. This interface doesn't require
// any functions to be implemented. It exists to improve the clarity of function
// signatures and documentation.
type UpdatingParameters interface{}

// BindingParameters is an interface to be implemented by module-specific types
// that represent binding parameters. This interface doesn't require any
// functions to be implemented. It exists to improve the clarity of function
// signatures and documentation.
type BindingParameters interface{}

// BindingDetails is an interface to be implemented by service-specific types
// that represent binding details. This interface doesn't require any functions
// to be implemented. It exists to improve the clarity of function signatures
// and documentation.
type BindingDetails interface{}

// Credentials is an interface to be implemented by module-specific types
// that represent service credentials. This interface doesn't require any
// functions to be implemented. It exists to improve the clarity of function
// signatures and documentation.
type Credentials interface{}
package entity
import (
"github.com/go-gl/mathgl/mgl32"
"github.com/veandco/go-sdl2/sdl"
)
// Controllable is implemented by all entities that can be controlled with a
// controller (e.g. the input controller, or one of the AI controllers).
// Deltas passed to both methods are normalized; implementations are expected
// to scale them by the entity's own speed settings.
type Controllable interface {
	// Move moves the entity an amount forwards, right, and up. `delta` is
	// normalized, and should be multiplied by the entity's move speed prior to
	// applying the movement.
	Move(delta mgl32.Vec3)
	// Look modifies the look direction of an entity by an amount. `delta` is
	// normalized, and should be multiplied by the entity's look speed prior to
	// applying the rotation.
	Look(delta mgl32.Vec2)
}
// Controller is implemented by all entity controllers (e.g. the input
// controller, or the mob AI controllers). Implementations receive raw SDL
// events and a per-frame update tick.
type Controller interface {
	// HandleEvent is called whenever a user event is triggered (used by the
	// input controller).
	HandleEvent(evt sdl.Event)
	// Update is called every frame to modify an entity's position and look
	// direction.
	Update(entity Controllable)
}
// InputController controls an entity's movement and look direction based on
// user input from the keyboard and mouse.
type InputController struct {
	IsKeyDown [256]bool // Whether a key is pressed, indexed by SDL scancode
	mouseX, mouseY int32 // Accumulates relative mouse movement over a frame
}
// NewInputController creates a new input controller instance with no keys
// pressed and no accumulated mouse motion.
func NewInputController() *InputController {
	return new(InputController)
}
// HandleEvent implements the `Controller` interface. It records key up/down
// transitions and accumulates relative mouse motion until the next Update.
func (c *InputController) HandleEvent(evt sdl.Event) {
	switch e := evt.(type) {
	case *sdl.KeyboardEvent:
		// Guard against scancodes beyond the tracked range to avoid an
		// index-out-of-bounds panic.
		if idx := int(e.Keysym.Scancode); idx < len(c.IsKeyDown) {
			c.IsKeyDown[idx] = e.State == sdl.PRESSED
		}
	case *sdl.MouseMotionEvent:
		c.mouseX += e.XRel
		c.mouseY += e.YRel
	}
}
// Update implements the `Controller` interface.
func (c *InputController) Update(entity Controllable) {
// Update the entity's look direction based on mouse input. We do this
// first so that the entity's local coordinate system is updated before
// applying movement
horizontalDelta := float32(c.mouseX)
verticalDelta := float32(c.mouseY)
entity.Look(mgl32.Vec2{horizontalDelta, verticalDelta})
c.mouseX, c.mouseY = 0.0, 0.0
// Update position based on keyboard input
x, y, z := float32(0.0), float32(0.0), float32(0.0)
if c.IsKeyDown[sdl.SCANCODE_W] {
z += 1.0
}
if c.IsKeyDown[sdl.SCANCODE_S] {
z -= 1.0
}
if c.IsKeyDown[sdl.SCANCODE_A] {
x -= 1.0
}
if c.IsKeyDown[sdl.SCANCODE_D] {
x += 1.0
}
if c.IsKeyDown[sdl.SCANCODE_SPACE] {
y += 1.0
}
if c.IsKeyDown[sdl.SCANCODE_LSHIFT] || c.IsKeyDown[sdl.SCANCODE_RSHIFT] {
y -= 1.0
}
entity.Move(mgl32.Vec3{x, y, z})
} | entity/input.go | 0.753104 | 0.459076 | input.go | starcoder |
package misc
import (
"context"
"time"
"al.go/terminal"
"al.go/terminal/objects/rectangle"
"al.go/visualizer"
)
// MovingRectangle is an animation of a rectangle moving around the screen,
// bouncing off the borders.
type MovingRectangle struct {
	rect *rectangle.Rectangle // the rectangle being animated
	state visualizer.AnimationState // whether the animation is currently running
}
// newRectangle builds a rectangle with its top-left corner at (x, y) and
// the given width and height.
func newRectangle(x int, y int, w, h int) *rectangle.Rectangle {
	return &rectangle.Rectangle{
		TopLeftCorner: terminal.Point{X: x, Y: y},
		Width:         w,
		Height:        h,
	}
}
// NewMovingRectangleAnimation creates the moving rectangle animation in a
// paused (not running) state; Run starts it.
func NewMovingRectangleAnimation() *MovingRectangle {
	mr := &MovingRectangle{}
	mr.state.IsRunning = false
	return mr
}
// Run drives the animation loop: on every tick (while running) the screen is
// cleared, collisions are handled, and the rectangle is moved and redrawn.
// Signals pause/resume the animation; cancelling ctx terminates the loop.
func (mv *MovingRectangle) Run(ctx context.Context, scr terminal.Screen, ticker *time.Ticker, signal <-chan visualizer.Signal) {
	// Palette cycled through on each border collision.
	colors := []terminal.Color{terminal.ColorBlue, terminal.ColorGreen, terminal.ColorYellow, terminal.ColorAqua, terminal.ColorGray}
	currentColorIdx := 0
	w, _ := scr.Size()
	if w/2%2 == 1 {
		// NOTE(review): widens odd half-widths by two cells, presumably so
		// the starting x position (w-1)/2 lines up with the cell grid —
		// confirm intent.
		w += 2
	}
	velX, velY := 1, 1 // initial velocity: one cell right and one down per tick
	mv.state.IsRunning = true
	mv.rect = newRectangle((w-1)/2, 3, 4, 2) // 4x2 rectangle near the top centre
	for {
		select {
		case <-ticker.C:
			if mv.state.IsRunning {
				scr.Clear()
				velX, velY, currentColorIdx = mv.handleCollision(scr, velX, velY, currentColorIdx, len(colors))
				mv.rect.Move(velX, velY)
				mv.rect.Draw(scr, colors[currentColorIdx])
				scr.Show()
			}
		case s := <-signal:
			mv.handleSignal(s)
		case <-ctx.Done():
			return
		}
	}
}
// handleCollision flips the rectangle's velocity components when it hits a
// screen border and cycles to the next colour on any collision. It returns
// the updated x velocity, y velocity, and colour index.
func (mv *MovingRectangle) handleCollision(scr terminal.Screen, velX, velY, currentColorIdx, lenColors int) (int, int, int) {
	any, top, bottom, left, right := mv.rect.HasHitScreenBorder(scr)
	if any {
		// Advance the palette with wrap-around in one step (the previous
		// increment-then-conditionally-mod was equivalent but noisier).
		currentColorIdx = (currentColorIdx + 1) % lenColors
	}
	if top || bottom {
		velY *= -1
	}
	if left || right {
		velX *= -1
	}
	return velX, velY, currentColorIdx
}
// CurrentState returns the animation's current run state.
func (mv *MovingRectangle) CurrentState() visualizer.AnimationState {
	return mv.state
}
func (mv *MovingRectangle) handleSignal(s visualizer.Signal) {
switch s {
case visualizer.Pause:
mv.state.IsRunning = false
case visualizer.Start:
mv.state.IsRunning = true
}
} | visualizer/animations/misc/moving-rectangle.go | 0.715325 | 0.425665 | moving-rectangle.go | starcoder |
package heaputil
/* getParentIndex returns the index of the parent node for a given child
   index into a slice or array acting as a binary heap. A left child sits
   at 2p+1 and a right child at 2p+2, so with integer division (i-1)/2 is
   the parent in both cases — the previous parity branch computed the same
   value twice. Valid for any child index >= 1. */
func getParentIndex(cIdx int) int {
	return (cIdx - 1) / 2
}
// siftDown restores the max-heap property for the subtree rooted at parent,
// repeatedly swapping the parent with its largest child until neither child
// is larger than it (or the subtree bottom is reached).
func siftDown(h []int, parent int) {
	n := len(h)
	for {
		left := 2*parent + 1
		if left >= n {
			return
		}
		largest := parent
		if h[largest] < h[left] {
			largest = left
		}
		if right := left + 1; right < n && h[largest] < h[right] {
			largest = right
		}
		if largest == parent {
			return
		}
		h[parent], h[largest] = h[largest], h[parent]
		parent = largest
	}
}
// MaxIntHeapify rearranges a slice of integers in place into a max heap.
func MaxIntHeapify(h []int) {
	n := len(h)
	if n < 2 {
		// Zero or one element is already a valid "heap".
		return
	}
	if n == 2 {
		// Two elements: a single compare-and-swap suffices.
		if h[0] < h[1] {
			h[0], h[1] = h[1], h[0]
		}
		return
	}
	// Floyd's bottom-up construction: sift down every internal node,
	// starting from the parent of the last element.
	for idx := getParentIndex(n - 1); idx >= 0; idx-- {
		siftDown(h, idx)
	}
}
// MaxIntHeapPush adds a value to a max-int heap. The returned slice will be
// a heap regardless of whether the input slice was one to begin with,
// because the whole slice is re-heapified after appending.
func MaxIntHeapPush(s []int, v int) []int {
	heap := append(s, v)
	MaxIntHeapify(heap)
	return heap
}
//MaxIntHeapPop - take the max value from the heap and re-heapify. If the
//initial slice isn't a heap to start, you will get garbage out.
func MaxIntHeapPop(s []int) (int, []int) {
cpy := make([]int, len(s), cap(s))
copy(cpy, s)
maxVal := cpy[0]
lastIndex := len(cpy) - 1
cpy[0] = cpy[lastIndex]
cpy = cpy[:lastIndex]
MaxIntHeapify(cpy)
return maxVal, cpy
}
//MaxIntHeapSort - take a slice of integers and sort it in place, max value last
func MaxIntHeapSort(s []int) {
MaxIntHeapify(s) // just in case it's not a heap when we get it.
for idx := len(s) - 1; idx > 0; idx-- {
s[0], s[idx] = s[idx], s[0]
MaxIntHeapify(s[:idx])
}
} | maxint.go | 0.75985 | 0.555797 | maxint.go | starcoder |
package fiscal
import (
"time"
)
// Day is the length of one calendar day, used throughout for fiscal-date
// arithmetic.
const Day = 24 * time.Hour
// YearForDate returns the fiscal year (as used by Apple) for a given date.
// Dates after the fiscal year-end of their calendar year roll over into the
// next fiscal year.
func YearForDate(date time.Time) int {
	_, end := Year(date.Year())
	if end.Before(date) {
		return date.Year() + 1
	}
	// The redundant else branch after the return above has been flattened.
	return date.Year()
}
// QuarterForDate returns the fiscal year and quarter (as used by Apple) for
// a given date.
func QuarterForDate(date time.Time) (year, quarter int) {
	year = date.Year()
	yearStart, yearEnd := Year(year)
	if yearEnd.Before(date) {
		// The date falls after this calendar year's fiscal year-end, so it
		// belongs to the following fiscal year.
		year++
		yearStart = yearEnd.Add(time.Nanosecond)
	}
	elapsedDays := int(date.Sub(yearStart).Round(time.Hour).Hours() / 24)
	// A quarter spans at least 91 days (98 in a 53-week year), so start the
	// scan at the earliest quarter the elapsed time could fall into.
	for quarter = 1 + elapsedDays/98; ; quarter++ {
		qStart, qEnd := Quarter(year, quarter)
		if !date.Before(qStart) && !date.After(qEnd) {
			return year, quarter
		}
	}
}
// PeriodForDate returns the fiscal year and period (as used by Apple) for a
// given date.
func PeriodForDate(date time.Time) (year, period int) {
	year = date.Year()
	yearStart, yearEnd := Year(year)
	if yearEnd.Before(date) {
		// The date falls after this calendar year's fiscal year-end, so it
		// belongs to the following fiscal year.
		year++
		yearStart = yearEnd.Add(time.Nanosecond)
	}
	elapsedDays := int(date.Sub(yearStart).Round(time.Hour).Hours() / 24)
	// A period spans at least 28 days (35 for extended periods), so start
	// the scan at the earliest period the elapsed time could fall into.
	for period = 1 + elapsedDays/35; ; period++ {
		pStart, pEnd := Period(year, period)
		if !date.Before(pStart) && !date.After(pEnd) {
			return year, period
		}
	}
}
// Year returns the start and end date of a fiscal year as used by Apple. The
// year must be 2006 or higher, returns start and end date of the year. End is
// the last nanosecond before the start of the next year.
func Year(year int) (start, end time.Time) {
	// Anchor: Apple's fiscal year 2006 began on September 25, 2005 (UTC).
	first := time.Date(2005, time.September, 25, 0, 0, 0, 0, time.UTC)
	// Walk forward one fiscal year at a time until the year whose end falls
	// in (or after) the requested calendar year is found.
	for start = first; true; start = end {
		// A regular fiscal year is 52 weeks (364 days).
		end = start.Add(364 * Day)
		if end.Day() < 25 {
			// NOTE(review): this day-of-month check appears to detect
			// 53-week fiscal years and extends them by one week — confirm
			// it matches Apple's actual 52/53-week calendar rules.
			end = end.Add(7 * Day)
		}
		if end.Year() >= year {
			return start, end.Add(-time.Nanosecond)
		}
	}
	// Unreachable; required because the loop condition is not the empty
	// condition and the function has named results.
	return
}
// Quarter returns the start and end date of a fiscal quarter as used by
// Apple. The year must be 2006 or higher, The quarter must be in the range
// 1..4. Quarter returns start and end date of the quarter. End is the last
// nanosecond before the start of the next quarter. Out-of-range quarters
// are clamped to 1 or 4 rather than rejected.
func Quarter(year, quarter int) (start, end time.Time) {
	start, end = Year(year)
	// Clamp quarter into 1..4, then make it zero-based.
	if quarter < 1 {
		quarter = 1
	}
	if quarter > 4 {
		quarter = 4
	}
	quarter--
	// A regular quarter is 13 weeks (91 days), measured from year start.
	qstart := time.Duration(quarter * 91)
	qend := time.Duration((quarter + 1) * 91)
	if end.Sub(start).Hours()/24 > 364 {
		// 53-week year: the 3rd period is 35 days so the first quarter is
		// 98 days; later quarters shift by one week.
		qstart += time.Duration(((quarter + 3) / 4) * 7)
		qend += time.Duration(((quarter + 4) / 4) * 7)
	}
	return start.Add(qstart * Day), start.Add(qend * Day).Add(-time.Nanosecond)
}
// Period returns the start and end date of a fiscal period as used by Apple.
// A period is either 35 or 28 days and so is roughly equivalent to a single
// month. The year must be 2006 or higher. The period must be in the range
// 1..12. Period returns start and end date of the period. End is the last
// nanosecond before the start of the next period. Out-of-range periods are
// clamped to 1 or 12 rather than rejected.
func Period(year, period int) (start, end time.Time) {
	start, end = Year(year)
	// Clamp period into 1..12, then make it zero-based.
	if period < 1 {
		period = 1
	}
	if period > 12 {
		period = 12
	}
	period--
	// Base layout: 4-week (28-day) periods, with one extra week for every
	// completed group of three periods (one 5-week period per quarter).
	pstart := time.Duration(period*28 + ((period+2)/3)*7)
	pend := time.Duration((period+1)*28 + ((period+3)/3)*7)
	if end.Sub(start).Hours()/24 > 364 {
		// 53-week year: the 3rd period is 35 days, shifting later periods
		// by one week.
		pstart += time.Duration(((period + 9) / 12) * 7)
		pend += time.Duration(((period + 10) / 12) * 7)
	}
	return start.Add(pstart * Day), start.Add(pend * Day).Add(-time.Nanosecond)
} | fiscal/fiscal.go | 0.82734 | 0.517205 | fiscal.go | starcoder
package cmd
import (
"fmt"
"strconv"
"strings"
"github.com/jaredbancroft/aoc2020/pkg/helpers"
"github.com/jaredbancroft/aoc2020/pkg/shuttle"
"github.com/spf13/cobra"
)
// day13Cmd represents the day13 command
var day13Cmd = &cobra.Command{
	Use:   "day13",
	Short: "Advent of Code 2020 - Day13: Shuttle Search",
	Long: `--- Day 13: Shuttle Search ---
Your ferry can make it safely to a nearby port, but it won't get much further. When you call to
book another ship, you discover that no ships embark from that port to your vacation island.
You'll need to get from the port to the nearest airport.
Fortunately, a shuttle bus service is available to bring you from the sea port to the airport!
Each bus has an ID number that also indicates how often the bus leaves for the airport.
Bus schedules are defined based on a timestamp that measures the number of minutes since some
fixed reference point in the past. At timestamp 0, every bus simultaneously departed from the
sea port. After that, each bus travels to the airport, then various other locations, and finally
returns to the sea port to repeat its journey forever.
The time this loop takes a particular bus is also its ID number: the bus with ID 5 departs from
the sea port at timestamps 0, 5, 10, 15, and so on. The bus with ID 11 departs at 0, 11, 22, 33,
and so on. If you are there when the bus departs, you can ride that bus to the airport!
Your notes (your puzzle input) consist of two lines. The first line is your estimate of the earliest
timestamp you could depart on a bus. The second line lists the bus IDs that are in service according
to the shuttle company; entries that show x must be out of service, so you decide to ignore them.
To save time once you arrive, your goal is to figure out the earliest bus you can take to the airport.
(There will be exactly one such bus.)
For example, suppose you have the following notes:
939
7,13,x,x,59,x,31,19
Here, the earliest timestamp you could depart is 939, and the bus IDs in service are 7, 13, 59, 31,
and 19. Near timestamp 939, these bus IDs depart at the times marked D:
time bus 7 bus 13 bus 59 bus 31 bus 19
929 . . . . .
930 . . . D .
931 D . . . D
932 . . . . .
933 . . . . .
934 . . . . .
935 . . . . .
936 . D . . .
937 . . . . .
938 D . . . .
939 . . . . .
940 . . . . .
941 . . . . .
942 . . . . .
943 . . . . .
944 . . D . .
945 D . . . .
946 . . . . .
947 . . . . .
948 . . . . .
949 . D . . .
The earliest bus you could take is bus ID 59. It doesn't depart until timestamp 944, so you would
need to wait 944 - 939 = 5 minutes before it departs. Multiplying the bus ID by the number of
minutes you'd need to wait gives 295.
What is the ID of the earliest bus you can take to the airport multiplied by the number of
minutes you'll need to wait for that bus?
`,
	RunE: func(cmd *cobra.Command, args []string) error {
		// Input: line 0 is the earliest possible departure timestamp,
		// line 1 the comma-separated bus IDs ("x" = out of service).
		schedules, err := helpers.ReadStringFile(input)
		if err != nil {
			return err
		}
		fmt.Println(schedules)
		targetTime, _ := strconv.Atoi(schedules[0])
		buses := strings.Split(schedules[1], ",")
		// Build one shuttle.Bus per in-service ID, all sharing one timer.
		timer := shuttle.NewTimer()
		busList := []*shuttle.Bus{}
		for _, bus := range buses {
			if bus != "x" {
				busID, _ := strconv.Atoi(bus)
				newBus := shuttle.NewBus(busID, timer)
				busList = append(busList, newBus)
			}
		}
		// Part 1: advance the shared timer one minute at a time; the first
		// bus departing at or after targetTime gives the answer
		// (ID * minutes waited).
		// NOTE(review): the scan starts at 0 rather than targetTime, so
		// every tick before the target does no useful work — confirm
		// whether starting at i := targetTime is safe for shuttle.Timer.
		i := 0
		for {
			exit := 0
			timer.SetTime(i)
			if i >= targetTime {
				for _, bus := range busList {
					if bus.IsDeparting() {
						bus.Display()
						fmt.Println("Part 1: ", bus.GetID()*(bus.GetTime()-targetTime))
						exit++
					}
				}
			}
			if exit > 0 {
				break
			}
			i++
			exit = 0 // NOTE(review): dead store; exit is re-declared each iteration.
		}
		// Part 2: find the earliest timestamp t where the bus at list
		// position k departs at t+k for every k. Out-of-service entries
		// become period 1 (always departing). The search steps by the
		// product of the bus periods already matched — a sieve over the
		// congruences.
		newBusList := []int{}
		for _, bus := range buses {
			if bus == "x" {
				bus = "1"
			}
			intBus, _ := strconv.Atoi(bus)
			newBusList = append(newBusList, intBus)
		}
		time := 1
		for {
			timeOffset := 1
			exit := true
			for i, bus := range newBusList {
				if (time+i)%bus != 0 {
					exit = false
					break
				}
				timeOffset = timeOffset * bus
			}
			if exit {
				fmt.Println(time)
				break
			}
			time = time + timeOffset
		}
		return nil
	},
}
// init registers the day13 command with the root CLI command.
func init() {
	rootCmd.AddCommand(day13Cmd)
} | cmd/day13.go | 0.557364 | 0.411939 | day13.go | starcoder
package generate
import (
"image"
"image/color"
"math"
)
// Direction selects the axis along which a gradient runs, from the first
// color to the second color.
type Direction int

const (
	// H - horizontal direction (colour varies along x)
	H Direction = iota
	// V - vertical direction (colour varies along y)
	V
)
// normalize linearly rescales value from the range [min, max] onto the
// fixed range [-6, 6] used as the sigmoid's input domain.
func normalize(value float64, min float64, max float64) float64 {
	const lower, upper = -6.0, 6.0
	fraction := (value - min) / (max - min)
	return fraction*(upper-lower) + lower
}
// LinearGradient generates a gradient image using a linear function: the
// colour fades from startColor at the first row/column to endColor at the
// last, along the given direction. Alpha is fixed at 255.
func LinearGradient(size image.Point, startColor color.RGBA, endColor color.RGBA, direction Direction) *image.RGBA {
	// gradFunc scales one colour channel by percent (expected in 0..1).
	gradFunc := func(colorChannel uint8, percent float64) uint8 {
		return uint8(math.Floor(float64(colorChannel) * percent))
	}
	res := image.NewRGBA(image.Rect(0, 0, size.X, size.Y))
	switch direction {
	case V:
		step := 1.0 / float64(size.Y)
		percent := 0.0
		for y := 0; y < size.Y; y++ {
			c := color.RGBA{
				R: gradFunc(startColor.R, 1.0-percent) + gradFunc(endColor.R, percent),
				G: gradFunc(startColor.G, 1.0-percent) + gradFunc(endColor.G, percent),
				B: gradFunc(startColor.B, 1.0-percent) + gradFunc(endColor.B, percent),
				A: 255,
			}
			percent += step
			for x := 0; x < size.X; x++ {
				res.SetRGBA(x, y, c)
			}
		}
	case H:
		step := 1.0 / float64(size.X)
		percent := 0.0
		for x := 0; x < size.X; x++ {
			// BUG FIX: the start colour was previously scaled by -percent,
			// which converts a negative float to uint8 (garbage values);
			// it must fade out with 1.0-percent, mirroring the V case.
			c := color.RGBA{
				R: gradFunc(startColor.R, 1.0-percent) + gradFunc(endColor.R, percent),
				G: gradFunc(startColor.G, 1.0-percent) + gradFunc(endColor.G, percent),
				B: gradFunc(startColor.B, 1.0-percent) + gradFunc(endColor.B, percent),
				A: 255,
			}
			percent += step
			for y := 0; y < size.Y; y++ {
				res.SetRGBA(x, y, c)
			}
		}
	}
	return res
}
// SigmoidalGradient generates a gradient image using the sigmoid ( f(x) = 1 / (1 + exp(-x)) ) function.
func SigmoidalGradient(size image.Point, startColor color.RGBA, endColor color.RGBA, direction Direction) *image.RGBA {
sigmoid := func(val float64) float64 {
return 1.0 / (1.0 + math.Exp(-val))
}
res := image.NewRGBA(image.Rect(0, 0, size.X, size.Y))
switch direction {
case V:
for y := 0; y < size.Y; y++ {
percent := sigmoid(normalize(float64(y), 0, float64(size.Y)))
c := color.RGBA{
R: uint8((1.0-percent)*float64(startColor.R)) + uint8(percent*float64(endColor.R)),
G: uint8((1.0-percent)*float64(startColor.G)) + uint8(percent*float64(endColor.G)),
B: uint8((1.0-percent)*float64(startColor.B)) + uint8(percent*float64(endColor.B)),
A: 255,
}
for x := 0; x < size.X; x++ {
res.SetRGBA(x, y, c)
}
}
case H:
for x := 0; x < size.X; x++ {
percent := sigmoid(normalize(float64(x), 0, float64(size.X)))
c := color.RGBA{
R: uint8((1.0-percent)*float64(startColor.R)) + uint8(percent*float64(endColor.R)),
G: uint8((1.0-percent)*float64(startColor.G)) + uint8(percent*float64(endColor.G)),
B: uint8((1.0-percent)*float64(startColor.B)) + uint8(percent*float64(endColor.B)),
A: 255,
}
for y := 0; y < size.Y; y++ {
res.SetRGBA(x, y, c)
}
}
}
return res
} | generate/generate.go | 0.758421 | 0.583915 | generate.go | starcoder |
package encoding
// setByte1Int64 writes the low byte of value at offset and returns the
// offset just past it.
func (e *encoder) setByte1Int64(value int64, offset int) int {
	e.d[offset] = byte(value)
	return offset + 1
}
// setByte2Int64 writes the low 16 bits of value as two big-endian bytes at
// offset and returns the offset just past them.
func (e *encoder) setByte2Int64(value int64, offset int) int {
	for shift := 8; shift >= 0; shift -= 8 {
		e.d[offset] = byte(value >> uint(shift))
		offset++
	}
	return offset
}
// setByte4Int64 writes the low 32 bits of value as four big-endian bytes at
// offset and returns the offset just past them.
func (e *encoder) setByte4Int64(value int64, offset int) int {
	for shift := 24; shift >= 0; shift -= 8 {
		e.d[offset] = byte(value >> uint(shift))
		offset++
	}
	return offset
}
// setByte8Int64 writes value as eight big-endian bytes at offset and
// returns the offset just past them.
func (e *encoder) setByte8Int64(value int64, offset int) int {
	for shift := 56; shift >= 0; shift -= 8 {
		e.d[offset] = byte(value >> uint(shift))
		offset++
	}
	return offset
}
// setByte1Uint64 writes the low byte of value at offset and returns the
// offset just past it.
func (e *encoder) setByte1Uint64(value uint64, offset int) int {
	e.d[offset] = byte(value)
	return offset + 1
}
// setByte2Uint64 writes the low 16 bits of value as two big-endian bytes at
// offset and returns the offset just past them.
func (e *encoder) setByte2Uint64(value uint64, offset int) int {
	for shift := 8; shift >= 0; shift -= 8 {
		e.d[offset] = byte(value >> uint(shift))
		offset++
	}
	return offset
}
// setByte4Uint64 writes the low 32 bits of value as four big-endian bytes
// at offset and returns the offset just past them.
func (e *encoder) setByte4Uint64(value uint64, offset int) int {
	for shift := 24; shift >= 0; shift -= 8 {
		e.d[offset] = byte(value >> uint(shift))
		offset++
	}
	return offset
}
// setByte8Uint64 writes value as eight big-endian bytes at offset and
// returns the offset just past them.
func (e *encoder) setByte8Uint64(value uint64, offset int) int {
	for shift := 56; shift >= 0; shift -= 8 {
		e.d[offset] = byte(value >> uint(shift))
		offset++
	}
	return offset
}
// setByte1Int writes the low byte of code at offset and returns the offset
// just past it.
func (e *encoder) setByte1Int(code, offset int) int {
	e.d[offset] = byte(code)
	return offset + 1
}
// setByte2Int writes the low 16 bits of value as two big-endian bytes at
// offset and returns the offset just past them.
func (e *encoder) setByte2Int(value int, offset int) int {
	for shift := 8; shift >= 0; shift -= 8 {
		e.d[offset] = byte(value >> uint(shift))
		offset++
	}
	return offset
}
// setByte4Int writes the low 32 bits of value as four big-endian bytes at
// offset and returns the offset just past them.
func (e *encoder) setByte4Int(value int, offset int) int {
	for shift := 24; shift >= 0; shift -= 8 {
		e.d[offset] = byte(value >> uint(shift))
		offset++
	}
	return offset
}
func (e *encoder) setBytes(bs []byte, offset int) int {
for i := range bs {
e.d[offset+i] = bs[i]
}
return offset + len(bs)
} | internal/encoding/set.go | 0.727879 | 0.500244 | set.go | starcoder |
package dleq
import (
"errors"
"github.com/dedis/kyber"
)
// Suite wraps the functionalities needed by the dleq package.
type Suite interface {
	kyber.Group       // group operations on points and scalars
	kyber.HashFactory // hash used to build the challenge transcript
	kyber.XOFFactory  // XOF used to map the challenge hash to a scalar
	kyber.Random      // randomness source for commitments
}
// errorDifferentLengths is returned by NewDLEQProofBatch when the input
// slices do not all have the same length.
var errorDifferentLengths = errors.New("inputs of different lengths")

// errorInvalidProof is returned by Verify when the proof does not hold.
var errorInvalidProof = errors.New("invalid proof")
// Proof represents a NIZK dlog-equality proof: knowledge of a scalar x such
// that xG and xH share the same discrete logarithm with respect to the base
// points G and H.
type Proof struct {
	C kyber.Scalar // challenge
	R kyber.Scalar // response
	VG kyber.Point // public commitment with respect to base point G
	VH kyber.Point // public commitment with respect to base point H
}
// NewDLEQProof computes a new NIZK dlog-equality proof for the scalar x with
// respect to base points G and H. It therefore randomly selects a commitment v
// and then computes the challenge c = H(xG,xH,vG,vH) and response r = v - cx.
// Besides the proof, this function also returns the encrypted base points xG
// and xH. Any error while hashing the transcript is returned to the caller.
func NewDLEQProof(suite Suite, G kyber.Point, H kyber.Point, x kyber.Scalar) (proof *Proof, xG kyber.Point, xH kyber.Point, err error) {
	// Encrypt base points with the secret.
	xG = suite.Point().Mul(x, G)
	xH = suite.Point().Mul(x, H)
	// Random commitment.
	v := suite.Scalar().Pick(suite.RandomStream())
	vG := suite.Point().Mul(v, G)
	vH := suite.Point().Mul(v, H)
	// Challenge c = H(xG,xH,vG,vH). Marshalling errors were previously
	// ignored; surface them so a failed hash input cannot yield a bogus proof.
	h := suite.Hash()
	for _, p := range []kyber.Point{xG, xH, vG, vH} {
		if _, err := p.MarshalTo(h); err != nil {
			return nil, nil, nil, err
		}
	}
	cb := h.Sum(nil)
	c := suite.Scalar().Pick(suite.XOF(cb))
	// Response r = v - c*x.
	r := suite.Scalar()
	r.Mul(x, c).Sub(v, r)
	return &Proof{c, r, vG, vH}, xG, xH, nil
}
// NewDLEQProofBatch computes lists of NIZK dlog-equality proofs and of
// encrypted base points xG and xH. Note that the challenge is computed over
// all input values, so the proofs are generated as one linked batch. All
// three input slices must have the same length; any error while hashing the
// transcript is returned to the caller.
func NewDLEQProofBatch(suite Suite, G []kyber.Point, H []kyber.Point, secrets []kyber.Scalar) (proof []*Proof, xG []kyber.Point, xH []kyber.Point, err error) {
	if len(G) != len(H) || len(H) != len(secrets) {
		return nil, nil, nil, errorDifferentLengths
	}
	n := len(secrets)
	proofs := make([]*Proof, n)
	v := make([]kyber.Scalar, n)
	xG = make([]kyber.Point, n)
	xH = make([]kyber.Point, n)
	vG := make([]kyber.Point, n)
	vH := make([]kyber.Point, n)
	for i, x := range secrets {
		// Encrypt base points with the i-th secret.
		xG[i] = suite.Point().Mul(x, G[i])
		xH[i] = suite.Point().Mul(x, H[i])
		// Random commitment for the i-th proof.
		v[i] = suite.Scalar().Pick(suite.RandomStream())
		vG[i] = suite.Point().Mul(v[i], G[i])
		vH[i] = suite.Point().Mul(v[i], H[i])
	}
	// Collective challenge c = H(xG..., xH..., vG..., vH...). Marshalling
	// errors were previously ignored; surface them instead of hashing a
	// partial transcript.
	h := suite.Hash()
	for _, points := range [][]kyber.Point{xG, xH, vG, vH} {
		for _, p := range points {
			if _, err := p.MarshalTo(h); err != nil {
				return nil, nil, nil, err
			}
		}
	}
	cb := h.Sum(nil)
	c := suite.Scalar().Pick(suite.XOF(cb))
	// Responses r_i = v_i - c*x_i.
	for i, x := range secrets {
		r := suite.Scalar()
		r.Mul(x, c).Sub(v[i], r)
		proofs[i] = &Proof{c, r, vG[i], vH[i]}
	}
	return proofs, xG, xH, nil
}
// Verify examines the validity of the NIZK dlog-equality proof.
// The proof is valid if the following two conditions hold:
// vG == rG + c(xG)
// vH == rH + c(xH)
func (p *Proof) Verify(suite Suite, G kyber.Point, H kyber.Point, xG kyber.Point, xH kyber.Point) error {
rG := suite.Point().Mul(p.R, G)
rH := suite.Point().Mul(p.R, H)
cxG := suite.Point().Mul(p.C, xG)
cxH := suite.Point().Mul(p.C, xH)
a := suite.Point().Add(rG, cxG)
b := suite.Point().Add(rH, cxH)
if !(p.VG.Equal(a) && p.VH.Equal(b)) {
return errorInvalidProof
}
return nil
} | lib/dedis/kyber/proof/dleq/dleq.go | 0.7478 | 0.503601 | dleq.go | starcoder |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.