repo stringlengths 1 191 ⌀ | file stringlengths 23 351 | code stringlengths 0 5.32M | file_length int64 0 5.32M | avg_line_length float64 0 2.9k | max_line_length int64 0 288k | extension_type stringclasses 1 value |
|---|---|---|---|---|---|---|
null | jabref-main/src/test/java/org/jabref/model/entry/field/StandardFieldTest.java | package org.jabref.model.entry.field;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
class StandardFieldTest {
@Test
void fieldsConsideredEqualIfSame() {
assertEquals(StandardField.TITLE, StandardField.TITLE);
}
}
| 287 | 19.571429 | 63 | java |
null | jabref-main/src/test/java/org/jabref/model/entry/field/UnknownFieldTest.java | package org.jabref.model.entry.field;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
class UnknownFieldTest {
@Test
void fieldsConsideredEqualIfSameName() {
assertEquals(new UnknownField("title"), new UnknownField("title"));
}
@Test
void fieldsConsideredEqualINameDifferByCapitalization() {
assertEquals(new UnknownField("tiTle"), new UnknownField("Title"));
}
@Test
void displayNameConstructor() {
UnknownField cAsED = UnknownField.fromDisplayName("cAsEd");
assertEquals(new UnknownField("cased", "cAsEd"), cAsED);
}
}
| 642 | 25.791667 | 75 | java |
null | jabref-main/src/test/java/org/jabref/model/entry/identifier/ArXivIdentifierTest.java | package org.jabref.model.entry.identifier;
import java.net.URI;
import java.util.Optional;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
class ArXivIdentifierTest {
@Test
void parse() throws Exception {
Optional<ArXivIdentifier> parsed = ArXivIdentifier.parse("0710.0994");
assertEquals(Optional.of(new ArXivIdentifier("0710.0994")), parsed);
}
@Test
void parseWithArXivPrefix() throws Exception {
Optional<ArXivIdentifier> parsed = ArXivIdentifier.parse("arXiv:0710.0994");
assertEquals(Optional.of(new ArXivIdentifier("0710.0994")), parsed);
}
@Test
void parseWithArxivPrefix() throws Exception {
Optional<ArXivIdentifier> parsed = ArXivIdentifier.parse("arxiv:0710.0994");
assertEquals(Optional.of(new ArXivIdentifier("0710.0994")), parsed);
}
@Test
void parseWithClassification() throws Exception {
Optional<ArXivIdentifier> parsed = ArXivIdentifier.parse("0706.0001v1 [q-bio.CB]");
assertEquals(Optional.of(new ArXivIdentifier("0706.0001", "1", "q-bio.CB")), parsed);
}
@Test
void parseWithArXivPrefixAndClassification() throws Exception {
Optional<ArXivIdentifier> parsed = ArXivIdentifier.parse("arXiv:0706.0001v1 [q-bio.CB]");
assertEquals(Optional.of(new ArXivIdentifier("0706.0001", "1", "q-bio.CB")), parsed);
}
@Test
void parseOldIdentifier() throws Exception {
Optional<ArXivIdentifier> parsed = ArXivIdentifier.parse("math.GT/0309136");
assertEquals(Optional.of(new ArXivIdentifier("math.GT/0309136", "math.GT")), parsed);
}
@Test
public void acceptLegacyEprint() {
Optional<ArXivIdentifier> parsed = ArXivIdentifier.parse("astro-ph.GT/1234567");
assertEquals(Optional.of(new ArXivIdentifier("astro-ph.GT/1234567", "astro-ph.GT")), parsed);
}
@Test
public void acceptLegacyMathEprint() {
Optional<ArXivIdentifier> parsed = ArXivIdentifier.parse("math/1234567");
assertEquals(Optional.of(new ArXivIdentifier("math/1234567", "math")), parsed);
}
@Test
void parseOldIdentifierWithArXivPrefix() throws Exception {
Optional<ArXivIdentifier> parsed = ArXivIdentifier.parse("arXiv:math.GT/0309136");
assertEquals(Optional.of(new ArXivIdentifier("math.GT/0309136", "math.GT")), parsed);
}
@Test
void parseUrl() throws Exception {
Optional<ArXivIdentifier> parsed = ArXivIdentifier.parse("http://arxiv.org/abs/1502.05795");
assertEquals(Optional.of(new ArXivIdentifier("1502.05795", "")), parsed);
}
@Test
void parseHttpsUrl() throws Exception {
Optional<ArXivIdentifier> parsed = ArXivIdentifier.parse("https://arxiv.org/abs/1502.05795");
assertEquals(Optional.of(new ArXivIdentifier("1502.05795", "")), parsed);
}
@Test
void parsePdfUrl() throws Exception {
Optional<ArXivIdentifier> parsed = ArXivIdentifier.parse("http://arxiv.org/pdf/1502.05795");
assertEquals(Optional.of(new ArXivIdentifier("1502.05795", "")), parsed);
}
@Test
void parseUrlWithVersion() throws Exception {
Optional<ArXivIdentifier> parsed = ArXivIdentifier.parse("http://arxiv.org/abs/1502.05795v1");
assertEquals(Optional.of(new ArXivIdentifier("1502.05795", "1", "")), parsed);
}
@Test
void parseOldUrlWithVersion() throws Exception {
Optional<ArXivIdentifier> parsed = ArXivIdentifier.parse("http://arxiv.org/pdf/hep-ex/0307015v1");
assertEquals(Optional.of(new ArXivIdentifier("hep-ex/0307015", "1", "hep-ex")), parsed);
}
@Test
void fourDigitDateIsInvalidInLegacyFormat() throws Exception {
Optional<ArXivIdentifier> parsed = ArXivIdentifier.parse("2017/1118");
assertEquals(Optional.empty(), parsed);
}
@Test
public void acceptPlainEprint() {
Optional<ArXivIdentifier> parsed = ArXivIdentifier.parse("0706.0001");
assertEquals(Optional.of(new ArXivIdentifier("0706.0001")), parsed);
}
@Test
public void acceptPlainEprintWithVersion() {
Optional<ArXivIdentifier> parsed = ArXivIdentifier.parse("0706.0001v1");
assertEquals(Optional.of(new ArXivIdentifier("0706.0001", "v1", "")), parsed);
}
@Test
public void acceptArxivPrefix() {
Optional<ArXivIdentifier> parsed = ArXivIdentifier.parse("arXiv:0706.0001v1");
assertEquals(Optional.of(new ArXivIdentifier("0706.0001", "v1", "")), parsed);
}
@Test
public void ignoreLeadingAndTrailingWhitespaces() {
Optional<ArXivIdentifier> parsed = ArXivIdentifier.parse(" 0706.0001v1 ");
assertEquals(Optional.of(new ArXivIdentifier("0706.0001", "v1", "")), parsed);
}
@Test
public void rejectEmbeddedEprint() {
Optional<ArXivIdentifier> parsed = ArXivIdentifier.parse("other stuff 0706.0001v1 end");
assertEquals(Optional.empty(), parsed);
}
@Test
public void rejectInvalidEprint() {
Optional<ArXivIdentifier> parsed = ArXivIdentifier.parse("https://thisisnouri");
assertEquals(Optional.empty(), parsed);
}
@Test
public void acceptUrlHttpEprint() {
Optional<ArXivIdentifier> parsed = ArXivIdentifier.parse("http://arxiv.org/abs/0706.0001v1");
assertEquals(Optional.of(new ArXivIdentifier("0706.0001", "v1", "")), parsed);
}
@Test
public void acceptUrlHttpsEprint() {
Optional<ArXivIdentifier> parsed = ArXivIdentifier.parse("https://arxiv.org/abs/0706.0001v1");
assertEquals(Optional.of(new ArXivIdentifier("0706.0001", "v1", "")), parsed);
}
@Test
public void rejectUrlOtherDomainEprint() {
Optional<ArXivIdentifier> parsed = ArXivIdentifier.parse("https://asdf.org/abs/0706.0001v1");
assertEquals(Optional.empty(), parsed);
}
@Test
public void constructCorrectURLForEprint() throws Exception {
Optional<ArXivIdentifier> parsed = ArXivIdentifier.parse("0706.0001v1");
assertEquals(Optional.of(new URI("https://arxiv.org/abs/0706.0001v1")), parsed.get().getExternalURI());
}
}
| 6,191 | 34.586207 | 111 | java |
null | jabref-main/src/test/java/org/jabref/model/entry/identifier/DOITest.java | package org.jabref.model.entry.identifier;
import java.util.Optional;
import java.util.stream.Stream;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class DOITest {
private static Stream<Arguments> testData() {
return Stream.of(
// PlainDoi
Arguments.of("10.1006/jmbi.1998.2354", new DOI("10.1006/jmbi.1998.2354").getDOI()),
Arguments.of("10.231/JIM.0b013e31820bab4c", new DOI("10.231/JIM.0b013e31820bab4c").getDOI()),
Arguments.of("10.1002/(SICI)1522-2594(199911)42:5<952::AID-MRM16>3.0.CO;2-S",
new DOI("10.1002/(SICI)1522-2594(199911)42:5<952::AID-MRM16>3.0.CO;2-S").getDOI()),
Arguments.of("10.1126/sciadv.1500214", new DOI("10.1126/sciadv.1500214").getDOI()),
// PlainShortDoi
Arguments.of("10/gf4gqc", new DOI("10/gf4gqc").getDOI()),
Arguments.of("10/1000", new DOI("10/1000").getDOI()),
Arguments.of("10/aaaa", new DOI("10/aaaa").getDOI()),
Arguments.of("10/adc", new DOI("10/adc").getDOI()),
// ignoreLeadingAndTrailingWhitespaces
Arguments.of("10.1006/jmbi.1998.2354", new DOI(" 10.1006/jmbi.1998.2354 ").getDOI()),
// ignoreLeadingAndTrailingWhitespacesInShortDoi
Arguments.of("10/gf4gqc", new DOI(" 10/gf4gqc ").getDOI()),
// acceptDoiPrefix
// Doi prefix
Arguments.of("10.1006/jmbi.1998.2354", new DOI("doi:10.1006/jmbi.1998.2354").getDOI()),
// acceptDoiPrefixInShortDoi
Arguments.of("10/gf4gqc", new DOI("doi:10/gf4gqc").getDOI()),
// acceptURNPrefix
Arguments.of("10.123/456", new DOI("urn:10.123/456").getDOI()),
Arguments.of("10.123/456", new DOI("http://doi.org/urn:doi:10.123/456").getDOI()),
// : is also allowed as divider, will be replaced by RESOLVER
Arguments.of("10.123:456ABC/zyz", new DOI("http://doi.org/urn:doi:10.123:456ABC%2Fzyz").getDOI()),
// acceptShortcutShortDoi
Arguments.of("10/d8dn", new DOI("https://doi.org/d8dn").getDOI()),
Arguments.of("10/d8dn", new DOI(" https://doi.org/d8dn ").getDOI()),
Arguments.of("10/d8dn", new DOI("doi.org/d8dn").getDOI()),
Arguments.of("10/d8dn", new DOI("www.doi.org/d8dn").getDOI()),
Arguments.of("10/d8dn", new DOI(" doi.org/d8dn ").getDOI()),
// acceptURNPrefixInShortDoi
Arguments.of("10/gf4gqc", new DOI("urn:10/gf4gqc").getDOI()),
Arguments.of("10/gf4gqc", new DOI("doi:10/gf4gqc").getDOI()),
Arguments.of("10/gf4gqc", new DOI("urn:doi:10/gf4gqc").getDOI()),
Arguments.of("10/gf4gqc", new DOI("http://doi.org/urn:doi:10/gf4gqc").getDOI()),
// : is also allowed as divider, will be replaced by RESOLVER
Arguments.of("10:gf4gqc", new DOI("http://doi.org/urn:doi:10:gf4gqc").getDOI()),
// acceptURLDoi
// http
Arguments.of("10.1006/jmbi.1998.2354", new DOI("http://doi.org/10.1006/jmbi.1998.2354").getDOI()),
// https
Arguments.of("10.1006/jmbi.1998.2354", new DOI("https://doi.org/10.1006/jmbi.1998.2354").getDOI()),
// https with % divider
Arguments.of("10.2307/1990888", new DOI("https://dx.doi.org/10.2307%2F1990888").getDOI()),
// other domains
Arguments.of("10.1145/1294928.1294933", new DOI("http://doi.acm.org/10.1145/1294928.1294933").getDOI()),
Arguments.of("10.1145/1294928.1294933", new DOI("http://doi.acm.net/10.1145/1294928.1294933").getDOI()),
Arguments.of("10.1145/1294928.1294933", new DOI("http://doi.acm.com/10.1145/1294928.1294933").getDOI()),
Arguments.of("10.1145/1294928.1294933", new DOI("http://doi.acm.de/10.1145/1294928.1294933").getDOI()),
Arguments.of("10.1007/978-3-642-15618-2_19",
new DOI("http://dx.doi.org/10.1007/978-3-642-15618-2_19").getDOI()),
Arguments.of("10.1007/978-3-642-15618-2_19",
new DOI("http://dx.doi.net/10.1007/978-3-642-15618-2_19").getDOI()),
Arguments.of("10.1007/978-3-642-15618-2_19",
new DOI("http://dx.doi.com/10.1007/978-3-642-15618-2_19").getDOI()),
Arguments.of("10.1007/978-3-642-15618-2_19",
new DOI("http://dx.doi.de/10.1007/978-3-642-15618-2_19").getDOI()),
Arguments.of("10.4108/ICST.COLLABORATECOM2009.8275",
new DOI("http://dx.doi.org/10.4108/ICST.COLLABORATECOM2009.8275").getDOI()),
Arguments.of("10.1109/MIC.2012.43",
new DOI("http://doi.ieeecomputersociety.org/10.1109/MIC.2012.43").getDOI()),
// acceptURLShortDoi
// http
Arguments.of("10/gf4gqc", new DOI("http://doi.org/10/gf4gqc").getDOI()),
// https
Arguments.of("10/gf4gqc", new DOI("https://doi.org/10/gf4gqc").getDOI()),
// https with % divider
Arguments.of("10/gf4gqc", new DOI("https://dx.doi.org/10%2Fgf4gqc").getDOI()),
// other domains
Arguments.of("10/gf4gqc", new DOI("http://doi.acm.org/10/gf4gqc").getDOI()),
Arguments.of("10/gf4gqc", new DOI("www.doi.acm.org/10/gf4gqc").getDOI()),
Arguments.of("10/gf4gqc", new DOI("doi.acm.org/10/gf4gqc").getDOI()),
Arguments.of("10/gf4gqc", new DOI("10/gf4gqc").getDOI()),
Arguments.of("10/gf4gqc", new DOI("/10/gf4gqc").getDOI()),
Arguments.of("10/gf4gqc", new DOI(" /10/gf4gqc").getDOI()),
Arguments.of("10/gf4gqc", new DOI(" 10/gf4gqc").getDOI()),
Arguments.of("10/gf4gqc", new DOI("http://doi.acm.net/10/gf4gqc").getDOI()),
Arguments.of("10/gf4gqc", new DOI("http://doi.acm.com/10/gf4gqc").getDOI()),
Arguments.of("10/gf4gqc", new DOI("http://doi.acm.de/10/gf4gqc").getDOI()),
Arguments.of("10/gf4gqc", new DOI("http://dx.doi.org/10/gf4gqc").getDOI()),
Arguments.of("10/gf4gqc", new DOI("http://dx.doi.net/10/gf4gqc").getDOI()),
Arguments.of("10/gf4gqc", new DOI("http://dx.doi.com/10/gf4gqc").getDOI()),
Arguments.of("10/gf4gqc", new DOI("http://dx.doi.de/10/gf4gqc").getDOI()),
Arguments.of("10/gf4gqc", new DOI("http://dx.doi.org/10/gf4gqc").getDOI()),
Arguments.of("10/gf4gqc", new DOI("http://doi.ieeecomputersociety.org/10/gf4gqc").getDOI()),
// parse DOI with whitespace
Arguments.of("https://doi.org/10.1109/VLHCC.2004.20", DOI.parse("https : / / doi.org / 10 .1109 /V LHCC.20 04.20").get().getURIAsASCIIString()),
// parse short DOI with whitespace
Arguments.of("https://doi.org/10/gf4gqc", DOI.parse("https : / / doi.org / 10 / gf4gqc").get().getURIAsASCIIString()),
// parse DOI with non-ASCII characters and whitespace
Arguments.of("https://doi.org/10/gf4gqc", DOI.parse("�https : \n ␛ / / doi.org / \t 10 / \r gf4gqc�␛").get().getURIAsASCIIString()),
Arguments.of("10/gf4gqc", DOI.parse("�https : \n ␛ / / doi.org / \t 10 / \r gf4gqc�␛").get().getDOI()),
Arguments.of("10/gf4gqc", DOI.parse(" 10 / gf4gqc ").get().getDOI()),
Arguments.of("10.3218/3846-0", DOI.parse(" �10.3218\n/384␛6-0�").get().getDOI()),
// parse already-cleaned DOI
Arguments.of("10.3218/3846-0", DOI.parse("10.3218/3846-0").get().getDOI()),
// correctlyEncodeDOIs
// See http://www.doi.org/doi_handbook/2_Numbering.html#2.5.2.4
// % -> (%25)
Arguments.of("https://doi.org/10.1006/rwei.1999%25.0001",
new DOI("https://doi.org/10.1006/rwei.1999%25.0001").getURIAsASCIIString()),
// " -> (%22)
Arguments.of("https://doi.org/10.1006/rwei.1999%22.0001",
new DOI("https://doi.org/10.1006/rwei.1999%22.0001").getURIAsASCIIString()),
// # -> (%23)
Arguments.of("https://doi.org/10.1006/rwei.1999%23.0001",
new DOI("https://doi.org/10.1006/rwei.1999%23.0001").getURIAsASCIIString()),
// SPACE -> (%20)
Arguments.of("https://doi.org/10.1006/rwei.1999%20.0001",
new DOI("https://doi.org/10.1006/rwei.1999%20.0001").getURIAsASCIIString()),
// ? -> (%3F)
Arguments.of("https://doi.org/10.1006/rwei.1999%3F.0001",
new DOI("https://doi.org/10.1006/rwei.1999%3F.0001").getURIAsASCIIString()),
// constructCorrectURLForDoi
// add / to RESOLVER url if missing
Arguments.of("https://doi.org/10.1006/jmbi.1998.2354",
new DOI("10.1006/jmbi.1998.2354").getURIAsASCIIString()),
Arguments.of("https://doi.org/10.1006/jmbi.1998.2354",
new DOI("https://doi.org/10.1006/jmbi.1998.2354").getURIAsASCIIString()),
Arguments.of("https://doi.org/10.1109/VLHCC.2004.20",
new DOI("doi:10.1109/VLHCC.2004.20").getURIAsASCIIString()),
// constructCorrectURLForShortDoi
Arguments.of("https://doi.org/10/gf4gqc", new DOI("10/gf4gqc").getURIAsASCIIString()),
// correctlyDecodeHttpDOIs
// See http://www.doi.org/doi_handbook/2_Numbering.html#2.5.2.4
// % -> (%25)
Arguments.of("10.1006/rwei.1999%.0001", new DOI("http://doi.org/10.1006/rwei.1999%25.0001").getDOI()),
// " -> (%22)
Arguments.of("10.1006/rwei.1999\".0001", new DOI("http://doi.org/10.1006/rwei.1999%22.0001").getDOI()),
// # -> (%23)
Arguments.of("10.1006/rwei.1999#.0001", new DOI("http://doi.org/10.1006/rwei.1999%23.0001").getDOI()),
// SPACE -> (%20)
Arguments.of("10.1006/rwei.1999 .0001", new DOI("http://doi.org/10.1006/rwei.1999%20.0001").getDOI()),
// ? -> (%3F)
Arguments.of("10.1006/rwei.1999?.0001", new DOI("http://doi.org/10.1006/rwei.1999%3F.0001").getDOI()),
// findDoiInsideArbitraryText
Arguments.of("10.1006/jmbi.1998.2354",
DOI.findInText("other stuff 10.1006/jmbi.1998.2354 end").get().getDOI()),
Arguments.of("10.1007/s10549-018-4743-9",
DOI.findInText("Breast Cancer Res Treat. 2018 July ; 170(1): 77–87. doi:10.1007/s10549-018-4743-9. ").get().getDOI()),
Arguments.of("10.1007/s10549-018-4743-9",
DOI.findInText("Breast Cancer Res Treat. 2018 July ; 170(1): 77–87. doi:10.1007/s10549-018-4743-9, ").get().getDOI()),
Arguments.of("10.1007/s10549-018-4743-9",
DOI.findInText("Breast Cancer Res Treat. 2018 July ; 170(1): 77–87. doi:10.1007/s10549-018-4743-9;something else").get().getDOI()),
Arguments.of("10.1007/s10549-018-4743-9.1234",
DOI.findInText("bla doi:10.1007/s10549-018-4743-9.1234 with . in doi").get().getDOI()),
// findShortDoiInsideArbitraryText
Arguments.of("10/12ab", DOI.findInText("other stuff doi:10/12ab end").get().getDOI()),
Arguments.of("10/12ab", DOI.findInText("other stuff /urn:doi:10/12ab end").get().getDOI()),
Arguments.of("10%12ab", DOI.findInText("other stuff doi:10%12ab end").get().getDOI()),
Arguments.of("10%12ab", DOI.findInText("other stuff /doi:10%12ab end").get().getDOI()),
Arguments.of("10%12ab", DOI.findInText("other stuff /doi:10%12ab, end").get().getDOI()),
Arguments.of("10%12ab", DOI.findInText("other stuff /doi:10%12ab. end").get().getDOI()),
Arguments.of("10%12ab", DOI.findInText("other stuff /doi:10%12ab; end").get().getDOI()),
Arguments.of("10/1234", DOI.findInText("10/B(C)/15 \n" +
" \n" +
"10:51 \n" +
" \n" +
" \n" +
"doi.org/10/1234 ").get().getDOI()),
// findShortcutDoiInsideArbitraryText
Arguments.of("10/ab123", DOI.findInText("other stuff doi.org/ab123 end").get().getDOI()),
Arguments.of("10/76543", DOI.findInText("other stuff www.doi.org/76543 end").get().getDOI()),
Arguments.of("10/abcde", DOI.findInText("other stuff https://www.doi.org/abcde end").get().getDOI()),
Arguments.of("10/abcde", DOI.findInText("other stuff https://doi.org/abcde end").get().getDOI()),
Arguments.of("10.5220/0010404301780189", DOI.findInText("https://www.scitepress.org/Link.aspx?doi=10.5220/0010404301780189").get().getDOI()),
Arguments.of("10.5220/0010404301780189", DOI.findInText("10.5220/0010404301780189").get().getDOI())
);
}
@ParameterizedTest
@MethodSource("testData")
public void testEquals(String expected, String input) {
assertEquals(expected, input);
}
@Test
public void equalsWorksFor2017Doi() {
assertEquals(new DOI("10.1109/cloud.2017.89"), new DOI("10.1109/CLOUD.2017.89"));
}
@Test
public void isShortDoiShouldReturnTrueWhenItIsShortDoi() {
assertTrue(new DOI("10/abcde").isShortDoi());
}
@Test
public void noDOIFoundInsideArbitraryText() {
assertEquals(Optional.empty(), DOI.findInText("text without 28282 a doi"));
assertEquals(Optional.empty(), DOI.findInText("It's 10:30 o'clock"));
assertEquals(Optional.empty(), DOI.findInText("...archive number 10/XYZ/123..."));
assertEquals(Optional.empty(), DOI.findInText("some website poi.org/ab123 end"));
}
@Test
public void rejectURLShortDoi() {
assertThrows(IllegalArgumentException.class, () -> new DOI("http://www.cs.utexas.edu/users/kaufmann/itp-trusted-extensions-aug-2010/summary/summary.pdf"));
assertThrows(IllegalArgumentException.class, () -> new DOI("http://www.cs.utexas.edu/users/kaufmann/itp-trusted-extensions-aug-20/10/summary/summary.pdf"));
assertThrows(IllegalArgumentException.class, () -> new DOI("http://www.boi.org/10/2010bingbong"));
}
@Test
public void isShortDoiShouldReturnFalseWhenItIsDoi() {
assertFalse(new DOI("10.1006/jmbi.1998.2354").isShortDoi());
}
@Test
public void rejectEmbeddedDoi() {
assertThrows(IllegalArgumentException.class, () -> new DOI("other stuff 10.1006/jmbi.1998.2354 end"));
}
@Test
public void rejectEmbeddedShortDoi() {
assertThrows(IllegalArgumentException.class, () -> new DOI("other stuff 10/gf4gqc end"));
assertThrows(IllegalArgumentException.class, () -> new DOI("10/2021/01"));
assertThrows(IllegalArgumentException.class, () -> new DOI("01/10/2021"));
assertThrows(IllegalArgumentException.class, () -> new DOI("https://www.abc.de/10/abcd"));
}
@Test
public void rejectInvalidDirectoryIndicator() {
// wrong directory indicator
assertThrows(IllegalArgumentException.class, () -> new DOI("12.1006/jmbi.1998.2354 end"));
}
@Test
public void rejectInvalidDirectoryIndicatorInShortDoi() {
assertThrows(IllegalArgumentException.class, () -> new DOI("20/abcd"));
}
@Test
public void rejectInvalidDoiUri() {
assertThrows(IllegalArgumentException.class, () -> new DOI("https://thisisnouri"));
}
@Test
public void rejectMissingDivider() {
// missing divider
assertThrows(IllegalArgumentException.class, () -> new DOI("10.1006jmbi.1998.2354 end"));
}
@Test
public void rejectMissingDividerInShortDoi() {
assertThrows(IllegalArgumentException.class, () -> new DOI("10gf4gqc end"));
}
}
| 16,836 | 58.077193 | 164 | java |
null | jabref-main/src/test/java/org/jabref/model/entry/identifier/ISBNTest.java | package org.jabref.model.entry.identifier;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class ISBNTest {
@Test
public void testIsValidFormat10Correct() {
assertTrue(new ISBN("0-123456-47-9").isValidFormat());
assertTrue(new ISBN("0-9752298-0-X").isValidFormat());
}
@Test
public void testIsValidFormat10Incorrect() {
assertFalse(new ISBN("0-12B456-47-9").isValidFormat());
}
@Test
public void testIsValidChecksum10Correct() {
assertTrue(new ISBN("0-123456-47-9").isValidChecksum());
assertTrue(new ISBN("0-9752298-0-X").isValidChecksum());
assertTrue(new ISBN("0-9752298-0-x").isValidChecksum());
}
@Test
public void testIsValidChecksum10Incorrect() {
assertFalse(new ISBN("0-123456-47-8").isValidChecksum());
}
@Test
public void testIsValidFormat13Correct() {
assertTrue(new ISBN("978-1-56619-909-4").isValidFormat());
}
@Test
public void testIsValidFormat13Incorrect() {
assertFalse(new ISBN("978-1-56619-9O9-4 ").isValidFormat());
}
@Test
public void testIsValidChecksum13Correct() {
assertTrue(new ISBN("978-1-56619-909-4 ").isValidChecksum());
}
@Test
public void testIsValidChecksum13Incorrect() {
assertFalse(new ISBN("978-1-56619-909-5").isValidChecksum());
}
@Test
public void testIsIsbn10Correct() {
assertTrue(new ISBN("0-123456-47-9").isIsbn10());
assertTrue(new ISBN("0-9752298-0-X").isIsbn10());
}
@Test
public void testIsIsbn10Incorrect() {
assertFalse(new ISBN("978-1-56619-909-4").isIsbn10());
}
@Test
public void testIsIsbn13Correct() {
assertTrue(new ISBN("978-1-56619-909-4").isIsbn13());
}
@Test
public void testIsIsbn13Incorrect() {
assertFalse(new ISBN("0-123456-47-9").isIsbn13());
}
}
| 2,013 | 26.216216 | 69 | java |
null | jabref-main/src/test/java/org/jabref/model/entry/identifier/ISSNTest.java | package org.jabref.model.entry.identifier;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class ISSNTest {
@Test
public void testIsCanBeCleaned() {
assertTrue(new ISSN("00279633").isCanBeCleaned());
}
@Test
public void testIsCanBeCleanedIncorrectRubbish() {
assertFalse(new ISSN("A brown fox").isCanBeCleaned());
}
@Test
public void testIsCanBeCleanedDashAlreadyThere() {
assertFalse(new ISSN("0027-9633").isCanBeCleaned());
}
@Test
public void testGetCleanedISSN() {
assertEquals("0027-9633", new ISSN("00279633").getCleanedISSN());
}
@Test
public void testGetCleanedISSNDashAlreadyThere() {
assertEquals("0027-9633", new ISSN("0027-9633").getCleanedISSN());
}
@Test
public void testGetCleanedISSNDashRubbish() {
assertEquals("A brown fox", new ISSN("A brown fox").getCleanedISSN());
}
@Test
public void testIsValidChecksumCorrect() {
assertTrue(new ISSN("0027-9633").isValidChecksum());
assertTrue(new ISSN("2434-561X").isValidChecksum());
assertTrue(new ISSN("2434-561x").isValidChecksum());
}
@Test
public void testIsValidChecksumIncorrect() {
assertFalse(new ISSN("0027-9634").isValidChecksum());
}
@Test
public void testIsValidFormatCorrect() {
assertTrue(new ISSN("0027-963X").isValidFormat());
}
@Test
public void testIsValidFormatIncorrect() {
assertFalse(new ISSN("00279634").isValidFormat());
}
}
| 1,712 | 26.190476 | 78 | java |
null | jabref-main/src/test/java/org/jabref/model/entry/identifier/IacrEprintTest.java | package org.jabref.model.entry.identifier;
import java.util.stream.Stream;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
public class IacrEprintTest {
private static Stream<Arguments> provideTestData() {
return Stream.of(
Arguments.arguments(
"acceptPlainIacrEprint",
"2019/001"
),
Arguments.arguments(
"ignoreLeadingAndTrailingWhitespaces",
" 2019/001 "
),
Arguments.arguments(
"acceptFullUrlIacrEprint",
"https://eprint.iacr.org/2019/001"
),
Arguments.arguments(
"acceptShortenedUrlIacrEprint",
"https://ia.cr/2019/001"
),
Arguments.arguments(
"acceptDomainUrlIacrEprint",
"eprint.iacr.org/2019/001"
),
Arguments.arguments(
"acceptShortenedDomainUrlIacrEprint",
"ia.cr/2019/001"
)
);
}
@Test
public void rejectInvalidIacrEprint() {
assertThrows(IllegalArgumentException.class, () -> new IacrEprint("2021/12"));
}
@ParameterizedTest(name = "{index} {0}")
@MethodSource("provideTestData")
public void acceptCorrectIacrEprintIdentifier(String name, String identifier) {
assertEquals("2019/001", new IacrEprint(identifier).getNormalized());
}
@Test
public void constructValidIacrEprintUrl() {
assertEquals("https://ia.cr/2019/001", new IacrEprint("2019/001").getAsciiUrl());
}
}
| 2,024 | 32.75 | 89 | java |
null | jabref-main/src/test/java/org/jabref/model/entry/identifier/MathSciNetIdTest.java | package org.jabref.model.entry.identifier;
import java.util.Optional;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
public class MathSciNetIdTest {
@Test
public void parseRemovesNewLineCharacterAtEnd() throws Exception {
Optional<MathSciNetId> id = MathSciNetId.parse("3014184\n");
assertEquals(Optional.of(new MathSciNetId("3014184")), id);
}
}
| 429 | 24.294118 | 70 | java |
null | jabref-main/src/test/java/org/jabref/model/event/EventListenerTest.java | package org.jabref.model.event;
import java.util.List;
import org.jabref.model.database.event.EntriesAddedEvent;
import org.jabref.model.database.event.EntriesRemovedEvent;
import org.jabref.model.entry.BibEntry;
import org.jabref.model.entry.event.EntryChangedEvent;
import com.google.common.eventbus.Subscribe;
public class EventListenerTest {
private List<BibEntry> addedEntries;
private BibEntry firstInsertedEntry;
private List<BibEntry> removedEntries;
private BibEntry changedEntry;
@Subscribe
public void listen(EntriesAddedEvent event) {
this.addedEntries = event.getBibEntries();
this.firstInsertedEntry = event.getFirstEntry();
}
@Subscribe
public void listen(EntriesRemovedEvent event) {
this.removedEntries = event.getBibEntries();
}
@Subscribe
public void listen(EntryChangedEvent event) {
this.changedEntry = event.getBibEntry();
}
public List<BibEntry> getAddedEntries() {
return addedEntries;
}
public BibEntry getFirstInsertedEntry() {
return firstInsertedEntry;
}
public List<BibEntry> getRemovedEntries() {
return removedEntries;
}
public BibEntry getChangedEntry() {
return changedEntry;
}
}
| 1,273 | 23.980392 | 59 | java |
null | jabref-main/src/test/java/org/jabref/model/groups/AutomaticKeywordGroupTest.java | package org.jabref.model.groups;
import java.util.HashSet;
import java.util.Set;
import org.jabref.model.entry.BibEntry;
import org.jabref.model.entry.field.StandardField;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
class AutomaticKeywordGroupTest {
@Test
void createSubgroupsForTwoKeywords() throws Exception {
AutomaticKeywordGroup keywordsGroup = new AutomaticKeywordGroup("Keywords", GroupHierarchyType.INDEPENDENT, StandardField.KEYWORDS, ',', '>');
BibEntry entry = new BibEntry().withField(StandardField.KEYWORDS, "A, B");
Set<GroupTreeNode> expected = createIncludingKeywordsSubgroup();
assertEquals(expected, keywordsGroup.createSubgroups(entry));
}
@Test
void createSubgroupsIgnoresEmptyKeyword() throws Exception {
AutomaticKeywordGroup keywordsGroup = new AutomaticKeywordGroup("Keywords", GroupHierarchyType.INDEPENDENT, StandardField.KEYWORDS, ',', '>');
BibEntry entry = new BibEntry().withField(StandardField.KEYWORDS, "A, ,B");
Set<GroupTreeNode> expected = createIncludingKeywordsSubgroup();
assertEquals(expected, keywordsGroup.createSubgroups(entry));
}
private Set<GroupTreeNode> createIncludingKeywordsSubgroup() {
Set<GroupTreeNode> expectedKeywordsSubgroup = new HashSet<>();
expectedKeywordsSubgroup.add(GroupTreeNode.fromGroup(new WordKeywordGroup("A", GroupHierarchyType.INCLUDING, StandardField.KEYWORDS, "A", true, ',', true)));
expectedKeywordsSubgroup.add(GroupTreeNode.fromGroup(new WordKeywordGroup("B", GroupHierarchyType.INCLUDING, StandardField.KEYWORDS, "B", true, ',', true)));
return expectedKeywordsSubgroup;
}
}
| 1,746 | 39.627907 | 165 | java |
null | jabref-main/src/test/java/org/jabref/model/groups/AutomaticPersonsGroupTest.java | package org.jabref.model.groups;
import java.util.Arrays;
import java.util.stream.Collectors;
import org.jabref.model.entry.BibEntry;
import org.jabref.model.entry.field.StandardField;
import org.junit.jupiter.api.Test;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
class AutomaticPersonsGroupTest {
private static GroupTreeNode[] createPersonSubGroupFrom(String... lastNames) {
return Arrays.stream(lastNames)
.map(lastName ->
new LastNameGroup(lastName, GroupHierarchyType.INDEPENDENT, StandardField.AUTHOR, lastName))
.map(GroupTreeNode::new)
.collect(Collectors.toList())
.toArray(GroupTreeNode[]::new);
}
@Test
void createSubgroupsFromCommaSeparatedLastNames() {
BibEntry bibEntry = new BibEntry().withField(StandardField.AUTHOR, "Turing, Alan and Hopper, Grace");
var subgroups = new AutomaticPersonsGroup("", GroupHierarchyType.INDEPENDENT, StandardField.AUTHOR).createSubgroups(bibEntry);
var expectedSubgroups = createPersonSubGroupFrom("Turing", "Hopper");
assertThat(subgroups, containsInAnyOrder(expectedSubgroups));
}
@Test
void createSubgroupsContainingSpaceSeparatedNames() {
BibEntry bibEntry = new BibEntry().withField(StandardField.AUTHOR, "Alan Turing and Grace Hopper");
var subgroups = new AutomaticPersonsGroup("", GroupHierarchyType.INDEPENDENT, StandardField.AUTHOR).createSubgroups(bibEntry);
var expectedSubgroups = createPersonSubGroupFrom("Turing", "Hopper");
assertThat(subgroups, containsInAnyOrder(expectedSubgroups));
}
@Test
void createSubgroupFromLatex() {
BibEntry bibEntry = new BibEntry().withField(StandardField.AUTHOR, "Kurt G{\\\"{o}}del");
var subgroup = new AutomaticPersonsGroup("", GroupHierarchyType.INDEPENDENT, StandardField.AUTHOR).createSubgroups(bibEntry);
var expectedSubgroup = createPersonSubGroupFrom("Gödel");
assertThat(subgroup, contains(expectedSubgroup));
}
@Test
void createSubgroupFromUnicode() {
BibEntry bibEntry = new BibEntry().withField(StandardField.AUTHOR, "Kurt Gödel");
var subgroup = new AutomaticPersonsGroup("", GroupHierarchyType.INDEPENDENT, StandardField.AUTHOR).createSubgroups(bibEntry);
var expectedSubgroup = createPersonSubGroupFrom("Gödel");
assertThat(subgroup, contains(expectedSubgroup));
}
}
| 2,601 | 44.649123 | 134 | java |
null | jabref-main/src/test/java/org/jabref/model/groups/ExplicitGroupTest.java | package org.jabref.model.groups;
import java.util.Optional;
import org.jabref.model.entry.BibEntry;
import org.jabref.model.entry.field.StandardField;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
/**
 * Tests for {@link ExplicitGroup}: how adding/removing a group rewrites the
 * {@code groups} field of a {@link BibEntry}, and how membership is matched.
 *
 * <p>Cleanup: the unnecessary {@code throws Exception} clauses were removed —
 * none of these tests invoke code that throws checked exceptions, and only
 * some methods carried the clause, which was inconsistent.</p>
 */
class ExplicitGroupTest {

    private ExplicitGroup group;
    private ExplicitGroup group2;
    private BibEntry entry;

    @BeforeEach
    void setUp() {
        group = new ExplicitGroup("myExplicitGroup", GroupHierarchyType.INDEPENDENT, ',');
        group2 = new ExplicitGroup("myExplicitGroup2", GroupHierarchyType.INCLUDING, ',');
        entry = new BibEntry();
    }

    @Test
    void addSingleGroupToEmptyBibEntryChangesGroupsField() {
        group.add(entry);
        assertEquals(Optional.of("myExplicitGroup"), entry.getField(StandardField.GROUPS));
    }

    @Test
    void addSingleGroupToNonemptyBibEntryAppendsToGroupsField() {
        entry.setField(StandardField.GROUPS, "some thing");
        group.add(entry);
        // The group name is appended after the existing content, separated by the group's separator.
        assertEquals(Optional.of("some thing, myExplicitGroup"), entry.getField(StandardField.GROUPS));
    }

    @Test
    void addTwoGroupsToBibEntryChangesGroupsField() {
        group.add(entry);
        group2.add(entry);
        assertEquals(Optional.of("myExplicitGroup, myExplicitGroup2"), entry.getField(StandardField.GROUPS));
    }

    @Test
    void addDuplicateGroupDoesNotChangeGroupsField() {
        entry.setField(StandardField.GROUPS, "myExplicitGroup");
        group.add(entry);
        assertEquals(Optional.of("myExplicitGroup"), entry.getField(StandardField.GROUPS));
    }

    @Test
    // For https://github.com/JabRef/jabref/issues/2334
    void removeDoesNotChangeFieldIfContainsNameAsPart() {
        // "myExplicitGroup_alternative" merely contains the group name as a substring.
        entry.setField(StandardField.GROUPS, "myExplicitGroup_alternative");
        group.remove(entry);
        assertEquals(Optional.of("myExplicitGroup_alternative"), entry.getField(StandardField.GROUPS));
    }

    @Test
    // For https://github.com/JabRef/jabref/issues/2334
    void removeDoesNotChangeFieldIfContainsNameAsWord() {
        // The group name appears only as one word of a larger (unseparated) entry.
        entry.setField(StandardField.GROUPS, "myExplicitGroup alternative");
        group.remove(entry);
        assertEquals(Optional.of("myExplicitGroup alternative"), entry.getField(StandardField.GROUPS));
    }

    @Test
    // For https://github.com/JabRef/jabref/issues/1873
    void containsOnlyMatchesCompletePhraseWithWhitespace() {
        entry.setField(StandardField.GROUPS, "myExplicitGroup b");
        assertFalse(group.contains(entry));
    }

    @Test
    // For https://github.com/JabRef/jabref/issues/1873
    void containsOnlyMatchesCompletePhraseWithSlash() {
        entry.setField(StandardField.GROUPS, "myExplicitGroup/b");
        assertFalse(group.contains(entry));
    }

    @Test
    // For https://github.com/JabRef/jabref/issues/2394
    void containsMatchesPhraseWithBrackets() {
        entry.setField(StandardField.GROUPS, "[aa] Subgroup1");
        ExplicitGroup explicitGroup = new ExplicitGroup("[aa] Subgroup1", GroupHierarchyType.INCLUDING, ',');
        assertTrue(explicitGroup.contains(entry));
    }
}
| 3,388 | 33.938144 | 109 | java |
null | jabref-main/src/test/java/org/jabref/model/groups/GroupTreeNodeTest.java | package org.jabref.model.groups;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumSet;
import java.util.List;
import java.util.Optional;
import org.jabref.model.FieldChange;
import org.jabref.model.entry.BibEntry;
import org.jabref.model.entry.field.StandardField;
import org.jabref.model.search.matchers.AndMatcher;
import org.jabref.model.search.matchers.OrMatcher;
import org.jabref.model.search.rules.SearchRules;
import org.jabref.model.search.rules.SearchRules.SearchFlags;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
/**
 * Tests for {@link GroupTreeNode}: construction of search matchers for the
 * different hierarchy types, entry matching across a group hierarchy,
 * replacing a node's group via {@code setGroup}, and path/containment queries.
 *
 * <p>Fixes applied: JUnit's {@code assertEquals(expected, actual)} argument
 * order was reversed in the two {@code getContainingGroups} tests, and the
 * private helper {@code getExplict} was renamed to fix the typo.</p>
 */
public class GroupTreeNodeTest {

    private final List<BibEntry> entries = new ArrayList<>();
    private BibEntry entry;

    /**
     * Gets the marked node in the following tree of explicit groups:
     * Root
     * A ExplicitA, Including
     * A ExplicitParent, Independent (= parent)
     * B ExplicitNode, Refining (<-- this)
     */
    public static GroupTreeNode getNodeInSimpleTree(GroupTreeNode root) {
        root.addSubgroup(new ExplicitGroup("ExplicitA", GroupHierarchyType.INCLUDING, ','));
        GroupTreeNode parent = root
                .addSubgroup(new ExplicitGroup("ExplicitParent", GroupHierarchyType.INDEPENDENT, ','));
        return parent.addSubgroup(new ExplicitGroup("ExplicitNode", GroupHierarchyType.REFINING, ','));
    }

    /**
     * Gets the marked node in the following tree:
     * Root
     * A SearchA
     * A ExplicitA, Including
     * A ExplicitGrandParent (= grand parent)
     * B ExplicitB
     * B KeywordParent (= parent)
     * C KeywordNode (<-- this)
     * D ExplicitChild (= child)
     * C SearchC
     * C ExplicitC
     * C KeywordC
     * B SearchB
     * B KeywordB
     * A KeywordA
     */
    public static GroupTreeNode getNodeInComplexTree(GroupTreeNode root) {
        root.addSubgroup(getSearchGroup("SearchA"));
        root.addSubgroup(new ExplicitGroup("ExplicitA", GroupHierarchyType.INCLUDING, ','));
        GroupTreeNode grandParent = root
                .addSubgroup(new ExplicitGroup("ExplicitGrandParent", GroupHierarchyType.INDEPENDENT, ','));
        root.addSubgroup(getKeywordGroup("KeywordA"));
        grandParent.addSubgroup(getExplicit("ExplicitB"));
        GroupTreeNode parent = grandParent.addSubgroup(getKeywordGroup("KeywordParent"));
        grandParent.addSubgroup(getSearchGroup("SearchB"));
        grandParent.addSubgroup(getKeywordGroup("KeywordB"));
        GroupTreeNode node = parent.addSubgroup(getKeywordGroup("KeywordNode"));
        parent.addSubgroup(getSearchGroup("SearchC"));
        parent.addSubgroup(getExplicit("ExplicitC"));
        parent.addSubgroup(getKeywordGroup("KeywordC"));
        node.addSubgroup(getExplicit("ExplicitChild"));
        return node;
    }

    // Creates an independent keyword group matching "searchExpression" in the keywords field.
    private static AbstractGroup getKeywordGroup(String name) {
        return new WordKeywordGroup(name, GroupHierarchyType.INDEPENDENT, StandardField.KEYWORDS, "searchExpression", true, ',', false);
    }

    // Creates an including, case-sensitive search group for "searchExpression".
    private static AbstractGroup getSearchGroup(String name) {
        return new SearchGroup(name, GroupHierarchyType.INCLUDING, "searchExpression", EnumSet.of(SearchRules.SearchFlags.CASE_SENSITIVE));
    }

    // Creates a refining explicit group. (Renamed from the misspelled "getExplict".)
    private static AbstractGroup getExplicit(String name) {
        return new ExplicitGroup(name, GroupHierarchyType.REFINING, ',');
    }

    /**
     * Gets the marked node in the following tree:
     * Root
     * A
     * A
     * A (<- this)
     * A
     */
    /*
    GroupTreeNode getNodeAsChild(TreeNodeMock root) {
        root.addChild(new TreeNodeMock());
        root.addChild(new TreeNodeMock());
        TreeNodeMock node = new TreeNodeMock();
        root.addChild(node);
        root.addChild(new TreeNodeMock());
        return node;
    }
    */
    public static GroupTreeNode getRoot() {
        return GroupTreeNode.fromGroup(new AllEntriesGroup("All entries"));
    }

    @BeforeEach
    void setUp() {
        entries.clear();
        entry = new BibEntry();
        entries.add(entry);
        entries.add(new BibEntry().withField(StandardField.AUTHOR, "author1 and author2"));
        entries.add(new BibEntry().withField(StandardField.AUTHOR, "author1"));
    }

    /*
    GroupTreeNode getNodeInComplexTree() {
        return getNodeInComplexTree(new TreeNodeMock());
    }
    */
    private GroupTreeNode getNodeInSimpleTree() {
        return getNodeInSimpleTree(getRoot());
    }

    @Test
    void getSearchRuleForIndependentGroupReturnsGroupAsMatcher() {
        GroupTreeNode node = GroupTreeNode
                .fromGroup(new ExplicitGroup("node", GroupHierarchyType.INDEPENDENT, ','));
        assertEquals(node.getGroup(), node.getSearchMatcher());
    }

    @Test
    void getSearchRuleForRefiningGroupReturnsParentAndGroupAsMatcher() {
        GroupTreeNode parent = GroupTreeNode
                .fromGroup(
                        new ExplicitGroup("parent", GroupHierarchyType.INDEPENDENT, ','));
        GroupTreeNode node = parent
                .addSubgroup(new ExplicitGroup("node", GroupHierarchyType.REFINING, ','));
        // A refining group narrows its parent, hence an AND of both groups.
        AndMatcher matcher = new AndMatcher();
        matcher.addRule(node.getGroup());
        matcher.addRule(parent.getGroup());
        assertEquals(matcher, node.getSearchMatcher());
    }

    @Test
    void getSearchRuleForIncludingGroupReturnsGroupOrSubgroupAsMatcher() {
        GroupTreeNode node = GroupTreeNode.fromGroup(new ExplicitGroup("node", GroupHierarchyType.INCLUDING, ','));
        GroupTreeNode child = node.addSubgroup(new ExplicitGroup("child", GroupHierarchyType.INDEPENDENT, ','));
        // An including group also contains its subgroups' matches, hence an OR.
        OrMatcher matcher = new OrMatcher();
        matcher.addRule(node.getGroup());
        matcher.addRule(child.getGroup());
        assertEquals(matcher, node.getSearchMatcher());
    }

    @Test
    void findMatchesReturnsEmptyForEmptyList() {
        assertEquals(Collections.emptyList(), getNodeInSimpleTree().findMatches(Collections.emptyList()));
    }

    @Test
    void findMatchesOneEntry() {
        GroupTreeNode parent = getNodeInSimpleTree();
        GroupTreeNode node = parent.addSubgroup(
                new WordKeywordGroup("node", GroupHierarchyType.INDEPENDENT, StandardField.AUTHOR, "author2", true, ',', false));
        assertEquals(1, node.findMatches(entries).size());
    }

    @Test
    void findMatchesMultipleEntries() {
        GroupTreeNode parent = getNodeInSimpleTree();
        GroupTreeNode node = parent.addSubgroup(
                new WordKeywordGroup("node", GroupHierarchyType.INDEPENDENT, StandardField.AUTHOR, "author1", true, ',', false));
        assertEquals(2, node.findMatches(entries).size());
    }

    @Test
    void findMatchesWorksForRefiningGroups() {
        GroupTreeNode grandParent = getNodeInSimpleTree();
        GroupTreeNode parent = grandParent.addSubgroup(
                new WordKeywordGroup("node", GroupHierarchyType.INDEPENDENT, StandardField.AUTHOR, "author2", true, ',', false));
        GroupTreeNode node = parent.addSubgroup(
                new WordKeywordGroup("node", GroupHierarchyType.REFINING, StandardField.AUTHOR, "author1", true, ',', false));
        // Refining: only the entry matching BOTH author1 and author2 counts.
        assertEquals(1, node.findMatches(entries).size());
    }

    @Test
    void findMatchesWorksForHierarchyOfIndependentGroups() {
        GroupTreeNode grandParent = getNodeInSimpleTree();
        GroupTreeNode parent = grandParent.addSubgroup(
                new WordKeywordGroup("node", GroupHierarchyType.INDEPENDENT, StandardField.AUTHOR, "author2", true, ',', false));
        GroupTreeNode node = parent.addSubgroup(
                new WordKeywordGroup("node", GroupHierarchyType.INDEPENDENT, StandardField.AUTHOR, "author1", true, ',', false));
        // Independent: the parent's restriction does not apply, so both author1 entries match.
        assertEquals(2, node.findMatches(entries).size());
    }

    @Test
    void setGroupChangesUnderlyingGroup() {
        GroupTreeNode node = getNodeInSimpleTree();
        AbstractGroup newGroup = new ExplicitGroup("NewGroup", GroupHierarchyType.INDEPENDENT, ',');
        node.setGroup(newGroup, true, true, entries);
        assertEquals(newGroup, node.getGroup());
    }

    @Test
    void setGroupAddsPreviousAssignmentsExplicitToExplicit() {
        ExplicitGroup oldGroup = new ExplicitGroup("OldGroup", GroupHierarchyType.INDEPENDENT, ',');
        oldGroup.add(entry);
        GroupTreeNode node = GroupTreeNode.fromGroup(oldGroup);
        AbstractGroup newGroup = new ExplicitGroup("NewGroup", GroupHierarchyType.INDEPENDENT, ',');
        node.setGroup(newGroup, true, true, entries);
        assertTrue(newGroup.isMatch(entry));
    }

    @Test
    void setGroupWithFalseDoesNotAddsPreviousAssignments() {
        ExplicitGroup oldGroup = new ExplicitGroup("OldGroup", GroupHierarchyType.INDEPENDENT, ',');
        oldGroup.add(entry);
        GroupTreeNode node = GroupTreeNode.fromGroup(oldGroup);
        AbstractGroup newGroup = new ExplicitGroup("NewGroup", GroupHierarchyType.INDEPENDENT, ',');
        node.setGroup(newGroup, false, false, entries);
        assertFalse(newGroup.isMatch(entry));
    }

    @Test
    void setGroupAddsOnlyPreviousAssignments() {
        ExplicitGroup oldGroup = new ExplicitGroup("OldGroup", GroupHierarchyType.INDEPENDENT, ',');
        assertFalse(oldGroup.isMatch(entry));
        GroupTreeNode node = GroupTreeNode.fromGroup(oldGroup);
        AbstractGroup newGroup = new ExplicitGroup("NewGroup", GroupHierarchyType.INDEPENDENT, ',');
        node.setGroup(newGroup, true, true, entries);
        // The entry was never in the old group, so it must not end up in the new one.
        assertFalse(newGroup.isMatch(entry));
    }

    @Test
    void setGroupExplicitToSearchDoesNotKeepPreviousAssignments() {
        ExplicitGroup oldGroup = new ExplicitGroup("OldGroup", GroupHierarchyType.INDEPENDENT, ',');
        oldGroup.add(entry);
        GroupTreeNode node = GroupTreeNode.fromGroup(oldGroup);
        AbstractGroup newGroup = new SearchGroup("NewGroup", GroupHierarchyType.INDEPENDENT, "test", EnumSet.noneOf(SearchFlags.class));
        node.setGroup(newGroup, true, true, entries);
        assertFalse(newGroup.isMatch(entry));
    }

    @Test
    void setGroupExplicitToExplicitIsRenameAndSoRemovesPreviousAssignment() {
        ExplicitGroup oldGroup = new ExplicitGroup("OldGroup", GroupHierarchyType.INDEPENDENT, ',');
        oldGroup.add(entry);
        GroupTreeNode node = GroupTreeNode.fromGroup(oldGroup);
        AbstractGroup newGroup = new ExplicitGroup("NewGroup", GroupHierarchyType.INDEPENDENT, ',');
        node.setGroup(newGroup, true, true, entries);
        assertFalse(oldGroup.isMatch(entry));
    }

    @Test
    void getChildByPathFindsCorrectChildInSecondLevel() {
        GroupTreeNode root = getRoot();
        GroupTreeNode child = getNodeInSimpleTree(root);
        assertEquals(Optional.of(child), root.getChildByPath("ExplicitParent > ExplicitNode"));
    }

    @Test
    void getChildByPathDoesNotFindChildWhenInvalidPath() {
        GroupTreeNode root = getRoot();
        // use side effect of method, which builds the group tree
        getNodeInSimpleTree(root);
        assertEquals(Optional.empty(), root.getChildByPath("ExplicitParent > ExplicitChildNode"));
    }

    @Test
    void getPathSimpleTree() {
        GroupTreeNode node = getNodeInSimpleTree();
        assertEquals("ExplicitParent > ExplicitNode", node.getPath());
    }

    @Test
    void onlyRootAndChildNodeContainAtLeastOneEntry() {
        GroupTreeNode rootNode = getRoot();
        rootNode.addSubgroup(new ExplicitGroup("ExplicitA", GroupHierarchyType.INCLUDING, ','));
        GroupTreeNode parent = rootNode
                .addSubgroup(new ExplicitGroup("ExplicitParent", GroupHierarchyType.INDEPENDENT, ','));
        GroupTreeNode child = parent.addSubgroup(new ExplicitGroup("ExplicitNode", GroupHierarchyType.REFINING, ','));
        BibEntry newEntry = new BibEntry().withField(StandardField.AUTHOR, "Stephen King");
        child.addEntriesToGroup(Collections.singletonList(newEntry));
        entries.add(newEntry);
        // Fixed argument order: JUnit expects assertEquals(expected, actual).
        assertEquals(Arrays.asList(rootNode, child), rootNode.getContainingGroups(entries, false));
    }

    @Test
    void onlySubgroupsContainAllEntries() {
        GroupTreeNode rootNode = getRoot();
        rootNode.addSubgroup(new ExplicitGroup("ExplicitA", GroupHierarchyType.INCLUDING, ','));
        GroupTreeNode parent = rootNode
                .addSubgroup(new ExplicitGroup("ExplicitParent", GroupHierarchyType.INDEPENDENT, ','));
        GroupTreeNode firstChild = parent.addSubgroup(new ExplicitGroup("ExplicitNode", GroupHierarchyType.REFINING, ','));
        GroupTreeNode secondChild = parent.addSubgroup(new ExplicitGroup("ExplicitSecondNode", GroupHierarchyType.REFINING, ','));
        GroupTreeNode grandChild = secondChild.addSubgroup(new ExplicitGroup("ExplicitNodeThirdLevel", GroupHierarchyType.REFINING, ','));
        parent.addEntriesToGroup(Collections.singletonList(entry));
        firstChild.addEntriesToGroup(entries);
        secondChild.addEntriesToGroup(entries);
        grandChild.addEntriesToGroup(entries);
        // Fixed argument order: JUnit expects assertEquals(expected, actual).
        assertEquals(Arrays.asList(firstChild, secondChild, grandChild), parent.getContainingGroups(entries, true));
    }

    @Test
    void addEntriesToGroupWorksNotForGroupsNotSupportingExplicitAddingOfEntries() {
        GroupTreeNode searchGroup = new GroupTreeNode(new SearchGroup("Search A", GroupHierarchyType.INCLUDING, "searchExpression", EnumSet.of(SearchRules.SearchFlags.CASE_SENSITIVE)));
        List<FieldChange> fieldChanges = searchGroup.addEntriesToGroup(entries);
        assertEquals(Collections.emptyList(), fieldChanges);
    }

    @Test
    void removeEntriesFromGroupWorksNotForGroupsNotSupportingExplicitRemovalOfEntries() {
        GroupTreeNode searchGroup = new GroupTreeNode(new SearchGroup("Search A", GroupHierarchyType.INCLUDING, "searchExpression", EnumSet.of(SearchRules.SearchFlags.CASE_SENSITIVE)));
        List<FieldChange> fieldChanges = searchGroup.removeEntriesFromGroup(entries);
        assertEquals(Collections.emptyList(), fieldChanges);
    }
}
| 14,341 | 39.860399 | 185 | java |
null | jabref-main/src/test/java/org/jabref/model/groups/SearchGroupTest.java | package org.jabref.model.groups;
import java.util.EnumSet;
import org.jabref.model.entry.BibEntry;
import org.jabref.model.entry.field.StandardField;
import org.jabref.model.search.rules.SearchRules;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
/**
 * Tests for {@link SearchGroup}: regular-expression matching against keywords
 * and NOT-queries against the {@code groups} field.
 *
 * <p>Fixes applied: the method names {@code containsDoesNotFindsWord…} and
 * {@code notQueryWorksWithLRightPartOfQuery} contained typos and were renamed.</p>
 */
public class SearchGroupTest {

    @Test
    public void containsFindsWordWithRegularExpression() {
        SearchGroup group = new SearchGroup("myExplicitGroup", GroupHierarchyType.INDEPENDENT, "anyfield=rev*", EnumSet.of(SearchRules.SearchFlags.CASE_SENSITIVE, SearchRules.SearchFlags.REGULAR_EXPRESSION));
        BibEntry entry = new BibEntry();
        entry.addKeyword("review", ',');
        assertTrue(group.contains(entry));
    }

    @Test
    public void containsDoesNotFindWordWithInvalidRegularExpression() {
        // "*rev*" is not a valid regular expression (dangling quantifier), so nothing matches.
        SearchGroup group = new SearchGroup("myExplicitGroup", GroupHierarchyType.INDEPENDENT, "anyfield=*rev*", EnumSet.of(SearchRules.SearchFlags.CASE_SENSITIVE, SearchRules.SearchFlags.REGULAR_EXPRESSION));
        BibEntry entry = new BibEntry();
        entry.addKeyword("review", ',');
        assertFalse(group.contains(entry));
    }

    @Test
    public void notQueryWorksWithLeftPartOfQuery() {
        SearchGroup groupToBeClassified = new SearchGroup("to-be-classified", GroupHierarchyType.INDEPENDENT, "NOT(groups=alpha) AND NOT(groups=beta)", EnumSet.noneOf(SearchRules.SearchFlags.class));
        BibEntry alphaEntry = new BibEntry()
                .withCitationKey("alpha")
                .withField(StandardField.GROUPS, "alpha");
        assertFalse(groupToBeClassified.contains(alphaEntry));
    }

    @Test
    public void notQueryWorksWithRightPartOfQuery() {
        SearchGroup groupToBeClassified = new SearchGroup("to-be-classified", GroupHierarchyType.INDEPENDENT, "NOT(groups=alpha) AND NOT(groups=beta)", EnumSet.noneOf(SearchRules.SearchFlags.class));
        BibEntry betaEntry = new BibEntry()
                .withCitationKey("beta")
                .withField(StandardField.GROUPS, "beta");
        assertFalse(groupToBeClassified.contains(betaEntry));
    }
}
| 2,188 | 39.537037 | 209 | java |
null | jabref-main/src/test/java/org/jabref/model/groups/TexGroupTest.java | package org.jabref.model.groups;
import java.nio.file.Path;
import org.jabref.architecture.AllowedToUseLogic;
import org.jabref.logic.auxparser.DefaultAuxParser;
import org.jabref.model.database.BibDatabase;
import org.jabref.model.entry.BibEntry;
import org.jabref.model.metadata.MetaData;
import org.jabref.model.util.DummyFileUpdateMonitor;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
@AllowedToUseLogic("because class under test relies on logic classes")
@AllowedToUseLogic("because class under test relies on logic classes")
public class TexGroupTest {

    private MetaData metaData;

    @BeforeEach
    public void setUp() throws Exception {
        metaData = new MetaData();
    }

    @Test
    public void containsReturnsTrueForEntryInAux() throws Exception {
        Path auxFile = Path.of(TexGroupTest.class.getResource("paper.aux").toURI());
        TexGroup group = new TexGroup("paper", GroupHierarchyType.INDEPENDENT, auxFile, new DefaultAuxParser(new BibDatabase()), new DummyFileUpdateMonitor(), metaData);
        BibEntry inAux = new BibEntry();
        inAux.setCitationKey("Darwin1888");
        assertTrue(group.contains(inAux));
    }

    @Test
    // Renamed from "containsReturnsTrueForEntryNotInAux": the old name
    // contradicted the assertFalse below.
    public void containsReturnsFalseForEntryNotInAux() throws Exception {
        Path auxFile = Path.of(TexGroupTest.class.getResource("paper.aux").toURI());
        TexGroup group = new TexGroup("paper", GroupHierarchyType.INDEPENDENT, auxFile, new DefaultAuxParser(new BibDatabase()), new DummyFileUpdateMonitor(), metaData);
        BibEntry notInAux = new BibEntry();
        notInAux.setCitationKey("NotInAux2017");
        assertFalse(group.contains(notInAux));
    }

    @Test
    public void getFilePathReturnsRelativePath() throws Exception {
        Path auxFile = Path.of(TexGroupTest.class.getResource("paper.aux").toURI());
        String user = "Darwin";
        // With the aux file's parent registered as the user's LaTeX directory,
        // the group reports the path relative to it.
        metaData.setLatexFileDirectory(user, auxFile.getParent());
        TexGroup group = new TexGroup("paper", GroupHierarchyType.INDEPENDENT, auxFile, new DefaultAuxParser(new BibDatabase()), new DummyFileUpdateMonitor(), metaData, user);
        assertEquals("paper.aux", group.getFilePath().toString());
    }
}
| 2,309 | 38.152542 | 175 | java |
null | jabref-main/src/test/java/org/jabref/model/groups/WordKeywordGroupTest.java | package org.jabref.model.groups;
import java.util.Optional;
import org.jabref.model.entry.BibEntry;
import org.jabref.model.entry.field.StandardField;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
/**
 * Tests for {@link WordKeywordGroup}: word-based matching in the keywords
 * field (case sensitivity, separators, escaped words) and the add/remove
 * operations that rewrite the field.
 *
 * <p>Cleanup: removed the inconsistently applied, unnecessary
 * {@code throws Exception} clauses (no checked exceptions are thrown) and
 * fixed the typo in {@code containsDoesNotFindsWordInSentenceIfCaseDiffers}.</p>
 */
public class WordKeywordGroupTest {

    private WordKeywordGroup testGroup;
    private WordKeywordGroup testCaseSensitiveGroup;
    private WordKeywordGroup waterGroup;
    private BibEntry entry;

    @BeforeEach
    public void setUp() {
        testGroup = new WordKeywordGroup("name", GroupHierarchyType.INDEPENDENT, StandardField.KEYWORDS, "test", false, ',', false);
        testCaseSensitiveGroup = new WordKeywordGroup("name", GroupHierarchyType.INDEPENDENT, StandardField.KEYWORDS, "test", true, ',', false);
        waterGroup = new WordKeywordGroup("name", GroupHierarchyType.INDEPENDENT, StandardField.KEYWORDS, "\\H2O", false, ',', false);
        entry = new BibEntry();
    }

    @Test
    public void containsFindsSameWord() {
        entry.setField(StandardField.KEYWORDS, "test");
        assertTrue(testGroup.contains(entry));
    }

    @Test
    public void containsFindsWordInSentence() {
        entry.setField(StandardField.KEYWORDS, "Some sentence containing test word");
        assertTrue(testGroup.contains(entry));
    }

    @Test
    public void containsFindsWordInCommaSeparatedList() {
        entry.setField(StandardField.KEYWORDS, "Some,list,containing,test,word");
        assertTrue(testGroup.contains(entry));
    }

    @Test
    public void containsFindsWordInSemicolonSeparatedList() {
        entry.setField(StandardField.KEYWORDS, "Some;list;containing;test;word");
        assertTrue(testGroup.contains(entry));
    }

    @Test
    public void containsFindsSameComplexWord() {
        entry.setField(StandardField.KEYWORDS, "\\H2O");
        assertTrue(waterGroup.contains(entry));
    }

    @Test
    public void containsFindsComplexWordInSentence() {
        entry.setField(StandardField.KEYWORDS, "Some sentence containing \\H2O word");
        assertTrue(waterGroup.contains(entry));
    }

    @Test
    public void containsDoesNotFindWordIfCaseDiffers() {
        entry.setField(StandardField.KEYWORDS, "Test");
        assertFalse(testCaseSensitiveGroup.contains(entry));
    }

    @Test
    public void containsDoesNotFindWordInSentenceIfCaseDiffers() {
        entry.setField(StandardField.KEYWORDS, "Some sentence containing Test word");
        assertFalse(testCaseSensitiveGroup.contains(entry));
    }

    @Test
    public void addChangesFieldIfEmptyBefore() {
        testGroup.add(entry);
        assertEquals(Optional.of("test"), entry.getField(StandardField.KEYWORDS));
    }

    @Test
    public void addChangesFieldIfNotEmptyBefore() {
        entry.setField(StandardField.KEYWORDS, "bla, blubb");
        testGroup.add(entry);
        // The group's word is appended to the existing keyword list.
        assertEquals(Optional.of("bla, blubb, test"), entry.getField(StandardField.KEYWORDS));
    }

    @Test
    public void addDoesNotAddDuplicate() {
        entry.setField(StandardField.KEYWORDS, "test, blubb");
        testGroup.add(entry);
        assertEquals(Optional.of("test, blubb"), entry.getField(StandardField.KEYWORDS));
    }

    @Test
    public void removeDoesNothingIfEntryNotMatched() {
        entry.setField(StandardField.KEYWORDS, "something");
        testGroup.remove(entry);
        assertEquals(Optional.of("something"), entry.getField(StandardField.KEYWORDS));
    }

    @Test
    public void removeRemovesNameFromField() {
        entry.setField(StandardField.KEYWORDS, "test, blubb");
        testGroup.remove(entry);
        assertEquals(Optional.of("blubb"), entry.getField(StandardField.KEYWORDS));
    }
}
| 4,073 | 31.592 | 144 | java |
null | jabref-main/src/test/java/org/jabref/model/metadata/MetaDataTest.java | package org.jabref.model.metadata;
import java.util.Optional;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
public class MetaDataTest {

    // JUnit 5 creates a fresh test-class instance per test method, so a plain
    // field initializer gives every test its own MetaData (no @BeforeEach needed).
    private final MetaData metaData = new MetaData();

    @Test
    public void emptyGroupsIfNotSet() {
        // A pristine MetaData has no group tree configured.
        assertEquals(Optional.empty(), metaData.getGroups());
    }
}
| 468 | 18.541667 | 61 | java |
null | jabref-main/src/test/java/org/jabref/model/openoffice/CitationEntryTest.java | package org.jabref.model.openoffice;
import java.util.Optional;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
/**
 * Tests for {@link CitationEntry}: construction with and without page info,
 * plus the equals/compareTo contracts (both based on the ref-mark name, as
 * the equality of entries with different contexts shows).
 *
 * <p>Fix applied: renamed {@code testCitationEntryInitalPageInfoChanged}
 * (typo "Inital") to {@code testCitationEntryInitialPageInfoChanged}.</p>
 */
class CitationEntryTest {

    @Test
    void testCitationEntryInitialPageInfo() {
        CitationEntry citationEntry = new CitationEntry("RefMark", "Context", "Info");
        assertTrue(citationEntry.getPageInfo().isPresent());
        assertEquals("Info", citationEntry.getPageInfo().get());
        assertEquals("RefMark", citationEntry.getRefMarkName());
        assertEquals("Context", citationEntry.getContext());
    }

    @Test
    void testCitationEntryOptionalInitialPageInfo() {
        CitationEntry citationEntry = new CitationEntry("RefMark", "Context", Optional.of("Info"));
        assertEquals(Optional.of("Info"), citationEntry.getPageInfo());
        assertEquals("RefMark", citationEntry.getRefMarkName());
        assertEquals("Context", citationEntry.getContext());
    }

    @Test
    void testCitationEntryInitialPageInfoChanged() {
        CitationEntry citationEntry = new CitationEntry("RefMark", "Context", "Info");
        assertEquals(Optional.of("Info"), citationEntry.getPageInfo());
    }

    @Test
    void testCitationEntryNoInitialPageInfo() {
        CitationEntry citationEntry = new CitationEntry("RefMark", "Context");
        assertEquals(Optional.empty(), citationEntry.getPageInfo());
    }

    @Test
    void testCitationEntryEquals() {
        CitationEntry citationEntry1 = new CitationEntry("RefMark", "Context", "Info");
        CitationEntry citationEntry2 = new CitationEntry("RefMark2", "Context", "Info");
        CitationEntry citationEntry3 = new CitationEntry("RefMark", "Other Context", "Other Info");
        assertEquals(citationEntry1, citationEntry1);
        // Equal ref-mark names imply equal entries even with different context/info.
        assertEquals(citationEntry1, citationEntry3);
        assertNotEquals(citationEntry1, citationEntry2);
        assertNotEquals(citationEntry1, "Random String");
    }

    @Test
    void testCitationEntryCompareTo() {
        CitationEntry citationEntry1 = new CitationEntry("RefMark", "Context", "Info");
        CitationEntry citationEntry2 = new CitationEntry("RefMark2", "Context", "Info");
        CitationEntry citationEntry3 = new CitationEntry("RefMark", "Other Context", "Other Info");
        assertEquals(0, citationEntry1.compareTo(citationEntry3));
        assertEquals(-1, citationEntry1.compareTo(citationEntry2));
        assertEquals(1, citationEntry2.compareTo(citationEntry1));
    }
}
| 2,618 | 39.921875 | 99 | java |
null | jabref-main/src/test/java/org/jabref/model/paging/PageTest.java | package org.jabref.model.paging;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
/**
 * Tests for {@link org.jabref.model.paging.Page}: content, page number,
 * query, and size accessors.
 *
 * <p>Cleanup: use the diamond operator instead of repeating the explicit
 * type argument in {@code new Page<String>(...)}.</p>
 */
public class PageTest {

    private Page<String> page1;
    private Page<String> page2;
    private final int testPageNumber = 3;
    private final String testQuery = "anyQuery";
    private Collection<String> testContent = new ArrayList<>();
    private final String[] testStrings = {"str1", "str2", "str3"};

    @BeforeEach
    public void setup() {
        testContent.addAll(Arrays.asList(testStrings));
        testContent = Collections.unmodifiableCollection(testContent);
        page1 = new Page<>(testQuery, testPageNumber, testContent);
        page2 = new Page<>(testQuery, testPageNumber);
    }

    @Test
    public void getContentTest() {
        // make sure the collections have the same elements:
        // the symmetric difference between expected and actual content is empty
        List<String> differences = new ArrayList<>(testContent);
        differences.removeAll(page1.getContent());
        assertTrue(differences.isEmpty());
        List<String> differences2 = new ArrayList<>(page1.getContent());
        differences2.removeAll(testContent);
        assertTrue(differences2.isEmpty());
        // a page constructed without content reports none
        assertTrue(page2.getContent().isEmpty());
    }

    @Test
    public void getPageNumberTest() {
        assertEquals(testPageNumber, page1.getPageNumber());
    }

    @Test
    public void getQueryTest() {
        assertEquals(testQuery, page1.getQuery());
    }

    @Test
    public void getSizeTest() {
        assertEquals(testContent.size(), page1.getSize());
    }
}
| 1,801 | 29.033333 | 73 | java |
null | jabref-main/src/test/java/org/jabref/model/pdf/FileAnnotationTest.java | package org.jabref.model.pdf;
import java.time.LocalDateTime;
import java.time.temporal.ChronoUnit;
import java.util.Optional;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
/**
 * Tests for {@link FileAnnotation}: parsing PDF modification-date strings
 * (with and without timezone suffix), the fallback for unparsable dates,
 * and abbreviation of over-long annotation texts.
 *
 * <p>Fix applied: the date components were written as {@code 05} — a
 * leading zero makes an int literal octal in Java. Harmless for 0–7 digits,
 * but {@code 08}/{@code 09} would not even compile, so the literals are now
 * written without leading zeros.</p>
 */
public class FileAnnotationTest {

    @Test
    public void testParseDateMinusBeforeTimezone() {
        String dateString = "D:20170512224019-03'00'";
        LocalDateTime date = FileAnnotation.extractModifiedTime(dateString);
        assertEquals(LocalDateTime.of(2017, 5, 12, 22, 40, 19), date);
    }

    @Test
    public void testParseDatePlusBeforeTimezone() {
        String dateString = "D:20170512224019+03'00'";
        LocalDateTime date = FileAnnotation.extractModifiedTime(dateString);
        assertEquals(LocalDateTime.of(2017, 5, 12, 22, 40, 19), date);
    }

    @Test
    public void testParseDateNoTimezone() {
        String dateString = "D:20170512224019";
        LocalDateTime date = FileAnnotation.extractModifiedTime(dateString);
        assertEquals(LocalDateTime.of(2017, 5, 12, 22, 40, 19), date);
    }

    @Test
    public void testParseNotADate() {
        String dateString = "gsdfgwergsdf";
        LocalDateTime date = FileAnnotation.extractModifiedTime(dateString);
        // An unparsable string falls back to "now"; allow up to a second of skew.
        assertTrue(ChronoUnit.SECONDS.between(LocalDateTime.now(), date) <= 1);
    }

    @Test
    public void testAbbreviateAnnotationName() {
        final FileAnnotation fileAnnotation = new FileAnnotation("John Robertson",
                LocalDateTime.of(2020, 4, 18, 17, 10), 1,
                "this is an annotation that is very long and goes over the character limit of 45",
                FileAnnotationType.FREETEXT, Optional.empty());
        assertEquals("this is an annotation that is very long and g...", fileAnnotation.toString());
    }
}
| 1,855 | 34.692308 | 100 | java |
null | jabref-main/src/test/java/org/jabref/model/search/matchers/MatcherSetsTest.java | package org.jabref.model.search.matchers;
import org.jabref.model.entry.BibEntry;
import org.jabref.model.search.rules.MockSearchMatcher;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
/**
 * Tests for {@link MatcherSets} and {@link NotMatcher}: empty-set defaults
 * and the behavior of AND/OR/NOT composition.
 */
public class MatcherSetsTest {

    @Test
    public void testBuildAnd() {
        MatcherSet conjunction = MatcherSets.build(MatcherSets.MatcherType.AND);
        // An empty conjunction matches everything (vacuously true).
        assertTrue(conjunction.isMatch(new BibEntry()));
        conjunction.addRule(new MockSearchMatcher(true));
        assertTrue(conjunction.isMatch(new BibEntry()));
        // One failing rule makes the whole conjunction fail.
        conjunction.addRule(new MockSearchMatcher(false));
        assertFalse(conjunction.isMatch(new BibEntry()));
    }

    @Test
    public void testBuildOr() {
        MatcherSet disjunction = MatcherSets.build(MatcherSets.MatcherType.OR);
        // An empty disjunction matches nothing (vacuously false).
        assertFalse(disjunction.isMatch(new BibEntry()));
        // One passing rule is enough for the whole disjunction to pass.
        disjunction.addRule(new MockSearchMatcher(true));
        assertTrue(disjunction.isMatch(new BibEntry()));
        disjunction.addRule(new MockSearchMatcher(false));
        assertTrue(disjunction.isMatch(new BibEntry()));
    }

    @Test
    public void testBuildNotWithTrue() {
        NotMatcher negatedTrue = new NotMatcher(new MockSearchMatcher(true));
        assertFalse(negatedTrue.isMatch(new BibEntry()));
    }

    @Test
    public void testBuildNotWithFalse() {
        NotMatcher negatedFalse = new NotMatcher(new MockSearchMatcher(false));
        assertTrue(negatedFalse.isMatch(new BibEntry()));
    }
}
| 1,532 | 30.285714 | 79 | java |
null | jabref-main/src/test/java/org/jabref/model/search/rules/ContainsBasedSearchRuleTest.java | package org.jabref.model.search.rules;
import java.util.EnumSet;
import org.jabref.model.entry.BibEntry;
import org.jabref.model.entry.field.StandardField;
import org.jabref.model.entry.types.StandardEntryType;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
/**
* Test case for ContainBasedSearchRule.
*/
/**
 * Test case for ContainBasedSearchRule: compares contains-based and
 * regex-based rules on the same entry, with and without case sensitivity.
 */
public class ContainsBasedSearchRuleTest {

    // Single fixture entry all queries are evaluated against.
    private final BibEntry entry = new BibEntry(StandardEntryType.InCollection)
            .withCitationKey("shields01")
            .withField(StandardField.TITLE, "Marine finfish larviculture in Europe")
            .withField(StandardField.YEAR, "2001")
            .withField(StandardField.AUTHOR, "Kevin Shields");

    private final ContainsBasedSearchRule containsCaseSensitive = new ContainsBasedSearchRule(EnumSet.of(SearchRules.SearchFlags.CASE_SENSITIVE, SearchRules.SearchFlags.REGULAR_EXPRESSION));
    private final ContainsBasedSearchRule containsCaseInsensitive = new ContainsBasedSearchRule(EnumSet.of(SearchRules.SearchFlags.REGULAR_EXPRESSION));
    private final RegexBasedSearchRule regexCaseSensitive = new RegexBasedSearchRule(EnumSet.of(SearchRules.SearchFlags.CASE_SENSITIVE, SearchRules.SearchFlags.REGULAR_EXPRESSION));
    private final RegexBasedSearchRule regexCaseInsensitive = new RegexBasedSearchRule(EnumSet.of(SearchRules.SearchFlags.REGULAR_EXPRESSION));

    @Test
    public void testContentOfSingleField() {
        // Quoted phrase that appears in no single field as-is.
        String query = "\"marine larviculture\"";
        assertFalse(containsCaseSensitive.applyRule(query, entry));
        assertFalse(containsCaseInsensitive.applyRule(query, entry));
        assertFalse(regexCaseSensitive.applyRule(query, entry));
        assertFalse(regexCaseInsensitive.applyRule(query, entry));
    }

    @Test
    public void testContentDistributedOnMultipleFields() {
        // Terms spread over title, year and author fields.
        String query = "marine 2001 shields";
        assertFalse(containsCaseSensitive.applyRule(query, entry));
        assertTrue(containsCaseInsensitive.applyRule(query, entry));
        assertFalse(regexCaseSensitive.applyRule(query, entry));
        assertFalse(regexCaseInsensitive.applyRule(query, entry));
    }

    @Test
    public void testRegularExpressionMatch() {
        // Only the case-insensitive regex rule interprets the pattern.
        String query = "marine [A-Za-z]* larviculture";
        assertFalse(containsCaseSensitive.applyRule(query, entry));
        assertFalse(containsCaseInsensitive.applyRule(query, entry));
        assertFalse(regexCaseSensitive.applyRule(query, entry));
        assertTrue(regexCaseInsensitive.applyRule(query, entry));
    }
}
| 2,558 | 42.372881 | 184 | java |
null | jabref-main/src/test/java/org/jabref/model/search/rules/GrammarBasedSearchRuleTest.java | package org.jabref.model.search.rules;
import java.util.EnumSet;
import org.jabref.model.entry.BibEntry;
import org.jabref.model.entry.field.StandardField;
import org.jabref.model.entry.types.StandardEntryType;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
/**
* Test case for GrammarBasedSearchRuleTest.
*/
public class GrammarBasedSearchRuleTest {
@Test
void applyRuleMatchesSingleTermWithRegex() {
GrammarBasedSearchRule searchRule = new GrammarBasedSearchRule(EnumSet.of(SearchRules.SearchFlags.CASE_SENSITIVE, SearchRules.SearchFlags.REGULAR_EXPRESSION));
String query = "M[a-z]+e";
assertTrue(searchRule.validateSearchStrings(query));
assertTrue(searchRule.applyRule(query, makeBibtexEntry()));
}
@Test
void applyRuleDoesNotMatchSingleTermWithRegex() {
GrammarBasedSearchRule searchRule = new GrammarBasedSearchRule(EnumSet.of(SearchRules.SearchFlags.CASE_SENSITIVE, SearchRules.SearchFlags.REGULAR_EXPRESSION));
String query = "M[0-9]+e";
assertTrue(searchRule.validateSearchStrings(query));
assertFalse(searchRule.applyRule(query, makeBibtexEntry()));
}
@Test
void searchRuleOfDocumentationMatches() {
GrammarBasedSearchRule searchRule = new GrammarBasedSearchRule(EnumSet.of(SearchRules.SearchFlags.REGULAR_EXPRESSION));
String query = "(author = miller or title|keywords = \"image processing\") and not author = brown";
assertTrue(searchRule.validateSearchStrings(query));
assertTrue(searchRule.applyRule(query, new BibEntry()
.withCitationKey("key")
.withField(StandardField.KEYWORDS, "image processing")));
assertFalse(searchRule.applyRule(query, new BibEntry()
.withCitationKey("key")
.withField(StandardField.AUTHOR, "Sam Brown")
.withField(StandardField.KEYWORDS, "image processing")));
}
@Disabled
@Test
void searchForAnyFieldWorks() {
GrammarBasedSearchRule searchRule = new GrammarBasedSearchRule(EnumSet.of(SearchRules.SearchFlags.REGULAR_EXPRESSION));
String query = "anyfield:fruit";
assertTrue(searchRule.validateSearchStrings(query));
assertTrue(searchRule.applyRule(query, new BibEntry()
.withField(StandardField.KEYWORDS, "fruit")));
}
@Disabled
@Test
void searchForAnyKeywordWorks() {
GrammarBasedSearchRule searchRule = new GrammarBasedSearchRule(EnumSet.of(SearchRules.SearchFlags.REGULAR_EXPRESSION));
String query = "anykeyword:apple";
assertTrue(searchRule.validateSearchStrings(query));
assertTrue(searchRule.applyRule(query, new BibEntry()
.withField(StandardField.KEYWORDS, "apple")));
assertFalse(searchRule.applyRule(query, new BibEntry()
.withField(StandardField.KEYWORDS, "pineapple")));
}
@Test
void searchForCitationKeyWorks() {
GrammarBasedSearchRule searchRule = new GrammarBasedSearchRule(EnumSet.noneOf(SearchRules.SearchFlags.class));
String query = "citationkey==miller2005";
assertTrue(searchRule.validateSearchStrings(query));
assertTrue(searchRule.applyRule(query, new BibEntry()
.withCitationKey("miller2005")));
}
@Test
void searchForThesisEntryTypeWorks() {
GrammarBasedSearchRule searchRule = new GrammarBasedSearchRule(EnumSet.noneOf(SearchRules.SearchFlags.class));
String query = "entrytype=thesis";
assertTrue(searchRule.validateSearchStrings(query));
assertTrue(searchRule.applyRule(query, new BibEntry(StandardEntryType.PhdThesis)));
}
public BibEntry makeBibtexEntry() {
return new BibEntry(StandardEntryType.InCollection)
.withCitationKey("shields01")
.withField(StandardField.TITLE, "Marine finfish larviculture in Europe")
.withField(StandardField.YEAR, "2001")
.withField(StandardField.AUTHOR, "Kevin Shields");
}
}
| 4,203 | 40.215686 | 167 | java |
null | jabref-main/src/test/java/org/jabref/model/search/rules/MockSearchMatcher.java | package org.jabref.model.search.rules;
import org.jabref.model.entry.BibEntry;
import org.jabref.model.search.SearchMatcher;
/**
* Mock search rule that returns the values passed. Useful for testing.
*/
public class MockSearchMatcher implements SearchMatcher {
private final boolean result;
public MockSearchMatcher(boolean result) {
this.result = result;
}
@Override
public boolean isMatch(BibEntry entry) {
return result;
}
}
| 475 | 20.636364 | 71 | java |
null | jabref-main/src/test/java/org/jabref/model/search/rules/SentenceAnalyzerTest.java | package org.jabref.model.search.rules;
import java.util.List;
import java.util.stream.Stream;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import static org.junit.jupiter.api.Assertions.assertEquals;
public class SentenceAnalyzerTest {
static Stream<Arguments> getParameters() {
return Stream.of(
Arguments.of(List.of("a", "b"), "a b"),
// Leading and trailing spaces
Arguments.of(List.of("a", "b"), " a b "),
// Escaped characters and trailing spaces
Arguments.of(List.of("b "), "\"b \" "),
// Escaped characters and leading spaces.
Arguments.of(List.of(" a"), " \\ a")
);
}
@ParameterizedTest
@MethodSource("getParameters")
public void testGetWords(List<String> expected, String input) {
assertEquals(expected, new SentenceAnalyzer(input).getWords());
}
}
| 1,040 | 28.742857 | 71 | java |
null | jabref-main/src/test/java/org/jabref/model/strings/StringUtilTest.java | package org.jabref.model.strings;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Optional;
import java.util.stream.Stream;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
class StringUtilTest {
@Test
void StringUtilClassIsSmall() throws Exception {
Path path = Path.of("src", "main", "java", StringUtil.class.getName().replace('.', '/') + ".java");
int lineCount = Files.readAllLines(path, StandardCharsets.UTF_8).size();
assertTrue(lineCount <= 774, "StringUtil increased in size to " + lineCount + ". "
+ "We try to keep this class as small as possible. "
+ "Thus think twice if you add something to StringUtil.");
}
@Test
void testBooleanToBinaryString() {
assertEquals("0", StringUtil.booleanToBinaryString(false));
assertEquals("1", StringUtil.booleanToBinaryString(true));
}
@Test
void testQuoteSimple() {
assertEquals("a::", StringUtil.quote("a:", "", ':'));
}
@Test
void testQuoteNullQuotation() {
assertEquals("a::", StringUtil.quote("a:", null, ':'));
}
@Test
void testQuoteNullString() {
assertEquals("", StringUtil.quote(null, ";", ':'));
}
@Test
void testQuoteQuotationCharacter() {
assertEquals("a:::;", StringUtil.quote("a:;", ";", ':'));
}
@Test
void testQuoteMoreComplicated() {
assertEquals("a::b:%c:;", StringUtil.quote("a:b%c;", "%;", ':'));
}
@Test
void testUnifyLineBreaks() {
// Mac < v9
String result = StringUtil.unifyLineBreaks("\r", "newline");
assertEquals("newline", result);
// Windows
result = StringUtil.unifyLineBreaks("\r\n", "newline");
assertEquals("newline", result);
// Unix
result = StringUtil.unifyLineBreaks("\n", "newline");
assertEquals("newline", result);
}
@Test
void testGetCorrectFileName() {
assertEquals("aa.bib", StringUtil.getCorrectFileName("aa", "bib"));
assertEquals(".login.bib", StringUtil.getCorrectFileName(".login", "bib"));
assertEquals("a.bib", StringUtil.getCorrectFileName("a.bib", "bib"));
assertEquals("a.bib", StringUtil.getCorrectFileName("a.bib", "BIB"));
assertEquals("a.bib", StringUtil.getCorrectFileName("a", "bib"));
assertEquals("a.bb", StringUtil.getCorrectFileName("a.bb", "bib"));
assertEquals("", StringUtil.getCorrectFileName(null, "bib"));
}
@Test
void testQuoteForHTML() {
assertEquals("!", StringUtil.quoteForHTML("!"));
assertEquals("!!!", StringUtil.quoteForHTML("!!!"));
}
@Test
void testRemoveBracesAroundCapitals() {
assertEquals("ABC", StringUtil.removeBracesAroundCapitals("{ABC}"));
assertEquals("ABC", StringUtil.removeBracesAroundCapitals("{{ABC}}"));
assertEquals("{abc}", StringUtil.removeBracesAroundCapitals("{abc}"));
assertEquals("ABCDEF", StringUtil.removeBracesAroundCapitals("{ABC}{DEF}"));
}
@Test
void testPutBracesAroundCapitals() {
assertEquals("{ABC}", StringUtil.putBracesAroundCapitals("ABC"));
assertEquals("{ABC}", StringUtil.putBracesAroundCapitals("{ABC}"));
assertEquals("abc", StringUtil.putBracesAroundCapitals("abc"));
assertEquals("#ABC#", StringUtil.putBracesAroundCapitals("#ABC#"));
assertEquals("{ABC} def {EFG}", StringUtil.putBracesAroundCapitals("ABC def EFG"));
}
@Test
void testShaveString() {
assertEquals("", StringUtil.shaveString(null));
assertEquals("", StringUtil.shaveString(""));
assertEquals("aaa", StringUtil.shaveString(" aaa\t\t\n\r"));
assertEquals("a", StringUtil.shaveString(" {a} "));
assertEquals("a", StringUtil.shaveString(" \"a\" "));
assertEquals("{a}", StringUtil.shaveString(" {{a}} "));
assertEquals("{a}", StringUtil.shaveString(" \"{a}\" "));
assertEquals("\"{a\"}", StringUtil.shaveString(" \"{a\"} "));
}
@Test
void testJoin() {
String[] s = {"ab", "cd", "ed"};
assertEquals("ab\\cd\\ed", StringUtil.join(s, "\\", 0, s.length));
assertEquals("cd\\ed", StringUtil.join(s, "\\", 1, s.length));
assertEquals("ed", StringUtil.join(s, "\\", 2, s.length));
assertEquals("", StringUtil.join(s, "\\", 3, s.length));
assertEquals("", StringUtil.join(new String[]{}, "\\", 0, 0));
}
@Test
void testStripBrackets() {
assertEquals("foo", StringUtil.stripBrackets("[foo]"));
assertEquals("[foo]", StringUtil.stripBrackets("[[foo]]"));
assertEquals("", StringUtil.stripBrackets(""));
assertEquals("[foo", StringUtil.stripBrackets("[foo"));
assertEquals("]", StringUtil.stripBrackets("]"));
assertEquals("", StringUtil.stripBrackets("[]"));
assertEquals("f[]f", StringUtil.stripBrackets("f[]f"));
assertEquals(null, StringUtil.stripBrackets(null));
}
@Test
void testGetPart() {
// Get word between braces
assertEquals("{makes}", StringUtil.getPart("Practice {makes} perfect", 8, false));
// When the string is empty and start Index equal zero
assertEquals("", StringUtil.getPart("", 0, false));
// When the word are in between close curly bracket
assertEquals("", StringUtil.getPart("A closed mouth catches no }flies}", 25, false));
// Get the word from the end of the sentence
assertEquals("bite", StringUtil.getPart("Barking dogs seldom bite", 19, true));
}
@Test
void testFindEncodingsForString() {
// Unused in JabRef, but should be added in case it finds some use
}
@Test
void testWrap() {
String newline = "newline";
assertEquals("aaaaa" + newline + "\tbbbbb" + newline + "\tccccc",
StringUtil.wrap("aaaaa bbbbb ccccc", 5, newline));
assertEquals("aaaaa bbbbb" + newline + "\tccccc", StringUtil.wrap("aaaaa bbbbb ccccc", 8, newline));
assertEquals("aaaaa bbbbb" + newline + "\tccccc", StringUtil.wrap("aaaaa bbbbb ccccc", 11, newline));
assertEquals("aaaaa bbbbb ccccc", StringUtil.wrap("aaaaa bbbbb ccccc", 12, newline));
assertEquals("aaaaa" + newline + "\t" + newline + "\tbbbbb" + newline + "\t" + newline + "\tccccc",
StringUtil.wrap("aaaaa\nbbbbb\nccccc", 12, newline));
assertEquals(
"aaaaa" + newline + "\t" + newline + "\t" + newline + "\tbbbbb" + newline + "\t" + newline + "\tccccc",
StringUtil.wrap("aaaaa\n\nbbbbb\nccccc", 12, newline));
assertEquals("aaaaa" + newline + "\t" + newline + "\tbbbbb" + newline + "\t" + newline + "\tccccc",
StringUtil.wrap("aaaaa\r\nbbbbb\r\nccccc", 12, newline));
}
@Test
void testDecodeStringDoubleArray() {
assertArrayEquals(new String[][]{{"a", "b"}, {"c", "d"}}, StringUtil.decodeStringDoubleArray("a:b;c:d"));
assertArrayEquals(new String[][]{{"a", ""}, {"c", "d"}}, StringUtil.decodeStringDoubleArray("a:;c:d"));
// arrays first differed at element [0][1]; expected: null<null> but was: java.lang.String<null>
// assertArrayEquals(stringArray2res, StringUtil.decodeStringDoubleArray(encStringArray2));
assertArrayEquals(new String[][]{{"a", ":b"}, {"c;", "d"}}, StringUtil.decodeStringDoubleArray("a:\\:b;c\\;:d"));
}
@Test
void testIsInCurlyBrackets() {
assertFalse(StringUtil.isInCurlyBrackets(""));
assertFalse(StringUtil.isInCurlyBrackets(null));
assertTrue(StringUtil.isInCurlyBrackets("{}"));
assertTrue(StringUtil.isInCurlyBrackets("{a}"));
assertTrue(StringUtil.isInCurlyBrackets("{a{a}}"));
assertTrue(StringUtil.isInCurlyBrackets("{{\\AA}sa {\\AA}Stor{\\aa}}"));
assertFalse(StringUtil.isInCurlyBrackets("{"));
assertFalse(StringUtil.isInCurlyBrackets("}"));
assertFalse(StringUtil.isInCurlyBrackets("a{}a"));
assertFalse(StringUtil.isInCurlyBrackets("{\\AA}sa {\\AA}Stor{\\aa}"));
}
@Test
void testIsInSquareBrackets() {
assertFalse(StringUtil.isInSquareBrackets(""));
assertFalse(StringUtil.isInSquareBrackets(null));
assertTrue(StringUtil.isInSquareBrackets("[]"));
assertTrue(StringUtil.isInSquareBrackets("[a]"));
assertFalse(StringUtil.isInSquareBrackets("["));
assertFalse(StringUtil.isInSquareBrackets("]"));
assertFalse(StringUtil.isInSquareBrackets("a[]a"));
}
@Test
void testIsInCitationMarks() {
assertFalse(StringUtil.isInCitationMarks(""));
assertFalse(StringUtil.isInCitationMarks(null));
assertTrue(StringUtil.isInCitationMarks("\"\""));
assertTrue(StringUtil.isInCitationMarks("\"a\""));
assertFalse(StringUtil.isInCitationMarks("\""));
assertFalse(StringUtil.isInCitationMarks("a\"\"a"));
}
@Test
void testIntValueOfSingleDigit() {
assertEquals(1, StringUtil.intValueOf("1"));
assertEquals(2, StringUtil.intValueOf("2"));
assertEquals(8, StringUtil.intValueOf("8"));
}
@Test
void testIntValueOfLongString() {
assertEquals(1234567890, StringUtil.intValueOf("1234567890"));
}
@Test
void testIntValueOfStartWithZeros() {
assertEquals(1234, StringUtil.intValueOf("001234"));
}
@Test
void testIntValueOfExceptionIfStringContainsLetter() {
assertThrows(NumberFormatException.class, () -> StringUtil.intValueOf("12A2"));
}
@Test
void testIntValueOfExceptionIfStringNull() {
assertThrows(NumberFormatException.class, () -> StringUtil.intValueOf(null));
}
@Test
void testIntValueOfExceptionfIfStringEmpty() {
assertThrows(NumberFormatException.class, () -> StringUtil.intValueOf(""));
}
@Test
void testIntValueOfWithNullSingleDigit() {
assertEquals(Optional.of(1), StringUtil.intValueOfOptional("1"));
assertEquals(Optional.of(2), StringUtil.intValueOfOptional("2"));
assertEquals(Optional.of(8), StringUtil.intValueOfOptional("8"));
}
@Test
void testIntValueOfWithNullLongString() {
assertEquals(Optional.of(1234567890), StringUtil.intValueOfOptional("1234567890"));
}
@Test
void testIntValueOfWithNullStartWithZeros() {
assertEquals(Optional.of(1234), StringUtil.intValueOfOptional("001234"));
}
@Test
void testIntValueOfWithNullExceptionIfStringContainsLetter() {
assertEquals(Optional.empty(), StringUtil.intValueOfOptional("12A2"));
}
@Test
void testIntValueOfWithNullExceptionIfStringNull() {
assertEquals(Optional.empty(), StringUtil.intValueOfOptional(null));
}
@Test
void testIntValueOfWithNullExceptionfIfStringEmpty() {
assertEquals(Optional.empty(), StringUtil.intValueOfOptional(""));
}
@Test
void testLimitStringLengthShort() {
assertEquals("Test", StringUtil.limitStringLength("Test", 20));
}
@Test
void testLimitStringLengthLimiting() {
assertEquals("TestTes...", StringUtil.limitStringLength("TestTestTestTestTest", 10));
assertEquals(10, StringUtil.limitStringLength("TestTestTestTestTest", 10).length());
}
@Test
void testLimitStringLengthNullInput() {
assertEquals("", StringUtil.limitStringLength(null, 10));
}
@Test
void testReplaceSpecialCharacters() {
assertEquals("Hallo Arger", StringUtil.replaceSpecialCharacters("Hallo Arger"));
assertEquals("aaAeoeeee", StringUtil.replaceSpecialCharacters("åÄöéèë"));
}
@Test
void replaceSpecialCharactersWithNonNormalizedUnicode() {
assertEquals("Modele", StringUtil.replaceSpecialCharacters("Modèle"));
}
static Stream<Arguments> testRepeatSpacesData() {
return Stream.of(
Arguments.of("", -1),
Arguments.of("", 0),
Arguments.of(" ", 1),
Arguments.of(" ", 7)
);
}
@ParameterizedTest
@MethodSource("testRepeatSpacesData")
void testRepeatSpaces(String result, int count) {
assertEquals(result, StringUtil.repeatSpaces(count));
}
@Test
void testRepeat() {
assertEquals("", StringUtil.repeat(0, 'a'));
assertEquals("a", StringUtil.repeat(1, 'a'));
assertEquals("aaaaaaa", StringUtil.repeat(7, 'a'));
}
@Test
void testBoldHTML() {
assertEquals("<b>AA</b>", StringUtil.boldHTML("AA"));
}
@Test
void testBoldHTMLReturnsOriginalTextIfNonNull() {
assertEquals("<b>AA</b>", StringUtil.boldHTML("AA", "BB"));
}
@Test
void testBoldHTMLReturnsAlternativeTextIfNull() {
assertEquals("<b>BB</b>", StringUtil.boldHTML(null, "BB"));
}
@Test
void testUnquote() {
assertEquals("a:", StringUtil.unquote("a::", ':'));
assertEquals("a:;", StringUtil.unquote("a:::;", ':'));
assertEquals("a:b%c;", StringUtil.unquote("a::b:%c:;", ':'));
}
@Test
void testCapitalizeFirst() {
assertEquals("", StringUtil.capitalizeFirst(""));
assertEquals("Hello world", StringUtil.capitalizeFirst("Hello World"));
assertEquals("A", StringUtil.capitalizeFirst("a"));
assertEquals("Aa", StringUtil.capitalizeFirst("AA"));
}
private static Stream<Arguments> getQuoteStringIfSpaceIsContainedData() {
return Stream.of(
Arguments.of("", ""),
Arguments.of("\" \"", " "),
Arguments.of("world", "world"),
Arguments.of("\"hello world\"", "hello world")
);
}
@ParameterizedTest
@MethodSource("getQuoteStringIfSpaceIsContainedData")
void testGuoteStringIfSpaceIsContained(String expected, String source) {
assertEquals(expected, StringUtil.quoteStringIfSpaceIsContained(source));
}
@Test
void testStripAccents() {
assertEquals("aAoeee", StringUtil.stripAccents("åÄöéèë"));
assertEquals("Muhlbach", StringUtil.stripAccents("Mühlbach"));
}
static Stream<Arguments> testContainsWhitespace() {
return Stream.of(
Arguments.of(true, "file url"),
Arguments.of(true, "file\nurl"),
Arguments.of(true, "file\r\nurl"),
Arguments.of(true, "file\rurl"),
Arguments.of(true, "file\furl"),
Arguments.of(true, "file_url "),
Arguments.of(true, "file url\n"),
Arguments.of(true, " "),
Arguments.of(false, "file_url"),
Arguments.of(false, "PascalCase"),
Arguments.of(false, ""));
}
@ParameterizedTest
@MethodSource
void testContainsWhitespace(Boolean expected, String input) {
assertEquals(expected, StringUtil.containsWhitespace(input));
}
}
| 15,516 | 36.662621 | 121 | java |
null | jabref-main/src/test/java/org/jabref/model/texparser/CitationTest.java | package org.jabref.model.texparser;
import java.nio.file.Path;
import java.util.stream.Stream;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import org.junit.jupiter.params.provider.ValueSource;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.params.provider.Arguments.arguments;
public class CitationTest {
Path path;
Citation citation;
@BeforeEach
public void init() {
path = Path.of("test");
citation = new Citation(path, 10, 1, 4, "lineText");
}
private static Stream<Arguments> colStartColEndNotInBounds() {
return Stream.of(
arguments(-1, 2),
arguments(1, 9)
);
}
private static Stream<Arguments> colStartColEndInBounds() {
return Stream.of(
arguments(0, 2),
arguments(1, 8)
);
}
@ParameterizedTest
@ValueSource(ints = {-1, 0})
public void constructorLineSmallerEqualZeroTest(int line) {
Exception e = assertThrows(IllegalArgumentException.class, () -> new Citation(path, line, 1, 5, "lineText"));
assertEquals("Line has to be greater than 0.", e.getMessage());
}
@ParameterizedTest
@ValueSource(ints = {1, 2})
public void constructorLineLargerZeroTest(int line) {
Citation citation = new Citation(path, line, 1, 5, "lineText");
}
@ParameterizedTest
@MethodSource("colStartColEndNotInBounds")
public void constructorColStartColEndNotInBoundsTest(int colStart, int colEnd) {
Exception e = assertThrows(IllegalArgumentException.class, () -> new Citation(path, 10, colStart, colEnd, "lineText"));
assertEquals("Citation has to be between 0 and line length.", e.getMessage());
}
@ParameterizedTest
@MethodSource("colStartColEndInBounds")
public void constructorColStartColEndInBoundsTest(int colStart, int colEnd) {
Citation citation = new Citation(path, 10, colStart, colEnd, "lineText");
}
@Test
public void getPathTest() {
assertEquals(path, citation.getPath());
}
@Test
public void getLineTest() {
assertEquals(10, citation.getLine());
}
@Test
public void getColStartTest() {
assertEquals(1, citation.getColStart());
}
@Test
public void getColEndTest() {
assertEquals(4, citation.getColEnd());
}
@Test
public void getLineTextTest() {
assertEquals("lineText", citation.getLineText());
}
@Test
public void getContextTest() {
assertEquals("lineText", citation.getContext());
}
@Test
public void equalsTest() {
Citation citation1 = new Citation(path, 10, 1, 4, "lineText");
Citation citation2 = null;
assertEquals(citation, citation1);
assertNotEquals(citation, citation2);
}
}
| 3,181 | 28.738318 | 127 | java |
null | jabref-main/src/test/java/org/jabref/performance/BibtexEntryGenerator.java | package org.jabref.performance;
public class BibtexEntryGenerator {
public String generateBibtexEntries(int number) {
StringBuilder sb = new StringBuilder();
for (int i = 0; i < number; i++) {
sb.append(generateBibtexEntry(i));
sb.append("\n");
}
return sb.toString();
}
private String generateBibtexEntry(int i) {
return "@article{einstein1916grundlage" + i + ",\n" +
" title={Die grundlage der allgemeinen relativit{\\\"a}tstheorie},\n" +
" author={Einstein, Albert},\n" +
" journal={Annalen der Physik},\n" +
" volume={354},\n" +
" number={7},\n" +
" pages={769--822},\n" +
" year={1916},\n" +
" publisher={Wiley Online Library}\n" +
"}\n";
}
}
| 883 | 31.740741 | 88 | java |
null | jabref-main/src/test/java/org/jabref/support/CIServerCondition.java | package org.jabref.support;
import java.lang.reflect.AnnotatedElement;
import java.util.Optional;
import org.jabref.model.strings.StringUtil;
import org.junit.jupiter.api.extension.ConditionEvaluationResult;
import org.junit.jupiter.api.extension.ExecutionCondition;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.platform.commons.support.AnnotationSupport;
public class CIServerCondition implements ExecutionCondition {
private static final ConditionEvaluationResult ENABLED = ConditionEvaluationResult.enabled("not on CI server");
private static boolean isCIServer() {
// See http://docs.travis-ci.com/user/environment-variables/#Default-Environment-Variables
// See https://circleci.com/docs/environment-variables
return Boolean.valueOf(System.getenv("CI"));
}
/**
* Containers and tests are disabled if they are annotated with {@link DisabledOnCIServer} and the tests are run on
* the CI server.
*/
@Override
public ConditionEvaluationResult evaluateExecutionCondition(ExtensionContext context) {
if (!isCIServer()) {
return ENABLED;
}
Optional<AnnotatedElement> element = context.getElement();
Optional<DisabledOnCIServer> disabled = AnnotationSupport.findAnnotation(element, DisabledOnCIServer.class);
if (disabled.isPresent()) {
String reason = disabled.map(DisabledOnCIServer::value)
.filter(StringUtil::isNotBlank)
.orElseGet(() -> element.get() + " is disabled on CI server");
return ConditionEvaluationResult.disabled(reason);
}
return ENABLED;
}
}
| 1,727 | 37.4 | 119 | java |
null | jabref-main/src/test/java/org/jabref/support/DisabledOnCIServer.java | package org.jabref.support;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import org.junit.jupiter.api.extension.ExtendWith;
@Target(value = {ElementType.TYPE, ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
@ExtendWith(CIServerCondition.class)
public @interface DisabledOnCIServer {
String value();
}
| 433 | 26.125 | 55 | java |
null | jabref-main/src/test/java/org/jabref/testutils/category/DatabaseTest.java | package org.jabref.testutils.category;
import java.lang.annotation.ElementType;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import org.junit.jupiter.api.Tag;
@Target({ElementType.TYPE, ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
@Inherited
@Tag("DatabaseTest")
public @interface DatabaseTest {
}
| 427 | 24.176471 | 47 | java |
null | jabref-main/src/test/java/org/jabref/testutils/category/FetcherTest.java | package org.jabref.testutils.category;
import java.lang.annotation.ElementType;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import org.junit.jupiter.api.Tag;
@Target({ElementType.TYPE, ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
@Inherited
@Tag("FetcherTest")
public @interface FetcherTest {
String value() default "";
}
| 456 | 24.388889 | 47 | java |
null | jabref-main/src/test/java/org/jabref/testutils/category/GUITest.java | package org.jabref.testutils.category;
import java.lang.annotation.ElementType;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import org.junit.jupiter.api.Tag;
@Target({ElementType.TYPE, ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
@Inherited
@Tag("GUITest")
public @interface GUITest {
// empty
}
| 430 | 22.944444 | 47 | java |
null | jabref-main/src/test/java/org/jabref/testutils/interactive/styletester/StyleTesterMain.java | package org.jabref.testutils.interactive.styletester;
import javafx.application.Application;
import javafx.scene.Scene;
import javafx.stage.Stage;
import org.jabref.gui.JabRefExecutorService;
import org.jabref.gui.theme.ThemeManager;
import org.jabref.gui.util.DefaultFileUpdateMonitor;
import org.jabref.logic.JabRefException;
import org.jabref.preferences.JabRefPreferences;
/**
* Useful for checking the display of different controls. Not needed inside of JabRef.
*/
public class StyleTesterMain extends Application {
public static void main(String[] args) {
launch(args);
}
@Override
public void start(Stage stage) throws JabRefException {
StyleTesterView view = new StyleTesterView();
DefaultFileUpdateMonitor fileUpdateMonitor = new DefaultFileUpdateMonitor();
JabRefExecutorService.INSTANCE.executeInterruptableTask(fileUpdateMonitor, "FileUpdateMonitor");
ThemeManager themeManager = new ThemeManager(
JabRefPreferences.getInstance().getWorkspacePreferences(),
fileUpdateMonitor,
Runnable::run);
Scene scene = new Scene(view.getContent());
themeManager.installCss(scene);
stage.setScene(scene);
stage.show();
}
@Override
public void stop() {
JabRefExecutorService.INSTANCE.shutdownEverything();
}
}
| 1,376 | 31.023256 | 104 | java |
null | jabref-main/src/test/java/org/jabref/testutils/interactive/styletester/StyleTesterView.java | package org.jabref.testutils.interactive.styletester;
import javafx.css.PseudoClass;
import javafx.fxml.FXML;
import javafx.scene.Parent;
import javafx.scene.control.Button;
import com.airhacks.afterburner.views.ViewLoader;
public class StyleTesterView {
@FXML private Button normalButtonHover;
@FXML private Button normalButtonPressed;
@FXML private Button normalButtonFocused;
@FXML private Button textButtonHover;
@FXML private Button textButtonPressed;
@FXML private Button textButtonFocused;
@FXML private Button containedButtonHover;
@FXML private Button containedButtonPressed;
@FXML private Button containedButtonFocused;
private Parent content;
StyleTesterView() {
content = ViewLoader.view(this)
.load()
.getView();
setStates();
}
private void setStates() {
PseudoClass hover = PseudoClass.getPseudoClass("hover");
normalButtonHover.pseudoClassStateChanged(hover, true);
textButtonHover.pseudoClassStateChanged(hover, true);
containedButtonHover.pseudoClassStateChanged(hover, true);
PseudoClass pressed = PseudoClass.getPseudoClass("pressed");
normalButtonPressed.pseudoClassStateChanged(pressed, true);
textButtonPressed.pseudoClassStateChanged(pressed, true);
containedButtonPressed.pseudoClassStateChanged(pressed, true);
PseudoClass focused = PseudoClass.getPseudoClass("focused");
normalButtonFocused.pseudoClassStateChanged(focused, true);
textButtonFocused.pseudoClassStateChanged(focused, true);
containedButtonFocused.pseudoClassStateChanged(focused, true);
}
public Parent getContent() {
return content;
}
}
| 1,778 | 33.882353 | 70 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/multir/src/cc/factorie/protobuf/DocumentProtos.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: Document.proto
package cc.factorie.protobuf;
public final class DocumentProtos {
private DocumentProtos() {}
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistry registry) {
}
public static final class Document extends
com.google.protobuf.GeneratedMessage {
// Use Document.newBuilder() to construct.
private Document() {
initFields();
}
private Document(boolean noInit) {}
private static final Document defaultInstance;
public static Document getDefaultInstance() {
return defaultInstance;
}
public Document getDefaultInstanceForType() {
return defaultInstance;
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return cc.factorie.protobuf.DocumentProtos.internal_static_distant_protobuf_Document_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return cc.factorie.protobuf.DocumentProtos.internal_static_distant_protobuf_Document_fieldAccessorTable;
}
public static final class Sentence extends
com.google.protobuf.GeneratedMessage {
// Use Sentence.newBuilder() to construct.
private Sentence() {
initFields();
}
private Sentence(boolean noInit) {}
private static final Sentence defaultInstance;
public static Sentence getDefaultInstance() {
return defaultInstance;
}
public Sentence getDefaultInstanceForType() {
return defaultInstance;
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return cc.factorie.protobuf.DocumentProtos.internal_static_distant_protobuf_Document_Sentence_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return cc.factorie.protobuf.DocumentProtos.internal_static_distant_protobuf_Document_Sentence_fieldAccessorTable;
}
// repeated .distant.protobuf.Document.Token tokens = 1;
public static final int TOKENS_FIELD_NUMBER = 1;
private java.util.List<cc.factorie.protobuf.DocumentProtos.Document.Token> tokens_ =
java.util.Collections.emptyList();
public java.util.List<cc.factorie.protobuf.DocumentProtos.Document.Token> getTokensList() {
return tokens_;
}
public int getTokensCount() { return tokens_.size(); }
public cc.factorie.protobuf.DocumentProtos.Document.Token getTokens(int index) {
return tokens_.get(index);
}
// repeated .distant.protobuf.Document.Mention mentions = 2;
public static final int MENTIONS_FIELD_NUMBER = 2;
private java.util.List<cc.factorie.protobuf.DocumentProtos.Document.Mention> mentions_ =
java.util.Collections.emptyList();
public java.util.List<cc.factorie.protobuf.DocumentProtos.Document.Mention> getMentionsList() {
return mentions_;
}
public int getMentionsCount() { return mentions_.size(); }
public cc.factorie.protobuf.DocumentProtos.Document.Mention getMentions(int index) {
return mentions_.get(index);
}
// optional .distant.protobuf.Document.DepTree depTree = 3;
public static final int DEPTREE_FIELD_NUMBER = 3;
private boolean hasDepTree;
private cc.factorie.protobuf.DocumentProtos.Document.DepTree depTree_;
public boolean hasDepTree() { return hasDepTree; }
public cc.factorie.protobuf.DocumentProtos.Document.DepTree getDepTree() { return depTree_; }
private void initFields() {
depTree_ = cc.factorie.protobuf.DocumentProtos.Document.DepTree.getDefaultInstance();
}
public final boolean isInitialized() {
for (cc.factorie.protobuf.DocumentProtos.Document.Token element : getTokensList()) {
if (!element.isInitialized()) return false;
}
for (cc.factorie.protobuf.DocumentProtos.Document.Mention element : getMentionsList()) {
if (!element.isInitialized()) return false;
}
if (hasDepTree()) {
if (!getDepTree().isInitialized()) return false;
}
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
for (cc.factorie.protobuf.DocumentProtos.Document.Token element : getTokensList()) {
output.writeMessage(1, element);
}
for (cc.factorie.protobuf.DocumentProtos.Document.Mention element : getMentionsList()) {
output.writeMessage(2, element);
}
if (hasDepTree()) {
output.writeMessage(3, getDepTree());
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
for (cc.factorie.protobuf.DocumentProtos.Document.Token element : getTokensList()) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, element);
}
for (cc.factorie.protobuf.DocumentProtos.Document.Mention element : getMentionsList()) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, element);
}
if (hasDepTree()) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(3, getDepTree());
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
public static cc.factorie.protobuf.DocumentProtos.Document.Sentence parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
}
public static cc.factorie.protobuf.DocumentProtos.Document.Sentence parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static cc.factorie.protobuf.DocumentProtos.Document.Sentence parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
}
public static cc.factorie.protobuf.DocumentProtos.Document.Sentence parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static cc.factorie.protobuf.DocumentProtos.Document.Sentence parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
}
public static cc.factorie.protobuf.DocumentProtos.Document.Sentence parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static cc.factorie.protobuf.DocumentProtos.Document.Sentence parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static cc.factorie.protobuf.DocumentProtos.Document.Sentence parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static cc.factorie.protobuf.DocumentProtos.Document.Sentence parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
}
public static cc.factorie.protobuf.DocumentProtos.Document.Sentence parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(cc.factorie.protobuf.DocumentProtos.Document.Sentence prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder> {
private cc.factorie.protobuf.DocumentProtos.Document.Sentence result;
// Construct using cc.factorie.protobuf.DocumentProtos.Document.Sentence.newBuilder()
private Builder() {}
private static Builder create() {
Builder builder = new Builder();
builder.result = new cc.factorie.protobuf.DocumentProtos.Document.Sentence();
return builder;
}
protected cc.factorie.protobuf.DocumentProtos.Document.Sentence internalGetResult() {
return result;
}
public Builder clear() {
if (result == null) {
throw new IllegalStateException(
"Cannot call clear() after build().");
}
result = new cc.factorie.protobuf.DocumentProtos.Document.Sentence();
return this;
}
public Builder clone() {
return create().mergeFrom(result);
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return cc.factorie.protobuf.DocumentProtos.Document.Sentence.getDescriptor();
}
public cc.factorie.protobuf.DocumentProtos.Document.Sentence getDefaultInstanceForType() {
return cc.factorie.protobuf.DocumentProtos.Document.Sentence.getDefaultInstance();
}
public boolean isInitialized() {
return result.isInitialized();
}
public cc.factorie.protobuf.DocumentProtos.Document.Sentence build() {
if (result != null && !isInitialized()) {
throw newUninitializedMessageException(result);
}
return buildPartial();
}
private cc.factorie.protobuf.DocumentProtos.Document.Sentence buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
if (!isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return buildPartial();
}
public cc.factorie.protobuf.DocumentProtos.Document.Sentence buildPartial() {
if (result == null) {
throw new IllegalStateException(
"build() has already been called on this Builder.");
}
if (result.tokens_ != java.util.Collections.EMPTY_LIST) {
result.tokens_ =
java.util.Collections.unmodifiableList(result.tokens_);
}
if (result.mentions_ != java.util.Collections.EMPTY_LIST) {
result.mentions_ =
java.util.Collections.unmodifiableList(result.mentions_);
}
cc.factorie.protobuf.DocumentProtos.Document.Sentence returnMe = result;
result = null;
return returnMe;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof cc.factorie.protobuf.DocumentProtos.Document.Sentence) {
return mergeFrom((cc.factorie.protobuf.DocumentProtos.Document.Sentence)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(cc.factorie.protobuf.DocumentProtos.Document.Sentence other) {
if (other == cc.factorie.protobuf.DocumentProtos.Document.Sentence.getDefaultInstance()) return this;
if (!other.tokens_.isEmpty()) {
if (result.tokens_.isEmpty()) {
result.tokens_ = new java.util.ArrayList<cc.factorie.protobuf.DocumentProtos.Document.Token>();
}
result.tokens_.addAll(other.tokens_);
}
if (!other.mentions_.isEmpty()) {
if (result.mentions_.isEmpty()) {
result.mentions_ = new java.util.ArrayList<cc.factorie.protobuf.DocumentProtos.Document.Mention>();
}
result.mentions_.addAll(other.mentions_);
}
if (other.hasDepTree()) {
mergeDepTree(other.getDepTree());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
return this;
}
break;
}
case 10: {
cc.factorie.protobuf.DocumentProtos.Document.Token.Builder subBuilder = cc.factorie.protobuf.DocumentProtos.Document.Token.newBuilder();
input.readMessage(subBuilder, extensionRegistry);
addTokens(subBuilder.buildPartial());
break;
}
case 18: {
cc.factorie.protobuf.DocumentProtos.Document.Mention.Builder subBuilder = cc.factorie.protobuf.DocumentProtos.Document.Mention.newBuilder();
input.readMessage(subBuilder, extensionRegistry);
addMentions(subBuilder.buildPartial());
break;
}
case 26: {
cc.factorie.protobuf.DocumentProtos.Document.DepTree.Builder subBuilder = cc.factorie.protobuf.DocumentProtos.Document.DepTree.newBuilder();
if (hasDepTree()) {
subBuilder.mergeFrom(getDepTree());
}
input.readMessage(subBuilder, extensionRegistry);
setDepTree(subBuilder.buildPartial());
break;
}
}
}
}
// repeated .distant.protobuf.Document.Token tokens = 1;
public java.util.List<cc.factorie.protobuf.DocumentProtos.Document.Token> getTokensList() {
return java.util.Collections.unmodifiableList(result.tokens_);
}
public int getTokensCount() {
return result.getTokensCount();
}
public cc.factorie.protobuf.DocumentProtos.Document.Token getTokens(int index) {
return result.getTokens(index);
}
public Builder setTokens(int index, cc.factorie.protobuf.DocumentProtos.Document.Token value) {
if (value == null) {
throw new NullPointerException();
}
result.tokens_.set(index, value);
return this;
}
public Builder setTokens(int index, cc.factorie.protobuf.DocumentProtos.Document.Token.Builder builderForValue) {
result.tokens_.set(index, builderForValue.build());
return this;
}
public Builder addTokens(cc.factorie.protobuf.DocumentProtos.Document.Token value) {
if (value == null) {
throw new NullPointerException();
}
if (result.tokens_.isEmpty()) {
result.tokens_ = new java.util.ArrayList<cc.factorie.protobuf.DocumentProtos.Document.Token>();
}
result.tokens_.add(value);
return this;
}
public Builder addTokens(cc.factorie.protobuf.DocumentProtos.Document.Token.Builder builderForValue) {
if (result.tokens_.isEmpty()) {
result.tokens_ = new java.util.ArrayList<cc.factorie.protobuf.DocumentProtos.Document.Token>();
}
result.tokens_.add(builderForValue.build());
return this;
}
public Builder addAllTokens(
java.lang.Iterable<? extends cc.factorie.protobuf.DocumentProtos.Document.Token> values) {
if (result.tokens_.isEmpty()) {
result.tokens_ = new java.util.ArrayList<cc.factorie.protobuf.DocumentProtos.Document.Token>();
}
super.addAll(values, result.tokens_);
return this;
}
public Builder clearTokens() {
result.tokens_ = java.util.Collections.emptyList();
return this;
}
// repeated .distant.protobuf.Document.Mention mentions = 2;
public java.util.List<cc.factorie.protobuf.DocumentProtos.Document.Mention> getMentionsList() {
return java.util.Collections.unmodifiableList(result.mentions_);
}
public int getMentionsCount() {
return result.getMentionsCount();
}
public cc.factorie.protobuf.DocumentProtos.Document.Mention getMentions(int index) {
return result.getMentions(index);
}
public Builder setMentions(int index, cc.factorie.protobuf.DocumentProtos.Document.Mention value) {
if (value == null) {
throw new NullPointerException();
}
result.mentions_.set(index, value);
return this;
}
public Builder setMentions(int index, cc.factorie.protobuf.DocumentProtos.Document.Mention.Builder builderForValue) {
result.mentions_.set(index, builderForValue.build());
return this;
}
public Builder addMentions(cc.factorie.protobuf.DocumentProtos.Document.Mention value) {
if (value == null) {
throw new NullPointerException();
}
if (result.mentions_.isEmpty()) {
result.mentions_ = new java.util.ArrayList<cc.factorie.protobuf.DocumentProtos.Document.Mention>();
}
result.mentions_.add(value);
return this;
}
public Builder addMentions(cc.factorie.protobuf.DocumentProtos.Document.Mention.Builder builderForValue) {
if (result.mentions_.isEmpty()) {
result.mentions_ = new java.util.ArrayList<cc.factorie.protobuf.DocumentProtos.Document.Mention>();
}
result.mentions_.add(builderForValue.build());
return this;
}
public Builder addAllMentions(
java.lang.Iterable<? extends cc.factorie.protobuf.DocumentProtos.Document.Mention> values) {
if (result.mentions_.isEmpty()) {
result.mentions_ = new java.util.ArrayList<cc.factorie.protobuf.DocumentProtos.Document.Mention>();
}
super.addAll(values, result.mentions_);
return this;
}
public Builder clearMentions() {
result.mentions_ = java.util.Collections.emptyList();
return this;
}
// optional .distant.protobuf.Document.DepTree depTree = 3;
public boolean hasDepTree() {
return result.hasDepTree();
}
public cc.factorie.protobuf.DocumentProtos.Document.DepTree getDepTree() {
return result.getDepTree();
}
public Builder setDepTree(cc.factorie.protobuf.DocumentProtos.Document.DepTree value) {
if (value == null) {
throw new NullPointerException();
}
result.hasDepTree = true;
result.depTree_ = value;
return this;
}
public Builder setDepTree(cc.factorie.protobuf.DocumentProtos.Document.DepTree.Builder builderForValue) {
result.hasDepTree = true;
result.depTree_ = builderForValue.build();
return this;
}
public Builder mergeDepTree(cc.factorie.protobuf.DocumentProtos.Document.DepTree value) {
if (result.hasDepTree() &&
result.depTree_ != cc.factorie.protobuf.DocumentProtos.Document.DepTree.getDefaultInstance()) {
result.depTree_ =
cc.factorie.protobuf.DocumentProtos.Document.DepTree.newBuilder(result.depTree_).mergeFrom(value).buildPartial();
} else {
result.depTree_ = value;
}
result.hasDepTree = true;
return this;
}
public Builder clearDepTree() {
result.hasDepTree = false;
result.depTree_ = cc.factorie.protobuf.DocumentProtos.Document.DepTree.getDefaultInstance();
return this;
}
// @@protoc_insertion_point(builder_scope:distant.protobuf.Document.Sentence)
}
static {
defaultInstance = new Sentence(true);
cc.factorie.protobuf.DocumentProtos.internalForceInit();
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:distant.protobuf.Document.Sentence)
}
public static final class Token extends
com.google.protobuf.GeneratedMessage {
// Use Token.newBuilder() to construct.
private Token() {
initFields();
}
private Token(boolean noInit) {}
private static final Token defaultInstance;
public static Token getDefaultInstance() {
return defaultInstance;
}
public Token getDefaultInstanceForType() {
return defaultInstance;
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return cc.factorie.protobuf.DocumentProtos.internal_static_distant_protobuf_Document_Token_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return cc.factorie.protobuf.DocumentProtos.internal_static_distant_protobuf_Document_Token_fieldAccessorTable;
}
// required string word = 1;
public static final int WORD_FIELD_NUMBER = 1;
private boolean hasWord;
private java.lang.String word_ = "";
public boolean hasWord() { return hasWord; }
public java.lang.String getWord() { return word_; }
// optional string tag = 2;
public static final int TAG_FIELD_NUMBER = 2;
private boolean hasTag;
private java.lang.String tag_ = "";
public boolean hasTag() { return hasTag; }
public java.lang.String getTag() { return tag_; }
// optional string ner = 3;
public static final int NER_FIELD_NUMBER = 3;
private boolean hasNer;
private java.lang.String ner_ = "";
public boolean hasNer() { return hasNer; }
public java.lang.String getNer() { return ner_; }
private void initFields() {
}
public final boolean isInitialized() {
if (!hasWord) return false;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (hasWord()) {
output.writeString(1, getWord());
}
if (hasTag()) {
output.writeString(2, getTag());
}
if (hasNer()) {
output.writeString(3, getNer());
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (hasWord()) {
size += com.google.protobuf.CodedOutputStream
.computeStringSize(1, getWord());
}
if (hasTag()) {
size += com.google.protobuf.CodedOutputStream
.computeStringSize(2, getTag());
}
if (hasNer()) {
size += com.google.protobuf.CodedOutputStream
.computeStringSize(3, getNer());
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
public static cc.factorie.protobuf.DocumentProtos.Document.Token parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
}
public static cc.factorie.protobuf.DocumentProtos.Document.Token parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static cc.factorie.protobuf.DocumentProtos.Document.Token parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
}
public static cc.factorie.protobuf.DocumentProtos.Document.Token parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static cc.factorie.protobuf.DocumentProtos.Document.Token parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
}
public static cc.factorie.protobuf.DocumentProtos.Document.Token parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static cc.factorie.protobuf.DocumentProtos.Document.Token parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static cc.factorie.protobuf.DocumentProtos.Document.Token parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static cc.factorie.protobuf.DocumentProtos.Document.Token parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
}
public static cc.factorie.protobuf.DocumentProtos.Document.Token parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(cc.factorie.protobuf.DocumentProtos.Document.Token prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder> {
private cc.factorie.protobuf.DocumentProtos.Document.Token result;
// Construct using cc.factorie.protobuf.DocumentProtos.Document.Token.newBuilder()
private Builder() {}
private static Builder create() {
Builder builder = new Builder();
builder.result = new cc.factorie.protobuf.DocumentProtos.Document.Token();
return builder;
}
protected cc.factorie.protobuf.DocumentProtos.Document.Token internalGetResult() {
return result;
}
public Builder clear() {
if (result == null) {
throw new IllegalStateException(
"Cannot call clear() after build().");
}
result = new cc.factorie.protobuf.DocumentProtos.Document.Token();
return this;
}
public Builder clone() {
return create().mergeFrom(result);
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return cc.factorie.protobuf.DocumentProtos.Document.Token.getDescriptor();
}
public cc.factorie.protobuf.DocumentProtos.Document.Token getDefaultInstanceForType() {
return cc.factorie.protobuf.DocumentProtos.Document.Token.getDefaultInstance();
}
public boolean isInitialized() {
return result.isInitialized();
}
public cc.factorie.protobuf.DocumentProtos.Document.Token build() {
if (result != null && !isInitialized()) {
throw newUninitializedMessageException(result);
}
return buildPartial();
}
private cc.factorie.protobuf.DocumentProtos.Document.Token buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
if (!isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return buildPartial();
}
public cc.factorie.protobuf.DocumentProtos.Document.Token buildPartial() {
if (result == null) {
throw new IllegalStateException(
"build() has already been called on this Builder.");
}
cc.factorie.protobuf.DocumentProtos.Document.Token returnMe = result;
result = null;
return returnMe;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof cc.factorie.protobuf.DocumentProtos.Document.Token) {
return mergeFrom((cc.factorie.protobuf.DocumentProtos.Document.Token)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(cc.factorie.protobuf.DocumentProtos.Document.Token other) {
if (other == cc.factorie.protobuf.DocumentProtos.Document.Token.getDefaultInstance()) return this;
if (other.hasWord()) {
setWord(other.getWord());
}
if (other.hasTag()) {
setTag(other.getTag());
}
if (other.hasNer()) {
setNer(other.getNer());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
return this;
}
break;
}
case 10: {
setWord(input.readString());
break;
}
case 18: {
setTag(input.readString());
break;
}
case 26: {
setNer(input.readString());
break;
}
}
}
}
// required string word = 1;
public boolean hasWord() {
return result.hasWord();
}
public java.lang.String getWord() {
return result.getWord();
}
public Builder setWord(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
result.hasWord = true;
result.word_ = value;
return this;
}
public Builder clearWord() {
result.hasWord = false;
result.word_ = getDefaultInstance().getWord();
return this;
}
// optional string tag = 2;
public boolean hasTag() {
return result.hasTag();
}
public java.lang.String getTag() {
return result.getTag();
}
public Builder setTag(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
result.hasTag = true;
result.tag_ = value;
return this;
}
public Builder clearTag() {
result.hasTag = false;
result.tag_ = getDefaultInstance().getTag();
return this;
}
// optional string ner = 3;
public boolean hasNer() {
return result.hasNer();
}
public java.lang.String getNer() {
return result.getNer();
}
public Builder setNer(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
result.hasNer = true;
result.ner_ = value;
return this;
}
public Builder clearNer() {
result.hasNer = false;
result.ner_ = getDefaultInstance().getNer();
return this;
}
// @@protoc_insertion_point(builder_scope:distant.protobuf.Document.Token)
}
static {
defaultInstance = new Token(true);
cc.factorie.protobuf.DocumentProtos.internalForceInit();
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:distant.protobuf.Document.Token)
}
public static final class Mention extends
com.google.protobuf.GeneratedMessage {
// Use Mention.newBuilder() to construct.
private Mention() {
initFields();
}
private Mention(boolean noInit) {}
private static final Mention defaultInstance;
public static Mention getDefaultInstance() {
return defaultInstance;
}
public Mention getDefaultInstanceForType() {
return defaultInstance;
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return cc.factorie.protobuf.DocumentProtos.internal_static_distant_protobuf_Document_Mention_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return cc.factorie.protobuf.DocumentProtos.internal_static_distant_protobuf_Document_Mention_fieldAccessorTable;
}
// required int32 id = 1;
public static final int ID_FIELD_NUMBER = 1;
private boolean hasId;
private int id_ = 0;
public boolean hasId() { return hasId; }
public int getId() { return id_; }
// optional string entityGuid = 2;
public static final int ENTITYGUID_FIELD_NUMBER = 2;
private boolean hasEntityGuid;
private java.lang.String entityGuid_ = "";
public boolean hasEntityGuid() { return hasEntityGuid; }
public java.lang.String getEntityGuid() { return entityGuid_; }
// required int32 from = 3;
public static final int FROM_FIELD_NUMBER = 3;
private boolean hasFrom;
private int from_ = 0;
public boolean hasFrom() { return hasFrom; }
public int getFrom() { return from_; }
// required int32 to = 4;
public static final int TO_FIELD_NUMBER = 4;
private boolean hasTo;
private int to_ = 0;
public boolean hasTo() { return hasTo; }
public int getTo() { return to_; }
// required string label = 5;
public static final int LABEL_FIELD_NUMBER = 5;
private boolean hasLabel;
private java.lang.String label_ = "";
public boolean hasLabel() { return hasLabel; }
public java.lang.String getLabel() { return label_; }
private void initFields() {
}
public final boolean isInitialized() {
if (!hasId) return false;
if (!hasFrom) return false;
if (!hasTo) return false;
if (!hasLabel) return false;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (hasId()) {
output.writeInt32(1, getId());
}
if (hasEntityGuid()) {
output.writeString(2, getEntityGuid());
}
if (hasFrom()) {
output.writeInt32(3, getFrom());
}
if (hasTo()) {
output.writeInt32(4, getTo());
}
if (hasLabel()) {
output.writeString(5, getLabel());
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (hasId()) {
size += com.google.protobuf.CodedOutputStream
.computeInt32Size(1, getId());
}
if (hasEntityGuid()) {
size += com.google.protobuf.CodedOutputStream
.computeStringSize(2, getEntityGuid());
}
if (hasFrom()) {
size += com.google.protobuf.CodedOutputStream
.computeInt32Size(3, getFrom());
}
if (hasTo()) {
size += com.google.protobuf.CodedOutputStream
.computeInt32Size(4, getTo());
}
if (hasLabel()) {
size += com.google.protobuf.CodedOutputStream
.computeStringSize(5, getLabel());
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
public static cc.factorie.protobuf.DocumentProtos.Document.Mention parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
}
public static cc.factorie.protobuf.DocumentProtos.Document.Mention parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static cc.factorie.protobuf.DocumentProtos.Document.Mention parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
}
public static cc.factorie.protobuf.DocumentProtos.Document.Mention parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
    // ----------------------------------------------------------------
    // Generated protobuf 2.x plumbing for Mention (do not hand-edit;
    // regenerate from the .proto instead). Each parseFrom overload
    // feeds a fresh Builder and validates required fields via
    // buildParsed(); parseDelimitedFrom returns null when a clean EOF
    // is reached before any bytes of the message were read.
    // ----------------------------------------------------------------
    public static cc.factorie.protobuf.DocumentProtos.Document.Mention parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static cc.factorie.protobuf.DocumentProtos.Document.Mention parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static cc.factorie.protobuf.DocumentProtos.Document.Mention parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static cc.factorie.protobuf.DocumentProtos.Document.Mention parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static cc.factorie.protobuf.DocumentProtos.Document.Mention parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static cc.factorie.protobuf.DocumentProtos.Document.Mention parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(cc.factorie.protobuf.DocumentProtos.Document.Mention prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    // Mutable builder for Mention. NOTE (protobuf 2.x semantics): the
    // builder mutates its single backing "result" message in place;
    // build()/buildPartial() hand that instance out and null the field,
    // so calling build() or clear() again afterwards throws
    // IllegalStateException rather than silently sharing state.
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder> {
      private cc.factorie.protobuf.DocumentProtos.Document.Mention result;
      // Construct using cc.factorie.protobuf.DocumentProtos.Document.Mention.newBuilder()
      private Builder() {}
      private static Builder create() {
        Builder builder = new Builder();
        builder.result = new cc.factorie.protobuf.DocumentProtos.Document.Mention();
        return builder;
      }
      protected cc.factorie.protobuf.DocumentProtos.Document.Mention internalGetResult() {
        return result;
      }
      public Builder clear() {
        if (result == null) {
          throw new IllegalStateException(
            "Cannot call clear() after build().");
        }
        result = new cc.factorie.protobuf.DocumentProtos.Document.Mention();
        return this;
      }
      public Builder clone() {
        return create().mergeFrom(result);
      }
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return cc.factorie.protobuf.DocumentProtos.Document.Mention.getDescriptor();
      }
      public cc.factorie.protobuf.DocumentProtos.Document.Mention getDefaultInstanceForType() {
        return cc.factorie.protobuf.DocumentProtos.Document.Mention.getDefaultInstance();
      }
      public boolean isInitialized() {
        return result.isInitialized();
      }
      // build() enforces required fields; buildPartial() does not.
      public cc.factorie.protobuf.DocumentProtos.Document.Mention build() {
        if (result != null && !isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return buildPartial();
      }
      private cc.factorie.protobuf.DocumentProtos.Document.Mention buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        if (!isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return buildPartial();
      }
      public cc.factorie.protobuf.DocumentProtos.Document.Mention buildPartial() {
        if (result == null) {
          throw new IllegalStateException(
            "build() has already been called on this Builder.");
        }
        cc.factorie.protobuf.DocumentProtos.Document.Mention returnMe = result;
        result = null;
        return returnMe;
      }
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof cc.factorie.protobuf.DocumentProtos.Document.Mention) {
          return mergeFrom((cc.factorie.protobuf.DocumentProtos.Document.Mention)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      // Field-by-field merge: only fields explicitly set on "other"
      // overwrite this builder's values.
      public Builder mergeFrom(cc.factorie.protobuf.DocumentProtos.Document.Mention other) {
        if (other == cc.factorie.protobuf.DocumentProtos.Document.Mention.getDefaultInstance()) return this;
        if (other.hasId()) {
          setId(other.getId());
        }
        if (other.hasEntityGuid()) {
          setEntityGuid(other.getEntityGuid());
        }
        if (other.hasFrom()) {
          setFrom(other.getFrom());
        }
        if (other.hasTo()) {
          setTo(other.getTo());
        }
        if (other.hasLabel()) {
          setLabel(other.getLabel());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      // Wire-format parse loop. Tag 0 means end of stream/group; the
      // default branch collects unrecognized tags into unknownFields.
      // (The default label preceding the case labels is legal Java --
      // switch case order is irrelevant.)
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                return this;
              }
              break;
            }
            case 8: {
              setId(input.readInt32());
              break;
            }
            case 18: {
              setEntityGuid(input.readString());
              break;
            }
            case 24: {
              setFrom(input.readInt32());
              break;
            }
            case 32: {
              setTo(input.readInt32());
              break;
            }
            case 42: {
              setLabel(input.readString());
              break;
            }
          }
        }
      }
      // required int32 id = 1;
      public boolean hasId() {
        return result.hasId();
      }
      public int getId() {
        return result.getId();
      }
      public Builder setId(int value) {
        result.hasId = true;
        result.id_ = value;
        return this;
      }
      public Builder clearId() {
        result.hasId = false;
        result.id_ = 0;
        return this;
      }
      // optional string entityGuid = 2;
      public boolean hasEntityGuid() {
        return result.hasEntityGuid();
      }
      public java.lang.String getEntityGuid() {
        return result.getEntityGuid();
      }
      public Builder setEntityGuid(java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        result.hasEntityGuid = true;
        result.entityGuid_ = value;
        return this;
      }
      public Builder clearEntityGuid() {
        result.hasEntityGuid = false;
        result.entityGuid_ = getDefaultInstance().getEntityGuid();
        return this;
      }
      // required int32 from = 3;
      public boolean hasFrom() {
        return result.hasFrom();
      }
      public int getFrom() {
        return result.getFrom();
      }
      public Builder setFrom(int value) {
        result.hasFrom = true;
        result.from_ = value;
        return this;
      }
      public Builder clearFrom() {
        result.hasFrom = false;
        result.from_ = 0;
        return this;
      }
      // required int32 to = 4;
      public boolean hasTo() {
        return result.hasTo();
      }
      public int getTo() {
        return result.getTo();
      }
      public Builder setTo(int value) {
        result.hasTo = true;
        result.to_ = value;
        return this;
      }
      public Builder clearTo() {
        result.hasTo = false;
        result.to_ = 0;
        return this;
      }
      // required string label = 5;
      public boolean hasLabel() {
        return result.hasLabel();
      }
      public java.lang.String getLabel() {
        return result.getLabel();
      }
      public Builder setLabel(java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        result.hasLabel = true;
        result.label_ = value;
        return this;
      }
      public Builder clearLabel() {
        result.hasLabel = false;
        result.label_ = getDefaultInstance().getLabel();
        return this;
      }
      // @@protoc_insertion_point(builder_scope:distant.protobuf.Document.Mention)
    }
    // Static initializer runs after class-body constants are set up:
    // it creates the shared default instance and forces the outer
    // DocumentProtos descriptor machinery to initialize first.
    static {
      defaultInstance = new Mention(true);
      cc.factorie.protobuf.DocumentProtos.internalForceInit();
      defaultInstance.initFields();
    }
// @@protoc_insertion_point(class_scope:distant.protobuf.Document.Mention)
}
  /**
   * Generated message type {@code distant.protobuf.Document.DepTree}.
   *
   * <p>Fields (from the field comments below): a required {@code int32
   * root}, a repeated {@code int32 head}, and a repeated {@code string
   * relType}. The names suggest a per-sentence dependency tree with
   * head/relType as parallel arrays, but nothing in this class enforces
   * that the two lists have equal length -- TODO confirm against the
   * producer of these messages.
   *
   * <p>Generated by the protocol buffer compiler (protobuf 2.x style);
   * do not edit by hand -- regenerate from the {@code .proto} source.
   */
  public static final class DepTree extends
      com.google.protobuf.GeneratedMessage {
    // Use DepTree.newBuilder() to construct.
    private DepTree() {
      initFields();
    }
    private DepTree(boolean noInit) {}
    private static final DepTree defaultInstance;
    public static DepTree getDefaultInstance() {
      return defaultInstance;
    }
    public DepTree getDefaultInstanceForType() {
      return defaultInstance;
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return cc.factorie.protobuf.DocumentProtos.internal_static_distant_protobuf_Document_DepTree_descriptor;
    }
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return cc.factorie.protobuf.DocumentProtos.internal_static_distant_protobuf_Document_DepTree_fieldAccessorTable;
    }
    // required int32 root = 1;
    public static final int ROOT_FIELD_NUMBER = 1;
    private boolean hasRoot;
    private int root_ = 0;
    public boolean hasRoot() { return hasRoot; }
    public int getRoot() { return root_; }
    // repeated int32 head = 2;
    public static final int HEAD_FIELD_NUMBER = 2;
    private java.util.List<java.lang.Integer> head_ =
      java.util.Collections.emptyList();
    public java.util.List<java.lang.Integer> getHeadList() {
      return head_;
    }
    public int getHeadCount() { return head_.size(); }
    public int getHead(int index) {
      return head_.get(index);
    }
    // repeated string relType = 3;
    public static final int RELTYPE_FIELD_NUMBER = 3;
    private java.util.List<java.lang.String> relType_ =
      java.util.Collections.emptyList();
    public java.util.List<java.lang.String> getRelTypeList() {
      return relType_;
    }
    public int getRelTypeCount() { return relType_.size(); }
    public java.lang.String getRelType(int index) {
      return relType_.get(index);
    }
    private void initFields() {
    }
    // A DepTree is valid only once its sole required field is set.
    public final boolean isInitialized() {
      if (!hasRoot) return false;
      return true;
    }
    // Serializes set fields in field-number order, then any unknown
    // fields preserved from parsing.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (hasRoot()) {
        output.writeInt32(1, getRoot());
      }
      for (int element : getHeadList()) {
        output.writeInt32(2, element);
      }
      for (java.lang.String element : getRelTypeList()) {
        output.writeString(3, element);
      }
      getUnknownFields().writeTo(output);
    }
    // Memoized wire size; -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
      size = 0;
      if (hasRoot()) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(1, getRoot());
      }
      {
        int dataSize = 0;
        for (int element : getHeadList()) {
          dataSize += com.google.protobuf.CodedOutputStream
            .computeInt32SizeNoTag(element);
        }
        size += dataSize;
        // one 1-byte tag per (unpacked) repeated element
        size += 1 * getHeadList().size();
      }
      {
        int dataSize = 0;
        for (java.lang.String element : getRelTypeList()) {
          dataSize += com.google.protobuf.CodedOutputStream
            .computeStringSizeNoTag(element);
        }
        size += dataSize;
        size += 1 * getRelTypeList().size();
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    // Standard generated parse entry points; buildParsed() converts a
    // missing required field into InvalidProtocolBufferException, and
    // parseDelimitedFrom returns null on a clean EOF.
    public static cc.factorie.protobuf.DocumentProtos.Document.DepTree parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static cc.factorie.protobuf.DocumentProtos.Document.DepTree parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static cc.factorie.protobuf.DocumentProtos.Document.DepTree parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static cc.factorie.protobuf.DocumentProtos.Document.DepTree parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static cc.factorie.protobuf.DocumentProtos.Document.DepTree parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static cc.factorie.protobuf.DocumentProtos.Document.DepTree parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static cc.factorie.protobuf.DocumentProtos.Document.DepTree parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static cc.factorie.protobuf.DocumentProtos.Document.DepTree parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static cc.factorie.protobuf.DocumentProtos.Document.DepTree parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static cc.factorie.protobuf.DocumentProtos.Document.DepTree parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(cc.factorie.protobuf.DocumentProtos.Document.DepTree prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    // Mutable builder for DepTree. Same single-use protobuf 2.x
    // contract as the other builders in this file: build() hands out
    // the in-place-mutated "result" and nulls the field.
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder> {
      private cc.factorie.protobuf.DocumentProtos.Document.DepTree result;
      // Construct using cc.factorie.protobuf.DocumentProtos.Document.DepTree.newBuilder()
      private Builder() {}
      private static Builder create() {
        Builder builder = new Builder();
        builder.result = new cc.factorie.protobuf.DocumentProtos.Document.DepTree();
        return builder;
      }
      protected cc.factorie.protobuf.DocumentProtos.Document.DepTree internalGetResult() {
        return result;
      }
      public Builder clear() {
        if (result == null) {
          throw new IllegalStateException(
            "Cannot call clear() after build().");
        }
        result = new cc.factorie.protobuf.DocumentProtos.Document.DepTree();
        return this;
      }
      public Builder clone() {
        return create().mergeFrom(result);
      }
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return cc.factorie.protobuf.DocumentProtos.Document.DepTree.getDescriptor();
      }
      public cc.factorie.protobuf.DocumentProtos.Document.DepTree getDefaultInstanceForType() {
        return cc.factorie.protobuf.DocumentProtos.Document.DepTree.getDefaultInstance();
      }
      public boolean isInitialized() {
        return result.isInitialized();
      }
      public cc.factorie.protobuf.DocumentProtos.Document.DepTree build() {
        if (result != null && !isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return buildPartial();
      }
      private cc.factorie.protobuf.DocumentProtos.Document.DepTree buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        if (!isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return buildPartial();
      }
      // Freezes the repeated-field lists (identity check against the
      // shared EMPTY_LIST sentinel distinguishes "never populated"
      // from "populated and needs wrapping") before handing out result.
      public cc.factorie.protobuf.DocumentProtos.Document.DepTree buildPartial() {
        if (result == null) {
          throw new IllegalStateException(
            "build() has already been called on this Builder.");
        }
        if (result.head_ != java.util.Collections.EMPTY_LIST) {
          result.head_ =
            java.util.Collections.unmodifiableList(result.head_);
        }
        if (result.relType_ != java.util.Collections.EMPTY_LIST) {
          result.relType_ =
            java.util.Collections.unmodifiableList(result.relType_);
        }
        cc.factorie.protobuf.DocumentProtos.Document.DepTree returnMe = result;
        result = null;
        return returnMe;
      }
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof cc.factorie.protobuf.DocumentProtos.Document.DepTree) {
          return mergeFrom((cc.factorie.protobuf.DocumentProtos.Document.DepTree)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      // Merge semantics: scalar "root" is overwritten if set on other;
      // repeated fields are appended, not replaced.
      public Builder mergeFrom(cc.factorie.protobuf.DocumentProtos.Document.DepTree other) {
        if (other == cc.factorie.protobuf.DocumentProtos.Document.DepTree.getDefaultInstance()) return this;
        if (other.hasRoot()) {
          setRoot(other.getRoot());
        }
        if (!other.head_.isEmpty()) {
          if (result.head_.isEmpty()) {
            result.head_ = new java.util.ArrayList<java.lang.Integer>();
          }
          result.head_.addAll(other.head_);
        }
        if (!other.relType_.isEmpty()) {
          if (result.relType_.isEmpty()) {
            result.relType_ = new java.util.ArrayList<java.lang.String>();
          }
          result.relType_.addAll(other.relType_);
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                return this;
              }
              break;
            }
            case 8: {
              setRoot(input.readInt32());
              break;
            }
            case 16: {
              addHead(input.readInt32());
              break;
            }
            // Tag 18 = field 2 with length-delimited wire type, i.e.
            // the packed encoding of the repeated int32 "head" field:
            // read the byte length, then varints until the limit.
            case 18: {
              int length = input.readRawVarint32();
              int limit = input.pushLimit(length);
              while (input.getBytesUntilLimit() > 0) {
                addHead(input.readInt32());
              }
              input.popLimit(limit);
              break;
            }
            case 26: {
              addRelType(input.readString());
              break;
            }
          }
        }
      }
      // required int32 root = 1;
      public boolean hasRoot() {
        return result.hasRoot();
      }
      public int getRoot() {
        return result.getRoot();
      }
      public Builder setRoot(int value) {
        result.hasRoot = true;
        result.root_ = value;
        return this;
      }
      public Builder clearRoot() {
        result.hasRoot = false;
        result.root_ = 0;
        return this;
      }
      // repeated int32 head = 2;
      public java.util.List<java.lang.Integer> getHeadList() {
        return java.util.Collections.unmodifiableList(result.head_);
      }
      public int getHeadCount() {
        return result.getHeadCount();
      }
      public int getHead(int index) {
        return result.getHead(index);
      }
      public Builder setHead(int index, int value) {
        result.head_.set(index, value);
        return this;
      }
      public Builder addHead(int value) {
        if (result.head_.isEmpty()) {
          result.head_ = new java.util.ArrayList<java.lang.Integer>();
        }
        result.head_.add(value);
        return this;
      }
      public Builder addAllHead(
          java.lang.Iterable<? extends java.lang.Integer> values) {
        if (result.head_.isEmpty()) {
          result.head_ = new java.util.ArrayList<java.lang.Integer>();
        }
        super.addAll(values, result.head_);
        return this;
      }
      public Builder clearHead() {
        result.head_ = java.util.Collections.emptyList();
        return this;
      }
      // repeated string relType = 3;
      public java.util.List<java.lang.String> getRelTypeList() {
        return java.util.Collections.unmodifiableList(result.relType_);
      }
      public int getRelTypeCount() {
        return result.getRelTypeCount();
      }
      public java.lang.String getRelType(int index) {
        return result.getRelType(index);
      }
      public Builder setRelType(int index, java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        result.relType_.set(index, value);
        return this;
      }
      public Builder addRelType(java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        if (result.relType_.isEmpty()) {
          result.relType_ = new java.util.ArrayList<java.lang.String>();
        }
        result.relType_.add(value);
        return this;
      }
      public Builder addAllRelType(
          java.lang.Iterable<? extends java.lang.String> values) {
        if (result.relType_.isEmpty()) {
          result.relType_ = new java.util.ArrayList<java.lang.String>();
        }
        super.addAll(values, result.relType_);
        return this;
      }
      public Builder clearRelType() {
        result.relType_ = java.util.Collections.emptyList();
        return this;
      }
      // @@protoc_insertion_point(builder_scope:distant.protobuf.Document.DepTree)
    }
    static {
      defaultInstance = new DepTree(true);
      cc.factorie.protobuf.DocumentProtos.internalForceInit();
      defaultInstance.initFields();
    }
    // @@protoc_insertion_point(class_scope:distant.protobuf.Document.DepTree)
  }
  /**
   * Generated message type {@code distant.protobuf.Document.RelationMention}.
   *
   * <p>Four required fields: {@code int32 id}, {@code int32 source},
   * {@code int32 dest}, and {@code string label}. The names suggest a
   * labeled directed relation between two mentions referenced by id --
   * TODO confirm the id space against the producer of these messages.
   *
   * <p>Generated by the protocol buffer compiler (protobuf 2.x style);
   * do not edit by hand -- regenerate from the {@code .proto} source.
   */
  public static final class RelationMention extends
      com.google.protobuf.GeneratedMessage {
    // Use RelationMention.newBuilder() to construct.
    private RelationMention() {
      initFields();
    }
    private RelationMention(boolean noInit) {}
    private static final RelationMention defaultInstance;
    public static RelationMention getDefaultInstance() {
      return defaultInstance;
    }
    public RelationMention getDefaultInstanceForType() {
      return defaultInstance;
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return cc.factorie.protobuf.DocumentProtos.internal_static_distant_protobuf_Document_RelationMention_descriptor;
    }
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return cc.factorie.protobuf.DocumentProtos.internal_static_distant_protobuf_Document_RelationMention_fieldAccessorTable;
    }
    // required int32 id = 1;
    public static final int ID_FIELD_NUMBER = 1;
    private boolean hasId;
    private int id_ = 0;
    public boolean hasId() { return hasId; }
    public int getId() { return id_; }
    // required int32 source = 2;
    public static final int SOURCE_FIELD_NUMBER = 2;
    private boolean hasSource;
    private int source_ = 0;
    public boolean hasSource() { return hasSource; }
    public int getSource() { return source_; }
    // required int32 dest = 3;
    public static final int DEST_FIELD_NUMBER = 3;
    private boolean hasDest;
    private int dest_ = 0;
    public boolean hasDest() { return hasDest; }
    public int getDest() { return dest_; }
    // required string label = 4;
    public static final int LABEL_FIELD_NUMBER = 4;
    private boolean hasLabel;
    private java.lang.String label_ = "";
    public boolean hasLabel() { return hasLabel; }
    public java.lang.String getLabel() { return label_; }
    private void initFields() {
    }
    // All four fields are required, so all must be set to be valid.
    public final boolean isInitialized() {
      if (!hasId) return false;
      if (!hasSource) return false;
      if (!hasDest) return false;
      if (!hasLabel) return false;
      return true;
    }
    // Serializes set fields in field-number order, then any unknown
    // fields preserved from parsing.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (hasId()) {
        output.writeInt32(1, getId());
      }
      if (hasSource()) {
        output.writeInt32(2, getSource());
      }
      if (hasDest()) {
        output.writeInt32(3, getDest());
      }
      if (hasLabel()) {
        output.writeString(4, getLabel());
      }
      getUnknownFields().writeTo(output);
    }
    // Memoized wire size; -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
      size = 0;
      if (hasId()) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(1, getId());
      }
      if (hasSource()) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(2, getSource());
      }
      if (hasDest()) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(3, getDest());
      }
      if (hasLabel()) {
        size += com.google.protobuf.CodedOutputStream
          .computeStringSize(4, getLabel());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    // Standard generated parse entry points; buildParsed() converts a
    // missing required field into InvalidProtocolBufferException, and
    // parseDelimitedFrom returns null on a clean EOF.
    public static cc.factorie.protobuf.DocumentProtos.Document.RelationMention parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static cc.factorie.protobuf.DocumentProtos.Document.RelationMention parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static cc.factorie.protobuf.DocumentProtos.Document.RelationMention parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static cc.factorie.protobuf.DocumentProtos.Document.RelationMention parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static cc.factorie.protobuf.DocumentProtos.Document.RelationMention parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static cc.factorie.protobuf.DocumentProtos.Document.RelationMention parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static cc.factorie.protobuf.DocumentProtos.Document.RelationMention parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static cc.factorie.protobuf.DocumentProtos.Document.RelationMention parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static cc.factorie.protobuf.DocumentProtos.Document.RelationMention parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static cc.factorie.protobuf.DocumentProtos.Document.RelationMention parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(cc.factorie.protobuf.DocumentProtos.Document.RelationMention prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    // Mutable builder for RelationMention. Same single-use protobuf
    // 2.x contract as the other builders in this file: build() hands
    // out the in-place-mutated "result" and nulls the field.
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder> {
      private cc.factorie.protobuf.DocumentProtos.Document.RelationMention result;
      // Construct using cc.factorie.protobuf.DocumentProtos.Document.RelationMention.newBuilder()
      private Builder() {}
      private static Builder create() {
        Builder builder = new Builder();
        builder.result = new cc.factorie.protobuf.DocumentProtos.Document.RelationMention();
        return builder;
      }
      protected cc.factorie.protobuf.DocumentProtos.Document.RelationMention internalGetResult() {
        return result;
      }
      public Builder clear() {
        if (result == null) {
          throw new IllegalStateException(
            "Cannot call clear() after build().");
        }
        result = new cc.factorie.protobuf.DocumentProtos.Document.RelationMention();
        return this;
      }
      public Builder clone() {
        return create().mergeFrom(result);
      }
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return cc.factorie.protobuf.DocumentProtos.Document.RelationMention.getDescriptor();
      }
      public cc.factorie.protobuf.DocumentProtos.Document.RelationMention getDefaultInstanceForType() {
        return cc.factorie.protobuf.DocumentProtos.Document.RelationMention.getDefaultInstance();
      }
      public boolean isInitialized() {
        return result.isInitialized();
      }
      public cc.factorie.protobuf.DocumentProtos.Document.RelationMention build() {
        if (result != null && !isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return buildPartial();
      }
      private cc.factorie.protobuf.DocumentProtos.Document.RelationMention buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        if (!isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return buildPartial();
      }
      public cc.factorie.protobuf.DocumentProtos.Document.RelationMention buildPartial() {
        if (result == null) {
          throw new IllegalStateException(
            "build() has already been called on this Builder.");
        }
        cc.factorie.protobuf.DocumentProtos.Document.RelationMention returnMe = result;
        result = null;
        return returnMe;
      }
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof cc.factorie.protobuf.DocumentProtos.Document.RelationMention) {
          return mergeFrom((cc.factorie.protobuf.DocumentProtos.Document.RelationMention)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      // Field-by-field merge: only fields explicitly set on "other"
      // overwrite this builder's values.
      public Builder mergeFrom(cc.factorie.protobuf.DocumentProtos.Document.RelationMention other) {
        if (other == cc.factorie.protobuf.DocumentProtos.Document.RelationMention.getDefaultInstance()) return this;
        if (other.hasId()) {
          setId(other.getId());
        }
        if (other.hasSource()) {
          setSource(other.getSource());
        }
        if (other.hasDest()) {
          setDest(other.getDest());
        }
        if (other.hasLabel()) {
          setLabel(other.getLabel());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      // Wire-format parse loop. Tag 0 means end of stream/group; the
      // default branch collects unrecognized tags into unknownFields.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                return this;
              }
              break;
            }
            case 8: {
              setId(input.readInt32());
              break;
            }
            case 16: {
              setSource(input.readInt32());
              break;
            }
            case 24: {
              setDest(input.readInt32());
              break;
            }
            case 34: {
              setLabel(input.readString());
              break;
            }
          }
        }
      }
      // required int32 id = 1;
      public boolean hasId() {
        return result.hasId();
      }
      public int getId() {
        return result.getId();
      }
      public Builder setId(int value) {
        result.hasId = true;
        result.id_ = value;
        return this;
      }
      public Builder clearId() {
        result.hasId = false;
        result.id_ = 0;
        return this;
      }
      // required int32 source = 2;
      public boolean hasSource() {
        return result.hasSource();
      }
      public int getSource() {
        return result.getSource();
      }
      public Builder setSource(int value) {
        result.hasSource = true;
        result.source_ = value;
        return this;
      }
      public Builder clearSource() {
        result.hasSource = false;
        result.source_ = 0;
        return this;
      }
      // required int32 dest = 3;
      public boolean hasDest() {
        return result.hasDest();
      }
      public int getDest() {
        return result.getDest();
      }
      public Builder setDest(int value) {
        result.hasDest = true;
        result.dest_ = value;
        return this;
      }
      public Builder clearDest() {
        result.hasDest = false;
        result.dest_ = 0;
        return this;
      }
      // required string label = 4;
      public boolean hasLabel() {
        return result.hasLabel();
      }
      public java.lang.String getLabel() {
        return result.getLabel();
      }
      public Builder setLabel(java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        result.hasLabel = true;
        result.label_ = value;
        return this;
      }
      public Builder clearLabel() {
        result.hasLabel = false;
        result.label_ = getDefaultInstance().getLabel();
        return this;
      }
      // @@protoc_insertion_point(builder_scope:distant.protobuf.Document.RelationMention)
    }
    static {
      defaultInstance = new RelationMention(true);
      cc.factorie.protobuf.DocumentProtos.internalForceInit();
      defaultInstance.initFields();
    }
    // @@protoc_insertion_point(class_scope:distant.protobuf.Document.RelationMention)
  }
// required string filename = 1;
public static final int FILENAME_FIELD_NUMBER = 1;
private boolean hasFilename;
private java.lang.String filename_ = "";
public boolean hasFilename() { return hasFilename; }
public java.lang.String getFilename() { return filename_; }
// repeated .distant.protobuf.Document.Sentence sentences = 2;
public static final int SENTENCES_FIELD_NUMBER = 2;
private java.util.List<cc.factorie.protobuf.DocumentProtos.Document.Sentence> sentences_ =
java.util.Collections.emptyList();
public java.util.List<cc.factorie.protobuf.DocumentProtos.Document.Sentence> getSentencesList() {
return sentences_;
}
public int getSentencesCount() { return sentences_.size(); }
public cc.factorie.protobuf.DocumentProtos.Document.Sentence getSentences(int index) {
return sentences_.get(index);
}
private void initFields() {
}
public final boolean isInitialized() {
if (!hasFilename) return false;
for (cc.factorie.protobuf.DocumentProtos.Document.Sentence element : getSentencesList()) {
if (!element.isInitialized()) return false;
}
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (hasFilename()) {
output.writeString(1, getFilename());
}
for (cc.factorie.protobuf.DocumentProtos.Document.Sentence element : getSentencesList()) {
output.writeMessage(2, element);
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (hasFilename()) {
size += com.google.protobuf.CodedOutputStream
.computeStringSize(1, getFilename());
}
for (cc.factorie.protobuf.DocumentProtos.Document.Sentence element : getSentencesList()) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, element);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
    // Static parse entry points. All delegate to a fresh Builder; buildParsed()
    // rejects messages whose required fields are missing with
    // InvalidProtocolBufferException. The delimited variants read a length
    // prefix first and return null at end of stream.
    public static cc.factorie.protobuf.DocumentProtos.Document parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static cc.factorie.protobuf.DocumentProtos.Document parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static cc.factorie.protobuf.DocumentProtos.Document parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static cc.factorie.protobuf.DocumentProtos.Document parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static cc.factorie.protobuf.DocumentProtos.Document parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static cc.factorie.protobuf.DocumentProtos.Document parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static cc.factorie.protobuf.DocumentProtos.Document parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static cc.factorie.protobuf.DocumentProtos.Document parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static cc.factorie.protobuf.DocumentProtos.Document parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static cc.factorie.protobuf.DocumentProtos.Document parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    // Builder factories: a fresh builder, or one pre-populated from a prototype.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(cc.factorie.protobuf.DocumentProtos.Document prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    // Mutable builder for Document. It mutates a private 'result' message in
    // place; build()/buildPartial() hand that message to the caller and null
    // out 'result' so further use of the builder fails fast.
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder> {
      private cc.factorie.protobuf.DocumentProtos.Document result;
      // Construct using cc.factorie.protobuf.DocumentProtos.Document.newBuilder()
      private Builder() {}
      private static Builder create() {
        Builder builder = new Builder();
        builder.result = new cc.factorie.protobuf.DocumentProtos.Document();
        return builder;
      }
      protected cc.factorie.protobuf.DocumentProtos.Document internalGetResult() {
        return result;
      }
      public Builder clear() {
        if (result == null) {
          throw new IllegalStateException(
            "Cannot call clear() after build().");
        }
        result = new cc.factorie.protobuf.DocumentProtos.Document();
        return this;
      }
      public Builder clone() {
        return create().mergeFrom(result);
      }
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return cc.factorie.protobuf.DocumentProtos.Document.getDescriptor();
      }
      public cc.factorie.protobuf.DocumentProtos.Document getDefaultInstanceForType() {
        return cc.factorie.protobuf.DocumentProtos.Document.getDefaultInstance();
      }
      public boolean isInitialized() {
        return result.isInitialized();
      }
      // Throws UninitializedMessageException if required fields are missing.
      public cc.factorie.protobuf.DocumentProtos.Document build() {
        if (result != null && !isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return buildPartial();
      }
      // Parse-path variant of build(): same check, but surfaces missing
      // required fields as InvalidProtocolBufferException.
      private cc.factorie.protobuf.DocumentProtos.Document buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        if (!isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return buildPartial();
      }
      public cc.factorie.protobuf.DocumentProtos.Document buildPartial() {
        if (result == null) {
          throw new IllegalStateException(
            "build() has already been called on this Builder.");
        }
        // Freeze the repeated list before releasing the message.
        if (result.sentences_ != java.util.Collections.EMPTY_LIST) {
          result.sentences_ =
            java.util.Collections.unmodifiableList(result.sentences_);
        }
        cc.factorie.protobuf.DocumentProtos.Document returnMe = result;
        result = null;
        return returnMe;
      }
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof cc.factorie.protobuf.DocumentProtos.Document) {
          return mergeFrom((cc.factorie.protobuf.DocumentProtos.Document)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      public Builder mergeFrom(cc.factorie.protobuf.DocumentProtos.Document other) {
        if (other == cc.factorie.protobuf.DocumentProtos.Document.getDefaultInstance()) return this;
        if (other.hasFilename()) {
          setFilename(other.getFilename());
        }
        if (!other.sentences_.isEmpty()) {
          // Replace the shared immutable empty list with a mutable one on demand.
          if (result.sentences_.isEmpty()) {
            result.sentences_ = new java.util.ArrayList<cc.factorie.protobuf.DocumentProtos.Document.Sentence>();
          }
          result.sentences_.addAll(other.sentences_);
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      // Tag-dispatch parse loop; a 0 tag terminates, unrecognized tags are
      // preserved in the unknown-field set.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                return this;
              }
              break;
            }
            case 10: {
              setFilename(input.readString());
              break;
            }
            case 18: {
              cc.factorie.protobuf.DocumentProtos.Document.Sentence.Builder subBuilder = cc.factorie.protobuf.DocumentProtos.Document.Sentence.newBuilder();
              input.readMessage(subBuilder, extensionRegistry);
              addSentences(subBuilder.buildPartial());
              break;
            }
          }
        }
      }
      // required string filename = 1;
      public boolean hasFilename() {
        return result.hasFilename();
      }
      public java.lang.String getFilename() {
        return result.getFilename();
      }
      public Builder setFilename(java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        result.hasFilename = true;
        result.filename_ = value;
        return this;
      }
      public Builder clearFilename() {
        result.hasFilename = false;
        result.filename_ = getDefaultInstance().getFilename();
        return this;
      }
      // repeated .distant.protobuf.Document.Sentence sentences = 2;
      public java.util.List<cc.factorie.protobuf.DocumentProtos.Document.Sentence> getSentencesList() {
        return java.util.Collections.unmodifiableList(result.sentences_);
      }
      public int getSentencesCount() {
        return result.getSentencesCount();
      }
      public cc.factorie.protobuf.DocumentProtos.Document.Sentence getSentences(int index) {
        return result.getSentences(index);
      }
      // NOTE(review): set(index, ...) assumes an element already exists at
      // 'index'; on the default immutable empty list it throws — generated
      // callers only set previously added indices.
      public Builder setSentences(int index, cc.factorie.protobuf.DocumentProtos.Document.Sentence value) {
        if (value == null) {
          throw new NullPointerException();
        }
        result.sentences_.set(index, value);
        return this;
      }
      public Builder setSentences(int index, cc.factorie.protobuf.DocumentProtos.Document.Sentence.Builder builderForValue) {
        result.sentences_.set(index, builderForValue.build());
        return this;
      }
      public Builder addSentences(cc.factorie.protobuf.DocumentProtos.Document.Sentence value) {
        if (value == null) {
          throw new NullPointerException();
        }
        if (result.sentences_.isEmpty()) {
          result.sentences_ = new java.util.ArrayList<cc.factorie.protobuf.DocumentProtos.Document.Sentence>();
        }
        result.sentences_.add(value);
        return this;
      }
      public Builder addSentences(cc.factorie.protobuf.DocumentProtos.Document.Sentence.Builder builderForValue) {
        if (result.sentences_.isEmpty()) {
          result.sentences_ = new java.util.ArrayList<cc.factorie.protobuf.DocumentProtos.Document.Sentence>();
        }
        result.sentences_.add(builderForValue.build());
        return this;
      }
      public Builder addAllSentences(
          java.lang.Iterable<? extends cc.factorie.protobuf.DocumentProtos.Document.Sentence> values) {
        if (result.sentences_.isEmpty()) {
          result.sentences_ = new java.util.ArrayList<cc.factorie.protobuf.DocumentProtos.Document.Sentence>();
        }
        super.addAll(values, result.sentences_);
        return this;
      }
      public Builder clearSentences() {
        result.sentences_ = java.util.Collections.emptyList();
        return this;
      }
      // @@protoc_insertion_point(builder_scope:distant.protobuf.Document)
    }
    // Eagerly builds the shared default instance and forces descriptor
    // initialization of the enclosing outer class.
    static {
      defaultInstance = new Document(true);
      cc.factorie.protobuf.DocumentProtos.internalForceInit();
      defaultInstance.initFields();
    }
// @@protoc_insertion_point(class_scope:distant.protobuf.Document)
}
public static final class Relation extends
com.google.protobuf.GeneratedMessage {
// Use Relation.newBuilder() to construct.
    private Relation() {
      initFields();
    }
    // No-init constructor used only for the shared default instance.
    private Relation(boolean noInit) {}
    private static final Relation defaultInstance;
    public static Relation getDefaultInstance() {
      return defaultInstance;
    }
    public Relation getDefaultInstanceForType() {
      return defaultInstance;
    }
    // Descriptor plumbing: resolved via the outer class's generated tables.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return cc.factorie.protobuf.DocumentProtos.internal_static_distant_protobuf_Relation_descriptor;
    }
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return cc.factorie.protobuf.DocumentProtos.internal_static_distant_protobuf_Relation_fieldAccessorTable;
    }
    // Generated message with fields: filename (1, required string),
    // sourceId (2, required int32), destId (3, required int32),
    // feature (4, repeated string), sentence (5, optional string).
    public static final class RelationMentionRef extends
        com.google.protobuf.GeneratedMessage {
      // Use RelationMentionRef.newBuilder() to construct.
      private RelationMentionRef() {
        initFields();
      }
      // No-init constructor used only for the shared default instance.
      private RelationMentionRef(boolean noInit) {}
      private static final RelationMentionRef defaultInstance;
      public static RelationMentionRef getDefaultInstance() {
        return defaultInstance;
      }
      public RelationMentionRef getDefaultInstanceForType() {
        return defaultInstance;
      }
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return cc.factorie.protobuf.DocumentProtos.internal_static_distant_protobuf_Relation_RelationMentionRef_descriptor;
      }
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return cc.factorie.protobuf.DocumentProtos.internal_static_distant_protobuf_Relation_RelationMentionRef_fieldAccessorTable;
      }
      // required string filename = 1;
      public static final int FILENAME_FIELD_NUMBER = 1;
      private boolean hasFilename;
      private java.lang.String filename_ = "";
      public boolean hasFilename() { return hasFilename; }
      public java.lang.String getFilename() { return filename_; }
      // required int32 sourceId = 2;
      public static final int SOURCEID_FIELD_NUMBER = 2;
      private boolean hasSourceId;
      private int sourceId_ = 0;
      public boolean hasSourceId() { return hasSourceId; }
      public int getSourceId() { return sourceId_; }
      // required int32 destId = 3;
      public static final int DESTID_FIELD_NUMBER = 3;
      private boolean hasDestId;
      private int destId_ = 0;
      public boolean hasDestId() { return hasDestId; }
      public int getDestId() { return destId_; }
      // repeated string feature = 4;
      public static final int FEATURE_FIELD_NUMBER = 4;
      private java.util.List<java.lang.String> feature_ =
        java.util.Collections.emptyList();
      public java.util.List<java.lang.String> getFeatureList() {
        return feature_;
      }
      public int getFeatureCount() { return feature_.size(); }
      public java.lang.String getFeature(int index) {
        return feature_.get(index);
      }
      // optional string sentence = 5;
      public static final int SENTENCE_FIELD_NUMBER = 5;
      private boolean hasSentence;
      private java.lang.String sentence_ = "";
      public boolean hasSentence() { return hasSentence; }
      public java.lang.String getSentence() { return sentence_; }
      private void initFields() {
      }
      // Initialized iff the three required fields are present.
      public final boolean isInitialized() {
        if (!hasFilename) return false;
        if (!hasSourceId) return false;
        if (!hasDestId) return false;
        return true;
      }
      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        getSerializedSize();  // populate the size cache before writing
        if (hasFilename()) {
          output.writeString(1, getFilename());
        }
        if (hasSourceId()) {
          output.writeInt32(2, getSourceId());
        }
        if (hasDestId()) {
          output.writeInt32(3, getDestId());
        }
        for (java.lang.String element : getFeatureList()) {
          output.writeString(4, element);
        }
        if (hasSentence()) {
          output.writeString(5, getSentence());
        }
        getUnknownFields().writeTo(output);
      }
      private int memoizedSerializedSize = -1;
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;
        size = 0;
        if (hasFilename()) {
          size += com.google.protobuf.CodedOutputStream
            .computeStringSize(1, getFilename());
        }
        if (hasSourceId()) {
          size += com.google.protobuf.CodedOutputStream
            .computeInt32Size(2, getSourceId());
        }
        if (hasDestId()) {
          size += com.google.protobuf.CodedOutputStream
            .computeInt32Size(3, getDestId());
        }
        {
          int dataSize = 0;
          for (java.lang.String element : getFeatureList()) {
            dataSize += com.google.protobuf.CodedOutputStream
              .computeStringSizeNoTag(element);
          }
          size += dataSize;
          // one tag byte per repeated 'feature' element
          size += 1 * getFeatureList().size();
        }
        if (hasSentence()) {
          size += com.google.protobuf.CodedOutputStream
            .computeStringSize(5, getSentence());
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }
      // Static parse entry points; buildParsed() validates required fields.
      public static cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return newBuilder().mergeFrom(data).buildParsed();
      }
      public static cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return newBuilder().mergeFrom(data, extensionRegistry)
                 .buildParsed();
      }
      public static cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return newBuilder().mergeFrom(data).buildParsed();
      }
      public static cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return newBuilder().mergeFrom(data, extensionRegistry)
                 .buildParsed();
      }
      public static cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return newBuilder().mergeFrom(input).buildParsed();
      }
      public static cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return newBuilder().mergeFrom(input, extensionRegistry)
                 .buildParsed();
      }
      // Delimited variants read a length prefix; return null at end of stream.
      public static cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        Builder builder = newBuilder();
        if (builder.mergeDelimitedFrom(input)) {
          return builder.buildParsed();
        } else {
          return null;
        }
      }
      public static cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        Builder builder = newBuilder();
        if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
          return builder.buildParsed();
        } else {
          return null;
        }
      }
      public static cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return newBuilder().mergeFrom(input).buildParsed();
      }
      public static cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return newBuilder().mergeFrom(input, extensionRegistry)
                 .buildParsed();
      }
      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      public static Builder newBuilder(cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }
      // Mutable builder: mutates a private 'result' in place; build() hands it
      // off and nulls the reference so further use fails fast.
      public static final class Builder extends
          com.google.protobuf.GeneratedMessage.Builder<Builder> {
        private cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef result;
        // Construct using cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef.newBuilder()
        private Builder() {}
        private static Builder create() {
          Builder builder = new Builder();
          builder.result = new cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef();
          return builder;
        }
        protected cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef internalGetResult() {
          return result;
        }
        public Builder clear() {
          if (result == null) {
            throw new IllegalStateException(
              "Cannot call clear() after build().");
          }
          result = new cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef();
          return this;
        }
        public Builder clone() {
          return create().mergeFrom(result);
        }
        public com.google.protobuf.Descriptors.Descriptor
            getDescriptorForType() {
          return cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef.getDescriptor();
        }
        public cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef getDefaultInstanceForType() {
          return cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef.getDefaultInstance();
        }
        public boolean isInitialized() {
          return result.isInitialized();
        }
        public cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef build() {
          if (result != null && !isInitialized()) {
            throw newUninitializedMessageException(result);
          }
          return buildPartial();
        }
        private cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef buildParsed()
            throws com.google.protobuf.InvalidProtocolBufferException {
          if (!isInitialized()) {
            throw newUninitializedMessageException(
              result).asInvalidProtocolBufferException();
          }
          return buildPartial();
        }
        public cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef buildPartial() {
          if (result == null) {
            throw new IllegalStateException(
              "build() has already been called on this Builder.");
          }
          // Freeze the repeated list before releasing the message.
          if (result.feature_ != java.util.Collections.EMPTY_LIST) {
            result.feature_ =
              java.util.Collections.unmodifiableList(result.feature_);
          }
          cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef returnMe = result;
          result = null;
          return returnMe;
        }
        public Builder mergeFrom(com.google.protobuf.Message other) {
          if (other instanceof cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef) {
            return mergeFrom((cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef)other);
          } else {
            super.mergeFrom(other);
            return this;
          }
        }
        public Builder mergeFrom(cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef other) {
          if (other == cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef.getDefaultInstance()) return this;
          if (other.hasFilename()) {
            setFilename(other.getFilename());
          }
          if (other.hasSourceId()) {
            setSourceId(other.getSourceId());
          }
          if (other.hasDestId()) {
            setDestId(other.getDestId());
          }
          if (!other.feature_.isEmpty()) {
            if (result.feature_.isEmpty()) {
              result.feature_ = new java.util.ArrayList<java.lang.String>();
            }
            result.feature_.addAll(other.feature_);
          }
          if (other.hasSentence()) {
            setSentence(other.getSentence());
          }
          this.mergeUnknownFields(other.getUnknownFields());
          return this;
        }
        // Tag-dispatch parse loop; a 0 tag terminates, unknown tags are kept.
        public Builder mergeFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder(
              this.getUnknownFields());
          while (true) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                this.setUnknownFields(unknownFields.build());
                return this;
              default: {
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  this.setUnknownFields(unknownFields.build());
                  return this;
                }
                break;
              }
              case 10: {
                setFilename(input.readString());
                break;
              }
              case 16: {
                setSourceId(input.readInt32());
                break;
              }
              case 24: {
                setDestId(input.readInt32());
                break;
              }
              case 34: {
                addFeature(input.readString());
                break;
              }
              case 42: {
                setSentence(input.readString());
                break;
              }
            }
          }
        }
        // required string filename = 1;
        public boolean hasFilename() {
          return result.hasFilename();
        }
        public java.lang.String getFilename() {
          return result.getFilename();
        }
        public Builder setFilename(java.lang.String value) {
          if (value == null) {
            throw new NullPointerException();
          }
          result.hasFilename = true;
          result.filename_ = value;
          return this;
        }
        public Builder clearFilename() {
          result.hasFilename = false;
          result.filename_ = getDefaultInstance().getFilename();
          return this;
        }
        // required int32 sourceId = 2;
        public boolean hasSourceId() {
          return result.hasSourceId();
        }
        public int getSourceId() {
          return result.getSourceId();
        }
        public Builder setSourceId(int value) {
          result.hasSourceId = true;
          result.sourceId_ = value;
          return this;
        }
        public Builder clearSourceId() {
          result.hasSourceId = false;
          result.sourceId_ = 0;
          return this;
        }
        // required int32 destId = 3;
        public boolean hasDestId() {
          return result.hasDestId();
        }
        public int getDestId() {
          return result.getDestId();
        }
        public Builder setDestId(int value) {
          result.hasDestId = true;
          result.destId_ = value;
          return this;
        }
        public Builder clearDestId() {
          result.hasDestId = false;
          result.destId_ = 0;
          return this;
        }
        // repeated string feature = 4;
        public java.util.List<java.lang.String> getFeatureList() {
          return java.util.Collections.unmodifiableList(result.feature_);
        }
        public int getFeatureCount() {
          return result.getFeatureCount();
        }
        public java.lang.String getFeature(int index) {
          return result.getFeature(index);
        }
        // NOTE(review): set(index, ...) assumes an element exists at 'index';
        // on the default immutable empty list it throws.
        public Builder setFeature(int index, java.lang.String value) {
          if (value == null) {
            throw new NullPointerException();
          }
          result.feature_.set(index, value);
          return this;
        }
        public Builder addFeature(java.lang.String value) {
          if (value == null) {
            throw new NullPointerException();
          }
          if (result.feature_.isEmpty()) {
            result.feature_ = new java.util.ArrayList<java.lang.String>();
          }
          result.feature_.add(value);
          return this;
        }
        public Builder addAllFeature(
            java.lang.Iterable<? extends java.lang.String> values) {
          if (result.feature_.isEmpty()) {
            result.feature_ = new java.util.ArrayList<java.lang.String>();
          }
          super.addAll(values, result.feature_);
          return this;
        }
        public Builder clearFeature() {
          result.feature_ = java.util.Collections.emptyList();
          return this;
        }
        // optional string sentence = 5;
        public boolean hasSentence() {
          return result.hasSentence();
        }
        public java.lang.String getSentence() {
          return result.getSentence();
        }
        public Builder setSentence(java.lang.String value) {
          if (value == null) {
            throw new NullPointerException();
          }
          result.hasSentence = true;
          result.sentence_ = value;
          return this;
        }
        public Builder clearSentence() {
          result.hasSentence = false;
          result.sentence_ = getDefaultInstance().getSentence();
          return this;
        }
        // @@protoc_insertion_point(builder_scope:distant.protobuf.Relation.RelationMentionRef)
      }
      static {
        defaultInstance = new RelationMentionRef(true);
        cc.factorie.protobuf.DocumentProtos.internalForceInit();
        defaultInstance.initFields();
      }
      // @@protoc_insertion_point(class_scope:distant.protobuf.Relation.RelationMentionRef)
    }
    // Message fields: per-field has-bits plus backing values; the repeated
    // 'mention' field starts as the shared immutable empty list.
    // required string sourceGuid = 1;
    public static final int SOURCEGUID_FIELD_NUMBER = 1;
    private boolean hasSourceGuid;
    private java.lang.String sourceGuid_ = "";
    public boolean hasSourceGuid() { return hasSourceGuid; }
    public java.lang.String getSourceGuid() { return sourceGuid_; }
    // required string destGuid = 2;
    public static final int DESTGUID_FIELD_NUMBER = 2;
    private boolean hasDestGuid;
    private java.lang.String destGuid_ = "";
    public boolean hasDestGuid() { return hasDestGuid; }
    public java.lang.String getDestGuid() { return destGuid_; }
    // required string relType = 3;
    public static final int RELTYPE_FIELD_NUMBER = 3;
    private boolean hasRelType;
    private java.lang.String relType_ = "";
    public boolean hasRelType() { return hasRelType; }
    public java.lang.String getRelType() { return relType_; }
    // repeated .distant.protobuf.Relation.RelationMentionRef mention = 4;
    public static final int MENTION_FIELD_NUMBER = 4;
    private java.util.List<cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef> mention_ =
      java.util.Collections.emptyList();
    public java.util.List<cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef> getMentionList() {
      return mention_;
    }
    public int getMentionCount() { return mention_.size(); }
    public cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef getMention(int index) {
      return mention_.get(index);
    }
    private void initFields() {
    }
    // Initialized iff the three required strings are set and every nested
    // RelationMentionRef is itself initialized.
    public final boolean isInitialized() {
      if (!hasSourceGuid) return false;
      if (!hasDestGuid) return false;
      if (!hasRelType) return false;
      for (cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef element : getMentionList()) {
        if (!element.isInitialized()) return false;
      }
      return true;
    }
    // Serializes set fields: guids/relType as string fields 1-3, each mention
    // as an embedded message in field 4, then any unknown fields.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();  // populate the size cache before writing
      if (hasSourceGuid()) {
        output.writeString(1, getSourceGuid());
      }
      if (hasDestGuid()) {
        output.writeString(2, getDestGuid());
      }
      if (hasRelType()) {
        output.writeString(3, getRelType());
      }
      for (cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef element : getMentionList()) {
        output.writeMessage(4, element);
      }
      getUnknownFields().writeTo(output);
    }
    // Cached wire size; -1 means "not computed yet". NOTE(review): non-volatile
    // cache — relies on the message being effectively immutable after build().
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
      size = 0;
      if (hasSourceGuid()) {
        size += com.google.protobuf.CodedOutputStream
          .computeStringSize(1, getSourceGuid());
      }
      if (hasDestGuid()) {
        size += com.google.protobuf.CodedOutputStream
          .computeStringSize(2, getDestGuid());
      }
      if (hasRelType()) {
        size += com.google.protobuf.CodedOutputStream
          .computeStringSize(3, getRelType());
      }
      for (cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef element : getMentionList()) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(4, element);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    // Static parse entry points; buildParsed() rejects messages missing
    // required fields. Delimited variants return null at end of stream.
    public static cc.factorie.protobuf.DocumentProtos.Relation parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static cc.factorie.protobuf.DocumentProtos.Relation parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static cc.factorie.protobuf.DocumentProtos.Relation parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static cc.factorie.protobuf.DocumentProtos.Relation parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static cc.factorie.protobuf.DocumentProtos.Relation parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static cc.factorie.protobuf.DocumentProtos.Relation parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static cc.factorie.protobuf.DocumentProtos.Relation parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static cc.factorie.protobuf.DocumentProtos.Relation parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static cc.factorie.protobuf.DocumentProtos.Relation parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static cc.factorie.protobuf.DocumentProtos.Relation parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    // Builder factories: a fresh builder, or one pre-populated from a prototype.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(cc.factorie.protobuf.DocumentProtos.Relation prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder> {
      // Builder state: a private 'result' message mutated in place until
      // build()/buildPartial() hand it off and null the reference.
      private cc.factorie.protobuf.DocumentProtos.Relation result;
      // Construct using cc.factorie.protobuf.DocumentProtos.Relation.newBuilder()
      private Builder() {}
      private static Builder create() {
        Builder builder = new Builder();
        builder.result = new cc.factorie.protobuf.DocumentProtos.Relation();
        return builder;
      }
      protected cc.factorie.protobuf.DocumentProtos.Relation internalGetResult() {
        return result;
      }
      public Builder clear() {
        if (result == null) {
          throw new IllegalStateException(
            "Cannot call clear() after build().");
        }
        result = new cc.factorie.protobuf.DocumentProtos.Relation();
        return this;
      }
      public Builder clone() {
        return create().mergeFrom(result);
      }
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return cc.factorie.protobuf.DocumentProtos.Relation.getDescriptor();
      }
      public cc.factorie.protobuf.DocumentProtos.Relation getDefaultInstanceForType() {
        return cc.factorie.protobuf.DocumentProtos.Relation.getDefaultInstance();
      }
      public boolean isInitialized() {
        return result.isInitialized();
      }
      // Throws UninitializedMessageException if required fields are missing.
      public cc.factorie.protobuf.DocumentProtos.Relation build() {
        if (result != null && !isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return buildPartial();
      }
      // Parse-path variant: missing required fields surface as
      // InvalidProtocolBufferException.
      private cc.factorie.protobuf.DocumentProtos.Relation buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        if (!isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return buildPartial();
      }
      // Freezes the repeated 'mention' list, returns the message, and nulls
      // the internal reference so further builder use fails fast.
      public cc.factorie.protobuf.DocumentProtos.Relation buildPartial() {
        if (result == null) {
          throw new IllegalStateException(
            "build() has already been called on this Builder.");
        }
        if (result.mention_ != java.util.Collections.EMPTY_LIST) {
          result.mention_ =
            java.util.Collections.unmodifiableList(result.mention_);
        }
        cc.factorie.protobuf.DocumentProtos.Relation returnMe = result;
        result = null;
        return returnMe;
      }
      // Dynamic dispatch: merge a same-type message field-by-field, otherwise
      // fall back to the reflective base-class merge.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof cc.factorie.protobuf.DocumentProtos.Relation) {
          return mergeFrom((cc.factorie.protobuf.DocumentProtos.Relation)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      public Builder mergeFrom(cc.factorie.protobuf.DocumentProtos.Relation other) {
        if (other == cc.factorie.protobuf.DocumentProtos.Relation.getDefaultInstance()) return this;
        if (other.hasSourceGuid()) {
          setSourceGuid(other.getSourceGuid());
        }
        if (other.hasDestGuid()) {
          setDestGuid(other.getDestGuid());
        }
        if (other.hasRelType()) {
          setRelType(other.getRelType());
        }
        if (!other.mention_.isEmpty()) {
          // Replace the shared immutable empty list with a mutable one on demand.
          if (result.mention_.isEmpty()) {
            result.mention_ = new java.util.ArrayList<cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef>();
          }
          result.mention_.addAll(other.mention_);
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      // Tag-dispatch parse loop; a 0 tag terminates, unrecognized tags are
      // preserved in the unknown-field set. Tags 10/18/26 are the string
      // fields 1-3; tag 34 is an embedded RelationMentionRef (field 4).
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                return this;
              }
              break;
            }
            case 10: {
              setSourceGuid(input.readString());
              break;
            }
            case 18: {
              setDestGuid(input.readString());
              break;
            }
            case 26: {
              setRelType(input.readString());
              break;
            }
            case 34: {
              cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef.Builder subBuilder = cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef.newBuilder();
              input.readMessage(subBuilder, extensionRegistry);
              addMention(subBuilder.buildPartial());
              break;
            }
          }
        }
      }
      // Per-field accessors. All delegate to the message under construction
      // ('result'); setters reject null and flip the matching has-bit, and
      // will NPE after build()/buildPartial() since 'result' is then null.
      // required string sourceGuid = 1;
      public boolean hasSourceGuid() {
        return result.hasSourceGuid();
      }
      public java.lang.String getSourceGuid() {
        return result.getSourceGuid();
      }
      public Builder setSourceGuid(java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        result.hasSourceGuid = true;
        result.sourceGuid_ = value;
        return this;
      }
      public Builder clearSourceGuid() {
        result.hasSourceGuid = false;
        result.sourceGuid_ = getDefaultInstance().getSourceGuid();
        return this;
      }
      // required string destGuid = 2;
      public boolean hasDestGuid() {
        return result.hasDestGuid();
      }
      public java.lang.String getDestGuid() {
        return result.getDestGuid();
      }
      public Builder setDestGuid(java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        result.hasDestGuid = true;
        result.destGuid_ = value;
        return this;
      }
      public Builder clearDestGuid() {
        result.hasDestGuid = false;
        result.destGuid_ = getDefaultInstance().getDestGuid();
        return this;
      }
      // required string relType = 3;
      public boolean hasRelType() {
        return result.hasRelType();
      }
      public java.lang.String getRelType() {
        return result.getRelType();
      }
      public Builder setRelType(java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        result.hasRelType = true;
        result.relType_ = value;
        return this;
      }
      public Builder clearRelType() {
        result.hasRelType = false;
        result.relType_ = getDefaultInstance().getRelType();
        return this;
      }
      // repeated .distant.protobuf.Relation.RelationMentionRef mention = 4;
      public java.util.List<cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef> getMentionList() {
        // Read-only view; the backing list is swapped lazily from the
        // emptyList() sentinel to an ArrayList on first add.
        return java.util.Collections.unmodifiableList(result.mention_);
      }
      public int getMentionCount() {
        return result.getMentionCount();
      }
      public cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef getMention(int index) {
        return result.getMention(index);
      }
      public Builder setMention(int index, cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef value) {
        if (value == null) {
          throw new NullPointerException();
        }
        result.mention_.set(index, value);
        return this;
      }
      public Builder setMention(int index, cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef.Builder builderForValue) {
        result.mention_.set(index, builderForValue.build());
        return this;
      }
      public Builder addMention(cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef value) {
        if (value == null) {
          throw new NullPointerException();
        }
        if (result.mention_.isEmpty()) {
          result.mention_ = new java.util.ArrayList<cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef>();
        }
        result.mention_.add(value);
        return this;
      }
      public Builder addMention(cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef.Builder builderForValue) {
        if (result.mention_.isEmpty()) {
          result.mention_ = new java.util.ArrayList<cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef>();
        }
        result.mention_.add(builderForValue.build());
        return this;
      }
      public Builder addAllMention(
          java.lang.Iterable<? extends cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef> values) {
        if (result.mention_.isEmpty()) {
          result.mention_ = new java.util.ArrayList<cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef>();
        }
        super.addAll(values, result.mention_);
        return this;
      }
      public Builder clearMention() {
        // Reset to the shared immutable sentinel; a later add re-allocates.
        result.mention_ = java.util.Collections.emptyList();
        return this;
      }
// @@protoc_insertion_point(builder_scope:distant.protobuf.Relation)
}
    static {
      // The boolean-arg constructor skips field initialization; calling
      // internalForceInit() forces the outer DocumentProtos class to finish
      // loading before initFields() runs on the shared default instance.
      defaultInstance = new Relation(true);
      cc.factorie.protobuf.DocumentProtos.internalForceInit();
      defaultInstance.initFields();
    }
// @@protoc_insertion_point(class_scope:distant.protobuf.Relation)
}
  // Generated message type for 'distant.protobuf.Entity' (see the embedded
  // descriptor data at the bottom of this file): a required guid, optional
  // name/type/pred strings, and repeated EntityMentionRef children.
  // protoc-generated code -- regenerate from the .proto instead of editing.
  public static final class Entity extends
      com.google.protobuf.GeneratedMessage {
    // Use Entity.newBuilder() to construct.
    private Entity() {
      initFields();
    }
    // Used only for the shared defaultInstance; skips initFields().
    private Entity(boolean noInit) {}
    private static final Entity defaultInstance;
    public static Entity getDefaultInstance() {
      return defaultInstance;
    }
    public Entity getDefaultInstanceForType() {
      return defaultInstance;
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return cc.factorie.protobuf.DocumentProtos.internal_static_distant_protobuf_Entity_descriptor;
    }
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return cc.factorie.protobuf.DocumentProtos.internal_static_distant_protobuf_Entity_fieldAccessorTable;
    }
    // Generated message type for 'distant.protobuf.Entity.EntityMentionRef':
    // a (filename, id) reference plus repeated feature strings.
    public static final class EntityMentionRef extends
        com.google.protobuf.GeneratedMessage {
      // Use EntityMentionRef.newBuilder() to construct.
      private EntityMentionRef() {
        initFields();
      }
      // Used only for the shared defaultInstance; skips initFields().
      private EntityMentionRef(boolean noInit) {}
      private static final EntityMentionRef defaultInstance;
      public static EntityMentionRef getDefaultInstance() {
        return defaultInstance;
      }
      public EntityMentionRef getDefaultInstanceForType() {
        return defaultInstance;
      }
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return cc.factorie.protobuf.DocumentProtos.internal_static_distant_protobuf_Entity_EntityMentionRef_descriptor;
      }
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return cc.factorie.protobuf.DocumentProtos.internal_static_distant_protobuf_Entity_EntityMentionRef_fieldAccessorTable;
      }
      // required string filename = 1;
      public static final int FILENAME_FIELD_NUMBER = 1;
      private boolean hasFilename;
      private java.lang.String filename_ = "";
      public boolean hasFilename() { return hasFilename; }
      public java.lang.String getFilename() { return filename_; }
      // required int32 id = 2;
      public static final int ID_FIELD_NUMBER = 2;
      private boolean hasId;
      private int id_ = 0;
      public boolean hasId() { return hasId; }
      public int getId() { return id_; }
      // repeated string feature = 3;
      public static final int FEATURE_FIELD_NUMBER = 3;
      private java.util.List<java.lang.String> feature_ =
        java.util.Collections.emptyList();
      public java.util.List<java.lang.String> getFeatureList() {
        return feature_;
      }
      public int getFeatureCount() { return feature_.size(); }
      public java.lang.String getFeature(int index) {
        return feature_.get(index);
      }
      private void initFields() {
      }
      // A message is initialized when all required fields are set.
      public final boolean isInitialized() {
        if (!hasFilename) return false;
        if (!hasId) return false;
        return true;
      }
      // Serializes set fields in field-number order, then unknown fields.
      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        getSerializedSize();
        if (hasFilename()) {
          output.writeString(1, getFilename());
        }
        if (hasId()) {
          output.writeInt32(2, getId());
        }
        for (java.lang.String element : getFeatureList()) {
          output.writeString(3, element);
        }
        getUnknownFields().writeTo(output);
      }
      // Cached byte size; -1 means "not yet computed".
      private int memoizedSerializedSize = -1;
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;
        size = 0;
        if (hasFilename()) {
          size += com.google.protobuf.CodedOutputStream
            .computeStringSize(1, getFilename());
        }
        if (hasId()) {
          size += com.google.protobuf.CodedOutputStream
            .computeInt32Size(2, getId());
        }
        {
          int dataSize = 0;
          for (java.lang.String element : getFeatureList()) {
            dataSize += com.google.protobuf.CodedOutputStream
              .computeStringSizeNoTag(element);
          }
          size += dataSize;
          // One 1-byte tag per repeated string element.
          size += 1 * getFeatureList().size();
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }
      // parseFrom overloads: parse a full message from the given source;
      // throw InvalidProtocolBufferException if required fields are missing.
      public static cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return newBuilder().mergeFrom(data).buildParsed();
      }
      public static cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return newBuilder().mergeFrom(data, extensionRegistry)
                 .buildParsed();
      }
      public static cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return newBuilder().mergeFrom(data).buildParsed();
      }
      public static cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return newBuilder().mergeFrom(data, extensionRegistry)
                 .buildParsed();
      }
      public static cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return newBuilder().mergeFrom(input).buildParsed();
      }
      public static cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return newBuilder().mergeFrom(input, extensionRegistry)
                 .buildParsed();
      }
      // Delimited variants read a varint length prefix first; they return
      // null at clean end-of-stream.
      public static cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        Builder builder = newBuilder();
        if (builder.mergeDelimitedFrom(input)) {
          return builder.buildParsed();
        } else {
          return null;
        }
      }
      public static cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        Builder builder = newBuilder();
        if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
          return builder.buildParsed();
        } else {
          return null;
        }
      }
      public static cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return newBuilder().mergeFrom(input).buildParsed();
      }
      public static cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return newBuilder().mergeFrom(input, extensionRegistry)
                 .buildParsed();
      }
      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      public static Builder newBuilder(cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }
      // Single-use builder: 'result' is handed out by buildPartial() and set
      // to null, after which any further builder call fails.
      public static final class Builder extends
          com.google.protobuf.GeneratedMessage.Builder<Builder> {
        private cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef result;
        // Construct using cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef.newBuilder()
        private Builder() {}
        private static Builder create() {
          Builder builder = new Builder();
          builder.result = new cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef();
          return builder;
        }
        protected cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef internalGetResult() {
          return result;
        }
        public Builder clear() {
          if (result == null) {
            throw new IllegalStateException(
              "Cannot call clear() after build().");
          }
          result = new cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef();
          return this;
        }
        public Builder clone() {
          return create().mergeFrom(result);
        }
        public com.google.protobuf.Descriptors.Descriptor
            getDescriptorForType() {
          return cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef.getDescriptor();
        }
        public cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef getDefaultInstanceForType() {
          return cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef.getDefaultInstance();
        }
        public boolean isInitialized() {
          return result.isInitialized();
        }
        public cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef build() {
          if (result != null && !isInitialized()) {
            throw newUninitializedMessageException(result);
          }
          return buildPartial();
        }
        // Post-parse build: missing required fields become
        // InvalidProtocolBufferException.
        private cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef buildParsed()
            throws com.google.protobuf.InvalidProtocolBufferException {
          if (!isInitialized()) {
            throw newUninitializedMessageException(
              result).asInvalidProtocolBufferException();
          }
          return buildPartial();
        }
        public cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef buildPartial() {
          if (result == null) {
            throw new IllegalStateException(
              "build() has already been called on this Builder.");
          }
          // Identity check against the EMPTY_LIST sentinel before wrapping.
          if (result.feature_ != java.util.Collections.EMPTY_LIST) {
            result.feature_ =
              java.util.Collections.unmodifiableList(result.feature_);
          }
          cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef returnMe = result;
          result = null;
          return returnMe;
        }
        public Builder mergeFrom(com.google.protobuf.Message other) {
          if (other instanceof cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef) {
            return mergeFrom((cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef)other);
          } else {
            super.mergeFrom(other);
            return this;
          }
        }
        public Builder mergeFrom(cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef other) {
          if (other == cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef.getDefaultInstance()) return this;
          if (other.hasFilename()) {
            setFilename(other.getFilename());
          }
          if (other.hasId()) {
            setId(other.getId());
          }
          if (!other.feature_.isEmpty()) {
            if (result.feature_.isEmpty()) {
              result.feature_ = new java.util.ArrayList<java.lang.String>();
            }
            result.feature_.addAll(other.feature_);
          }
          this.mergeUnknownFields(other.getUnknownFields());
          return this;
        }
        // Wire-format parse loop; tag 0 means end of input, unknown tags are
        // kept in unknownFields.
        public Builder mergeFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder(
              this.getUnknownFields());
          while (true) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                this.setUnknownFields(unknownFields.build());
                return this;
              default: {
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  this.setUnknownFields(unknownFields.build());
                  return this;
                }
                break;
              }
              case 10: {  // filename: field 1, wire type 2
                setFilename(input.readString());
                break;
              }
              case 16: {  // id: field 2, varint wire type 0
                setId(input.readInt32());
                break;
              }
              case 26: {  // feature: field 3, wire type 2 (repeated)
                addFeature(input.readString());
                break;
              }
            }
          }
        }
        // required string filename = 1;
        public boolean hasFilename() {
          return result.hasFilename();
        }
        public java.lang.String getFilename() {
          return result.getFilename();
        }
        public Builder setFilename(java.lang.String value) {
          if (value == null) {
            throw new NullPointerException();
          }
          result.hasFilename = true;
          result.filename_ = value;
          return this;
        }
        public Builder clearFilename() {
          result.hasFilename = false;
          result.filename_ = getDefaultInstance().getFilename();
          return this;
        }
        // required int32 id = 2;
        public boolean hasId() {
          return result.hasId();
        }
        public int getId() {
          return result.getId();
        }
        public Builder setId(int value) {
          result.hasId = true;
          result.id_ = value;
          return this;
        }
        public Builder clearId() {
          result.hasId = false;
          result.id_ = 0;
          return this;
        }
        // repeated string feature = 3;
        public java.util.List<java.lang.String> getFeatureList() {
          return java.util.Collections.unmodifiableList(result.feature_);
        }
        public int getFeatureCount() {
          return result.getFeatureCount();
        }
        public java.lang.String getFeature(int index) {
          return result.getFeature(index);
        }
        public Builder setFeature(int index, java.lang.String value) {
          if (value == null) {
            throw new NullPointerException();
          }
          result.feature_.set(index, value);
          return this;
        }
        public Builder addFeature(java.lang.String value) {
          if (value == null) {
            throw new NullPointerException();
          }
          // Lazily replace the emptyList() sentinel with a mutable list.
          if (result.feature_.isEmpty()) {
            result.feature_ = new java.util.ArrayList<java.lang.String>();
          }
          result.feature_.add(value);
          return this;
        }
        public Builder addAllFeature(
            java.lang.Iterable<? extends java.lang.String> values) {
          if (result.feature_.isEmpty()) {
            result.feature_ = new java.util.ArrayList<java.lang.String>();
          }
          super.addAll(values, result.feature_);
          return this;
        }
        public Builder clearFeature() {
          result.feature_ = java.util.Collections.emptyList();
          return this;
        }
        // @@protoc_insertion_point(builder_scope:distant.protobuf.Entity.EntityMentionRef)
      }
      static {
        defaultInstance = new EntityMentionRef(true);
        cc.factorie.protobuf.DocumentProtos.internalForceInit();
        defaultInstance.initFields();
      }
      // @@protoc_insertion_point(class_scope:distant.protobuf.Entity.EntityMentionRef)
    }
    // required string guid = 1;
    public static final int GUID_FIELD_NUMBER = 1;
    private boolean hasGuid;
    private java.lang.String guid_ = "";
    public boolean hasGuid() { return hasGuid; }
    public java.lang.String getGuid() { return guid_; }
    // optional string name = 2;
    public static final int NAME_FIELD_NUMBER = 2;
    private boolean hasName;
    private java.lang.String name_ = "";
    public boolean hasName() { return hasName; }
    public java.lang.String getName() { return name_; }
    // optional string type = 3;
    public static final int TYPE_FIELD_NUMBER = 3;
    private boolean hasType;
    private java.lang.String type_ = "";
    public boolean hasType() { return hasType; }
    public java.lang.String getType() { return type_; }
    // optional string pred = 4;
    public static final int PRED_FIELD_NUMBER = 4;
    private boolean hasPred;
    private java.lang.String pred_ = "";
    public boolean hasPred() { return hasPred; }
    public java.lang.String getPred() { return pred_; }
    // repeated .distant.protobuf.Entity.EntityMentionRef mention = 5;
    public static final int MENTION_FIELD_NUMBER = 5;
    private java.util.List<cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef> mention_ =
      java.util.Collections.emptyList();
    public java.util.List<cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef> getMentionList() {
      return mention_;
    }
    public int getMentionCount() { return mention_.size(); }
    public cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef getMention(int index) {
      return mention_.get(index);
    }
    private void initFields() {
    }
    // Initialized when 'guid' is set and every nested mention is initialized.
    public final boolean isInitialized() {
      if (!hasGuid) return false;
      for (cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef element : getMentionList()) {
        if (!element.isInitialized()) return false;
      }
      return true;
    }
    // Serializes set fields in field-number order, then unknown fields.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (hasGuid()) {
        output.writeString(1, getGuid());
      }
      if (hasName()) {
        output.writeString(2, getName());
      }
      if (hasType()) {
        output.writeString(3, getType());
      }
      if (hasPred()) {
        output.writeString(4, getPred());
      }
      for (cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef element : getMentionList()) {
        output.writeMessage(5, element);
      }
      getUnknownFields().writeTo(output);
    }
    // Cached byte size; -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
      size = 0;
      if (hasGuid()) {
        size += com.google.protobuf.CodedOutputStream
          .computeStringSize(1, getGuid());
      }
      if (hasName()) {
        size += com.google.protobuf.CodedOutputStream
          .computeStringSize(2, getName());
      }
      if (hasType()) {
        size += com.google.protobuf.CodedOutputStream
          .computeStringSize(3, getType());
      }
      if (hasPred()) {
        size += com.google.protobuf.CodedOutputStream
          .computeStringSize(4, getPred());
      }
      for (cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef element : getMentionList()) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(5, element);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    // parseFrom overloads: parse a full Entity from the given source.
    public static cc.factorie.protobuf.DocumentProtos.Entity parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static cc.factorie.protobuf.DocumentProtos.Entity parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static cc.factorie.protobuf.DocumentProtos.Entity parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static cc.factorie.protobuf.DocumentProtos.Entity parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static cc.factorie.protobuf.DocumentProtos.Entity parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static cc.factorie.protobuf.DocumentProtos.Entity parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    // Delimited variants read a varint length prefix; null at end-of-stream.
    public static cc.factorie.protobuf.DocumentProtos.Entity parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static cc.factorie.protobuf.DocumentProtos.Entity parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static cc.factorie.protobuf.DocumentProtos.Entity parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static cc.factorie.protobuf.DocumentProtos.Entity parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(cc.factorie.protobuf.DocumentProtos.Entity prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    // Single-use builder: 'result' is handed out by buildPartial() and set to
    // null, after which any further builder call fails.
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder> {
      private cc.factorie.protobuf.DocumentProtos.Entity result;
      // Construct using cc.factorie.protobuf.DocumentProtos.Entity.newBuilder()
      private Builder() {}
      private static Builder create() {
        Builder builder = new Builder();
        builder.result = new cc.factorie.protobuf.DocumentProtos.Entity();
        return builder;
      }
      protected cc.factorie.protobuf.DocumentProtos.Entity internalGetResult() {
        return result;
      }
      public Builder clear() {
        if (result == null) {
          throw new IllegalStateException(
            "Cannot call clear() after build().");
        }
        result = new cc.factorie.protobuf.DocumentProtos.Entity();
        return this;
      }
      public Builder clone() {
        return create().mergeFrom(result);
      }
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return cc.factorie.protobuf.DocumentProtos.Entity.getDescriptor();
      }
      public cc.factorie.protobuf.DocumentProtos.Entity getDefaultInstanceForType() {
        return cc.factorie.protobuf.DocumentProtos.Entity.getDefaultInstance();
      }
      public boolean isInitialized() {
        return result.isInitialized();
      }
      public cc.factorie.protobuf.DocumentProtos.Entity build() {
        if (result != null && !isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return buildPartial();
      }
      // Post-parse build: missing required fields become
      // InvalidProtocolBufferException.
      private cc.factorie.protobuf.DocumentProtos.Entity buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        if (!isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return buildPartial();
      }
      public cc.factorie.protobuf.DocumentProtos.Entity buildPartial() {
        if (result == null) {
          throw new IllegalStateException(
            "build() has already been called on this Builder.");
        }
        // Identity check against the EMPTY_LIST sentinel before wrapping.
        if (result.mention_ != java.util.Collections.EMPTY_LIST) {
          result.mention_ =
            java.util.Collections.unmodifiableList(result.mention_);
        }
        cc.factorie.protobuf.DocumentProtos.Entity returnMe = result;
        result = null;
        return returnMe;
      }
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof cc.factorie.protobuf.DocumentProtos.Entity) {
          return mergeFrom((cc.factorie.protobuf.DocumentProtos.Entity)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      public Builder mergeFrom(cc.factorie.protobuf.DocumentProtos.Entity other) {
        if (other == cc.factorie.protobuf.DocumentProtos.Entity.getDefaultInstance()) return this;
        if (other.hasGuid()) {
          setGuid(other.getGuid());
        }
        if (other.hasName()) {
          setName(other.getName());
        }
        if (other.hasType()) {
          setType(other.getType());
        }
        if (other.hasPred()) {
          setPred(other.getPred());
        }
        if (!other.mention_.isEmpty()) {
          if (result.mention_.isEmpty()) {
            result.mention_ = new java.util.ArrayList<cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef>();
          }
          result.mention_.addAll(other.mention_);
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      // Wire-format parse loop; tag 0 means end of input, unknown tags are
      // kept in unknownFields.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                return this;
              }
              break;
            }
            case 10: {  // guid: field 1, wire type 2
              setGuid(input.readString());
              break;
            }
            case 18: {  // name: field 2, wire type 2
              setName(input.readString());
              break;
            }
            case 26: {  // type: field 3, wire type 2
              setType(input.readString());
              break;
            }
            case 34: {  // pred: field 4, wire type 2
              setPred(input.readString());
              break;
            }
            case 42: {  // mention: field 5, length-delimited sub-message
              cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef.Builder subBuilder = cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef.newBuilder();
              input.readMessage(subBuilder, extensionRegistry);
              addMention(subBuilder.buildPartial());
              break;
            }
          }
        }
      }
      // required string guid = 1;
      public boolean hasGuid() {
        return result.hasGuid();
      }
      public java.lang.String getGuid() {
        return result.getGuid();
      }
      public Builder setGuid(java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        result.hasGuid = true;
        result.guid_ = value;
        return this;
      }
      public Builder clearGuid() {
        result.hasGuid = false;
        result.guid_ = getDefaultInstance().getGuid();
        return this;
      }
      // optional string name = 2;
      public boolean hasName() {
        return result.hasName();
      }
      public java.lang.String getName() {
        return result.getName();
      }
      public Builder setName(java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        result.hasName = true;
        result.name_ = value;
        return this;
      }
      public Builder clearName() {
        result.hasName = false;
        result.name_ = getDefaultInstance().getName();
        return this;
      }
      // optional string type = 3;
      public boolean hasType() {
        return result.hasType();
      }
      public java.lang.String getType() {
        return result.getType();
      }
      public Builder setType(java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        result.hasType = true;
        result.type_ = value;
        return this;
      }
      public Builder clearType() {
        result.hasType = false;
        result.type_ = getDefaultInstance().getType();
        return this;
      }
      // optional string pred = 4;
      public boolean hasPred() {
        return result.hasPred();
      }
      public java.lang.String getPred() {
        return result.getPred();
      }
      public Builder setPred(java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        result.hasPred = true;
        result.pred_ = value;
        return this;
      }
      public Builder clearPred() {
        result.hasPred = false;
        result.pred_ = getDefaultInstance().getPred();
        return this;
      }
      // repeated .distant.protobuf.Entity.EntityMentionRef mention = 5;
      public java.util.List<cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef> getMentionList() {
        return java.util.Collections.unmodifiableList(result.mention_);
      }
      public int getMentionCount() {
        return result.getMentionCount();
      }
      public cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef getMention(int index) {
        return result.getMention(index);
      }
      public Builder setMention(int index, cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef value) {
        if (value == null) {
          throw new NullPointerException();
        }
        result.mention_.set(index, value);
        return this;
      }
      public Builder setMention(int index, cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef.Builder builderForValue) {
        result.mention_.set(index, builderForValue.build());
        return this;
      }
      public Builder addMention(cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef value) {
        if (value == null) {
          throw new NullPointerException();
        }
        // Lazily replace the emptyList() sentinel with a mutable list.
        if (result.mention_.isEmpty()) {
          result.mention_ = new java.util.ArrayList<cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef>();
        }
        result.mention_.add(value);
        return this;
      }
      public Builder addMention(cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef.Builder builderForValue) {
        if (result.mention_.isEmpty()) {
          result.mention_ = new java.util.ArrayList<cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef>();
        }
        result.mention_.add(builderForValue.build());
        return this;
      }
      public Builder addAllMention(
          java.lang.Iterable<? extends cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef> values) {
        if (result.mention_.isEmpty()) {
          result.mention_ = new java.util.ArrayList<cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef>();
        }
        super.addAll(values, result.mention_);
        return this;
      }
      public Builder clearMention() {
        result.mention_ = java.util.Collections.emptyList();
        return this;
      }
      // @@protoc_insertion_point(builder_scope:distant.protobuf.Entity)
    }
    static {
      defaultInstance = new Entity(true);
      cc.factorie.protobuf.DocumentProtos.internalForceInit();
      defaultInstance.initFields();
    }
    // @@protoc_insertion_point(class_scope:distant.protobuf.Entity)
  }
  // Descriptor and FieldAccessorTable holders for each generated message
  // type; assigned during static initialization of this outer class (the
  // block below that parses the embedded descriptor data).
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_distant_protobuf_Document_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_distant_protobuf_Document_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_distant_protobuf_Document_Sentence_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_distant_protobuf_Document_Sentence_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_distant_protobuf_Document_Token_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_distant_protobuf_Document_Token_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_distant_protobuf_Document_Mention_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_distant_protobuf_Document_Mention_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_distant_protobuf_Document_DepTree_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_distant_protobuf_Document_DepTree_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_distant_protobuf_Document_RelationMention_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_distant_protobuf_Document_RelationMention_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_distant_protobuf_Relation_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_distant_protobuf_Relation_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_distant_protobuf_Relation_RelationMentionRef_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_distant_protobuf_Relation_RelationMentionRef_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_distant_protobuf_Entity_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_distant_protobuf_Entity_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_distant_protobuf_Entity_EntityMentionRef_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_distant_protobuf_Entity_EntityMentionRef_fieldAccessorTable;
  // Accessor for the parsed FileDescriptor of Document.proto.
  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
static {
java.lang.String[] descriptorData = {
"\n\016Document.proto\022\020distant.protobuf\"\207\004\n\010D" +
"ocument\022\020\n\010filename\030\001 \002(\t\0226\n\tsentences\030\002" +
" \003(\0132#.distant.protobuf.Document.Sentenc" +
"e\032\247\001\n\010Sentence\0220\n\006tokens\030\001 \003(\0132 .distant" +
".protobuf.Document.Token\0224\n\010mentions\030\002 \003" +
"(\0132\".distant.protobuf.Document.Mention\0223" +
"\n\007depTree\030\003 \001(\0132\".distant.protobuf.Docum" +
"ent.DepTree\032/\n\005Token\022\014\n\004word\030\001 \002(\t\022\013\n\003ta" +
"g\030\002 \001(\t\022\013\n\003ner\030\003 \001(\t\032R\n\007Mention\022\n\n\002id\030\001 " +
"\002(\005\022\022\n\nentityGuid\030\002 \001(\t\022\014\n\004from\030\003 \002(\005\022\n\n",
"\002to\030\004 \002(\005\022\r\n\005label\030\005 \002(\t\0326\n\007DepTree\022\014\n\004r" +
"oot\030\001 \002(\005\022\014\n\004head\030\002 \003(\005\022\017\n\007relType\030\003 \003(\t" +
"\032J\n\017RelationMention\022\n\n\002id\030\001 \002(\005\022\016\n\006sourc" +
"e\030\002 \002(\005\022\014\n\004dest\030\003 \002(\005\022\r\n\005label\030\004 \002(\t\"\356\001\n" +
"\010Relation\022\022\n\nsourceGuid\030\001 \002(\t\022\020\n\010destGui" +
"d\030\002 \002(\t\022\017\n\007relType\030\003 \002(\t\022>\n\007mention\030\004 \003(" +
"\0132-.distant.protobuf.Relation.RelationMe" +
"ntionRef\032k\n\022RelationMentionRef\022\020\n\010filena" +
"me\030\001 \002(\t\022\020\n\010sourceId\030\002 \002(\005\022\016\n\006destId\030\003 \002" +
"(\005\022\017\n\007feature\030\004 \003(\t\022\020\n\010sentence\030\005 \001(\t\"\277\001",
"\n\006Entity\022\014\n\004guid\030\001 \002(\t\022\014\n\004name\030\002 \001(\t\022\014\n\004" +
"type\030\003 \001(\t\022\014\n\004pred\030\004 \001(\t\022:\n\007mention\030\005 \003(" +
"\0132).distant.protobuf.Entity.EntityMentio" +
"nRef\032A\n\020EntityMentionRef\022\020\n\010filename\030\001 \002" +
"(\t\022\n\n\002id\030\002 \002(\005\022\017\n\007feature\030\003 \003(\tB&\n\024cc.fa" +
"ctorie.protobufB\016DocumentProtos"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_distant_protobuf_Document_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_distant_protobuf_Document_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_distant_protobuf_Document_descriptor,
new java.lang.String[] { "Filename", "Sentences", },
cc.factorie.protobuf.DocumentProtos.Document.class,
cc.factorie.protobuf.DocumentProtos.Document.Builder.class);
internal_static_distant_protobuf_Document_Sentence_descriptor =
internal_static_distant_protobuf_Document_descriptor.getNestedTypes().get(0);
internal_static_distant_protobuf_Document_Sentence_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_distant_protobuf_Document_Sentence_descriptor,
new java.lang.String[] { "Tokens", "Mentions", "DepTree", },
cc.factorie.protobuf.DocumentProtos.Document.Sentence.class,
cc.factorie.protobuf.DocumentProtos.Document.Sentence.Builder.class);
internal_static_distant_protobuf_Document_Token_descriptor =
internal_static_distant_protobuf_Document_descriptor.getNestedTypes().get(1);
internal_static_distant_protobuf_Document_Token_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_distant_protobuf_Document_Token_descriptor,
new java.lang.String[] { "Word", "Tag", "Ner", },
cc.factorie.protobuf.DocumentProtos.Document.Token.class,
cc.factorie.protobuf.DocumentProtos.Document.Token.Builder.class);
internal_static_distant_protobuf_Document_Mention_descriptor =
internal_static_distant_protobuf_Document_descriptor.getNestedTypes().get(2);
internal_static_distant_protobuf_Document_Mention_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_distant_protobuf_Document_Mention_descriptor,
new java.lang.String[] { "Id", "EntityGuid", "From", "To", "Label", },
cc.factorie.protobuf.DocumentProtos.Document.Mention.class,
cc.factorie.protobuf.DocumentProtos.Document.Mention.Builder.class);
internal_static_distant_protobuf_Document_DepTree_descriptor =
internal_static_distant_protobuf_Document_descriptor.getNestedTypes().get(3);
internal_static_distant_protobuf_Document_DepTree_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_distant_protobuf_Document_DepTree_descriptor,
new java.lang.String[] { "Root", "Head", "RelType", },
cc.factorie.protobuf.DocumentProtos.Document.DepTree.class,
cc.factorie.protobuf.DocumentProtos.Document.DepTree.Builder.class);
internal_static_distant_protobuf_Document_RelationMention_descriptor =
internal_static_distant_protobuf_Document_descriptor.getNestedTypes().get(4);
internal_static_distant_protobuf_Document_RelationMention_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_distant_protobuf_Document_RelationMention_descriptor,
new java.lang.String[] { "Id", "Source", "Dest", "Label", },
cc.factorie.protobuf.DocumentProtos.Document.RelationMention.class,
cc.factorie.protobuf.DocumentProtos.Document.RelationMention.Builder.class);
internal_static_distant_protobuf_Relation_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_distant_protobuf_Relation_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_distant_protobuf_Relation_descriptor,
new java.lang.String[] { "SourceGuid", "DestGuid", "RelType", "Mention", },
cc.factorie.protobuf.DocumentProtos.Relation.class,
cc.factorie.protobuf.DocumentProtos.Relation.Builder.class);
internal_static_distant_protobuf_Relation_RelationMentionRef_descriptor =
internal_static_distant_protobuf_Relation_descriptor.getNestedTypes().get(0);
internal_static_distant_protobuf_Relation_RelationMentionRef_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_distant_protobuf_Relation_RelationMentionRef_descriptor,
new java.lang.String[] { "Filename", "SourceId", "DestId", "Feature", "Sentence", },
cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef.class,
cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef.Builder.class);
internal_static_distant_protobuf_Entity_descriptor =
getDescriptor().getMessageTypes().get(2);
internal_static_distant_protobuf_Entity_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_distant_protobuf_Entity_descriptor,
new java.lang.String[] { "Guid", "Name", "Type", "Pred", "Mention", },
cc.factorie.protobuf.DocumentProtos.Entity.class,
cc.factorie.protobuf.DocumentProtos.Entity.Builder.class);
internal_static_distant_protobuf_Entity_EntityMentionRef_descriptor =
internal_static_distant_protobuf_Entity_descriptor.getNestedTypes().get(0);
internal_static_distant_protobuf_Entity_EntityMentionRef_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_distant_protobuf_Entity_EntityMentionRef_descriptor,
new java.lang.String[] { "Filename", "Id", "Feature", },
cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef.class,
cc.factorie.protobuf.DocumentProtos.Entity.EntityMentionRef.Builder.class);
return null;
}
};
com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
}, assigner);
}
public static void internalForceInit() {}
// @@protoc_insertion_point(outer_class_scope)
}
| 177,913 | 37.710618 | 176 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/multir/src/edu/uw/cs/multir/learning/algorithm/AveragedPerceptron.java | package edu.uw.cs.multir.learning.algorithm;
import java.util.Random;
import edu.uw.cs.multir.learning.data.Dataset;
import edu.uw.cs.multir.learning.data.MILDocument;
import edu.uw.cs.multir.util.DenseVector;
import edu.uw.cs.multir.util.SparseBinaryVector;
/**
 * Multi-instance multi-label perceptron trainer with (optional) parameter
 * averaging.  Each pass decodes every document with the current weights
 * ({@link FullInference}) and, when an update is warranted, with the
 * document's gold relation set enforced ({@link ConditionalInference});
 * disagreeing per-mention assignments trigger perceptron updates on the
 * per-relation weight vectors.
 */
public class AveragedPerceptron {

	/** Number of passes over the training data. */
	public int maxIterations = 50;
	/** If true, return the lazily maintained average of the weight iterates. */
	public boolean computeAvgParameters = true;
	/** If true, run the conditional decode and update even when the predicted Y already equals doc.Y. */
	public boolean updateOnTrueY = true;
	/** Magnitude of each perceptron update step. */
	public double delta = 1;

	private Scorer scorer;
	private Model model;
	private Random random;  // drives the per-pass shuffle

	public AveragedPerceptron(Model model, Random random) {
		scorer = new Scorer();
		this.model = model;
		this.random = random;
	}

	// the following two are actually not storing weights:
	// the first is storing the iteration in which the average weights were
	// last updated, and the other is storing the next update value
	private Parameters avgParamsLastUpdatesIter;
	private Parameters avgParamsLastUpdates;

	private Parameters avgParameters;   // running (lazily updated) average
	private Parameters iterParameters;  // current, non-averaged weights

	/**
	 * Runs {@link #maxIterations} passes over {@code trainingData} and returns
	 * the learned parameters: the averaged weights when
	 * {@link #computeAvgParameters} is set, otherwise the final iterate.
	 */
	public Parameters train(Dataset trainingData) {
		if (computeAvgParameters) {
			avgParameters = new Parameters();
			avgParameters.model = model;
			avgParameters.init();
			avgParamsLastUpdatesIter = new Parameters();
			avgParamsLastUpdates = new Parameters();
			avgParamsLastUpdatesIter.model = avgParamsLastUpdates.model = model;
			avgParamsLastUpdatesIter.init();
			avgParamsLastUpdates.init();
		}
		iterParameters = new Parameters();
		iterParameters.model = model;
		iterParameters.init();
		for (int i = 0; i < maxIterations; i++)
			trainingIteration(i, trainingData);
		// flush contributions that are still pending in the lazy average
		if (computeAvgParameters) finalizeRel();
		return (computeAvgParameters) ? avgParameters : iterParameters;
	}

	// counts processed documents across all passes; serves as the "time" axis
	// of the lazy averaging scheme
	int avgIteration = 0;

	/** One shuffled pass over the training data. */
	public void trainingIteration(int iteration, Dataset trainingData) {
		System.out.println("iteration " + iteration);
		MILDocument doc = new MILDocument();
		trainingData.shuffle(random);
		trainingData.reset();
		while (trainingData.next(doc)) {
			// compute most likely label under current parameters
			Parse predictedParse = FullInference.infer(doc, scorer,
					iterParameters);
			if (updateOnTrueY || !YsAgree(predictedParse.Y, doc.Y)) {
				// if this is the first avgIteration, then we need to initialize
				// the lastUpdate vector
				if (computeAvgParameters && avgIteration == 0)
					avgParamsLastUpdates.sum(iterParameters, 1.0f);
				Parse trueParse = ConditionalInference.infer(doc, scorer,
						iterParameters);
				update(predictedParse, trueParse);
			}
			if (computeAvgParameters) avgIteration++;
		}
	}

	/** True iff the two relation-id arrays are element-wise equal. */
	private boolean YsAgree(int[] y1, int[] y2) {
		if (y1.length != y2.length)
			return false;
		for (int i = 0; i < y1.length; i++)
			if (y1[i] != y2[i])
				return false;
		return true;
	}

	// a bit dangerous, since scorer.setDocument is called only inside inference
	/**
	 * Perceptron update: for every mention whose predicted relation differs
	 * from the conditional ("true") one, add delta times the mention features
	 * to the true relation's weights and subtract them from the predicted
	 * relation's weights.
	 */
	public void update(Parse pred, Parse tru) {
		int numMentions = tru.Z.length;
		// iterate over mentions
		for (int m = 0; m < numMentions; m++) {
			int truRel = tru.Z[m];
			int predRel = pred.Z[m];
			if (truRel != predRel) {
				SparseBinaryVector v1a = scorer.getMentionRelationFeatures(
						tru.doc, m, truRel);
				updateRel(truRel, v1a, delta, computeAvgParameters);
				SparseBinaryVector v2a = scorer.getMentionRelationFeatures(
						tru.doc, m, predRel);
				updateRel(predRel, v2a, -delta, computeAvgParameters);
			}
		}
	}

	/**
	 * Applies a sparse update to relation {@code toState}'s weight vector.
	 * When averaging, each touched weight's previous value is first folded
	 * into the running average, weighted by the number of documents since its
	 * last change (the standard lazy-update trick for averaged perceptrons).
	 */
	private void updateRel(int toState, SparseBinaryVector features,
			double delta, boolean useIterAverage) {
		iterParameters.relParameters[toState].addSparse(features, delta);
		if (useIterAverage) {
			DenseVector lastUpdatesIter = (DenseVector) avgParamsLastUpdatesIter.relParameters[toState];
			DenseVector lastUpdates = (DenseVector) avgParamsLastUpdates.relParameters[toState];
			DenseVector avg = (DenseVector) avgParameters.relParameters[toState];
			DenseVector iter = (DenseVector) iterParameters.relParameters[toState];
			for (int j = 0; j < features.num; j++) {
				int id = features.ids[j];
				if (lastUpdates.vals[id] != 0)
					avg.vals[id] += (avgIteration - lastUpdatesIter.vals[id])
							* lastUpdates.vals[id];
				lastUpdatesIter.vals[id] = avgIteration;
				lastUpdates.vals[id] = iter.vals[id];
			}
		}
	}

	/**
	 * Flushes all still-pending lazy contributions into the average; called
	 * once after the final training pass.
	 */
	private void finalizeRel() {
		for (int s = 0; s < model.numRelations; s++) {
			DenseVector lastUpdatesIter = (DenseVector) avgParamsLastUpdatesIter.relParameters[s];
			DenseVector lastUpdates = (DenseVector) avgParamsLastUpdates.relParameters[s];
			DenseVector avg = (DenseVector) avgParameters.relParameters[s];
			for (int id = 0; id < avg.vals.length; id++) {
				if (lastUpdates.vals[id] != 0) {
					avg.vals[id] += (avgIteration - lastUpdatesIter.vals[id])
							* lastUpdates.vals[id];
					lastUpdatesIter.vals[id] = avgIteration;
				}
			}
		}
	}
}
| 4,782 | 28.89375 | 95 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/multir/src/edu/uw/cs/multir/learning/algorithm/ConditionalInference.java | package edu.uw.cs.multir.learning.algorithm;
import java.util.Arrays;
import java.util.Comparator;
import edu.uw.cs.multir.learning.data.MILDocument;
/**
 * Gold-constrained ("conditional") decoding: assigns each mention of a
 * document to one of the document's gold relations (doc.Y) or to NA, treating
 * the problem as a bipartite matching between mentions and relations and
 * solving it greedily over score-sorted edges.
 *
 * Fix: the three anonymous comparators never returned 0 for equal scores,
 * violating the {@link Comparator} contract (TimSort can reject such
 * comparators with "Comparison method violates its general contract").
 * They now use {@link Double#compare}; ties are ordering-insignificant for
 * this greedy algorithm.
 */
public class ConditionalInference {

	/**
	 * Decodes {@code doc} under {@code params} such that the returned parse
	 * uses exactly the gold relation set doc.Y.
	 */
	public static Parse infer(MILDocument doc,
			Scorer parseScorer, Parameters params) {
		int numMentions = doc.numMentions;
		Parse parse = new Parse();
		parse.doc = doc;
		parseScorer.setParameters(params);
		// score every relation for every mention
		Viterbi viterbi = new Viterbi(params.model, parseScorer);
		Viterbi.Parse[] vp = new Viterbi.Parse[numMentions];
		for (int m = 0; m < numMentions; m++) {
			vp[m] = viterbi.parse(doc, m);
		}
		// each mention can be linked to one of the doc relations or NA
		// (local relation index 0 = NA, index y>0 maps to doc.Y[y-1])
		int numRelevantRelations = doc.Y.length + 1;
		// solve bipartite graph matching problem
		Edge[] es = new Edge[numMentions * numRelevantRelations];
		for (int m = 0; m < numMentions; m++) {
			// edge from m to NA
			es[numRelevantRelations*m + 0] =
				new Edge(m, 0, vp[m].scores[0]);
			// edge from m to any other relation
			for (int y = 1; y < numRelevantRelations; y++)
				es[numRelevantRelations*m + y] =
					new Edge(m, y, vp[m].scores[doc.Y[y-1]]);
		}
		// NOTE: strictly speaking, no sorting is necessary
		// in the following steps; however, we do sorting
		// for easier code maintainability
		// array to hold solution (mapping from z's to y's); -1 = unassigned
		int[] z = new int[numMentions];
		for (int i=0; i < numMentions; i++) z[i] = -1;
		// there is a special case where there are more target
		// relations than there are mentions; in this case we
		// only add the highest scoring edges
		if (numMentions < doc.Y.length) {
			// sort edges by decreasing score
			Arrays.sort(es, new Comparator<Edge>() {
				public int compare(Edge e1, Edge e2) {
					return Double.compare(e2.score, e1.score);
				}});
			boolean[] ysCovered = new boolean[numRelevantRelations];
			// greedily give each relation its best still-free mention
			for (int ei = 0; ei < es.length; ei++) {
				Edge e = es[ei];
				if (e.y == 0) continue;
				if (z[e.m] < 0 && !ysCovered[e.y]) {
					z[e.m] = doc.Y[e.y-1];
					ysCovered[e.y] = true;
				}
			}
		} else {
			// more mentions than target relations: enforce all Ys
			// sort by y, then decreasing score
			Arrays.sort(es, new Comparator<Edge>() {
				public int compare(Edge e1, Edge e2) {
					int c = e1.y - e2.y;  // y values are small; no overflow risk
					if (c != 0) return c;
					return Double.compare(e2.score, e1.score);
				}});
			// note that after this step the "es" array has to
			// be indexed differently
			// iterate over y's
			for (int y=1; y < numRelevantRelations; y++) {
				// find highest weight edge to y, from a
				// mention m which does not yet have an
				// outgoing edge
				for (int j=0; j < numMentions; j++) {
					Edge e = es[numMentions*y + j];
					if (z[e.m] < 0) {
						// we can add this edge
						z[e.m] = (y==0)? 0 : doc.Y[y-1];
						break;
					}
				}
			}
			// there might be unmapped m's
			// sort by m, then decreasing score
			Arrays.sort(es, new Comparator<Edge>() {
				public int compare(Edge e1, Edge e2) {
					int c = e1.m - e2.m;
					if (c != 0) return c;
					return Double.compare(e2.score, e1.score);
				}});
			for (int m=0; m < numMentions; m++) {
				if (z[m] < 0) {
					// unmapped mention, need to take highest score
					Edge e = es[numRelevantRelations*m];
					z[m] = e.y == 0? 0 : doc.Y[e.y-1];
				}
			}
		}
		// we can now write the results
		parse.Y = doc.Y;
		parse.Z = z;
		parse.score = 0;
		for (int i=0; i < numMentions; i++) {
			parse.score += vp[i].scores[z[i]];
		}
		return parse;
	}

	/** One candidate link between mention {@code m} and local relation index {@code y}. */
	static class Edge {
		int m;
		int y;
		double score;
		Edge(int m, int y, double score) {
			this.m = m;
			this.y = y;
			this.score = score;
		}
	}
}
| 3,843 | 25.881119 | 65 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/multir/src/edu/uw/cs/multir/learning/algorithm/FullInference.java | package edu.uw.cs.multir.learning.algorithm;
import edu.uw.cs.multir.learning.data.MILDocument;
/**
 * Unconstrained decoding: independently assigns every mention its
 * highest-scoring relation and aggregates the result to the document level.
 */
public class FullInference {

	/**
	 * Decodes {@code doc}: Z[m] is the argmax relation of mention m, Y is the
	 * set of distinct non-NA relations appearing in Z (ascending ids),
	 * scores[r] is the best score any mention obtained under relation r, and
	 * parse.score is the mean of those per-relation maxima (NA excluded).
	 */
	public static Parse infer(MILDocument doc,
			Scorer parseScorer, Parameters params) {
		Parse parse = new Parse();
		parse.doc = doc;
		parse.Z = new int[doc.numMentions];
		parseScorer.setParameters(params);
		Viterbi viterbi = new Viterbi(params.model, parseScorer);
		double[] scores = new double[params.model.numRelations];
		for (int i=0; i < scores.length; i++) scores[i] = Double.NEGATIVE_INFINITY;
		boolean[] binaryYs = new boolean[params.model.numRelations];
		int numYs = 0;
		for (int m = 0; m < doc.numMentions; m++) {
			Viterbi.Parse p = viterbi.parse(doc, m);
			parse.Z[m] = p.state;
			// state 0 is the NA label and never enters Y
			if (p.state > 0 && !binaryYs[p.state]) {
				binaryYs[p.state] = true;
				numYs++;
			}
			// track the best mention score per assigned relation
			if (p.score > scores[parse.Z[m]])
				scores[parse.Z[m]] = p.score;
		}
		// materialize Y as an ascending array of the relations seen above
		parse.Y = new int[numYs];
		int pos = 0;
		for (int i=1; i < binaryYs.length; i++)
			if (binaryYs[i]) {
				parse.Y[pos++] = i;
				if (pos == numYs) break;
			}
		parse.scores = scores;
		// It's important to ignore the _NO_RELATION_ type here, so
		// need to start at 1!
		// final value is avg of maxes
		int sumNum = 0;
		double sumSum = 0;
		for (int i=1; i < scores.length; i++)
			if (scores[i] > Double.NEGATIVE_INFINITY) {
				sumNum++; sumSum += scores[i];
			}
		if (sumNum ==0) parse.score = Double.NEGATIVE_INFINITY;
		else parse.score = sumSum / sumNum;
		return parse;
	}
}
| 1,523 | 24.4 | 77 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/multir/src/edu/uw/cs/multir/learning/algorithm/Model.java | package edu.uw.cs.multir.learning.algorithm;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
/**
 * Lightweight description of the learning problem: how many relation labels
 * exist and how large each relation's feature space is.  Instances are
 * populated either directly or via {@link #read(String)}.
 *
 * Fix: {@link #read} and {@link #write} now close their streams in a
 * {@code finally} block; the originals leaked the stream whenever parsing or
 * writing threw.
 */
public class Model {
	/** Number of relation labels. */
	public int numRelations;
	/** Feature count of each relation's weight vector, indexed by relation id. */
	public int[] numFeaturesPerRelation;

	/** Returns the feature count for relation {@code rel}. */
	public int numFeatures(int rel) {
		return numFeaturesPerRelation[rel];
	}

	// NOTE(review): neither read, written nor used inside this class;
	// presumably the id of the NA label -- confirm against callers.
	public int noRelationState;

	/**
	 * Reads the model description from {@code file} (UTF-8, one integer per
	 * line: numRelations followed by one feature count per relation).
	 *
	 * @throws IOException if the file cannot be read
	 * @throws NumberFormatException if a line is missing or not an integer
	 */
	public void read(String file) throws IOException {
		BufferedReader r = new BufferedReader(new InputStreamReader
				(new FileInputStream(file), "utf-8"));
		try {
			numRelations = Integer.parseInt(r.readLine());
			numFeaturesPerRelation = new int[numRelations];
			for (int i=0; i < numRelations; i++) {
				numFeaturesPerRelation[i] = Integer.parseInt(r.readLine());
			}
		} finally {
			// close even when a line is missing or malformed
			r.close();
		}
	}

	/**
	 * Writes the model description to {@code file} in the format accepted by
	 * {@link #read(String)}.
	 *
	 * @throws IOException if the file cannot be written
	 */
	public void write(String file) throws IOException {
		BufferedWriter w = new BufferedWriter(new OutputStreamWriter
				(new FileOutputStream(file), "utf-8"));
		try {
			w.write(numRelations + "\n");
			for (int i=0; i < numFeaturesPerRelation.length; i++)
				w.write(numFeaturesPerRelation[i] + "\n");
		} finally {
			w.close();
		}
	}
}
| 1,162 | 26.690476 | 62 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/multir/src/edu/uw/cs/multir/learning/algorithm/Parameters.java | package edu.uw.cs.multir.learning.algorithm;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import edu.uw.cs.multir.util.DenseVector;
/**
 * The learnable weights: one {@link DenseVector} per relation, plus helpers
 * for summing, resetting and (de)serializing them.
 *
 * Fix: the file-based {@link #serialize(String)} / {@link #deserialize(String)}
 * now close their streams in a {@code finally} block; the originals leaked
 * the stream on any I/O failure.
 */
public class Parameters {
	/** Per-relation weight vectors, indexed by relation id. */
	public DenseVector[] relParameters;
	/** Supplies the number of relations and per-relation feature counts. */
	public Model model;

	/**
	 * Returns v1 + factor*v2 with null treated as "absent": both null yields
	 * null, one null yields (a scaled copy of) the other, otherwise delegates
	 * to DenseVector.sum.
	 */
	private DenseVector sum(DenseVector v1, DenseVector v2, float factor) {
		if (v1 == null && v2 == null) return null;
		else if (v2 == null) return v1.copy();
		else if (v1 == null) {
			DenseVector v = v2.copy();
			v.scale(factor);
			return v;
		}
		else return v1.sum(v2, factor);
	}

	/** Adds factor * p into this parameter set, vector by vector. */
	public void sum(Parameters p, float factor) {
		for (int i=0; i < relParameters.length; i++)
			relParameters[i] = sum(relParameters[i], p.relParameters[i], factor);
	}

	/**
	 * Allocates one weight vector per relation; no-op if already allocated.
	 * The printed byte estimate uses relation 0's feature count for all
	 * relations.
	 */
	public void init() {
		if (relParameters == null) {
			relParameters = new DenseVector[model.numRelations];
			System.out.println("requesting " + (8*relParameters.length*
					(long)model.numFeaturesPerRelation[0]) + " bytes");
			for (int j=0; j < relParameters.length; j++) {
				relParameters[j] =
					new DenseVector(model.numFeatures(j));
			}
		}
	}

	/** Zeroes every allocated weight vector. */
	public void reset() {
		for (int i=0; i < relParameters.length; i++)
			if (relParameters[i] != null)
				relParameters[i].reset();
	}

	/** Writes all weight vectors to the stream, in relation order. */
	public void serialize(OutputStream os)
		throws IOException {
		DenseVector[] r = relParameters;
		for (int i=0; i < r.length; i++)
			r[i].serialize(os);
	}

	/** Reads all weight vectors from the stream (allocating them first). */
	public void deserialize(InputStream is)
		throws IOException {
		init();
		DenseVector[] r = relParameters;
		for (int i=0; i < r.length; i++)
			r[i].deserialize(is);
	}

	/** Serializes to a file; the stream is closed even on failure. */
	public void serialize(String file)
		throws IOException {
		OutputStream os = new BufferedOutputStream(new FileOutputStream(file));
		try {
			serialize(os);
		} finally {
			os.close();
		}
	}

	/** Deserializes from a file; the stream is closed even on failure. */
	public void deserialize(String file)
		throws IOException {
		InputStream is = new BufferedInputStream(new FileInputStream(file));
		try {
			deserialize(is);
		} finally {
			is.close();
		}
	}
}
| 2,037 | 23.853659 | 73 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/multir/src/edu/uw/cs/multir/learning/algorithm/Parse.java | package edu.uw.cs.multir.learning.algorithm;
import edu.uw.cs.multir.learning.data.MILDocument;
/**
 * Result of inference over one {@link MILDocument}: a relation-set Y, a
 * per-mention relation assignment Z, and associated scores.
 * FullInference fills Y with the predicted set; ConditionalInference copies
 * the document's gold set into it.
 */
public class Parse {
	public int[] Y;             // relation ids for this entity pair
	public int[] Z;             // relation id assigned to each mention
	public double score;        // overall parse score
	public MILDocument doc;     // the document this parse belongs to
	public double[] scores; // for each relation
	// NOTE(review): never assigned by the inference classes in this file --
	// confirm where (or whether) it is populated.
	public double[][] allScores;
	public Parse() {}
}
| 300 | 17.8125 | 50 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/multir/src/edu/uw/cs/multir/learning/algorithm/Scorer.java | package edu.uw.cs.multir.learning.algorithm;
import edu.uw.cs.multir.learning.data.MILDocument;
import edu.uw.cs.multir.util.DenseVector;
import edu.uw.cs.multir.util.SparseBinaryVector;
/**
 * Scores mention-relation assignments as the dot product between a mention's
 * sparse feature vector and the relation's dense weight vector.
 */
public class Scorer {

	/** Weights used for scoring; installed via {@link #setParameters}. */
	private Parameters params;

	public Scorer() {}

	/**
	 * Score of assigning relation {@code rel} to mention {@code m} of
	 * {@code doc}: the relation's weight vector dotted with the mention's
	 * features.
	 */
	public double scoreMentionRelation(MILDocument doc, int m, int rel) {
		return params.relParameters[rel].dotProduct(doc.features[m]);
	}

	/**
	 * Feature vector of mention {@code m} as used when scoring relation
	 * {@code rel}.  Currently the features do not depend on {@code rel}.
	 */
	public SparseBinaryVector getMentionRelationFeatures(MILDocument doc, int m, int rel) {
		return doc.features[m];
	}

	/** Installs the parameters used by subsequent scoring calls. */
	public void setParameters(Parameters params) {
		this.params = params;
	}
}
| 777 | 25.827586 | 88 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/multir/src/edu/uw/cs/multir/learning/algorithm/Viterbi.java | package edu.uw.cs.multir.learning.algorithm;
import edu.uw.cs.multir.learning.data.MILDocument;
/**
 * Picks the highest-scoring relation label for a single mention
 * (a one-variable "Viterbi" decode).
 */
public class Viterbi {

	private Scorer parseScorer;
	private Model model;

	public Viterbi(Model model, Scorer parseScorer) {
		this.model = model;
		this.parseScorer = parseScorer;
	}

	/**
	 * Scores every relation for mention {@code mention} of {@code doc} and
	 * returns the argmax (ties broken toward the lowest relation id)
	 * together with the full score vector.
	 */
	public Parse parse(MILDocument doc, int mention) {
		int relationCount = model.numRelations;
		double[] relationScores = new double[relationCount];
		int best = 0;
		for (int rel = 0; rel < relationCount; rel++) {
			relationScores[rel] = parseScorer.scoreMentionRelation(doc, mention, rel);
			// strict '>' keeps the first maximum (lowest-id tie-break),
			// matching the original two-pass argmax
			if (relationScores[rel] > relationScores[best])
				best = rel;
		}
		Parse result = new Parse(best, relationScores[best]);
		result.scores = relationScores;
		return result;
	}

	/** Result of decoding one mention. */
	public static class Parse {
		/** Highest-scoring relation id (the MPE assignment). */
		public int state;
		/** Score of that assignment. */
		public double score;
		/** Scores of every relation assignment, indexed by relation id. */
		public double[] scores;
		Parse(int state, double score) {
			this.state = state;
			this.score = score;
		}
	}
}
| 1,042 | 19.86 | 65 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/multir/src/edu/uw/cs/multir/learning/data/Dataset.java | package edu.uw.cs.multir.learning.data;
import java.util.Random;
/**
 * A sequential, resettable source of {@link MILDocument}s with cursor
 * semantics (see {@link MemoryDataset} for the in-memory implementation).
 */
public interface Dataset {
	/** Total number of documents in this dataset. */
	public int numDocs();
	/** Randomly reorders the documents using the given RNG. */
	public void shuffle(Random random);
	/** Returns the next document (MemoryDataset returns null when exhausted). */
	public MILDocument next();
	/** Copies the next document into {@code doc}; returns false when exhausted. */
	public boolean next(MILDocument doc);
	/** Moves the cursor back to the first document. */
	public void reset();
}
| 255 | 14.058824 | 39 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/multir/src/edu/uw/cs/multir/learning/data/MILDocument.java | package edu.uw.cs.multir.learning.data;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.EOFException;
import java.io.IOException;
import edu.uw.cs.multir.util.SparseBinaryVector;
/*
* The purpose of this data structure is to keep all relevant information for
* learning while using as little memory as possible. Using less memory helps
* keeping more records in memory at the same time, thus improving speed.
*/
/**
 * One multi-instance-learning "document": an entity pair (arg1, arg2), its
 * relation label set Y, and the feature vectors of all of its mentions.
 * Mention-parallel arrays grow on demand via {@link #setCapacity(int)}.
 */
public class MILDocument {
	// initial size of the mention-parallel arrays
	public static final int MNT_CAPACITY = 2;
	public String arg1, arg2;  // the entity pair
	// NOTE(review): serialized first in read/write; exact role not visible
	// here -- confirm against the preprocessing code.
	public int random = 0;
	// relations between arg1 and arg2, sorted by ID
	public int[] Y;
	// mentions of this entity pair
	public int numMentions = 0;
	public int[] mentionIDs;               // parallel to Z and features
	public int[] Z;                        // per-mention relation assignment
	public SparseBinaryVector[] features;  // per-mention feature vectors
	public MILDocument() {
		mentionIDs = new int[MNT_CAPACITY];
		Z = new int[MNT_CAPACITY];
		features = new SparseBinaryVector[MNT_CAPACITY];
	}
	/** Logically removes all mentions; arrays are kept for reuse. */
	public void clear() {
		numMentions = 0;
	}
	/** Resizes the mention-parallel arrays, copying existing entries. */
	public void setCapacity(int targetSize) {
		int[] newMentionIDs = new int[targetSize];
		int[] newZ = new int[targetSize];
		SparseBinaryVector[] newFeatures = new SparseBinaryVector[targetSize];
		if (numMentions > 0) {
			System.arraycopy(mentionIDs, 0, newMentionIDs, 0, numMentions);
			System.arraycopy(Z, 0, newZ, 0, numMentions);
			System.arraycopy(features, 0, newFeatures, 0, numMentions);
		}
		mentionIDs = newMentionIDs;
		Z = newZ;
		features = newFeatures;
	}
	/**
	 * Reads one document from the stream in the format produced by
	 * {@link #write(DataOutputStream)}.
	 *
	 * @return false when the stream is already at end of file
	 * @throws IOException on any other read failure
	 */
	public boolean read(DataInputStream dis) throws IOException {
		try {
			random = dis.readInt();
			arg1 = dis.readUTF();
			arg2 = dis.readUTF();
			int lenY = dis.readInt();
			Y = new int[lenY];
			for (int i=0; i < lenY; i++) Y[i] = dis.readInt();
			int numMentions = dis.readInt();
			if (numMentions > mentionIDs.length) setCapacity(numMentions);
			this.numMentions = numMentions;
			for (int i=0; i < numMentions; i++) {
				mentionIDs[i] = dis.readInt();
				Z[i] = dis.readInt();
				if (features[i] == null) features[i] = new SparseBinaryVector();
				features[i].deserialize(dis);
			}
			//arg1 = arg2 = null;
			//mentionIDs = null;
			return true;
		} catch (EOFException e) { return false; }  // clean EOF = no more documents
	}
	/**
	 * Serializes this document: random, arg1, arg2, |Y|, Y ids, numMentions,
	 * then (mentionID, Z, features) per mention -- mirrored exactly by
	 * {@link #read(DataInputStream)}.
	 */
	public void write(DataOutputStream dos) throws IOException {
		dos.writeInt(random);
		dos.writeUTF(arg1);
		dos.writeUTF(arg2);
		dos.writeInt(Y.length);
		for (int i=0; i < Y.length; i++)
			dos.writeInt(Y[i]);
		dos.writeInt(numMentions);
		for (int i=0; i < numMentions; i++) {
			dos.writeInt(mentionIDs[i]);
			dos.writeInt(Z[i]);
			features[i].serialize(dos);
		}
	}
} | 2,571 | 25.791667 | 77 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/multir/src/edu/uw/cs/multir/learning/data/MemoryDataset.java | package edu.uw.cs.multir.learning.data;
import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
/**
 * {@link Dataset} implementation that loads every {@link MILDocument} from a
 * binary file up front and serves them through a cursor.
 *
 * Fix: the loading constructor now closes its stream in a {@code finally}
 * block; the original leaked the stream whenever {@code read} threw.
 */
public class MemoryDataset implements Dataset {

	private MILDocument[] docs;
	private int cursor = 0;  // index of the next document to hand out

	public MemoryDataset() { }

	/**
	 * Loads all documents from {@code file} (the binary format read by
	 * {@link MILDocument#read}).
	 *
	 * @throws IOException if the file cannot be opened or read
	 */
	public MemoryDataset(String file)
		throws IOException {
		List<MILDocument> l = new ArrayList<MILDocument>();
		DataInputStream dis = new DataInputStream(new BufferedInputStream
				(new FileInputStream(file)));
		try {
			MILDocument d = new MILDocument();
			while (d.read(dis)) {
				l.add(d);
				d = new MILDocument();
			}
		} finally {
			dis.close();
		}
		docs = l.toArray(new MILDocument[0]);
	}

	public int numDocs() { return docs.length; }

	/** In-place Fisher-Yates shuffle of the document order. */
	public void shuffle(Random random) {
		for (int i=0; i < docs.length; i++) {
			// pick element that we want to swap with
			int e = i + random.nextInt(docs.length - i);
			MILDocument tmp = docs[e];
			docs[e] = docs[i];
			docs[i] = tmp;
		}
	}

	/** Returns the next document, or null when the cursor is exhausted. */
	public MILDocument next() {
		if (cursor < docs.length)
			return docs[cursor++];
		else return null;
	}

	/**
	 * Shallow-copies the next document's fields into {@code doc}; returns
	 * false when the cursor is exhausted.
	 */
	public boolean next(MILDocument doc) {
		if (cursor < docs.length) {
			MILDocument d = docs[cursor++];
			doc.arg1 = d.arg1;
			doc.arg2 = d.arg2;
			doc.features = d.features;
			doc.mentionIDs = d.mentionIDs;
			doc.numMentions = d.numMentions;
			doc.Y = d.Y;
			doc.Z = d.Z;
			return true;
		}
		return false;
	}

	/** Rewinds the cursor to the first document. */
	public void reset() {
		cursor = 0;
	}
} | 1,531 | 21.202899 | 67 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/multir/src/edu/uw/cs/multir/main/AggregatePrecisionRecallCurve.java | package edu.uw.cs.multir.main;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import edu.uw.cs.multir.learning.algorithm.FullInference;
import edu.uw.cs.multir.learning.algorithm.Model;
import edu.uw.cs.multir.learning.algorithm.Parameters;
import edu.uw.cs.multir.learning.algorithm.Parse;
import edu.uw.cs.multir.learning.algorithm.Scorer;
import edu.uw.cs.multir.learning.data.Dataset;
import edu.uw.cs.multir.learning.data.MILDocument;
import edu.uw.cs.multir.learning.data.MemoryDataset;
public class AggregatePrecisionRecallCurve {
public static void run(String dir)
throws IOException {
Model model = new Model();
model.read(dir + File.separatorChar + "model");
Parameters params = new Parameters();
params.model = model;
params.deserialize(dir + File.separatorChar + "params");
Dataset test = new MemoryDataset(dir + File.separatorChar + "test");
eval(test, params, System.out);
}
public static void eval(Dataset test, Parameters params,
PrintStream ps) throws IOException {
System.out.println("eval");
Scorer scorer = new Scorer();
// this could also be a file
List<Prediction> predictions = new ArrayList<Prediction>();
MILDocument doc = new MILDocument();
int numRelationInst = 0;
test.reset();
while (test.next(doc)) {
//numRelationInst += doc.Y.length;
Parse parse = FullInference.infer(doc, scorer, params);
int[] Yt = doc.Y;
int[] Yp = parse.Y;
// NA is empty array
if (Yt.length == 0 && Yp.length == 0) continue;
// true negative, we ignore that
boolean[] binaryYt = new boolean[100];
boolean[] binaryYp = new boolean[100];
for (int i=0; i < Yt.length; i++)
binaryYt[Yt[i]] = true;
for (int i=0; i < Yp.length; i++)
binaryYp[Yp[i]] = true;
for (int i=1; i < binaryYt.length; i++) {
if (binaryYt[i] || binaryYp[i]) {
predictions.add
(new Prediction(i, binaryYt[i], binaryYp[i], parse.scores[i], doc, parse));
}
}
for (int i=1; i < binaryYt.length; i++)
if (binaryYt[i]) numRelationInst++;
}
Collections.sort(predictions, new Comparator<Prediction>() {
public int compare(Prediction p1, Prediction p2) {
if (p1.score > p2.score) return -1;
else return +1;
} });
PrecisionRecallTester prt = new PrecisionRecallTester();
prt.reset();
double prevRec = -1, prevPre = -1;
for (int i=0; i < predictions.size(); i++) {
Prediction p = predictions.get(i);
prt.handle(p.rel, p.predRel, p.trueRel, p.score);
prt.numRelations = numRelationInst;
double recall = prt.recall();
double precision = prt.precision();
if (recall != prevRec || precision != prevPre) {
ps.println(recall + "\t" + precision);
prevRec = recall;
prevPre = precision;
}
}
}
	/**
	 * One (entity pair, relation) decision recorded during evaluation,
	 * together with its gold label and the model's confidence; instances are
	 * later sorted by score to build the precision/recall curve.
	 */
	static class Prediction {
		// relation ID this prediction refers to
		int rel;
		// true if the gold annotation contains this relation
		boolean trueRel;
		// true if the model predicted this relation
		boolean predRel;
		// model confidence for this relation (parse.scores[rel])
		double score;
		// document and parse the prediction came from
		MILDocument doc;
		Parse parse;
		Prediction(int rel, boolean trueRel, boolean predRel, double score,
				MILDocument doc, Parse parse) {
			this.rel = rel;
			this.trueRel = trueRel;
			this.predRel = predRel;
			this.score = score;
			this.doc = doc;
			this.parse = parse;
		}
	}
	/**
	 * Accumulator for precision/recall computation.  "Predictions" counts
	 * positive labels emitted by the model, "relations" counts positive gold
	 * labels, and "correct" counts their overlap.
	 */
	static class PrecisionRecallTester {
		public double numCorrect, numPredictions, numRelations;
		// Vector form: updates the counters for every label index except 0
		// (the NA label).  The tokens and score arguments are not used here.
		public void handle(String[] tokens, boolean[] predictedLabels,
				boolean[] trueLabels, double score) {
			boolean[] p = predictedLabels;
			boolean[] t = trueLabels;
			for (int i=1; i < p.length; i++) {
				if (p[i] && !t[i]) numPredictions++;
				else if (!p[i] && t[i]) numRelations++;
				else if (p[i] && t[i]) {
					numCorrect++;
					numPredictions++;
					numRelations++;
				}
			}
		}
		// Scalar form: updates the counters for a single
		// (predicted, gold) label pair.  rel and score are not used here.
		public void handle(int rel, boolean p, boolean t, double score) {
			if (p && !t) numPredictions++;
			else if (!p && t) numRelations++;
			else if (p && t) {
				numCorrect++;
				numPredictions++;
				numRelations++;
			}
		}
		// Resets all counters to zero.
		public void reset() { numCorrect = numPredictions = numRelations = 0; }
		// Fraction of positive predictions that were correct; defined as 1
		// when nothing has been predicted yet.
		public double precision() {
			if (numPredictions == 0) return 1;
			return numCorrect / numRelations == 0 ? 1 : numCorrect / numPredictions;
		}
		// Fraction of gold relations recovered.  NOTE(review): returns NaN
		// while numRelations is 0 -- the caller in eval() overwrites
		// numRelations before reading this; confirm for other callers.
		public double recall() {
			return numCorrect / numRelations;
		}
	}
}
| 4,286 | 26.480769 | 81 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/multir/src/edu/uw/cs/multir/main/Main.java | package edu.uw.cs.multir.main;
import java.io.IOException;
public class Main {
	/**
	 * Command-line entry point: dispatches to the preprocessing, training,
	 * testing and evaluation tools based on the first argument.  Every
	 * sub-command parses its own flags and calls the matching printUsage*
	 * helper (which exits the JVM) on any malformed or missing option.
	 *
	 * @param args command name followed by its options (see printUsage)
	 * @throws IOException if a sub-command fails to read or write its files
	 */
	public static void main(String[] args) throws IOException {
		if (args.length == 0)
			printUsage();
		String c = args[0];
		if (c.equals("preprocess")) {
			// preprocess -trainFile .. -testFile .. -outDir ..
			String trainFile = null;
			String testFile = null;
			String outDir = null;
			for (int i=1; i < args.length; i++) {
				if (args[i].equals("-trainFile") && i+1 < args.length)
					trainFile = args[++i];
				else if (args[i].equals("-testFile") && i+1 < args.length)
					testFile = args[++i];
				else if (args[i].equals("-outDir") && i+1 < args.length)
					outDir = args[++i];
				else printUsagePreprocess();
			}
			if (trainFile == null || testFile == null || outDir == null)
				printUsagePreprocess();
			Preprocess.preprocess(trainFile, testFile, outDir);
		} else if (c.equals("train")) {
			// train -dir ..
			String dir = null;
			for (int i=1; i < args.length; i++) {
				if (args[i].equals("-dir") && i+1 < args.length)
					dir = args[++i];
				else printUsageTrain();
			}
			if (dir == null) printUsageTrain();
			Train.train(dir);
		} else if (c.equals("test")) {
			// test -dir ..
			String dir = null;
			for (int i=1; i < args.length; i++) {
				if (args[i].equals("-dir") && i+1 < args.length)
					dir = args[++i];
				else printUsageTest();
			}
			if (dir == null) printUsageTest();
			Test.test(dir);
		} else if (c.equals("results")) {
			// results -dir ..  (per-mention predictions file)
			String dir = null;
			for (int i=1; i < args.length; i++) {
				if (args[i].equals("-dir") && i+1 < args.length)
					dir = args[++i];
				else printUsageResults();
			}
			if (dir == null) printUsageResults();
			ResultWriter.write(dir);
		} else if (c.equals("aggPR")) {
			// aggPR -dir ..  (aggregate precision/recall curve)
			String dir = null;
			for (int i=1; i < args.length; i++) {
				if (args[i].equals("-dir") && i+1 < args.length)
					dir = args[++i];
				else printUsageAggPR();
			}
			if (dir == null) printUsageAggPR();
			AggregatePrecisionRecallCurve.run(dir);
		} else if (c.equals("senPR")) {
			// senPR -labelsFile .. -resultsFile ..  (sentential PR curve)
			String labelsFile = null;
			String resultsFile = null;
			for (int i=1; i < args.length; i++) {
				if (args[i].equals("-labelsFile") && i+1 < args.length)
					labelsFile = args[++i];
				else if (args[i].equals("-resultsFile") && i+1 < args.length)
					resultsFile = args[++i];
				else printUsageSenPR();
			}
			if (labelsFile == null || resultsFile == null)
				printUsageSenPR();
			SententialPrecisionRecallCurve.run(labelsFile, resultsFile);
		} else if (c.equals("senRel")) {
			// senRel -labelsFile .. -resultsFile ..  (per-relation PR)
			String labelsFile = null;
			String resultsFile = null;
			for (int i=1; i < args.length; i++) {
				if (args[i].equals("-labelsFile") && i+1 < args.length)
					labelsFile = args[++i];
				else if (args[i].equals("-resultsFile") && i+1 < args.length)
					resultsFile = args[++i];
				else printUsageSenRel();
			}
			if (labelsFile == null || resultsFile == null)
				printUsageSenRel();
			SententialPrecisionRecallByRelation.run(labelsFile, resultsFile);
		} else {
			printUsage();
		}
	}
	// Each printUsage* helper prints the expected syntax to stdout and
	// terminates the JVM with exit code 1.
	private static void printUsage() {
		System.out.println("Usage: Main command params \n" +
				" where command can take the following values \n" +
				" {preprocess,train,test,results,aggPR,senPR,senRel}");
		System.exit(1);
	}
	private static void printUsagePreprocess() {
		System.out.println("Usage: Main preprocess -trainFile .. -testFile .. -outDir ..");
		System.exit(1);
	}
	private static void printUsageTrain() {
		System.out.println("Usage: Main train -dir ..");
		System.exit(1);
	}
	private static void printUsageTest() {
		System.out.println("Usage: Main test -dir ..");
		System.exit(1);
	}
	private static void printUsageResults() {
		System.out.println("Usage: Main results -dir ..");
		System.exit(1);
	}
	private static void printUsageAggPR() {
		System.out.println("Usage: Main aggPR -dir ..");
		System.exit(1);
	}
	private static void printUsageSenPR() {
		System.out.println("Usage: Main senPR -labelsFile .. -resultsFile ..");
		System.exit(1);
	}
	private static void printUsageSenRel() {
		System.out.println("Usage: Main senRel -labelsFile .. -resultsFile ..");
		System.exit(1);
	}
}
| 4,111 | 27.755245 | 85 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/multir/src/edu/uw/cs/multir/main/Preprocess.java | package edu.uw.cs.multir.main;
import java.io.File;
import java.io.IOException;
import edu.uw.cs.multir.learning.algorithm.Model;
import edu.uw.cs.multir.preprocess.ConvertProtobufToMILDocument;
import edu.uw.cs.multir.preprocess.Mappings;
public class Preprocess {
	/**
	 * Converts the protobuf train/test files into the binary MILDocument
	 * format and writes an initial model description into {@code outDir}.
	 * The training pass builds the feature/relation mappings; the test pass
	 * reuses them (strings absent from the mapping are dropped).
	 *
	 * @param trainFile gzipped protobuf training data
	 * @param testFile  gzipped protobuf test data
	 * @param outDir    output directory for "train", "test", "mapping", "model"
	 * @throws IOException if any file cannot be read or written
	 */
	public static void preprocess(String trainFile, String testFile, String outDir)
	throws IOException {
		String mappingFile = outDir + File.separatorChar + "mapping";
		String modelFile = outDir + File.separatorChar + "model";
		// Pass 1: convert training data, creating new feature/relation IDs.
		String trainOut = outDir + File.separatorChar + "train";
		ConvertProtobufToMILDocument.convert(trainFile, trainOut, mappingFile, true, true);
		// Pass 2: convert test data against the now-frozen mapping.
		String testOut = outDir + File.separatorChar + "test";
		ConvertProtobufToMILDocument.convert(testFile, testOut, mappingFile, false, false);
		// Describe the model dimensions implied by the mapping just written.
		Mappings mappings = new Mappings();
		mappings.read(mappingFile);
		Model model = new Model();
		model.numRelations = mappings.numRelations();
		model.numFeaturesPerRelation = new int[model.numRelations];
		for (int r = 0; r < model.numRelations; r++)
			model.numFeaturesPerRelation[r] = mappings.numFeatures();
		model.write(modelFile);
	}
}
| 1,151 | 27.8 | 86 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/multir/src/edu/uw/cs/multir/main/ResultWriter.java | package edu.uw.cs.multir.main;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.util.HashMap;
import java.util.Map;
import edu.uw.cs.multir.learning.algorithm.FullInference;
import edu.uw.cs.multir.learning.algorithm.Model;
import edu.uw.cs.multir.learning.algorithm.Parameters;
import edu.uw.cs.multir.learning.algorithm.Parse;
import edu.uw.cs.multir.learning.algorithm.Scorer;
import edu.uw.cs.multir.learning.data.Dataset;
import edu.uw.cs.multir.learning.data.MILDocument;
import edu.uw.cs.multir.learning.data.MemoryDataset;
import edu.uw.cs.multir.preprocess.Mappings;
public class ResultWriter {
	// allow only a single output label per entity pair, instead of multiple
	static boolean singleBestOnly = false;
	/**
	 * Loads model/params from {@code dir}, runs inference over dir/test and
	 * writes per-mention predictions to dir/results.
	 *
	 * @param dir directory with "model", "params", "test" and "mapping" files
	 * @throws IOException if any of the files cannot be read or written
	 */
	public static void write(String dir) throws IOException {
		Model model = new Model();
		model.read(dir + File.separatorChar + "model");
		Parameters params = new Parameters();
		params.model = model;
		params.deserialize(dir + File.separatorChar + "params");
		Dataset test = new MemoryDataset(dir + File.separatorChar + "test");
		PrintStream ps = new PrintStream(dir + File.separatorChar + "results");
		ResultWriter.eval(dir + File.separatorChar + "mapping", test, params, ps);
		ps.close();
	}
	/**
	 * Writes one header line listing all relation names in ID order, then one
	 * tab-separated line per mention: arg1, arg2, mention index, predicted
	 * relation name, parse score, and a trailing (always empty) column.
	 *
	 * @param mappingFile relation/feature mapping file (for ID -> name lookup)
	 * @param test        dataset to run inference over
	 * @param params      trained model parameters
	 * @param ps          destination stream
	 * @throws IOException if the mapping file or dataset cannot be read
	 */
	public static void eval(String mappingFile, Dataset test, Parameters params,
			PrintStream ps) throws IOException {
		// need mapping from relIDs to rels
		Mappings mapping = new Mappings();
		mapping.read(mappingFile);
		Map<Integer,String> relID2rel = new HashMap<Integer,String>();
		for (Map.Entry<String,Integer> e : mapping.getRel2RelID().entrySet())
			relID2rel.put(e.getValue(), e.getKey());
		System.out.println("eval");
		Scorer scorer = new Scorer();
		// header line: all relation names, space separated
		StringBuilder sb1 = new StringBuilder();
		for (int i=0; i < mapping.numRelations(); i++)
			sb1.append(relID2rel.get(i) + " ");
		ps.append(sb1.toString() + "\n");
		MILDocument doc = new MILDocument();
		test.reset();
		while (test.next(doc)) {
			Parse parse = FullInference.infer(doc, scorer, params);
			int[] Yp = parse.Y;
			if (Yp.length > 1 && singleBestOnly) {
				// keep only the aggregate relation with the highest score
				int max = 0;
				for (int i=1; i < Yp.length; i++)
					if (parse.scores[Yp[i]] > parse.scores[Yp[max]]) max = i;
				Yp = new int[] { Yp[max] };
				// set sentence-level predictions
				// NOTE(review): the comparisons below use the index "max"
				// rather than the relation ID Yp[0]; confirm this is
				// intended before enabling singleBestOnly.
				for (int m = 0; m < doc.numMentions; m++) {
					if (parse.Z[m] != 0 && parse.Z[m] != max) {
						if (parse.allScores[m][0] > parse.allScores[m][max]) parse.Z[m] = 0;
						else parse.Z[m] = max;
					}
				}
			}
			for (int m = 0; m < doc.numMentions; m++) {
				// sb2 is never appended to, so the final column is always empty
				StringBuilder sb2 = new StringBuilder();
				ps.append(doc.arg1 + "\t" + doc.arg2 + "\t" + m + "\t" +
						relID2rel.get(parse.Z[m]) + "\t" + parse.score + "\t" + sb2.toString() + "\n");
			}
		}
	}
}
| 2,795 | 31.137931 | 85 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/multir/src/edu/uw/cs/multir/main/SententialPrecisionRecallByRelation.java | package edu.uw.cs.multir.main;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class SententialPrecisionRecallByRelation {
	// treat "indirect" labels as "y" labels
	static boolean indirect = true;
	/**
	 * Prints per-relation precision and recall of the sentence-level
	 * predictions in {@code resultsFile} against the human annotations in
	 * {@code labelsFile}.  Relations are reported in decreasing order of
	 * their number of positive labels.
	 *
	 * @param labelsFile  tab-separated lines: guid1, guid2, mentionID,
	 *                    relation, label (y/n/indirect), ...
	 * @param resultsFile tab-separated lines (first line is a header):
	 *                    guid1, guid2, mentionID, relation, ...
	 * @throws IOException if either file cannot be read
	 */
	public static void run(String labelsFile, String resultsFile) throws IOException {
		// put results into map
		// guid1, guid2, mtnID -> rel
		Map<String,String> results = new HashMap<String,String>();
		{
			BufferedReader r = new BufferedReader(new InputStreamReader
					(new FileInputStream(resultsFile), "utf-8"));
			r.readLine(); // skip the column-header line
			String l = null;
			while ((l = r.readLine())!= null) {
				String[] c = l.split("\t");
				String guid1 = c[0];
				String guid2 = c[1];
				int mntID = Integer.parseInt(c[2]);
				String rel = c[3];
				results.put(guid1 + "\t" + guid2 + "\t" + mntID, rel);
			}
			r.close();
		}
		// read labels
		// relation --> (guid1, guid2, mntID -> y/n/indirect)
		Map<String,Map<String,String>> labels = new HashMap<String,Map<String,String>>();
		{
			BufferedReader r = new BufferedReader(new InputStreamReader
					(new FileInputStream(labelsFile), "utf-8"));
			String l = null;
			while ((l = r.readLine())!= null) {
				String[] c = l.split("\t");
				String relation = c[3];
				String guid1 = c[0];
				String guid2 = c[1];
				String mentionID = c[2];
				String label = c[4];
				// ignore relation /location/administrative_division/country
				// since it is just the inverse of
				// /location/country/administrative_divisions which is also
				// in the dataset
				if (relation.equals("/location/administrative_division/country"))
					continue;
				Map<String,String> m = labels.get(relation);
				if (m == null) {
					m = new HashMap<String,String>();
					labels.put(relation, m);
				}
				m.put(guid1 + "\t" + guid2 + "\t" + mentionID, label);
			}
			r.close();
		}
		// sort by their number of true labels
		List<Map.Entry<String,Map<String,String>>> l = new
			ArrayList<Map.Entry<String,Map<String,String>>>();
		l.addAll(labels.entrySet());
		Collections.sort(l, new Comparator<Map.Entry<String,Map<String,String>>>() {
			public int compare(Map.Entry<String,Map<String,String>> e1,
					Map.Entry<String,Map<String,String>> e2) {
				// count positive labels ("y", optionally "indirect") per side
				int n1 = 0;
				for (Map.Entry<String,String> e : e1.getValue().entrySet())
					if (e.getValue().equals("y") ||
							(indirect && e.getValue().equals("indirect"))) n1++;
				int n2 = 0;
				for (Map.Entry<String,String> e : e2.getValue().entrySet())
					if (e.getValue().equals("y") ||
							(indirect && e.getValue().equals("indirect"))) n2++;
				return n2 - n1;
			}
		});
		// evaluate
		for (Map.Entry<String,Map<String,String>> e : l) { //labels.entrySet()) {
			System.out.println(e.getKey());
			String fbRel = e.getKey();
			// TP/FP/FN/TN of this relation over all annotated mentions
			int TP = 0, FP = 0, TN = 0, FN = 0;
			int numFbIsTrue = 0;
			Map<String,String> m = e.getValue();
			for (Map.Entry<String,String> c : m.entrySet()) {
				String prRel = results.get(c.getKey());
				boolean fbIsTrue = c.getValue().equals("y") ||
					(indirect && c.getValue().equals("indirect"));
				if (fbRel.equals(prRel) && fbIsTrue) TP++;
				else if (fbRel.equals(prRel) && !fbIsTrue) FP++;
				else if (!fbRel.equals(prRel) && fbIsTrue) FN++;
				else if (!fbRel.equals(prRel) && !fbIsTrue) TN++;
				if (fbIsTrue) numFbIsTrue++;
			}
			if (TP + FP == 0)
				System.out.println(" precision\tNA (no positive predictions)");
			else
				System.out.println(" precision\t" + (double)TP / (double)(TP + FP));
			if (TP + FN == 0)
				System.out.println(" recall\tNA (no positive labels in test data)");
			else
				System.out.println(" recall\t" + (double)TP / (double)(TP + FN));
			System.out.println(" # fb annot \t" + m.size() + " (fb annotation precision " + (numFbIsTrue / (double)m.size()) + ")");
		}
	}
}
| 4,054 | 31.18254 | 124 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/multir/src/edu/uw/cs/multir/main/SententialPrecisionRecallCurve.java | package edu.uw.cs.multir.main;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class SententialPrecisionRecallCurve {
static boolean outputTopSentences = false;
// treat "indirect" labels as "y" labels
static boolean indirect = true;
public static void run(String labelsFile, String resultsFile) throws IOException {
// put results into map
// guid1, guid2, mtnID -> ex
List<Example> predictions = new ArrayList<Example>();
{
BufferedReader r = new BufferedReader(new InputStreamReader
(new FileInputStream(resultsFile), "utf-8"));
String l = null;
while ((l = r.readLine())!= null) {
String[] c = l.split("\t");
if (c.length < 2) continue; // column header
Example e = new Example();
e.arg1 = c[0];
e.arg2 = c[1];
e.mentionID = Integer.parseInt(c[2]);
e.predRelation = c[3];
e.predScore = Double.parseDouble(c[4]);
predictions.add(e);
}
r.close();
}
Map<String,List<Label>> labels = new HashMap<String,List<Label>>();
{
BufferedReader r = new BufferedReader(new InputStreamReader
(new FileInputStream(labelsFile), "utf-8"));
String l = null;
while ((l = r.readLine())!= null) {
String[] c = l.split("\t");
String key = c[0] + "\t" + c[1] + "\t" + c[2]; // arg1, arg2, mentionID
Label label = new Label();
label.relation = c[3];
// ignore relation /location/administrative_division/country
// since it is just the inverse of
// /location/country/administrative_divisions which is also
// in the dataset
if (label.relation.equals("/location/administrative_division/country")) continue;
label.tf = c[4].equals("y") || c[4].equals("indirect");
label.name1 = c[6];
label.name2 = c[7];
label.sentence = c[8];
List<Label> ll = labels.get(key);
if (ll == null) {
ll = new ArrayList<Label>();
labels.put(key, ll);
}
ll.add(label);
}
r.close();
}
// sort predictions by decreasing score
Collections.sort(predictions, new Comparator<Example>() {
public int compare(Example e1, Example e2) {
if (e1.predScore > e2.predScore) return -1; else return 1;
}
});
// max recall
int MAX_TP = 0;
for (List<Label> ll : labels.values()) {
for (Label l : ll)
if (!l.relation.equals("NA") && l.tf) MAX_TP++;
}
List<double[]> curve = new ArrayList<double[]>();
int TP = 0, FP = 0, FN = 0;
for (Example e : predictions) {
String key = e.arg1 + "\t" + e.arg2 + "\t" + e.mentionID;
List<Label> ll = labels.get(key);
if (ll != null) {
for (Label l : ll) {
if (l.relation.equals(e.predRelation)) {
if (l.tf) TP++;
else FP++;
} else {
if (l.tf) FN++; // && e.predRelation.equals("NA")) FN++;
//else TN++;
}
}
double precision = TP / (double)(TP + FP);
double recall = TP / (double)(MAX_TP);
curve.add(new double[] { precision, recall } );
}
}
{
for (double[] d : curve) {
System.out.println(d[1] + "\t" + d[0]);
}
}
// print the most confident predictions
if (outputTopSentences)
{
for (Example e : predictions) {
String key = e.arg1 + "\t" + e.arg2 + "\t" + e.mentionID;
List<Label> ll = labels.get(key);
if (ll != null) {
StringBuilder sb = new StringBuilder();
for (Label l : ll) {
sb.append(l.tf + ":" + l.relation + ", ");
}
Label l1 = ll.get(0);
System.out.println(l1.name1 + "\t" + l1.name2 + "\t" +
e.predRelation + "\t" + sb.toString() + "\t" +
l1.sentence + "\t" + e.predScore + "\n");
}
}
}
}
	/** One model prediction parsed from a line of the results file. */
	static class Example {
		String arg1;
		String arg2;
		int mentionID;
		String predRelation;
		double predScore;
		// not read anywhere in this class; appears unused
		boolean correct = false;
	}
	/** One human annotation parsed from a line of the labels file. */
	static class Label {
		String relation;
		// true for "y" or "indirect" annotations
		boolean tf;
		String name1;
		String name2;
		String sentence;
	}
} | 4,097 | 25.269231 | 85 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/multir/src/edu/uw/cs/multir/main/Test.java | package edu.uw.cs.multir.main;
import java.io.File;
import java.io.IOException;
import edu.uw.cs.multir.learning.algorithm.FullInference;
import edu.uw.cs.multir.learning.algorithm.Model;
import edu.uw.cs.multir.learning.algorithm.Parameters;
import edu.uw.cs.multir.learning.algorithm.Scorer;
import edu.uw.cs.multir.learning.data.Dataset;
import edu.uw.cs.multir.learning.data.MILDocument;
import edu.uw.cs.multir.learning.data.MemoryDataset;
public class Test {
	/**
	 * Loads the model and parameters from {@code dir}, runs full inference
	 * over every document of the "test" split and prints the wall-clock time
	 * the inference loop took.
	 *
	 * @param dir directory containing the "model", "params" and "test" files
	 * @throws IOException if any of the files cannot be read
	 */
	public static void test(String dir) throws IOException {
		Model model = new Model();
		model.read(dir + File.separatorChar + "model");
		Parameters params = new Parameters();
		params.model = model;
		params.deserialize(dir + File.separatorChar + "params");
		Dataset testData = new MemoryDataset(dir + File.separatorChar + "test");
		long started = System.currentTimeMillis();
		MILDocument document = new MILDocument();
		Scorer scorer = new Scorer();
		testData.reset();
		while (testData.next(document))
			FullInference.infer(document, scorer, params);
		long finished = System.currentTimeMillis();
		System.out.println("testing time " + (finished - started) / 1000.0 + " seconds");
	}
}
| 1,149 | 29.263158 | 80 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/multir/src/edu/uw/cs/multir/main/Train.java | package edu.uw.cs.multir.main;
import java.io.File;
import java.io.IOException;
import java.util.Random;
import edu.uw.cs.multir.learning.algorithm.AveragedPerceptron;
import edu.uw.cs.multir.learning.algorithm.Model;
import edu.uw.cs.multir.learning.algorithm.Parameters;
import edu.uw.cs.multir.learning.data.Dataset;
import edu.uw.cs.multir.learning.data.MemoryDataset;
public class Train {
	/**
	 * Trains averaged-perceptron parameters on the preprocessed "train" file
	 * in {@code dir}, prints the elapsed training time and serializes the
	 * learned parameters to dir/params.
	 *
	 * @param dir directory containing the "model" and "train" files
	 * @throws IOException if any file cannot be read or written
	 */
	public static void train(String dir) throws IOException {
		Random random = new Random(1); // fixed seed keeps training reproducible
		Model model = new Model();
		model.read(dir + File.separatorChar + "model");
		AveragedPerceptron learner = new AveragedPerceptron(model, random);
		Dataset trainData = new MemoryDataset(dir + File.separatorChar + "train");
		System.out.println("starting training");
		long started = System.currentTimeMillis();
		Parameters params = learner.train(trainData);
		long finished = System.currentTimeMillis();
		System.out.println("training time " + (finished - started) / 1000.0 + " seconds");
		params.serialize(dir + File.separatorChar + "params");
	}
}
| 1,026 | 27.527778 | 73 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/multir/src/edu/uw/cs/multir/preprocess/ConvertProtobufToMILDocument.java | package edu.uw.cs.multir.preprocess;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.DataOutputStream;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.zip.GZIPInputStream;
import edu.uw.cs.multir.learning.data.MILDocument;
import edu.uw.cs.multir.util.SparseBinaryVector;
import cc.factorie.protobuf.DocumentProtos.Relation;
import cc.factorie.protobuf.DocumentProtos.Relation.RelationMentionRef;
public class ConvertProtobufToMILDocument {
	/**
	 * Command-line wrapper: converts a protobuf relation file into the binary
	 * MILDocument format using an existing mapping file (no new feature or
	 * relation IDs are created).
	 */
	public static void main(String[] args) throws IOException {
		// arg1 is protobuf file
		// arg2 is MIL file
		String input = args[0];
		String output= args[1];
		String mappingFile = args[2];
		boolean writeMapping = false;
		boolean writeRelations = false;
		convert(input, output, mappingFile, writeMapping, writeRelations);
	}
	/**
	 * Converts a gzipped, delimited protobuf Relation stream into binary
	 * MILDocument records.
	 *
	 * @param input       gzipped protobuf input file
	 * @param output      binary MILDocument output file
	 * @param mappingFile feature/relation string-to-ID mapping file
	 * @param writeFeatureMapping  if true, unseen features are assigned new
	 *                             IDs and the mapping is saved at the end;
	 *                             otherwise unseen features are dropped
	 * @param writeRelationMapping same, for relation labels
	 * @throws IOException on any read or write failure
	 */
	public static void convert(String input, String output, String mappingFile,
			boolean writeFeatureMapping, boolean writeRelationMapping) throws IOException {
		// This tool can be used in two ways:
		// 1) a new Mapping is created and saved at the end
		// 2) an existing Mapping is used; non-existent relations
		// or features are ignored
		Mappings m = new Mappings();
		if (!writeFeatureMapping || !writeRelationMapping)
			m.read(mappingFile);
		else
			// ensure that relation NA gets ID 0
			m.getRelationID("NA", true);
		DataOutputStream os = new DataOutputStream
			(new BufferedOutputStream(new FileOutputStream(output)));
		InputStream is = new GZIPInputStream(
				new BufferedInputStream
				(new FileInputStream(input)));
		Relation r = null;
		MILDocument doc = new MILDocument();
		int count = 0;
		while ((r = Relation.parseDelimitedFrom(is))!=null) {
			// progress indicator, one line per 10000 relations
			if (++count % 10000 == 0) System.out.println(count);
			doc.clear();
			doc.arg1 = r.getSourceGuid();
			doc.arg2 = r.getDestGuid();
			// set relations: map the comma-separated label names to IDs,
			// then sort and deduplicate them
			{
				String[] rels = r.getRelType().split(",");
				int[] irels = new int[rels.length];
				for (int i=0; i < rels.length; i++)
					irels[i] = m.getRelationID(rels[i], writeRelationMapping);
				Arrays.sort(irels);
				// ignore NA and non-mapped relations
				// (NA has ID 0, unmapped labels get -1; "> 0" skips both)
				int countUnique = 0;
				for (int i=0; i < irels.length; i++)
					if (irels[i] > 0 && (i == 0 || irels[i-1] != irels[i]))
						countUnique++;
				doc.Y = new int[countUnique];
				int pos = 0;
				for (int i=0; i < irels.length; i++)
					if (irels[i] > 0 && (i == 0 || irels[i-1] != irels[i]))
						doc.Y[pos++] = irels[i];
			}
			// set mentions
			doc.setCapacity(r.getMentionCount());
			doc.numMentions = r.getMentionCount();
			for (int j=0; j < r.getMentionCount(); j++) {
				RelationMentionRef rmf = r.getMention(j);
				doc.Z[j] = -1;
				doc.mentionIDs[j] = j;
				SparseBinaryVector sv = doc.features[j] = new SparseBinaryVector();
				int[] fts = new int[rmf.getFeatureCount()];
				for (int i=0; i < rmf.getFeatureCount(); i++)
					fts[i] = m.getFeatureID(rmf.getFeature(i), writeFeatureMapping);
				Arrays.sort(fts);
				// sort + deduplicate; -1 marks features absent from the mapping
				int countUnique = 0;
				for (int i=0; i < fts.length; i++)
					if (fts[i] != -1 && (i == 0 || fts[i-1] != fts[i]))
						countUnique++;
				sv.num = countUnique;
				sv.ids = new int[countUnique];
				int pos = 0;
				for (int i=0; i < fts.length; i++)
					if (fts[i] != -1 && (i == 0 || fts[i-1] != fts[i]))
						sv.ids[pos++] = fts[i];
			}
			doc.write(os);
		}
		is.close();
		os.close();
		// persist the mapping only if this run was allowed to extend it
		if (writeFeatureMapping || writeRelationMapping)
			m.write(mappingFile);
	}
}
| 3,793 | 30.355372 | 82 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/multir/src/edu/uw/cs/multir/preprocess/Mappings.java | package edu.uw.cs.multir.preprocess;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Bidirectional bookkeeping between relation/feature strings and dense
 * integer IDs.  IDs are handed out in first-seen order, starting at 0, and
 * the whole table can be saved to / restored from a UTF-8 text file.
 */
public class Mappings {
	// relation name -> dense ID, assigned in first-seen order
	private Map<String,Integer> rel2relID = new HashMap<String,Integer>();
	// feature string -> dense ID, assigned in first-seen order
	private Map<String,Integer> ft2ftID = new HashMap<String,Integer>();
	/**
	 * Shared lookup: returns the ID of {@code key} in {@code map}.  When the
	 * key is unknown, a fresh ID (the current map size) is assigned if
	 * {@code addNew} is set, otherwise -1 is returned.
	 */
	private static int lookup(Map<String,Integer> map, String key, boolean addNew) {
		Integer id = map.get(key);
		if (id != null) return id;
		if (!addNew) return -1;
		int fresh = map.size();
		map.put(key, fresh);
		return fresh;
	}
	/** ID of a relation name; assigns a new ID when addNew is true. */
	public int getRelationID(String relation, boolean addNew) {
		return lookup(rel2relID, relation, addNew);
	}
	/** The live relation-name-to-ID map (not a copy). */
	public Map<String,Integer> getRel2RelID() {
		return rel2relID;
	}
	/** ID of a feature string; assigns a new ID when addNew is true. */
	public int getFeatureID(String feature, boolean addNew) {
		return lookup(ft2ftID, feature, addNew);
	}
	/** Number of distinct relation names seen so far. */
	public int numRelations() {
		return rel2relID.size();
	}
	/** Number of distinct feature strings seen so far. */
	public int numFeatures() {
		return ft2ftID.size();
	}
	/** Serializes both tables (relations first) to a UTF-8 text file. */
	public void write(String file) throws IOException {
		BufferedWriter w = new BufferedWriter(new OutputStreamWriter
				(new FileOutputStream(file), "utf-8"));
		writeMap(rel2relID, w);
		writeMap(ft2ftID, w);
		w.close();
	}
	/** Restores both tables from a file previously produced by write(). */
	public void read(String file) throws IOException {
		BufferedReader r = new BufferedReader(new InputStreamReader
				(new FileInputStream(file), "utf-8"));
		readMap(rel2relID, r);
		readMap(ft2ftID, r);
		r.close();
	}
	// File format per table: one line with the entry count, then one
	// "id<TAB>key" line per entry, ordered by ascending ID.
	private static void writeMap(Map<String,Integer> map, BufferedWriter w)
	throws IOException {
		w.write(map.size() + "\n");
		List<Map.Entry<String,Integer>> entries =
			new ArrayList<Map.Entry<String,Integer>>(map.entrySet());
		Collections.sort(entries, new Comparator<Map.Entry<String,Integer>>() {
			public int compare(Map.Entry<String,Integer> a, Map.Entry<String,Integer> b) {
				return a.getValue() - b.getValue(); } } );
		for (Map.Entry<String,Integer> entry : entries)
			w.write(entry.getValue() + "\t" + entry.getKey() + "\n");
	}
	private static void readMap(Map<String,Integer> map, BufferedReader r)
	throws IOException {
		int count = Integer.parseInt(r.readLine());
		for (int i = 0; i < count; i++) {
			String[] parts = r.readLine().split("\t");
			map.put(parts[1], Integer.parseInt(parts[0]));
		}
	}
}
| 2,437 | 25.791209 | 83 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/multir/src/edu/uw/cs/multir/util/DenseVector.java | package edu.uw.cs.multir.util;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
/**
 * A fixed-length dense vector of doubles with in-place and copying
 * arithmetic, plus a simple DataOutputStream-based serialization format
 * (length followed by each component).
 */
public class DenseVector {
	/** The component values. */
	public double[] vals;
	public DenseVector(int length) {
		this.vals = new double[length];
	}
	/** Dot product of this vector with a sparse binary vector. */
	public double dotProduct(SparseBinaryVector v) {
		return dotProduct(this, v);
	}
	/** Sets every component back to zero. */
	public void reset() {
		java.util.Arrays.fill(vals, 0);
	}
	/** Sum of v1's entries at the index positions that are set in v2. */
	public static double dotProduct(DenseVector v1, SparseBinaryVector v2) {
		double total = 0;
		for (int k = 0; k < v2.num; k++)
			total += v1.vals[v2.ids[k]];
		return total;
	}
	/** Returns an independent copy of this vector. */
	public DenseVector copy() {
		DenseVector result = new DenseVector(vals.length);
		System.arraycopy(vals, 0, result.vals, 0, vals.length);
		return result;
	}
	/** Multiplies every component by factor, in place. */
	public void scale(float factor) {
		for (int k = 0; k < vals.length; k++)
			vals[k] *= factor;
	}
	/** Adds factor to each component whose index is set in v, in place. */
	public void addSparse(SparseBinaryVector v, double factor) {
		for (int k = 0; k < v.num; k++)
			vals[v.ids[k]] += factor;
	}
	/** Returns v1 + factor * v2 as a new vector. */
	public static DenseVector sum(DenseVector v1, DenseVector v2, double factor) {
		DenseVector result = new DenseVector(v1.vals.length);
		for (int k = 0; k < v1.vals.length; k++)
			result.vals[k] = v1.vals[k] + factor * v2.vals[k];
		return result;
	}
	/** Returns factor * v as a new vector; v itself is untouched. */
	public static DenseVector scale(DenseVector v, float factor) {
		DenseVector result = v.copy();
		result.scale(factor);
		return result;
	}
	/** Writes the length and then each component to os. */
	public void serialize(OutputStream os)
	throws IOException {
		DataOutputStream out = new DataOutputStream(os);
		out.writeInt(vals.length);
		for (int k = 0; k < vals.length; k++) {
			out.writeDouble(vals[k]);
		}
	}
	/** Reads the format produced by serialize(), replacing this.vals. */
	public void deserialize(InputStream is)
	throws IOException {
		DataInputStream in = new DataInputStream(is);
		int length = in.readInt();
		vals = new double[length];
		for (int k = 0; k < length; k++) {
			vals[k] = in.readDouble();
		}
	}
	/** Returns this + factor * v as a new vector. */
	public DenseVector sum(DenseVector v, float factor) {
		return sum(this, v, factor);
	}
}
| 1,982 | 22.329412 | 79 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/multir/src/edu/uw/cs/multir/util/SparseBinaryVector.java | package edu.uw.cs.multir.util;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
/**
 * A sparse vector of binary (0/1) components, stored as the sorted array of
 * indices whose value is 1.  Only the first {@code num} entries of
 * {@code ids} are meaningful.
 */
public class SparseBinaryVector {
	public int[] ids; // sorted
	public int num; // the array might not be full
	/** Creates an empty vector. */
	public SparseBinaryVector() {
		this.ids = new int[0];
		this.num = 0;
	}
	/** Wraps the first num entries of ids (expected sorted; see field note). */
	public SparseBinaryVector(int[] ids, int num) {
		this.ids = ids;
		this.num = num;
	}
	/** Logically empties the vector without releasing the id array. */
	public void reset() {
		num = 0;
	}
	/** Returns a trimmed, independent copy of this vector. */
	public SparseBinaryVector copy() {
		SparseBinaryVector n = new SparseBinaryVector(new int[num], num);
		System.arraycopy(ids, 0, n.ids, 0, num);
		return n;
	}
	/**
	 * Dot product of two binary vectors, i.e. the size of the intersection
	 * of their id sets, computed by a linear merge over the sorted arrays.
	 */
	public double dotProduct(SparseBinaryVector v) {
		int i = 0, j = 0;
		double sum = 0;
		while (i < num && j < v.num) {
			if (ids[i] < v.ids[j])
				i++;
			else if (ids[i] > v.ids[j])
				j++;
			else {
				sum += 1;
				i++; j++;
			}
		}
		return sum;
	}
	/** Writes num followed by the first num ids to os. */
	public void serialize(OutputStream os)
	throws IOException {
		DataOutputStream dos = new DataOutputStream(os);
		dos.writeInt(this.num);
		for (int i=0; i < this.num; i++) {
			dos.writeInt(this.ids[i]);
		}
	}
	/** Reads the format produced by serialize(), replacing ids/num. */
	public void deserialize(InputStream is)
	throws IOException {
		DataInputStream dis = new DataInputStream(is);
		this.num = dis.readInt();
		this.ids = new int[this.num];
		for (int i=0; i < this.num; i++) {
			this.ids[i] = dis.readInt();
		}
	}
	/** Space-separated list of the set indices. */
	public String toString() {
		StringBuilder sb = new StringBuilder();
		for (int i=0; i < num; i++) {
			if (i > 0) sb.append(" ");
			sb.append(ids[i]);
		}
		return sb.toString();
	}
} | 1,592 | 19.688312 | 67 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/seq-kernel/libsvm/java/svm_predict.java | import libsvm.*;
import java.io.*;
import java.util.*;
/**
 * LIBSVM command-line prediction tool: reads test instances in LIBSVM sparse
 * format, predicts each one with a previously trained model and writes the
 * predictions (optionally with probability estimates) to an output file.
 */
class svm_predict {
	// Output sink that discards everything; selected by the -q flag.
	private static svm_print_interface svm_print_null = new svm_print_interface()
	{
		public void print(String s) {}
	};
	// Default output sink: forward informational output to stdout.
	private static svm_print_interface svm_print_stdout = new svm_print_interface()
	{
		public void print(String s)
		{
			System.out.print(s);
		}
	};
	// The sink currently in effect (stdout unless -q was given).
	private static svm_print_interface svm_print_string = svm_print_stdout;
	// Routes an informational message through the active sink.
	static void info(String s)
	{
		svm_print_string.print(s);
	}
	// Parses a string as a double.
	private static double atof(String s)
	{
		return Double.valueOf(s).doubleValue();
	}
	// Parses a string as an int.
	private static int atoi(String s)
	{
		return Integer.parseInt(s);
	}
	/**
	 * Reads one instance per line from input, predicts it with model and
	 * writes one prediction per line to output.  With
	 * predict_probability == 1, classification models get a "labels" header
	 * plus per-class probability estimates, while regression models only get
	 * an informational note about the Laplace noise model.  Finishes by
	 * printing accuracy (classification) or MSE and squared correlation
	 * (regression) through info().
	 */
	private static void predict(BufferedReader input, DataOutputStream output, svm_model model, int predict_probability) throws IOException
	{
		int correct = 0;
		int total = 0;
		double error = 0;
		double sumv = 0, sumy = 0, sumvv = 0, sumyy = 0, sumvy = 0;
		int svm_type=svm.svm_get_svm_type(model);
		int nr_class=svm.svm_get_nr_class(model);
		double[] prob_estimates=null;
		if(predict_probability == 1)
		{
			if(svm_type == svm_parameter.EPSILON_SVR ||
			   svm_type == svm_parameter.NU_SVR)
			{
				svm_predict.info("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma="+svm.svm_get_svr_probability(model)+"\n");
			}
			else
			{
				// classification: emit a header line with the class labels
				int[] labels=new int[nr_class];
				svm.svm_get_labels(model,labels);
				prob_estimates = new double[nr_class];
				output.writeBytes("labels");
				for(int j=0;j<nr_class;j++)
					output.writeBytes(" "+labels[j]);
				output.writeBytes("\n");
			}
		}
		while(true)
		{
			String line = input.readLine();
			if(line == null) break;
			// line format: <target> <index>:<value> <index>:<value> ...
			StringTokenizer st = new StringTokenizer(line," \t\n\r\f:");
			double target = atof(st.nextToken());
			int m = st.countTokens()/2;
			svm_node[] x = new svm_node[m];
			for(int j=0;j<m;j++)
			{
				x[j] = new svm_node();
				x[j].index = atoi(st.nextToken());
				x[j].value = atof(st.nextToken());
			}
			double v;
			if (predict_probability==1 && (svm_type==svm_parameter.C_SVC || svm_type==svm_parameter.NU_SVC))
			{
				v = svm.svm_predict_probability(model,x,prob_estimates);
				output.writeBytes(v+" ");
				for(int j=0;j<nr_class;j++)
					output.writeBytes(prob_estimates[j]+" ");
				output.writeBytes("\n");
			}
			else
			{
				v = svm.svm_predict(model,x);
				output.writeBytes(v+"\n");
			}
			// accumulate running statistics for the final summary
			if(v == target)
				++correct;
			error += (v-target)*(v-target);
			sumv += v;
			sumy += target;
			sumvv += v*v;
			sumyy += target*target;
			sumvy += v*target;
			++total;
		}
		if(svm_type == svm_parameter.EPSILON_SVR ||
		   svm_type == svm_parameter.NU_SVR)
		{
			svm_predict.info("Mean squared error = "+error/total+" (regression)\n");
			svm_predict.info("Squared correlation coefficient = "+
				 ((total*sumvy-sumv*sumy)*(total*sumvy-sumv*sumy))/
				 ((total*sumvv-sumv*sumv)*(total*sumyy-sumy*sumy))+
				 " (regression)\n");
		}
		else
			svm_predict.info("Accuracy = "+(double)correct/total*100+
				 "% ("+correct+"/"+total+") (classification)\n");
	}
	// Prints usage to stderr and terminates with exit code 1.
	private static void exit_with_help()
	{
		System.err.print("usage: svm_predict [options] test_file model_file output_file\n"
		+"options:\n"
		+"-b probability_estimates: whether to predict probability estimates, 0 or 1 (default 0); one-class SVM not supported yet\n"
		+"-q : quiet mode (no outputs)\n");
		System.exit(1);
	}
	/**
	 * Entry point: parses the -b/-q options, loads the model from
	 * argv[i+1], then predicts argv[i] into argv[i+2].
	 */
	public static void main(String argv[]) throws IOException
	{
		int i, predict_probability=0;
		svm_print_string = svm_print_stdout;
		// parse options
		for(i=0;i<argv.length;i++)
		{
			if(argv[i].charAt(0) != '-') break;
			++i;
			switch(argv[i-1].charAt(1))
			{
				case 'b':
					predict_probability = atoi(argv[i]);
					break;
				case 'q':
					svm_print_string = svm_print_null;
					i--;
					break;
				default:
					System.err.print("Unknown option: " + argv[i-1] + "\n");
					exit_with_help();
			}
		}
		// at least three positional arguments must remain after the options
		if(i>=argv.length-2)
			exit_with_help();
		try
		{
			BufferedReader input = new BufferedReader(new FileReader(argv[i]));
			DataOutputStream output = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(argv[i+2])));
			svm_model model = svm.svm_load_model(argv[i+1]);
			if (model == null)
			{
				System.err.print("can't open model file "+argv[i+1]+"\n");
				System.exit(1);
			}
			if(predict_probability == 1)
			{
				if(svm.svm_check_probability_model(model)==0)
				{
					System.err.print("Model does not support probabiliy estimates\n");
					System.exit(1);
				}
			}
			else
			{
				if(svm.svm_check_probability_model(model)!=0)
				{
					svm_predict.info("Model supports probability estimates, but disabled in prediction.\n");
				}
			}
			predict(input,output,model,predict_probability);
			input.close();
			output.close();
		}
		catch(FileNotFoundException e)
		{
			exit_with_help();
		}
		catch(ArrayIndexOutOfBoundsException e)
		{
			exit_with_help();
		}
	}
}
| 4,950 | 24.389744 | 183 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/seq-kernel/libsvm/java/svm_scale.java | import libsvm.*;
import java.io.*;
import java.util.*;
import java.text.DecimalFormat;
/**
 * Command-line tool that linearly rescales each feature of an svmlight-format
 * data file into [lower,upper] (default [-1,1]), optionally rescaling the
 * target value as well. Scaling parameters can be saved (-s) for later reuse
 * on other files (-r). The scaled data is written to stdout.
 *
 * The input file is scanned in three passes: pass 1 finds the largest feature
 * index, pass 2 collects per-feature min/max, pass 3 emits the scaled output.
 */
class svm_scale
{
    // most recently read input line, shared between readline() and the passes
    private String line = null;
    // target range for feature values (options -l / -u)
    private double lower = -1.0;
    private double upper = 1.0;
    // target range for the label when y scaling is requested (option -y)
    private double y_lower;
    private double y_upper;
    private boolean y_scaling = false;
    // observed per-feature min/max, indexed by 1-based feature index
    private double[] feature_max;
    private double[] feature_min;
    // observed label min/max across the whole file
    private double y_max = -Double.MAX_VALUE;
    private double y_min = Double.MAX_VALUE;
    private int max_index;
    // nonzero counts before/after scaling, used for the "-l 0" warning
    private long num_nonzeros = 0;
    private long new_num_nonzeros = 0;

    /** Prints usage to stdout and terminates with exit status 1. */
    private static void exit_with_help()
    {
        System.out.print(
        "Usage: svm-scale [options] data_filename\n"
        +"options:\n"
        +"-l lower : x scaling lower limit (default -1)\n"
        +"-u upper : x scaling upper limit (default +1)\n"
        +"-y y_lower y_upper : y scaling limits (default: no y scaling)\n"
        +"-s save_filename : save scaling parameters to save_filename\n"
        +"-r restore_filename : restore scaling parameters from restore_filename\n"
        );
        System.exit(1);
    }

    /** "Rewinds" by closing the reader and reopening the file from the start. */
    private BufferedReader rewind(BufferedReader fp, String filename) throws IOException
    {
        fp.close();
        return new BufferedReader(new FileReader(filename));
    }

    /** Writes the (possibly y-scaled) target value followed by a space. */
    private void output_target(double value)
    {
        if(y_scaling)
        {
            // map the extremes exactly to avoid floating-point drift
            if(value == y_min)
                value = y_lower;
            else if(value == y_max)
                value = y_upper;
            else
                value = y_lower + (y_upper-y_lower) *
                    (value-y_min) / (y_max-y_min);
        }
        System.out.print(value + " ");
    }

    /** Writes one scaled feature as "index:value"; zero results are omitted. */
    private void output(int index, double value)
    {
        /* skip single-valued attribute */
        if(feature_max[index] == feature_min[index])
            return;

        // endpoints map exactly to lower/upper; interior values interpolate
        if(value == feature_min[index])
            value = lower;
        else if(value == feature_max[index])
            value = upper;
        else
            value = lower + (upper-lower) *
                (value-feature_min[index])/
                (feature_max[index]-feature_min[index]);

        if(value != 0)
        {
            System.out.print(index + ":" + value + " ");
            new_num_nonzeros++;
        }
    }

    /** Reads the next line into this.line; returns null at end of file. */
    private String readline(BufferedReader fp) throws IOException
    {
        line = fp.readLine();
        return line;
    }

    /**
     * Parses options, then performs the scan/scale passes described on the
     * class. Exits the process on invalid options or unreadable files.
     */
    private void run(String []argv) throws IOException
    {
        int i,index;
        BufferedReader fp = null, fp_restore = null;
        String save_filename = null;
        String restore_filename = null;
        String data_filename = null;

        // parse options; i ends up at the data filename
        for(i=0;i<argv.length;i++)
        {
            if (argv[i].charAt(0) != '-') break;
            ++i;
            switch(argv[i-1].charAt(1))
            {
                case 'l': lower = Double.parseDouble(argv[i]);	break;
                case 'u': upper = Double.parseDouble(argv[i]);	break;
                case 'y':
                    // -y consumes two arguments: y_lower then y_upper
                    y_lower = Double.parseDouble(argv[i]);
                    ++i;
                    y_upper = Double.parseDouble(argv[i]);
                    y_scaling = true;
                    break;
                case 's': save_filename = argv[i];	break;
                case 'r': restore_filename = argv[i];	break;
                default:
                    System.err.println("unknown option");
                    exit_with_help();
            }
        }

        if(!(upper > lower) || (y_scaling && !(y_upper > y_lower)))
        {
            System.err.println("inconsistent lower/upper specification");
            System.exit(1);
        }
        if(restore_filename != null && save_filename != null)
        {
            System.err.println("cannot use -r and -s simultaneously");
            System.exit(1);
        }

        if(argv.length != i+1)
            exit_with_help();

        data_filename = argv[i];
        try {
            fp = new BufferedReader(new FileReader(data_filename));
        } catch (Exception e) {
            System.err.println("can't open file " + data_filename);
            System.exit(1);
        }

        /* assumption: min index of attributes is 1 */
        /* pass 1: find out max index of attributes */
        max_index = 0;

        if(restore_filename != null)
        {
            // a restore file may reference indices beyond those in the data,
            // so its max index is folded into max_index as well
            int idx, c;

            try {
                fp_restore = new BufferedReader(new FileReader(restore_filename));
            }
            catch (Exception e) {
                System.err.println("can't open file " + restore_filename);
                System.exit(1);
            }
            // skip the optional y-scaling header ('y' line plus two ranges)
            if((c = fp_restore.read()) == 'y')
            {
                fp_restore.readLine();
                fp_restore.readLine();
                fp_restore.readLine();
            }
            // skip the 'x' line and the lower/upper line
            fp_restore.readLine();
            fp_restore.readLine();

            String restore_line = null;
            while((restore_line = fp_restore.readLine())!=null)
            {
                StringTokenizer st2 = new StringTokenizer(restore_line);
                idx = Integer.parseInt(st2.nextToken());
                max_index = Math.max(max_index, idx);
            }
            fp_restore = rewind(fp_restore, restore_filename);
        }

        while (readline(fp) != null)
        {
            StringTokenizer st = new StringTokenizer(line," \t\n\r\f:");
            st.nextToken();			// skip the target value
            while(st.hasMoreTokens())
            {
                index = Integer.parseInt(st.nextToken());
                max_index = Math.max(max_index, index);
                st.nextToken();		// skip the feature value
                num_nonzeros++;
            }
        }

        try {
            feature_max = new double[(max_index+1)];
            feature_min = new double[(max_index+1)];
        } catch(OutOfMemoryError e) {
            System.err.println("can't allocate enough memory");
            System.exit(1);
        }

        for(i=0;i<=max_index;i++)
        {
            feature_max[i] = -Double.MAX_VALUE;
            feature_min[i] = Double.MAX_VALUE;
        }

        fp = rewind(fp, data_filename);

        /* pass 2: find out min/max value */
        while(readline(fp) != null)
        {
            int next_index = 1;
            double target;
            double value;

            StringTokenizer st = new StringTokenizer(line," \t\n\r\f:");
            target = Double.parseDouble(st.nextToken());
            y_max = Math.max(y_max, target);
            y_min = Math.min(y_min, target);

            while (st.hasMoreTokens())
            {
                index = Integer.parseInt(st.nextToken());
                value = Double.parseDouble(st.nextToken());

                // features absent from this line are implicitly zero
                for (i = next_index; i<index; i++)
                {
                    feature_max[i] = Math.max(feature_max[i], 0);
                    feature_min[i] = Math.min(feature_min[i], 0);
                }

                feature_max[index] = Math.max(feature_max[index], value);
                feature_min[index] = Math.min(feature_min[index], value);
                next_index = index + 1;
            }

            for(i=next_index;i<=max_index;i++)
            {
                feature_max[i] = Math.max(feature_max[i], 0);
                feature_min[i] = Math.min(feature_min[i], 0);
            }
        }

        fp = rewind(fp, data_filename);

        /* pass 2.5: save/restore feature_min/feature_max */
        if(restore_filename != null)
        {
            // fp_restore rewinded in finding max_index
            int idx, c;
            double fmin, fmax;

            fp_restore.mark(2);				// for reset
            if((c = fp_restore.read()) == 'y')
            {
                fp_restore.readLine();		// pass the '\n' after 'y'
                StringTokenizer st = new StringTokenizer(fp_restore.readLine());
                y_lower = Double.parseDouble(st.nextToken());
                y_upper = Double.parseDouble(st.nextToken());
                st = new StringTokenizer(fp_restore.readLine());
                y_min = Double.parseDouble(st.nextToken());
                y_max = Double.parseDouble(st.nextToken());
                y_scaling = true;
            }
            else
                fp_restore.reset();

            if(fp_restore.read() == 'x') {
                fp_restore.readLine();		// pass the '\n' after 'x'
                StringTokenizer st = new StringTokenizer(fp_restore.readLine());
                lower = Double.parseDouble(st.nextToken());
                upper = Double.parseDouble(st.nextToken());
                String restore_line = null;
                while((restore_line = fp_restore.readLine())!=null)
                {
                    StringTokenizer st2 = new StringTokenizer(restore_line);
                    idx = Integer.parseInt(st2.nextToken());
                    fmin = Double.parseDouble(st2.nextToken());
                    fmax = Double.parseDouble(st2.nextToken());
                    // ignore indices beyond what pass 1 allocated
                    if (idx <= max_index)
                    {
                        feature_min[idx] = fmin;
                        feature_max[idx] = fmax;
                    }
                }
            }
            fp_restore.close();
        }

        if(save_filename != null)
        {
            Formatter formatter = new Formatter(new StringBuilder());
            BufferedWriter fp_save = null;

            try {
                fp_save = new BufferedWriter(new FileWriter(save_filename));
            } catch(IOException e) {
                System.err.println("can't open file " + save_filename);
                System.exit(1);
            }

            // restore-file format: optional y block, then x block, then
            // one "index min max" line per multi-valued feature
            if(y_scaling)
            {
                formatter.format("y\n");
                formatter.format("%.16g %.16g\n", y_lower, y_upper);
                formatter.format("%.16g %.16g\n", y_min, y_max);
            }
            formatter.format("x\n");
            formatter.format("%.16g %.16g\n", lower, upper);

            for(i=1;i<=max_index;i++)
            {
                if(feature_min[i] != feature_max[i])
                    formatter.format("%d %.16g %.16g\n", i, feature_min[i], feature_max[i]);
            }
            fp_save.write(formatter.toString());
            fp_save.close();
        }

        /* pass 3: scale */
        while(readline(fp) != null)
        {
            int next_index = 1;
            double target;
            double value;

            StringTokenizer st = new StringTokenizer(line," \t\n\r\f:");
            target = Double.parseDouble(st.nextToken());
            output_target(target);

            while(st.hasMoreElements())
            {
                index = Integer.parseInt(st.nextToken());
                value = Double.parseDouble(st.nextToken());

                // emit implicit zeros for skipped indices (they may scale
                // to a nonzero value)
                for (i = next_index; i<index; i++)
                    output(i, 0);

                output(index, value);
                next_index = index + 1;
            }

            for(i=next_index;i<= max_index;i++)
                output(i, 0);

            System.out.print("\n");
        }

        if (new_num_nonzeros > num_nonzeros)
            System.err.print(
             "WARNING: original #nonzeros " + num_nonzeros+"\n"
            +" new #nonzeros " + new_num_nonzeros+"\n"
            +"Use -l 0 if many original feature values are zeros\n");

        fp.close();
    }

    /** Entry point: delegates to an instance run(). */
    public static void main(String argv[]) throws IOException
    {
        svm_scale s = new svm_scale();
        s.run(argv);
    }
}
| 8,944 | 24.48433 | 85 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/seq-kernel/libsvm/java/svm_toy.java | import libsvm.*;
import java.applet.*;
import java.awt.*;
import java.util.*;
import java.awt.event.*;
import java.io.*;
/**
 * Interactive AWT applet demonstrating LIBSVM: the user clicks points of up
 * to three classes onto a canvas, enters svm-train style options, and the
 * resulting decision regions (or regression curve) are painted over the
 * canvas. Points can be saved to / loaded from svmlight-format files.
 */
public class svm_toy extends Applet {

    // default training options shown in the text field
    static final String DEFAULT_PARAM="-t 2 -c 100";
    // drawable canvas size in pixels (set by setSize; YLEN excludes the button bar)
    int XLEN;
    int YLEN;

    // off-screen buffer
    Image buffer;
    Graphics buffer_gc;

    // pre-allocated colors: [0] background, [1..3] class region fills,
    // [4..6] the corresponding brighter point colors (value+3)
    final static Color colors[] =
    {
        new Color(0,0,0),
        new Color(0,120,120),
        new Color(120,120,0),
        new Color(120,0,120),
        new Color(0,200,200),
        new Color(200,200,0),
        new Color(200,0,200)
    };

    /** A training point in normalized [0,1)x[0,1) coordinates with a class label. */
    class point {
        point(double x, double y, byte value)
        {
            this.x = x;
            this.y = y;
            this.value = value;
        }
        double x, y;
        byte value;
    }

    Vector<point> point_list = new Vector<point>();
    // class label (1..3) assigned to newly clicked points
    byte current_value = 1;

    /** Builds the button bar and wires up all UI event handlers. */
    public void init()
    {
        setSize(getSize());

        final Button button_change = new Button("Change");
        Button button_run = new Button("Run");
        Button button_clear = new Button("Clear");
        Button button_save = new Button("Save");
        Button button_load = new Button("Load");
        final TextField input_line = new TextField(DEFAULT_PARAM);

        BorderLayout layout = new BorderLayout();
        this.setLayout(layout);

        Panel p = new Panel();
        GridBagLayout gridbag = new GridBagLayout();
        p.setLayout(gridbag);

        GridBagConstraints c = new GridBagConstraints();
        c.fill = GridBagConstraints.HORIZONTAL;
        c.weightx = 1;
        c.gridwidth = 1;
        gridbag.setConstraints(button_change,c);
        gridbag.setConstraints(button_run,c);
        gridbag.setConstraints(button_clear,c);
        gridbag.setConstraints(button_save,c);
        gridbag.setConstraints(button_load,c);
        c.weightx = 5;
        c.gridwidth = 5;
        gridbag.setConstraints(input_line,c);

        button_change.setBackground(colors[current_value]);

        p.add(button_change);
        p.add(button_run);
        p.add(button_clear);
        p.add(button_save);
        p.add(button_load);
        p.add(input_line);

        this.add(p,BorderLayout.SOUTH);

        button_change.addActionListener(new ActionListener()
        { public void actionPerformed (ActionEvent e)
          { button_change_clicked(); button_change.setBackground(colors[current_value]); }});

        button_run.addActionListener(new ActionListener()
        { public void actionPerformed (ActionEvent e)
          { button_run_clicked(input_line.getText()); }});

        button_clear.addActionListener(new ActionListener()
        { public void actionPerformed (ActionEvent e)
          { button_clear_clicked(); }});

        button_save.addActionListener(new ActionListener()
        { public void actionPerformed (ActionEvent e)
          { button_save_clicked(input_line.getText()); }});

        button_load.addActionListener(new ActionListener()
        { public void actionPerformed (ActionEvent e)
          { button_load_clicked(); }});

        // pressing Enter in the text field also triggers a run
        input_line.addActionListener(new ActionListener()
        { public void actionPerformed (ActionEvent e)
          { button_run_clicked(input_line.getText()); }});

        this.enableEvents(AWTEvent.MOUSE_EVENT_MASK);
    }

    /** Draws a single point into both the off-screen buffer and the window. */
    void draw_point(point p)
    {
        Color c = colors[p.value+3];

        Graphics window_gc = getGraphics();
        buffer_gc.setColor(c);
        buffer_gc.fillRect((int)(p.x*XLEN),(int)(p.y*YLEN),4,4);
        window_gc.setColor(c);
        window_gc.fillRect((int)(p.x*XLEN),(int)(p.y*YLEN),4,4);
    }

    /** Removes all points and wipes the canvas to the background color. */
    void clear_all()
    {
        point_list.removeAllElements();
        if(buffer != null)
        {
            buffer_gc.setColor(colors[0]);
            buffer_gc.fillRect(0,0,XLEN,YLEN);
        }
        repaint();
    }

    /** Redraws every stored point (used after repainting decision regions). */
    void draw_all_points()
    {
        int n = point_list.size();
        for(int i=0;i<n;i++)
            draw_point(point_list.elementAt(i));
    }

    /** Cycles the current class label through 1 -> 2 -> 3 -> 1. */
    void button_change_clicked()
    {
        ++current_value;
        if(current_value > 3) current_value = 1;
    }

    /** Parses a double (wrapper kept for C-style naming consistency). */
    private static double atof(String s)
    {
        return Double.valueOf(s).doubleValue();
    }

    /** Parses an int (wrapper kept for C-style naming consistency). */
    private static int atoi(String s)
    {
        return Integer.parseInt(s);
    }

    /**
     * Trains a model from the clicked points with the given svm-train style
     * option string and paints the result: a regression curve (with epsilon
     * tube for epsilon-SVR) for regression types, otherwise one colored
     * decision region per predicted class.
     */
    void button_run_clicked(String args)
    {
        // guard
        if(point_list.isEmpty()) return;

        svm_parameter param = new svm_parameter();

        // default values
        param.svm_type = svm_parameter.C_SVC;
        param.kernel_type = svm_parameter.RBF;
        param.degree = 3;
        param.gamma = 0;
        param.coef0 = 0;
        param.nu = 0.5;
        param.cache_size = 40;
        param.C = 1;
        param.eps = 1e-3;
        param.p = 0.1;
        param.shrinking = 1;
        param.probability = 0;
        param.nr_weight = 0;
        param.weight_label = new int[0];
        param.weight = new double[0];

        // parse options
        StringTokenizer st = new StringTokenizer(args);
        String[] argv = new String[st.countTokens()];
        for(int i=0;i<argv.length;i++)
            argv[i] = st.nextToken();

        for(int i=0;i<argv.length;i++)
        {
            if(argv[i].charAt(0) != '-') break;
            if(++i>=argv.length)
            {
                System.err.print("unknown option\n");
                break;
            }
            switch(argv[i-1].charAt(1))
            {
                case 's':
                    param.svm_type = atoi(argv[i]);
                    break;
                case 't':
                    param.kernel_type = atoi(argv[i]);
                    break;
                case 'd':
                    param.degree = atoi(argv[i]);
                    break;
                case 'g':
                    param.gamma = atof(argv[i]);
                    break;
                case 'r':
                    param.coef0 = atof(argv[i]);
                    break;
                case 'n':
                    param.nu = atof(argv[i]);
                    break;
                case 'm':
                    param.cache_size = atof(argv[i]);
                    break;
                case 'c':
                    param.C = atof(argv[i]);
                    break;
                case 'e':
                    param.eps = atof(argv[i]);
                    break;
                case 'p':
                    param.p = atof(argv[i]);
                    break;
                case 'h':
                    param.shrinking = atoi(argv[i]);
                    break;
                case 'b':
                    param.probability = atoi(argv[i]);
                    break;
                case 'w':
                    // -wN weight: grow both per-class weight arrays by one
                    ++param.nr_weight;
                    {
                        int[] old = param.weight_label;
                        param.weight_label = new int[param.nr_weight];
                        System.arraycopy(old,0,param.weight_label,0,param.nr_weight-1);
                    }
                    {
                        double[] old = param.weight;
                        param.weight = new double[param.nr_weight];
                        System.arraycopy(old,0,param.weight,0,param.nr_weight-1);
                    }
                    param.weight_label[param.nr_weight-1] = atoi(argv[i-1].substring(2));
                    param.weight[param.nr_weight-1] = atof(argv[i]);
                    break;
                default:
                    System.err.print("unknown option\n");
            }
        }

        // build problem
        svm_problem prob = new svm_problem();
        prob.l = point_list.size();
        prob.y = new double[prob.l];

        if(param.kernel_type == svm_parameter.PRECOMPUTED)
        {
            // precomputed kernels cannot be built from clicked points
        }
        else if(param.svm_type == svm_parameter.EPSILON_SVR ||
            param.svm_type == svm_parameter.NU_SVR)
        {
            // regression: x is the single feature, y is the target
            if(param.gamma == 0) param.gamma = 1;
            prob.x = new svm_node[prob.l][1];
            for(int i=0;i<prob.l;i++)
            {
                point p = point_list.elementAt(i);
                prob.x[i][0] = new svm_node();
                prob.x[i][0].index = 1;
                prob.x[i][0].value = p.x;
                prob.y[i] = p.y;
            }

            // build model & classify
            svm_model model = svm.svm_train(prob, param);
            svm_node[] x = new svm_node[1];
            x[0] = new svm_node();
            x[0].index = 1;
            int[] j = new int[XLEN];

            Graphics window_gc = getGraphics();
            // predict the curve height at every pixel column
            for (int i = 0; i < XLEN; i++)
            {
                x[0].value = (double) i / XLEN;
                j[i] = (int)(YLEN*svm.svm_predict(model, x));
            }

            buffer_gc.setColor(colors[0]);
            buffer_gc.drawLine(0,0,0,YLEN-1);
            window_gc.setColor(colors[0]);
            window_gc.drawLine(0,0,0,YLEN-1);

            int p = (int)(param.p * YLEN);
            for(int i=1;i<XLEN;i++)
            {
                // clear the column, then draw the regression segment
                buffer_gc.setColor(colors[0]);
                buffer_gc.drawLine(i,0,i,YLEN-1);
                window_gc.setColor(colors[0]);
                window_gc.drawLine(i,0,i,YLEN-1);

                buffer_gc.setColor(colors[5]);
                window_gc.setColor(colors[5]);
                buffer_gc.drawLine(i-1,j[i-1],i,j[i]);
                window_gc.drawLine(i-1,j[i-1],i,j[i]);

                if(param.svm_type == svm_parameter.EPSILON_SVR)
                {
                    // draw the epsilon tube at +/- p pixels
                    buffer_gc.setColor(colors[2]);
                    window_gc.setColor(colors[2]);
                    buffer_gc.drawLine(i-1,j[i-1]+p,i,j[i]+p);
                    window_gc.drawLine(i-1,j[i-1]+p,i,j[i]+p);

                    buffer_gc.setColor(colors[2]);
                    window_gc.setColor(colors[2]);
                    buffer_gc.drawLine(i-1,j[i-1]-p,i,j[i]-p);
                    window_gc.drawLine(i-1,j[i-1]-p,i,j[i]-p);
                }
            }
        }
        else
        {
            // classification / one-class: both coordinates are features
            if(param.gamma == 0) param.gamma = 0.5;
            prob.x = new svm_node [prob.l][2];
            for(int i=0;i<prob.l;i++)
            {
                point p = point_list.elementAt(i);
                prob.x[i][0] = new svm_node();
                prob.x[i][0].index = 1;
                prob.x[i][0].value = p.x;
                prob.x[i][1] = new svm_node();
                prob.x[i][1].index = 2;
                prob.x[i][1].value = p.y;
                prob.y[i] = p.value;
            }

            // build model & classify
            svm_model model = svm.svm_train(prob, param);
            svm_node[] x = new svm_node[2];
            x[0] = new svm_node();
            x[1] = new svm_node();
            x[0].index = 1;
            x[1].index = 2;

            Graphics window_gc = getGraphics();
            // color every pixel by its predicted class
            for (int i = 0; i < XLEN; i++)
                for (int j = 0; j < YLEN ; j++) {
                    x[0].value = (double) i / XLEN;
                    x[1].value = (double) j / YLEN;
                    double d = svm.svm_predict(model, x);
                    // one-class "outside" (-1) is shown with color index 2
                    if (param.svm_type == svm_parameter.ONE_CLASS && d<0) d=2;
                    buffer_gc.setColor(colors[(int)d]);
                    window_gc.setColor(colors[(int)d]);
                    buffer_gc.drawLine(i,j,i,j);
                    window_gc.drawLine(i,j,i,j);
            }
        }

        draw_all_points();
    }

    /** Clear button handler. */
    void button_clear_clicked()
    {
        clear_all();
    }

    /**
     * Saves the points in svmlight format. For regression types (detected
     * from a -s option in args) each line is "y 1:x"; otherwise
     * "label 1:x 2:y".
     */
    void button_save_clicked(String args)
    {
        FileDialog dialog = new FileDialog(new Frame(),"Save",FileDialog.SAVE);
        dialog.setVisible(true);
        String filename = dialog.getDirectory() + dialog.getFile();
        if (filename == null) return;
        try {
            DataOutputStream fp = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(filename)));

            int svm_type = svm_parameter.C_SVC;
            int svm_type_idx = args.indexOf("-s ");
            if(svm_type_idx != -1)
            {
                StringTokenizer svm_str_st = new StringTokenizer(args.substring(svm_type_idx+2).trim());
                svm_type = atoi(svm_str_st.nextToken());
            }

            int n = point_list.size();
            if(svm_type == svm_parameter.EPSILON_SVR || svm_type == svm_parameter.NU_SVR)
            {
                for(int i=0;i<n;i++)
                {
                    point p = point_list.elementAt(i);
                    fp.writeBytes(p.y+" 1:"+p.x+"\n");
                }
            }
            else
            {
                for(int i=0;i<n;i++)
                {
                    point p = point_list.elementAt(i);
                    fp.writeBytes(p.value+" 1:"+p.x+" 2:"+p.y+"\n");
                }
            }
            fp.close();
        } catch (IOException e) { System.err.print(e); }
    }

    /**
     * Loads points from an svmlight-format file. Lines with 5 tokens are
     * classification points ("label 1:x 2:y"); lines with 3 tokens are
     * regression points ("y 1:x", labeled with the current class).
     */
    void button_load_clicked()
    {
        FileDialog dialog = new FileDialog(new Frame(),"Load",FileDialog.LOAD);
        dialog.setVisible(true);
        String filename = dialog.getDirectory() + dialog.getFile();
        if (filename == null) return;
        clear_all();
        try {
            BufferedReader fp = new BufferedReader(new FileReader(filename));
            String line;
            while((line = fp.readLine()) != null)
            {
                StringTokenizer st = new StringTokenizer(line," \t\n\r\f:");
                if(st.countTokens() == 5)
                {
                    byte value = (byte)atoi(st.nextToken());
                    st.nextToken();
                    double x = atof(st.nextToken());
                    st.nextToken();
                    double y = atof(st.nextToken());
                    point_list.addElement(new point(x,y,value));
                }
                else if(st.countTokens() == 3)
                {
                    double y = atof(st.nextToken());
                    st.nextToken();
                    double x = atof(st.nextToken());
                    point_list.addElement(new point(x,y,current_value));
                }else
                    break;
            }
            fp.close();
        } catch (IOException e) { System.err.print(e); }
        draw_all_points();
    }

    /** Mouse handler: a press inside the canvas adds a point of the current class. */
    protected void processMouseEvent(MouseEvent e)
    {
        if(e.getID() == MouseEvent.MOUSE_PRESSED)
        {
            if(e.getX() >= XLEN || e.getY() >= YLEN) return;
            point p = new point((double)e.getX()/XLEN,
                        (double)e.getY()/YLEN,
                        current_value);
            point_list.addElement(p);
            draw_point(p);
        }
    }

    /** Paints the off-screen buffer, lazily creating it on first paint. */
    public void paint(Graphics g)
    {
        // create buffer first time
        if(buffer == null) {
            buffer = this.createImage(XLEN,YLEN);
            buffer_gc = buffer.getGraphics();
            buffer_gc.setColor(colors[0]);
            buffer_gc.fillRect(0,0,XLEN,YLEN);
        }
        g.drawImage(buffer,0,0,this);
    }

    // +50 pixels reserved for the button bar below the canvas
    public Dimension getPreferredSize() { return new Dimension(XLEN,YLEN+50); }

    public void setSize(Dimension d) { setSize(d.width,d.height); }
    /** Resizes the applet, recomputes the canvas dimensions and clears it. */
    public void setSize(int w,int h) {
        super.setSize(w,h);
        XLEN = w;
        YLEN = h-50;
        clear_all();
    }

    /** Standalone entry point: hosts the applet in a plain AWT frame. */
    public static void main(String[] argv)
    {
        new AppletFrame("svm_toy",new svm_toy(),500,500+50);
    }
}
/**
 * Minimal AWT frame that hosts an Applet as a standalone application:
 * it initializes and starts the applet, packs the frame around it, and
 * terminates the JVM when the window is closed.
 */
class AppletFrame extends Frame {
    AppletFrame(String title, Applet applet, int width, int height)
    {
        super(title);
        // closing the window ends the whole application
        this.addWindowListener(new WindowAdapter() {
            public void windowClosing(WindowEvent e) {
                System.exit(0);
            }
        });
        // drive the applet lifecycle manually since there is no browser host
        applet.init();
        applet.setSize(width,height);
        applet.start();
        this.add(applet);
        this.pack();
        this.setVisible(true);
    }
}
| 12,269 | 23.393638 | 104 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/seq-kernel/libsvm/java/svm_train.java | import libsvm.*;
import java.io.*;
import java.util.*;
/**
 * Command-line trainer: reads an svmlight-format training file, trains an
 * SVM with the given options (or runs n-fold cross validation with -v),
 * and saves the resulting model to a file.
 */
class svm_train {
    private svm_parameter param;		// set by parse_command_line
    private svm_problem prob;		// set by read_problem
    private svm_model model;
    private String input_file_name;		// set by parse_command_line
    private String model_file_name;		// set by parse_command_line
    private String error_msg;
    private int cross_validation;		// nonzero when -v was given
    private int nr_fold;

    // print function that discards all output (installed by -q)
    private static svm_print_interface svm_print_null = new svm_print_interface()
    {
        public void print(String s) {}
    };

    /** Prints usage to stdout and terminates with exit status 1. */
    private static void exit_with_help()
    {
        System.out.print(
         "Usage: svm_train [options] training_set_file [model_file]\n"
        +"options:\n"
        +"-s svm_type : set type of SVM (default 0)\n"
        +"	0 -- C-SVC		(multi-class classification)\n"
        +"	1 -- nu-SVC		(multi-class classification)\n"
        +"	2 -- one-class SVM\n"
        +"	3 -- epsilon-SVR	(regression)\n"
        +"	4 -- nu-SVR		(regression)\n"
        +"-t kernel_type : set type of kernel function (default 2)\n"
        +"	0 -- linear: u'*v\n"
        +"	1 -- polynomial: (gamma*u'*v + coef0)^degree\n"
        +"	2 -- radial basis function: exp(-gamma*|u-v|^2)\n"
        +"	3 -- sigmoid: tanh(gamma*u'*v + coef0)\n"
        +"	4 -- precomputed kernel (kernel values in training_set_file)\n"
        +"-d degree : set degree in kernel function (default 3)\n"
        +"-g gamma : set gamma in kernel function (default 1/num_features)\n"
        +"-r coef0 : set coef0 in kernel function (default 0)\n"
        +"-c cost : set the parameter C of C-SVC, epsilon-SVR, and nu-SVR (default 1)\n"
        +"-n nu : set the parameter nu of nu-SVC, one-class SVM, and nu-SVR (default 0.5)\n"
        +"-p epsilon : set the epsilon in loss function of epsilon-SVR (default 0.1)\n"
        +"-m cachesize : set cache memory size in MB (default 100)\n"
        +"-e epsilon : set tolerance of termination criterion (default 0.001)\n"
        +"-h shrinking : whether to use the shrinking heuristics, 0 or 1 (default 1)\n"
        +"-b probability_estimates : whether to train a SVC or SVR model for probability estimates, 0 or 1 (default 0)\n"
        +"-wi weight : set the parameter C of class i to weight*C, for C-SVC (default 1)\n"
        +"-v n : n-fold cross validation mode\n"
        +"-q : quiet mode (no outputs)\n"
        );
        System.exit(1);
    }

    /**
     * Runs nr_fold cross validation and prints either MSE and the squared
     * correlation coefficient (regression) or accuracy (classification).
     */
    private void do_cross_validation()
    {
        int i;
        int total_correct = 0;
        double total_error = 0;
        // accumulators for the squared correlation coefficient
        double sumv = 0, sumy = 0, sumvv = 0, sumyy = 0, sumvy = 0;
        double[] target = new double[prob.l];

        svm.svm_cross_validation(prob,param,nr_fold,target);
        if(param.svm_type == svm_parameter.EPSILON_SVR ||
           param.svm_type == svm_parameter.NU_SVR)
        {
            for(i=0;i<prob.l;i++)
            {
                double y = prob.y[i];
                double v = target[i];
                total_error += (v-y)*(v-y);
                sumv += v;
                sumy += y;
                sumvv += v*v;
                sumyy += y*y;
                sumvy += v*y;
            }
            System.out.print("Cross Validation Mean squared error = "+total_error/prob.l+"\n");
            System.out.print("Cross Validation Squared correlation coefficient = "+
                ((prob.l*sumvy-sumv*sumy)*(prob.l*sumvy-sumv*sumy))/
                ((prob.l*sumvv-sumv*sumv)*(prob.l*sumyy-sumy*sumy))+"\n"
                );
        }
        else
        {
            for(i=0;i<prob.l;i++)
                if(target[i] == prob.y[i])
                    ++total_correct;
            System.out.print("Cross Validation Accuracy = "+100.0*total_correct/prob.l+"%\n");
        }
    }

    /** Parses arguments, loads the problem, then trains or cross-validates. */
    private void run(String argv[]) throws IOException
    {
        parse_command_line(argv);
        read_problem();
        error_msg = svm.svm_check_parameter(prob,param);

        if(error_msg != null)
        {
            System.err.print("ERROR: "+error_msg+"\n");
            System.exit(1);
        }

        if(cross_validation != 0)
        {
            do_cross_validation();
        }
        else
        {
            model = svm.svm_train(prob,param);
            svm.svm_save_model(model_file_name,model);
        }
    }

    /** Entry point: delegates to an instance run(). */
    public static void main(String argv[]) throws IOException
    {
        svm_train t = new svm_train();
        t.run(argv);
    }

    /** Parses a double, rejecting NaN/Infinity (exits the process on either). */
    private static double atof(String s)
    {
        double d = Double.valueOf(s).doubleValue();
        if (Double.isNaN(d) || Double.isInfinite(d))
        {
            System.err.print("NaN or Infinity in input\n");
            System.exit(1);
        }
        return(d);
    }

    /** Parses an int (wrapper kept for C-style naming consistency). */
    private static int atoi(String s)
    {
        return Integer.parseInt(s);
    }

    /**
     * Fills param from command-line options and determines the input and
     * model filenames (model filename defaults to input basename + ".model").
     * Exits via exit_with_help() on any malformed option.
     */
    private void parse_command_line(String argv[])
    {
        int i;
        svm_print_interface print_func = null;	// default printing to stdout

        param = new svm_parameter();
        // default values
        param.svm_type = svm_parameter.C_SVC;
        param.kernel_type = svm_parameter.RBF;
        param.degree = 3;
        param.gamma = 0;	// 1/num_features
        param.coef0 = 0;
        param.nu = 0.5;
        param.cache_size = 100;
        param.C = 1;
        param.eps = 1e-3;
        param.p = 0.1;
        param.shrinking = 1;
        param.probability = 0;
        param.nr_weight = 0;
        param.weight_label = new int[0];
        param.weight = new double[0];
        cross_validation = 0;

        // parse options; i is left at the training file argument
        for(i=0;i<argv.length;i++)
        {
            if(argv[i].charAt(0) != '-') break;
            if(++i>=argv.length)
                exit_with_help();
            switch(argv[i-1].charAt(1))
            {
                case 's':
                    param.svm_type = atoi(argv[i]);
                    break;
                case 't':
                    param.kernel_type = atoi(argv[i]);
                    break;
                case 'd':
                    param.degree = atoi(argv[i]);
                    break;
                case 'g':
                    param.gamma = atof(argv[i]);
                    break;
                case 'r':
                    param.coef0 = atof(argv[i]);
                    break;
                case 'n':
                    param.nu = atof(argv[i]);
                    break;
                case 'm':
                    param.cache_size = atof(argv[i]);
                    break;
                case 'c':
                    param.C = atof(argv[i]);
                    break;
                case 'e':
                    param.eps = atof(argv[i]);
                    break;
                case 'p':
                    param.p = atof(argv[i]);
                    break;
                case 'h':
                    param.shrinking = atoi(argv[i]);
                    break;
                case 'b':
                    param.probability = atoi(argv[i]);
                    break;
                case 'q':
                    print_func = svm_print_null;
                    i--;		// -q takes no argument; undo the lookahead
                    break;
                case 'v':
                    cross_validation = 1;
                    nr_fold = atoi(argv[i]);
                    if(nr_fold < 2)
                    {
                        System.err.print("n-fold cross validation: n must >= 2\n");
                        exit_with_help();
                    }
                    break;
                case 'w':
                    // -wi weight: grow both per-class weight arrays by one
                    ++param.nr_weight;
                    {
                        int[] old = param.weight_label;
                        param.weight_label = new int[param.nr_weight];
                        System.arraycopy(old,0,param.weight_label,0,param.nr_weight-1);
                    }

                    {
                        double[] old = param.weight;
                        param.weight = new double[param.nr_weight];
                        System.arraycopy(old,0,param.weight,0,param.nr_weight-1);
                    }

                    param.weight_label[param.nr_weight-1] = atoi(argv[i-1].substring(2));
                    param.weight[param.nr_weight-1] = atof(argv[i]);
                    break;
                default:
                    System.err.print("Unknown option: " + argv[i-1] + "\n");
                    exit_with_help();
            }
        }

        svm.svm_set_print_string_function(print_func);

        // determine filenames

        if(i>=argv.length)
            exit_with_help();

        input_file_name = argv[i];

        if(i<argv.length-1)
            model_file_name = argv[i+1];
        else
        {
            // default model filename: input basename + ".model"
            int p = argv[i].lastIndexOf('/');
            ++p;	// whew...
            model_file_name = argv[i].substring(p)+".model";
        }
    }

    // read in a problem (in svmlight format)
    private void read_problem() throws IOException
    {
        BufferedReader fp = new BufferedReader(new FileReader(input_file_name));
        Vector<Double> vy = new Vector<Double>();
        Vector<svm_node[]> vx = new Vector<svm_node[]>();
        int max_index = 0;

        while(true)
        {
            String line = fp.readLine();
            if(line == null) break;

            StringTokenizer st = new StringTokenizer(line," \t\n\r\f:");

            vy.addElement(atof(st.nextToken()));
            int m = st.countTokens()/2;		// remaining tokens are index:value pairs
            svm_node[] x = new svm_node[m];
            for(int j=0;j<m;j++)
            {
                x[j] = new svm_node();
                x[j].index = atoi(st.nextToken());
                x[j].value = atof(st.nextToken());
            }
            // indices are ascending, so the last one is the line's maximum
            if(m>0) max_index = Math.max(max_index, x[m-1].index);
            vx.addElement(x);
        }

        prob = new svm_problem();
        prob.l = vy.size();
        prob.x = new svm_node[prob.l][];
        for(int i=0;i<prob.l;i++)
            prob.x[i] = vx.elementAt(i);
        prob.y = new double[prob.l];
        for(int i=0;i<prob.l;i++)
            prob.y[i] = vy.elementAt(i);

        // gamma defaults to 1/num_features when not given on the command line
        if(param.gamma == 0 && max_index > 0)
            param.gamma = 1.0/max_index;

        if(param.kernel_type == svm_parameter.PRECOMPUTED)
            // precomputed kernel rows must start with 0:serial_number
            for(int i=0;i<prob.l;i++)
            {
                if (prob.x[i][0].index != 0)
                {
                    System.err.print("Wrong kernel matrix: first column must be 0:sample_serial_number\n");
                    System.exit(1);
                }
                if ((int)prob.x[i][0].value <= 0 || (int)prob.x[i][0].value > max_index)
                {
                    System.err.print("Wrong input format: sample_serial_number out of range\n");
                    System.exit(1);
                }
            }

        fp.close();
    }
}
| 8,355 | 25.194357 | 115 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/seq-kernel/libsvm/java/libsvm/svm.java |
package libsvm;
import java.io.*;
import java.util.*;
//
// Kernel Cache
//
// l is the number of total data items
// size is the cache size limit in bytes
//
/**
 * LRU cache for kernel matrix columns. Each of the l data items owns a slot
 * that may hold a prefix (data[0,len)) of its kernel column; when the byte
 * budget is exhausted, least-recently-used columns are evicted.
 */
class Cache {
    private final int l;		// number of data items / columns
    private long size;		// remaining capacity, counted in floats
    private final class head_t
    {
        head_t prev, next;	// a circular list
        float[] data;
        int len;		// data[0,len) is cached in this entry
    }
    private final head_t[] head;
    private head_t lru_head;	// sentinel; lru_head.next is least recently used

    Cache(int l_, long size_)
    {
        l = l_;
        size = size_;
        head = new head_t[l];
        for(int i=0;i<l;i++) head[i] = new head_t();
        size /= 4;		// convert bytes to float count
        size -= l * (16/4);	// sizeof(head_t) == 16
        size = Math.max(size, 2* (long) l);  // cache must be large enough for two columns
        lru_head = new head_t();
        lru_head.next = lru_head.prev = lru_head;
    }

    /** Unlinks h from the circular LRU list. */
    private void lru_delete(head_t h)
    {
        // delete from current location
        h.prev.next = h.next;
        h.next.prev = h.prev;
    }

    /** Links h in as the most recently used entry (just before the sentinel). */
    private void lru_insert(head_t h)
    {
        // insert to last position
        h.next = lru_head;
        h.prev = lru_head.prev;
        h.prev.next = h;
        h.next.prev = h;
    }

    // request data [0,len)
    // return some position p where [p,len) need to be filled
    // (p >= len if nothing needs to be filled)
    // java: simulate pointer using single-element array
    int get_data(int index, float[][] data, int len)
    {
        head_t h = head[index];
        if(h.len > 0) lru_delete(h);	// temporarily remove; reinserted as MRU below
        int more = len - h.len;

        if(more > 0)
        {
            // free old space: evict LRU columns until 'more' floats fit
            while(size < more)
            {
                head_t old = lru_head.next;
                lru_delete(old);
                size += old.len;
                old.data = null;
                old.len = 0;
            }

            // allocate new space, preserving the already-cached prefix
            float[] new_data = new float[len];
            if(h.data != null) System.arraycopy(h.data,0,new_data,0,h.len);
            h.data = new_data;
            size -= more;
            // swap h.len and len: h.len becomes the new length, len the old
            // (the old length is the position returned to the caller)
            do {int _=h.len; h.len=len; len=_;} while(false);
        }

        lru_insert(h);
        data[0] = h.data;
        return len;
    }

    /**
     * Swaps columns i and j in the cache, keeping cached rows consistent:
     * within every cached column, entries i and j are exchanged too; columns
     * long enough to cover i but not j are evicted rather than patched.
     */
    void swap_index(int i, int j)
    {
        if(i==j) return;

        if(head[i].len > 0) lru_delete(head[i]);
        if(head[j].len > 0) lru_delete(head[j]);
        do {float[] _=head[i].data; head[i].data=head[j].data; head[j].data=_;} while(false);
        do {int _=head[i].len; head[i].len=head[j].len; head[j].len=_;} while(false);
        if(head[i].len > 0) lru_insert(head[i]);
        if(head[j].len > 0) lru_insert(head[j]);

        if(i>j) do {int _=i; i=j; j=_;} while(false);	// ensure i < j
        for(head_t h = lru_head.next; h!=lru_head; h=h.next)
        {
            if(h.len > i)
            {
                if(h.len > j)
                    do {float _=h.data[i]; h.data[i]=h.data[j]; h.data[j]=_;} while(false);
                else
                {
                    // give up
                    lru_delete(h);
                    size += h.len;
                    h.data = null;
                    h.len = 0;
                }
            }
        }
    }
}
//
// Kernel evaluation
//
// the static method k_function is for doing single kernel evaluation
// the constructor of Kernel prepares to calculate the l*l kernel matrix
// the member function get_Q is for getting one column from the Q Matrix
//
/**
 * Abstract view of the Q matrix used by the SMO solver: provides one column
 * at a time (get_Q), the diagonal (get_QD), and index swapping used by the
 * solver's shrinking.
 */
abstract class QMatrix {
    abstract float[] get_Q(int column, int len);	// column 'column', entries [0,len)
    abstract double[] get_QD();			// diagonal entries Q[i][i]
    abstract void swap_index(int i, int j);		// swap data items i and j
};
/**
 * Base class implementing kernel evaluation for the Q-matrix subclasses.
 * The constructor prepares for computing the l*l kernel matrix (caching
 * squared norms for RBF); kernel_function evaluates K(x[i], x[j]); the
 * static k_function evaluates a kernel on two arbitrary sparse vectors.
 */
abstract class Kernel extends QMatrix {
    private svm_node[][] x;		// training vectors (shallow copy of the input array)
    private final double[] x_square;	// per-vector squared norms; non-null only for RBF

    // svm_parameter
    private final int kernel_type;
    private final int degree;
    private final double gamma;
    private final double coef0;

    abstract float[] get_Q(int column, int len);
    abstract double[] get_QD();

    /** Swaps vectors i and j (and their cached squared norms, if present). */
    void swap_index(int i, int j)
    {
        do {svm_node[] _=x[i]; x[i]=x[j]; x[j]=_;} while(false);
        if(x_square != null) do {double _=x_square[i]; x_square[i]=x_square[j]; x_square[j]=_;} while(false);
    }

    /** Integer power by repeated squaring: base^times in O(log times). */
    private static double powi(double base, int times)
    {
        double tmp = base, ret = 1.0;

        for(int t=times; t>0; t/=2)
        {
            if(t%2==1) ret*=tmp;
            tmp = tmp * tmp;
        }
        return ret;
    }

    /** Evaluates K(x[i], x[j]) according to kernel_type. */
    double kernel_function(int i, int j)
    {
        switch(kernel_type)
        {
            case svm_parameter.LINEAR:
                return dot(x[i],x[j]);
            case svm_parameter.POLY:
                return powi(gamma*dot(x[i],x[j])+coef0,degree);
            case svm_parameter.RBF:
                // |u-v|^2 = |u|^2 + |v|^2 - 2 u.v, with |.|^2 precomputed
                return Math.exp(-gamma*(x_square[i]+x_square[j]-2*dot(x[i],x[j])));
            case svm_parameter.SIGMOID:
                return Math.tanh(gamma*dot(x[i],x[j])+coef0);
            case svm_parameter.PRECOMPUTED:
                // x[j][0].value holds j's serial number: look up the stored entry
                return x[i][(int)(x[j][0].value)].value;
            default:
                return 0;	// java
        }
    }

    Kernel(int l, svm_node[][] x_, svm_parameter param)
    {
        this.kernel_type = param.kernel_type;
        this.degree = param.degree;
        this.gamma = param.gamma;
        this.coef0 = param.coef0;

        x = (svm_node[][])x_.clone();

        if(kernel_type == svm_parameter.RBF)
        {
            // precompute squared norms so RBF needs only one dot per pair
            x_square = new double[l];
            for(int i=0;i<l;i++)
                x_square[i] = dot(x[i],x[i]);
        }
        else x_square = null;
    }

    /** Sparse dot product: merges the two index-sorted vectors. */
    static double dot(svm_node[] x, svm_node[] y)
    {
        double sum = 0;
        int xlen = x.length;
        int ylen = y.length;
        int i = 0;
        int j = 0;
        while(i < xlen && j < ylen)
        {
            if(x[i].index == y[j].index)
                sum += x[i++].value * y[j++].value;
            else
            {
                // advance whichever vector is behind
                if(x[i].index > y[j].index)
                    ++j;
                else
                    ++i;
            }
        }
        return sum;
    }

    /**
     * Evaluates the kernel given by param on two arbitrary sparse vectors
     * (used at prediction time, outside the cached training matrix).
     */
    static double k_function(svm_node[] x, svm_node[] y,
                  svm_parameter param)
    {
        switch(param.kernel_type)
        {
            case svm_parameter.LINEAR:
                return dot(x,y);
            case svm_parameter.POLY:
                return powi(param.gamma*dot(x,y)+param.coef0,param.degree);
            case svm_parameter.RBF:
            {
                // accumulate |x-y|^2 with a sparse merge; unmatched indices
                // contribute their squared value directly
                double sum = 0;
                int xlen = x.length;
                int ylen = y.length;
                int i = 0;
                int j = 0;
                while(i < xlen && j < ylen)
                {
                    if(x[i].index == y[j].index)
                    {
                        double d = x[i++].value - y[j++].value;
                        sum += d*d;
                    }
                    else if(x[i].index > y[j].index)
                    {
                        sum += y[j].value * y[j].value;
                        ++j;
                    }
                    else
                    {
                        sum += x[i].value * x[i].value;
                        ++i;
                    }
                }

                while(i < xlen)
                {
                    sum += x[i].value * x[i].value;
                    ++i;
                }

                while(j < ylen)
                {
                    sum += y[j].value * y[j].value;
                    ++j;
                }

                return Math.exp(-param.gamma*sum);
            }
            case svm_parameter.SIGMOID:
                return Math.tanh(param.gamma*dot(x,y)+param.coef0);
            case svm_parameter.PRECOMPUTED:
                // y[0].value is the serial number of the precomputed column
                return x[(int)(y[0].value)].value;
            default:
                return 0;	// java
        }
    }
}
// An SMO algorithm in Fan et al., JMLR 6(2005), p. 1889--1918
// Solves:
//
// min 0.5(\alpha^T Q \alpha) + p^T \alpha
//
// y^T \alpha = \delta
// y_i = +1 or -1
// 0 <= alpha_i <= Cp for y_i = 1
// 0 <= alpha_i <= Cn for y_i = -1
//
// Given:
//
// Q, p, y, Cp, Cn, and an initial feasible point \alpha
// l is the size of vectors and matrices
// eps is the stopping tolerance
//
// solution will be put in \alpha, objective value will be put in obj
//
// Generic SMO solver (working-set selection of Fan et al., JMLR 2005) for the
// C-formulations; Solver_NU subclasses it for the nu-formulations.  State is
// kept in parallel arrays indexed by data point; shrinking moves "decided"
// points past active_size and active_set remembers the permutation.
class Solver {
int active_size; // points [0, active_size) are still being optimized
byte[] y;
double[] G; // gradient of objective function
static final byte LOWER_BOUND = 0;
static final byte UPPER_BOUND = 1;
static final byte FREE = 2;
byte[] alpha_status; // LOWER_BOUND, UPPER_BOUND, FREE
double[] alpha;
QMatrix Q;
double[] QD; // diagonal of Q
double eps; // stopping tolerance
double Cp,Cn; // box constraints for the +1 / -1 classes
double[] p; // linear term of the objective
int[] active_set; // permutation: internal index -> original index
double[] G_bar; // gradient, if we treat free variables as 0
int l;
boolean unshrink; // XXX
static final double INF = java.lang.Double.POSITIVE_INFINITY;
// Box constraint for point i, chosen by its label.
double get_C(int i)
{
return (y[i] > 0)? Cp : Cn;
}
// Reclassify alpha[i] as at-upper-bound / at-lower-bound / free.
void update_alpha_status(int i)
{
if(alpha[i] >= get_C(i))
alpha_status[i] = UPPER_BOUND;
else if(alpha[i] <= 0)
alpha_status[i] = LOWER_BOUND;
else alpha_status[i] = FREE;
}
boolean is_upper_bound(int i) { return alpha_status[i] == UPPER_BOUND; }
boolean is_lower_bound(int i) { return alpha_status[i] == LOWER_BOUND; }
boolean is_free(int i) { return alpha_status[i] == FREE; }
// java: information about solution except alpha,
// because we cannot return multiple values otherwise...
static class SolutionInfo {
double obj;
double rho;
double upper_bound_p;
double upper_bound_n;
double r; // for Solver_NU
}
// Exchange all per-point state for i and j (used by shrinking).
void swap_index(int i, int j)
{
Q.swap_index(i,j);
do {byte _=y[i]; y[i]=y[j]; y[j]=_;} while(false);
do {double _=G[i]; G[i]=G[j]; G[j]=_;} while(false);
do {byte _=alpha_status[i]; alpha_status[i]=alpha_status[j]; alpha_status[j]=_;} while(false);
do {double _=alpha[i]; alpha[i]=alpha[j]; alpha[j]=_;} while(false);
do {double _=p[i]; p[i]=p[j]; p[j]=_;} while(false);
do {int _=active_set[i]; active_set[i]=active_set[j]; active_set[j]=_;} while(false);
do {double _=G_bar[i]; G_bar[i]=G_bar[j]; G_bar[j]=_;} while(false);
}
// Rebuild gradient entries of the shrunk (inactive) points from G_bar plus
// the contributions of the free variables; the two loop orders below are
// chosen by whichever touches fewer kernel entries.
void reconstruct_gradient()
{
// reconstruct inactive elements of G from G_bar and free variables
if(active_size == l) return;
int i,j;
int nr_free = 0;
for(j=active_size;j<l;j++)
G[j] = G_bar[j] + p[j];
for(j=0;j<active_size;j++)
if(is_free(j))
nr_free++;
if(2*nr_free < active_size)
svm.info("\nWARNING: using -h 0 may be faster\n");
if (nr_free*l > 2*active_size*(l-active_size))
{
for(i=active_size;i<l;i++)
{
float[] Q_i = Q.get_Q(i,active_size);
for(j=0;j<active_size;j++)
if(is_free(j))
G[i] += alpha[j] * Q_i[j];
}
}
else
{
for(i=0;i<active_size;i++)
if(is_free(i))
{
float[] Q_i = Q.get_Q(i,l);
double alpha_i = alpha[i];
for(j=active_size;j<l;j++)
G[j] += alpha_i * Q_i[j];
}
}
}
// Main SMO loop.  On return alpha_ holds the solution (in original order)
// and si carries rho / objective value / box bounds.
void Solve(int l, QMatrix Q, double[] p_, byte[] y_,
double[] alpha_, double Cp, double Cn, double eps, SolutionInfo si, int shrinking)
{
this.l = l;
this.Q = Q;
QD = Q.get_QD();
p = (double[])p_.clone();
y = (byte[])y_.clone();
alpha = (double[])alpha_.clone();
this.Cp = Cp;
this.Cn = Cn;
this.eps = eps;
this.unshrink = false;
// initialize alpha_status
{
alpha_status = new byte[l];
for(int i=0;i<l;i++)
update_alpha_status(i);
}
// initialize active set (for shrinking)
{
active_set = new int[l];
for(int i=0;i<l;i++)
active_set[i] = i;
active_size = l;
}
// initialize gradient
{
G = new double[l];
G_bar = new double[l];
int i;
for(i=0;i<l;i++)
{
G[i] = p[i];
G_bar[i] = 0;
}
for(i=0;i<l;i++)
if(!is_lower_bound(i))
{
float[] Q_i = Q.get_Q(i,l);
double alpha_i = alpha[i];
int j;
for(j=0;j<l;j++)
G[j] += alpha_i*Q_i[j];
if(is_upper_bound(i))
for(j=0;j<l;j++)
G_bar[j] += get_C(i) * Q_i[j];
}
}
// optimization step
int iter = 0;
// cap iterations; guard against int overflow of 100*l
int max_iter = Math.max(10000000, l>Integer.MAX_VALUE/100 ? Integer.MAX_VALUE : 100*l);
int counter = Math.min(l,1000)+1;
int[] working_set = new int[2];
while(iter < max_iter)
{
// show progress and do shrinking
if(--counter == 0)
{
counter = Math.min(l,1000);
if(shrinking!=0) do_shrinking();
svm.info(".");
}
if(select_working_set(working_set)!=0)
{
// reconstruct the whole gradient
reconstruct_gradient();
// reset active set size and check
active_size = l;
svm.info("*");
if(select_working_set(working_set)!=0)
break;
else
counter = 1; // do shrinking next iteration
}
int i = working_set[0];
int j = working_set[1];
++iter;
// update alpha[i] and alpha[j], handle bounds carefully
float[] Q_i = Q.get_Q(i,active_size);
float[] Q_j = Q.get_Q(j,active_size);
double C_i = get_C(i);
double C_j = get_C(j);
double old_alpha_i = alpha[i];
double old_alpha_j = alpha[j];
// analytic two-variable sub-problem; opposite labels move alphas together
if(y[i]!=y[j])
{
double quad_coef = QD[i]+QD[j]+2*Q_i[j];
if (quad_coef <= 0)
quad_coef = 1e-12;
double delta = (-G[i]-G[j])/quad_coef;
double diff = alpha[i] - alpha[j];
alpha[i] += delta;
alpha[j] += delta;
// clip the pair back into the feasible box, preserving alpha_i - alpha_j
if(diff > 0)
{
if(alpha[j] < 0)
{
alpha[j] = 0;
alpha[i] = diff;
}
}
else
{
if(alpha[i] < 0)
{
alpha[i] = 0;
alpha[j] = -diff;
}
}
if(diff > C_i - C_j)
{
if(alpha[i] > C_i)
{
alpha[i] = C_i;
alpha[j] = C_i - diff;
}
}
else
{
if(alpha[j] > C_j)
{
alpha[j] = C_j;
alpha[i] = C_j + diff;
}
}
}
else
{
double quad_coef = QD[i]+QD[j]-2*Q_i[j];
if (quad_coef <= 0)
quad_coef = 1e-12;
double delta = (G[i]-G[j])/quad_coef;
double sum = alpha[i] + alpha[j];
alpha[i] -= delta;
alpha[j] += delta;
// clip the pair back into the feasible box, preserving alpha_i + alpha_j
if(sum > C_i)
{
if(alpha[i] > C_i)
{
alpha[i] = C_i;
alpha[j] = sum - C_i;
}
}
else
{
if(alpha[j] < 0)
{
alpha[j] = 0;
alpha[i] = sum;
}
}
if(sum > C_j)
{
if(alpha[j] > C_j)
{
alpha[j] = C_j;
alpha[i] = sum - C_j;
}
}
else
{
if(alpha[i] < 0)
{
alpha[i] = 0;
alpha[j] = sum;
}
}
}
// update G
double delta_alpha_i = alpha[i] - old_alpha_i;
double delta_alpha_j = alpha[j] - old_alpha_j;
for(int k=0;k<active_size;k++)
{
G[k] += Q_i[k]*delta_alpha_i + Q_j[k]*delta_alpha_j;
}
// update alpha_status and G_bar
{
boolean ui = is_upper_bound(i);
boolean uj = is_upper_bound(j);
update_alpha_status(i);
update_alpha_status(j);
int k;
if(ui != is_upper_bound(i))
{
Q_i = Q.get_Q(i,l);
if(ui)
for(k=0;k<l;k++)
G_bar[k] -= C_i * Q_i[k];
else
for(k=0;k<l;k++)
G_bar[k] += C_i * Q_i[k];
}
if(uj != is_upper_bound(j))
{
Q_j = Q.get_Q(j,l);
if(uj)
for(k=0;k<l;k++)
G_bar[k] -= C_j * Q_j[k];
else
for(k=0;k<l;k++)
G_bar[k] += C_j * Q_j[k];
}
}
}
if(iter >= max_iter)
{
if(active_size < l)
{
// reconstruct the whole gradient to calculate objective value
reconstruct_gradient();
active_size = l;
svm.info("*");
}
System.err.print("\nWARNING: reaching max number of iterations\n");
}
// calculate rho
si.rho = calculate_rho();
// calculate objective value
{
double v = 0;
int i;
for(i=0;i<l;i++)
v += alpha[i] * (G[i] + p[i]);
si.obj = v/2;
}
// put back the solution
{
for(int i=0;i<l;i++)
alpha_[active_set[i]] = alpha[i];
}
si.upper_bound_p = Cp;
si.upper_bound_n = Cn;
svm.info("\noptimization finished, #iter = "+iter+"\n");
}
// return 1 if already optimal, return 0 otherwise
int select_working_set(int[] working_set)
{
// return i,j such that
// i: maximizes -y_i * grad(f)_i, i in I_up(\alpha)
// j: minimizes the decrease of obj value
// (if quadratic coefficient <= 0, replace it with tau)
// -y_j*grad(f)_j < -y_i*grad(f)_i, j in I_low(\alpha)
double Gmax = -INF;
double Gmax2 = -INF;
int Gmax_idx = -1;
int Gmin_idx = -1;
double obj_diff_min = INF;
for(int t=0;t<active_size;t++)
if(y[t]==+1)
{
if(!is_upper_bound(t))
if(-G[t] >= Gmax)
{
Gmax = -G[t];
Gmax_idx = t;
}
}
else
{
if(!is_lower_bound(t))
if(G[t] >= Gmax)
{
Gmax = G[t];
Gmax_idx = t;
}
}
int i = Gmax_idx;
float[] Q_i = null;
if(i != -1) // null Q_i not accessed: Gmax=-INF if i=-1
Q_i = Q.get_Q(i,active_size);
// second-order selection of j: maximize the objective decrease for the pair
for(int j=0;j<active_size;j++)
{
if(y[j]==+1)
{
if (!is_lower_bound(j))
{
double grad_diff=Gmax+G[j];
if (G[j] >= Gmax2)
Gmax2 = G[j];
if (grad_diff > 0)
{
double obj_diff;
double quad_coef = QD[i]+QD[j]-2.0*y[i]*Q_i[j];
if (quad_coef > 0)
obj_diff = -(grad_diff*grad_diff)/quad_coef;
else
obj_diff = -(grad_diff*grad_diff)/1e-12;
if (obj_diff <= obj_diff_min)
{
Gmin_idx=j;
obj_diff_min = obj_diff;
}
}
}
}
else
{
if (!is_upper_bound(j))
{
double grad_diff= Gmax-G[j];
if (-G[j] >= Gmax2)
Gmax2 = -G[j];
if (grad_diff > 0)
{
double obj_diff;
double quad_coef = QD[i]+QD[j]+2.0*y[i]*Q_i[j];
if (quad_coef > 0)
obj_diff = -(grad_diff*grad_diff)/quad_coef;
else
obj_diff = -(grad_diff*grad_diff)/1e-12;
if (obj_diff <= obj_diff_min)
{
Gmin_idx=j;
obj_diff_min = obj_diff;
}
}
}
}
}
// Gmax+Gmax2 is the maximal KKT violation; stop when below eps
if(Gmax+Gmax2 < eps || Gmin_idx == -1)
return 1;
working_set[0] = Gmax_idx;
working_set[1] = Gmin_idx;
return 0;
}
// True when bounded point i can be removed from the active set without
// affecting the current maximal violating pair.
private boolean be_shrunk(int i, double Gmax1, double Gmax2)
{
if(is_upper_bound(i))
{
if(y[i]==+1)
return(-G[i] > Gmax1);
else
return(-G[i] > Gmax2);
}
else if(is_lower_bound(i))
{
if(y[i]==+1)
return(G[i] > Gmax2);
else
return(G[i] > Gmax1);
}
else
return(false);
}
// Move shrinkable points to the tail of [0, active_size); near convergence
// (violation <= 10*eps) unshrink once and redo shrinking on the full set.
void do_shrinking()
{
int i;
double Gmax1 = -INF; // max { -y_i * grad(f)_i | i in I_up(\alpha) }
double Gmax2 = -INF; // max { y_i * grad(f)_i | i in I_low(\alpha) }
// find maximal violating pair first
for(i=0;i<active_size;i++)
{
if(y[i]==+1)
{
if(!is_upper_bound(i))
{
if(-G[i] >= Gmax1)
Gmax1 = -G[i];
}
if(!is_lower_bound(i))
{
if(G[i] >= Gmax2)
Gmax2 = G[i];
}
}
else
{
if(!is_upper_bound(i))
{
if(-G[i] >= Gmax2)
Gmax2 = -G[i];
}
if(!is_lower_bound(i))
{
if(G[i] >= Gmax1)
Gmax1 = G[i];
}
}
}
if(unshrink == false && Gmax1 + Gmax2 <= eps*10)
{
unshrink = true;
reconstruct_gradient();
active_size = l;
}
for(i=0;i<active_size;i++)
if (be_shrunk(i, Gmax1, Gmax2))
{
active_size--;
while (active_size > i)
{
if (!be_shrunk(active_size, Gmax1, Gmax2))
{
swap_index(i,active_size);
break;
}
active_size--;
}
}
}
// Bias term: average y*G over free SVs, else midpoint of the KKT bounds.
double calculate_rho()
{
double r;
int nr_free = 0;
double ub = INF, lb = -INF, sum_free = 0;
for(int i=0;i<active_size;i++)
{
double yG = y[i]*G[i];
if(is_lower_bound(i))
{
if(y[i] > 0)
ub = Math.min(ub,yG);
else
lb = Math.max(lb,yG);
}
else if(is_upper_bound(i))
{
if(y[i] < 0)
ub = Math.min(ub,yG);
else
lb = Math.max(lb,yG);
}
else
{
++nr_free;
sum_free += yG;
}
}
if(nr_free>0)
r = sum_free/nr_free;
else
r = (ub+lb)/2;
return r;
}
}
//
// Solver for nu-svm classification and regression
//
// additional constraint: e^T \alpha = constant
//
// Solver for the nu-formulations: identical to Solver except that the two
// label classes are balanced separately (working-set pairs must share a
// label, shrinking/rho use per-class bounds, and si.r carries the extra
// free constant of the nu dual).
final class Solver_NU extends Solver
{
private SolutionInfo si; // kept so calculate_rho can also fill si.r
void Solve(int l, QMatrix Q, double[] p, byte[] y,
double[] alpha, double Cp, double Cn, double eps,
SolutionInfo si, int shrinking)
{
this.si = si;
super.Solve(l,Q,p,y,alpha,Cp,Cn,eps,si,shrinking);
}
// return 1 if already optimal, return 0 otherwise
int select_working_set(int[] working_set)
{
// return i,j such that y_i = y_j and
// i: maximizes -y_i * grad(f)_i, i in I_up(\alpha)
// j: minimizes the decrease of obj value
// (if quadratic coefficient <= 0, replace it with tau)
// -y_j*grad(f)_j < -y_i*grad(f)_i, j in I_low(\alpha)
double Gmaxp = -INF;
double Gmaxp2 = -INF;
int Gmaxp_idx = -1;
double Gmaxn = -INF;
double Gmaxn2 = -INF;
int Gmaxn_idx = -1;
int Gmin_idx = -1;
double obj_diff_min = INF;
// track the maximal violation separately per label class
for(int t=0;t<active_size;t++)
if(y[t]==+1)
{
if(!is_upper_bound(t))
if(-G[t] >= Gmaxp)
{
Gmaxp = -G[t];
Gmaxp_idx = t;
}
}
else
{
if(!is_lower_bound(t))
if(G[t] >= Gmaxn)
{
Gmaxn = G[t];
Gmaxn_idx = t;
}
}
int ip = Gmaxp_idx;
int in = Gmaxn_idx;
float[] Q_ip = null;
float[] Q_in = null;
if(ip != -1) // null Q_ip not accessed: Gmaxp=-INF if ip=-1
Q_ip = Q.get_Q(ip,active_size);
if(in != -1)
Q_in = Q.get_Q(in,active_size);
for(int j=0;j<active_size;j++)
{
if(y[j]==+1)
{
if (!is_lower_bound(j))
{
double grad_diff=Gmaxp+G[j];
if (G[j] >= Gmaxp2)
Gmaxp2 = G[j];
if (grad_diff > 0)
{
double obj_diff;
double quad_coef = QD[ip]+QD[j]-2*Q_ip[j];
if (quad_coef > 0)
obj_diff = -(grad_diff*grad_diff)/quad_coef;
else
obj_diff = -(grad_diff*grad_diff)/1e-12;
if (obj_diff <= obj_diff_min)
{
Gmin_idx=j;
obj_diff_min = obj_diff;
}
}
}
}
else
{
if (!is_upper_bound(j))
{
double grad_diff=Gmaxn-G[j];
if (-G[j] >= Gmaxn2)
Gmaxn2 = -G[j];
if (grad_diff > 0)
{
double obj_diff;
double quad_coef = QD[in]+QD[j]-2*Q_in[j];
if (quad_coef > 0)
obj_diff = -(grad_diff*grad_diff)/quad_coef;
else
obj_diff = -(grad_diff*grad_diff)/1e-12;
if (obj_diff <= obj_diff_min)
{
Gmin_idx=j;
obj_diff_min = obj_diff;
}
}
}
}
}
if(Math.max(Gmaxp+Gmaxp2,Gmaxn+Gmaxn2) < eps || Gmin_idx == -1)
return 1;
// pair j with the maximal violator of its own class
if(y[Gmin_idx] == +1)
working_set[0] = Gmaxp_idx;
else
working_set[0] = Gmaxn_idx;
working_set[1] = Gmin_idx;
return 0;
}
// Per-class variant of Solver.be_shrunk (four bounds instead of two).
private boolean be_shrunk(int i, double Gmax1, double Gmax2, double Gmax3, double Gmax4)
{
if(is_upper_bound(i))
{
if(y[i]==+1)
return(-G[i] > Gmax1);
else
return(-G[i] > Gmax4);
}
else if(is_lower_bound(i))
{
if(y[i]==+1)
return(G[i] > Gmax2);
else
return(G[i] > Gmax3);
}
else
return(false);
}
void do_shrinking()
{
double Gmax1 = -INF; // max { -y_i * grad(f)_i | y_i = +1, i in I_up(\alpha) }
double Gmax2 = -INF; // max { y_i * grad(f)_i | y_i = +1, i in I_low(\alpha) }
double Gmax3 = -INF; // max { -y_i * grad(f)_i | y_i = -1, i in I_up(\alpha) }
double Gmax4 = -INF; // max { y_i * grad(f)_i | y_i = -1, i in I_low(\alpha) }
// find maximal violating pair first
int i;
for(i=0;i<active_size;i++)
{
if(!is_upper_bound(i))
{
if(y[i]==+1)
{
if(-G[i] > Gmax1) Gmax1 = -G[i];
}
else if(-G[i] > Gmax4) Gmax4 = -G[i];
}
if(!is_lower_bound(i))
{
if(y[i]==+1)
{
if(G[i] > Gmax2) Gmax2 = G[i];
}
else if(G[i] > Gmax3) Gmax3 = G[i];
}
}
// near convergence: unshrink once and re-evaluate on the full set
if(unshrink == false && Math.max(Gmax1+Gmax2,Gmax3+Gmax4) <= eps*10)
{
unshrink = true;
reconstruct_gradient();
active_size = l;
}
for(i=0;i<active_size;i++)
if (be_shrunk(i, Gmax1, Gmax2, Gmax3, Gmax4))
{
active_size--;
while (active_size > i)
{
if (!be_shrunk(active_size, Gmax1, Gmax2, Gmax3, Gmax4))
{
swap_index(i,active_size);
break;
}
active_size--;
}
}
}
// Per-class rho; also stores the nu-dual constant r = (r1+r2)/2 in si.r.
double calculate_rho()
{
int nr_free1 = 0,nr_free2 = 0;
double ub1 = INF, ub2 = INF;
double lb1 = -INF, lb2 = -INF;
double sum_free1 = 0, sum_free2 = 0;
for(int i=0;i<active_size;i++)
{
if(y[i]==+1)
{
if(is_lower_bound(i))
ub1 = Math.min(ub1,G[i]);
else if(is_upper_bound(i))
lb1 = Math.max(lb1,G[i]);
else
{
++nr_free1;
sum_free1 += G[i];
}
}
else
{
if(is_lower_bound(i))
ub2 = Math.min(ub2,G[i]);
else if(is_upper_bound(i))
lb2 = Math.max(lb2,G[i]);
else
{
++nr_free2;
sum_free2 += G[i];
}
}
}
double r1,r2;
if(nr_free1 > 0)
r1 = sum_free1/nr_free1;
else
r1 = (ub1+lb1)/2;
if(nr_free2 > 0)
r2 = sum_free2/nr_free2;
else
r2 = (ub2+lb2)/2;
si.r = (r1+r2)/2;
return (r1-r2)/2;
}
}
//
// Q matrices for various formulations
//
// Q matrix for C-SVC / nu-SVC: Q_ij = y_i * y_j * k(x_i, x_j), with an
// LRU cache of computed columns.
class SVC_Q extends Kernel
{
private final byte[] y;
private final Cache cache;
private final double[] QD; // diagonal, precomputed once
SVC_Q(svm_problem prob, svm_parameter param, byte[] y_)
{
super(prob.l, prob.x, param);
y = (byte[])y_.clone();
// cache_size is in MB; convert to bytes
cache = new Cache(prob.l,(long)(param.cache_size*(1<<20)));
QD = new double[prob.l];
for(int i=0;i<prob.l;i++)
QD[i] = kernel_function(i,i);
}
// Column i of Q; only entries [start, len) missing from the cache are
// recomputed.
float[] get_Q(int i, int len)
{
float[][] data = new float[1][];
int start, j;
if((start = cache.get_data(i,data,len)) < len)
{
for(j=start;j<len;j++)
data[0][j] = (float)(y[i]*y[j]*kernel_function(i,j));
}
return data[0];
}
double[] get_QD()
{
return QD;
}
void swap_index(int i, int j)
{
cache.swap_index(i,j);
super.swap_index(i,j);
do {byte _=y[i]; y[i]=y[j]; y[j]=_;} while(false);
do {double _=QD[i]; QD[i]=QD[j]; QD[j]=_;} while(false);
}
}
// Q matrix for one-class SVM: Q_ij = k(x_i, x_j) (no label factor), with an
// LRU cache of computed columns.
class ONE_CLASS_Q extends Kernel
{
private final Cache cache;
private final double[] QD; // diagonal, precomputed once
ONE_CLASS_Q(svm_problem prob, svm_parameter param)
{
super(prob.l, prob.x, param);
// cache_size is in MB; convert to bytes
cache = new Cache(prob.l,(long)(param.cache_size*(1<<20)));
QD = new double[prob.l];
for(int i=0;i<prob.l;i++)
QD[i] = kernel_function(i,i);
}
// Column i of Q; only the uncached tail [start, len) is recomputed.
float[] get_Q(int i, int len)
{
float[][] data = new float[1][];
int start, j;
if((start = cache.get_data(i,data,len)) < len)
{
for(j=start;j<len;j++)
data[0][j] = (float)kernel_function(i,j);
}
return data[0];
}
double[] get_QD()
{
return QD;
}
void swap_index(int i, int j)
{
cache.swap_index(i,j);
super.swap_index(i,j);
do {double _=QD[i]; QD[i]=QD[j]; QD[j]=_;} while(false);
}
}
// Q matrix for the SVR dual, which has 2*l variables: variable k and k+l
// both refer to data point k with opposite signs.  The cache stores raw
// kernel columns over the l real points; get_Q expands a cached column to
// the 2*l signed layout in one of two rotating buffers.
class SVR_Q extends Kernel
{
private final int l;
private final Cache cache;
private final byte[] sign; // +1 for variables [0,l), -1 for [l,2l)
private final int[] index; // variable -> underlying data point
private int next_buffer;
private float[][] buffer; // two output buffers, used alternately
private final double[] QD;
SVR_Q(svm_problem prob, svm_parameter param)
{
super(prob.l, prob.x, param);
l = prob.l;
// cache_size is in MB; convert to bytes
cache = new Cache(l,(long)(param.cache_size*(1<<20)));
QD = new double[2*l];
sign = new byte[2*l];
index = new int[2*l];
for(int k=0;k<l;k++)
{
sign[k] = 1;
sign[k+l] = -1;
index[k] = k;
index[k+l] = k;
QD[k] = kernel_function(k,k);
QD[k+l] = QD[k];
}
buffer = new float[2][2*l];
next_buffer = 0;
}
// Swaps only the indirection tables; the kernel cache itself stays in
// data-point order.
void swap_index(int i, int j)
{
do {byte _=sign[i]; sign[i]=sign[j]; sign[j]=_;} while(false);
do {int _=index[i]; index[i]=index[j]; index[j]=_;} while(false);
do {double _=QD[i]; QD[i]=QD[j]; QD[j]=_;} while(false);
}
float[] get_Q(int i, int len)
{
float[][] data = new float[1][];
int j, real_i = index[i];
// always cache the full column over the l real points
if(cache.get_data(real_i,data,l) < l)
{
for(j=0;j<l;j++)
data[0][j] = (float)kernel_function(real_i,j);
}
// reorder and copy
float buf[] = buffer[next_buffer];
next_buffer = 1 - next_buffer;
byte si = sign[i];
for(j=0;j<len;j++)
buf[j] = (float) si * sign[j] * data[0][index[j]];
return buf;
}
double[] get_QD()
{
return QD;
}
}
public class svm {
//
// construct and solve various formulations
//
public static final int LIBSVM_VERSION=321;
// Shared RNG used wherever training shuffles data (e.g. cross-validation).
public static final Random rand = new Random();
// Default output sink: write to stdout and flush immediately so progress
// dots appear as they are produced.
private static svm_print_interface svm_print_stdout = new svm_print_interface()
{
public void print(String s)
{
System.out.print(s);
System.out.flush();
}
};
// Currently installed print hook; defaults to stdout.
private static svm_print_interface svm_print_string = svm_print_stdout;
// Route an informational/progress message through the installed print hook.
static void info(String s)
{
svm_print_string.print(s);
}
// Solve the C-SVC dual: min 0.5 a^T Q a - e^T a subject to the box
// [0,Cp]/[0,Cn] per class and y^T a = 0.  On return `alpha` holds the
// signed coefficients y_i * alpha_i; `si` carries rho/obj/bounds.
private static void solve_c_svc(svm_problem prob, svm_parameter param,
                                double[] alpha, Solver.SolutionInfo si,
                                double Cp, double Cn)
{
    int n = prob.l;
    double[] minusOnes = new double[n];
    byte[] labels = new byte[n];

    // Zero starting point, linear term p = -e, binary labels from sign of y.
    for (int k = 0; k < n; k++) {
        alpha[k] = 0;
        minusOnes[k] = -1;
        labels[k] = (prob.y[k] > 0) ? (byte) +1 : (byte) -1;
    }

    new Solver().Solve(n, new SVC_Q(prob, param, labels), minusOnes, labels,
                       alpha, Cp, Cn, param.eps, si, param.shrinking);

    // When the two box constraints coincide, report the equivalent nu.
    double alphaSum = 0;
    for (int k = 0; k < n; k++)
        alphaSum += alpha[k];
    if (Cp == Cn)
        svm.info("nu = " + alphaSum / (Cp * prob.l) + "\n");

    // Fold the labels into alpha so the decision function is a plain sum.
    for (int k = 0; k < n; k++)
        alpha[k] *= labels[k];
}
// Solve the nu-SVC dual with Solver_NU (Cp = Cn = 1), then rescale the
// solution by r = si.r so it matches the C-SVC formulation, where the
// equivalent C is 1/r.
private static void solve_nu_svc(svm_problem prob, svm_parameter param,
                                 double[] alpha, Solver.SolutionInfo si)
{
    int n = prob.l;
    byte[] labels = new byte[n];
    for (int k = 0; k < n; k++)
        labels[k] = (prob.y[k] > 0) ? (byte) +1 : (byte) -1;

    // Feasible start: spread nu*l/2 mass over each class, capping each
    // alpha at 1 (its box bound).
    double sumPos = param.nu * n / 2;
    double sumNeg = param.nu * n / 2;
    for (int k = 0; k < n; k++) {
        if (labels[k] == +1) {
            alpha[k] = Math.min(1.0, sumPos);
            sumPos -= alpha[k];
        } else {
            alpha[k] = Math.min(1.0, sumNeg);
            sumNeg -= alpha[k];
        }
    }

    double[] zeros = new double[n]; // linear term p = 0 in the nu-SVC dual

    Solver_NU s = new Solver_NU();
    s.Solve(n, new SVC_Q(prob, param, labels), zeros, labels,
            alpha, 1.0, 1.0, param.eps, si, param.shrinking);
    double r = si.r;

    svm.info("C = " + 1 / r + "\n");

    // Rescale everything by r to land in the C-SVC parameterization.
    for (int k = 0; k < n; k++)
        alpha[k] *= labels[k] / r;
    si.rho /= r;
    si.obj /= (r * r);
    si.upper_bound_p = 1 / r;
    si.upper_bound_n = 1 / r;
}
// Solve the one-class SVM dual: all labels are +1, linear term is zero,
// and exactly nu*l units of alpha mass start at the upper bound (one
// variable may hold the fractional remainder).
private static void solve_one_class(svm_problem prob, svm_parameter param,
                                    double[] alpha, Solver.SolutionInfo si)
{
    int n = prob.l;
    double[] zeros = new double[n];
    byte[] ones = new byte[n];

    // Feasible start: floor(nu*l) alphas at 1, one fractional, rest zero.
    int bounded = (int) (param.nu * prob.l);
    for (int k = 0; k < bounded; k++)
        alpha[k] = 1;
    if (bounded < prob.l)
        alpha[bounded] = param.nu * prob.l - bounded;
    for (int k = bounded + 1; k < n; k++)
        alpha[k] = 0;

    for (int k = 0; k < n; k++) {
        zeros[k] = 0;
        ones[k] = 1;
    }

    new Solver().Solve(n, new ONE_CLASS_Q(prob, param), zeros, ones,
                       alpha, 1.0, 1.0, param.eps, si, param.shrinking);
}
// Solve the epsilon-SVR dual.  The dual doubles the variables: positions
// k and k+l of alpha2 are the two slack directions of data point k; the
// caller-visible coefficient is their difference.
private static void solve_epsilon_svr(svm_problem prob, svm_parameter param,
                                      double[] alpha, Solver.SolutionInfo si)
{
    int n = prob.l;
    double[] alpha2 = new double[2 * n];
    double[] linear_term = new double[2 * n];
    byte[] y = new byte[2 * n];

    // linear term: p - y_k for the "+" half, p + y_k for the "-" half.
    for (int k = 0; k < n; k++) {
        alpha2[k] = 0;
        linear_term[k] = param.p - prob.y[k];
        y[k] = 1;

        alpha2[k + n] = 0;
        linear_term[k + n] = param.p + prob.y[k];
        y[k + n] = -1;
    }

    new Solver().Solve(2 * n, new SVR_Q(prob, param), linear_term, y,
                       alpha2, param.C, param.C, param.eps, si, param.shrinking);

    // Collapse the paired variables and report the equivalent nu.
    double sum_alpha = 0;
    for (int k = 0; k < n; k++) {
        alpha[k] = alpha2[k] - alpha2[k + n];
        sum_alpha += Math.abs(alpha[k]);
    }
    svm.info("nu = " + sum_alpha / (param.C * n) + "\n");
}
// Solve the nu-SVR dual with Solver_NU.  Like epsilon-SVR the variables
// come in signed pairs (k, k+l); the feasible start spreads C*nu*l/2 mass
// over the pairs, capping each variable at C.  -si.r is the implied epsilon.
private static void solve_nu_svr(svm_problem prob, svm_parameter param,
                                 double[] alpha, Solver.SolutionInfo si)
{
    int n = prob.l;
    double C = param.C;
    double[] alpha2 = new double[2 * n];
    double[] linear_term = new double[2 * n];
    byte[] y = new byte[2 * n];

    double budget = C * param.nu * n / 2;
    for (int k = 0; k < n; k++) {
        // same starting value on both halves of the pair
        alpha2[k] = alpha2[k + n] = Math.min(budget, C);
        budget -= alpha2[k];

        linear_term[k] = -prob.y[k];
        y[k] = 1;
        linear_term[k + n] = prob.y[k];
        y[k + n] = -1;
    }

    Solver_NU s = new Solver_NU();
    s.Solve(2 * n, new SVR_Q(prob, param), linear_term, y,
            alpha2, C, C, param.eps, si, param.shrinking);

    svm.info("epsilon = " + (-si.r) + "\n");

    // Collapse the paired variables into the caller's coefficient array.
    for (int k = 0; k < n; k++)
        alpha[k] = alpha2[k] - alpha2[k + n];
}
//
// decision_function
//
// Result of training one binary sub-problem: the dual coefficients and
// the bias term rho of the decision function sum_i alpha_i K(x_i, x) - rho.
static class decision_function
{
double[] alpha;
double rho;
};
// Train a single binary/regression sub-problem: dispatch to the solver for
// the configured formulation, log statistics, and package the dual
// coefficients together with rho into a decision_function.
static decision_function svm_train_one(
    svm_problem prob, svm_parameter param,
    double Cp, double Cn)
{
    double[] alpha = new double[prob.l];
    Solver.SolutionInfo si = new Solver.SolutionInfo();

    // Dispatch to the formulation-specific solver.
    switch (param.svm_type) {
        case svm_parameter.C_SVC:
            solve_c_svc(prob, param, alpha, si, Cp, Cn);
            break;
        case svm_parameter.NU_SVC:
            solve_nu_svc(prob, param, alpha, si);
            break;
        case svm_parameter.ONE_CLASS:
            solve_one_class(prob, param, alpha, si);
            break;
        case svm_parameter.EPSILON_SVR:
            solve_epsilon_svr(prob, param, alpha, si);
            break;
        case svm_parameter.NU_SVR:
            solve_nu_svr(prob, param, alpha, si);
            break;
    }

    svm.info("obj = " + si.obj + ", rho = " + si.rho + "\n");

    // Count support vectors (alpha != 0) and bounded SVs (alpha at the box
    // bound for the point's class).
    int nSV = 0;
    int nBSV = 0;
    for (int k = 0; k < prob.l; k++) {
        if (Math.abs(alpha[k]) > 0) {
            ++nSV;
            double bound = (prob.y[k] > 0) ? si.upper_bound_p : si.upper_bound_n;
            if (Math.abs(alpha[k]) >= bound)
                ++nBSV;
        }
    }

    svm.info("nSV = " + nSV + ", nBSV = " + nBSV + "\n");

    decision_function f = new decision_function();
    f.alpha = alpha;
    f.rho = si.rho;
    return f;
}
// Platt's binary SVM Probabilistic Output: an improvement from Lin et al.
// Fit Platt's sigmoid P(y=1|f) = 1/(1+exp(A*f+B)) to decision values by
// regularized maximum likelihood, using Newton's method with backtracking
// line search (the improved algorithm of Lin, Lin & Weng).  Results are
// returned in probAB = {A, B}.
private static void sigmoid_train(int l, double[] dec_values, double[] labels,
double[] probAB)
{
double A, B;
double prior1=0, prior0 = 0;
int i;
// class counts determine the smoothed target probabilities below
for (i=0;i<l;i++)
if (labels[i] > 0) prior1+=1;
else prior0+=1;
int max_iter=100; // Maximal number of iterations
double min_step=1e-10; // Minimal step taken in line search
double sigma=1e-12; // For numerically strict PD of Hessian
double eps=1e-5;
// smoothed targets instead of hard 0/1 labels (regularization)
double hiTarget=(prior1+1.0)/(prior1+2.0);
double loTarget=1/(prior0+2.0);
double[] t= new double[l];
double fApB,p,q,h11,h22,h21,g1,g2,det,dA,dB,gd,stepsize;
double newA,newB,newf,d1,d2;
int iter;
// Initial Point and Initial Fun Value
A=0.0; B=Math.log((prior0+1.0)/(prior1+1.0));
double fval = 0.0;
for (i=0;i<l;i++)
{
if (labels[i]>0) t[i]=hiTarget;
else t[i]=loTarget;
fApB = dec_values[i]*A+B;
// two algebraically equal forms, chosen to avoid overflow in exp()
if (fApB>=0)
fval += t[i]*fApB + Math.log(1+Math.exp(-fApB));
else
fval += (t[i] - 1)*fApB +Math.log(1+Math.exp(fApB));
}
for (iter=0;iter<max_iter;iter++)
{
// Update Gradient and Hessian (use H' = H + sigma I)
h11=sigma; // numerically ensures strict PD
h22=sigma;
h21=0.0;g1=0.0;g2=0.0;
for (i=0;i<l;i++)
{
fApB = dec_values[i]*A+B;
// p = sigmoid, q = 1-p, both in the overflow-safe form
if (fApB >= 0)
{
p=Math.exp(-fApB)/(1.0+Math.exp(-fApB));
q=1.0/(1.0+Math.exp(-fApB));
}
else
{
p=1.0/(1.0+Math.exp(fApB));
q=Math.exp(fApB)/(1.0+Math.exp(fApB));
}
d2=p*q;
h11+=dec_values[i]*dec_values[i]*d2;
h22+=d2;
h21+=dec_values[i]*d2;
d1=t[i]-p;
g1+=dec_values[i]*d1;
g2+=d1;
}
// Stopping Criteria
if (Math.abs(g1)<eps && Math.abs(g2)<eps)
break;
// Finding Newton direction: -inv(H') * g
det=h11*h22-h21*h21;
dA=-(h22*g1 - h21 * g2) / det;
dB=-(-h21*g1+ h11 * g2) / det;
gd=g1*dA+g2*dB;
stepsize = 1; // Line Search
while (stepsize >= min_step)
{
newA = A + stepsize * dA;
newB = B + stepsize * dB;
// New function value
newf = 0.0;
for (i=0;i<l;i++)
{
fApB = dec_values[i]*newA+newB;
if (fApB >= 0)
newf += t[i]*fApB + Math.log(1+Math.exp(-fApB));
else
newf += (t[i] - 1)*fApB +Math.log(1+Math.exp(fApB));
}
// Check sufficient decrease
if (newf<fval+0.0001*stepsize*gd)
{
A=newA;B=newB;fval=newf;
break;
}
else
stepsize = stepsize / 2.0;
}
if (stepsize < min_step)
{
svm.info("Line search fails in two-class probability estimates\n");
break;
}
}
if (iter>=max_iter)
svm.info("Reaching maximal iterations in two-class probability estimates\n");
probAB[0]=A;probAB[1]=B;
}
// Map a raw decision value through the fitted Platt sigmoid (parameters
// A, B), evaluating 1/(1+exp(A*f+B)) in the numerically stable form for
// either sign of the exponent.
private static double sigmoid_predict(double decision_value, double A, double B)
{
    double z = decision_value * A + B;
    if (z < 0)
        return 1.0 / (1 + Math.exp(z));
    return Math.exp(-z) / (1.0 + Math.exp(-z));
}
// Method 2 from the multiclass_prob paper by Wu, Lin, and Weng
// Method 2 from the multiclass_prob paper by Wu, Lin, and Weng:
// combine the k*(k-1)/2 pairwise probabilities r[i][j] into class
// probabilities p[] by iteratively solving the fixed-point system
// Q p = p^T Q p * e, renormalizing after each coordinate update.
private static void multiclass_probability(int k, double[][] r, double[] p)
{
int t,j;
int iter = 0, max_iter=Math.max(100,k);
double[][] Q=new double[k][k];
double[] Qp=new double[k];
double pQp, eps=0.005/k;
// build the symmetric system matrix Q from the pairwise estimates
for (t=0;t<k;t++)
{
p[t]=1.0/k; // Valid if k = 1
Q[t][t]=0;
for (j=0;j<t;j++)
{
Q[t][t]+=r[j][t]*r[j][t];
Q[t][j]=Q[j][t];
}
for (j=t+1;j<k;j++)
{
Q[t][t]+=r[j][t]*r[j][t];
Q[t][j]=-r[j][t]*r[t][j];
}
}
for (iter=0;iter<max_iter;iter++)
{
// stopping condition, recalculate QP,pQP for numerical accuracy
pQp=0;
for (t=0;t<k;t++)
{
Qp[t]=0;
for (j=0;j<k;j++)
Qp[t]+=Q[t][j]*p[j];
pQp+=p[t]*Qp[t];
}
double max_error=0;
for (t=0;t<k;t++)
{
double error=Math.abs(Qp[t]-pQp);
if (error>max_error)
max_error=error;
}
if (max_error<eps) break;
// one Gauss-Seidel sweep: adjust each p[t], keep Qp/pQp incremental,
// then renormalize by (1+diff)
for (t=0;t<k;t++)
{
double diff=(-Qp[t]+pQp)/Q[t][t];
p[t]+=diff;
pQp=(pQp+diff*(diff*Q[t][t]+2*Qp[t]))/(1+diff)/(1+diff);
for (j=0;j<k;j++)
{
Qp[j]=(Qp[j]+diff*Q[t][j])/(1+diff);
p[j]/=(1+diff);
}
}
}
if (iter>=max_iter)
svm.info("Exceeds max_iter in multiclass_prob\n");
}
// Cross-validation decision values for probability estimates
// Cross-validation decision values for probability estimates: gather
// out-of-fold decision values via 5-fold CV (training each fold with
// fixed C=1 and the class weights Cp/Cn), then fit the Platt sigmoid
// parameters into probAB.
private static void svm_binary_svc_probability(svm_problem prob, svm_parameter param, double Cp, double Cn, double[] probAB)
{
int i;
int nr_fold = 5;
int[] perm = new int[prob.l];
double[] dec_values = new double[prob.l];
// random shuffle
for(i=0;i<prob.l;i++) perm[i]=i;
for(i=0;i<prob.l;i++)
{
int j = i+rand.nextInt(prob.l-i);
do {int _=perm[i]; perm[i]=perm[j]; perm[j]=_;} while(false);
}
for(i=0;i<nr_fold;i++)
{
// fold i holds out perm[begin..end); the rest is the training set
int begin = i*prob.l/nr_fold;
int end = (i+1)*prob.l/nr_fold;
int j,k;
svm_problem subprob = new svm_problem();
subprob.l = prob.l-(end-begin);
subprob.x = new svm_node[subprob.l][];
subprob.y = new double[subprob.l];
k=0;
for(j=0;j<begin;j++)
{
subprob.x[k] = prob.x[perm[j]];
subprob.y[k] = prob.y[perm[j]];
++k;
}
for(j=end;j<prob.l;j++)
{
subprob.x[k] = prob.x[perm[j]];
subprob.y[k] = prob.y[perm[j]];
++k;
}
// degenerate folds (a class is absent) get constant decision values
int p_count=0,n_count=0;
for(j=0;j<k;j++)
if(subprob.y[j]>0)
p_count++;
else
n_count++;
if(p_count==0 && n_count==0)
for(j=begin;j<end;j++)
dec_values[perm[j]] = 0;
else if(p_count > 0 && n_count == 0)
for(j=begin;j<end;j++)
dec_values[perm[j]] = 1;
else if(p_count == 0 && n_count > 0)
for(j=begin;j<end;j++)
dec_values[perm[j]] = -1;
else
{
// train on the fold complement with C fixed to 1 and the class
// weights carrying Cp/Cn; probability is off to avoid recursion
svm_parameter subparam = (svm_parameter)param.clone();
subparam.probability=0;
subparam.C=1.0;
subparam.nr_weight=2;
subparam.weight_label = new int[2];
subparam.weight = new double[2];
subparam.weight_label[0]=+1;
subparam.weight_label[1]=-1;
subparam.weight[0]=Cp;
subparam.weight[1]=Cn;
svm_model submodel = svm_train(subprob,subparam);
for(j=begin;j<end;j++)
{
double[] dec_value=new double[1];
svm_predict_values(submodel,prob.x[perm[j]],dec_value);
dec_values[perm[j]]=dec_value[0];
// ensure +1 -1 order; reason not using CV subroutine
dec_values[perm[j]] *= submodel.label[0];
}
}
}
sigmoid_train(prob.l,dec_values,prob.y,probAB);
}
// Return parameter of a Laplace distribution
// Return the scale parameter sigma of a Laplace noise model for SVR
// residuals, estimated by 5-fold cross-validation.  The mean absolute
// residual is computed twice: the second pass discards residuals beyond
// five "standard deviations" of the first estimate, making it robust.
private static double svm_svr_probability(svm_problem prob, svm_parameter param)
{
    int nr_fold = 5;
    double[] ymv = new double[prob.l];

    // Cross-validated predictions; probability is turned off to avoid
    // recursing into this routine.
    svm_parameter newparam = (svm_parameter) param.clone();
    newparam.probability = 0;
    svm_cross_validation(prob, newparam, nr_fold, ymv);

    // First pass: plain mean absolute residual.
    double mae = 0;
    for (int k = 0; k < prob.l; k++) {
        ymv[k] = prob.y[k] - ymv[k];
        mae += Math.abs(ymv[k]);
    }
    mae /= prob.l;

    // Second pass: re-average, excluding residuals beyond 5 sigma.
    double std = Math.sqrt(2 * mae * mae);
    int count = 0;
    mae = 0;
    for (int k = 0; k < prob.l; k++) {
        if (Math.abs(ymv[k]) > 5 * std)
            count = count + 1;
        else
            mae += Math.abs(ymv[k]);
    }
    mae /= (prob.l - count);

    svm.info("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma=" + mae + "\n");
    return mae;
}
// label: label name, start: begin of each class, count: #data of classes, perm: indices to the original data
// perm, length l, must be allocated before calling this subroutine
// label: label name, start: begin of each class, count: #data of classes, perm: indices to the original data
// perm, length l, must be allocated before calling this subroutine
// Groups training points by class: discovers the distinct labels (growing
// the tables as needed), then fills perm so that points of class c occupy
// perm[start[c] .. start[c]+count[c]).  Results are returned through the
// single-element "out parameter" arrays.
private static void svm_group_classes(svm_problem prob, int[] nr_class_ret, int[][] label_ret, int[][] start_ret, int[][] count_ret, int[] perm)
{
int l = prob.l;
int max_nr_class = 16;
int nr_class = 0;
int[] label = new int[max_nr_class];
int[] count = new int[max_nr_class];
int[] data_label = new int[l]; // class index (not label value) per point
int i;
for(i=0;i<l;i++)
{
int this_label = (int)(prob.y[i]);
int j;
for(j=0;j<nr_class;j++)
{
if(this_label == label[j])
{
++count[j];
break;
}
}
data_label[i] = j;
if(j == nr_class)
{
// unseen label: grow the tables if full, then register it
if(nr_class == max_nr_class)
{
max_nr_class *= 2;
int[] new_data = new int[max_nr_class];
System.arraycopy(label,0,new_data,0,label.length);
label = new_data;
new_data = new int[max_nr_class];
System.arraycopy(count,0,new_data,0,count.length);
count = new_data;
}
label[nr_class] = this_label;
count[nr_class] = 1;
++nr_class;
}
}
//
// Labels are ordered by their first occurrence in the training set.
// However, for two-class sets with -1/+1 labels and -1 appears first,
// we swap labels to ensure that internally the binary SVM has positive data corresponding to the +1 instances.
//
if (nr_class == 2 && label[0] == -1 && label[1] == +1)
{
do {int _=label[0]; label[0]=label[1]; label[1]=_;} while(false);
do {int _=count[0]; count[0]=count[1]; count[1]=_;} while(false);
for(i=0;i<l;i++)
{
if(data_label[i] == 0)
data_label[i] = 1;
else
data_label[i] = 0;
}
}
// counting sort of the point indices into perm; start[] is used as a
// moving cursor here, so it must be rebuilt afterwards
int[] start = new int[nr_class];
start[0] = 0;
for(i=1;i<nr_class;i++)
start[i] = start[i-1]+count[i-1];
for(i=0;i<l;i++)
{
perm[start[data_label[i]]] = i;
++start[data_label[i]];
}
start[0] = 0;
for(i=1;i<nr_class;i++)
start[i] = start[i-1]+count[i-1];
nr_class_ret[0] = nr_class;
label_ret[0] = label;
start_ret[0] = start;
count_ret[0] = count;
}
//
// Interface functions
//
	// Trains an SVM model on prob with the given parameters.
	// For one-class / epsilon-SVR / nu-SVR a single decision function is trained;
	// for C-SVC / nu-SVC one binary classifier is trained per class pair
	// (one-vs-one, k*(k-1)/2 problems) and their support vectors are merged.
	public static svm_model svm_train(svm_problem prob, svm_parameter param)
	{
		svm_model model = new svm_model();
		model.param = param;
		if(param.svm_type == svm_parameter.ONE_CLASS ||
		   param.svm_type == svm_parameter.EPSILON_SVR ||
		   param.svm_type == svm_parameter.NU_SVR)
		{
			// regression or one-class-svm: internally treated as a 2-class problem
			model.nr_class = 2;
			model.label = null;
			model.nSV = null;
			model.probA = null; model.probB = null;
			model.sv_coef = new double[1][];
			if(param.probability == 1 &&
			   (param.svm_type == svm_parameter.EPSILON_SVR ||
			    param.svm_type == svm_parameter.NU_SVR))
			{
				// probA[0] holds the Laplace-noise scale estimated by cross validation
				model.probA = new double[1];
				model.probA[0] = svm_svr_probability(prob,param);
			}
			decision_function f = svm_train_one(prob,param,0,0);
			model.rho = new double[1];
			model.rho[0] = f.rho;
			// keep only the examples with non-zero alpha (the support vectors)
			int nSV = 0;
			int i;
			for(i=0;i<prob.l;i++)
				if(Math.abs(f.alpha[i]) > 0) ++nSV;
			model.l = nSV;
			model.SV = new svm_node[nSV][];
			model.sv_coef[0] = new double[nSV];
			model.sv_indices = new int[nSV];
			int j = 0;
			for(i=0;i<prob.l;i++)
				if(Math.abs(f.alpha[i]) > 0)
				{
					model.SV[j] = prob.x[i];
					model.sv_coef[0][j] = f.alpha[i];
					model.sv_indices[j] = i+1;	// 1-based index into the training set
					++j;
				}
		}
		else
		{
			// classification
			int l = prob.l;
			int[] tmp_nr_class = new int[1];
			int[][] tmp_label = new int[1][];
			int[][] tmp_start = new int[1][];
			int[][] tmp_count = new int[1][];
			int[] perm = new int[l];
			// group training data of the same class
			svm_group_classes(prob,tmp_nr_class,tmp_label,tmp_start,tmp_count,perm);
			int nr_class = tmp_nr_class[0];
			int[] label = tmp_label[0];
			int[] start = tmp_start[0];
			int[] count = tmp_count[0];
			if(nr_class == 1)
				svm.info("WARNING: training data in only one class. See README for details.\n");
			// x is prob.x reordered so each class is contiguous
			svm_node[][] x = new svm_node[l][];
			int i;
			for(i=0;i<l;i++)
				x[i] = prob.x[perm[i]];
			// calculate weighted C (per-class penalty = C * user weight)
			double[] weighted_C = new double[nr_class];
			for(i=0;i<nr_class;i++)
				weighted_C[i] = param.C;
			for(i=0;i<param.nr_weight;i++)
			{
				int j;
				for(j=0;j<nr_class;j++)
					if(param.weight_label[i] == label[j])
						break;
				if(j == nr_class)
					System.err.print("WARNING: class label "+param.weight_label[i]+" specified in weight is not found\n");
				else
					weighted_C[j] *= param.weight[i];
			}
			// train k*(k-1)/2 models
			boolean[] nonzero = new boolean[l];	// true if example i is an SV of any pairwise model
			for(i=0;i<l;i++)
				nonzero[i] = false;
			decision_function[] f = new decision_function[nr_class*(nr_class-1)/2];
			double[] probA=null,probB=null;
			if (param.probability == 1)
			{
				probA=new double[nr_class*(nr_class-1)/2];
				probB=new double[nr_class*(nr_class-1)/2];
			}
			int p = 0;	// index over class pairs (i,j), i<j
			for(i=0;i<nr_class;i++)
				for(int j=i+1;j<nr_class;j++)
				{
					// build the binary subproblem: class i -> +1, class j -> -1
					svm_problem sub_prob = new svm_problem();
					int si = start[i], sj = start[j];
					int ci = count[i], cj = count[j];
					sub_prob.l = ci+cj;
					sub_prob.x = new svm_node[sub_prob.l][];
					sub_prob.y = new double[sub_prob.l];
					int k;
					for(k=0;k<ci;k++)
					{
						sub_prob.x[k] = x[si+k];
						sub_prob.y[k] = +1;
					}
					for(k=0;k<cj;k++)
					{
						sub_prob.x[ci+k] = x[sj+k];
						sub_prob.y[ci+k] = -1;
					}
					if(param.probability == 1)
					{
						double[] probAB=new double[2];
						svm_binary_svc_probability(sub_prob,param,weighted_C[i],weighted_C[j],probAB);
						probA[p]=probAB[0];
						probB[p]=probAB[1];
					}
					f[p] = svm_train_one(sub_prob,param,weighted_C[i],weighted_C[j]);
					for(k=0;k<ci;k++)
						if(!nonzero[si+k] && Math.abs(f[p].alpha[k]) > 0)
							nonzero[si+k] = true;
					for(k=0;k<cj;k++)
						if(!nonzero[sj+k] && Math.abs(f[p].alpha[ci+k]) > 0)
							nonzero[sj+k] = true;
					++p;
				}
			// build output
			model.nr_class = nr_class;
			model.label = new int[nr_class];
			for(i=0;i<nr_class;i++)
				model.label[i] = label[i];
			model.rho = new double[nr_class*(nr_class-1)/2];
			for(i=0;i<nr_class*(nr_class-1)/2;i++)
				model.rho[i] = f[i].rho;
			if(param.probability == 1)
			{
				model.probA = new double[nr_class*(nr_class-1)/2];
				model.probB = new double[nr_class*(nr_class-1)/2];
				for(i=0;i<nr_class*(nr_class-1)/2;i++)
				{
					model.probA[i] = probA[i];
					model.probB[i] = probB[i];
				}
			}
			else
			{
				model.probA=null;
				model.probB=null;
			}
			// count support vectors, total (nnz) and per class
			int nnz = 0;
			int[] nz_count = new int[nr_class];
			model.nSV = new int[nr_class];
			for(i=0;i<nr_class;i++)
			{
				int nSV = 0;
				for(int j=0;j<count[i];j++)
					if(nonzero[start[i]+j])
					{
						++nSV;
						++nnz;
					}
				model.nSV[i] = nSV;
				nz_count[i] = nSV;
			}
			svm.info("Total nSV = "+nnz+"\n");
			model.l = nnz;
			model.SV = new svm_node[nnz][];
			model.sv_indices = new int[nnz];
			p = 0;
			for(i=0;i<l;i++)
				if(nonzero[i])
				{
					model.SV[p] = x[i];
					model.sv_indices[p++] = perm[i] + 1;	// 1-based original index
				}
			int[] nz_start = new int[nr_class];	// start of each class inside the merged SV list
			nz_start[0] = 0;
			for(i=1;i<nr_class;i++)
				nz_start[i] = nz_start[i-1]+nz_count[i-1];
			model.sv_coef = new double[nr_class-1][];
			for(i=0;i<nr_class-1;i++)
				model.sv_coef[i] = new double[nnz];
			p = 0;
			for(i=0;i<nr_class;i++)
				for(int j=i+1;j<nr_class;j++)
				{
					// classifier (i,j): coefficients with
					// i are in sv_coef[j-1][nz_start[i]...],
					// j are in sv_coef[i][nz_start[j]...]
					int si = start[i];
					int sj = start[j];
					int ci = count[i];
					int cj = count[j];
					int q = nz_start[i];
					int k;
					for(k=0;k<ci;k++)
						if(nonzero[si+k])
							model.sv_coef[j-1][q++] = f[p].alpha[k];
					q = nz_start[j];
					for(k=0;k<cj;k++)
						if(nonzero[sj+k])
							model.sv_coef[i][q++] = f[p].alpha[ci+k];
					++p;
				}
		}
		return model;
	}
// Stratified cross validation
	// Stratified cross validation.
	// Splits prob into nr_fold folds (stratified by class for C-SVC/nu-SVC),
	// trains on nr_fold-1 folds and predicts the held-out fold; predictions are
	// written into target[] at each example's original position.
	public static void svm_cross_validation(svm_problem prob, svm_parameter param, int nr_fold, double[] target)
	{
		int i;
		int[] fold_start = new int[nr_fold+1];
		int l = prob.l;
		int[] perm = new int[l];
		// stratified cv may not give leave-one-out rate
		// Each class to l folds -> some folds may have zero elements
		if((param.svm_type == svm_parameter.C_SVC ||
		    param.svm_type == svm_parameter.NU_SVC) && nr_fold < l)
		{
			int[] tmp_nr_class = new int[1];
			int[][] tmp_label = new int[1][];
			int[][] tmp_start = new int[1][];
			int[][] tmp_count = new int[1][];
			svm_group_classes(prob,tmp_nr_class,tmp_label,tmp_start,tmp_count,perm);
			int nr_class = tmp_nr_class[0];
			int[] start = tmp_start[0];
			int[] count = tmp_count[0];
			// random shuffle and then data grouped by fold using the array perm
			int[] fold_count = new int[nr_fold];
			int c;
			int[] index = new int[l];
			for(i=0;i<l;i++)
				index[i]=perm[i];
			// Fisher-Yates shuffle within each class slice
			for (c=0; c<nr_class; c++)
				for(i=0;i<count[c];i++)
				{
					int j = i+rand.nextInt(count[c]-i);
					do {int _=index[start[c]+j]; index[start[c]+j]=index[start[c]+i]; index[start[c]+i]=_;} while(false);
				}
			// size each fold so every class is split as evenly as integer division allows
			for(i=0;i<nr_fold;i++)
			{
				fold_count[i] = 0;
				for (c=0; c<nr_class;c++)
					fold_count[i]+=(i+1)*count[c]/nr_fold-i*count[c]/nr_fold;
			}
			fold_start[0]=0;
			for (i=1;i<=nr_fold;i++)
				fold_start[i] = fold_start[i-1]+fold_count[i-1];
			for (c=0; c<nr_class;c++)
				for(i=0;i<nr_fold;i++)
				{
					int begin = start[c]+i*count[c]/nr_fold;
					int end = start[c]+(i+1)*count[c]/nr_fold;
					for(int j=begin;j<end;j++)
					{
						perm[fold_start[i]] = index[j];
						fold_start[i]++;
					}
				}
			// fold_start[] was consumed as a cursor above; rebuild it
			fold_start[0]=0;
			for (i=1;i<=nr_fold;i++)
				fold_start[i] = fold_start[i-1]+fold_count[i-1];
		}
		else
		{
			// non-stratified: plain shuffle and equal-size folds
			for(i=0;i<l;i++) perm[i]=i;
			for(i=0;i<l;i++)
			{
				int j = i+rand.nextInt(l-i);
				do {int _=perm[i]; perm[i]=perm[j]; perm[j]=_;} while(false);
			}
			for(i=0;i<=nr_fold;i++)
				fold_start[i]=i*l/nr_fold;
		}
		for(i=0;i<nr_fold;i++)
		{
			int begin = fold_start[i];
			int end = fold_start[i+1];
			int j,k;
			// subproblem = all data except fold i
			svm_problem subprob = new svm_problem();
			subprob.l = l-(end-begin);
			subprob.x = new svm_node[subprob.l][];
			subprob.y = new double[subprob.l];
			k=0;
			for(j=0;j<begin;j++)
			{
				subprob.x[k] = prob.x[perm[j]];
				subprob.y[k] = prob.y[perm[j]];
				++k;
			}
			for(j=end;j<l;j++)
			{
				subprob.x[k] = prob.x[perm[j]];
				subprob.y[k] = prob.y[perm[j]];
				++k;
			}
			svm_model submodel = svm_train(subprob,param);
			if(param.probability==1 &&
			   (param.svm_type == svm_parameter.C_SVC ||
			    param.svm_type == svm_parameter.NU_SVC))
			{
				double[] prob_estimates= new double[svm_get_nr_class(submodel)];
				for(j=begin;j<end;j++)
					target[perm[j]] = svm_predict_probability(submodel,prob.x[perm[j]],prob_estimates);
			}
			else
				for(j=begin;j<end;j++)
					target[perm[j]] = svm_predict(submodel,prob.x[perm[j]]);
		}
	}
public static int svm_get_svm_type(svm_model model)
{
return model.param.svm_type;
}
public static int svm_get_nr_class(svm_model model)
{
return model.nr_class;
}
public static void svm_get_labels(svm_model model, int[] label)
{
if (model.label != null)
for(int i=0;i<model.nr_class;i++)
label[i] = model.label[i];
}
public static void svm_get_sv_indices(svm_model model, int[] indices)
{
if (model.sv_indices != null)
for(int i=0;i<model.l;i++)
indices[i] = model.sv_indices[i];
}
public static int svm_get_nr_sv(svm_model model)
{
return model.l;
}
public static double svm_get_svr_probability(svm_model model)
{
if ((model.param.svm_type == svm_parameter.EPSILON_SVR || model.param.svm_type == svm_parameter.NU_SVR) &&
model.probA!=null)
return model.probA[0];
else
{
System.err.print("Model doesn't contain information for SVR probability inference\n");
return 0;
}
}
	// Computes the raw decision value(s) for x and returns the prediction.
	// One-class / SVR: dec_values[0] receives the single decision value;
	// returns +1/-1 for one-class, the regression value otherwise.
	// Classification: dec_values receives all k*(k-1)/2 pairwise decision
	// values; returns the label chosen by pairwise voting.
	public static double svm_predict_values(svm_model model, svm_node[] x, double[] dec_values)
	{
		int i;
		if(model.param.svm_type == svm_parameter.ONE_CLASS ||
		   model.param.svm_type == svm_parameter.EPSILON_SVR ||
		   model.param.svm_type == svm_parameter.NU_SVR)
		{
			// single decision function: sum_i coef_i * K(x, SV_i) - rho
			double[] sv_coef = model.sv_coef[0];
			double sum = 0;
			for(i=0;i<model.l;i++)
				sum += sv_coef[i] * Kernel.k_function(x,model.SV[i],model.param);
			sum -= model.rho[0];
			dec_values[0] = sum;
			if(model.param.svm_type == svm_parameter.ONE_CLASS)
				return (sum>0)?1:-1;
			else
				return sum;
		}
		else
		{
			int nr_class = model.nr_class;
			int l = model.l;
			// evaluate the kernel against every SV once, reuse across all pairs
			double[] kvalue = new double[l];
			for(i=0;i<l;i++)
				kvalue[i] = Kernel.k_function(x,model.SV[i],model.param);
			int[] start = new int[nr_class];	// start of each class's SVs in model.SV
			start[0] = 0;
			for(i=1;i<nr_class;i++)
				start[i] = start[i-1]+model.nSV[i-1];
			int[] vote = new int[nr_class];
			for(i=0;i<nr_class;i++)
				vote[i] = 0;
			int p=0;	// pair index, same ordering as in svm_train
			for(i=0;i<nr_class;i++)
				for(int j=i+1;j<nr_class;j++)
				{
					double sum = 0;
					int si = start[i];
					int sj = start[j];
					int ci = model.nSV[i];
					int cj = model.nSV[j];
					int k;
					double[] coef1 = model.sv_coef[j-1];
					double[] coef2 = model.sv_coef[i];
					for(k=0;k<ci;k++)
						sum += coef1[si+k] * kvalue[si+k];
					for(k=0;k<cj;k++)
						sum += coef2[sj+k] * kvalue[sj+k];
					sum -= model.rho[p];
					dec_values[p] = sum;
					if(dec_values[p] > 0)
						++vote[i];
					else
						++vote[j];
					p++;
				}
			// majority vote; ties resolved in favor of the smaller class index
			int vote_max_idx = 0;
			for(i=1;i<nr_class;i++)
				if(vote[i] > vote[vote_max_idx])
					vote_max_idx = i;
			return model.label[vote_max_idx];
		}
	}
public static double svm_predict(svm_model model, svm_node[] x)
{
int nr_class = model.nr_class;
double[] dec_values;
if(model.param.svm_type == svm_parameter.ONE_CLASS ||
model.param.svm_type == svm_parameter.EPSILON_SVR ||
model.param.svm_type == svm_parameter.NU_SVR)
dec_values = new double[1];
else
dec_values = new double[nr_class*(nr_class-1)/2];
double pred_result = svm_predict_values(model, x, dec_values);
return pred_result;
}
	// Predicts the label of x and fills prob_estimates (length nr_class)
	// with per-class probabilities, ordered as model.label.
	// Falls back to svm_predict (leaving prob_estimates untouched) when the
	// model has no probability information.
	public static double svm_predict_probability(svm_model model, svm_node[] x, double[] prob_estimates)
	{
		if ((model.param.svm_type == svm_parameter.C_SVC || model.param.svm_type == svm_parameter.NU_SVC) &&
		    model.probA!=null && model.probB!=null)
		{
			int i;
			int nr_class = model.nr_class;
			double[] dec_values = new double[nr_class*(nr_class-1)/2];
			svm_predict_values(model, x, dec_values);
			// clamp pairwise probabilities away from 0 and 1 for numerical stability
			double min_prob=1e-7;
			double[][] pairwise_prob=new double[nr_class][nr_class];
			int k=0;
			for(i=0;i<nr_class;i++)
				for(int j=i+1;j<nr_class;j++)
				{
					pairwise_prob[i][j]=Math.min(Math.max(sigmoid_predict(dec_values[k],model.probA[k],model.probB[k]),min_prob),1-min_prob);
					pairwise_prob[j][i]=1-pairwise_prob[i][j];
					k++;
				}
			// couple the pairwise probabilities into per-class estimates
			multiclass_probability(nr_class,pairwise_prob,prob_estimates);
			int prob_max_idx = 0;
			for(i=1;i<nr_class;i++)
				if(prob_estimates[i] > prob_estimates[prob_max_idx])
					prob_max_idx = i;
			return model.label[prob_max_idx];
		}
		else
			return svm_predict(model, x);
	}
	// Textual names used in the model file format; array position must match
	// the corresponding svm_parameter constant (C_SVC=0, NU_SVC=1, ...).
	static final String svm_type_table[] =
	{
		"c_svc","nu_svc","one_class","epsilon_svr","nu_svr",
	};
	// Kernel names; position matches LINEAR=0, POLY=1, RBF=2, SIGMOID=3, PRECOMPUTED=4.
	static final String kernel_type_table[]=
	{
		"linear","polynomial","rbf","sigmoid","precomputed"
	};
public static void svm_save_model(String model_file_name, svm_model model) throws IOException
{
DataOutputStream fp = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(model_file_name)));
svm_parameter param = model.param;
fp.writeBytes("svm_type "+svm_type_table[param.svm_type]+"\n");
fp.writeBytes("kernel_type "+kernel_type_table[param.kernel_type]+"\n");
if(param.kernel_type == svm_parameter.POLY)
fp.writeBytes("degree "+param.degree+"\n");
if(param.kernel_type == svm_parameter.POLY ||
param.kernel_type == svm_parameter.RBF ||
param.kernel_type == svm_parameter.SIGMOID)
fp.writeBytes("gamma "+param.gamma+"\n");
if(param.kernel_type == svm_parameter.POLY ||
param.kernel_type == svm_parameter.SIGMOID)
fp.writeBytes("coef0 "+param.coef0+"\n");
int nr_class = model.nr_class;
int l = model.l;
fp.writeBytes("nr_class "+nr_class+"\n");
fp.writeBytes("total_sv "+l+"\n");
{
fp.writeBytes("rho");
for(int i=0;i<nr_class*(nr_class-1)/2;i++)
fp.writeBytes(" "+model.rho[i]);
fp.writeBytes("\n");
}
if(model.label != null)
{
fp.writeBytes("label");
for(int i=0;i<nr_class;i++)
fp.writeBytes(" "+model.label[i]);
fp.writeBytes("\n");
}
if(model.probA != null) // regression has probA only
{
fp.writeBytes("probA");
for(int i=0;i<nr_class*(nr_class-1)/2;i++)
fp.writeBytes(" "+model.probA[i]);
fp.writeBytes("\n");
}
if(model.probB != null)
{
fp.writeBytes("probB");
for(int i=0;i<nr_class*(nr_class-1)/2;i++)
fp.writeBytes(" "+model.probB[i]);
fp.writeBytes("\n");
}
if(model.nSV != null)
{
fp.writeBytes("nr_sv");
for(int i=0;i<nr_class;i++)
fp.writeBytes(" "+model.nSV[i]);
fp.writeBytes("\n");
}
fp.writeBytes("SV\n");
double[][] sv_coef = model.sv_coef;
svm_node[][] SV = model.SV;
for(int i=0;i<l;i++)
{
for(int j=0;j<nr_class-1;j++)
fp.writeBytes(sv_coef[j][i]+" ");
svm_node[] p = SV[i];
if(param.kernel_type == svm_parameter.PRECOMPUTED)
fp.writeBytes("0:"+(int)(p[0].value));
else
for(int j=0;j<p.length;j++)
fp.writeBytes(p[j].index+":"+p[j].value+" ");
fp.writeBytes("\n");
}
fp.close();
}
private static double atof(String s)
{
return Double.valueOf(s).doubleValue();
}
private static int atoi(String s)
{
return Integer.parseInt(s);
}
	// Parses the header section of a LIBSVM model file from fp into model,
	// stopping after the "SV" marker line (the SV block itself is read by the
	// caller). Returns false on any malformed or unrecognized header line;
	// any I/O or parse exception is also reported as false.
	private static boolean read_model_header(BufferedReader fp, svm_model model)
	{
		svm_parameter param = new svm_parameter();
		model.param = param;
		try
		{
			while(true)
			{
				// each header line is "keyword value(s)"
				String cmd = fp.readLine();
				String arg = cmd.substring(cmd.indexOf(' ')+1);
				if(cmd.startsWith("svm_type"))
				{
					int i;
					for(i=0;i<svm_type_table.length;i++)
					{
						if(arg.indexOf(svm_type_table[i])!=-1)
						{
							param.svm_type=i;
							break;
						}
					}
					if(i == svm_type_table.length)
					{
						System.err.print("unknown svm type.\n");
						return false;
					}
				}
				else if(cmd.startsWith("kernel_type"))
				{
					int i;
					for(i=0;i<kernel_type_table.length;i++)
					{
						if(arg.indexOf(kernel_type_table[i])!=-1)
						{
							param.kernel_type=i;
							break;
						}
					}
					if(i == kernel_type_table.length)
					{
						System.err.print("unknown kernel function.\n");
						return false;
					}
				}
				else if(cmd.startsWith("degree"))
					param.degree = atoi(arg);
				else if(cmd.startsWith("gamma"))
					param.gamma = atof(arg);
				else if(cmd.startsWith("coef0"))
					param.coef0 = atof(arg);
				else if(cmd.startsWith("nr_class"))
					model.nr_class = atoi(arg);
				else if(cmd.startsWith("total_sv"))
					model.l = atoi(arg);
				else if(cmd.startsWith("rho"))
				{
					// one rho per pairwise classifier; requires nr_class already read
					int n = model.nr_class * (model.nr_class-1)/2;
					model.rho = new double[n];
					StringTokenizer st = new StringTokenizer(arg);
					for(int i=0;i<n;i++)
						model.rho[i] = atof(st.nextToken());
				}
				else if(cmd.startsWith("label"))
				{
					int n = model.nr_class;
					model.label = new int[n];
					StringTokenizer st = new StringTokenizer(arg);
					for(int i=0;i<n;i++)
						model.label[i] = atoi(st.nextToken());
				}
				else if(cmd.startsWith("probA"))
				{
					int n = model.nr_class*(model.nr_class-1)/2;
					model.probA = new double[n];
					StringTokenizer st = new StringTokenizer(arg);
					for(int i=0;i<n;i++)
						model.probA[i] = atof(st.nextToken());
				}
				else if(cmd.startsWith("probB"))
				{
					int n = model.nr_class*(model.nr_class-1)/2;
					model.probB = new double[n];
					StringTokenizer st = new StringTokenizer(arg);
					for(int i=0;i<n;i++)
						model.probB[i] = atof(st.nextToken());
				}
				else if(cmd.startsWith("nr_sv"))
				{
					int n = model.nr_class;
					model.nSV = new int[n];
					StringTokenizer st = new StringTokenizer(arg);
					for(int i=0;i<n;i++)
						model.nSV[i] = atoi(st.nextToken());
				}
				else if(cmd.startsWith("SV"))
				{
					// end of header; SV rows follow
					break;
				}
				else
				{
					System.err.print("unknown text in model file: ["+cmd+"]\n");
					return false;
				}
			}
		}
		catch(Exception e)
		{
			// malformed header (including premature EOF, which surfaces as NPE)
			return false;
		}
		return true;
	}
public static svm_model svm_load_model(String model_file_name) throws IOException
{
return svm_load_model(new BufferedReader(new FileReader(model_file_name)));
}
	// Reads a complete LIBSVM-format model from fp (header + SV block) and
	// closes the reader. Returns null (after printing an error) when the
	// header is malformed.
	public static svm_model svm_load_model(BufferedReader fp) throws IOException
	{
		// read parameters
		svm_model model = new svm_model();
		model.rho = null;
		model.probA = null;
		model.probB = null;
		model.label = null;
		model.nSV = null;
		if (read_model_header(fp, model) == false)
		{
			System.err.print("ERROR: failed to read model\n");
			return null;
		}
		// read sv_coef and SV
		int m = model.nr_class - 1;	// coefficients per SV
		int l = model.l;		// number of SV rows
		model.sv_coef = new double[m][l];
		model.SV = new svm_node[l][];
		for(int i=0;i<l;i++)
		{
			// row format: coef_1 ... coef_m index:value index:value ...
			String line = fp.readLine();
			StringTokenizer st = new StringTokenizer(line," \t\n\r\f:");
			for(int k=0;k<m;k++)
				model.sv_coef[k][i] = atof(st.nextToken());
			int n = st.countTokens()/2;	// remaining tokens are index/value pairs
			model.SV[i] = new svm_node[n];
			for(int j=0;j<n;j++)
			{
				model.SV[i][j] = new svm_node();
				model.SV[i][j].index = atoi(st.nextToken());
				model.SV[i][j].value = atof(st.nextToken());
			}
		}
		fp.close();
		return model;
	}
	// Validates param against prob before training.
	// Returns null when the parameters are usable, otherwise a short English
	// message describing the first problem found.
	public static String svm_check_parameter(svm_problem prob, svm_parameter param)
	{
		// svm_type
		int svm_type = param.svm_type;
		if(svm_type != svm_parameter.C_SVC &&
		   svm_type != svm_parameter.NU_SVC &&
		   svm_type != svm_parameter.ONE_CLASS &&
		   svm_type != svm_parameter.EPSILON_SVR &&
		   svm_type != svm_parameter.NU_SVR)
			return "unknown svm type";
		// kernel_type, degree
		int kernel_type = param.kernel_type;
		if(kernel_type != svm_parameter.LINEAR &&
		   kernel_type != svm_parameter.POLY &&
		   kernel_type != svm_parameter.RBF &&
		   kernel_type != svm_parameter.SIGMOID &&
		   kernel_type != svm_parameter.PRECOMPUTED)
			return "unknown kernel type";
		if(param.gamma < 0)
			return "gamma < 0";
		if(param.degree < 0)
			return "degree of polynomial kernel < 0";
		// cache_size,eps,C,nu,p,shrinking
		if(param.cache_size <= 0)
			return "cache_size <= 0";
		if(param.eps <= 0)
			return "eps <= 0";
		// C only applies to the C-penalized formulations
		if(svm_type == svm_parameter.C_SVC ||
		   svm_type == svm_parameter.EPSILON_SVR ||
		   svm_type == svm_parameter.NU_SVR)
			if(param.C <= 0)
				return "C <= 0";
		// nu must lie in (0, 1]
		if(svm_type == svm_parameter.NU_SVC ||
		   svm_type == svm_parameter.ONE_CLASS ||
		   svm_type == svm_parameter.NU_SVR)
			if(param.nu <= 0 || param.nu > 1)
				return "nu <= 0 or nu > 1";
		if(svm_type == svm_parameter.EPSILON_SVR)
			if(param.p < 0)
				return "p < 0";
		if(param.shrinking != 0 &&
		   param.shrinking != 1)
			return "shrinking != 0 and shrinking != 1";
		if(param.probability != 0 &&
		   param.probability != 1)
			return "probability != 0 and probability != 1";
		if(param.probability == 1 &&
		   svm_type == svm_parameter.ONE_CLASS)
			return "one-class SVM probability output not supported yet";
		// check whether nu-svc is feasible:
		// for each class pair, nu*(n1+n2)/2 must not exceed min(n1,n2)
		if(svm_type == svm_parameter.NU_SVC)
		{
			int l = prob.l;
			int max_nr_class = 16;
			int nr_class = 0;
			int[] label = new int[max_nr_class];
			int[] count = new int[max_nr_class];
			int i;
			// count examples per label (same growth scheme as svm_group_classes)
			for(i=0;i<l;i++)
			{
				int this_label = (int)prob.y[i];
				int j;
				for(j=0;j<nr_class;j++)
					if(this_label == label[j])
					{
						++count[j];
						break;
					}
				if(j == nr_class)
				{
					if(nr_class == max_nr_class)
					{
						max_nr_class *= 2;
						int[] new_data = new int[max_nr_class];
						System.arraycopy(label,0,new_data,0,label.length);
						label = new_data;
						new_data = new int[max_nr_class];
						System.arraycopy(count,0,new_data,0,count.length);
						count = new_data;
					}
					label[nr_class] = this_label;
					count[nr_class] = 1;
					++nr_class;
				}
			}
			for(i=0;i<nr_class;i++)
			{
				int n1 = count[i];
				for(int j=i+1;j<nr_class;j++)
				{
					int n2 = count[j];
					if(param.nu*(n1+n2)/2 > Math.min(n1,n2))
						return "specified nu is infeasible";
				}
			}
		}
		return null;
	}
public static int svm_check_probability_model(svm_model model)
{
if (((model.param.svm_type == svm_parameter.C_SVC || model.param.svm_type == svm_parameter.NU_SVC) &&
model.probA!=null && model.probB!=null) ||
((model.param.svm_type == svm_parameter.EPSILON_SVR || model.param.svm_type == svm_parameter.NU_SVR) &&
model.probA!=null))
return 1;
else
return 0;
}
public static void svm_set_print_string_function(svm_print_interface print_func)
{
if (print_func == null)
svm_print_string = svm_print_stdout;
else
svm_print_string = print_func;
}
}
| 63,839 | 21.4 | 145 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/seq-kernel/libsvm/java/libsvm/svm_model.java | //
// svm_model
//
package libsvm;
// Trained SVM model: parameters, support vectors, and decision-function
// coefficients, as produced by svm.svm_train and (de)serialized by
// svm_save_model / svm_load_model.
public class svm_model implements java.io.Serializable
{
	public svm_parameter param;	// parameter
	public int nr_class;		// number of classes, = 2 in regression/one class svm
	public int l;			// total #SV
	public svm_node[][] SV;	// SVs (SV[l])
	public double[][] sv_coef;	// coefficients for SVs in decision functions (sv_coef[k-1][l])
	public double[] rho;		// constants in decision functions (rho[k*(k-1)/2])
	public double[] probA;         // pairwise probability information
	public double[] probB;
	public int[] sv_indices;       // sv_indices[0,...,nSV-1] are values in [1,...,num_traning_data] to indicate SVs in the training set
	// for classification only
	public int[] label;		// label of each class (label[k])
	public int[] nSV;		// number of SVs for each class (nSV[k])
				// nSV[0] + nSV[1] + ... + nSV[k-1] = l
};
| 868 | 36.782609 | 133 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/seq-kernel/libsvm/java/libsvm/svm_node.java | package libsvm;
public class svm_node implements java.io.Serializable
{
public int index;
public double value;
}
| 115 | 15.571429 | 53 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/seq-kernel/libsvm/java/libsvm/svm_parameter.java | package libsvm;
public class svm_parameter implements Cloneable,java.io.Serializable
{
/* svm_type */
public static final int C_SVC = 0;
public static final int NU_SVC = 1;
public static final int ONE_CLASS = 2;
public static final int EPSILON_SVR = 3;
public static final int NU_SVR = 4;
/* kernel_type */
public static final int LINEAR = 0;
public static final int POLY = 1;
public static final int RBF = 2;
public static final int SIGMOID = 3;
public static final int PRECOMPUTED = 4;
public int svm_type;
public int kernel_type;
public int degree; // for poly
public double gamma; // for poly/rbf/sigmoid
public double coef0; // for poly/sigmoid
// these are for training only
public double cache_size; // in MB
public double eps; // stopping criteria
public double C; // for C_SVC, EPSILON_SVR and NU_SVR
public int nr_weight; // for C_SVC
public int[] weight_label; // for C_SVC
public double[] weight; // for C_SVC
public double nu; // for NU_SVC, ONE_CLASS, and NU_SVR
public double p; // for EPSILON_SVR
public int shrinking; // use the shrinking heuristics
public int probability; // do probability estimates
public Object clone()
{
try
{
return super.clone();
} catch (CloneNotSupportedException e)
{
return null;
}
}
}
| 1,288 | 25.854167 | 68 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/seq-kernel/libsvm/java/libsvm/svm_print_interface.java | package libsvm;
public interface svm_print_interface
{
public void print(String s);
}
| 87 | 13.666667 | 36 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/seq-kernel/libsvm/java/libsvm/svm_problem.java | package libsvm;
public class svm_problem implements java.io.Serializable
{
public int l;
public double[] y;
public svm_node[][] x;
}
| 136 | 16.125 | 56 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/seq-kernel/ssk_core/libsvm/CustomKernel.java | package libsvm;
public abstract class CustomKernel
{
abstract public double kernel(svm_node[] x, svm_node[] y);
abstract public svm_node new_svm_node();
}
| 160 | 19.125 | 60 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/seq-kernel/ssk_core/libsvm/svm.java | package libsvm;
import java.io.*;
import java.util.*;
//
// Kernel Cache
//
// l is the number of total data items
// size is the cache size limit in bytes
//
// LRU cache for columns of the kernel matrix Q.
// Each of the l rows may hold a prefix data[0,len) of its column; total
// cached floats are bounded by the byte budget passed to the constructor.
class Cache {
	private final int l;
	private int size;	// remaining capacity, counted in floats
	private final class head_t
	{
		head_t prev, next;	// a circular list
		float[] data;
		int len;		// data[0,len) is cached in this entry
	}
	private final head_t[] head;
	private head_t lru_head;	// sentinel; lru_head.next is least recently used
	Cache(int l_, int size_)
	{
		l = l_;
		size = size_;
		head = new head_t[l];
		for(int i=0;i<l;i++) head[i] = new head_t();
		size /= 4;		// bytes -> floats
		size -= l * (16/4);	// sizeof(head_t) == 16
		lru_head = new head_t();
		lru_head.next = lru_head.prev = lru_head;
	}
	private void lru_delete(head_t h)
	{
		// delete from current location
		h.prev.next = h.next;
		h.next.prev = h.prev;
	}
	private void lru_insert(head_t h)
	{
		// insert to last position (most recently used)
		h.next = lru_head;
		h.prev = lru_head.prev;
		h.prev.next = h;
		h.next.prev = h;
	}
	// request data [0,len)
	// return some position p where [p,len) need to be filled
	// (p >= len if nothing needs to be filled)
	// java: simulate pointer using single-element array
	int get_data(int index, float[][] data, int len)
	{
		head_t h = head[index];
		if(h.len > 0) lru_delete(h);	// will be re-inserted as most recent
		int more = len - h.len;
		if(more > 0)
		{
			// free old space: evict least-recently-used entries until it fits
			while(size < more)
			{
				head_t old = lru_head.next;
				lru_delete(old);
				size += old.len;
				old.data = null;
				old.len = 0;
			}
			// allocate new space, preserving the already-cached prefix
			float[] new_data = new float[len];
			if(h.data != null) System.arraycopy(h.data,0,new_data,0,h.len);
			h.data = new_data;
			size -= more;
			// swap h.len and len: h.len becomes the new length, len the old
			// (so the return value tells the caller where filling must start)
			do {int _=h.len; h.len=len; len=_;} while(false);
		}
		lru_insert(h);
		data[0] = h.data;
		return len;
	}
	// Keeps the cache consistent after rows i and j of the problem are
	// swapped: exchanges the two entries and the (i,j) elements inside
	// every cached column, discarding columns that only cover one of them.
	void swap_index(int i, int j)
	{
		if(i==j) return;
		if(head[i].len > 0) lru_delete(head[i]);
		if(head[j].len > 0) lru_delete(head[j]);
		do {float[] _=head[i].data; head[i].data=head[j].data; head[j].data=_;} while(false);
		do {int _=head[i].len; head[i].len=head[j].len; head[j].len=_;} while(false);
		if(head[i].len > 0) lru_insert(head[i]);
		if(head[j].len > 0) lru_insert(head[j]);
		if(i>j) do {int _=i; i=j; j=_;} while(false);	// ensure i < j below
		for(head_t h = lru_head.next; h!=lru_head; h=h.next)
		{
			if(h.len > i)
			{
				if(h.len > j)
					do {float _=h.data[i]; h.data[i]=h.data[j]; h.data[j]=_;} while(false);
				else
				{
					// column covers i but not j: cannot be fixed up, drop it
					lru_delete(h);
					size += h.len;
					h.data = null;
					h.len = 0;
				}
			}
		}
	}
}
//
// Kernel evaluation
//
// the static method k_function is for doing single kernel evaluation
// the constructor of Kernel prepares to calculate the l*l kernel matrix
// the member function get_Q is for getting one column from the Q Matrix
//
// Abstract view of the Q matrix used by the solver: column access plus the
// index swap needed by the shrinking heuristic.
abstract class QMatrix {
	// Returns the first len entries of column `column`.
	abstract float[] get_Q(int column, int len);
	// Swaps rows/columns i and j (mirrors a data permutation).
	abstract void swap_index(int i, int j);
};
// Kernel evaluation over the training data.
// The static k_function does a single evaluation (used at prediction time);
// an instance precomputes per-row data (x_square for RBF) for building Q.
abstract class Kernel extends QMatrix {
	private svm_node[][] x;
	private final double[] x_square;	// ||x_i||^2, RBF only
	// svm_parameter
	private final int kernel_type;
	private final double degree;
	private final double gamma;
	private final double coef0;
	abstract float[] get_Q(int column, int len);
	void swap_index(int i, int j)
	{
		do {svm_node[] _=x[i]; x[i]=x[j]; x[j]=_;} while(false);
		if(x_square != null) do {double _=x_square[i]; x_square[i]=x_square[j]; x_square[j]=_;} while(false);
	}
	// tanh via a single exp; avoids Math.tanh (not assumed available here)
	private static double tanh(double x)
	{
		double e = Math.exp(x);
		return 1.0-2.0/(e*e+1);
	}
	// K(x_i, x_j) for the configured kernel type.
	double kernel_function(int i, int j)
	{
		switch(kernel_type)
		{
			case svm_parameter.LINEAR:
				return dot(x[i],x[j]);
			case svm_parameter.POLY:
				return Math.pow(gamma*dot(x[i],x[j])+coef0,degree);
			case svm_parameter.RBF:
				return Math.exp(-gamma*(x_square[i]+x_square[j]-2*dot(x[i],x[j])));
			case svm_parameter.SIGMOID:
				return tanh(gamma*dot(x[i],x[j])+coef0);
			case svm_parameter.CUSTOM:
				// delegate to the user-installed kernel
				return svm.m_ck.kernel(x[i], x[j]);
			default:
				return 0;	// java
		}
	}
	Kernel(int l, svm_node[][] x_, svm_parameter param)
	{
		this.kernel_type = param.kernel_type;
		this.degree = param.degree;
		this.gamma = param.gamma;
		this.coef0 = param.coef0;
		x = (svm_node[][])x_.clone();
		if(kernel_type == svm_parameter.RBF)
		{
			// cache squared norms so RBF needs only one dot product per pair
			x_square = new double[l];
			for(int i=0;i<l;i++)
				x_square[i] = dot(x[i],x[i]);
		}
		else x_square = null;
	}
	// Sparse dot product: both vectors are assumed sorted by ascending index.
	static double dot(svm_node[] x, svm_node[] y)
	{
		double sum = 0;
		int xlen = x.length;
		int ylen = y.length;
		int i = 0;
		int j = 0;
		while(i < xlen && j < ylen)
		{
			if(x[i].index == y[j].index)
				sum += x[i++].value * y[j++].value;
			else
			{
				if(x[i].index > y[j].index)
					++j;
				else
					++i;
			}
		}
		return sum;
	}
	// Single kernel evaluation between two sparse vectors (prediction path).
	static double k_function(svm_node[] x, svm_node[] y,
				 svm_parameter param)
	{
		switch(param.kernel_type)
		{
			case svm_parameter.LINEAR:
				return dot(x,y);
			case svm_parameter.POLY:
				return Math.pow(param.gamma*dot(x,y)+param.coef0,param.degree);
			case svm_parameter.RBF:
			{
				// ||x - y||^2 via a sparse merge over both index sets
				double sum = 0;
				int xlen = x.length;
				int ylen = y.length;
				int i = 0;
				int j = 0;
				while(i < xlen && j < ylen)
				{
					if(x[i].index == y[j].index)
					{
						double d = x[i++].value - y[j++].value;
						sum += d*d;
					}
					else if(x[i].index > y[j].index)
					{
						sum += y[j].value * y[j].value;
						++j;
					}
					else
					{
						sum += x[i].value * x[i].value;
						++i;
					}
				}
				while(i < xlen)
				{
					sum += x[i].value * x[i].value;
					++i;
				}
				while(j < ylen)
				{
					sum += y[j].value * y[j].value;
					++j;
				}
				return Math.exp(-param.gamma*sum);
			}
			case svm_parameter.SIGMOID:
				return tanh(param.gamma*dot(x,y)+param.coef0);
			case svm_parameter.CUSTOM:
				return svm.m_ck.kernel(x, y);
			default:
				return 0;	// java
		}
	}
}
// Generalized SMO+SVMlight algorithm
// Solves:
//
// min 0.5(\alpha^T Q \alpha) + b^T \alpha
//
// y^T \alpha = \delta
// y_i = +1 or -1
// 0 <= alpha_i <= Cp for y_i = 1
// 0 <= alpha_i <= Cn for y_i = -1
//
// Given:
//
// Q, b, y, Cp, Cn, and an initial feasible point \alpha
// l is the size of vectors and matrices
// eps is the stopping criterion
//
// solution will be put in \alpha, objective value will be put in obj
//
class Solver {
	int active_size;	// variables not shrunk out; [0,active_size) of the working arrays
	byte[] y;		// labels, +1/-1
	double[] G;		// gradient of objective function
	static final byte LOWER_BOUND = 0;
	static final byte UPPER_BOUND = 1;
	static final byte FREE = 2;
	byte[] alpha_status;	// LOWER_BOUND, UPPER_BOUND, FREE
	double[] alpha;
	QMatrix Q;
	double eps;		// stopping tolerance
	double Cp,Cn;		// upper bounds for positive / negative examples
	double[] b;		// linear term of the objective
	int[] active_set;	// maps active positions back to original indices
	double[] G_bar;		// gradient, if we treat free variables as 0
	int l;			// problem size
	boolean unshrinked;	// XXX: true once shrinking has been undone near convergence
	static final double INF = java.lang.Double.POSITIVE_INFINITY;
double get_C(int i)
{
return (y[i] > 0)? Cp : Cn;
}
void update_alpha_status(int i)
{
if(alpha[i] >= get_C(i))
alpha_status[i] = UPPER_BOUND;
else if(alpha[i] <= 0)
alpha_status[i] = LOWER_BOUND;
else alpha_status[i] = FREE;
}
	// Status predicates over alpha_status (see update_alpha_status).
	boolean is_upper_bound(int i) { return alpha_status[i] == UPPER_BOUND; }
	boolean is_lower_bound(int i) { return alpha_status[i] == LOWER_BOUND; }
	boolean is_free(int i) { return alpha_status[i] == FREE; }
// java: information about solution except alpha,
// because we cannot return multiple values otherwise...
	// java: information about solution except alpha,
	// because we cannot return multiple values otherwise...
	static class SolutionInfo {
		double obj;		// objective value at the solution
		double rho;		// bias term of the decision function
		double upper_bound_p;	// effective Cp
		double upper_bound_n;	// effective Cn
		double r;	// for Solver_NU
	}
	// Swaps positions i and j across all per-example state (used by shrinking).
	void swap_index(int i, int j)
	{
		Q.swap_index(i,j);
		do {byte _=y[i]; y[i]=y[j]; y[j]=_;} while(false);
		do {double _=G[i]; G[i]=G[j]; G[j]=_;} while(false);
		do {byte _=alpha_status[i]; alpha_status[i]=alpha_status[j]; alpha_status[j]=_;} while(false);
		do {double _=alpha[i]; alpha[i]=alpha[j]; alpha[j]=_;} while(false);
		do {double _=b[i]; b[i]=b[j]; b[j]=_;} while(false);
		do {int _=active_set[i]; active_set[i]=active_set[j]; active_set[j]=_;} while(false);
		do {double _=G_bar[i]; G_bar[i]=G_bar[j]; G_bar[j]=_;} while(false);
	}
	void reconstruct_gradient()
	{
		// reconstruct inactive elements of G from G_bar and free variables
		if(active_size == l) return;
		int i;
		// G_bar already accounts for upper-bounded variables; add the linear term
		for(i=active_size;i<l;i++)
			G[i] = G_bar[i] + b[i];
		// then add contributions of the free (0 < alpha < C) active variables
		for(i=0;i<active_size;i++)
			if(is_free(i))
			{
				float[] Q_i = Q.get_Q(i,l);
				double alpha_i = alpha[i];
				for(int j=active_size;j<l;j++)
					G[j] += alpha_i * Q_i[j];
			}
	}
// Core SMO solver.  Minimizes 0.5*alpha^T Q alpha + b^T alpha subject to
// y^T alpha = constant and 0 <= alpha_i <= C_i (C_i is Cp for y_i = +1,
// Cn for y_i = -1).  On return alpha_ holds the solution in the caller's
// original index order and si carries rho, the objective value and the
// upper bounds.  `shrinking` != 0 enables periodic active-set shrinking.
void Solve(int l, QMatrix Q, double[] b_, byte[] y_,
double[] alpha_, double Cp, double Cn, double eps, SolutionInfo si, int shrinking)
{
this.l = l;
this.Q = Q;
// Private copies: shrinking reorders these arrays via swap_index, and the
// caller's inputs must not be disturbed.
b = (double[])b_.clone();
y = (byte[])y_.clone();
alpha = (double[])alpha_.clone();
this.Cp = Cp;
this.Cn = Cn;
this.eps = eps;
this.unshrinked = false;
// initialize alpha_status
{
alpha_status = new byte[l];
for(int i=0;i<l;i++)
update_alpha_status(i);
}
// initialize active set (for shrinking)
{
active_set = new int[l];
for(int i=0;i<l;i++)
active_set[i] = i;
active_size = l;
}
// initialize gradient
// G = b + Q*alpha.  G_bar caches the contribution of the upper-bounded
// alphas so the full gradient can be reconstructed cheaply after shrinking.
{
G = new double[l];
G_bar = new double[l];
int i;
for(i=0;i<l;i++)
{
G[i] = b[i];
G_bar[i] = 0;
}
for(i=0;i<l;i++)
if(!is_lower_bound(i))
{
float[] Q_i = Q.get_Q(i,l);
double alpha_i = alpha[i];
int j;
for(j=0;j<l;j++)
G[j] += alpha_i*Q_i[j];
if(is_upper_bound(i))
for(j=0;j<l;j++)
G_bar[j] += get_C(i) * Q_i[j];
}
}
// optimization step
int iter = 0;
int counter = Math.min(l,1000)+1;
int[] working_set = new int[2];
while(true)
{
// show progress and do shrinking
if(--counter == 0)
{
counter = Math.min(l,1000);
if(shrinking!=0) do_shrinking();
System.err.print(".");
}
if(select_working_set(working_set)!=0)
{
// The shrunk problem looks optimal: reconstruct the whole gradient,
// widen back to the full set and re-check before declaring convergence.
// reconstruct the whole gradient
reconstruct_gradient();
// reset active set size and check
active_size = l;
System.err.print("*");
if(select_working_set(working_set)!=0)
break;
else
counter = 1; // do shrinking next iteration
}
int i = working_set[0];
int j = working_set[1];
++iter;
// update alpha[i] and alpha[j], handle bounds carefully
float[] Q_i = Q.get_Q(i,active_size);
float[] Q_j = Q.get_Q(j,active_size);
double C_i = get_C(i);
double C_j = get_C(j);
double old_alpha_i = alpha[i];
double old_alpha_j = alpha[j];
if(y[i]!=y[j])
{
// Opposite labels: alpha_i - alpha_j is invariant along the feasible
// direction.  Take the unconstrained analytic step, then clip both
// variables back into the [0, C] box while preserving the difference.
double delta = (-G[i]-G[j])/Math.max(Q_i[i]+Q_j[j]+2*Q_i[j],(float)0);
double diff = alpha[i] - alpha[j];
alpha[i] += delta;
alpha[j] += delta;
if(diff > 0)
{
if(alpha[j] < 0)
{
alpha[j] = 0;
alpha[i] = diff;
}
}
else
{
if(alpha[i] < 0)
{
alpha[i] = 0;
alpha[j] = -diff;
}
}
if(diff > C_i - C_j)
{
if(alpha[i] > C_i)
{
alpha[i] = C_i;
alpha[j] = C_i - diff;
}
}
else
{
if(alpha[j] > C_j)
{
alpha[j] = C_j;
alpha[i] = C_j + diff;
}
}
}
else
{
// Same labels: alpha_i + alpha_j is invariant.  Step and clip while
// preserving the sum.
double delta = (G[i]-G[j])/Math.max(Q_i[i]+Q_j[j]-2*Q_i[j],(float)0);
double sum = alpha[i] + alpha[j];
alpha[i] -= delta;
alpha[j] += delta;
if(sum > C_i)
{
if(alpha[i] > C_i)
{
alpha[i] = C_i;
alpha[j] = sum - C_i;
}
}
else
{
if(alpha[j] < 0)
{
alpha[j] = 0;
alpha[i] = sum;
}
}
if(sum > C_j)
{
if(alpha[j] > C_j)
{
alpha[j] = C_j;
alpha[i] = sum - C_j;
}
}
else
{
if(alpha[i] < 0)
{
alpha[i] = 0;
alpha[j] = sum;
}
}
}
// update G
// Rank-two update of the gradient over the active set only.
double delta_alpha_i = alpha[i] - old_alpha_i;
double delta_alpha_j = alpha[j] - old_alpha_j;
for(int k=0;k<active_size;k++)
{
G[k] += Q_i[k]*delta_alpha_i + Q_j[k]*delta_alpha_j;
}
// update alpha_status and G_bar
// G_bar must be adjusted whenever a variable enters or leaves its upper
// bound; this needs full kernel rows (length l), not just the active part.
{
boolean ui = is_upper_bound(i);
boolean uj = is_upper_bound(j);
update_alpha_status(i);
update_alpha_status(j);
int k;
if(ui != is_upper_bound(i))
{
Q_i = Q.get_Q(i,l);
if(ui)
for(k=0;k<l;k++)
G_bar[k] -= C_i * Q_i[k];
else
for(k=0;k<l;k++)
G_bar[k] += C_i * Q_i[k];
}
if(uj != is_upper_bound(j))
{
Q_j = Q.get_Q(j,l);
if(uj)
for(k=0;k<l;k++)
G_bar[k] -= C_j * Q_j[k];
else
for(k=0;k<l;k++)
G_bar[k] += C_j * Q_j[k];
}
}
}
// calculate rho
si.rho = calculate_rho();
// calculate objective value
// Uses the identity obj = 0.5 * sum_i alpha_i * (G_i + b_i).
{
double v = 0;
int i;
for(i=0;i<l;i++)
v += alpha[i] * (G[i] + b[i]);
si.obj = v/2;
}
// put back the solution
// Undo the shrinking permutation so alpha_ is in the caller's order.
{
for(int i=0;i<l;i++)
alpha_[active_set[i]] = alpha[i];
}
si.upper_bound_p = Cp;
si.upper_bound_n = Cn;
System.out.print("\noptimization finished, #iter = "+iter+"\n");
}
// return 1 if already optimal, return 0 otherwise
// Maximal-violating-pair selection: scans the active set for the variable
// pair that most violates the KKT conditions.  Gmax1 tracks the best
// ascent direction with y_i*d = +1, Gmax2 the best with y_i*d = -1; the
// problem is declared optimal when Gmax1 + Gmax2 < eps.
int select_working_set(int[] working_set)
{
// return i,j which maximize -grad(f)^T d , under constraint
// if alpha_i == C, d != +1
// if alpha_i == 0, d != -1
double Gmax1 = -INF; // max { -grad(f)_i * d | y_i*d = +1 }
int Gmax1_idx = -1;
double Gmax2 = -INF; // max { -grad(f)_i * d | y_i*d = -1 }
int Gmax2_idx = -1;
for(int i=0;i<active_size;i++)
{
if(y[i]==+1) // y = +1
{
if(!is_upper_bound(i)) // d = +1
{
if(-G[i] >= Gmax1)
{
Gmax1 = -G[i];
Gmax1_idx = i;
}
}
if(!is_lower_bound(i)) // d = -1
{
if(G[i] >= Gmax2)
{
Gmax2 = G[i];
Gmax2_idx = i;
}
}
}
else // y = -1
{
// Mirror image of the positive case: the roles of Gmax1/Gmax2 swap.
if(!is_upper_bound(i)) // d = +1
{
if(-G[i] >= Gmax2)
{
Gmax2 = -G[i];
Gmax2_idx = i;
}
}
if(!is_lower_bound(i)) // d = -1
{
if(G[i] >= Gmax1)
{
Gmax1 = G[i];
Gmax1_idx = i;
}
}
}
}
// Stopping criterion: total KKT violation below tolerance.
if(Gmax1+Gmax2 < eps)
return 1;
working_set[0] = Gmax1_idx;
working_set[1] = Gmax2_idx;
return 0;
}
// Moves variables that are confidently at a bound out of the active set
// (swapped past active_size) so later iterations skip them.  Near
// convergence it "unshrinks": reconstructs the gradient and re-activates
// any variable whose bound status is no longer certain.
void do_shrinking()
{
int i,j,k;
int[] working_set = new int[2];
if(select_working_set(working_set)!=0) return;
i = working_set[0];
j = working_set[1];
// Violation thresholds derived from the current maximal-violating pair.
double Gm1 = -y[j]*G[j];
double Gm2 = y[i]*G[i];
// shrink
for(k=0;k<active_size;k++)
{
// A bounded variable is kept active (continue) while its gradient says
// it might still want to move; otherwise it is swapped out.
if(is_lower_bound(k))
{
if(y[k]==+1)
{
if(-G[k] >= Gm1) continue;
}
else if(-G[k] >= Gm2) continue;
}
else if(is_upper_bound(k))
{
if(y[k]==+1)
{
if(G[k] >= Gm2) continue;
}
else if(G[k] >= Gm1) continue;
}
else continue;
--active_size;
swap_index(k,active_size);
--k; // look at the newcomer
}
// unshrink, check all variables again before final iterations
if(unshrinked || -(Gm1 + Gm2) > eps*10) return;
unshrinked = true;
reconstruct_gradient();
// Walk the inactive tail and bring back variables that fail the
// shrinking test; note this runs at most once per Solve (unshrinked flag).
for(k=l-1;k>=active_size;k--)
{
if(is_lower_bound(k))
{
if(y[k]==+1)
{
if(-G[k] < Gm1) continue;
}
else if(-G[k] < Gm2) continue;
}
else if(is_upper_bound(k))
{
if(y[k]==+1)
{
if(G[k] < Gm2) continue;
}
else if(G[k] < Gm1) continue;
}
else continue;
swap_index(k,active_size);
active_size++;
++k; // look at the newcomer
}
}
// Computes the bias term rho from the KKT conditions.  Free support
// vectors determine rho exactly, so their y_i*G_i values are averaged for
// numerical stability; with no free variables, rho is the midpoint of the
// feasible interval [lower, upper] implied by the bounded variables.
double calculate_rho()
{
	int freeCount = 0;
	double upper = INF;
	double lower = -INF;
	double freeSum = 0;

	for (int i = 0; i < active_size; i++)
	{
		double yG = y[i] * G[i];
		if (is_lower_bound(i))
		{
			if (y[i] > 0)
				upper = Math.min(upper, yG);
			else
				lower = Math.max(lower, yG);
		}
		else if (is_upper_bound(i))
		{
			if (y[i] < 0)
				upper = Math.min(upper, yG);
			else
				lower = Math.max(lower, yG);
		}
		else
		{
			++freeCount;
			freeSum += yG;
		}
	}

	return (freeCount > 0) ? freeSum / freeCount : (upper + lower) / 2;
}
}
//
// Solver for nu-svm classification and regression
//
// additional constraint: e^T \alpha = constant
//
// Solver for the nu-SVM formulations.  Inherits the SMO machinery from
// Solver but selects working pairs within the same label class only,
// because the nu formulation carries the extra equality constraint
// e^T alpha = constant.  It also computes an extra scalar `r` (stored in
// si.r) used by the callers to rescale the solution.
final class Solver_NU extends Solver
{
private SolutionInfo si;
void Solve(int l, QMatrix Q, double[] b, byte[] y,
double[] alpha, double Cp, double Cn, double eps,
SolutionInfo si, int shrinking)
{
// Keep a reference so calculate_rho() can publish `r` into it.
this.si = si;
super.Solve(l,Q,b,y,alpha,Cp,Cn,eps,si,shrinking);
}
// Like Solver.select_working_set, but the pair must share a label:
// (Gmax1, Gmax2) track the best +1-class pair, (Gmax3, Gmax4) the best
// -1-class pair; the more violating of the two pairs is returned.
int select_working_set(int[] working_set)
{
// return i,j which maximize -grad(f)^T d , under constraint
// if alpha_i == C, d != +1
// if alpha_i == 0, d != -1
double Gmax1 = -INF; // max { -grad(f)_i * d | y_i = +1, d = +1 }
int Gmax1_idx = -1;
double Gmax2 = -INF; // max { -grad(f)_i * d | y_i = +1, d = -1 }
int Gmax2_idx = -1;
double Gmax3 = -INF; // max { -grad(f)_i * d | y_i = -1, d = +1 }
int Gmax3_idx = -1;
double Gmax4 = -INF; // max { -grad(f)_i * d | y_i = -1, d = -1 }
int Gmax4_idx = -1;
for(int i=0;i<active_size;i++)
{
if(y[i]==+1) // y == +1
{
if(!is_upper_bound(i)) // d = +1
{
if(-G[i] >= Gmax1)
{
Gmax1 = -G[i];
Gmax1_idx = i;
}
}
if(!is_lower_bound(i)) // d = -1
{
if(G[i] >= Gmax2)
{
Gmax2 = G[i];
Gmax2_idx = i;
}
}
}
else // y == -1
{
if(!is_upper_bound(i)) // d = +1
{
if(-G[i] >= Gmax3)
{
Gmax3 = -G[i];
Gmax3_idx = i;
}
}
if(!is_lower_bound(i)) // d = -1
{
if(G[i] >= Gmax4)
{
Gmax4 = G[i];
Gmax4_idx = i;
}
}
}
}
// Optimal when neither class has a violating pair above tolerance.
if(Math.max(Gmax1+Gmax2,Gmax3+Gmax4) < eps)
return 1;
if(Gmax1+Gmax2 > Gmax3+Gmax4)
{
working_set[0] = Gmax1_idx;
working_set[1] = Gmax2_idx;
}
else
{
working_set[0] = Gmax3_idx;
working_set[1] = Gmax4_idx;
}
return 0;
}
// Shrinking with per-class thresholds (Gm1/Gm2 for +1, Gm3/Gm4 for -1);
// the structure mirrors Solver.do_shrinking.
void do_shrinking()
{
double Gmax1 = -INF; // max { -grad(f)_i * d | y_i = +1, d = +1 }
double Gmax2 = -INF; // max { -grad(f)_i * d | y_i = +1, d = -1 }
double Gmax3 = -INF; // max { -grad(f)_i * d | y_i = -1, d = +1 }
double Gmax4 = -INF; // max { -grad(f)_i * d | y_i = -1, d = -1 }
int k;
for(k=0;k<active_size;k++)
{
if(!is_upper_bound(k))
{
if(y[k]==+1)
{
if(-G[k] > Gmax1) Gmax1 = -G[k];
}
else if(-G[k] > Gmax3) Gmax3 = -G[k];
}
if(!is_lower_bound(k))
{
if(y[k]==+1)
{
if(G[k] > Gmax2) Gmax2 = G[k];
}
else if(G[k] > Gmax4) Gmax4 = G[k];
}
}
double Gm1 = -Gmax2;
double Gm2 = -Gmax1;
double Gm3 = -Gmax4;
double Gm4 = -Gmax3;
// shrink: deactivate bounded variables that clearly cannot move
for(k=0;k<active_size;k++)
{
if(is_lower_bound(k))
{
if(y[k]==+1)
{
if(-G[k] >= Gm1) continue;
}
else if(-G[k] >= Gm3) continue;
}
else if(is_upper_bound(k))
{
if(y[k]==+1)
{
if(G[k] >= Gm2) continue;
}
else if(G[k] >= Gm4) continue;
}
else continue;
--active_size;
swap_index(k,active_size);
--k; // look at the newcomer
}
// unshrink, check all variables again before final iterations
if(unshrinked || Math.max(-(Gm1+Gm2),-(Gm3+Gm4)) > eps*10) return;
unshrinked = true;
reconstruct_gradient();
for(k=l-1;k>=active_size;k--)
{
if(is_lower_bound(k))
{
if(y[k]==+1)
{
if(-G[k] < Gm1) continue;
}
else if(-G[k] < Gm3) continue;
}
else if(is_upper_bound(k))
{
if(y[k]==+1)
{
if(G[k] < Gm2) continue;
}
else if(G[k] < Gm4) continue;
}
else continue;
swap_index(k,active_size);
active_size++;
++k; // look at the newcomer
}
}
// The nu formulation has two independent biases (one per class): r1 from
// the +1 class, r2 from the -1 class.  rho is their difference over two
// and si.r their average.
double calculate_rho()
{
int nr_free1 = 0,nr_free2 = 0;
double ub1 = INF, ub2 = INF;
double lb1 = -INF, lb2 = -INF;
double sum_free1 = 0, sum_free2 = 0;
for(int i=0;i<active_size;i++)
{
if(y[i]==+1)
{
if(is_lower_bound(i))
ub1 = Math.min(ub1,G[i]);
else if(is_upper_bound(i))
lb1 = Math.max(lb1,G[i]);
else
{
++nr_free1;
sum_free1 += G[i];
}
}
else
{
if(is_lower_bound(i))
ub2 = Math.min(ub2,G[i]);
else if(is_upper_bound(i))
lb2 = Math.max(lb2,G[i]);
else
{
++nr_free2;
sum_free2 += G[i];
}
}
}
double r1,r2;
if(nr_free1 > 0)
r1 = sum_free1/nr_free1;
else
r1 = (ub1+lb1)/2;
if(nr_free2 > 0)
r2 = sum_free2/nr_free2;
else
r2 = (ub2+lb2)/2;
si.r = (r1+r2)/2;
return (r1-r2)/2;
}
}
//
// Q matrices for various formulations
//
class SVC_Q extends Kernel
{
private final byte[] y;
private final Cache cache;
SVC_Q(svm_problem prob, svm_parameter param, byte[] y_)
{
super(prob.l, prob.x, param);
y = (byte[])y_.clone();
cache = new Cache(prob.l,(int)(param.cache_size*(1<<20)));
}
float[] get_Q(int i, int len)
{
float[][] data = new float[1][];
int start;
if((start = cache.get_data(i,data,len)) < len)
{
for(int j=start;j<len;j++)
data[0][j] = (float)(y[i]*y[j]*kernel_function(i,j));
}
return data[0];
}
void swap_index(int i, int j)
{
cache.swap_index(i,j);
super.swap_index(i,j);
do {byte _=y[i]; y[i]=y[j]; y[j]=_;} while(false);
}
}
// Q matrix for one-class SVM: Q[i][j] = K(x_i, x_j), rows served from an
// LRU float cache and computed lazily on a miss.
class ONE_CLASS_Q extends Kernel
{
	private final Cache cache;

	ONE_CLASS_Q(svm_problem prob, svm_parameter param)
	{
		super(prob.l, prob.x, param);
		// param.cache_size is in megabytes.
		cache = new Cache(prob.l, (int) (param.cache_size * (1 << 20)));
	}

	// Returns the first `len` entries of row i, filling in only the
	// uncached tail of the row.
	float[] get_Q(int i, int len)
	{
		float[][] data = new float[1][];
		int start = cache.get_data(i, data, len);
		if (start < len)
		{
			for (int j = start; j < len; j++)
				data[0][j] = (float) kernel_function(i, j);
		}
		return data[0];
	}

	void swap_index(int i, int j)
	{
		cache.swap_index(i, j);
		super.swap_index(i, j);
	}
}
class SVR_Q extends Kernel
{
private final int l;
private final Cache cache;
private final byte[] sign;
private final int[] index;
private int next_buffer;
private float[][] buffer;
SVR_Q(svm_problem prob, svm_parameter param)
{
super(prob.l, prob.x, param);
l = prob.l;
cache = new Cache(l,(int)(param.cache_size*(1<<20)));
sign = new byte[2*l];
index = new int[2*l];
for(int k=0;k<l;k++)
{
sign[k] = 1;
sign[k+l] = -1;
index[k] = k;
index[k+l] = k;
}
buffer = new float[2][2*l];
next_buffer = 0;
}
void swap_index(int i, int j)
{
do {byte _=sign[i]; sign[i]=sign[j]; sign[j]=_;} while(false);
do {int _=index[i]; index[i]=index[j]; index[j]=_;} while(false);
}
float[] get_Q(int i, int len)
{
float[][] data = new float[1][];
int real_i = index[i];
if(cache.get_data(real_i,data,l) < l)
{
for(int j=0;j<l;j++)
data[0][j] = (float)kernel_function(real_i,j);
}
// reorder and copy
float buf[] = buffer[next_buffer];
next_buffer = 1 - next_buffer;
byte si = sign[i];
for(int j=0;j<len;j++)
buf[j] = si * sign[j] * data[0][index[j]];
return buf;
}
}
public class svm {
// Razvan: custom kernel.
static CustomKernel m_ck;
// Razvan: set custom kernel.
// Installs the user-supplied custom kernel implementation (Razvan's
// extension).  NOTE(review): presumably consulted by Kernel when a custom
// kernel type is selected — confirm against the Kernel class.
public static void setCustomKernel(CustomKernel ck)
{
m_ck = ck;
}
//
// construct and solve various formulations
//
// Sets up and solves the C-SVC dual problem:
//   min 1/2 alpha^T Q alpha - e^T alpha,  0 <= alpha_i <= C,  y^T alpha = 0,
// with class-dependent bounds Cp/Cn.  On return alpha[i] holds y_i*alpha_i.
private static void solve_c_svc(svm_problem prob, svm_parameter param,
				double[] alpha, Solver.SolutionInfo si,
				double Cp, double Cn)
{
	int l = prob.l;
	double[] minus_ones = new double[l];
	byte[] y = new byte[l];

	for (int i = 0; i < l; i++)
	{
		alpha[i] = 0;
		minus_ones[i] = -1;
		y[i] = (prob.y[i] > 0) ? (byte) 1 : (byte) -1;
	}

	new Solver().Solve(l, new SVC_Q(prob, param, y), minus_ones, y,
			alpha, Cp, Cn, param.eps, si, param.shrinking);

	double sum_alpha = 0;
	for (int i = 0; i < l; i++)
		sum_alpha += alpha[i];
	if (Cp == Cn)
		System.out.print("nu = " + sum_alpha / (Cp * prob.l) + "\n");

	// Fold the labels into the coefficients.
	for (int i = 0; i < l; i++)
		alpha[i] *= y[i];
}
// Sets up and solves the nu-SVC dual, then rescales the solution so it is
// comparable to the C-SVC form.  Each class is given an alpha "budget" of
// nu*l/2 that is distributed greedily over its examples; afterwards the
// solution is divided by r (the scalar computed by Solver_NU).
private static void solve_nu_svc(svm_problem prob, svm_parameter param,
				double[] alpha, Solver.SolutionInfo si)
{
int i;
int l = prob.l;
double nu = param.nu;
byte[] y = new byte[l];
for(i=0;i<l;i++)
if(prob.y[i]>0)
y[i] = +1;
else
y[i] = -1;
// Distribute the per-class budget: alphas are filled to 1.0 until the
// budget runs out, so the initial point satisfies both equality constraints.
double sum_pos = nu*l/2;
double sum_neg = nu*l/2;
for(i=0;i<l;i++)
if(y[i] == +1)
{
alpha[i] = Math.min(1.0,sum_pos);
sum_pos -= alpha[i];
}
else
{
alpha[i] = Math.min(1.0,sum_neg);
sum_neg -= alpha[i];
}
double[] zeros = new double[l];
for(i=0;i<l;i++)
zeros[i] = 0;
Solver_NU s = new Solver_NU();
s.Solve(l, new SVC_Q(prob,param,y), zeros, y,
alpha, 1.0, 1.0, param.eps, si, param.shrinking);
double r = si.r;
System.out.print("C = "+1/r+"\n");
// Rescale alphas, rho, the objective and the bounds by 1/r so the model
// behaves like a C-SVC model with C = 1/r.
for(i=0;i<l;i++)
alpha[i] *= y[i]/r;
si.rho /= r;
si.obj /= (r*r);
si.upper_bound_p = 1/r;
si.upper_bound_n = 1/r;
}
// Sets up and solves the one-class SVM dual:
//   min 1/2 alpha^T Q alpha,  0 <= alpha_i <= 1,  e^T alpha = nu*l.
// The first n = floor(nu*l) variables start at the upper bound, one
// variable takes the fractional remainder, and the rest start at zero.
private static void solve_one_class(svm_problem prob, svm_parameter param,
				double[] alpha, Solver.SolutionInfo si)
{
	int l = prob.l;
	double[] zeros = new double[l];
	byte[] ones = new byte[l];
	int i;

	int n = (int)(param.nu*prob.l);	// # of alpha's at upper bound

	for(i=0;i<n;i++)
		alpha[i] = 1;
	// BUGFIX: when nu == 1, n == l and the original unguarded write
	// alpha[n] = ... threw ArrayIndexOutOfBoundsException.  Guard as in
	// upstream LIBSVM; the remainder is zero in that case anyway.
	if(n < prob.l)
		alpha[n] = param.nu * prob.l - n;
	for(i=n+1;i<l;i++)
		alpha[i] = 0;

	for(i=0;i<l;i++)
	{
		zeros[i] = 0;
		ones[i] = 1;
	}

	Solver s = new Solver();
	s.Solve(l, new ONE_CLASS_Q(prob,param), zeros, ones,
		alpha, 1.0, 1.0, param.eps, si, param.shrinking);
}
// Sets up and solves the epsilon-SVR dual over 2*l variables (alpha and
// alpha*), then folds them into the caller's array as
// alpha[i] = alpha_i - alpha*_i.  Also prints the equivalent nu value.
private static void solve_epsilon_svr(svm_problem prob, svm_parameter param,
				double[] alpha, Solver.SolutionInfo si)
{
	int l = prob.l;
	double[] alpha2 = new double[2 * l];
	double[] linear_term = new double[2 * l];
	byte[] y = new byte[2 * l];

	for (int i = 0; i < l; i++)
	{
		// First half: the "alpha" variables ...
		alpha2[i] = 0;
		linear_term[i] = param.p - prob.y[i];
		y[i] = 1;
		// ... second half: the paired "alpha*" variables.
		alpha2[i + l] = 0;
		linear_term[i + l] = param.p + prob.y[i];
		y[i + l] = -1;
	}

	new Solver().Solve(2 * l, new SVR_Q(prob, param), linear_term, y,
			alpha2, param.C, param.C, param.eps, si, param.shrinking);

	double sum_alpha = 0;
	for (int i = 0; i < l; i++)
	{
		alpha[i] = alpha2[i] - alpha2[i + l];
		sum_alpha += Math.abs(alpha[i]);
	}
	System.out.print("nu = "+sum_alpha/(param.C*l)+"\n");
}
// Sets up and solves the nu-SVR dual over 2*l variables.  The total alpha
// budget C*nu*l/2 is distributed greedily over the paired variables, the
// Solver_NU run determines epsilon implicitly (printed as -si.r), and the
// pairs are folded into alpha[i] = alpha_i - alpha*_i.
private static void solve_nu_svr(svm_problem prob, svm_parameter param,
				double[] alpha, Solver.SolutionInfo si)
{
int l = prob.l;
double C = param.C;
double[] alpha2 = new double[2*l];
double[] linear_term = new double[2*l];
byte[] y = new byte[2*l];
int i;
double sum = C * param.nu * l / 2;
for(i=0;i<l;i++)
{
// Each pair (i, i+l) starts with equal values capped at C.
alpha2[i] = alpha2[i+l] = Math.min(sum,C);
sum -= alpha2[i];
linear_term[i] = - prob.y[i];
y[i] = 1;
linear_term[i+l] = prob.y[i];
y[i+l] = -1;
}
Solver_NU s = new Solver_NU();
s.Solve(2*l, new SVR_Q(prob,param), linear_term, y,
alpha2, C, C, param.eps, si, param.shrinking);
System.out.print("epsilon = "+(-si.r)+"\n");
for(i=0;i<l;i++)
alpha[i] = alpha2[i] - alpha2[i+l];
}
//
// decision_function
//
// Result of training a single binary sub-problem.
static class decision_function
{
double[] alpha; // dual coefficients (sign-folded by the solve_* routines)
double rho; // bias term of the decision function
};
// Trains one binary sub-problem (or one regression/one-class problem) by
// dispatching to the matching solve_* routine, logs solver statistics and
// support-vector counts, and returns the resulting decision function.
// Cp/Cn are the class-weighted C values (ignored by the non-C_SVC solvers).
static decision_function svm_train_one(
svm_problem prob, svm_parameter param,
double Cp, double Cn)
{
double[] alpha = new double[prob.l];
Solver.SolutionInfo si = new Solver.SolutionInfo();
switch(param.svm_type)
{
case svm_parameter.C_SVC:
solve_c_svc(prob,param,alpha,si,Cp,Cn);
break;
case svm_parameter.NU_SVC:
solve_nu_svc(prob,param,alpha,si);
break;
case svm_parameter.ONE_CLASS:
solve_one_class(prob,param,alpha,si);
break;
case svm_parameter.EPSILON_SVR:
solve_epsilon_svr(prob,param,alpha,si);
break;
case svm_parameter.NU_SVR:
solve_nu_svr(prob,param,alpha,si);
break;
}
System.out.print("obj = "+si.obj+", rho = "+si.rho+"\n");
// output SVs
// nSV counts all support vectors (alpha != 0); nBSV counts bounded ones
// (alpha at its upper bound).
int nSV = 0;
int nBSV = 0;
for(int i=0;i<prob.l;i++)
{
if(Math.abs(alpha[i]) > 0)
{
++nSV;
if(prob.y[i] > 0)
{
if(Math.abs(alpha[i]) >= si.upper_bound_p)
++nBSV;
}
else
{
if(Math.abs(alpha[i]) >= si.upper_bound_n)
++nBSV;
}
}
}
System.out.print("nSV = "+nSV+", nBSV = "+nBSV+"\n");
decision_function f = new decision_function();
f.alpha = alpha;
f.rho = si.rho;
return f;
}
// Platt's binary SVM Probabilistic Output: an improvement from Lin et al.
// Fits Platt's sigmoid P(y=1|f) = 1/(1+exp(A*f+B)) to (decision value,
// label) pairs by Newton's method with backtracking line search, using the
// numerically robust formulation of Lin, Lin & Weng.  The fitted (A, B)
// are written into probAB[0] and probAB[1].
private static void sigmoid_train(int l, double[] dec_values, double[] labels,
double[] probAB)
{
double A, B;
double prior1=0, prior0 = 0;
int i;
for (i=0;i<l;i++)
if (labels[i] > 0) prior1+=1;
else prior0+=1;
int max_iter=100; // Maximal number of iterations
double min_step=1e-10; // Minimal step taken in line search
double sigma=1e-3; // For numerically strict PD of Hessian
double eps=1e-5;
// Regularized targets (instead of hard 0/1) to avoid overfitting.
double hiTarget=(prior1+1.0)/(prior1+2.0);
double loTarget=1/(prior0+2.0);
double[] t= new double[l];
double fApB,p,q,h11,h22,h21,g1,g2,det,dA,dB,gd,stepsize;
double newA,newB,newf,d1,d2;
int iter;
// Initial Point and Initial Fun Value
A=0.0; B=Math.log((prior0+1.0)/(prior1+1.0));
double fval = 0.0;
for (i=0;i<l;i++)
{
if (labels[i]>0) t[i]=hiTarget;
else t[i]=loTarget;
fApB = dec_values[i]*A+B;
// Two algebraically equivalent forms keep exp() from overflowing.
if (fApB>=0)
fval += t[i]*fApB + Math.log(1+Math.exp(-fApB));
else
fval += (t[i] - 1)*fApB +Math.log(1+Math.exp(fApB));
}
for (iter=0;iter<max_iter;iter++)
{
// Update Gradient and Hessian (use H' = H + sigma I)
h11=sigma; // numerically ensures strict PD
h22=sigma;
h21=0.0;g1=0.0;g2=0.0;
for (i=0;i<l;i++)
{
fApB = dec_values[i]*A+B;
if (fApB >= 0)
{
p=Math.exp(-fApB)/(1.0+Math.exp(-fApB));
q=1.0/(1.0+Math.exp(-fApB));
}
else
{
p=1.0/(1.0+Math.exp(fApB));
q=Math.exp(fApB)/(1.0+Math.exp(fApB));
}
d2=p*q;
h11+=dec_values[i]*dec_values[i]*d2;
h22+=d2;
h21+=dec_values[i]*d2;
d1=t[i]-p;
g1+=dec_values[i]*d1;
g2+=d1;
}
// Stopping Criteria
if (Math.abs(g1)<eps && Math.abs(g2)<eps)
break;
// Finding Newton direction: -inv(H') * g
det=h11*h22-h21*h21;
dA=-(h22*g1 - h21 * g2) / det;
dB=-(-h21*g1+ h11 * g2) / det;
gd=g1*dA+g2*dB;
stepsize = 1; // Line Search
// Backtracking: halve the step until the Armijo-style sufficient
// decrease condition holds.
while (stepsize >= min_step)
{
newA = A + stepsize * dA;
newB = B + stepsize * dB;
// New function value
newf = 0.0;
for (i=0;i<l;i++)
{
fApB = dec_values[i]*newA+newB;
if (fApB >= 0)
newf += t[i]*fApB + Math.log(1+Math.exp(-fApB));
else
newf += (t[i] - 1)*fApB +Math.log(1+Math.exp(fApB));
}
// Check sufficient decrease
if (newf<fval+0.0001*stepsize*gd)
{
A=newA;B=newB;fval=newf;
break;
}
else
stepsize = stepsize / 2.0;
}
if (stepsize < min_step)
{
System.err.print("Line search fails in two-class probability estimates\n");
break;
}
}
if (iter>=max_iter)
System.err.print("Reaching maximal iterations in two-class probability estimates\n");
probAB[0]=A;probAB[1]=B;
}
// Maps a raw decision value to a probability through the fitted sigmoid
// 1/(1+exp(A*f+B)).  The two algebraically equivalent branches keep
// Math.exp() from overflowing for large |A*f+B|.
private static double sigmoid_predict(double decision_value, double A, double B)
{
	double fApB = decision_value * A + B;
	if (fApB < 0)
		return 1.0 / (1 + Math.exp(fApB));
	return Math.exp(-fApB) / (1.0 + Math.exp(-fApB));
}
// Method 2 from the multiclass_prob paper by Wu, Lin, and Weng
// Combines the k*(k-1)/2 pairwise probability estimates r[i][j] into a
// single k-class distribution p[] (Method 2 of Wu, Lin & Weng), by a
// fixed-point iteration on the linear system built from the pairwise
// estimates.  p is overwritten with the result.
private static void multiclass_probability(int k, double[][] r, double[] p)
{
int t,j;
int iter = 0, max_iter=100;
double[][] Q=new double[k][k];
double[] Qp= new double[k];
double pQp, eps=0.005/k;
for (t=0;t<k;t++)
{
p[t]=1.0/k; // Valid if k = 1
Q[t][t]=0;
for (j=0;j<t;j++)
{
Q[t][t]+=r[j][t]*r[j][t];
Q[t][j]=Q[j][t];
}
for (j=t+1;j<k;j++)
{
Q[t][t]+=r[j][t]*r[j][t];
Q[t][j]=-r[j][t]*r[t][j];
}
}
for (iter=0;iter<max_iter;iter++)
{
// stopping condition, recalculate QP,pQP for numerical accuracy
pQp=0;
for (t=0;t<k;t++)
{
Qp[t]=0;
for (j=0;j<k;j++)
Qp[t]+=Q[t][j]*p[j];
pQp+=p[t]*Qp[t];
}
double max_error=0;
for (t=0;t<k;t++)
{
double error=Math.abs(Qp[t]-pQp);
if (error>max_error)
max_error=error;
}
if (max_error<eps) break;
// One coordinate-wise update per class, renormalizing p and keeping
// Qp/pQp consistent incrementally.
for (t=0;t<k;t++)
{
double diff=(-Qp[t]+pQp)/Q[t][t];
p[t]+=diff;
pQp=(pQp+diff*(diff*Q[t][t]+2*Qp[t]))/(1+diff)/(1+diff);
for (j=0;j<k;j++)
{
Qp[j]=(Qp[j]+diff*Q[t][j])/(1+diff);
p[j]/=(1+diff);
}
}
}
if (iter>=max_iter)
System.err.print("Exceeds max_iter in multiclass_prob\n");
}
// Cross-validation decision values for probability estimates
// Produces sigmoid parameters (A, B) for one binary sub-problem by 5-fold
// cross-validation: trains on 4/5 of the data, collects out-of-fold
// decision values, then fits Platt's sigmoid to them via sigmoid_train.
// Degenerate folds (one class empty) get constant decision values.
private static void svm_binary_svc_probability(svm_problem prob, svm_parameter param, double Cp, double Cn, double[] probAB)
{
int i;
int nr_fold = 5;
int[] perm = new int[prob.l];
double[] dec_values = new double[prob.l];
// random shuffle
for(i=0;i<prob.l;i++) perm[i]=i;
for(i=0;i<prob.l;i++)
{
// Fisher-Yates shuffle step.
// NOTE(review): `_` below is a reserved keyword since Java 9; rename the
// temporary when modernizing this file.
int j = i+(int)(Math.random()*(prob.l-i));
do {int _=perm[i]; perm[i]=perm[j]; perm[j]=_;} while(false);
}
for(i=0;i<nr_fold;i++)
{
// Fold i covers perm[begin..end); the sub-problem is everything else.
int begin = i*prob.l/nr_fold;
int end = (i+1)*prob.l/nr_fold;
int j,k;
svm_problem subprob = new svm_problem();
subprob.l = prob.l-(end-begin);
subprob.x = new svm_node[subprob.l][];
subprob.y = new double[subprob.l];
k=0;
for(j=0;j<begin;j++)
{
subprob.x[k] = prob.x[perm[j]];
subprob.y[k] = prob.y[perm[j]];
++k;
}
for(j=end;j<prob.l;j++)
{
subprob.x[k] = prob.x[perm[j]];
subprob.y[k] = prob.y[perm[j]];
++k;
}
int p_count=0,n_count=0;
for(j=0;j<k;j++)
if(subprob.y[j]>0)
p_count++;
else
n_count++;
// Degenerate training folds: emit constant decision values instead of
// training a model.
if(p_count==0 && n_count==0)
for(j=begin;j<end;j++)
dec_values[perm[j]] = 0;
else if(p_count > 0 && n_count == 0)
for(j=begin;j<end;j++)
dec_values[perm[j]] = 1;
else if(p_count == 0 && n_count > 0)
for(j=begin;j<end;j++)
dec_values[perm[j]] = -1;
else
{
// Train an unweighted-C, no-probability model with class weights Cp/Cn
// applied via the weight mechanism.
svm_parameter subparam = (svm_parameter)param.clone();
subparam.probability=0;
subparam.C=1.0;
subparam.nr_weight=2;
subparam.weight_label = new int[2];
subparam.weight = new double[2];
subparam.weight_label[0]=+1;
subparam.weight_label[1]=-1;
subparam.weight[0]=Cp;
subparam.weight[1]=Cn;
svm_model submodel = svm_train(subprob,subparam);
for(j=begin;j<end;j++)
{
double[] dec_value=new double[1];
svm_predict_values(submodel,prob.x[perm[j]],dec_value);
dec_values[perm[j]]=dec_value[0];
// ensure +1 -1 order; reason not using CV subroutine
dec_values[perm[j]] *= submodel.label[0];
}
}
}
sigmoid_train(prob.l,dec_values,prob.y,probAB);
}
// Return parameter of a Laplace distribution
// Estimates the scale parameter sigma of a Laplace noise model for SVR
// predictions via 5-fold cross-validation: sigma is the mean absolute
// residual after discarding residuals larger than 5*sqrt(2*mae^2) as
// outliers.  Returns the estimate (also printed to stderr).
private static double svm_svr_probability(svm_problem prob, svm_parameter param)
{
	int nr_fold = 5;
	double[] ymv = new double[prob.l];

	// Cross-validate with probability estimation switched off.
	svm_parameter newparam = (svm_parameter) param.clone();
	newparam.probability = 0;
	svm_cross_validation(prob, newparam, nr_fold, ymv);

	double mae = 0;
	for (int i = 0; i < prob.l; i++)
	{
		ymv[i] = prob.y[i] - ymv[i]; // residual: target - out-of-fold prediction
		mae += Math.abs(ymv[i]);
	}
	mae /= prob.l;

	double std = Math.sqrt(2 * mae * mae);
	int count = 0;
	mae = 0;
	for (int i = 0; i < prob.l; i++)
	{
		if (Math.abs(ymv[i]) > 5 * std)
			count = count + 1;
		else
			mae += Math.abs(ymv[i]);
	}
	mae /= (prob.l - count);

	System.err.print("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma="+mae+"\n");
	return mae;
}
// label: label name, start: begin of each class, count: #data of classes, perm: indices to the original data
// perm, length l, must be allocated before calling this subroutine
// label: label name, start: begin of each class, count: #data of classes, perm: indices to the original data
// perm, length l, must be allocated before calling this subroutine
// Discovers the distinct class labels (growing the label/count arrays on
// demand) and fills perm with a permutation that groups examples of the
// same class contiguously, in label-discovery order.
private static void svm_group_classes(svm_problem prob, int[] nr_class_ret, int[][] label_ret, int[][] start_ret, int[][] count_ret, int[] perm)
{
int l = prob.l;
int max_nr_class = 16;
int nr_class = 0;
int[] label = new int[max_nr_class];
int[] count = new int[max_nr_class];
int[] data_label = new int[l];
int i;
for(i=0;i<l;i++)
{
int this_label = (int)(prob.y[i]);
int j;
for(j=0;j<nr_class;j++)
{
if(this_label == label[j])
{
++count[j];
break;
}
}
data_label[i] = j;
if(j == nr_class)
{
// New label: grow the label/count arrays if necessary (doubling).
if(nr_class == max_nr_class)
{
max_nr_class *= 2;
int[] new_data = new int[max_nr_class];
System.arraycopy(label,0,new_data,0,label.length);
label = new_data;
new_data = new int[max_nr_class];
System.arraycopy(count,0,new_data,0,count.length);
count = new_data;
}
label[nr_class] = this_label;
count[nr_class] = 1;
++nr_class;
}
}
// Counting sort: start[] holds class offsets, then perm is filled by
// scattering each example into its class's slot (start is consumed and
// rebuilt afterwards).
int[] start = new int[nr_class];
start[0] = 0;
for(i=1;i<nr_class;i++)
start[i] = start[i-1]+count[i-1];
for(i=0;i<l;i++)
{
perm[start[data_label[i]]] = i;
++start[data_label[i]];
}
start[0] = 0;
for(i=1;i<nr_class;i++)
start[i] = start[i-1]+count[i-1];
nr_class_ret[0] = nr_class;
label_ret[0] = label;
start_ret[0] = start;
count_ret[0] = count;
}
//
// Interface functions
//
// Trains an SVM model.  For regression/one-class types this is a single
// solver run; for classification it trains all k*(k-1)/2 pairwise binary
// classifiers (one-vs-one), optionally fits pairwise sigmoid probability
// models, and packs the shared support vectors and their per-classifier
// coefficients into the returned svm_model.
public static svm_model svm_train(svm_problem prob, svm_parameter param)
{
svm_model model = new svm_model();
model.param = param;
if(param.svm_type == svm_parameter.ONE_CLASS ||
param.svm_type == svm_parameter.EPSILON_SVR ||
param.svm_type == svm_parameter.NU_SVR)
{
// regression or one-class-svm
model.nr_class = 2;
model.label = null;
model.nSV = null;
model.probA = null; model.probB = null;
model.sv_coef = new double[1][];
if(param.probability == 1 &&
(param.svm_type == svm_parameter.EPSILON_SVR ||
param.svm_type == svm_parameter.NU_SVR))
{
// Laplace noise parameter for SVR probability estimates.
model.probA = new double[1];
model.probA[0] = svm_svr_probability(prob,param);
}
decision_function f = svm_train_one(prob,param,0,0);
model.rho = new double[1];
model.rho[0] = f.rho;
// Keep only the support vectors (nonzero alphas) in the model.
int nSV = 0;
int i;
for(i=0;i<prob.l;i++)
if(Math.abs(f.alpha[i]) > 0) ++nSV;
model.l = nSV;
model.SV = new svm_node[nSV][];
model.sv_coef[0] = new double[nSV];
int j = 0;
for(i=0;i<prob.l;i++)
if(Math.abs(f.alpha[i]) > 0)
{
model.SV[j] = prob.x[i];
model.sv_coef[0][j] = f.alpha[i];
++j;
}
}
else
{
// classification
int l = prob.l;
int[] tmp_nr_class = new int[1];
int[][] tmp_label = new int[1][];
int[][] tmp_start = new int[1][];
int[][] tmp_count = new int[1][];
int[] perm = new int[l];
// group training data of the same class
svm_group_classes(prob,tmp_nr_class,tmp_label,tmp_start,tmp_count,perm);
int nr_class = tmp_nr_class[0];
int[] label = tmp_label[0];
int[] start = tmp_start[0];
int[] count = tmp_count[0];
// x[] is the training data reordered so each class is contiguous.
svm_node[][] x = new svm_node[l][];
int i;
for(i=0;i<l;i++)
x[i] = prob.x[perm[i]];
// calculate weighted C
double[] weighted_C = new double[nr_class];
for(i=0;i<nr_class;i++)
weighted_C[i] = param.C;
for(i=0;i<param.nr_weight;i++)
{
int j;
for(j=0;j<nr_class;j++)
if(param.weight_label[i] == label[j])
break;
if(j == nr_class)
System.err.print("warning: class label "+param.weight_label[i]+" specified in weight is not found\n");
else
weighted_C[j] *= param.weight[i];
}
// train k*(k-1)/2 models
// nonzero[i] marks examples that are a support vector in at least one
// pairwise classifier; only those are stored in the model.
boolean[] nonzero = new boolean[l];
for(i=0;i<l;i++)
nonzero[i] = false;
decision_function[] f = new decision_function[nr_class*(nr_class-1)/2];
double[] probA=null,probB=null;
if (param.probability == 1)
{
probA=new double[nr_class*(nr_class-1)/2];
probB=new double[nr_class*(nr_class-1)/2];
}
int p = 0;
for(i=0;i<nr_class;i++)
for(int j=i+1;j<nr_class;j++)
{
// Build the binary sub-problem for class pair (i, j) from the two
// contiguous class ranges of x.
svm_problem sub_prob = new svm_problem();
int si = start[i], sj = start[j];
int ci = count[i], cj = count[j];
sub_prob.l = ci+cj;
sub_prob.x = new svm_node[sub_prob.l][];
sub_prob.y = new double[sub_prob.l];
int k;
for(k=0;k<ci;k++)
{
sub_prob.x[k] = x[si+k];
// Preserve label ordering.
if (label[i] < label[j])
sub_prob.y[k] = -1;
else
sub_prob.y[k] = +1;
}
for(k=0;k<cj;k++)
{
sub_prob.x[ci+k] = x[sj+k];
// Preserve label ordering.
if (label[j] < label[i])
sub_prob.y[ci+k] = -1;
else
sub_prob.y[ci+k] = +1;
}
if(param.probability == 1)
{
double[] probAB=new double[2];
svm_binary_svc_probability(sub_prob,param,weighted_C[i],weighted_C[j],probAB);
probA[p]=probAB[0];
probB[p]=probAB[1];
}
f[p] = svm_train_one(sub_prob,param,weighted_C[i],weighted_C[j]);
for(k=0;k<ci;k++)
if(!nonzero[si+k] && Math.abs(f[p].alpha[k]) > 0)
nonzero[si+k] = true;
for(k=0;k<cj;k++)
if(!nonzero[sj+k] && Math.abs(f[p].alpha[ci+k]) > 0)
nonzero[sj+k] = true;
++p;
}
// build output
model.nr_class = nr_class;
model.label = new int[nr_class];
for(i=0;i<nr_class;i++)
model.label[i] = label[i];
model.rho = new double[nr_class*(nr_class-1)/2];
for(i=0;i<nr_class*(nr_class-1)/2;i++)
model.rho[i] = f[i].rho;
if(param.probability == 1)
{
model.probA = new double[nr_class*(nr_class-1)/2];
model.probB = new double[nr_class*(nr_class-1)/2];
for(i=0;i<nr_class*(nr_class-1)/2;i++)
{
model.probA[i] = probA[i];
model.probB[i] = probB[i];
}
}
else
{
model.probA=null;
model.probB=null;
}
// Count the support vectors per class and in total (nnz).
int nnz = 0;
int[] nz_count = new int[nr_class];
model.nSV = new int[nr_class];
for(i=0;i<nr_class;i++)
{
int nSV = 0;
for(int j=0;j<count[i];j++)
if(nonzero[start[i]+j])
{
++nSV;
++nnz;
}
model.nSV[i] = nSV;
nz_count[i] = nSV;
}
System.out.print("Total nSV = "+nnz+"\n");
model.l = nnz;
model.SV = new svm_node[nnz][];
p = 0;
for(i=0;i<l;i++)
if(nonzero[i]) model.SV[p++] = x[i];
int[] nz_start = new int[nr_class];
nz_start[0] = 0;
for(i=1;i<nr_class;i++)
nz_start[i] = nz_start[i-1]+nz_count[i-1];
model.sv_coef = new double[nr_class-1][];
for(i=0;i<nr_class-1;i++)
model.sv_coef[i] = new double[nnz];
p = 0;
for(i=0;i<nr_class;i++)
for(int j=i+1;j<nr_class;j++)
{
// classifier (i,j): coefficients with
// i are in sv_coef[j-1][nz_start[i]...],
// j are in sv_coef[i][nz_start[j]...]
int si = start[i];
int sj = start[j];
int ci = count[i];
int cj = count[j];
int q = nz_start[i];
int k;
for(k=0;k<ci;k++)
if(nonzero[si+k])
model.sv_coef[j-1][q++] = f[p].alpha[k];
q = nz_start[j];
for(k=0;k<cj;k++)
if(nonzero[sj+k])
model.sv_coef[i][q++] = f[p].alpha[ci+k];
++p;
}
}
return model;
}
// Stratified cross validation
// Stratified cross validation
// For classification, folds are built so each contains roughly the same
// class proportions as the whole set; for regression/one-class a plain
// random split is used.  target[i] receives the out-of-fold prediction
// (or class probability argmax) for example i.
public static void svm_cross_validation(svm_problem prob, svm_parameter param, int nr_fold, double[] target)
{
int i;
int[] fold_start = new int[nr_fold+1];
int l = prob.l;
int[] perm = new int[l];
if(param.svm_type == svm_parameter.C_SVC ||
param.svm_type == svm_parameter.NU_SVC)
{
int[] tmp_nr_class = new int[1];
int[][] tmp_label = new int[1][];
int[][] tmp_start = new int[1][];
int[][] tmp_count = new int[1][];
svm_group_classes(prob,tmp_nr_class,tmp_label,tmp_start,tmp_count,perm);
int nr_class = tmp_nr_class[0];
int[] label = tmp_label[0];
int[] start = tmp_start[0];
int[] count = tmp_count[0];
// random shuffle and then data grouped by fold using the array perm
// NOTE(review): `_` in the swap below is a reserved keyword since Java 9;
// rename the temporary when modernizing this file.
int[] fold_count = new int[nr_fold];
int c;
int[] index = new int[l];
for(i=0;i<l;i++)
index[i]=perm[i];
for (c=0; c<nr_class; c++)
for(i=0;i<count[c];i++)
{
// Shuffle within each class range only, to keep stratification.
int j = i+(int)(Math.random()*(count[c]-i));
do {int _=index[start[c]+j]; index[start[c]+j]=index[start[c]+i]; index[start[c]+i]=_;} while(false);
}
for(i=0;i<nr_fold;i++)
{
fold_count[i] = 0;
for (c=0; c<nr_class;c++)
fold_count[i]+=(i+1)*count[c]/nr_fold-i*count[c]/nr_fold;
}
fold_start[0]=0;
for (i=1;i<=nr_fold;i++)
fold_start[i] = fold_start[i-1]+fold_count[i-1];
for (c=0; c<nr_class;c++)
for(i=0;i<nr_fold;i++)
{
int begin = start[c]+i*count[c]/nr_fold;
int end = start[c]+(i+1)*count[c]/nr_fold;
for(int j=begin;j<end;j++)
{
perm[fold_start[i]] = index[j];
fold_start[i]++;
}
}
// fold_start was consumed as a cursor above; rebuild it.
fold_start[0]=0;
for (i=1;i<=nr_fold;i++)
fold_start[i] = fold_start[i-1]+fold_count[i-1];
}
else
{
// Non-classification: plain Fisher-Yates shuffle and even split.
for(i=0;i<l;i++) perm[i]=i;
for(i=0;i<l;i++)
{
int j = i+(int)(Math.random()*(l-i));
do {int _=perm[i]; perm[i]=perm[j]; perm[j]=_;} while(false);
}
for(i=0;i<=nr_fold;i++)
fold_start[i]=i*l/nr_fold;
}
for(i=0;i<nr_fold;i++)
{
// Train on everything outside fold i, predict fold i.
int begin = fold_start[i];
int end = fold_start[i+1];
int j,k;
svm_problem subprob = new svm_problem();
subprob.l = l-(end-begin);
subprob.x = new svm_node[subprob.l][];
subprob.y = new double[subprob.l];
k=0;
for(j=0;j<begin;j++)
{
subprob.x[k] = prob.x[perm[j]];
subprob.y[k] = prob.y[perm[j]];
++k;
}
for(j=end;j<l;j++)
{
subprob.x[k] = prob.x[perm[j]];
subprob.y[k] = prob.y[perm[j]];
++k;
}
svm_model submodel = svm_train(subprob,param);
if(param.probability==1 &&
(param.svm_type == svm_parameter.C_SVC ||
param.svm_type == svm_parameter.NU_SVC))
{
double[] prob_estimates= new double[svm_get_nr_class(submodel)];
for(j=begin;j<end;j++)
target[perm[j]] = svm_predict_probability(submodel,prob.x[perm[j]],prob_estimates);
}
else
for(j=begin;j<end;j++)
target[perm[j]] = svm_predict(submodel,prob.x[perm[j]]);
}
}
// Returns the svm_type constant the model was trained with
// (C_SVC, NU_SVC, ONE_CLASS, EPSILON_SVR or NU_SVR).
public static int svm_get_svm_type(svm_model model)
{
return model.param.svm_type;
}
// Returns the number of classes (2 for regression/one-class models).
public static int svm_get_nr_class(svm_model model)
{
return model.nr_class;
}
// Copies the model's class labels into label[].  A no-op for models
// without labels (regression / one-class).
public static void svm_get_labels(svm_model model, int[] label)
{
	if (model.label == null) return;
	for (int i = 0; i < model.nr_class; i++)
		label[i] = model.label[i];
}
// Returns the Laplace noise parameter estimated during SVR training with
// probability estimates enabled; otherwise warns on stderr and returns 0.
public static double svm_get_svr_probability(svm_model model)
{
	boolean isSvr = model.param.svm_type == svm_parameter.EPSILON_SVR
			|| model.param.svm_type == svm_parameter.NU_SVR;
	if (isSvr && model.probA != null)
		return model.probA[0];
	System.err.print("Model doesn't contain information for SVR probability inference\n");
	return 0;
}
// Computes raw decision values for x.  For one-class/regression models a
// single value is written to dec_values[0]; for classification the
// k*(k-1)/2 pairwise decision values are written in (i,j) pair order.
public static void svm_predict_values(svm_model model, svm_node[] x, double[] dec_values)
{
if(model.param.svm_type == svm_parameter.ONE_CLASS ||
model.param.svm_type == svm_parameter.EPSILON_SVR ||
model.param.svm_type == svm_parameter.NU_SVR)
{
double[] sv_coef = model.sv_coef[0];
double sum = 0;
for(int i=0;i<model.l;i++)
sum += sv_coef[i] * Kernel.k_function(x,model.SV[i],model.param);
sum -= model.rho[0];
dec_values[0] = sum;
}
else
{
int i;
int nr_class = model.nr_class;
int l = model.l;
// Kernel values against every support vector are computed once and
// shared by all pairwise classifiers.
double[] kvalue = new double[l];
for(i=0;i<l;i++)
kvalue[i] = Kernel.k_function(x,model.SV[i],model.param);
// start[c]: offset of class c's support vectors within model.SV.
int[] start = new int[nr_class];
start[0] = 0;
for(i=1;i<nr_class;i++)
start[i] = start[i-1]+model.nSV[i-1];
int p=0;
int pos=0;
for(i=0;i<nr_class;i++)
for(int j=i+1;j<nr_class;j++)
{
double sum = 0;
int si = start[i];
int sj = start[j];
int ci = model.nSV[i];
int cj = model.nSV[j];
int k;
// Coefficients of class i's SVs in classifier (i,j) live in
// sv_coef[j-1]; class j's in sv_coef[i] (see svm_train packing).
double[] coef1 = model.sv_coef[j-1];
double[] coef2 = model.sv_coef[i];
for(k=0;k<ci;k++)
sum += coef1[si+k] * kvalue[si+k];
for(k=0;k<cj;k++)
sum += coef2[sj+k] * kvalue[sj+k];
sum -= model.rho[p++];
dec_values[pos++] = sum;
}
}
}
/**
 * Predicts the output for instance x.
 * One-class: the sign of the decision value mapped to +1/-1.
 * Regression (EPSILON_SVR/NU_SVR): the raw decision value.
 * Classification: one-vs-one voting over all pairwise decision functions;
 * the winning class's label is returned.
 */
public static double svm_predict(svm_model model, svm_node[] x)
{
	if(model.param.svm_type == svm_parameter.ONE_CLASS ||
	   model.param.svm_type == svm_parameter.EPSILON_SVR ||
	   model.param.svm_type == svm_parameter.NU_SVR)
	{
		double[] res = new double[1];
		svm_predict_values(model, x, res);
		if(model.param.svm_type == svm_parameter.ONE_CLASS)
			return (res[0]>0)?1:-1;
		else
			return res[0];
	}
	else
	{
		int i;
		int nr_class = model.nr_class;
		double[] dec_values = new double[nr_class*(nr_class-1)/2];
		svm_predict_values(model, x, dec_values);
		// One-vs-one voting: dec_values is iterated in the same (i,j)
		// pair order in which svm_predict_values filled it.
		int[] vote = new int[nr_class];
		for(i=0;i<nr_class;i++)
			vote[i] = 0;
		int pos=0;
		for(i=0;i<nr_class;i++)
			for(int j=i+1;j<nr_class;j++)
			{
				if(dec_values[pos++] > 0)
					++vote[i];
				else
					++vote[j];
			}
		// Ties are broken in favor of the smaller class index.
		int vote_max_idx = 0;
		for(i=1;i<nr_class;i++)
			if(vote[i] > vote[vote_max_idx])
				vote_max_idx = i;
		return model.label[vote_max_idx];
	}
}
/**
 * Predicts the class of x and fills prob_estimates with per-class
 * probabilities, provided the model is a C_SVC/NU_SVC model trained with
 * probability information (probA and probB present). Otherwise falls back
 * to plain svm_predict and leaves prob_estimates untouched.
 */
public static double svm_predict_probability(svm_model model, svm_node[] x, double[] prob_estimates)
{
	if ((model.param.svm_type == svm_parameter.C_SVC || model.param.svm_type == svm_parameter.NU_SVC) &&
	    model.probA!=null && model.probB!=null)
	{
		int i;
		int nr_class = model.nr_class;
		double[] dec_values = new double[nr_class*(nr_class-1)/2];
		svm_predict_values(model, x, dec_values);
		// Convert each pairwise decision value into a probability via the
		// fitted sigmoid (probA/probB), clipped away from 0 and 1 for
		// numerical stability.
		double min_prob=1e-7;
		double[][] pairwise_prob=new double[nr_class][nr_class];
		int k=0;
		for(i=0;i<nr_class;i++)
			for(int j=i+1;j<nr_class;j++)
			{
				pairwise_prob[i][j]=Math.min(Math.max(sigmoid_predict(dec_values[k],model.probA[k],model.probB[k]),min_prob),1-min_prob);
				pairwise_prob[j][i]=1-pairwise_prob[i][j];
				k++;
			}
		// Couple the pairwise probabilities into one multiclass
		// distribution, then return the most probable label.
		multiclass_probability(nr_class,pairwise_prob,prob_estimates);
		int prob_max_idx = 0;
		for(i=1;i<nr_class;i++)
			if(prob_estimates[i] > prob_estimates[prob_max_idx])
				prob_max_idx = i;
		return model.label[prob_max_idx];
	}
	else
		return svm_predict(model, x);
}
// Textual names written to / parsed from model files. Index order must
// match the svm_parameter constants: C_SVC=0, NU_SVC=1, ONE_CLASS=2,
// EPSILON_SVR=3, NU_SVR=4.
static final String svm_type_table[] =
{
	"c_svc","nu_svc","one_class","epsilon_svr","nu_svr",
};
// Index order must match LINEAR=0, POLY=1, RBF=2, SIGMOID=3, CUSTOM=4.
static final String kernel_type_table[]=
{
	"linear","polynomial","rbf","sigmoid", "custom",
};
/**
 * Serializes a trained model to a text file: a header of "key value"
 * lines followed by "SV" and one entry per support vector (sv_coef
 * columns first, then the node's own textual format).
 *
 * The output is byte-identical to the original implementation; the only
 * change is that the stream is now closed even when a write throws
 * (the original leaked the FileOutputStream on any IOException).
 *
 * NOTE(review): the base svm_node.write emits "index:value " with no
 * trailing newline, while svm_load_model reads one line per SV — saved
 * standard-kernel models appear to put all SVs on a single line; confirm
 * against the intended file format before relying on save/load round-trips.
 *
 * @param model_file_name destination path (truncated/created)
 * @param model the model to serialize
 * @throws IOException if the file cannot be created or written
 */
public static void svm_save_model(String model_file_name, svm_model model) throws IOException
{
	try (DataOutputStream fp = new DataOutputStream(new FileOutputStream(model_file_name)))
	{
		svm_parameter param = model.param;
		fp.writeBytes("svm_type "+svm_type_table[param.svm_type]+"\n");
		fp.writeBytes("kernel_type "+kernel_type_table[param.kernel_type]+"\n");
		// Kernel hyper-parameters are written only for kernels that use them.
		if(param.kernel_type == svm_parameter.POLY)
			fp.writeBytes("degree "+param.degree+"\n");
		if(param.kernel_type == svm_parameter.POLY ||
		   param.kernel_type == svm_parameter.RBF ||
		   param.kernel_type == svm_parameter.SIGMOID)
			fp.writeBytes("gamma "+param.gamma+"\n");
		if(param.kernel_type == svm_parameter.POLY ||
		   param.kernel_type == svm_parameter.SIGMOID)
			fp.writeBytes("coef0 "+param.coef0+"\n");
		int nr_class = model.nr_class;
		int l = model.l;
		fp.writeBytes("nr_class "+nr_class+"\n");
		fp.writeBytes("total_sv "+l+"\n");
		{
			fp.writeBytes("rho");
			for(int i=0;i<nr_class*(nr_class-1)/2;i++)
				fp.writeBytes(" "+model.rho[i]);
			fp.writeBytes("\n");
		}
		if(model.label != null)
		{
			fp.writeBytes("label");
			for(int i=0;i<nr_class;i++)
				fp.writeBytes(" "+model.label[i]);
			fp.writeBytes("\n");
		}
		if(model.probA != null) // regression has probA only
		{
			fp.writeBytes("probA");
			for(int i=0;i<nr_class*(nr_class-1)/2;i++)
				fp.writeBytes(" "+model.probA[i]);
			fp.writeBytes("\n");
		}
		if(model.probB != null)
		{
			fp.writeBytes("probB");
			for(int i=0;i<nr_class*(nr_class-1)/2;i++)
				fp.writeBytes(" "+model.probB[i]);
			fp.writeBytes("\n");
		}
		if(model.nSV != null)
		{
			fp.writeBytes("nr_sv");
			for(int i=0;i<nr_class;i++)
				fp.writeBytes(" "+model.nSV[i]);
			fp.writeBytes("\n");
		}
		fp.writeBytes("SV\n");
		double[][] sv_coef = model.sv_coef;
		svm_node[][] SV = model.SV;
		for(int i=0;i<l;i++)
		{
			for(int j=0;j<nr_class-1;j++)
				fp.writeBytes(sv_coef[j][i]+" ");
			// Each node serializes itself (intex_node writes a full line).
			svm_node[] p = SV[i];
			for (int j = 0; j < p.length; j++)
				p[j].write(fp); // Razvan
		}
	}
}
/**
 * Parses a floating-point literal.
 *
 * @param s a decimal string, e.g. "0.5" or "-1e-3"
 * @return the parsed value
 * @throws NumberFormatException if s is not a parseable double
 */
public static double atof(String s)
{
	// parseDouble avoids the needless boxing of
	// Double.valueOf(s).doubleValue().
	return Double.parseDouble(s);
}
/**
 * Parses a base-10 integer literal.
 *
 * @param s a decimal integer string
 * @return the parsed value
 * @throws NumberFormatException if s is not a parseable int
 */
public static int atoi(String s)
{
	final int parsed = Integer.parseInt(s);
	return parsed;
}
/**
 * Deserializes a model written by svm_save_model: first a sequence of
 * "key value" header lines, terminated by "SV", then one line per support
 * vector. Returns null (after printing to stderr) on unrecognized header
 * content.
 *
 * NOTE(review): the reader is not closed on the early "return null" paths
 * or when an IOException propagates — only on the success path; consider
 * try-with-resources. Also, readLine() returns null at EOF, so a truncated
 * file (no "SV" line) would raise a NullPointerException at the
 * cmd.substring call — TODO confirm whether inputs are always trusted.
 */
public static svm_model svm_load_model(String model_file_name)
	throws IOException
{
	BufferedReader fp = new BufferedReader(new FileReader(model_file_name));
	// read parameters
	svm_model model = new svm_model();
	svm_parameter param = new svm_parameter();
	model.param = param;
	// Optional sections default to null; only the lines present in the
	// file populate them.
	model.rho = null;
	model.probA = null;
	model.probB = null;
	model.label = null;
	model.nSV = null;
	while(true)
	{
		String cmd = fp.readLine();
		// Everything after the first space is the argument.
		String arg = cmd.substring(cmd.indexOf(' ')+1);
		if(cmd.startsWith("svm_type"))
		{
			// Match the textual name back to its constant (table index).
			int i;
			for(i=0;i<svm_type_table.length;i++)
			{
				if(arg.indexOf(svm_type_table[i])!=-1)
				{
					param.svm_type=i;
					break;
				}
			}
			if(i == svm_type_table.length)
			{
				System.err.print("unknown svm type.\n");
				return null;
			}
		}
		else if(cmd.startsWith("kernel_type"))
		{
			int i;
			for(i=0;i<kernel_type_table.length;i++)
			{
				if(arg.indexOf(kernel_type_table[i])!=-1)
				{
					param.kernel_type=i;
					break;
				}
			}
			if(i == kernel_type_table.length)
			{
				System.err.print("unknown kernel function.\n");
				return null;
			}
		}
		else if(cmd.startsWith("degree"))
			param.degree = atof(arg);
		else if(cmd.startsWith("gamma"))
			param.gamma = atof(arg);
		else if(cmd.startsWith("coef0"))
			param.coef0 = atof(arg);
		else if(cmd.startsWith("nr_class"))
			model.nr_class = atoi(arg);
		else if(cmd.startsWith("total_sv"))
			model.l = atoi(arg);
		else if(cmd.startsWith("rho"))
		{
			// One rho per one-vs-one decision function.
			int n = model.nr_class * (model.nr_class-1)/2;
			model.rho = new double[n];
			StringTokenizer st = new StringTokenizer(arg);
			for(int i=0;i<n;i++)
				model.rho[i] = atof(st.nextToken());
		}
		else if(cmd.startsWith("label"))
		{
			int n = model.nr_class;
			model.label = new int[n];
			StringTokenizer st = new StringTokenizer(arg);
			for(int i=0;i<n;i++)
				model.label[i] = atoi(st.nextToken());
		}
		else if(cmd.startsWith("probA"))
		{
			int n = model.nr_class*(model.nr_class-1)/2;
			model.probA = new double[n];
			StringTokenizer st = new StringTokenizer(arg);
			for(int i=0;i<n;i++)
				model.probA[i] = atof(st.nextToken());
		}
		else if(cmd.startsWith("probB"))
		{
			int n = model.nr_class*(model.nr_class-1)/2;
			model.probB = new double[n];
			StringTokenizer st = new StringTokenizer(arg);
			for(int i=0;i<n;i++)
				model.probB[i] = atof(st.nextToken());
		}
		else if(cmd.startsWith("nr_sv"))
		{
			int n = model.nr_class;
			model.nSV = new int[n];
			StringTokenizer st = new StringTokenizer(arg);
			for(int i=0;i<n;i++)
				model.nSV[i] = atoi(st.nextToken());
		}
		else if(cmd.startsWith("SV"))
		{
			// Header finished; support vectors follow.
			break;
		}
		else
		{
			System.err.print("unknown text in model file\n");
			return null;
		}
	}
	// read sv_coef and SV
	int m = model.nr_class - 1;
	int l = model.l;
	model.sv_coef = new double[m][l];
	// Razvan begin
	if (model.param.kernel_type == svm_parameter.CUSTOM) {
		// Custom kernels delegate the whole SV line (coefficients included)
		// to the node created by the installed CustomKernel (see m_ck).
		model.SV = new svm_node[l][];
		for (int i = 0; i < l; i++) {
			String line = fp.readLine();
			model.SV[i] = new svm_node[1];
			model.SV[i][0] = m_ck.new_svm_node();
			model.SV[i][0].read(line, model.sv_coef, m, i);
		}
	}
	else {
		// Standard format: m coefficient columns followed by
		// index:value pairs.
		model.SV = new svm_node[l][];
		for (int i = 0; i < l; i++) {
			String line = fp.readLine();
			StringTokenizer st =
				new StringTokenizer(line," \t\n\r\f:");
			for(int k=0;k<m;k++)
				model.sv_coef[k][i] = atof(st.nextToken());
			int n = st.countTokens()/2;
			model.SV[i] = new svm_node[n];
			for(int j=0;j<n;j++) {
				model.SV[i][j] = new svm_node();
				model.SV[i][j].index = atoi(st.nextToken());
				model.SV[i][j].value = atof(st.nextToken());
			}
		}
	}
	// Razvan end
	fp.close();
	return model;
}
/**
 * Validates a parameter set against a training problem before training.
 *
 * @return null when the parameters are acceptable, otherwise a short
 *         English description of the first violated constraint
 */
public static String svm_check_parameter(svm_problem prob, svm_parameter param)
{
	// svm_type
	int svm_type = param.svm_type;
	if(svm_type != svm_parameter.C_SVC &&
	   svm_type != svm_parameter.NU_SVC &&
	   svm_type != svm_parameter.ONE_CLASS &&
	   svm_type != svm_parameter.EPSILON_SVR &&
	   svm_type != svm_parameter.NU_SVR)
		return "unknown svm type";
	// kernel_type
	int kernel_type = param.kernel_type;
	if(kernel_type != svm_parameter.LINEAR &&
	   kernel_type != svm_parameter.POLY &&
	   kernel_type != svm_parameter.RBF &&
	   kernel_type != svm_parameter.SIGMOID &&
	   kernel_type != svm_parameter.CUSTOM)
		return "unknown kernel type";
	// cache_size,eps,C,nu,p,shrinking — each checked only for the
	// formulations that actually use it.
	if(param.cache_size <= 0)
		return "cache_size <= 0";
	if(param.eps <= 0)
		return "eps <= 0";
	if(svm_type == svm_parameter.C_SVC ||
	   svm_type == svm_parameter.EPSILON_SVR ||
	   svm_type == svm_parameter.NU_SVR)
		if(param.C <= 0)
			return "C <= 0";
	if(svm_type == svm_parameter.NU_SVC ||
	   svm_type == svm_parameter.ONE_CLASS ||
	   svm_type == svm_parameter.NU_SVR)
		if(param.nu < 0 || param.nu > 1)
			return "nu < 0 or nu > 1";
	if(svm_type == svm_parameter.EPSILON_SVR)
		if(param.p < 0)
			return "p < 0";
	if(param.shrinking != 0 &&
	   param.shrinking != 1)
		return "shrinking != 0 and shrinking != 1";
	if(param.probability != 0 &&
	   param.probability != 1)
		return "probability != 0 and probability != 1";
	if(param.probability == 1 &&
	   svm_type == svm_parameter.ONE_CLASS)
		return "one-class SVM probability output not supported yet";
	// check whether nu-svc is feasible
	if(svm_type == svm_parameter.NU_SVC)
	{
		int l = prob.l;
		// Count instances per distinct label; label/count grow by
		// doubling when more than max_nr_class labels appear.
		int max_nr_class = 16;
		int nr_class = 0;
		int[] label = new int[max_nr_class];
		int[] count = new int[max_nr_class];
		int i;
		for(i=0;i<l;i++)
		{
			int this_label = (int)prob.y[i];
			int j;
			for(j=0;j<nr_class;j++)
				if(this_label == label[j])
				{
					++count[j];
					break;
				}
			if(j == nr_class)
			{
				if(nr_class == max_nr_class)
				{
					max_nr_class *= 2;
					int[] new_data = new int[max_nr_class];
					System.arraycopy(label,0,new_data,0,label.length);
					label = new_data;
					new_data = new int[max_nr_class];
					System.arraycopy(count,0,new_data,0,count.length);
					count = new_data;
				}
				label[nr_class] = this_label;
				count[nr_class] = 1;
				++nr_class;
			}
		}
		// nu must satisfy nu*(n1+n2)/2 <= min(n1,n2) for every class pair.
		for(i=0;i<nr_class;i++)
		{
			int n1 = count[i];
			for(int j=i+1;j<nr_class;j++)
			{
				int n2 = count[j];
				if(param.nu*(n1+n2)/2 > Math.min(n1,n2))
					return "specified nu is infeasible";
			}
		}
	}
	return null;
}
/**
 * Reports whether the model can produce probability estimates:
 * classification models need both probA and probB, regression models
 * need probA only.
 *
 * @param model a trained model
 * @return 1 if probability output is available, 0 otherwise
 */
public static int svm_check_probability_model(svm_model model)
{
	int type = model.param.svm_type;
	boolean classification = (type == svm_parameter.C_SVC) || (type == svm_parameter.NU_SVC);
	boolean regression = (type == svm_parameter.EPSILON_SVR) || (type == svm_parameter.NU_SVR);
	boolean available;
	if (classification)
		available = (model.probA != null) && (model.probB != null);
	else if (regression)
		available = (model.probA != null);
	else
		available = false;
	return available ? 1 : 0;
}
}
| 58,749 | 21.262221 | 150 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/seq-kernel/ssk_core/libsvm/svm_model.java | //
// svm_model
//
package libsvm;
/**
 * A trained SVM model: the training parameters, the support vectors with
 * their decision-function coefficients, and (for classification) the
 * per-class bookkeeping used by the one-vs-one predictors.
 */
public class svm_model implements java.io.Serializable
{
	svm_parameter param;	// parameter
	int nr_class;		// number of classes, = 2 in regression/one class svm
	int l;			// total #SV
	public svm_node[][] SV;	// SVs (SV[l]) // Razvan
	double[][] sv_coef;	// coefficients for SVs in decision functions (sv_coef[n-1][l])
	double[] rho;		// constants in decision functions (rho[n*(n-1)/2])
	double[] probA;         // pairwise probability information
	double[] probB;         // sigmoid offsets paired with probA (classification only)
	// for classification only
	int[] label;		// label of each class (label[n])
	int[] nSV;		// number of SVs for each class (nSV[n])
		        	// nSV[0] + nSV[1] + ... + nSV[n-1] = l
};
| 681 | 30 | 84 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/seq-kernel/ssk_core/libsvm/svm_node.java | package libsvm;
import java.io.*;
/**
 * One sparse feature: a (index, value) pair. Also serves as the extension
 * point for custom instance representations: read() is intentionally a
 * no-op here and is overridden by subclasses (e.g. ssk.intex_node) that
 * parse a whole support-vector line themselves.
 */
public class svm_node implements java.io.Serializable
{
	public int index;
	public double value;
	// Hook for custom formats; the base class parses nothing.
	public void read(String line, double[][] coef, int m, int index)
	{
	}
	// Writes "index:value " (note: no trailing newline).
	public void write(DataOutputStream fp) throws IOException
	{
		fp.writeBytes(index + ":" + value + " ");
	}
}
| 330 | 16.421053 | 66 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/seq-kernel/ssk_core/libsvm/svm_parameter.java | package libsvm;
/**
 * Hyper-parameters for training and prediction. The int constants below
 * double as indices into svm.svm_type_table / svm.kernel_type_table, so
 * their numeric values must not change.
 */
public class svm_parameter implements Cloneable,java.io.Serializable
{
	/* svm_type */
	public static final int C_SVC = 0;
	public static final int NU_SVC = 1;
	public static final int ONE_CLASS = 2;
	public static final int EPSILON_SVR = 3;
	public static final int NU_SVR = 4;
	/* kernel_type */
	public static final int LINEAR = 0;
	public static final int POLY = 1;
	public static final int RBF = 2;
	public static final int SIGMOID = 3;
	public static final int CUSTOM = 4;
	public int svm_type;
	public int kernel_type;
	public double degree;	// for poly
	public double gamma;	// for poly/rbf/sigmoid
	public double coef0;	// for poly/sigmoid
	// these are for training only
	public double cache_size; // in MB
	public double eps;	// stopping criteria
	public double C;	// for C_SVC, EPSILON_SVR and NU_SVR
	public int nr_weight;		// for C_SVC
	public int[] weight_label;	// for C_SVC
	public double[] weight;		// for C_SVC
	public double nu;	// for NU_SVC, ONE_CLASS, and NU_SVR
	public double p;	// for EPSILON_SVR
	public int shrinking;	// use the shrinking heuristics
	public int probability; // do probability estimates
	// Shallow copy; CloneNotSupportedException cannot occur since this
	// class implements Cloneable, so the null branch is unreachable in
	// practice.
	public Object clone()
	{
		try
		{
			return super.clone();
		} catch (CloneNotSupportedException e)
		{
			return null;
		}
	}
}
| 1,293 | 25.958333 | 68 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/seq-kernel/ssk_core/libsvm/svm_problem.java | package libsvm;
/**
 * A training set: l instances, each a sparse feature vector x[i] with
 * target y[i] (a class label for classification, a real value for
 * regression).
 */
public class svm_problem implements java.io.Serializable
{
	public int l;		// number of training instances
	public double[] y;	// targets, one per instance
	public svm_node[][] x;	// sparse feature vectors, one per instance
}
| 136 | 16.125 | 56 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/seq-kernel/ssk_core/ssk/FeatureDictionary.java | package ssk;
import java.util.*;
/**
* Class FeatureDictionary. A dictionary (i.e set of unique features) is
* created for each feature type.
*
* Current feature types:
* - words;
* - POS tags;
*
* Other possible feature types:
* - phrase tags;
* - entity types;
* - WordNet synsets;
*
* @author Razvan Bunescu
*/
/**
 * Interning dictionary: one map per feature type (words at FEAT_WORD,
 * POS tags at FEAT_POS) mapping every feature string to a single
 * canonical String object, so identical features can later be compared
 * with '==' (see SubsequenceKernel.common).
 */
public class FeatureDictionary {

  static public int FEAT_TYPES = 2;
  static public int FEAT_WORD = 0;
  static public int FEAT_POS = 1;

  // m_features[type] maps each feature string to its canonical instance.
  public HashMap<String, String>[] m_features;

  /** Creates an empty dictionary for every feature type. */
  public FeatureDictionary()
  {
    m_features = (HashMap<String, String>[]) new HashMap[FEAT_TYPES];
    int type = 0;
    while (type < m_features.length) {
      m_features[type] = new HashMap<String, String>();
      type++;
    }
  }

  /**
   * Returns the canonical instance of strFeature for the given feature
   * type, registering strFeature itself as the canonical instance if it
   * has not been seen before.
   */
  public String getAddFeature(int nType, String strFeature)
  {
    HashMap<String, String> dict = m_features[nType];
    if (!dict.containsKey(strFeature)) {
      dict.put(strFeature, strFeature);
      return strFeature;
    }
    return dict.get(strFeature);
  }
}
| 1,001 | 19.04 | 73 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/seq-kernel/ssk_core/ssk/InstanceExample.java | package ssk;
import java.io.*;
import java.util.*;
/**
* Class InstanceExample represents a training/testing example as an array
* of feature sets (here, a feature set contains a word and its POS tag).
*
* @author Razvan Bunescu
*/
/**
 * One training/testing example: a sequence of per-token feature sets
 * (word + POS tag) parsed from whitespace-separated "word/TAG" tokens,
 * plus an integer label.
 */
public class InstanceExample implements java.io.Serializable {

  int label_;          // class label assigned via setLabel()
  String[][] sequence_; // sequence_[token][featureType], interned strings

  /**
   * Parses text of the form "word/TAG word/TAG ...". Every word and tag is
   * interned through fd so that equal features share one String object.
   * Note: only effective with JVM assertions enabled — a token without '/'
   * trips the assert below, otherwise substring() would throw.
   */
  public InstanceExample(String text, FeatureDictionary fd)
  {
    Vector<String[]> sequence = new Vector<String[]>();
    StringTokenizer st = new StringTokenizer(text);
    while (st.hasMoreTokens()) {
      String word_tag = st.nextToken();
      // Features are separated by '/'. lastIndexOf keeps words that
      // themselves contain '/' intact.
      int separator = word_tag.lastIndexOf('/');
      assert separator != -1;
      String word = word_tag.substring(0, separator);
      String tag = word_tag.substring(separator + 1);
      // Use object with the same value from the dictionary.
      // If no such object, add new feature to the dictionary.
      String[] features = new String[FeatureDictionary.FEAT_TYPES];
      features[FeatureDictionary.FEAT_WORD] =
        fd.getAddFeature(FeatureDictionary.FEAT_WORD, word);
      features[FeatureDictionary.FEAT_POS] =
        fd.getAddFeature(FeatureDictionary.FEAT_POS, tag);
      sequence.add(features);
    }
    sequence_ = sequence.toArray(new String[0][]);
  }

  public void setLabel(int label)
  {
    label_ = label;
  }

  public int getLabel()
  {
    return label_;
  }

  /** Re-serializes the sequence as "word/TAG word/TAG ... " (trailing space). */
  public String toString()
  {
    String result = "";
    for (int i = 0; i < sequence_.length; i++) {
      result +=
        sequence_[i][FeatureDictionary.FEAT_WORD] +
        "/" +
        sequence_[i][FeatureDictionary.FEAT_POS] +
        " ";
    }
    return result;
  }

  /** Writes the textual form followed by a newline. */
  public void write(DataOutputStream fp) throws IOException
  {
    fp.writeBytes(toString());
    fp.writeBytes("\n");
  }
}
| 1,806 | 21.308642 | 74 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/seq-kernel/ssk_core/ssk/SubsequenceKernel.java | package ssk;
import libsvm.*;
import java.util.*;
import java.util.Vector;
import java.io.File;
import java.io.InputStreamReader;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
/**
* Generalized subsequence kernel implementation.
*
* @author Razvan Bunescu
*/
/**
 * Generalized subsequence kernel implementation.
 *
 * The kernel/stringKernel/common computations are unchanged from the
 * original. The main() driver is fixed to (a) close its readers/writer via
 * try-with-resources, (b) report I/O errors instead of swallowing them in
 * empty catch blocks, and (c) allocate the feature dictionary, kernel
 * object and the per-row InstanceExample once instead of per pair — the
 * dictionary only interns strings, so sharing it does not change any
 * kernel value. The written "out.txt" is byte-identical to before.
 *
 * @author Razvan Bunescu
 */
public class SubsequenceKernel extends CustomKernel
{
  static final int DEFAULT_MAXLEN = 4;
  static final double DEFAULT_LAMBDA = 0.75;
  static final boolean DEFAULT_CACHE = true;
  static final boolean DEFAULT_NORM = true;

  // maximum length of common subsequences
  int m_maxlen;
  // gap penalty
  double m_lambda;
  // true if self kernels are cached
  boolean m_bCache;
  // true if kernels are normalized
  boolean m_bNorm;
  // cache mapping a sequence (by identity) to its self-kernel value
  protected HashMap m_mapStoK;

  public SubsequenceKernel(int maxlen, double lambda,
                           boolean bCache, boolean bNorm)
  {
    m_maxlen = maxlen;
    m_lambda = lambda;
    m_bCache = bCache;
    m_bNorm = bNorm;
    m_mapStoK = new HashMap();
  }

  public SubsequenceKernel()
  {
    // Default values.
    m_maxlen = DEFAULT_MAXLEN;
    m_lambda = DEFAULT_LAMBDA;
    m_bCache = DEFAULT_CACHE;
    m_bNorm = DEFAULT_NORM;
    m_mapStoK = new HashMap();
  }

  /**
   * Computes the (normalized) subsequence kernel between two sequences.
   *
   * @param ie1 sequence instance 1
   * @param ie2 sequence instance 2
   * @return kernel value.
   */
  public double kernel(InstanceExample ie1, InstanceExample ie2)
  {
    if (m_bNorm) {
      double k1 = selfKernel(ie1.sequence_);
      double k2 = selfKernel(ie2.sequence_);
      double k = singleKernel(ie1.sequence_, ie2.sequence_);
      if (k == 0)
        return 0;
      assert k1 != 0;
      assert k2 != 0;
      // normalize
      return k / Math.sqrt (k1 * k2);
    }
    // don't normalize
    return singleKernel(ie1.sequence_, ie2.sequence_);
  }

  /**
   * Kernel method, with prototype specified by CustomKernel.
   *
   * @param x1 first instances.
   * @param x2 second instance.
   * @return kernel value.
   */
  public double kernel(svm_node[] x1, svm_node[] x2)
  {
    InstanceExample ie1 = ((intex_node) x1[0]).m_value;
    InstanceExample ie2 = ((intex_node) x2[0]).m_value;
    return kernel(ie1, ie2);
  }

  public svm_node new_svm_node()
  {
    return new intex_node();
  }

  /** Self kernel K(s,s), optionally memoized by sequence identity. */
  public double selfKernel(String[][] s)
  {
    if (m_bCache) {
      // get cached value
      Double dblk = (Double) m_mapStoK.get(s);
      if (dblk == null) {
        double k = singleKernel(s, s);
        m_mapStoK.put(s, new Double(k));
        return k;
      }
      return dblk.doubleValue();
    }
    return singleKernel(s, s);
  }

  /** Sum of the per-length subsequence kernel values up to m_maxlen. */
  public double singleKernel(String[][] s1, String[][] s2)
  {
    double[] sk = stringKernel(s1, s2, m_maxlen, m_lambda);
    double result = 0.0;
    for (int i = 0; i < sk.length; i++)
      result += sk[i];
    return result;
  }

  /**
   * Computes the number of common subsequences between two sequences.
   *
   * @param s first sequence of features.
   * @param t second sequence of features.
   * @param n maximum subsequence length.
   * @param lambda gap penalty.
   * @return kernel value K[], one position for every length up to n.
   *
   * The algorithm corresponds to the recursive computation from Figure 1
   * in the paper "Subsequence Kernels for Relation Extraction" (NIPS 2005),
   * where:
   * - K stands for K;
   * - Kp stands for K';
   * - Kpp stands for K'';
   * - common stands for c;
   */
  protected double[] stringKernel(String[][] s, String[][] t,
                                  int n, double lambda)
  {
    int sl = s.length;
    int tl = t.length;

    double[][][] Kp = new double[n + 1][sl][tl];

    for (int j = 0; j < sl; j++)
      for (int k = 0; k < tl; k++)
        Kp[0][j][k] = 1;

    for (int i = 0; i < n; i++) {
      for (int j = 0; j < sl - 1; j++) {
        double Kpp = 0.0;
        for (int k = 0; k < tl - 1; k++) {
          Kpp = lambda * (Kpp + lambda * common(s[j], t[k]) * Kp[i][j][k]);
          Kp[i + 1][j + 1][k + 1] = lambda * Kp[i + 1][j][k + 1] + Kpp;
        }
      }
    }

    double[] K = new double[n];
    for (int l = 0; l < K.length; l++) {
      K[l] = 0.0;
      for (int j = 0; j < sl; j++) {
        for (int k = 0; k < tl; k++)
          K[l] += lambda * lambda * common(s[j], t[k]) * Kp[l][j][k];
      }
    }

    return K;
  }

  /**
   * Computes the number of common features between two sets of features.
   *
   * @param s first set of features.
   * @param t second set of features.
   * @return number of common features.
   *
   * The use of FeatureDictionary ensures that identical features correspond
   * to the same object reference. Hence, the operator '==' can be used to
   * speed-up the computation.
   */
  protected int common(String[] s, String[] t)
  {
    assert s.length == t.length;
    int nCount = 0;
    for (int i = 0; i < s.length; i++)
      if (s[i] != null && s[i] == t[i])
        nCount++;
    return nCount;
  }

  /**
   * Reads one tokenized "word/TAG ..." sentence per line from "base.txt"
   * and "infer.txt", then writes to "out.txt" the (unnormalized,
   * maxlen=2, lambda=1) kernel value for every (infer, base) pair —
   * one row per infer sentence.
   */
  public static void main (String[] args)
  {
    ArrayList<String> listb = readLines("base.txt");
    ArrayList<String> listi = readLines("infer.txt");

    System.out.println(listi.size());
    System.out.println(listb.size());

    File writename = new File("out.txt");
    try
    {
      writename.createNewFile();
      try (BufferedWriter out = new BufferedWriter(new FileWriter(writename)))
      {
        out.write(listi.size() + " " + listb.size() + "\n");
        // One shared dictionary/kernel for all pairs: the dictionary only
        // interns strings, so this produces exactly the same values as
        // allocating fresh objects per pair.
        FeatureDictionary fd = new FeatureDictionary();
        SubsequenceKernel rk = new SubsequenceKernel(2, 1.0, false, false);
        for (int i = 0; i != listi.size(); i++)
        {
          System.out.println(i);
          InstanceExample ie1 = new InstanceExample(listi.get(i), fd);
          for (int j = 0; j != listb.size(); j++)
          {
            InstanceExample ie2 = new InstanceExample(listb.get(j), fd);
            out.write(rk.kernel(ie1, ie2) + " ");
          }
          out.write("\n");
        }
      }
    }
    catch (IOException e)
    {
      // Previously swallowed silently; at least report the failure.
      e.printStackTrace();
    }
  }

  /**
   * Reads all lines of the given file. On an I/O error, reports it and
   * returns whatever was read so far (preserving the original driver's
   * continue-on-error behavior).
   */
  private static ArrayList<String> readLines(String path)
  {
    ArrayList<String> lines = new ArrayList<>();
    try (BufferedReader br = new BufferedReader(
           new InputStreamReader(new FileInputStream(new File(path)))))
    {
      String line;
      while ((line = br.readLine()) != null)
        lines.add(line);
    }
    catch (IOException e)
    {
      e.printStackTrace();
    }
    return lines;
  }
}
| 9,305 | 25.818444 | 89 | java |
USC-DS-RelationExtraction | USC-DS-RelationExtraction-master/code/Model/seq-kernel/ssk_core/ssk/intex_node.java | package ssk;
import libsvm.*;
import java.io.*;
import java.util.*;
/**
* Class intex_node extends svm_node to allow for a custom representation of
* training/testing instances.
*
* @author Razvan Bunescu
*/
/**
 * svm_node subclass whose payload is a whole tokenized sentence
 * (InstanceExample) instead of a sparse (index,value) pair, allowing the
 * custom subsequence kernel to operate on structured instances.
 *
 * @author Razvan Bunescu
 */
public class intex_node extends svm_node {

  // Dictionary shared by every instance read from a model file, so that
  // identical features are interned and comparable with '=='.
  static public FeatureDictionary m_fd;

  static {
    m_fd = new FeatureDictionary();
  }

  public InstanceExample m_value;

  /**
   * Parses one SV line of a model file: the first m tokens are the SV
   * coefficients (stored into coef[k][index]); the remaining tokens form
   * the word/POS sequence of this instance.
   */
  public void read(String line, double[][] coef, int m, int index)
  {
    StringTokenizer st = new StringTokenizer(line," \t\n\r\f");
    // Read SV coefficients.
    for (int k = 0; k < m; k++)
      coef[k][index] = svm.atof(st.nextToken());
    // Collect the remaining tokens; StringBuilder avoids the quadratic
    // cost of the original String concatenation in a loop. The result is
    // the same space-terminated string as before.
    StringBuilder text = new StringBuilder();
    while (st.hasMoreTokens())
      text.append(st.nextToken()).append(' ');
    // Create instance example from the collected text.
    m_value = new InstanceExample(text.toString(), m_fd);
  }

  /** Writes this instance back in its textual format (plus newline). */
  public void write(DataOutputStream fp) throws IOException
  {
    m_value.write(fp);
  }
}
| 957 | 19.382979 | 76 | java |
null | RevTerm-main/code/C_to_prog/src/C_to_prog.java | import java.io.File;
import java.io.FileWriter;
import java.util.Scanner;
import java.util.Vector;
/**
 * Translates a restricted C program (SV-COMP C-Integer style) into the
 * "prog" while-language: '=' becomes ':=', blocks become then/else/fi and
 * do/od, '&amp;&amp;'/'||' become and/or, and __VERIFIER_nondet_int becomes
 * _NONDET_. Usage: args[0] = input C file, args[1] = output file.
 */
public class C_to_prog
{
    public static void main(String[] args) throws Exception
    {
        String input=args[0];
        String output=args[1];
        Convert(input,output);
    }
    /**
     * Reads the C file line by line and emits the translated program.
     * NOTE(review): the Scanner is never closed (harmless here since the
     * process exits right after) and lines containing "int" anywhere —
     * including inside identifiers — are skipped entirely by the
     * s.contains("int") test below; confirm inputs never rely on such names.
     */
    public static void Convert(String input,String output) throws Exception
    {
        File file=new File(input);
        Scanner in=new Scanner(file);
        String program="";
        boolean comment=false;
        // Stack of open constructs ("main"/"if"/"else"/"while") so that a
        // closing '}' knows which terminator (fi/od) to emit.
        Vector <String> scope=new Vector<>();
        // Statements emitted in the current block; used to insert "skip"
        // into otherwise-empty blocks.
        int cnt=0;
        scope.add("main");
        while(in.hasNext())
        {
            String s = in.nextLine();
            // Strip leading whitespace.
            for(int i=0;i<s.length();i++)
                if(s.charAt(i)!=' ' && s.charAt(i)!='\t')
                {
                    s=s.substring(i,s.length());
                    break;
                }
            // Token-level rewrites from C to the target language.
            s = s.replace("true","1>=0");
            s = s.replace("false","1<=0");
            s = s.replace("&&"," and ");
            s = s.replace("||"," or ");
            s = s.replace("(", " ");
            s = s.replace(")", " ");
            s = s.replace(";", ";\n");
            s = s.replace("__VERIFIER_nondet_int", "_NONDET_");
            //System.err.println(s);
            for (int i = 0; i < s.length(); i++)
            {
                // Track /* ... */ comment regions across characters.
                if (i + 2 <= s.length() && s.substring(i, i + 2).equals("/*"))
                {
                    comment = true;
                    continue;
                }
                else if (i + 2 <= s.length() && s.substring(i, i + 2).equals("*/"))
                {
                    comment = false;
                    i++;
                    continue;
                }
                // Declarations, returns and '//' comments: drop the rest
                // of the line.
                else if (s.contains("typedef") || s.contains("extern") || s.contains("int") || s.contains("return") || (i + 2 <= s.length() && s.substring(i, i + 2).equals("//")))
                    break;
                if (comment)
                    continue;
                if(s.charAt(i)==';')
                    cnt++;
                // 'if' keyword (guarded against identifiers containing "if").
                if (i + 2 <= s.length() && s.substring(i, i + 2).equals("if") && !Character.isAlphabetic(s.charAt(i+2)) && (i==0 || !Character.isAlphabetic(s.charAt(i-1))))
                {
                    scope.add("if");
                    program += "if ";
                    i++;
                    continue;
                }
                else if (i + 4 <= s.length() && s.substring(i, i + 4).equals("else"))
                {
                    // The if-branch just closed and emitted
                    // "\n" + "else\nskip\nfi;\n" (15 chars); strip that
                    // synthetic empty else so the real one replaces it.
                    program = program.substring(0, program.length() - 15) + "\nelse";
                    scope.add("else");
                    i += 3;
                }
                else if (i + 5 <= s.length() && s.substring(i, i + 5).equals("while"))
                {
                    scope.add("while");
                    program += "while ";
                    i += 4;
                }
                else if (s.charAt(i) == '{')
                {
                    //System.err.println(program);
                    // Opening a block: emit the construct's body keyword
                    // and start counting its statements.
                    if (scope.lastElement().equals("while"))
                        program += " do ";
                    else if (scope.lastElement().equals("if"))
                        program += " then ";
                    cnt=0;
                    continue;
                }
                else if (s.charAt(i) == '}')
                {
                    // Closing a block: trim trailing separators, pad empty
                    // blocks with "skip", then emit the terminator matching
                    // the construct being closed.
                    while(program.endsWith(" ") || program.endsWith("\n") || program.endsWith("\t") || program.endsWith(";"))
                        program = program.substring(0, program.length() - 1);
                    program+="\n";
//                    System.err.println("------------------");
//                    System.err.println(program);
//                    System.err.println("------------------");
                    String last = scope.lastElement();
                    scope.removeElementAt(scope.size() - 1);
                    if(cnt==0)
                        program+="skip\n";
                    if (last.equals("if"))
                        program += "else\nskip\nfi;\n";
                    else if (last.equals("else"))
                        program += "fi;\n";
                    else if (last.equals("while"))
                        program += "od;\n";
                    cnt=1;
                }
                else if (s.charAt(i) == '=')
                {
                    // Plain assignment becomes ':='; comparison operators
                    // (!=, <=, >=, ==) keep their '='.
                    char c = s.charAt(i - 1),t=s.charAt(i+1);
                    if (c != '!' && c != '<' && c != '>' && c != '=' && t!='=')
                        program += ":=";
                    else
                        program += "=";
                }
                else
                    program += s.charAt(i);
            }
            if (!program.equals("") && program.charAt(program.length() - 1) != '\n')
                program += "\n";
        }
        // Trim trailing separators from the finished program.
        while(program.endsWith("\n") || program.endsWith(" ") || program.endsWith("\t") || program.endsWith(";"))
            program=program.substring(0,program.length()-1);
        FileWriter fw=new FileWriter(output);
        fw.write(program);
        fw.close();
    }
}
| 5,115 | 37.179104 | 179 | java |
null | RevTerm-main/code/linear/part1/Main/src/CFGNode.java | import java.util.Map;
import java.util.TreeMap;
import java.util.Vector;
/**
 * A node of the control-flow graph. All nodes are registered in the
 * static allCFGNodes list and the id-to-node map on construction.
 */
public class CFGNode
{
    public static Vector<CFGNode> allCFGNodes = new Vector<>();
    public static Map<Integer, CFGNode> idToNode = new TreeMap<>();
    public static int greatestNodeIndex = 0;
    public Vector<Transition> out;   // outgoing transitions
    int id;                          // external node id (see idToNode)
    boolean visited;                 // scratch flag for graph traversals
    boolean isCutPoint;              // set by CFGUtil.findCutpoints
    Vector<QuadraticPredicate> inv;  // invariants attached to this node

    // Registers the node globally and tracks the largest id seen, so new
    // helper nodes can be created with fresh ids (greatestNodeIndex + 1).
    CFGNode(int ind)
    {
        id = ind;
        idToNode.put(ind, this);
        out = new Vector<>();
        allCFGNodes.add(this);
        inv = new Vector<>();
        visited = isCutPoint =false;
        if (ind > greatestNodeIndex)
            greatestNodeIndex = ind;
    }

    /**
     * Replaces every group of outgoing transitions that share the same
     * deterministic guard with a single transition through a fresh
     * intermediate node: the new first edge assigns a nondeterministic
     * value to "_tmp_", and the intermediate node branches on _tmp_'s
     * range to select exactly one of the original successors. This makes
     * the nondeterministic choice explicit in the graph.
     */
    void addNecessaryNondet()
    {
        // Partition the outgoing transitions into groups with logically
        // equal deterministic guards; singleton groups are left alone.
        Vector<Vector<Transition>> groups = new Vector<>();
        for (Transition t : out)
            if (!t.hasGroup)
            {
                Vector<Transition> g = new Vector<>();
                for (Transition tp : out)
                    if (t.detGuard.equalsLogic(tp.detGuard))
                    {
                        tp.hasGroup = true;
                        g.add(tp);
                    }
                if (g.size() == 1)
                    t.hasGroup = false;
                else
                    groups.add(g);
            }
        // Remove the grouped transitions from the graph; they will be
        // re-created behind the intermediate node below.
        for (int i = 0; i < out.size(); i++)
            if (out.elementAt(i).hasGroup)
            {
                Transition.allTransitions.removeElement(out.elementAt(i));
                out.removeElementAt(i);
                i--;
            }
        for (Vector<Transition> g : groups)
        {
//            System.err.println("----------------");
//            for (Transition tau : g)
//                System.err.println("transition from " + tau.v.id + " to: " + tau.u.id);
//            System.err.println("----------------");
            LinearPredicate commonGuard = g.firstElement().detGuard.deepCopy();
            CFGNode n = new CFGNode(greatestNodeIndex + 1);
            String nontdetTmp = "_tmp_";
            Parser.allVars.add("_tmp_");
            for (int i = 0; i < g.size(); i++)
            {
                // Edge i fires when _tmp_ selects index i: the first edge
                // covers _tmp_ <= 0, the last covers _tmp_ >= size-1, the
                // middle ones pin _tmp_ == i via both bounds.
                Transition t = new Transition(n, g.elementAt(i).u);
                t.varName = g.elementAt(i).varName;
                t.update = g.elementAt(i).update;
                t.nondetGuard = g.elementAt(i).nondetGuard;
                LinearCombination lower = new LinearCombination("_tmp_", Rational.one);
                lower.add("1", new Rational(-i, 1)); // _tmp_ >= i
                LinearCombination upper = new LinearCombination("_tmp_", Rational.negate(Rational.one));
                upper.add("1", new Rational(i, 1)); // _tmp_ <= i
                if (i == 0)
                    t.detGuard.add(upper); //t <= 0
                else if (i == g.size() - 1)
                    t.detGuard.add(lower); //t >= out.size()-1
                else
                {
                    t.detGuard.add(upper); // t >= i
                    t.detGuard.add(lower); // t <= i
                }
                t.addToGraph();
            }
            // Single entry edge: keep the common guard and draw _tmp_ from
            // a fresh nondeterministic variable.
            Transition t = new Transition(this, n);
            String nondetVar="_r_"+ Parser.nondetCount;
            t.detGuard.add(commonGuard);
            t.varName.add(nontdetTmp);
            t.update.add(new LinearCombination(nondetVar));
            t.addToGraph();
        }
    }

    /**
     * Connects dead-end nodes (no outgoing transitions) to the designated
     * terminal node (id -2) with an unguarded transition. The commented
     * block below was an earlier variant that also added negated-guard
     * edges from non-dead-end nodes.
     */
    void addTerminalTransitions()
    {
//        if (out.size() > 0)
//        {
//            Vector<LinearPredicate> predicates = new Vector<>();
//            boolean hasNondet=false;
//            for (Transition t : out)
//            {
//                LinearPredicate lp=t.detGuard.deepCopy();
//                if(t.detGuard.isEmpty())
//                    hasNondet=true;
//                predicates.add(lp);
//            }
//            if (hasNondet)
//                return;
//            Vector<LinearPredicate> negation = LinearPredicate.negate(predicates);
//            CFGNode term = idToNode.get(-2);
//            for (LinearPredicate lp : negation)
//            {
//                Transition tau = new Transition(this, term);
//                tau.detGuard = lp;
//                tau.addToGraph();
//            }
//        }
//        else
        if(out.size()==0)
        {
            CFGNode term = idToNode.get(-2);
            Transition tau = new Transition(this, term);
            tau.addToGraph();
        }
    }

    // Returns the existing node with this id, or creates (and registers)
    // a new one.
    public static CFGNode addNode(int x)
    {
        if (idToNode.containsKey(x))
            return idToNode.get(x);
        return new CFGNode(x);
    }

//    public static CFGNode getCFGNode(int x)
//    {
//        return idToNode.get(x);
//    }
} | 4,695 | 30.099338 | 104 | java |
null | RevTerm-main/code/linear/part1/Main/src/CFGUtil.java | import java.util.Vector;
/**
 * Static helpers over the control-flow graph: cut-point detection and
 * weakest-precondition propagation of a Farkas constraint system along a
 * path of transitions.
 */
public class CFGUtil
{
    /**
     * Computes the cut-points of the CFG: the start node, the terminal node,
     * and the target of every back edge found by a DFS from the start node
     * (i.e. one node per cycle). Sets each returned node's isCutPoint flag
     * as a side effect.
     */
    public static Vector<CFGNode> findCutpoints()
    {
        Vector<CFGNode> ret=new Vector<>();
        ret.add(Main.startNode);
        Main.startNode.isCutPoint=true;
        ret.add(Main.termNode);
        Main.termNode.isCutPoint=true;
        dfs(Main.startNode,ret,new Vector<CFGNode>());
        return ret;
    }
    /**
     * DFS that records back-edge targets into res. currentBranch is the stack
     * of nodes on the path from the start node to v; an already-visited
     * successor that is still on this stack closes a cycle.
     */
    private static void dfs(CFGNode v,Vector<CFGNode> res,Vector<CFGNode> currentBranch)
    {
        v.visited=true;
        currentBranch.add(v);
        for(Transition t:v.out)
        {
            if(!t.u.visited)
                dfs(t.u, res, currentBranch);
            else if(!res.contains(t.u) && currentBranch.contains(t.u))
            {
                // Back edge to an ancestor on the current DFS branch: t.u is
                // on a cycle, so it becomes a cut-point (added only once).
                t.u.isCutPoint=true;
                res.add(t.u);
            }
        }
        currentBranch.removeElementAt(currentBranch.size()-1);
    }
    /**
     * Rewrites the Farkas constraint system backwards over the given path:
     * walking transitions last-to-first, each assignment x := e substitutes e
     * for x in the accumulated system, then the transition's deterministic
     * guard is added as a premise and its nondeterministic guard as an
     * invariant-style constraint.
     */
    public static void weakestPreCondition(Vector<Transition> path,Farkas farkas) //NOTE: for C-Integer programs this is completely fine but for general T2 transition systems it might have problems
    {
        for(int i=path.size()-1;i>=0;i--)
        {
            Transition t=path.elementAt(i);
            for(int j=0;j<t.varName.size();j++)
            {
                String var=t.varName.elementAt(j);
                LinearCombination upd=t.update.elementAt(j);
                farkas.replaceVarWithLinear(var,upd);
            }
            farkas.addPredicate(t.detGuard.deepCopy());
            farkas.addInvConstraint(t.nondetGuard.deepCopy());
        }
    }
}
| 1,568 | 30.38 | 198 | java |
null | RevTerm-main/code/linear/part1/Main/src/Farkas.java | import java.util.HashSet;
import java.util.Set;
import java.util.Vector;
/**
 * One application of Farkas' lemma: a set of premises (linear constraints
 * plus invariant-template constraints that may contain unknowns) that must
 * imply a linear objective. makeEquality() converts the implication into
 * per-variable equalities over fresh non-negative multipliers d_i, which the
 * SMT solver then solves together with the template unknowns.
 */
public class Farkas
{
    // Global count of d_i multipliers allocated across ALL Farkas instances;
    // InvUtil declares d_0 .. d_{countD-1} (each >= 0) in the SMT script.
    public static int countD = 0;
    // First multiplier index owned by this instance's constraint list.
    int startDIndex;
    // CFG node ids this implication connects (used for diagnostics only).
    int startNode, endNode;
    // Premises containing template unknowns (invariant disjuncts).
    private QuadraticPredicate invConstraint;
    // Purely linear premises (guards, after WP substitution).
    private LinearPredicate linearConstraints;
    // Right-hand side of the implication.
    private QuadraticCombination objective;
    /**
     * Creates an implication between the given CFG locations, seeded with the
     * trivially true premise 1 >= 0 (which reserves one multiplier).
     */
    Farkas(int startNode, int endNode)
    {
        this.startNode = startNode;
        this.endNode = endNode;
        linearConstraints = new LinearPredicate();
        LinearCombination lc = new LinearCombination();
        lc.add("1", Rational.one);
        linearConstraints.add(lc); // 1>=0 is always true
        invConstraint = new QuadraticPredicate();
        startDIndex = countD;
        countD++; // for 1>=0
    }
    /**
     * Copy that shares the same multiplier index range as the original; the
     * constructor's countD increment is undone because the copy introduces no
     * new multipliers of its own.
     */
    public Farkas deepCopy()
    {
        Farkas ret = new Farkas(startNode, endNode);
        countD--; // for 1>=0 which is added in new
        ret.startDIndex = startDIndex;
        ret.invConstraint.exprs.addAll(invConstraint.exprs);
        //countD+=invConstraint.size();
        ret.linearConstraints = linearConstraints.deepCopy();
        //countD+=linearConstraints.exprs.size()-1; //-1 for 1>=0 which is already added
        ret.objective = objective.deepCopy();
        return ret;
    }
    // public Farkas disabled()
    // {
    //     Farkas ret=deepCopy();
    //// LinearCombination lc=new LinearCombination("n_"+InvariantGeneration.nCount);
    //// QuadraticCombination obj=new QuadraticCombination("1",lc);
    //
    //     InvariantGeneration.nCount++;
    //     ret.objective=QuadraticCombination.minus1.deepCopy();
    //     return ret;
    // }
    /** Adds invariant-template premises, reserving one multiplier each. */
    void addInvConstraint(QuadraticPredicate inv)
    {
        invConstraint.add(inv);
        countD += inv.exprs.size();
    }
    /** Adds linear premises, reserving one multiplier each. */
    void addPredicate(LinearPredicate lp)
    {
        linearConstraints.add(lp);
        countD += lp.exprs.size();
    }
    /** Replaces the objective with a deep copy of the given combination. */
    void setObjective(QuadraticCombination obj)
    {
        objective = obj.deepCopy();
    }
    /** Substitutes var := lc in the premises and the objective (WP step). */
    void replaceVarWithLinear(String var, LinearCombination lc)
    {
        linearConstraints.replaceVarWithLinear(var,lc);
        objective.replaceVarWithLinear(var,lc);
        invConstraint.replaceVarWithLinear(var,lc);
    }
    /**
     * Produces one equality (== 0 is asserted by the caller) per program
     * variable appearing anywhere in this implication.
     */
    public Vector<QuadraticCombination> generateEqualities()
    {
        Vector<QuadraticCombination> ret = new Vector<>();
        Set<String> allVars= getAllVars();
        for (String var : allVars)
        {
            QuadraticCombination tmp = makeEquality(var);
            ret.add(tmp);
        }
        return ret;
    }
    /** Collects every variable mentioned in premises or objective. */
    public Set<String> getAllVars()
    {
        Set<String> ret=new HashSet<>();
        for(LinearCombination lc:linearConstraints.exprs)
            ret.addAll(lc.coef.keySet());
        for(QuadraticCombination qc: invConstraint.exprs)
            ret.addAll(qc.coef.keySet());
        ret.addAll(objective.coef.keySet());
        return ret;
    }
    /**
     * Farkas equality for one variable: the d_i-weighted sum of the
     * variable's coefficients over all premises must equal its coefficient in
     * the objective. The returned combination encodes (weighted sum) -
     * (objective coefficient), to be asserted equal to zero.
     * Multiplier indices start at startDIndex and walk invariant premises
     * first, then linear premises, matching the order used in toString().
     */
    public QuadraticCombination makeEquality(String var)
    {
        QuadraticCombination qc = new QuadraticCombination();
        int dIndex = startDIndex;
        if (!invConstraint.exprs.isEmpty())
        {
            //for(int i=0;i<invConstraint.exprs.size();i++)
            for (QuadraticCombination invc : invConstraint.exprs)
            {
                if (invc.coef.containsKey(var))
                {
                    String invMultiplier = "d_" + dIndex;
                    //InvariantGeneration.addUnknownVar("d_" + dIndex);
                    LinearCombination lc = invc.coef.get(var);
                    qc.add(invMultiplier, lc);
                }
                dIndex++;
            }
        }
        for (LinearCombination lp : linearConstraints.exprs) // lp>=0
        {
            String multiplier = "d_" + dIndex;
            if (lp.coef.containsKey(var))
            {
                Rational coef = lp.coef.get(var);
                qc.add(multiplier, new LinearCombination(coef));
                //InvariantGeneration.addUnknownVar("d_" + dIndex);
            }
            dIndex++;
        }
        LinearCombination coef = objective.getCoef(var);
        //qc=coef <=> qc-coef=0
        if (coef != null)
        {
            LinearCombination lc = coef.negate();
            qc.add(lc);
        }
        // System.err.println("var: "+var+" => "+qc.toNormalString());
        return qc;
    }
    /** Human-readable dump: premises labeled with their d_i, then objective. */
    public String toString()
    {
        String ret = "";
        ret += "\n---------------------------------------------\n";
        ret += "from: " + startNode + " to: " + endNode + "\n";
        int dIndex = startDIndex;
        for (int i = 0; i < invConstraint.exprs.size(); i++)
        {
            ret += "d_" + dIndex + ": " + invConstraint.exprs.elementAt(i).toNormalString() + "\n";
            dIndex++;
        }
        for (LinearCombination lc : linearConstraints.exprs)
        {
            ret += "\nd_" + dIndex + ": " + lc.toNormalString();
            dIndex++;
        }
        ret += "\n---------------------------------------------\n";
        ret += objective.toNormalString();
        return ret;
    }
}
null | RevTerm-main/code/linear/part1/Main/src/InvUtil.java | import java.io.BufferedReader;
import java.io.FileWriter;
import java.io.InputStreamReader;
import java.util.Vector;
public class InvUtil
{
public static boolean checkNonTermination(Vector<Farkas> I, CFGNode startNode) throws Exception
{
String Template = "";//InvariantGeneration.cCount+" "+Farkas.countD+" "+Parser.allVars.size()+"\n";
Template += "(set-option :print-success false) \n";
if (Main.solver.equals("bclt"))
{
Template +="(set-option :produce-models true)\n"+
"(set-option :produce-assertions true)\n" +
"(set-logic QF_NIA)\n";
}
for (int i = 0; i < InvariantGeneration.cCount; i++)
Template += "(declare-const c_" + i + " Int)\n";
Template+="(assert (< "+InvariantGeneration.negativeVar+" 0))\n"; //invariant at l_term
for (int i = 0; i < Farkas.countD; i++)
{
Template += "(declare-const d_" + i + " Int)\n";
Template += "(assert (>= d_" + i + " 0))\n"; // d_i>=0
}
for (String var : Parser.allVars)
if (!var.equals("1"))
Template += "(declare-const " + var + " Int)\n";
for (QuadraticCombination qc : startNode.inv.firstElement().exprs)
Template += "(assert (>= " + qc.toString() + " 0))\n";
FileWriter fw = new FileWriter(Main.workingdir+"/"+Main.solver + Main.con+"-"+Main.dis+Main.fileName + ".smt2");
fw.write(Template);
for (Farkas f : I)
{
//System.err.println("------------------------\n"+f+"\n-------------------------\n");
Vector<QuadraticCombination> vqc = f.generateEqualities();
for (QuadraticCombination qc : vqc)
{
//System.err.println(qc.toNormalString());
fw.write("(assert (= 0 " + qc.toString() + "))\n");
//System.err.println(qc.toNormalString());
}
}
fw.write("(check-sat)\n");
fw.write("(get-value (");
for (int i = 0; i < InvariantGeneration.cCount; i++)
fw.write("c_" + i + " ");
for (String var : Parser.allVars)
if (!var.equals("1"))
fw.write(var + " ");
fw.write("))");
fw.close();
return check();
}
public static boolean check() throws Exception
{
// System.err.println("Solver Started");
String[] configs = {"bclt --file", "z3 -smt2", "mathsat"};
int solverInd = -1;
if (Main.solver.equals("bclt"))
solverInd = 0;
else if(Main.solver.equals("z3"))
solverInd=1;
else if(Main.solver.equals("mathsat"))
solverInd=2;
Process process = Runtime.getRuntime().exec("./"+Main.solversDir+"/"+configs[solverInd] + " " + Main.workingdir+"/"+Main.solver + Main.con+"-"+Main.dis+Main.fileName+".smt2");
process.waitFor();
// if(!process.waitFor(10, TimeUnit.SECONDS))
// {
// process.destroy();
// return false;
// }
BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(process.getInputStream()));
while (bufferedReader.ready())
{
String s = bufferedReader.readLine();
if (s.equals("sat"))
{
// System.err.println("SAT!");
return true;
}
else if (s.equals("unsat"))
{
// System.err.println("UNSAT!");
return false;
}
}
return false;
}
} | 3,634 | 34.637255 | 183 | java |
null | RevTerm-main/code/linear/part1/Main/src/InvariantGeneration.java | import java.util.*;
/**
 * Builds the invariant templates attached to CFG nodes and generates the
 * Farkas constraint systems that relate them (consecution along every path
 * between consecutive cut-points).
 */
public class InvariantGeneration
{
    // Maps each program variable to the template constant c_j that fixes its
    // value at the start node.
    public static Map<String, String> nondetVarsC = new HashMap<>();
    // Template constant asserted negative by InvUtil; the terminal node's
    // invariant "negativeVar >= 0" is therefore unsatisfiable.
    public static String negativeVar;
    // NOTE(review): not referenced anywhere in this file's visible code.
    public static int totalUnknownVars = 0;
    // Number of template constants c_j allocated so far.
    public static int cCount = 0;
    /**
     * Attaches an invariant template to every relevant CFG node:
     * start node -> "var == c_j" for every program variable (each equality
     * encoded as two opposite inequalities); terminal node -> a single
     * constant that InvUtil forces negative (unsatisfiable invariant);
     * every other cut-point -> dis disjuncts, each a conjunction of con
     * generic linear inequalities over all variables with fresh unknowns.
     *
     * @param con number of conjuncts per disjunct
     * @param dis number of disjuncts per cut-point
     */
    public static void MakeTemplate(int con,int dis)//conjunctions, disjunctions
    {
        for (CFGNode n : CFGNode.allCFGNodes)
        {
            if (n.id == Main.startNode.id)
            {
                QuadraticPredicate qp = new QuadraticPredicate();
                for (String var : Parser.allVars) // var = c_j
                {
                    if (var.equals("1"))
                        continue;
                    QuadraticCombination qc = new QuadraticCombination();
                    Rational minusOne = Rational.negate(Rational.one);
                    qc.add(var, new LinearCombination(Rational.one)); // qc = -var
                    qc.add("1", new LinearCombination("c_" + cCount, minusOne)); // qc = -var + c_cCount
                    nondetVarsC.put(var,"c_"+cCount);
                    cCount++;
                    qp.add(qc); //qc>=0
                    qp.add(qc.deepCopy().negate()); // -qc>=0
                }
                n.inv.add(qp);
            }
            else if(n.id == Main.termNode.id) // -1 >=0
            {
                QuadraticPredicate qp = new QuadraticPredicate();
                QuadraticCombination qc = new QuadraticCombination();
                negativeVar="c_"+cCount;
                cCount++;
                qc.add("1",new LinearCombination(negativeVar,Rational.one));
                qp.add(qc);
                n.inv.add(qp);
            }
            else if(n.isCutPoint)
            {
                for(int k=0;k<dis;k++)
                {
                    QuadraticPredicate qp = new QuadraticPredicate();
                    for (int j = 0; j < con; j++)
                    {
                        QuadraticCombination qc = new QuadraticCombination(); // c_0 * 1 + c_1 * var_1 + c_2 * var2 .... + c_n * var_n >=0
                        for (String var : Parser.allVars)
                        {
                            qc.add(var, new LinearCombination("c_" + cCount)); //qc += var * c_cCount
                            cCount++;
                        }
                        qp.add(qc); //qc >=0
                    }
                    n.inv.add(qp);
                }
            }
        }
    }
    /** Generates Farkas systems for every path starting at each cut-point. */
    public static void generate(Vector<CFGNode> cutPoints,Vector<Farkas> farkasVector)
    {
        for(CFGNode v:cutPoints)
            processPaths(v,v,new Vector<Transition>(),farkasVector);
    }
    /**
     * DFS from cut-point st; whenever another cut-point u is reached, emits
     * one Farkas implication per combination of (disjunct of st's invariant,
     * disjunct of the negation of u's other invariants, conjunct of u's last
     * invariant): vinv AND uinv AND path-guards => obj, after pulling the
     * system back over the path via weakest preconditions.
     */
    private static void processPaths(CFGNode st,CFGNode v,Vector<Transition> path,Vector<Farkas> farkasVector)
    {
        Vector<Transition> tran=v.out;
        for(Transition t:tran)
        {
            CFGNode u=t.u;
            path.add(t);
            if(u.isCutPoint)
            {
                QuadraticPredicate objPred=u.inv.lastElement();
                Vector<QuadraticPredicate> tmp = new Vector<>();
                for(QuadraticPredicate qc:u.inv)
                    if(qc!=objPred)
                        tmp.add(qc.deepCopy());
                Vector<QuadraticPredicate> uinvNegate=QuadraticPredicate.negate(tmp);
                if(uinvNegate.isEmpty())
                    uinvNegate.add(QuadraticPredicate.TRUE);
                for(QuadraticPredicate vinv:st.inv)
                    for(QuadraticPredicate uinv:uinvNegate)
                        for(QuadraticCombination obj:objPred.exprs)
                        {
                            //vinv & uinv & path => obj
                            Farkas farkas=new Farkas(st.id,u.id);
                            if(uinv!=QuadraticPredicate.TRUE)
                                farkas.addInvConstraint(uinv.deepCopy());
                            farkas.setObjective(obj.deepCopy());
                            CFGUtil.weakestPreCondition(path,farkas);
                            // NOTE(review): unused local; kept to preserve behavior.
                            Set<String> vars=farkas.getAllVars();
                            farkas.addInvConstraint(vinv);
                            farkasVector.add(farkas);
                        }
            }
            else
                processPaths(st,u,path,farkasVector);
            path.removeElementAt(path.size()-1);
        }
    }
}
null | RevTerm-main/code/linear/part1/Main/src/LinearCombination.java | import java.text.DecimalFormat;
import java.util.HashMap;
import java.util.Map;
import java.util.Vector;
/**
 * A linear expression sum(coef_i * var_i) over program variables with
 * rational coefficients. The pseudo-variable "1" holds the constant term.
 * Mutable: most operations modify this object in place.
 */
public class LinearCombination
{
    // Variable name -> coefficient; "1" is the constant term.
    Map<String, Rational> coef;
    // NOTE(review): shared mutable instance -- callers must not modify it.
    public static LinearCombination one = new LinearCombination(Rational.one);
    /** Empty combination (value 0). */
    LinearCombination()
    {
        coef = new HashMap<>();
    }
    /** Constant combination with value c. */
    LinearCombination(Rational c)
    {
        coef = new HashMap<>();
        coef.put("1", c);
    }
    /** The combination 1*var. */
    LinearCombination(String var)
    {
        coef = new HashMap<>();
        coef.put(var, Rational.one);
    }
    /** The combination c*var. */
    LinearCombination(String var, Rational c)
    {
        coef = new HashMap<>();
        coef.put(var, c);
    }
    /** Adds c*var in place, accumulating with any existing coefficient. */
    public void add(String var, Rational c)
    {
        if (coef.containsKey(var))
            coef.put(var, Rational.add(coef.get(var), c));
        else
            coef.put(var, c);
    }
    /** Adds lc in place, term by term. */
    public void add(LinearCombination lc)
    {
        for (String var : lc.coef.keySet())
            add(var, lc.coef.get(var));
    }
    /** Subtracts lc in place. */
    public void minus(LinearCombination lc)
    {
        add(lc.negate());
    }
    /** Scales every coefficient by val in place. */
    public void multiplyByValue(Rational val)
    {
        for (String var : coef.keySet())
            coef.put(var, Rational.mul(coef.get(var), val));
    }
    // Note: removeZeros() below is a side effect on this object.
    public LinearCombination negate() //does not negate "this". returns the negate of "this".
    {
        removeZeros();
        LinearCombination lc = new LinearCombination();
        for (String var : coef.keySet())
            lc.coef.put(var, Rational.negate(coef.get(var)));
        return lc;
    }
    /**
     * True iff the combination mentions one of the synthetic variables used
     * for nondeterminism ("_tmp_..." selectors or "_r_..." fresh values).
     */
    public boolean containsNondetVar()
    {
        for (String var : coef.keySet())
            if (var.startsWith("_tmp_") || var.startsWith("_r_"))
                return true;
        return false;
    }
    /** Independent copy (zero coefficients are dropped from this first). */
    public LinearCombination deepCopy()
    {
        removeZeros();
        LinearCombination lc = new LinearCombination();
        for (String var : coef.keySet())
        {
            Rational c = coef.get(var);
            lc.add(var, c);
        }
        return lc;
    }
    // public QuadraticCombination mulByVar(String var)
    // {
    //     QuadraticCombination ret = new QuadraticCombination();
    //     for (String s : coef.keySet())
    //         ret.add(s, new LinearCombination(var, coef.get(s)));
    //     return ret;
    // }
    /**
     * Multiplies this combination by lc in place. At least one operand must
     * be a constant, otherwise the product would not be linear.
     *
     * @throws Exception if both operands contain variables
     */
    public void multiplyByLin(LinearCombination lc) throws Exception
    {
        if (!isConstant() && !lc.isConstant())
            throw new Exception("multiplication of two linear Expressions is not linear");
        if (isConstant())
        {
            Rational x = coef.get("1");
            if (x == null)
                x = Rational.zero;
            coef.clear();
            for (String var : lc.coef.keySet())
                coef.put(var, Rational.mul(lc.coef.get(var), x));
        }
        else
        {
            Rational x = lc.coef.get("1");
            multiplyByValue(x);
        }
    }
    /** Substitutes var := lc in place (no-op when var does not occur). */
    void replaceVarWithLinear(String var,LinearCombination lc)
    {
        if(!coef.containsKey(var))
            return;
        Rational r=coef.get(var);
        coef.put(var,Rational.zero);
        LinearCombination tmp=lc.deepCopy();
        tmp.multiplyByValue(r);
        add(tmp);
        removeZeros();
    }
    /** True iff only the constant term "1" (or nothing) is present. */
    public boolean isConstant()
    {
        return coef.size() <= 1 && (coef.size() != 1 || coef.containsKey("1"));
    }
    /** Drops entries whose coefficient equals zero. */
    public void removeZeros()
    {
        Vector<String> allVars = new Vector<>(coef.keySet());
        for (String s : allVars)
            if (coef.get(s).equals(Rational.zero))
                coef.remove(s);
    }
    // public int replaceVarsWithValue(Map<String, Integer> dict) throws Exception
    // {
    //     int ret = 0;
    //     for (String var : coef.keySet())
    //     {
    //         if (!dict.containsKey(var))
    //             throw new Exception("dictionary cannot support " + toNormalString());
    //         int varVal = dict.get(var);
    //         ret += varVal * coef.get(var).numerator / coef.get(var).denominator;
    //     }
    //     return ret;
    // }
    /**
     * Term-wise equality after normalizing both sides (zero terms removed).
     * NOTE(review): this overloads rather than overrides Object.equals, and
     * no hashCode is defined; call sites use the static type deliberately.
     */
    public boolean equals(LinearCombination lc)
    {
        removeZeros();
        lc.removeZeros();
        for (String var : coef.keySet())
            if (!lc.coef.containsKey(var) || !lc.coef.get(var).equals(this.coef.get(var)))
                return false;
        for (String var : lc.coef.keySet())
            if (!this.coef.containsKey(var) || !lc.coef.get(var).equals(this.coef.get(var)))
                return false;
        return true;
    }
    /** Infix rendering for humans, e.g. "2*x + 3*y - 1*1". */
    public String toNormalString()
    {
        removeZeros();
        if (coef.size() == 0)
            return "0";
        String ret = "";
        for (String s : coef.keySet())
        {
            Rational c = coef.get(s);
            if (ret.equals(""))
                ret += c.toNormalString() + "*" + s;
            else if (coef.get(s).compareTo(Rational.zero) < 0)
                ret += " - " + (Rational.negate(c)).toNormalString() + "*" + s;
            else
                ret += " + " + c.toNormalString() + "*" + s;
        }
        return ret;
    }
    /** SMT-LIB prefix rendering, e.g. "(+ (* 2 x) 3)". */
    public String toString()
    {
        removeZeros();
        String ret = "";
        for (String s : coef.keySet())
        {
            Rational c = coef.get(s);
            if (c.equals(Rational.one))
                ret += " " + s;
            else if (s.equals("1"))
            {
                if (!c.isNonNegative())
                    ret += " (- " + Rational.negate(c) + ")";
                else
                    ret += " " + c + " ";
            }
            else if (c.isNonNegative())
                ret += " (* " + (c) + " " + s + ")";
            else
                ret += " (* (- " + Rational.negate(c) + ") " + s + ")";
        }
        if (ret.equals(""))
            return "0";
        if (coef.size() > 1)
            return "(+ " + ret + ")";
        else
            return ret;
    }
}
null | RevTerm-main/code/linear/part1/Main/src/LinearPredicate.java | import java.util.Vector;
/**
 * A conjunction of constraints "expr >= 0" over LinearCombinations.
 * A Vector&lt;LinearPredicate&gt; represents a disjunction of such conjunctions
 * (a DNF), which is the shape produced by the static negate/conjunct/disjunct
 * helpers below.
 */
public class LinearPredicate
{
    // Each element lc stands for the constraint lc >= 0; all are conjoined.
    Vector<LinearCombination> exprs;
    LinearPredicate()
    {
        exprs = new Vector<>();
    }
    /** Conjoins lc >= 0, skipping duplicates (term-wise equality). */
    void add(LinearCombination lc)
    {
        for(LinearCombination l:exprs)
            if(l!=null && l.equals(lc))
                return;
        exprs.add(lc);
    }
    /** Conjoins deep copies of all of lp's constraints. */
    void add(LinearPredicate lp)
    {
        for (LinearCombination lc : lp.exprs)
            add(lc.deepCopy());
    }
    /** True iff there are no constraints (the predicate is trivially true). */
    boolean isEmpty()
    {
        return exprs.isEmpty();
    }
    /** Substitutes var := lc in every constraint, in place. */
    void replaceVarWithLinear(String var,LinearCombination lc)
    {
        for(LinearCombination l:exprs)
            l.replaceVarWithLinear(var,lc);
    }
    /**
     * Negation of this conjunction as a disjunction (one single-constraint
     * predicate per conjunct). NOT(e >= 0) is encoded as -e - eps >= 0, i.e.
     * e <= -eps, using Main.eps to make the inequality strict over integers.
     */
    public Vector<LinearPredicate> negate()
    {
        Vector<LinearPredicate> ret = new Vector<>();
        for (LinearCombination lc : exprs)
        {
            LinearCombination l = lc.negate();
            l.add("1", Rational.negate(Main.eps));
            LinearPredicate lp = new LinearPredicate();
            lp.add(l);
            ret.add(lp);
        }
        return ret;
    }
    /**
     * Negates a disjunction of conjunctions, returning the result in DNF via
     * the cross product of the per-disjunct negations.
     * NOTE(review): destructively shrinks the argument vector g.
     */
    public static Vector<LinearPredicate> negate(Vector<LinearPredicate> g)
    {
        Vector<LinearPredicate> ret = new Vector<>();
        if (g.size() == 1)
            ret = g.firstElement().negate();
        else
        {
            Vector<LinearPredicate> notLast = g.lastElement().negate();
            g.removeElementAt(g.size() - 1);
            Vector<LinearPredicate> recurse = negate(g);
            for (LinearPredicate lp : notLast)
                for (LinearPredicate predicate : recurse)
                {
                    LinearPredicate copy = predicate.deepCopy();
                    copy.add(lp);
                    ret.add(copy);
                }
        }
        return ret;
    }
    /**
     * DNF conjunction: pairwise merge of the two disjunct lists. An empty
     * list is treated as "true" (the other side is returned, copied).
     */
    public static Vector<LinearPredicate> conjunct(Vector<LinearPredicate> left, Vector<LinearPredicate> right)
    {
        Vector<LinearPredicate> ret = new Vector<>();
        if (left.isEmpty())
        {
            for (LinearPredicate lp : right)
                ret.add(lp.deepCopy());
            return ret;
        }
        if (right.isEmpty())
        {
            for (LinearPredicate lp : left)
                ret.add(lp.deepCopy());
            return ret;
        }
        for (LinearPredicate lp1 : left)
            for (LinearPredicate lp2 : right)
            {
                LinearPredicate lp = new LinearPredicate();
                lp.add(lp1);
                lp.add(lp2);
                ret.add(lp);
            }
        return ret;
    }
    /** DNF disjunction: concatenation of deep copies of both lists. */
    public static Vector<LinearPredicate> disjunct(Vector<LinearPredicate> left, Vector<LinearPredicate> right)
    {
        Vector<LinearPredicate> ret = new Vector<>();
        for (LinearPredicate lp : left)
            ret.add(lp.deepCopy());
        for (LinearPredicate lp : right)
            ret.add(lp.deepCopy());
        return ret;
    }
    /** Independent copy of this conjunction. */
    public LinearPredicate deepCopy()
    {
        LinearPredicate ret = new LinearPredicate();
        ret.add(this);
        return ret;
    }
    /** Mutual-containment check: same constraint sets, order ignored. */
    public boolean equalsLogic(LinearPredicate lp)
    {
        for (LinearCombination lc : exprs)
            if (!lp.contains(lc))
                return false;
        for (LinearCombination lc : lp.exprs)
            if (!this.contains(lc))
                return false;
        return true;
    }
    /** True iff some conjunct is term-wise equal to lc. */
    public boolean contains(LinearCombination lc)
    {
        for (LinearCombination l : exprs)
            if (l.equals(lc))
                return true;
        return false;
    }
    /** Renders the conjunction as "e1>=0 && e2>=0 && ...". */
    public String toString()
    {
        String ret = "";
        for (int i = 0; i < exprs.size(); i++)
        {
            LinearCombination lc = exprs.elementAt(i);
            if (i == 0)
                ret += lc.toNormalString() + ">=0";
            else
                ret += " && " + lc.toNormalString() + ">=0";
        }
        return ret;
    }
}
| 3,935 | 24.230769 | 111 | java |
null | RevTerm-main/code/linear/part1/Main/src/Main.java | import java.util.Vector;
/**
 * Entry point of the non-termination prover. Pipeline: parse the input
 * transition system, normalize the CFG, detect cut-points, build invariant
 * templates, generate Farkas constraints, and ask an external SMT solver
 * whether a non-termination witness exists.
 *
 * Expected args: con dis solver fileName workingdir solversDir
 * (con/dis = conjuncts/disjuncts per cut-point template; solver is one of
 * bclt, z3, mathsat). Exits with code 3 when non-termination is proved,
 * 0 otherwise.
 */
public class Main
{
    // Slack used to encode strict inequalities over the integers (e > 0 is
    // modeled as e - eps >= 0).
    public static Rational eps = Rational.one;
    public static CFGNode startNode, cutPoint, termNode;
    public static String fileName = "", solver = "",workingdir="",solversDir="";
    public static int con = 0, dis = 0;
    public static void main(String[] args) throws Exception
    {
        con = Integer.parseInt(args[0]);
        dis = Integer.parseInt(args[1]);
        solver = args[2];
        fileName = args[3];
        workingdir = args[4];
        solversDir = args[5];
        // Reserve the special terminal (-2) and start (-1) nodes before
        // parsing so the parser can wire transitions to them.
        termNode = CFGNode.addNode(-2);
        startNode = CFGNode.addNode(-1);
        long startTime = System.currentTimeMillis();
        Parser.readFile(fileName);
        Parser.parseProg(0, Parser.getTokenCount() - 1);
        // Give every dead-end node an edge to the terminal node.
        for (CFGNode n : CFGNode.allCFGNodes)
        {
            if (n.id == -1 || n.id == -2)
                continue;
            n.addTerminalTransitions();
        }
        // Indexed loop over a snapshot of the size: addNecessaryNondet may
        // append new nodes, which must not be processed here.
        int curTotalCFGNodes = CFGNode.allCFGNodes.size();
        for (int i = 0; i < curTotalCFGNodes; i++)
        {
            CFGNode n = CFGNode.allCFGNodes.elementAt(i);
            if (n.id == -2 || n.id == -1)
                continue;
            n.addNecessaryNondet();
        }
        //this is done after parsing because we need to have the list of all variables.
        for(Transition t:Transition.allTransitions)
            t.addNondetTemplate();
        // System.err.println("Parsing Finished");
        Vector<CFGNode> cutPoints=CFGUtil.findCutpoints();
        // for(CFGNode x:cutPoints)
        //     System.err.println("cutPoint: "+x.id);
        // Flatten the input path so it can be used as an output file name.
        fileName=fileName.replace("/","_");
        // for (Transition t : Transition.allTransitions)
        //     System.err.println(t);
        InvariantGeneration.MakeTemplate(con,dis);
        // for(CFGNode n:cutPoints)
        //     for(int i=0;i<n.inv.size();i++)
        //         System.err.println("id: "+n.id+" inv: "+n.inv.elementAt(i).toNormalString());
        Vector<Farkas> InvFarkas = new Vector<>();
        InvariantGeneration.generate(cutPoints,InvFarkas);
        // for(Farkas farkas:InvFarkas)
        //     System.err.println(farkas.toString());
        boolean result = InvUtil.checkNonTermination(InvFarkas, startNode);
        if(result) //does not terminate
            System.out.println("Non-Terminating");
        else
            System.out.println("Could Not Prove Non-Termination");
        long endTime = System.currentTimeMillis();
        System.out.println("total time used: " + (endTime - startTime));
        int val = (result) ? 3 : 0;
        System.exit(val);
    }
}
null | RevTerm-main/code/linear/part1/Main/src/Node.java | import java.util.Vector;
/**
 * A node of the parse tree built by Parser. Every node registers itself in
 * the global allNodes list; its id is its index in that list.
 */
public class Node
{
    public static Vector<Node> allNodes = new Vector<>();
    int id;                    // index of this node in allNodes
    Node par;                  // parent parse-tree node, null for the root
    int beginIndex, endIndex;  // inclusive token range covered by this node
    String type;               // node kind, e.g. "prog", "Transition", "assume"
    Vector<Node> children;
    String varName;            // assignment target (assignment nodes only)
    LinearCombination expr;    // parsed arithmetic expression, if any
    QuadraticCombination nondetLow, nondetUp;
    Vector<LinearPredicate> guard; // DNF: disjunction of conjunctive predicates

    /**
     * Creates a node covering tokens [beginIndex, endIndex], registers it in
     * allNodes, and links it under its parent (when one is given).
     */
    Node(Node par, int beginIndex, int endIndex, String type)
    {
        allNodes.add(this);
        this.id = allNodes.size() - 1;
        this.par = par;
        this.beginIndex = beginIndex;
        this.endIndex = endIndex;
        this.type = type;
        this.expr = null;
        this.guard = new Vector<>();
        this.children = new Vector<>();
        if (par != null)
        {
            par.children.add(this);
        }
    }

    /** Multi-line debug dump: id, parent id, token range, type and guard. */
    public String toString()
    {
        StringBuilder sb = new StringBuilder();
        sb.append("Node #").append(id).append("\n");
        if (par == null)
        {
            sb.append("Par: null\n");
        }
        else
        {
            sb.append("Par: ").append(par.id).append("\n");
        }
        sb.append("beginIndex=").append(beginIndex).append("\t").append("endIndex=").append(endIndex).append("\n");
        sb.append("type: ").append(type).append("\n");
        sb.append("guard:");
        for (LinearPredicate lp : guard)
        {
            sb.append(lp.toString()).append("\n");
        }
        return sb.toString();
    }
}
| 1,224 | 23.5 | 81 | java |
null | RevTerm-main/code/linear/part1/Main/src/Parser.java | import java.io.File;
import java.util.HashSet;
import java.util.Scanner;
import java.util.Set;
import java.util.Vector;
public class Parser
{
public static Set<String> allVars = new HashSet<>();
public static Vector<String> tokens = new Vector<>();
public static int nondetCount = 0;
static
{
allVars.add("1");
}
    /**
     * Parses the whole token stream: a "START : id" header (token
     * beginIndex+2 is the initial CFG node id), an optional "CUTPOINT : id"
     * directive, followed by a sequence of "FROM : v ... TO : u ;" transition
     * blocks, each handed to parseTransition.
     *
     * @throws Exception if the program does not start with START or a FROM
     *                   appears before the previous block's TO
     */
    public static Node parseProg(int beginIndex, int endIndex) throws Exception
    {
        if (!getToken(beginIndex).equals("START"))
            throw new Exception("program should start with START");
        Node cur = new Node(null, beginIndex, endIndex, "prog");
        // Wire the synthetic start node (-1) to the program's entry node.
        CFGNode start = CFGNode.addNode(Integer.parseInt(getToken(beginIndex + 2)));
        Transition fromStart = new Transition(Main.startNode, start);
        fromStart.addToGraph();
        if (getToken(beginIndex + 4).equals("CUTPOINT"))
            Main.cutPoint = CFGNode.addNode(Integer.parseInt(getToken(beginIndex + 6)));
        // lastFROM remembers the position of the pending FROM keyword; each
        // TO closes it, covering tokens up to "TO : u ;" (i + 3).
        int lastFROM = -1;
        for (int i = beginIndex; i <= endIndex; i++)
            if (getToken(i).equals("FROM"))
            {
                if (lastFROM != -1)
                    throw new Exception(" \"TO: index\" expected before @" + i);
                lastFROM = i;
            }
            else if (getToken(i).equals("TO"))
            {
                parseTransition(cur, lastFROM, i + 3);
                lastFROM = -1;
            }
        return cur;
    }
    /**
     * Parses one "FROM : v <stmt;>* TO : u ;" block. Statements are split on
     * ';'. An assume with a single guard disjunct constrains every pending
     * transition; an assume with several disjuncts splits each pending
     * transition into one copy per disjunct (DNF expansion). Assignments are
     * appended to every pending transition's update lists. All resulting
     * transitions are then registered in the graph.
     */
    public static Node parseTransition(Node par, int beginIndex, int endIndex) throws Exception
    {
        if (!getToken(endIndex).equals(";"))
            throw new Exception("Transition must end with ; @" + beginIndex + "-" + endIndex);
        Node cur = new Node(par, beginIndex, endIndex, "Transition");
        // Source id follows "FROM :", target id precedes the closing ';'.
        int vIndex = Integer.parseInt(getToken(beginIndex + 2)), uIndex = Integer.parseInt(getToken(endIndex - 1));
        CFGNode vNode = CFGNode.addNode(vIndex);
        CFGNode uNode = CFGNode.addNode(uIndex);
        Vector<Transition> transitionVector = new Vector<>();
        transitionVector.add(new Transition(vNode, uNode));
        // lastColon tracks the previous statement separator; statements live
        // between consecutive ';' tokens.
        int lastColon = beginIndex + 3;
        for (int i = beginIndex + 4; i <= endIndex - 4; i++)
        {
            if (getToken(i).equals(";"))
            {
                Node ch = parseStmt(cur, lastColon + 1, i - 1);
                if (ch.type.equals("assume"))
                {
                    if (ch.guard.size() == 1)
                        for (Transition t : transitionVector)
                            t.detGuard.add(ch.guard.elementAt(0));
                    else if (ch.guard.size() > 1)
                    {
                        // Disjunctive guard: replace each pending transition
                        // with one copy per disjunct.
                        Vector<Transition> tmp = new Vector<>();
                        for (int j = 0; j < ch.guard.size(); j++)
                        {
                            for (Transition t : transitionVector)
                            {
                                Transition tp = t.deepCopy();
                                tp.detGuard.add(ch.guard.elementAt(j));
                                tmp.add(tp);
                            }
                        }
                        transitionVector = tmp;
                    }
                }
                else
                {
                    // Assignment: record target variable and update term on
                    // every pending transition.
                    for (Transition t : transitionVector)
                    {
                        t.varName.add(ch.varName);
                        t.update.add(ch.expr);
//                        if (ch.expr.containsNondetVar())
//                        {
//                            LinearCombination lowerBound = ch.expr.deepCopy();
//                            lowerBound.minus(new LinearCombination(ch.nondetLow, Rational.one)); //var - low >= 0
//                            t.nondetGuard.add(lowerBound);
//
//                            LinearCombination upperBound = new LinearCombination(ch.nondetUp, Rational.one);
//                            upperBound.minus(ch.expr.deepCopy()); //up - var >= 0
//                            t.nondetGuard.add(upperBound);
//
//                        }
                    }
                }
                lastColon = i;
            }
        }
        for (Transition t : transitionVector)
            t.addToGraph();
        return cur;
    }
public static Node parseStmt(Node par, int beginIndex, int endIndex) throws Exception
{
if (getToken(beginIndex).equals("assume"))
{
Node cur = new Node(par, beginIndex, endIndex, "assume");
Node ch = parseBexpr(cur, beginIndex + 2, endIndex - 1);
cur.guard = ch.guard;
return cur;
}
else
{
Node cur = new Node(par, beginIndex, endIndex, "assignment");
if (!getToken(beginIndex + 1).equals(":="))
throw new Exception("assignment without := @" + beginIndex + "-" + endIndex);
int sgn = beginIndex + 1;
String varName = getToken(beginIndex);
allVars.add(varName);
boolean isNondet = false;
for (int i = sgn + 1; i <= endIndex; i++)
if (getToken(i).equals("nondet"))
isNondet = true;
if (isNondet)
{
cur.varName = varName;
cur.expr = new LinearCombination("_r_"+nondetCount);
nondetCount++;
}
else
{
LinearCombination update = parseExpr(cur, sgn + 1, endIndex).expr;
cur.varName = varName;
cur.expr = update;
}
return cur;
}
}
    /**
     * Parses a boolean expression into a DNF guard. Precedence is handled by
     * scanning for top-level operators: '||' binds loosest, then '&&', then a
     * fully parenthesized expression, then '!', then a single literal. A
     * condition containing "nondet" yields an empty guard (unconstrained).
     */
    public static Node parseBexpr(Node par, int beginIndex, int endIndex) throws Exception
    {
        // System.err.println("parseBexpr: "+beginIndex+"---"+endIndex);
        Node cur = new Node(par, beginIndex, endIndex, "Bexpr");
        for (int i = beginIndex; i <= endIndex; i++)
            if (getToken(i).equals("nondet"))
                return cur;
        // Sentinel-delimited positions of top-level '||' and '&&' operators
        // ('|' and '&' arrive as two consecutive single-character tokens).
        Vector<Integer> ors = new Vector<>();
        Vector<Integer> ands = new Vector<>();
        ors.add(beginIndex - 1);
        ands.add(beginIndex - 1);
        int openPar = 0;
        for (int i = beginIndex; i <= endIndex; i++)
            if (getToken(i).equals("("))
                openPar++;
            else if (getToken(i).equals(")"))
                openPar--;
            else if (openPar == 0 && getToken(i).equals("|") && getToken(i + 1).equals("|"))
            {
                ors.add(i + 1);
                i++;
            }
            else if (openPar == 0 && getToken(i).equals("&") && getToken(i + 1).equals("&"))
            {
                ands.add(i + 1);
                i++;
            }
        ors.add(endIndex + 2);
        ands.add(endIndex + 2);
        // Top-level '||' present: recurse on each operand, combine as union.
        if (ors.size() > 2)
        {
            for (int i = 1; i < ors.size(); i++)
            {
                Node ch = parseBexpr(cur, ors.elementAt(i - 1) + 1, ors.elementAt(i) - 2);
                cur.guard = LinearPredicate.disjunct(cur.guard, ch.guard);
            }
            return cur;
        }
        // Top-level '&&' present: recurse and combine via DNF cross product.
        if (ands.size() > 2)
        {
            for (int i = 1; i < ands.size(); i++)
            {
                Node ch = parseBexpr(cur, ands.elementAt(i - 1) + 1, ands.elementAt(i) - 2);
                cur.guard = LinearPredicate.conjunct(cur.guard, ch.guard);
            }
            return cur;
        }
        // Check whether the whole range is one parenthesized group.
        boolean isCompletlyInsidePar = true;
        openPar = 0;
        for (int i = beginIndex; i <= endIndex; i++)
        {
            if (getToken(i).equals("("))
                openPar++;
            else if (getToken(i).equals(")"))
                openPar--;
            if (openPar == 0 && i != endIndex)
            {
                isCompletlyInsidePar = false;
                break;
            }
        }
        if (isCompletlyInsidePar)
        {
            Node ch = parseBexpr(cur, beginIndex + 1, endIndex - 1);
            cur.guard = LinearPredicate.conjunct(cur.guard, ch.guard);
            return cur;
        }
        // Negation: recurse, then DNF-negate the sub-guard.
        if (getToken(beginIndex).equals("!"))
        {
            Node ch = parseBexpr(cur, beginIndex + 1, endIndex);
            cur.guard = LinearPredicate.negate(ch.guard);
            return cur;
        }
        // Base case: a single comparison literal.
        Node ch = parseLiteral(cur, beginIndex, endIndex);
        cur.guard = LinearPredicate.conjunct(cur.guard, ch.guard);
        return cur;
    }
    /**
     * Parses one comparison literal "left OP right" into guard constraints of
     * the form "e >= 0". Strict comparisons are made non-strict by
     * subtracting Main.eps (valid over the integers with eps = 1). A literal
     * without any comparison operator is treated as "expr != 0", and '!='
     * produces two guard disjuncts (less-than OR greater-than).
     */
    public static Node parseLiteral(Node par, int beginIndex, int endIndex) throws Exception
    {
        // System.err.println("parseLiteral:"+ beginIndex+"---"+endIndex);
        // Locate the (last) comparison operator and remember its kind.
        int sgn = -1, type = -1; //types: 0: "<=" 1: ">=" 2: ">" 3: "<" 4: "==" 5: "!="
        for (int i = beginIndex; i <= endIndex; i++)
            if (getToken(i).equals("<="))
            {
                sgn = i;
                type = 0;
            }
            else if (getToken(i).equals(">="))
            {
                sgn = i;
                type = 1;
            }
            else if (getToken(i).equals(">"))
            {
                sgn = i;
                type = 2;
            }
            else if (getToken(i).equals("<"))
            {
                sgn = i;
                type = 3;
            }
            else if (getToken(i).equals("=="))
            {
                sgn = i;
                type = 4;
            }
            else if (getToken(i).equals("!="))
            {
                sgn = i;
                type = 5;
            }
        if (sgn == beginIndex || sgn == endIndex)
            throw new Exception("literal starts or ends with sign @" + beginIndex + "-" + endIndex);
        Node cur = new Node(par, beginIndex, endIndex, "literal");
        Node left = null;
        Node right=null;
        if (sgn == -1)
        {
            // No operator: interpret the whole range as "expr != 0".
            type = 5;
            left = parseExpr(cur, beginIndex, endIndex);
            right = new Node(cur, endIndex, endIndex, "0");
            right.expr = new LinearCombination(Rational.zero);
        }
        else
        {
            left = parseExpr(cur, beginIndex, sgn - 1);
            right = parseExpr(cur, sgn + 1, endIndex);
        }
        if (type == 0) //left<=right --> right-left>=0
        {
            LinearCombination lc = right.expr.deepCopy();
            lc.minus(left.expr);
            LinearPredicate lp = new LinearPredicate();
            lp.add(lc);
            cur.guard.add(lp);
        }
        else if (type == 1) //left>=right --> left-right>=0
        {
            LinearCombination lc = left.expr.deepCopy();
            lc.minus(right.expr);
            LinearPredicate lp = new LinearPredicate();
            lp.add(lc);
            cur.guard.add(lp);
        }
        else if (type == 2) // left > right -> left -right >=eps -> left - right -eps >=0
        {
            LinearCombination lc = left.expr.deepCopy();
            lc.minus(right.expr); // left - right
            lc.minus(new LinearCombination(Main.eps)); // left - right - eps
            LinearPredicate lp = new LinearPredicate();
            lp.add(lc);
            cur.guard.add(lp);
        }
        else if (type == 3) //left < right --> right - left > eps --> right - left -eps >=0
        {
            LinearCombination lc = right.expr.deepCopy();
            lc.minus(left.expr); // right - left
            lc.minus(new LinearCombination(Main.eps)); // right - left - eps
            LinearPredicate lp = new LinearPredicate();
            lp.add(lc);
            cur.guard.add(lp);
        }
        else if (type == 4) //left==right --> left-right>=0 and right-left>=0
        {
            LinearCombination lc = right.expr.deepCopy();
            lc.minus(left.expr);
            LinearCombination lc2 = left.expr.deepCopy();
            lc2.minus(right.expr);
            LinearPredicate lp = new LinearPredicate();
            lp.add(lc);
            lp.add(lc2);
            cur.guard.add(lp);
        }
        else
        {
            // left != right --> (left < right) OR (left > right): two guard
            // entries, i.e. a disjunction of two single-constraint predicates.
            LinearCombination lc = right.expr.deepCopy();
            lc.minus(left.expr);
            lc.minus(new LinearCombination(Main.eps));
            LinearCombination lc2 = left.expr.deepCopy();
            lc2.minus(right.expr);
            lc2.minus(new LinearCombination(Main.eps));
            LinearPredicate lp1 = new LinearPredicate(), lp2 = new LinearPredicate();
            lp1.add(lc);
            lp2.add(lc2);
            cur.guard.add(lp1);
            cur.guard.add(lp2);
        }
        return cur;
    }
    /**
     * Parses an arithmetic expression by splitting it at top-level '+' and
     * binary '-' tokens into terms (handled by parseTerm) and summing their
     * linear combinations with the appropriate signs. A leading '-' is
     * recognized by not seeding the initial implicit '+' sentinel; a '-'
     * directly after '*' or '+' is treated as a unary sign, not a split.
     */
    public static Node parseExpr(Node par, int beginIndex, int endIndex) throws Exception
    {
        //System.err.println("parseExpr: "+beginIndex+"----"+endIndex);
        // Sentinel positions around each top-level additive operator.
        Vector<Integer> signIndex = new Vector<>();
        Vector<String> signType = new Vector<>();
        if (!getToken(beginIndex).equals("-"))
        {
            signIndex.add(beginIndex - 1);
            signType.add("+");
        }
        int openPar = 0;
        for (int i = beginIndex; i <= endIndex; i++)
        {
            if (getToken(i).equals("("))
                openPar++;
            else if (getToken(i).equals(")"))
                openPar--;
            if (openPar == 0 && (getToken(i).equals("+")
                    || (getToken(i).equals("-") && (i - 1 < beginIndex || (i - 1 >= beginIndex && !getToken(i - 1).equals("*") && !getToken(i - 1).equals("+"))))))
            {
                signIndex.add(i);
                signType.add(getToken(i));
            }
        }
        signIndex.add(endIndex + 1);
        signType.add("+");
        Node cur = new Node(par, beginIndex, endIndex, "expr");
        cur.expr = new LinearCombination();
        // Accumulate each term between consecutive sign positions.
        for (int i = 0; i + 1 < signIndex.size(); i++)
        {
            Node ch = parseTerm(cur, signIndex.elementAt(i) + 1, signIndex.elementAt(i + 1) - 1);
            if (signType.elementAt(i).equals("+"))
                cur.expr.add(ch.expr);
            else
                cur.expr.minus(ch.expr);
        }
        return cur;
    }
    // Parses a single term from tokens[beginIndex..endIndex]: a numeric
    // constant, a negated constant ("- 5" as two tokens), a variable name, a
    // parenthesised sub-expression, or a chain of factors joined by * and /.
    // Returns a Node whose .expr is the term's LinearCombination. Variable
    // names are registered in Parser.allVars as a side effect.
    public static Node parseTerm(Node par, int beginIndex, int endIndex) throws Exception
    {
        //System.err.println("parseTerm: "+beginIndex+"---"+endIndex);
        if ((beginIndex == endIndex && isNumeric(getToken(beginIndex)))) //constant
        {
            Node cur = new Node(par, beginIndex, endIndex, "constant");
            int val = Integer.parseInt(getToken(beginIndex));
            cur.expr = new LinearCombination();
            cur.expr.add("1", new Rational(val, 1));
            return cur;
        }
        else if (beginIndex == endIndex - 1 && isNumeric(getToken(endIndex))) //negative constant
        {
            // Two tokens: a unary "-" followed by a number.
            Node cur = new Node(par, beginIndex, endIndex, "constant");
            int val = -Integer.parseInt(getToken(endIndex));
            cur.expr = new LinearCombination();
            cur.expr.add("1", new Rational(val, 1));
            return cur;
        }
        else if (beginIndex == endIndex) //var
        {
            Node cur = new Node(par, beginIndex, endIndex, "var");
            String var = getToken(beginIndex);
            allVars.add(var);
            // Identifiers may not start with a digit (isNumeric already
            // rejected pure numbers above).
            if (Character.isDigit(var.charAt(0)))
                throw new Exception("Incorrect var name @" + beginIndex);
            cur.expr = new LinearCombination();
            cur.expr.add(var, new Rational(1, 1));
            return cur;
        }
        else // (...) or [] * []
        {
            Node cur = new Node(par, beginIndex, endIndex, "term mul");
            cur.expr = new LinearCombination();
            // Positions and kinds ("*" or "/") of top-level multiplicative
            // operators, with sentinel "*" entries at both ends.
            Vector<Integer> sgnIndex = new Vector<>();
            Vector<String> sgnType = new Vector<>();
            sgnIndex.add(beginIndex - 1);
            sgnType.add("*");
            int openPar = 0;
            for (int i = beginIndex; i <= endIndex; i++)
                if (getToken(i).equals("("))
                    openPar++;
                else if (getToken(i).equals(")"))
                    openPar--;
                else if (openPar == 0 && (getToken(i).equals("*") || getToken(i).equals("/")))
                {
                    sgnIndex.add(i);
                    sgnType.add(getToken(i));
                }
                else if (getToken(i).equals("%"))
                {
                    throw new Exception("% is not supported. @" + beginIndex + "-" + endIndex);
                }
            sgnIndex.add(endIndex + 1);
            sgnType.add("*");
            if (sgnIndex.size() == 2) // (...)
            {
                // No top-level * or /: the term is a parenthesised expression;
                // strip the outer parentheses and recurse.
                Node ch = parseExpr(cur, beginIndex + 1, endIndex - 1);
                cur.expr = ch.expr;
                return cur;
            }
            else
            {
                // Fold the factors left to right, starting from 1. Division is
                // only allowed by a constant factor (linear arithmetic).
                cur.expr.add("1", Rational.one);
                for (int i = 1; i < sgnIndex.size(); i++)
                {
                    Node ch = parseExpr(cur, sgnIndex.elementAt(i - 1) + 1, sgnIndex.elementAt(i) - 1);
                    if (sgnType.elementAt(i - 1).equals("*"))
                        cur.expr.multiplyByLin(ch.expr);
                    else if (ch.expr.isConstant() && ch.expr.coef.containsKey("1"))
                        cur.expr.multiplyByValue(Rational.inverse(ch.expr.coef.get("1")));
                    else
                        throw new Exception("Divison by variable is not possible @" + beginIndex + "-" + endIndex);
                }
                return cur;
            }
        }
    }
public static boolean isNumeric(String s)
{
for (int i = 0; i < s.length(); i++)
if (!Character.isDigit(s.charAt(i)) && s.charAt(i) != '.')
return false;
return true;
}
    // Number of tokens produced by readTokens.
    public static int getTokenCount()
    {
        return tokens.size();
    }
    // Token at position x (no bounds checking beyond Vector's own).
    public static String getToken(int x)
    {
        return tokens.elementAt(x);
    }
public static void readTokens(String program) throws Exception
{
String extraSpace = "";
for (int i = 0; i < program.length(); i++)
{
char c = program.charAt(i);
if (c == '.' || Character.isAlphabetic(c) || Character.isDigit(c) || c == '_')
extraSpace += c;
else
{
extraSpace += " ";
extraSpace += c;
extraSpace += " ";
}
}
Scanner scanner = new Scanner(extraSpace);
while (scanner.hasNext())
{
String s = scanner.next();
if (s.equals("="))
{
if (tokens.size() == 0)
throw new Exception("program cannot start with =");
String last = tokens.lastElement();
if (last.equals(":") || last.equals(">") || last.equals("<") || last.equals("=") || last.equals("!"))
{
tokens.removeElementAt(getTokenCount() - 1);
last += s;
tokens.add(last);
}
else
tokens.add(s);
}
else
tokens.add(s);
}
// for(int i=0;i<tokens.size();i++)
// System.err.println(i+":"+tokens.elementAt(i));
}
public static void readFile(String fileName) throws Exception
{
File file = new File(fileName);
Scanner in = new Scanner(file);
String program = "";
while (in.hasNextLine())
{
String s = in.nextLine();
if (s.contains("//"))
s = s.substring(0, s.indexOf("//"));
if (s.contains("AT("))
{
int ind = s.indexOf("AT(");
int openPar = 0, endOfAT = -1;
for (int i = 0; i < s.length(); i++)
{
if (s.charAt(i) == '(')
openPar++;
else if (s.charAt(i) == ')')
{
openPar--;
if (openPar == 0)
{
endOfAT = i;
break;
}
}
}
s = s.substring(0, ind) + s.substring(endOfAT + 1, s.length());
}
program += s + " ";
}
readTokens(program);
}
} | 20,313 | 34.576182 | 163 | java |
null | RevTerm-main/code/linear/part1/Main/src/QuadraticCombination.java | import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.Vector;
// A sum of products  SUM_v v * L_v  where v is a program variable (the
// pseudo-variable "1" carries the purely linear/constant part) and L_v is a
// LinearCombination over the template unknowns (c_*).
public class QuadraticCombination
{
    // Maps each program variable to its LinearCombination coefficient.
    Map<String, LinearCombination> coef;

    QuadraticCombination()
    {
        coef = new HashMap<>();
    }

    QuadraticCombination(String var, LinearCombination lc)
    {
        coef = new HashMap<>();
        add(var, lc);
    }

    // Fresh template over the given variables: one new unknown c_i per variable.
    QuadraticCombination(Set<String> vars)
    {
        coef = new HashMap<>();
        for (String var : vars)
        {
            add(var, new LinearCombination("c_" + InvariantGeneration.cCount));
            InvariantGeneration.cCount++;
        }
    }

    // Adds lc to var's coefficient. NOTE: when var is new, lc is stored by
    // reference (no defensive copy) — callers that keep mutating lc will
    // mutate this combination too.
    public void add(String var, LinearCombination lc)
    {
        if (coef.containsKey(var))
            coef.get(var).add(lc);
        else
            coef.put(var, lc);
    }

    public void add(QuadraticCombination qc)
    {
        for (String var : qc.coef.keySet())
            add(var, qc.coef.get(var));
    }

    // Adds a purely linear expression: each Rational coefficient of lc becomes
    // a constant LinearCombination here.
    public void add(LinearCombination lc)
    {
        for (String var : lc.coef.keySet())
        {
            add(var, new LinearCombination(lc.coef.get(var)));
        }
    }

    public void add(Rational val)
    {
        add("1", new LinearCombination(val));
    }

    // Returns -this as a fresh object; this is left untouched.
    public QuadraticCombination negate()
    {
        QuadraticCombination qc = new QuadraticCombination();
        for (String var : coef.keySet())
            qc.add(var, coef.get(var).negate());
        return qc;
    }

    public LinearCombination getCoef(String var)
    {
        return coef.get(var);
    }

    // Coefficient of the product var1*var2, looked up in either nesting order;
    // Rational.zero when absent.
    public Rational getCoef(String var1, String var2)
    {
        if (coef.containsKey(var1) && coef.get(var1).coef.containsKey(var2))
            return coef.get(var1).coef.get(var2);
        else if (coef.containsKey(var2) && coef.get(var2).coef.containsKey(var1))
            return coef.get(var2).coef.get(var1);
        else
            return Rational.zero;
    }

    public QuadraticCombination deepCopy()
    {
        QuadraticCombination qc = new QuadraticCombination();
        for (String var : coef.keySet())
            qc.add(var, coef.get(var).deepCopy());
        return qc;
    }

    // Substitutes var := lc, distributing var's old coefficient over lc's terms.
    public void replaceVarWithLinear(String var, LinearCombination lc)
    {
        if (!coef.containsKey(var))
            return;
        LinearCombination l = coef.get(var);
        coef.remove(var);
        for (String v : lc.coef.keySet())
        {
            LinearCombination tmp = l.deepCopy();
            tmp.multiplyByValue(lc.coef.get(v));
            add(v, tmp);
        }
    }

    // Human-readable rendering, e.g. "x*(2*c_0) + y*(...)".
    public String toNormalString()
    {
        String ret = "";
        for (String s : coef.keySet())
        {
            if (ret.isEmpty())
                ret += s + "*(" + coef.get(s).toNormalString() + ")";
            else
                ret += " + " + s + "*(" + coef.get(s).toNormalString() + ")";
        }
        return ret;
    }

    // SMT-LIB prefix rendering, e.g. "(+  (* c_0 x) (* c_1 y))"; "0" when empty.
    public String toString()
    {
        String ret = "";
        if (coef.keySet().size() > 1)
            ret = "(+ ";
        for (String var : coef.keySet())
        {
            LinearCombination lc = coef.get(var);
            // FIX: the original tested `ret == ""` — a reference-identity
            // comparison that only worked because "" is interned. isEmpty()
            // expresses the intent and is safe.
            if (ret.isEmpty())
                ret = "(* " + lc.toString() + " " + var + ")";
            else
                ret = ret + " (* " + lc.toString() + " " + var + ")";
        }
        if (coef.keySet().size() > 1)
            ret += ")";
        if (ret.isEmpty())
            ret = "0";
        return ret;
    }
}
| 3,438 | 23.390071 | 81 | java |
null | RevTerm-main/code/linear/part1/Main/src/QuadraticPredicate.java | import java.util.Vector;
// A conjunction of quadratic inequalities  qc >= 0  (one per element of exprs).
public class QuadraticPredicate
{
    // The constant-true predicate 1 >= 0; compared by identity elsewhere.
    public static QuadraticPredicate TRUE=new QuadraticPredicate(new QuadraticCombination("1",new LinearCombination("1",Rational.one)));
    Vector<QuadraticCombination> exprs;
    QuadraticPredicate() //conjunctions
    {
        exprs = new Vector<>();
    }
    QuadraticPredicate(QuadraticCombination qc)
    {
        exprs=new Vector<>();
        add(qc);
    }
    // Appends one conjunct (stored by reference, not copied).
    void add(QuadraticCombination qc)
    {
        exprs.add(qc);
    }
    // Conjoins all of qp's conjuncts into this predicate.
    void add(QuadraticPredicate qp)
    {
        exprs.addAll(qp.exprs);
    }
    // Substitutes var := update in every conjunct, in place.
    void replaceVarWithLinear(String var,LinearCombination update)
    {
        for(QuadraticCombination qc:exprs)
            qc.replaceVarWithLinear(var,update);
    }
    // Negates a disjunction of conjunctions: each returned predicate picks one
    // negated conjunct from each input predicate (DNF expansion).
    public static Vector<QuadraticPredicate> negate(Vector<QuadraticPredicate> vqp)
    {
        Vector<QuadraticPredicate> ret = new Vector<>();
        for (QuadraticPredicate qp : vqp)
        {
            Vector<QuadraticCombination> vqc = qp.negate();
            if (ret.isEmpty())
            {
                // First input: seed one singleton predicate per negated conjunct.
                for (QuadraticCombination qc : vqc)
                {
                    QuadraticPredicate c = new QuadraticPredicate();
                    c.add(qc);
                    ret.add(c);
                }
                continue;
            }
            Vector<QuadraticPredicate> tmp = new Vector<>();
            for (QuadraticCombination cur : vqc)
                for (QuadraticPredicate q : ret)
                {
                    QuadraticPredicate c = q.deepCopy();
                    c.add(cur);
                    tmp.add(c);
                }
            // NOTE(review): this keeps the old partial products alongside the
            // extended ones; a strict cross product would be `ret = tmp` —
            // confirm whether retaining the shorter combinations is intended.
            ret.addAll(tmp);
        }
        return ret;
    }
    // Negation of this conjunction: one combination per conjunct, each encoding
    // qc < 0 as  -qc - eps >= 0  (strictness via Main.eps on the "1" slot).
    Vector<QuadraticCombination> negate()
    {
        Vector<QuadraticCombination> ret = new Vector<>();
        for (QuadraticCombination qc : exprs)
        {
            QuadraticCombination q = qc.negate();
            q.add("1", new LinearCombination(Rational.negate(Main.eps))); // 1*(-1)
            ret.add(q);
        }
        return ret;
    }
    QuadraticCombination getTerm(int ind)
    {
        return exprs.elementAt(ind);
    }
    public QuadraticPredicate deepCopy()
    {
        QuadraticPredicate qp = new QuadraticPredicate();
        for (QuadraticCombination qc : exprs)
            qp.add(qc.deepCopy());
        return qp;
    }
    // SMT-LIB rendering: "(and (>= qc1 0) (>= qc2 0) ...)".
    public String toString()
    {
        String ret = "";
        for (QuadraticCombination qc : exprs)
            ret += "(>= " + qc.toString() + " 0) ";
        if (exprs.size() > 1)
            ret = "(and " + ret + ") ";
        return ret;
    }
    // Human-readable rendering; note the trailing "and " is intentional in
    // current callers' output formatting.
    public String toNormalString()
    {
        String ret = "";
        for (QuadraticCombination qc : exprs)
            ret += qc.toNormalString() + ">=0 and ";
        return ret;
    }
} | 2,825 | 25.166667 | 136 | java |
null | RevTerm-main/code/linear/part1/Main/src/Rational.java | import java.util.EnumMap;
// A rational number kept in lowest terms; the denominator is always positive
// and the numerator carries the sign.
public class Rational implements Comparable<Rational>
{
    public static final Rational one = new Rational(1, 1), zero = new Rational(0, 1);
    int numerator, denominator;

    // Euclidean gcd for non-negative arguments (gcd(a, 0) == a).
    public int gcd(int a, int b)
    {
        if (b == 0) return a;
        return gcd(b, a % b);
    }

    Rational(int numerator, int denominator)
    {
        if (numerator == 0)
        {
            this.numerator = 0;
            this.denominator = 1;
            return;
        }
        if (denominator < 0)
        {
            denominator *= -1;
            numerator *= -1;
        }
        // BUGFIX: reduce by the gcd of the absolute values. The original passed
        // a possibly-negative numerator to gcd, got a negative divisor back and
        // stored a negative denominator (e.g. Rational(-2, 4) became 1/-2),
        // which broke equals, compareTo and isNonNegative.
        int g = gcd(Math.abs(numerator), denominator);
        this.numerator = numerator / g;
        this.denominator = denominator / g;
    }

    public static Rational negate(Rational a)
    {
        return new Rational(-a.numerator, a.denominator);
    }

    public static Rational inverse(Rational a) throws Exception
    {
        if (a.numerator == 0)
            throw new Exception("getting inverse of " + a + " which is not defined");
        return new Rational(a.denominator, a.numerator);
    }

    public static Rational add(Rational a, Rational b)
    {
        return new Rational(a.numerator * b.denominator + b.numerator * a.denominator, a.denominator * b.denominator);
    }

    public static Rational minus(Rational a, Rational b)
    {
        return add(a, negate(b));
    }

    public static Rational mul(Rational a, Rational b)
    {
        return new Rational(a.numerator * b.numerator, a.denominator * b.denominator);
    }

    public static Rational div(Rational a, Rational b) throws Exception
    {
        return mul(a, inverse(b));
    }

    // NOTE: an overload, not an override of Object.equals — callers must pass
    // a Rational. Values are canonical, so field comparison is sufficient.
    public boolean equals(Rational a)
    {
        return (a.numerator == numerator && a.denominator == denominator);
    }

    public boolean isNonNegative()
    {
        return (numerator >= 0);
    }

    @Override
    public int compareTo(Rational a)
    {
        // Cross-multiply in long to avoid int overflow (the original returned
        // the raw int difference). Denominators are positive, so the sign of
        // the cross-product difference is the sign of the comparison.
        return Long.compare((long) numerator * a.denominator, (long) a.numerator * denominator);
    }

    public String toNormalString()
    {
        if (denominator == 1)
            return "" + numerator;
        return "(" + numerator + "/" + denominator + ")";
    }

    // Kept for compatibility; a no-op now that the constructor canonicalises.
    public void normalize()
    {
        if (denominator < 0)
        {
            numerator *= -1;
            denominator *= -1;
        }
    }

    // SMT-LIB rendering: negative values as "(- n)", fractions as "(/ n d)".
    public String toString()
    {
        normalize();
        String num;
        if (numerator < 0)
            num = "(- " + (-numerator) + ")";
        else
            num = "" + numerator;
        if (denominator == 1)
            return num;
        return "(/ " + num + " " + denominator + ")";
    }
}
null | RevTerm-main/code/linear/part1/Main/src/Transition.java | import java.util.Vector;
public class Transition //from "v.first" to "v.second" with guard "g" and update "varName := update"
{
    // Global registry of every transition in the (B)CFG.
    public static Vector<Transition> allTransitions = new Vector<>();
    // Source node v and target node u of this edge.
    CFGNode v, u;
    // Deterministic guard over program variables.
    LinearPredicate detGuard;
    // Guard template constraining fresh nondet inputs (see addNondetTemplate).
    QuadraticPredicate nondetGuard;
    // Parallel vectors: varName[i] := update[i]; a null update means nondet().
    Vector<String> varName;
    Vector<LinearCombination> update;
    // Scratch flag used by CFGNode.addNecessaryNondet when grouping
    // transitions that share a logically equal guard.
    boolean hasGroup;
    Transition(CFGNode a, CFGNode b)
    {
        v = a;
        u = b;
        detGuard = new LinearPredicate();
        nondetGuard = new QuadraticPredicate();
        varName = new Vector<>();
        update = new Vector<>();
        hasGroup = false;
    }
    // For every update that assigns a fresh nondet variable (its rendering
    // contains "_r_"), adds template bounds  qc <= _r_ <= qc'  to nondetGuard,
    // where qc/qc' are fresh quadratic templates over all program variables.
    void addNondetTemplate()
    {
        for(LinearCombination lc:update)
        {
            if(lc.toString().contains("_r_")) // lc is a fresh nondet variable
            {
                // qc <= lc <= qc
                // gen: qc1<=qc2
                QuadraticCombination qc= new QuadraticCombination(Parser.allVars),qcc=qc.deepCopy();
                qc.add(lc.negate()); //qc - lc <=0
                nondetGuard.add(qc.negate());
                qcc.add(lc.negate()); // qc - lc >=0
                nondetGuard.add(qcc);
            }
        }
    }
    // Registers this transition globally and as an outgoing edge of its source.
    public void addToGraph()
    {
        allTransitions.add(this);
        v.out.add(this);
    }
    // Copies guards, names and updates; the endpoint nodes v/u are shared.
    public Transition deepCopy()
    {
        Transition ret = new Transition(v, u);
        ret.detGuard = detGuard.deepCopy();
        ret.nondetGuard = nondetGuard.deepCopy();
        for (String var : varName)
            ret.varName.add(var.toString());
        for (LinearCombination lc : update)
            if (lc != null)
                ret.update.add(lc.deepCopy());
            else
                ret.update.add(null);
        return ret;
    }
    // Debug rendering of endpoints, guards and updates.
    public String toString()
    {
        String res = "";
        res += "from: " + v.id + "\nto: " + u.id + "\n";
        if (detGuard != null)
            res += "detGuard: " + detGuard + "\n";
        if (nondetGuard != null)
            res += "nondetGuard: " + nondetGuard.toNormalString() + "\n";
        for (int i = 0; i < varName.size(); i++)
            if (update.elementAt(i) != null)
                res += varName.elementAt(i) + " := " + update.elementAt(i).toNormalString() + "\n";
            else
                res += varName.elementAt(i) + " := nondet()\n";
        return res;
    }
} | 2,381 | 27.357143 | 103 | java |
null | RevTerm-main/code/linear/part2/Main/src/CFGNode.java | import java.util.Map;
import java.util.TreeMap;
import java.util.Vector;
// A node of the control-flow graph. Maintains global registries (allCFGNodes,
// idToNode) as construction side effects.
public class CFGNode
{
    public static Vector<CFGNode> allCFGNodes=new Vector<>();
    public static Map<Integer,CFGNode> idToNode=new TreeMap<>();
    // Largest node id seen so far; used to mint fresh ids for helper nodes.
    public static int greaTestNodeIndex=0;
    public Vector<Transition> out;
    public Vector<Transition> rout; //outgoing transitions in BCFG
    int id;
    boolean isCutPoint,visited;
    // Invariant templates (disjuncts) attached to this node.
    Vector<QuadraticPredicate> inv;
    Vector<LinearPredicate> computedInv;
    QuadraticPredicate preCondition;
    CFGNode(int ind)
    {
        id = ind;
        idToNode.put(ind, this);
        isCutPoint=visited=false;
        out = new Vector<>();
        rout = new Vector<>();
        allCFGNodes.add(this);
        inv = new Vector<>();
        computedInv = new Vector<>();
        preCondition = null;
        if(ind>greaTestNodeIndex)
            greaTestNodeIndex=ind;
    }
    void addTransition(Transition t)
    {
        out.add(t);
    }
    // Groups outgoing transitions whose detGuards are logically equal (i.e. the
    // choice among them is nondeterministic) and reroutes each group through a
    // fresh intermediate node: this --[guard, _tmp_ := _r_k]--> n, and from n
    // one transition per group member selected by the value of _tmp_.
    void addNecessaryNondet()
    {
        boolean hasEqual=false; // NOTE(review): unused local
        Vector <Vector<Transition>> groups= new Vector<>();
        for (Transition t : out)
            if(!t.hasGroup)
            {
                Vector<Transition> g=new Vector<>();
                for(Transition tp:out)
                    if (t.detGuard.equalsLogic(tp.detGuard))
                    {
                        tp.hasGroup=true;
                        g.add(tp);
                    }
//                for(Transition tmp:g)
//                    System.err.println("from: "+id+" to "+tmp.u.id+"  "+g.size());
                if(g.size()==1)
                    t.hasGroup = false; // singleton: leave the transition in place
                else
                    groups.add(g);
            }
        // Remove all grouped transitions from this node and the global list;
        // they are re-created below behind the intermediate node.
        for(int i=0;i<out.size();i++)
            if(out.elementAt(i).hasGroup)
            {
                Transition.allTransitions.removeElement(out.elementAt(i));
                out.removeElementAt(i);
                i--;
            }
        for(Vector<Transition> g:groups)
        {
//            System.err.println("----------------");
//            for(Transition tau:g)
//                System.err.println("transition from "+tau.v.id+" to: "+tau.u.id);
//            System.err.println("----------------");
            LinearPredicate commonGuard=g.firstElement().detGuard.deepCopy();
            CFGNode n=new CFGNode(greaTestNodeIndex+1);
            String nontdetTmp="_tmp_";
            Parser.allVars.add("_tmp_");
            for(int i=0;i<g.size();i++)
            {
                // The i-th alternative fires when the selector satisfies
                // _tmp_ <= i (first), _tmp_ >= i (last), or _tmp_ == i (middle).
                Transition t = new Transition(n,g.elementAt(i).u);
                t.varName=g.elementAt(i).varName;
                t.update=g.elementAt(i).update;
                t.nondetGuard=g.elementAt(i).nondetGuard;
                LinearCombination lower = new LinearCombination("_tmp_", Rational.one);
                lower.add("1", new Rational(-i, 1)); // _tmp_ >= i
                LinearCombination upper = new LinearCombination("_tmp_", Rational.negate(Rational.one));
                upper.add("1", new Rational(i, 1)); // _tmp_ <= i
                if (i == 0)
                    t.detGuard.add(upper); //t <= 0
                else if (i == g.size() - 1)
                    t.detGuard.add(lower); //t >= out.size()-1
                else
                {
                    t.detGuard.add(upper); // t >= i
                    t.detGuard.add(lower); // t <= i
                }
                t.addToGraph();
            }
            // Bridge transition: common guard, then _tmp_ := fresh nondet _r_k.
            Transition t = new Transition(this, n);
            t.detGuard.add(commonGuard);
            String nondetr="_r_"+Parser.nondetCount;
            t.varName.add(nontdetTmp);
            t.update.add(new LinearCombination(nondetr));
            Parser.nondetCount++;
            t.addToGraph();
        }
    }
    // Connects every sink node (no outgoing transitions) to the terminal node
    // (id -2) with an unguarded transition.
    void addTerminalTransitions()
    {
//        if(out.size()>0)
//        {
//            Vector<LinearPredicate> predicates = new Vector<>();
//            int cnt=0;
//            for (Transition t : out)
//            {
//                LinearPredicate lp=new LinearPredicate();
//                for(LinearCombination lc:t.detGuard.exprs)
//                    lp.add(lc.deepCopy());
//                predicates.add(lp);
//                cnt+=lp.exprs.size();
//            }
//            if(cnt==0)
//                return;
//            Vector<LinearPredicate> negation = LinearPredicate.negate(predicates);
//            CFGNode term=idToNode.get(-2);
//            for(LinearPredicate lp:negation)
//            {
//                Transition tau=new Transition(this,term);
//                tau.detGuard =lp;
//                tau.addToGraph();
//            }
//        }
//        else
        if(out.size()==0)
        {
            CFGNode term=idToNode.get(-2);
            Transition tau=new Transition(this,term);
            tau.addToGraph();
        }
    }
    // Returns the node with id x, creating it if necessary.
    public static CFGNode addNode(int x)
    {
        if(idToNode.containsKey(x))
            return idToNode.get(x);
        CFGNode n=new CFGNode(x);
        return n;
    }
    // Lookup only; returns null when no node with id x exists.
    public static CFGNode getCFGNode(int x)
    {
        return idToNode.get(x);
    }
}
| 5,150 | 30.601227 | 104 | java |
null | RevTerm-main/code/linear/part2/Main/src/CFGUtil.java | import java.util.Vector;
// Graph utilities: cutpoint detection and weakest-precondition propagation.
public class CFGUtil
{
    // Returns the cutpoints of the CFG: always the start and terminal nodes,
    // plus every back-edge target found by a DFS from the start node. Marks
    // each returned node's isCutPoint flag as a side effect.
    public static Vector<CFGNode> findCutpoints()
    {
        Vector<CFGNode> ret=new Vector<>();
        ret.add(Main.startNode);
        Main.startNode.isCutPoint=true;
        ret.add(Main.termNode);
        Main.termNode.isCutPoint=true;
        dfs(Main.startNode,ret,new Vector<CFGNode>());
        return ret;
    }
    // DFS helper: a successor that is already on the current DFS branch is the
    // target of a back edge, hence a cutpoint (every cycle passes through one).
    private static void dfs(CFGNode v,Vector<CFGNode> res,Vector<CFGNode> currentBranch)
    {
        v.visited=true;
        currentBranch.add(v);
        for(Transition t:v.out)
        {
            if(!t.u.visited)
                dfs(t.u, res, currentBranch);
            else if(!res.contains(t.u) && currentBranch.contains(t.u))
            {
                t.u.isCutPoint=true;
                res.add(t.u);
            }
        }
        currentBranch.removeElementAt(currentBranch.size()-1);
    }
    // Propagates the Farkas objective backwards along the given path: walks the
    // transitions in reverse, substituting each update varName := update into
    // the constraint system and conjoining the transition guards. The nondet
    // guard templates are only added on the second pass (counter != 0).
    public static void weakestPreCondition(Vector<Transition> path,Farkas farkas,int counter) //NOTE: for C-Integer programs this is completely fine but for general T2 transition systems it might have problems
    {
        for(int i=path.size()-1;i>=0;i--)
        {
            Transition t=path.elementAt(i);
            if(counter!=0)
                farkas.addInvConstraint(t.nondetGuard.deepCopy());
            for (int j = 0; j < t.varName.size(); j++)
            {
                String var = t.varName.elementAt(j);
                LinearCombination upd = t.update.elementAt(j);
                farkas.replaceVarWithLinear(var, upd);
            }
            farkas.addPredicate(t.detGuard.deepCopy());
        }
    }
}
| 1,626 | 29.12963 | 210 | java |
null | RevTerm-main/code/linear/part2/Main/src/Farkas.java | import java.util.HashSet;
import java.util.Set;
import java.util.Vector;
// One application of Farkas' lemma: a premise (quadratic invariant constraints
// plus linear constraints, each with its own multiplier d_i >= 0) that must
// entail the objective. countD numbers the multipliers globally across all
// Farkas instances — the accounting in the constructor/deepCopy/add* methods
// keeps the per-instance startDIndex consistent with that global counter.
public class Farkas
{
    public static int countD=0;
    int startDIndex;
    int startNode,endNode;
    QuadraticPredicate invConstraint;
    LinearPredicate linearConstraints;
    QuadraticCombination objective;
    Farkas(int startNode,int endNode)
    {
        this.startNode=startNode;
        this.endNode=endNode;
        linearConstraints=new LinearPredicate();
        LinearCombination lc=new LinearCombination();
        lc.add("1",Rational.one);
        linearConstraints.add(lc); // 1>=0 is always true
        invConstraint=new QuadraticPredicate();
        startDIndex=countD;
        countD++; // for 1>=0
    }
    // Copies constraints and objective; reuses the original's multiplier range,
    // so the countD increment done by the constructor is undone here.
    public Farkas deepCopy()
    {
        Farkas ret=new Farkas(startNode,endNode);
        countD--; // for 1>=0 which is added in new
        ret.startDIndex=startDIndex;
        ret.invConstraint.exprs.addAll(invConstraint.exprs);
        //countD+=invConstraint.size();
        ret.linearConstraints=linearConstraints.deepCopy();
        //countD+=linearConstraints.exprs.size()-1; //-1 for 1>=0 which is already added
        ret.objective=objective.deepCopy();
        return ret;
    }
//    public Farkas disabled()
//    {
//        Farkas ret=deepCopy();
////        LinearCombination lc=new LinearCombination("n_"+InvariantGeneration.nCount);
////        QuadraticCombination obj=new QuadraticCombination("1",lc);
//
//        InvariantGeneration.nCount++;
//        ret.objective=QuadraticCombination.minus1.deepCopy();
//        return ret;
//    }
    // Conjoins quadratic premise constraints; reserves one multiplier each.
    void addInvConstraint(QuadraticPredicate inv)
    {
        invConstraint.add(inv);
        countD+=inv.exprs.size();
    }
    // Conjoins linear premise constraints; reserves one multiplier each.
    void addPredicate(LinearPredicate lp)
    {
        linearConstraints.add(lp);
        countD+=lp.exprs.size();
    }
    void setObjective(QuadraticCombination obj)
    {
        objective=obj.deepCopy();
    }
    // Builds, per variable, the equality "sum of multiplier-weighted premise
    // coefficients equals the objective coefficient" (as qc - coef = 0).
    public Vector<QuadraticCombination> generateEqualities()
    {
        Vector<QuadraticCombination> ret = new Vector<>();
        Set<String> allVars= getAllVars();
        for (String var : allVars)
        {
            QuadraticCombination tmp = makeEquality(var);
            ret.add(tmp);
        }
        return ret;
    }
    // Every variable mentioned by any premise constraint or the objective.
    Set<String> getAllVars()
    {
        Set<String> ret=new HashSet<>();
        for(LinearCombination lc:linearConstraints.exprs)
            ret.addAll(lc.coef.keySet());
        for(QuadraticCombination qc: invConstraint.exprs)
            ret.addAll(qc.coef.keySet());
        ret.addAll(objective.coef.keySet());
        return ret;
    }
    // The Farkas equality for one variable: multipliers d_i are assigned first
    // to the quadratic premise constraints, then to the linear ones, in the
    // order they were added, starting from startDIndex.
    public QuadraticCombination makeEquality(String var)
    {
        QuadraticCombination qc=new QuadraticCombination();
        int dIndex=startDIndex;
        if(!invConstraint.exprs.isEmpty())
        {
            //for(int i=0;i<invConstraint.exprs.size();i++)
            for(QuadraticCombination invc:invConstraint.exprs)
            {
                if (invc.coef.containsKey(var))
                {
                    String invMultiplier = "d_" + dIndex;
                    LinearCombination lc = invc.coef.get(var);
                    qc.add(invMultiplier, lc);
                }
                dIndex++;
            }
        }
        for(LinearCombination lp:linearConstraints.exprs) // lp>=0
        {
            String multiplier="d_"+dIndex;
            if(lp.coef.containsKey(var))
            {
                Rational coef = lp.coef.get(var);
                qc.add(multiplier,new LinearCombination(coef));
            }
            dIndex++;
        }
        LinearCombination coef = objective.getCoef(var);
        //qc=coef <=> qc-coef=0
        if(coef!=null)
        {
            LinearCombination lc = coef.negate();
            qc.add(lc);
        }
        return qc;
    }
    // Substitutes var := lc throughout premises and objective (used by WP).
    void replaceVarWithLinear(String var,LinearCombination lc)
    {
        linearConstraints.replaceVarWithLinear(var,lc);
        invConstraint.replaceVarWithLinear(var,lc);
        objective.replaceVarWithLinear(var,lc);
    }
    // Debug rendering: premises labelled with their multiplier, then objective.
    public String toString()
    {
        String ret="";
        ret+="\n---------------------------------------------\n";
        ret+="from: "+startNode+" to: "+endNode+"\n";
        int dIndex=startDIndex;
        for(int i=0;i<invConstraint.exprs.size();i++)
        {
            ret +="d_"+dIndex+": "+invConstraint.exprs.elementAt(i).toNormalString() + "\n";
            dIndex++;
        }
        for(LinearCombination lc:linearConstraints.exprs)
        {
            ret += "\nd_" + dIndex + ": "+lc.toNormalString();
            dIndex++;
        }
        ret+="\n---------------------------------------------\n";
        ret+=objective.toNormalString();
        return ret;
    }
}
| 4,761 | 26.686047 | 92 | java |
null | RevTerm-main/code/linear/part2/Main/src/InvUtil.java | import java.io.*;
import java.nio.channels.FileLockInterruptionException;
import java.util.Scanner;
import java.util.Vector;
//TODO: update checkNonTermination
public class InvUtil
{
    // Emits the Farkas constraint system as an SMT-LIB script
    // (<workingdir>/<solver><con>-<dis><file>.smt2): declarations for template
    // unknowns c_*, non-negative multipliers d_*, program variables and nondet
    // inputs _r_*; exact equalities for the inductive backward invariant I;
    // and a disjunction asserting some system in Inductive is violated. Then
    // dispatches to check(). The startNode parameter is currently unused but
    // kept for interface compatibility.
    public static boolean checkNonTermination(Vector<Farkas> I, Vector<Farkas> Inductive, CFGNode startNode) throws Exception
    {
        StringBuilder template = new StringBuilder();
        template.append("(set-option :print-success false) \n")
                .append("(set-option :produce-models true)\n");
        if (Main.solver.equals("bclt"))
        {
            // bclt needs the logic declared explicitly.
            template.append("(set-option :produce-assertions true)\n")
                    .append("(set-logic QF_NIA)\n");
        }
        for (int i = 0; i < InvariantGeneration.cCount; i++)
            template.append("(declare-const c_").append(i).append(" Int)\n");
        for (int i = 0; i < Farkas.countD; i++)
        {
            template.append("(declare-const d_").append(i).append(" Int)\n");
            // Farkas multipliers are non-negative by construction.
            template.append("(assert (>= d_").append(i).append(" 0))\n");
        }
        for (String var : Parser.allVars)
            if (!var.equals("1"))
                template.append("(declare-const ").append(var).append(" Int)\n");
        for (int i = 0; i < Parser.nondetCount; i++)
            template.append("(declare-const _r_").append(i).append(" Int)\n");
        // try-with-resources: the original leaked the FileWriter if any write threw.
        try (FileWriter fw = new FileWriter(Main.workingdir + "/" + Main.solver + Main.con + "-" + Main.dis + Main.fileName + ".smt2"))
        {
            fw.write(template.toString());
            // Inductive backward invariant: every Farkas equality holds exactly.
            for (Farkas f : I)
                for (QuadraticCombination qc : f.generateEqualities())
                    fw.write("(assert (= 0 " + qc.toString() + "))\n");
            // Non-inductiveness: some system's premises hold yet its objective
            // is negative.
            fw.write("(assert (or ");
            for (Farkas farkas : Inductive)
            {
                fw.write("(and ");
                for (QuadraticCombination q : farkas.invConstraint.exprs)
                    fw.write("(>= " + q.toString() + " 0) ");
                for (LinearCombination lc : farkas.linearConstraints.exprs)
                    if (lc.coef.size() == 1 && lc.coef.containsKey("1")) // 1>=0 is not necessary
                        continue;
                    else
                        fw.write("(>= " + lc.toString() + " 0) ");
                fw.write("(< " + farkas.objective + " 0)");
                fw.write(")\n");
            }
            fw.write("))\n");
            fw.write("(check-sat)\n");
            // Request a model for the unknowns and the visible program variables.
            fw.write("(get-value (");
            for (int i = 0; i < InvariantGeneration.cCount; i++)
                fw.write("c_" + i + " ");
            for (String var : Parser.allVars)
                if (!var.equals("1") && !var.startsWith("_a_") && !var.startsWith("_b_"))
                    fw.write(var + " ");
            fw.write("))\n");
        }
        // Simplified from: if(check()) return true; return false;
        return check();
    }

    // Runs the configured external solver on the generated .smt2 file, mirrors
    // its stdout into <smtFile>.result, and — when the solver reports "sat" —
    // forwards the candidate model to safetyUtil.check.
    public static boolean check() throws Exception
    {
        String smtFile = Main.workingdir + "/" + Main.solver + Main.con + "-" + Main.dis + Main.fileName;
        String[] configs = {"bclt --file", "mathsat", "z3 -smt2 "};
        int solverInd = -1;
        if (Main.solver.equals("bclt"))
            solverInd = 0;
        else if (Main.solver.equals("mathsat"))
            solverInd = 1;
        else if (Main.solver.equals("z3"))
            solverInd = 2;
        Process process = Runtime.getRuntime().exec("./" + Main.solversDir + "/" + configs[solverInd] + " " + smtFile + ".smt2");
        // NOTE(review): waiting before draining stdout can deadlock if the
        // solver's output exceeds the pipe buffer; kept as in the original.
        process.waitFor();
        boolean isSAT = false;
        // try-with-resources closes both streams even on exception (the
        // original leaked the BufferedReader entirely).
        try (BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(process.getInputStream()));
             FileWriter fw = new FileWriter(smtFile + ".result"))
        {
            // FIX: read until EOF. The original looped on ready(), which can
            // return false before the stream is exhausted and silently
            // truncate the recorded model.
            String s;
            while ((s = bufferedReader.readLine()) != null)
            {
                fw.write(s + "\n");
                if (s.equals("sat"))
                    isSAT = true;
            }
        }
        if (!isSAT)
            return false;
        return safetyUtil.check(smtFile);
    }
}
| 4,101 | 32.622951 | 123 | java |
null | RevTerm-main/code/linear/part2/Main/src/InvariantGeneration.java | import java.util.*;
// Builds invariant templates at cutpoints and generates the Farkas constraint
// systems that make them inductive. cCount numbers the template unknowns c_*.
public class InvariantGeneration
{
    // Maps nondet template variables (_a_*/_b_*) to their assigned c_* unknown.
    public static Map<String,String> nondetVarsC=new HashMap<>();
    public static int totalUnknownVars=0;
    public static int cCount=0;
    // Attaches a fresh template of `dis` disjuncts x `con` conjuncts (each a
    // linear form over Parser.allVars with fresh c_* unknowns) to every
    // cutpoint. On the second pass (counter==1) the terminal node keeps its
    // previously computed invariant, and only c_* names for the _a_/_b_
    // nondet template variables are minted.
    public static void MakeTemplate(int con,int dis,CFGNode startNode,int counter)//conjunctions, disjunctions
    {
        for(CFGNode n:CFGNode.allCFGNodes)
        {
            if(n.id==Main.termNode.id && counter==1)
            {
                //n.inv does not change. the previous computed invariant is used as the BI of the start location of BCFG.
                for(String var:Parser.allVars)
                    if(var.startsWith("_a_") || var.startsWith("_b_"))
                    {
                        String cVar="c_"+cCount;
                        cCount++;
                        nondetVarsC.put(var,cVar);
                    }
            }
            else if(n.isCutPoint)
            {
                n.inv.clear();
                for(int i=0;i<dis;i++)
                {
                    QuadraticPredicate qp = new QuadraticPredicate();
                    for (int j = 0; j < con; j++)
                    {
                        QuadraticCombination qc = new QuadraticCombination();
                        for (String var : Parser.allVars)
                        {
                            // Nondet template variables get no coefficient here.
                            if(var.startsWith("_a_") || var.startsWith("_b_"))
                                continue;
                            qc.add(var, new LinearCombination("c_" + cCount));
                            cCount++;
                        }
                        qp.add(qc);
                    }
                    n.inv.add(qp);
                }
            }
        }
    }
    // Emits one Farkas system per (source disjunct, negated target disjunct,
    // target conjunct, cutpoint-free path) combination. On the first pass
    // (counter==0) it additionally constrains the start node's own invariant.
    public static void generate(Vector<CFGNode> cutPoints,Vector<Farkas> farkasVector,int counter)
    {
        if(counter==0) //for startNode
        {
            CFGNode u = Main.startNode;
            // The last disjunct is the "objective" predicate; the remaining
            // disjuncts are negated and act as premises.
            QuadraticPredicate objPred=u.inv.lastElement();
            Vector<QuadraticPredicate> tmp = new Vector<>();
            for(QuadraticPredicate qc:u.inv)
                if(qc!=objPred)
                    tmp.add(qc.deepCopy());
            Vector<QuadraticPredicate> uinvNegate=QuadraticPredicate.negate(tmp);
            if(uinvNegate.isEmpty())
                uinvNegate.add(QuadraticPredicate.TRUE);
            for(QuadraticCombination qc:objPred.exprs)
                for(QuadraticPredicate qp:uinvNegate)
                {
                    Farkas farkas=new Farkas(-1,-1);
                    if(qp!=QuadraticPredicate.TRUE)
                        farkas.addInvConstraint(qp.deepCopy());
                    farkas.setObjective(qc);
                    farkasVector.add(farkas);
                }
        }
        for(CFGNode v:cutPoints)
            processPaths(v,v,new Vector<Transition>(),farkasVector,counter);
    }
    // DFS over cutpoint-free paths from cutpoint st. For each path ending at a
    // cutpoint u, builds Farkas systems: st's invariant & negated non-objective
    // disjuncts of u & path guards => each conjunct of u's objective disjunct,
    // pulled back along the path by weakestPreCondition. counter selects which
    // edge set to follow (0: forward `out`, otherwise backward `rout`).
    private static void processPaths(CFGNode st,CFGNode v,Vector<Transition> path,Vector<Farkas> farkasVector,int counter)
    {
        //System.err.println("processPaths: st:"+st.id+" v:"+v.id+" v.out="+v.out.size()+" v.rout="+v.rout.size()+" counter="+counter);
        Vector<Transition> tran;
        if(counter==0)
            tran=v.out;
        else
            tran=v.rout;
        for(Transition t:tran)
        {
            CFGNode u=t.u;
            path.add(t);
            if(u.isCutPoint)
            {
                QuadraticPredicate objPred=u.inv.lastElement();
                Vector<QuadraticPredicate> tmp = new Vector<>();
                for(QuadraticPredicate qc:u.inv)
                    if(qc!=objPred)
                        tmp.add(qc.deepCopy());
                Vector<QuadraticPredicate> uinvNegate=QuadraticPredicate.negate(tmp);
                if(uinvNegate.isEmpty())
                    uinvNegate.add(QuadraticPredicate.TRUE);
                for(QuadraticPredicate vinv:st.inv)
                    for(QuadraticPredicate uinv:uinvNegate)
                        for(QuadraticCombination obj:objPred.exprs)
                        {
                            //vinv & uinv & path => obj
                            Farkas farkas=new Farkas(st.id,u.id);
                            if(uinv!=QuadraticPredicate.TRUE)
                                farkas.addInvConstraint(uinv.deepCopy());
                            farkas.setObjective(obj.deepCopy());
                            CFGUtil.weakestPreCondition(path,farkas,counter);
                            farkas.addInvConstraint(vinv);
                            farkasVector.add(farkas);
                        }
            }
            else
                processPaths(st,u,path,farkasVector,counter);
            // Backtrack before exploring the next sibling transition.
            path.removeElementAt(path.size()-1);
        }
    }
}
| 4,741 | 36.634921 | 135 | java |
null | RevTerm-main/code/linear/part2/Main/src/LinearCombination.java | import java.text.DecimalFormat;
import java.util.HashMap;
import java.util.Map;
import java.util.Vector;
// A linear expression  SUM_v c_v * v  with Rational coefficients; the
// pseudo-variable "1" carries the constant term.
public class LinearCombination
{
    Map<String,Rational> coef;
    public static LinearCombination one=new LinearCombination(Rational.one);
    LinearCombination()
    {
        coef=new HashMap<>();
    }
    // The constant c (coefficient on the "1" slot).
    LinearCombination(Rational c)
    {
        coef=new HashMap<>();
        coef.put("1",c);
    }
    // The single variable var with coefficient 1.
    LinearCombination(String var)
    {
        coef=new HashMap<>();
        coef.put(var,Rational.one);
    }
    LinearCombination(String var,Rational c)
    {
        coef=new HashMap<>();
        coef.put(var,c);
    }
    // Adds c to var's coefficient (creating the entry if absent).
    public void add(String var,Rational c)
    {
        if(coef.containsKey(var))
            coef.put(var,Rational.add(coef.get(var),c));
        else
            coef.put(var,c);
    }
    public void add(LinearCombination lc)
    {
        for(String var:lc.coef.keySet())
            add(var,lc.coef.get(var));
    }
    public void minus(LinearCombination lc)
    {
        add(lc.negate());
    }
    // Scales every coefficient in place. (Replacing values for existing keys
    // during keySet iteration is safe — no structural modification.)
    public void multiplyByValue(Rational val)
    {
        for(String var:coef.keySet())
            coef.put(var,Rational.mul(coef.get(var),val));
    }
    public LinearCombination negate() //does not negate "this". returns the negate of "this".
    {
        removeZeros();
        LinearCombination lc=new LinearCombination();
        for(String var:coef.keySet())
            lc.coef.put(var,Rational.negate(coef.get(var)));
        return lc;
    }
    // Lifts this linear expression into a QuadraticCombination (each Rational
    // coefficient becomes a constant LinearCombination).
    public QuadraticCombination toQuadratic()
    {
        QuadraticCombination qc=new QuadraticCombination();
        for(String var:coef.keySet())
            qc.add(var,new LinearCombination(coef.get(var)));
        return qc;
    }
    // True if any variable is one of the internal nondet helpers (_a_*, _b_*,
    // _tmp_*).
    public boolean containsNondetVar()
    {
        for(String var:coef.keySet())
            if(var.startsWith("_a_") || var.startsWith("_b_") || var.startsWith("_tmp_"))
                return true;
        return false;
    }
    // Copy with zero coefficients dropped first.
    public LinearCombination deepCopy()
    {
        removeZeros();
        LinearCombination lc=new LinearCombination();
        for(String var:coef.keySet())
        {
            Rational c=coef.get(var);
            lc.add(var, c);
        }
        return lc;
    }
    // Multiplies this expression by the single variable var, yielding a
    // QuadraticCombination with var as the outer variable.
    public QuadraticCombination mulByVar(String var)
    {
        QuadraticCombination ret=new QuadraticCombination();
        for(String s:coef.keySet())
            ret.add(s,new LinearCombination(var,coef.get(s)));
        return ret;
    }
    // In-place multiplication; at least one operand must be constant, since the
    // product of two genuinely linear expressions would be quadratic.
    public void multiplyByLin(LinearCombination lc) throws Exception
    {
        if(!isConstant() && !lc.isConstant())
            throw new Exception("multiplication of two linear Expressions is not linear");
        if(isConstant())
        {
            Rational x=coef.get("1");
            if(x==null)
                x=Rational.zero;
            coef.clear();
            for(String var:lc.coef.keySet())
                coef.put(var,Rational.mul(lc.coef.get(var),x));
        }
        else
        {
            Rational x=lc.coef.get("1");
            multiplyByValue(x);
        }
    }
    // True when the expression has no variable other than the constant slot "1".
    public boolean isConstant()
    {
        if(coef.size()>1 || (coef.size()==1 && !coef.containsKey("1")))
            return false;
        return true;
    }
    // Drops entries whose coefficient is exactly zero.
    public void removeZeros()
    {
        Vector<String> allVars=new Vector<>();
        allVars.addAll(coef.keySet());
        for(String s:allVars)
            if(coef.get(s).equals(Rational.zero))
                coef.remove(s);
    }
    // Renames every variable v to v_prime (the constant slot is kept).
    // NOTE(review): variables already containing "_prime" are silently dropped
    // from the result rather than copied — confirm this is intended.
    public LinearCombination primize()
    {
        LinearCombination lc=new LinearCombination();
        for(String var:coef.keySet())
        {
            if(!var.equals("1"))
            {
                if(!var.contains("_prime"))
                    lc.add(var + "_prime", coef.get(var));
            }
            else
                lc.add(var,coef.get(var));
        }
        return lc;
    }
    // Inverse of primize: strips a trailing "_prime" from variable names.
    public LinearCombination unprime()
    {
        LinearCombination lc=new LinearCombination();
        for(String var:coef.keySet())
        {
            if(var.endsWith("_prime"))
            {
                String orig=var.substring(0, var.length()-6);
                lc.add(orig, coef.get(var));
            }
            else
                lc.add(var,coef.get(var));
        }
        return lc;
    }
    // Substitutes var := lc in place: var's coefficient is zeroed and r*lc is
    // added, then zero entries are cleaned up.
    void replaceVarWithLinear(String var,LinearCombination lc)
    {
        if(!coef.containsKey(var))
            return;
        Rational r=coef.get(var);
        coef.put(var,Rational.zero);
        LinearCombination tmp=lc.deepCopy();
        tmp.multiplyByValue(r);
        add(tmp);
        removeZeros();
    }
    // Evaluates the expression under the given integer assignment. Each term is
    // truncated with integer division (numerator/denominator), so non-integral
    // coefficients round toward zero per term.
    public int replaceVarsWithValue(Map<String,Integer> dict) throws Exception
    {
        int ret=0;
        for(String var:coef.keySet())
        {
            if(!dict.containsKey(var))
                throw new Exception("dictionary cannot support "+toNormalString());
            int varVal=dict.get(var);
            ret+=varVal*coef.get(var).numerator/coef.get(var).denominator;
        }
        return ret;
    }
    // Structural equality after dropping zero entries on both sides.
    // NOTE: an overload, not an override of Object.equals.
    public boolean equals(LinearCombination lc)
    {
        lc.removeZeros();
        removeZeros();
        for(String var:coef.keySet())
            if(!lc.coef.containsKey(var) || !lc.coef.get(var).equals(this.coef.get(var)))
                return false;
        for(String var:lc.coef.keySet())
            if(!this.coef.containsKey(var) || !lc.coef.get(var).equals(this.coef.get(var)))
                return false;
        return true;
    }
    // Human-readable infix rendering, e.g. "2*x - (1/2)*y"; "0" when empty.
    public String toNormalString()
    {
        removeZeros();
        if(coef.size()==0)
            return "0";
        String ret="";
        for(String s:coef.keySet())
        {
            Rational c=coef.get(s);
            if(ret.equals(""))
                ret+=c.toNormalString()+"*"+s;
            else if(coef.get(s).compareTo(Rational.zero)<0)
                ret+=" - "+(Rational.negate(c)).toNormalString()+"*"+s;
            else
                ret+=" + "+c.toNormalString()+"*"+s;
        }
        return ret;
    }
    // SMT-LIB prefix rendering, e.g. "(+  x (* 2 y))"; "0" when empty.
    public String toString()
    {
        removeZeros();
        String ret="";
        for(String s:coef.keySet())
        {
            Rational c=coef.get(s);
            if(c.equals(Rational.one))
                ret += " " + s;
            else if(s.equals("1"))
            {
                if (!c.isNonNegative())
                    ret += " (- " + Rational.negate(c) + ")";
                else
                    ret +=" "+c+" ";
            }
            else if(c.isNonNegative())
                ret += " (* " + (c) + " " + s + ")";
            else
                ret += " (* (- " + Rational.negate(c) + ") " + s + ")";
        }
        if(ret.equals(""))
            return "0";
        if(coef.size()>1)
            return "(+ "+ret+")";
        else
            return ret;
    }
}
| 6,870 | 25.125475 | 94 | java |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.