| repo (string, lengths 1-191, nullable ⌀) | file (string, lengths 23-351) | code (string, lengths 0-5.32M) | file_length (int64, 0-5.32M) | avg_line_length (float64, 0-2.9k) | max_line_length (int64, 0-288k) | extension_type (stringclasses, 1 value) |
|---|---|---|---|---|---|---|
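The numeric columns can be recomputed from each row's code value. The sketch below is a hypothetical illustration, not part of the dataset tooling: the class name RowStats, the placeholder code string, and the counting rules (characters rather than bytes, avg_line_length taken as file_length divided by the line count) are assumptions, not something the dataset documents.

import java.util.Arrays;

// Hypothetical sketch: recomputes the numeric columns from a row's "code" value,
// assuming file_length counts characters, max_line_length is the longest single
// line, and avg_line_length is file_length divided by the number of lines.
public class RowStats {
    public static void main(String[] args) {
        String code = "package demo;\n\npublic class Demo {\n}\n"; // placeholder row content
        String[] lines = code.split("\n", -1);
        int fileLength = code.length();                                                    // file_length
        int maxLineLength = Arrays.stream(lines).mapToInt(String::length).max().orElse(0); // max_line_length
        double avgLineLength = (double) fileLength / lines.length;                         // avg_line_length
        System.out.printf("%d %.6f %d%n", fileLength, avgLineLength, maxLineLength);
    }
}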
| repositoryminer | repositoryminer-master/repositoryminer-metrics/src/main/java/org/repositoryminer/metrics/codemetric/NOAV.java |
package org.repositoryminer.metrics.codemetric;
import org.repositoryminer.metrics.ast.AST;
import org.repositoryminer.metrics.ast.AbstractMethod;
import org.repositoryminer.metrics.ast.AbstractType;
import org.repositoryminer.metrics.report.ClassReport;
import org.repositoryminer.metrics.report.FileReport;
import org.repositoryminer.metrics.report.MethodReport;
import org.repositoryminer.metrics.report.ProjectReport;
public class NOAV extends CodeMetric {
private static final CodeMetricId[] REQUIRED_METRICS = { CodeMetricId.LVAR, CodeMetricId.PAR };
public NOAV() {
super.id = CodeMetricId.NOAV;
super.requiredMetrics = REQUIRED_METRICS;
}
@Override
public void calculate(AST ast, FileReport fileReport, ProjectReport projectReport) {
for (AbstractType type : ast.getTypes()) {
ClassReport cr = fileReport.getClass(type.getName());
for (AbstractMethod method : type.getMethods()) {
MethodReport mr = cr.getMethodBySignature(method.getName());
mr.getMetricsReport().setCodeMetric(CodeMetricId.NOAV, calculate(method, mr));
}
}
}
public int calculate(AbstractMethod method, MethodReport methodReport) {
int accessFields = LAA.countAccessedFields(method);
int nVar = methodReport.getMetricsReport().getCodeMetric(CodeMetricId.LVAR, Integer.class);
int nParams = method.getParameters().size();
return accessFields + nVar + nParams;
}
@Override
public void clean(ProjectReport projectReport) {}
}
| 1,450 | 34.390244 | 96 | java |

| repositoryminer | repositoryminer-master/repositoryminer-metrics/src/main/java/org/repositoryminer/metrics/codemetric/AMW.java |
package org.repositoryminer.metrics.codemetric;
import java.math.BigDecimal;
import org.repositoryminer.metrics.ast.AST;
import org.repositoryminer.metrics.ast.AbstractType;
import org.repositoryminer.metrics.report.ClassReport;
import org.repositoryminer.metrics.report.FileReport;
import org.repositoryminer.metrics.report.ProjectReport;
public class AMW extends CodeMetric {
private static final CodeMetricId[] REQUIRED_METRICS = {CodeMetricId.WMC, CodeMetricId.NOM};
public AMW() {
super.id = CodeMetricId.AMW;
super.requiredMetrics = REQUIRED_METRICS;
}
@Override
public void calculate(AST ast, FileReport fileReport, ProjectReport projectReport) {
for (AbstractType type : ast.getTypes()) {
ClassReport cr = fileReport.getClass(type.getName());
int wmc = cr.getMetricsReport().getCodeMetric(CodeMetricId.WMC, Integer.class);
int nom = cr.getMetricsReport().getCodeMetric(CodeMetricId.NOM, Integer.class);
cr.getMetricsReport().setCodeMetric(CodeMetricId.AMW, calculate(wmc, nom));
}
}
public double calculate(int wmc, int nom) {
if (nom == 0) {
return 0l;
}
return new BigDecimal(wmc * 1.0 / nom).setScale(2, BigDecimal.ROUND_HALF_UP).doubleValue();
}
@Override
public void clean(ProjectReport projectReport) {}
}
| 1,269 | 30.75 | 93 | java |

| repositoryminer | repositoryminer-master/repositoryminer-metrics/src/main/java/org/repositoryminer/metrics/codemetric/ATFD.java |
package org.repositoryminer.metrics.codemetric;
import java.util.HashSet;
import java.util.Set;
import org.repositoryminer.metrics.ast.AST;
import org.repositoryminer.metrics.ast.AbstractFieldAccess;
import org.repositoryminer.metrics.ast.AbstractMethod;
import org.repositoryminer.metrics.ast.AbstractMethodInvocation;
import org.repositoryminer.metrics.ast.AbstractStatement;
import org.repositoryminer.metrics.ast.AbstractType;
import org.repositoryminer.metrics.ast.NodeType;
import org.repositoryminer.metrics.report.ClassReport;
import org.repositoryminer.metrics.report.FileReport;
import org.repositoryminer.metrics.report.MethodReport;
import org.repositoryminer.metrics.report.ProjectReport;
public class ATFD extends CodeMetric {
public ATFD() {
super.id = CodeMetricId.ATFD;
}
@Override
public void calculate(AST ast, FileReport fileReport, ProjectReport projectReport) {
for (AbstractType type : ast.getTypes()) {
ClassReport cr = fileReport.getClass(type.getName());
int atfdClass = 0;
for (AbstractMethod method : type.getMethods()) {
int atfdMethod = calculate(type, method);
atfdClass += atfdMethod;
MethodReport mr = cr.getMethodBySignature(method.getName());
mr.getMetricsReport().setCodeMetric(CodeMetricId.ATFD, atfdMethod);
}
cr.getMetricsReport().setCodeMetric(CodeMetricId.ATFD, atfdClass);
}
}
public int calculate(AbstractType currType, AbstractMethod method) {
Set<String> accessedFields = new HashSet<String>();
for (AbstractStatement stmt : method.getStatements()) {
String field = null;
String declarringClass = null;
if (stmt.getNodeType() == NodeType.FIELD_ACCESS) {
AbstractFieldAccess fieldAccess = (AbstractFieldAccess) stmt;
field = fieldAccess.getExpression();
declarringClass = fieldAccess.getDeclaringClass();
} else if (stmt.getNodeType() == NodeType.METHOD_INVOCATION) {
AbstractMethodInvocation methodInvocation = (AbstractMethodInvocation) stmt;
if (!methodInvocation.isAccessor()) {
continue;
}
field = methodInvocation.getAccessedField();
declarringClass = methodInvocation.getDeclaringClass();
} else {
continue;
}
if (!currType.getName().equals(declarringClass)) {
accessedFields.add(declarringClass + '.' + field);
}
}
return accessedFields.size();
}
@Override
public void clean(ProjectReport projectReport) {
}
}
| 2,409 | 31.567568 | 85 | java |

| repositoryminer | repositoryminer-master/repositoryminer-metrics/src/main/java/org/repositoryminer/metrics/codemetric/WMC.java |
package org.repositoryminer.metrics.codemetric;
import org.repositoryminer.metrics.ast.AST;
import org.repositoryminer.metrics.ast.AbstractMethod;
import org.repositoryminer.metrics.ast.AbstractType;
import org.repositoryminer.metrics.report.ClassReport;
import org.repositoryminer.metrics.report.FileReport;
import org.repositoryminer.metrics.report.MethodReport;
import org.repositoryminer.metrics.report.ProjectReport;
public class WMC extends CodeMetric {
private static final CodeMetricId[] REQUIRED_METRICS = { CodeMetricId.CYCLO };
public WMC() {
super.id = CodeMetricId.WMC;
super.requiredMetrics = REQUIRED_METRICS;
}
@Override
public void calculate(AST ast, FileReport fileReport, ProjectReport projectReport) {
for (AbstractType type : ast.getTypes()) {
ClassReport cr = fileReport.getClass(type.getName());
int wmc = 0;
for (AbstractMethod method : type.getMethods()) {
MethodReport mr = cr.getMethodBySignature(method.getName());
wmc += mr.getMetricsReport().getCodeMetric(CodeMetricId.CYCLO, Integer.class);
}
cr.getMetricsReport().setCodeMetric(CodeMetricId.WMC, wmc);
}
}
@Override
public void clean(ProjectReport projectReport) {}
}
| 1,201 | 30.631579 | 85 | java |

| repositoryminer | repositoryminer-master/repositoryminer-metrics/src/main/java/org/repositoryminer/metrics/codemetric/CodeMetricId.java |
package org.repositoryminer.metrics.codemetric;
public enum CodeMetricId {
ATFD, CYCLO, LVAR, MAXNESTING, MLOC, NOM, NOA, NOAV, PAR, LOC, TCC, WMC, AMW, DIT, NProtM, BOvR, BUR, WOC, NOPA, NOAM, LAA, FDP;
}
| 209 | 29 | 129 | java |

| repositoryminer | repositoryminer-master/repositoryminer-metrics/src/main/java/org/repositoryminer/metrics/codemetric/MetricFactory.java |
package org.repositoryminer.metrics.codemetric;
public class MetricFactory {
public static CodeMetric getMetric(CodeMetricId id) {
switch (id) {
case AMW:
return new AMW();
case ATFD:
return new ATFD();
case CYCLO:
return new CYCLO();
case FDP:
return new FDP();
case LAA:
return new LAA();
case LOC:
return new LOC();
case LVAR:
return new LVAR();
case MAXNESTING:
return new MAXNESTING();
case NOA:
return new NOA();
case NOAM:
return new NOAM();
case NOAV:
return new NOAV();
case NOM:
return new NOM();
case NOPA:
return new NOPA();
case NProtM:
return new NProtM();
case PAR:
return new PAR();
case TCC:
return new TCC();
case WMC:
return new WMC();
case WOC:
return new WOC();
default :
return null;
}
}
}
| 815 | 16 | 54 | java |

| repositoryminer | repositoryminer-master/repositoryminer-excomment/src/main/java/org/repositoryminer/excomment/ExCommentConfig.java |
package org.repositoryminer.excomment;
public class ExCommentConfig {
private String reference;
private String commentsCSV;
private String patternsCSV;
private String heuristicsCSV;
private char delimiter = ';';
public ExCommentConfig() {
}
public ExCommentConfig(String reference, String commentsCSV, String patternsCSV, String heuristicsCSV) {
this.reference = reference;
this.commentsCSV = commentsCSV;
this.patternsCSV = patternsCSV;
this.heuristicsCSV = heuristicsCSV;
}
public boolean isValid() {
return isValidValue(reference) && isValidValue(commentsCSV) && isValidValue(heuristicsCSV)
&& isValidValue(patternsCSV) && delimiter != '\u0000';
}
private boolean isValidValue(String value) {
return value != null && value.length() > 0;
}
public String getReference() {
return reference;
}
public void setReference(String reference) {
this.reference = reference;
}
public String getCommentsCSV() {
return commentsCSV;
}
public void setCommentsCSV(String commentsCSV) {
this.commentsCSV = commentsCSV;
}
public String getPatternsCSV() {
return patternsCSV;
}
public void setPatternsCSV(String patternsCSV) {
this.patternsCSV = patternsCSV;
}
public String getHeuristicsCSV() {
return heuristicsCSV;
}
public void setHeuristicsCSV(String heuristicsCSV) {
this.heuristicsCSV = heuristicsCSV;
}
public char getDelimiter() {
return delimiter;
}
public void setDelimiter(char delimiter) {
this.delimiter = delimiter;
}
}
| 1,504 | 20.197183 | 105 | java |

| repositoryminer | repositoryminer-master/repositoryminer-excomment/src/main/java/org/repositoryminer/excomment/ExCommentCSVReader.java |
package org.repositoryminer.excomment;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;
import org.apache.commons.io.FilenameUtils;
import org.repositoryminer.excomment.model.Comment;
import org.repositoryminer.excomment.model.Heuristic;
import org.repositoryminer.excomment.model.Pattern;
public class ExCommentCSVReader {
private static final String[] COMMENTS_HEADER = { "idcomment", "total_pattern", "total_heuristic", "total_score",
"comment", "path", "class", "method" };
private static final String[] PATTERNS_HEADER = { "idcomment", "pattern", "pattern_score", "pattern_class", "theme",
"tdtype" };
private static final String[] HEURISTICS_HEADER = { "idcomment", "heuristic_description", "heuristic_status",
"heuristic_score" };
// the comment id is used as key
private Map<Integer, Comment> commentsMap = new HashMap<Integer, Comment>();
// keeps the relationship between files and comments
private Map<String, List<Integer>> filesMap = new HashMap<String, List<Integer>>();
private ExCommentConfig config;
public ExCommentCSVReader(ExCommentConfig config) {
this.config = config;
}
public Map<Integer, Comment> getCommentsMap() {
return commentsMap;
}
public Map<String, List<Integer>> getFilesMap() {
return filesMap;
}
public void readCSVs() throws IOException {
readComments();
readHeuristics();
readPatterns();
}
private void readComments() throws IOException {
List<CSVRecord> records = readCSV(COMMENTS_HEADER, config.getCommentsCSV());
for (CSVRecord record : records) {
Comment comment = new Comment(Integer.parseInt(record.get(0)),
Double.parseDouble(record.get(1).replaceAll(",", ".")),
Double.parseDouble(record.get(2).replaceAll(",", ".")),
Double.parseDouble(record.get(3).replaceAll(",", ".")), record.get(4), record.get(6), record.get(7));
String filename = FilenameUtils.normalize(record.get(5), true);
if (!filesMap.containsKey(filename)) {
filesMap.put(filename, new ArrayList<Integer>());
}
commentsMap.put(comment.getId(), comment);
filesMap.get(filename).add(comment.getId());
}
}
private void readPatterns() throws IOException {
List<CSVRecord> records = readCSV(PATTERNS_HEADER, config.getPatternsCSV());
for (CSVRecord record : records) {
Pattern pattern = new Pattern(record.get(1), Double.parseDouble(record.get(2).replaceAll(",", ".")),
record.get(3), record.get(4), record.get(5));
Comment comment = commentsMap.get(Integer.parseInt(record.get(0)));
if (comment == null) {
continue;
}
comment.getPatterns().add(pattern);
}
}
private void readHeuristics() throws IOException {
List<CSVRecord> records = readCSV(HEURISTICS_HEADER, config.getHeuristicsCSV());
for (CSVRecord record : records) {
Heuristic heuristic = new Heuristic(record.get(1), Integer.parseInt(record.get(2)),
Double.parseDouble(record.get(3).replaceAll(",", ".")));
Comment comment = commentsMap.get(Integer.parseInt(record.get(0)));
if (comment == null) {
continue;
}
comment.getHeuristics().add(heuristic);
}
}
private List<CSVRecord> readCSV(String[] header, String filename) throws IOException {
FileReader fileReader = new FileReader(filename);
CSVFormat format = CSVFormat.DEFAULT.withDelimiter(config.getDelimiter()).withHeader(header)
.withSkipHeaderRecord();
CSVParser csvParser = new CSVParser(fileReader, format);
List<CSVRecord> records = csvParser.getRecords();
fileReader.close();
csvParser.close();
return records;
}
}
| 3,769 | 29.650407 | 117 | java |

| repositoryminer | repositoryminer-master/repositoryminer-excomment/src/main/java/org/repositoryminer/excomment/RepositoryMinerExComment.java |
package org.repositoryminer.excomment;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map.Entry;
import org.bson.Document;
import org.repositoryminer.RepositoryMinerException;
import org.repositoryminer.domain.Commit;
import org.repositoryminer.excomment.model.Comment;
import org.repositoryminer.excomment.persistence.ExCommentDAO;
import org.repositoryminer.plugin.MiningPlugin;
import org.repositoryminer.scm.ISCM;
import org.repositoryminer.scm.SCMFactory;
import org.repositoryminer.util.StringUtils;
public class RepositoryMinerExComment extends MiningPlugin<ExCommentConfig> {
public RepositoryMinerExComment(String repositoryKey) {
super(repositoryKey);
}
@Override
public void mine(ExCommentConfig config) {
if (config == null || !config.isValid()) {
throw new RepositoryMinerException(
"Invalid configuration, set the CSV files, the delimiter and a reference correctly.");
}
ISCM scm = SCMFactory.getSCM(repository.getScm());
scm.open(repository.getPath());
Commit commit = scm.resolve(config.getReference());
scm.close();
ExCommentDAO dao = new ExCommentDAO();
dao.deleteByCommit(commit.getHash());
ExCommentCSVReader csvReader = new ExCommentCSVReader(config);
try {
csvReader.readCSVs();
} catch (IOException e) {
throw new RepositoryMinerException("An IO error had occurred while reading the files.");
}
List<Document> documents = new ArrayList<Document>(csvReader.getFilesMap().size());
for (Entry<String, List<Integer>> entry : csvReader.getFilesMap().entrySet()) {
Document doc = new Document();
doc.append("reference", config.getReference()).
append("commit", commit.getHash()).
append("commit_date", commit.getCommitterDate()).
append("repository", repository.getId()).
append("filename", entry.getKey()).
append("filehash", StringUtils.encodeToCRC32(entry.getKey()));
List<Comment> commentsAux = new ArrayList<Comment>(entry.getValue().size());
for (Integer i : entry.getValue()) {
commentsAux.add(csvReader.getCommentsMap().get(i));
}
doc.append("comments", Comment.toDocumentList(commentsAux));
documents.add(doc);
}
dao.insertMany(documents);
}
}
| 2,494 | 33.652778 | 100 | java |

| repositoryminer | repositoryminer-master/repositoryminer-excomment/src/main/java/org/repositoryminer/excomment/persistence/ExCommentDAO.java |
package org.repositoryminer.excomment.persistence;
import java.util.List;
import org.bson.Document;
import org.bson.conversions.Bson;
import org.repositoryminer.persistence.GenericDAO;
import com.mongodb.BasicDBObject;
import com.mongodb.client.model.Filters;
public class ExCommentDAO extends GenericDAO {
private static final String COLLECTION_NAME = "excomment_comments_analysis";
public ExCommentDAO() {
super(COLLECTION_NAME);
}
public Document findByFile(long filehash, String commit, Bson projection) {
Bson clause1 = new BasicDBObject("filehash", filehash);
Bson clause2 = new BasicDBObject("commit", commit);
return findOne(Filters.and(clause1, clause2), projection);
}
public void deleteByCommit(String hash) {
deleteMany(Filters.eq("commit", hash));
}
public List<Document> findByCommit(String hash, Bson projection) {
return findMany(Filters.eq("commit", hash), projection);
}
}
| 925 | 25.457143 | 77 | java |

| repositoryminer | repositoryminer-master/repositoryminer-excomment/src/main/java/org/repositoryminer/excomment/model/Comment.java |
package org.repositoryminer.excomment.model;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.bson.Document;
public class Comment {
private int id;
private double totalPattern;
private double totalHeuristic;
private double totalScore;
private String comment;
private String clazz;
private String method;
private List<Pattern> patterns = new ArrayList<Pattern>();
private List<Heuristic> heuristics = new ArrayList<Heuristic>();
public static List<Document> toDocumentList(Collection<Comment> comments) {
List<Document> docs = new ArrayList<Document>();
if (comments != null) {
for (Comment comment : comments) {
docs.add(comment.toDocument());
}
}
return docs;
}
public Document toDocument() {
Document doc = new Document();
doc.append("id", id).
append("total_pattern", totalPattern).
append("total_heuristic", totalHeuristic).
append("total_score", totalScore).
append("comment", comment).
append("class", clazz).
append("method", method).
append("patterns", Pattern.toDocumentList(patterns)).
append("heuristics", Heuristic.toDocumentList(heuristics));
return doc;
}
public Comment() { }
public Comment(int id, double totalPattern, double totalHeuristic, double totalScore, String comment,
String clazz, String method) {
this.id = id;
this.totalPattern = totalPattern;
this.totalHeuristic = totalHeuristic;
this.totalScore = totalScore;
this.comment = comment;
this.clazz = clazz;
this.method = method;
}
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public double getTotalPattern() {
return totalPattern;
}
public void setTotalPattern(double totalPattern) {
this.totalPattern = totalPattern;
}
public double getTotalHeuristic() {
return totalHeuristic;
}
public void setTotalHeuristic(double totalHeuristic) {
this.totalHeuristic = totalHeuristic;
}
public double getTotalScore() {
return totalScore;
}
public void setTotalScore(double totalScore) {
this.totalScore = totalScore;
}
public String getComment() {
return comment;
}
public void setComment(String comment) {
this.comment = comment;
}
public String getClazz() {
return clazz;
}
public void setClazz(String clazz) {
this.clazz = clazz;
}
public String getMethod() {
return method;
}
public void setMethod(String method) {
this.method = method;
}
public List<Pattern> getPatterns() {
return patterns;
}
public void setPatterns(List<Pattern> patterns) {
this.patterns = patterns;
}
public List<Heuristic> getHeuristics() {
return heuristics;
}
public void setHeuristics(List<Heuristic> heuristics) {
this.heuristics = heuristics;
}
}
| 2,755 | 20.038168 | 102 | java |

| repositoryminer | repositoryminer-master/repositoryminer-excomment/src/main/java/org/repositoryminer/excomment/model/Heuristic.java |
package org.repositoryminer.excomment.model;
import java.util.ArrayList;
import java.util.List;
import org.bson.Document;
public class Heuristic {
private String description;
private int status;
private double score;
public static List<Document> toDocumentList(List<Heuristic> heuristics) {
List<Document> docs = new ArrayList<Document>();
if (heuristics != null) {
for (Heuristic heuristic : heuristics) {
docs.add(heuristic.toDocument());
}
}
return docs;
}
public Document toDocument() {
Document doc = new Document();
doc.append("description", description).
append("status", status).
append("score", score);
return doc;
}
public Heuristic() { }
public Heuristic(String description, int status, double score) {
this.description = description;
this.status = status;
this.score = score;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public int getStatus() {
return status;
}
public void setStatus(int status) {
this.status = status;
}
public double getScore() {
return score;
}
public void setScore(double score) {
this.score = score;
}
}
| 1,226 | 17.876923 | 74 | java |

| repositoryminer | repositoryminer-master/repositoryminer-excomment/src/main/java/org/repositoryminer/excomment/model/Pattern.java |
package org.repositoryminer.excomment.model;
import java.util.ArrayList;
import java.util.List;
import org.bson.Document;
public class Pattern {
private String name;
private double score;
private String clazz;
private String theme;
private String tdType;
public static List<Document> toDocumentList(List<Pattern> patterns) {
List<Document> docs = new ArrayList<Document>();
if (patterns != null) {
for (Pattern pattern : patterns) {
docs.add(pattern.toDocument());
}
}
return docs;
}
public Document toDocument() {
Document doc = new Document();
doc.append("name", name).
append("score", score).
append("class", clazz).
append("theme", theme).
append("tdtype", tdType);
return doc;
}
public Pattern() { }
public Pattern(String name, double score, String clazz, String theme, String tdType) {
this.name = name;
this.score = score;
this.clazz = clazz;
this.theme = theme;
this.tdType = tdType;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public double getScore() {
return score;
}
public void setScore(double score) {
this.score = score;
}
public String getClazz() {
return clazz;
}
public void setClazz(String clazz) {
this.clazz = clazz;
}
public String getTheme() {
return theme;
}
public void setTheme(String theme) {
this.theme = theme;
}
public String getTdType() {
return tdType;
}
public void setTdType(String tdType) {
this.tdType = tdType;
}
}
| 1,523 | 16.517241 | 87 | java |

| null | Project_CodeNet-main/tools/analysis-graph-generator/src/main/java/com/ibm/wala/codeNet/DFSOrderGenerator.java |
package com.ibm.wala.codeNet;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.util.Iterator;
import java.util.Map;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.json.JSONTokener;
import com.ibm.wala.util.collections.HashMapFactory;
import com.ibm.wala.util.collections.NonNullSingletonIterator;
import com.ibm.wala.util.graph.Graph;
import com.ibm.wala.util.graph.traverse.DFS;
public class DFSOrderGenerator {
public static void main(String... args) throws JSONException, FileNotFoundException {
JSONObject parseTreeJson =
(JSONObject)new JSONTokener(new FileInputStream(System.getProperty("parseTreeFile")))
.nextValue();
Graph<JSONObject> parseTree = new WalaSPTGraph(parseTreeJson);
Map<JSONObject,Integer> dfs = HashMapFactory.make();
JSONArray nodes = parseTreeJson
.getJSONObject("graph")
.getJSONArray("nodes");
int dfsNumber = 0;
Iterator<JSONObject> search = DFS.iterateFinishTime(parseTree, new NonNullSingletonIterator<JSONObject>(nodes.getJSONObject(0)));
while (search.hasNext()) {
dfs.put(search.next(), dfsNumber++);
}
nodes.forEach(n -> {
JSONObject node = (JSONObject) n;
System.out.println(node.getInt("id") + " " + dfs.get(node));
});
}
}
| 1,308 | 27.456522 | 131 | java |

| null | Project_CodeNet-main/tools/analysis-graph-generator/src/main/java/com/ibm/wala/codeNet/GraphAugmentor.java |
package com.ibm.wala.codeNet;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Map;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.json.JSONTokener;
import com.ibm.wala.cast.java.ecj.util.SourceDirCallGraph;
import com.ibm.wala.cast.java.ipa.callgraph.JavaSourceAnalysisScope;
import com.ibm.wala.cast.java.ipa.modref.AstJavaModRef;
import com.ibm.wala.cast.loader.AstMethod;
import com.ibm.wala.cast.loader.AstMethod.DebuggingInformation;
import com.ibm.wala.cast.tree.CAstSourcePositionMap.Position;
import com.ibm.wala.cast.util.SourceBuffer;
import com.ibm.wala.ipa.callgraph.CGNode;
import com.ibm.wala.ipa.callgraph.CallGraphBuilderCancelException;
import com.ibm.wala.ipa.callgraph.propagation.InstanceKey;
import com.ibm.wala.ipa.callgraph.propagation.PropagationCallGraphBuilder;
import com.ibm.wala.ipa.cha.ClassHierarchyException;
import com.ibm.wala.ipa.slicer.Dependency;
import com.ibm.wala.ipa.slicer.NormalStatement;
import com.ibm.wala.ipa.slicer.ParamCallee;
import com.ibm.wala.ipa.slicer.ParamCaller;
import com.ibm.wala.ipa.slicer.SDG;
import com.ibm.wala.ipa.slicer.Slicer;
import com.ibm.wala.ipa.slicer.Statement;
import com.ibm.wala.ssa.SSAInstruction;
import com.ibm.wala.util.collections.HashMapFactory;
import com.ibm.wala.util.graph.Graph;
import com.ibm.wala.util.graph.GraphSlicer;
public class GraphAugmentor {
public static void main(String... args) throws ClassHierarchyException, IllegalArgumentException, CallGraphBuilderCancelException, IOException {
NodeFinder nf = new NodeFinder(System.getProperty("tokenFile"), System.getProperty("parseTreeFile"));
(new SourceDirCallGraph()).doit(args, (cg, builder, time) -> {
SDG<? extends InstanceKey> sdg =
new SDG<>(
cg,
((PropagationCallGraphBuilder)builder).getPointerAnalysis(),
new AstJavaModRef<>(),
Slicer.DataDependenceOptions.NO_HEAP_NO_EXCEPTIONS,
Slicer.ControlDependenceOptions.NO_EXCEPTIONAL_EDGES);
Graph<Statement> srcSdg = GraphSlicer.prune(sdg,
n -> n.getNode().getMethod().getReference().getDeclaringClass().getClassLoader() == JavaSourceAnalysisScope.SOURCE);
Map<Dependency, JSONArray> sdgEdgesForSpt = HashMapFactory.make();
srcSdg.forEach(srcNode -> {
JSONObject srcJson = findNodeForStatement(nf, srcNode);
if (srcJson != null) {
srcSdg.getSuccNodes(srcNode).forEachRemaining(dstNode -> {
JSONObject dstJson = findNodeForStatement(nf, dstNode);
if (dstJson != null && !srcJson.equals(dstJson)) {
JSONArray ea = new JSONArray(new int[] {srcJson.getInt("id"), dstJson.getInt("id")});
JSONObject e = new JSONObject();
e.put("between", ea);
sdg.getEdgeLabels(srcNode, dstNode).forEach(l -> {
if (! sdgEdgesForSpt.containsKey(l)) {
sdgEdgesForSpt.put(l, new JSONArray());
}
sdgEdgesForSpt.get(l).put(e);
});
}
});
}
});
sdgEdgesForSpt.entrySet().forEach(es -> {
try {
JSONObject json = ((JSONObject)new JSONTokener(new FileInputStream(System.getProperty("parseTreeFile"))).nextValue());
json.getJSONObject("graph").put("edges", es.getValue());
json.getJSONObject("graph").put("num-of-edges", es.getValue().length());
try (FileWriter f = new FileWriter(System.getProperty("parseTreeFile") + "." + es.getKey())) {
json.write(f, 2, 0);
}
} catch (JSONException | IOException e) {
assert false : e;
}
});
});
}
private static JSONObject findNodeForStatement(NodeFinder nf, Statement srcNode) {
Position srcPos = getPosition(srcNode);
JSONObject srcJson = nf.getCoveringNode(srcPos.getFirstOffset(), srcPos.getLastOffset());
try {
System.err.println(srcJson.getInt("id") + " : " + new SourceBuffer(srcPos) + " : " + srcNode);
} catch (IOException e) {
}
return srcJson;
}
static Position getPosition(Statement srcNode) {
Position srcPos;
CGNode srcCG = srcNode.getNode();
DebuggingInformation debugInfo = ((AstMethod)srcCG.getMethod()).debugInfo();
if (srcNode.getKind() == Statement.Kind.NORMAL) {
SSAInstruction srcInst = ((NormalStatement)srcNode).getInstruction();
srcPos = debugInfo.getInstructionPosition(srcInst.iIndex());
} else if (srcNode.getKind() == Statement.Kind.PARAM_CALLER) findParamCaller: {
SSAInstruction call = ((ParamCaller)srcNode).getInstruction();
int vn = ((ParamCaller)srcNode).getValueNumber();
for(int i = 0; i < call.getNumberOfUses(); i++) {
if (call.getUse(i) == vn) {
srcPos = debugInfo.getOperandPosition(call.iIndex(), i);
break findParamCaller;
}
}
assert false;
return null;
} else if (srcNode.getKind() == Statement.Kind.PARAM_CALLEE) {
int arg = ((ParamCallee)srcNode).getValueNumber() - 1;
srcPos = debugInfo.getParameterPosition(arg);
} else {
return null;
}
return srcPos;
}
}
| 5,034 | 37.143939 | 145 | java |

| null | Project_CodeNet-main/tools/analysis-graph-generator/src/main/java/com/ibm/wala/codeNet/GraphGenerator.java |
package com.ibm.wala.codeNet;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Comparator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map.Entry;
import java.util.SortedSet;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.TreeSet;
import java.util.function.BiFunction;
import java.util.function.BinaryOperator;
import java.util.stream.StreamSupport;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import com.ibm.wala.cast.java.ecj.util.SourceDirCallGraph;
import com.ibm.wala.cast.java.ipa.callgraph.JavaSourceAnalysisScope;
import com.ibm.wala.ipa.callgraph.CallGraphBuilderCancelException;
import com.ibm.wala.ipa.cha.ClassHierarchyException;
import com.ibm.wala.ssa.SSACFG;
import com.ibm.wala.util.graph.NumberedGraph;
public class GraphGenerator {
static class OrderedJSONObject extends JSONObject {
private final List<String> keys = new LinkedList<>();
@Override
protected SortedSet<Entry<String, Object>> entrySet() {
TreeSet<Entry<String, Object>> set = new TreeSet<Entry<String, Object>>(new Comparator<Entry<String, Object>>() {
@Override
public int compare(Entry<String, Object> o1, Entry<String, Object> o2) {
return keys.indexOf(o2.getKey()) - keys.indexOf(o1.getKey());
}
});
set.addAll(super.entrySet());
return set;
}
@Override
public JSONObject put(String key, Object value) throws JSONException {
if (keys.contains(key)) {
keys.remove(key);
}
keys.add(0, key);
return super.put(key, value);
}
}
@FunctionalInterface
interface NodeLabel<T> {
String label(T n);
}
@FunctionalInterface
interface EdgeLabel<T> {
String label(T a, T b);
}
private static final BinaryOperator<JSONArray> arrayAppend = (a1, a2) -> {
JSONArray a = new JSONArray();
a1.forEach(x -> a.put(x));
a2.forEach(x -> a.put(x));
return a;
};
private static final BiFunction<JSONArray, JSONObject, JSONArray> arrayAdd = (a, jn) -> {
a.put(jn); return a;
};
public static <T> JSONObject toJSON(String method, NumberedGraph<T> G, T root, NodeLabel<T> nodeLabels, EdgeLabel<T> edgeLabels) {
JSONObject outer = new OrderedJSONObject();
JSONObject jg = new OrderedJSONObject();
outer.put("graph", jg);
outer.put("method", method);
jg.put("version", "1.0");
jg.put("directed", true);
jg.put("root", G.getNumber(root));
jg.put("nodes",
G.stream().map(n -> {
JSONObject jn = new OrderedJSONObject();
jn.put("id", G.getNumber(n));
jn.put("label", nodeLabels.label(n));
return jn;
}).reduce(new JSONArray(), arrayAdd, arrayAppend));
jg.put("edges",
G.stream().flatMap(n -> {
return StreamSupport.stream(
Spliterators.spliterator(G.getSuccNodes(n), G.getSuccNodeCount(n), Spliterator.ORDERED),
false).map(s -> {
JSONObject eo = new OrderedJSONObject();
JSONArray edge = new JSONArray();
edge.put(G.getNumber(n));
edge.put(G.getNumber(s));
eo.put("between", edge);
eo.put("label", edgeLabels.label(n, s));
return eo;
});
}).reduce(new JSONArray(), arrayAdd, arrayAppend));
return outer;
}
public static void main(String... args) throws ClassHierarchyException, IllegalArgumentException, CallGraphBuilderCancelException, IOException {
(new SourceDirCallGraph()).doit(args, (cg, builder, time) -> {
cg.stream()
.filter(n -> n.getMethod().getDeclaringClass().getClassLoader().getReference().equals(JavaSourceAnalysisScope.SOURCE))
.map(n -> {
SSACFG cfg = n.getIR().getControlFlowGraph();
return toJSON(n.getMethod().getSignature(),
cfg,
cfg.entry(),
bb -> { return String.valueOf(bb.getNumber()); },
(bb, sb) -> { return bb.getNumber() + " -> " + sb.getNumber(); });
}).forEach(jcfg -> {
String methodName = jcfg.getString("method");
System.out.println("method " + methodName);
try {
jcfg.write(new FileWriter("/tmp/" + methodName.replace('/', '_').replace(' ', '_').replace('.', '_') + ".json"), 2, 0).flush();
} catch (JSONException | IOException e) {
e.printStackTrace();
}
});
}
);
}
}
| 4,238 | 29.941606 | 145 | java |

| null | Project_CodeNet-main/tools/analysis-graph-generator/src/main/java/com/ibm/wala/codeNet/NodeFinder.java |
package com.ibm.wala.codeNet;
import java.io.FileReader;
import java.io.IOException;
import java.util.Comparator;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;
import org.json.JSONException;
import org.json.JSONObject;
import com.ibm.wala.util.collections.HashMapFactory;
import com.ibm.wala.util.collections.HashSetFactory;
import com.ibm.wala.util.collections.Pair;
public class NodeFinder {
private final Map<Integer, Pair<Integer, Integer>> tokenMap = HashMapFactory.make();
private final Map<JSONObject, Pair<Integer, Integer>> locations = HashMapFactory.make();
private final SortedMap<Integer,Set<JSONObject>> offsetToNodes = new TreeMap<>(new Comparator<Integer>() {
@Override
public int compare(Integer o1, Integer o2) {
return o1 - o2;
}
});
private WalaSPTGraph parseTree;
NodeFinder(String tokenFile, String parseTreeFile) throws JSONException, IOException {
CSVParser csvParser = CSVFormat.DEFAULT.withHeader().parse(new FileReader(tokenFile));
for (CSVRecord token : csvParser) {
int id = Integer.valueOf(token.get("seqnr"));
int startOffset = Integer.valueOf(token.get("start"));
int endOffsetInclusive = Integer.valueOf(token.get("stop"));
tokenMap.put(id, Pair.make(startOffset, endOffsetInclusive));
}
parseTree = new WalaSPTGraph(parseTreeFile);
}
private Pair<Integer,Integer> location(JSONObject node) {
Pair<Integer,Integer> result;
if (locations.containsKey(node)) {
return locations.get(node);
} else {
if (node.getString("node-type").equals("Token")) {
result = tokenMap.get(node.getInt("token-id"));
} else {
int start = Integer.MAX_VALUE;
int end = Integer.MIN_VALUE;
Iterator<JSONObject> ss = parseTree.getSuccNodes(node);
while (ss.hasNext()) {
Pair<Integer,Integer> s = location(ss.next());
if (s.fst < start) {
start = s.fst;
}
if (s.snd > end) {
end = s.snd;
}
}
result = Pair.make(start, end);
}
locations.put(node, result);
for(int i = result.fst; i <= result.snd; i++) {
if (! offsetToNodes.containsKey(i)) {
offsetToNodes.put(i, HashSetFactory.make());
}
offsetToNodes.get(i).add(node);
}
return result;
}
}
public JSONObject getCoveringNode(int startOffset, int endOffset) {
JSONObject node = parseTree.getNode(parseTree.root);
Pair<Integer,Integer> loc = location(node);
descend: while(loc.fst <= startOffset && loc.snd >= endOffset) {
Iterator<JSONObject> children = parseTree.getSuccNodes(node);
while (children.hasNext()) {
JSONObject c = children.next();
Pair<Integer,Integer> cl = location(c);
if (cl.fst <= startOffset && cl.snd >= endOffset) {
loc = cl;
node = c;
continue descend;
}
}
return node;
}
assert false;
return null;
}
}
| 3,007 | 28.203883 | 107 | java |

| null | Project_CodeNet-main/tools/analysis-graph-generator/src/main/java/com/ibm/wala/codeNet/WalaSPTGraph.java |
package com.ibm.wala.codeNet;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.stream.Stream;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.json.JSONTokener;
import com.ibm.wala.util.collections.EmptyIterator;
import com.ibm.wala.util.collections.HashMapFactory;
import com.ibm.wala.util.collections.HashSetFactory;
import com.ibm.wala.util.graph.AbstractNumberedGraph;
import com.ibm.wala.util.graph.NumberedEdgeManager;
import com.ibm.wala.util.graph.NumberedNodeManager;
import com.ibm.wala.util.intset.IntSet;
import com.ibm.wala.util.intset.IntSetUtil;
import com.ibm.wala.util.intset.MutableIntSet;
public class WalaSPTGraph extends AbstractNumberedGraph<JSONObject> {
NumberedNodeManager<JSONObject> nodes;
private NumberedEdgeManager<JSONObject> edges;
public final int root;
public WalaSPTGraph(String parseTreeFile) throws JSONException, FileNotFoundException {
this((JSONObject)new JSONTokener(new FileInputStream(parseTreeFile)).nextValue());
}
public WalaSPTGraph(JSONObject parseTreeJson) {
root = parseTreeJson
.getJSONObject("graph")
.getInt("root");
nodes = new NumberedNodeManager<JSONObject>() {
private Map<Integer,JSONObject> a = HashMapFactory.make();
{
parseTreeJson
.getJSONObject("graph")
.getJSONArray("nodes")
.forEach(n -> a.put(((JSONObject)n).getInt("id"), (JSONObject)n));
}
@Override
public Stream<JSONObject> stream() {
return a.values().stream();
}
@Override
public int getNumberOfNodes() {
return a.size();
}
@Override
public void addNode(JSONObject n) {
throw new UnsupportedOperationException();
}
@Override
public void removeNode(JSONObject n) throws UnsupportedOperationException {
throw new UnsupportedOperationException();
}
@Override
public boolean containsNode(JSONObject n) {
return a.values().contains(n);
}
@Override
public int getNumber(JSONObject N) {
return N.getInt("id");
}
@Override
public JSONObject getNode(int number) {
return a.get(number);
}
@Override
public int getMaxNumber() {
return a.size();
}
@Override
public Iterator<JSONObject> iterateNodes(IntSet s) {
Set<JSONObject> result = HashSetFactory.make();
s.foreach(n -> result.add(getNode(n)));
return result.iterator();
}
};
edges = new NumberedEdgeManager<JSONObject>() {
private Map<JSONObject,Set<JSONObject>> forward = HashMapFactory.make();
private Map<JSONObject,Set<JSONObject>> backward = HashMapFactory.make();
{
Map<Integer,JSONObject> idToNode = HashMapFactory.make();
JSONArray nodes = parseTreeJson
.getJSONObject("graph")
.getJSONArray("nodes");
for(int i = 0; i < nodes.length(); i++) {
JSONObject node = nodes.getJSONObject(i);
idToNode.put(node.getInt("id"), node);
}
parseTreeJson
.getJSONObject("graph")
.getJSONArray("edges")
.forEach(n -> {
JSONObject e = (JSONObject)n;
JSONArray edge = e.getJSONArray("between");
JSONObject src = idToNode.get(edge.getInt(0));
JSONObject dst = idToNode.get(edge.getInt(1));
if (! forward.containsKey(src)) {
forward.put(src, HashSetFactory.make());
}
forward.get(src).add(dst);
if (! backward.containsKey(dst)) {
backward.put(dst, HashSetFactory.make());
}
backward.get(dst).add(src);
});
}
@Override
public Iterator<JSONObject> getPredNodes(JSONObject n) {
return backward.get(n).iterator();
}
@Override
public int getPredNodeCount(JSONObject n) {
return backward.get(n).size();
}
@Override
public Iterator<JSONObject> getSuccNodes(JSONObject n) {
if (forward.containsKey(n)) {
return forward.get(n).iterator();
} else {
return EmptyIterator.instance();
}
}
@Override
public int getSuccNodeCount(JSONObject N) {
return forward.get(N).size();
}
@Override
public void addEdge(JSONObject src, JSONObject dst) {
throw new UnsupportedOperationException();
}
@Override
public void removeEdge(JSONObject src, JSONObject dst) throws UnsupportedOperationException {
throw new UnsupportedOperationException();
}
@Override
public void removeAllIncidentEdges(JSONObject node) throws UnsupportedOperationException {
throw new UnsupportedOperationException();
}
@Override
public void removeIncomingEdges(JSONObject node) throws UnsupportedOperationException {
throw new UnsupportedOperationException();
}
@Override
public void removeOutgoingEdges(JSONObject node) throws UnsupportedOperationException {
throw new UnsupportedOperationException();
}
@Override
public boolean hasEdge(JSONObject src, JSONObject dst) {
return forward.get(src).contains(dst);
}
@Override
public IntSet getSuccNodeNumbers(JSONObject node) {
MutableIntSet ns = IntSetUtil.make();
getSuccNodes(node).forEachRemaining(s -> ns.add(getNumber(s)));
return ns;
}
@Override
public IntSet getPredNodeNumbers(JSONObject node) {
MutableIntSet ns = IntSetUtil.make();
getPredNodes(node).forEachRemaining(s -> ns.add(getNumber(s)));
return ns;
}
};
}
@Override
protected NumberedNodeManager<JSONObject> getNodeManager() {
return nodes;
}
@Override
protected NumberedEdgeManager<JSONObject> getEdgeManager() {
return edges;
}
}
| 5,583 | 25.590476 | 96 | java |

| null | Project_CodeNet-main/tools/analysis-graph-generator/src/main/java/com/ibm/wala/codeNet/WalaToGNNFiles.java |
package com.ibm.wala.codeNet;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.PrintWriter;
import java.io.Serializable;
import java.io.UTFDataFormatException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;
import com.ibm.wala.cast.java.ecj.util.SourceDirCallGraph;
import com.ibm.wala.cast.java.ipa.callgraph.JavaSourceAnalysisScope;
import com.ibm.wala.cast.java.ipa.modref.AstJavaModRef;
import com.ibm.wala.cast.tree.CAstSourcePositionMap.Position;
import com.ibm.wala.ipa.callgraph.CGNode;
import com.ibm.wala.ipa.callgraph.CallGraph;
import com.ibm.wala.ipa.callgraph.CallGraphBuilder;
import com.ibm.wala.ipa.callgraph.CallGraphBuilderCancelException;
import com.ibm.wala.ipa.callgraph.propagation.InstanceKey;
import com.ibm.wala.ipa.callgraph.propagation.PropagationCallGraphBuilder;
import com.ibm.wala.ipa.cfg.BasicBlockInContext;
import com.ibm.wala.ipa.cfg.InterproceduralCFG;
import com.ibm.wala.ipa.cha.ClassHierarchyException;
import com.ibm.wala.ipa.slicer.MethodEntryStatement;
import com.ibm.wala.ipa.slicer.MethodExitStatement;
import com.ibm.wala.ipa.slicer.NormalReturnCallee;
import com.ibm.wala.ipa.slicer.NormalReturnCaller;
import com.ibm.wala.ipa.slicer.NormalStatement;
import com.ibm.wala.ipa.slicer.ParamCallee;
import com.ibm.wala.ipa.slicer.ParamCaller;
import com.ibm.wala.ipa.slicer.PhiStatement;
import com.ibm.wala.ipa.slicer.SDG;
import com.ibm.wala.ipa.slicer.Slicer;
import com.ibm.wala.ipa.slicer.Statement;
import com.ibm.wala.ssa.ISSABasicBlock;
import com.ibm.wala.ssa.SSAAbstractInvokeInstruction;
import com.ibm.wala.ssa.SSABinaryOpInstruction;
import com.ibm.wala.ssa.SSAConditionalBranchInstruction;
import com.ibm.wala.ssa.SSAInstruction;
import com.ibm.wala.ssa.SSANewInstruction;
import com.ibm.wala.ssa.SSAUnaryOpInstruction;
import com.ibm.wala.util.collections.FilterIterator;
import com.ibm.wala.util.collections.HashMapFactory;
import com.ibm.wala.util.collections.HashSetFactory;
import com.ibm.wala.util.collections.Iterator2Collection;
import com.ibm.wala.util.collections.Iterator2Iterable;
import com.ibm.wala.util.graph.Graph;
import com.ibm.wala.util.graph.GraphSlicer;
import com.ibm.wala.util.graph.labeled.LabeledGraph;
import com.ibm.wala.util.graph.labeled.SlowSparseNumberedLabeledGraph;
import com.ibm.wala.util.graph.traverse.BoundedBFSIterator;
import com.ibm.wala.util.graph.traverse.DFS;
public class WalaToGNNFiles {
static File outPath;
static {
outPath = new File(System.getProperty("outputDir"));
assert outPath.isDirectory();
}
private static void withOutput(String outFile, Consumer<PrintWriter> doit) {
try (PrintWriter f = new PrintWriter(new FileWriter(new File(outPath, outFile), true))) {
doit.accept(f);
} catch (IOException e) {
assert false;
}
}
private static Set<String> ipcfgFeatures(BasicBlockInContext<ISSABasicBlock> n) {
Set<String> fss = HashSetFactory.make();
n.iterator().forEachRemaining(inst -> {
if (inst instanceof SSAAbstractInvokeInstruction) {
try {
fss.add(((SSAAbstractInvokeInstruction) inst).getDeclaredTarget().getName().toUnicodeString());
} catch (UTFDataFormatException e) {
assert false : e;
}
} else if (inst instanceof SSANewInstruction) {
fss.add(((SSANewInstruction)inst).getConcreteType().getName().toUnicodeString());
}
});
return fss;
}
public static void main(String... args) throws ClassHierarchyException, IllegalArgumentException, CallGraphBuilderCancelException, IOException {
if (System.getProperty("readGraph") != null) {
readGraph(System.getProperty("readGraph"));
} else {
new SourceDirCallGraph().doit(args, (cg, builder, time) -> {
if (Boolean.getBoolean("SDG")) {
sdgToGNNFiles(cg, builder);
} else {
ipcfgToGNNFiles(cg, builder);
}
});
}
}
private static Set<String> sdgFeatures(Statement n) {
if (n instanceof MethodEntryStatement) {
return Collections.singleton("entry " + n.getNode().getMethod().getName());
} else if (n instanceof MethodExitStatement) {
return Collections.singleton("exit " + n.getNode().getMethod().getName());
} else if (n instanceof PhiStatement ||
n instanceof ParamCaller ||
n instanceof ParamCallee ||
n instanceof NormalReturnCallee ||
n instanceof NormalReturnCaller) {
return Collections.singleton("flow");
} else if (n instanceof NormalStatement) {
SSAInstruction inst = ((NormalStatement)n).getInstruction();
if (inst instanceof SSABinaryOpInstruction) {
return Collections.singleton(((SSABinaryOpInstruction)inst).getOperator().toString());
} else if (inst instanceof SSAUnaryOpInstruction) {
return Collections.singleton(((SSAUnaryOpInstruction)inst).getOpcode().toString());
} else if (inst instanceof SSAConditionalBranchInstruction) {
return Collections.singleton(((SSAConditionalBranchInstruction)inst).getOperator().toString());
} else if (inst instanceof SSAAbstractInvokeInstruction) {
return Collections.singleton(((SSAAbstractInvokeInstruction)inst).getDeclaredTarget().getName().toString());
} else if (inst instanceof SSANewInstruction) {
return Collections.singleton(((SSANewInstruction)inst).getConcreteType().getName().toString());
} else {
return Collections.emptySet();
}
} else {
return Collections.emptySet();
}
}
private static void sdgToGNNFiles(CallGraph cg, CallGraphBuilder<?> builder) {
SDG<? extends InstanceKey> sdg =
new SDG<>(
cg,
((PropagationCallGraphBuilder)builder).getPointerAnalysis(),
new AstJavaModRef<>(),
Slicer.DataDependenceOptions.NO_HEAP_NO_EXCEPTIONS,
Slicer.ControlDependenceOptions.NO_EXCEPTIONAL_EDGES);
Graph<Statement> srcSdg = GraphSlicer.prune(sdg,
n -> n.getNode().getMethod().getReference().getDeclaringClass().getClassLoader() == JavaSourceAnalysisScope.SOURCE);
Supplier<Iterator<Statement>> getEntries =
() -> cg.getEntrypointNodes().stream().map(n -> { return (Statement)new MethodEntryStatement(n); }).iterator();
writeGraph(getEntries, srcSdg,
WalaToGNNFiles::sdgFeatures,
(p, s) -> String.valueOf(sdg.getEdgeLabels(p, s).iterator().next()),
(n) -> {
Position p = GraphAugmentor.getPosition(n);
if (p != null) {
return new int[] { p.getFirstOffset(), p.getLastOffset() };
} else {
return null;
}
});
dump(srcSdg, (n) -> n.toString(), (p, s) -> s.toString() + ":" + sdg.getEdgeLabels(p, s));
}
private static void ipcfgToGNNFiles(CallGraph cg, CallGraphBuilder<?> builder) {
Collection<CGNode> roots = cg.getEntrypointNodes();
assert roots.size() == 1 : roots;
InterproceduralCFG full_ipcfg = new InterproceduralCFG(cg,
n -> n.getMethod().getReference().getDeclaringClass().getClassLoader() == JavaSourceAnalysisScope.SOURCE ||
n == cg.getFakeRootNode() ||
n == cg.getFakeWorldClinitNode());
BasicBlockInContext<ISSABasicBlock> entry = full_ipcfg.getEntry(roots.iterator().next());
Graph<BasicBlockInContext<ISSABasicBlock>> ipcfg =
GraphSlicer.prune(full_ipcfg,
n -> n.getMethod().getReference().getDeclaringClass().getClassLoader() == JavaSourceAnalysisScope.SOURCE);
Supplier<Iterator<BasicBlockInContext<ISSABasicBlock>>> entryPoints =
() -> new FilterIterator<>(ipcfg.iterator(), n -> n.equals(entry) || (n.isEntryBlock() && n.getMethod().isClinit()));
writeGraph(entryPoints, ipcfg, WalaToGNNFiles::ipcfgFeatures, null, null);
dump(ipcfg,
(n) -> {
StringBuffer sb = new StringBuffer();
sb.append(n.toString()).append('\n');
n.iterator().forEachRemaining(inst -> {
sb.append(" ").append(inst).append("\n");
});
return sb.toString();
},
(p, s) -> s.toString());
}
private static <T> void dump(Graph<T> g,
Function<T, String> printNode,
BiFunction<T, T, String> printEdge)
{
StringBuilder sb = new StringBuilder();
for (T n : g) {
sb.append(printNode.apply(n)).append('\n');
for (T s : Iterator2Iterable.make(g.getSuccNodes(n))) {
sb.append(" --> ").append(printEdge.apply(n, s));
sb.append('\n');
}
sb.append('\n');
}
System.err.println(sb.toString());
}
private static void readGraph(String fileName) {
try (ObjectInputStream ois = new ObjectInputStream(new FileInputStream(new File(outPath, fileName)))) {
@SuppressWarnings("unchecked")
LabeledGraph<Node, String> G = (LabeledGraph<Node, String>) ois.readObject();
writeGraph(() -> G.stream().filter(Node::isEntryPoint).iterator(), G, Node::features, (p, s) -> G.getEdgeLabels(p, s).iterator().next(), Node::position);
} catch (IOException | ClassNotFoundException e) {
assert false : e;
}
}
static class Node implements Serializable {
private static final long serialVersionUID = -3181259880503889140L;
public Node(int index, Set<String> features, int[] position, boolean isEntryPoint) {
this.index = index;
this.features = features;
this.position = position;
this.isEntryPoint = isEntryPoint;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((features == null) ? 0 : features.hashCode());
result = prime * result + index;
result = prime * result + (isEntryPoint ? 1231 : 1237);
result = prime * result + Arrays.hashCode(position);
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
Node other = (Node) obj;
if (features == null) {
if (other.features != null)
return false;
} else if (!features.equals(other.features))
return false;
if (index != other.index)
return false;
if (isEntryPoint != other.isEntryPoint)
return false;
if (!Arrays.equals(position, other.position))
return false;
return true;
}
int index;
int index() { return index; }
Set<String> features;
Set<String> features() { return features; }
int[] position;
int[] position() { return position; }
boolean isEntryPoint;
boolean isEntryPoint() { return isEntryPoint; }
}
private static <T> void serializeGraph(Supplier<Iterator<T>> entryPoints,
Graph<T> ipcfg,
Function<T, Set<String>> features,
BiFunction<T, T, String> edgeLabels,
Function<T,int[]> pos) {
Set<T> entries = Iterator2Collection.toSet(entryPoints.get());
Map<T,Integer> index = HashMapFactory.make();
LabeledGraph<Node, String> out = new SlowSparseNumberedLabeledGraph<>();
Iterator<T> nodes = ipcfg.iterator();
int i = -1;
Function<T, Node> toNode = (node) -> new Node(index.get(node), features.apply(node), pos.apply(node), entries.contains(node));
while (nodes.hasNext()) {
T node = nodes.next();
index.put(node, ++i);
out.addNode(toNode.apply(node));
}
ipcfg.forEach(pred -> {
Node outp = toNode.apply(pred);
ipcfg.getSuccNodes(pred).forEachRemaining(succ -> {
Node outs = toNode.apply(succ);
out.addEdge(outp, outs, edgeLabels.apply(pred, succ));
});
});
try (ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream(new File(outPath, System.getProperty("graphLabel") + ".javaData")))) {
oos.writeObject(out);
} catch (IOException e) {
assert false : e;
}
}
private static <T> void writeGraph(Supplier<Iterator<T>> entryPoints,
Graph<T> ipcfg,
Function<T, Set<String>> features,
BiFunction<T, T, String> edgeLabels,
Function<T, int[]> pos)
{
if (Boolean.getBoolean("serialize")) {
serializeGraph(entryPoints, ipcfg, features, edgeLabels, pos);
} else {
int dfsNumber = 0;
Map<T,Integer> dfsFinish = HashMapFactory.make();
Iterator<T> search = DFS.iterateFinishTime(ipcfg, entryPoints.get());
while (search.hasNext()) {
dfsFinish.put(search.next(), dfsNumber++);
}
int reverseDfsNumber = 0;
Map<T,Integer> dfsStart = HashMapFactory.make();
Iterator<T> reverseSearch = DFS.iterateDiscoverTime(ipcfg, entryPoints.get());
while (reverseSearch.hasNext()) {
dfsStart.put(reverseSearch.next(), reverseDfsNumber++);
}
Map<T,Integer> bfsDepth = HashMapFactory.make();
BoundedBFSIterator<T> depths = new BoundedBFSIterator<>(ipcfg, entryPoints.get(), 1000);
while(depths.hasNext()) {
T n = depths.next();
bfsDepth.put(n, depths.getCurrentHops());
}
// nodes files
withOutput("num-node-list.csv", f -> {
f.println("" + dfsFinish.size());
});
withOutput("node_dfs_order.csv", f -> {
ipcfg.stream()
.filter(n -> dfsFinish.containsKey(n))
.sorted((l, r) -> dfsFinish.get(l) - dfsFinish.get(r))
.forEach(n -> f.println("" + dfsFinish.get(n)));
f.flush();
});
withOutput("node_depth.csv", f -> {
ipcfg.stream()
.filter(n -> dfsFinish.containsKey(n))
.sorted((l, r) -> dfsFinish.get(l) - dfsFinish.get(r))
.forEach(n -> {
assert bfsDepth.containsKey(n) : n;
f.println("" + bfsDepth.get(n));
});
f.flush();
});
withOutput("node-feat", f -> {
ipcfg.stream()
.filter(n -> dfsFinish.containsKey(n))
.sorted((l, r) -> dfsFinish.get(l) - dfsFinish.get(r))
.forEach(n -> {
if (features.apply(n).isEmpty()) {
f.print("none");
} else {
features.apply(n).forEach(s -> f.print(s + " "));
}
f.println();
});
f.flush();
});
withOutput("node_is_attributed.csv", f -> {
ipcfg.stream()
.filter(n -> dfsFinish.containsKey(n))
.sorted((l, r) -> dfsFinish.get(l) - dfsFinish.get(r))
.forEach(n -> {
f.println(features.apply(n).isEmpty()? 0: 1);
});
f.flush();
});
withOutput("node_doc.txt", f -> {
ipcfg.stream()
.filter(n -> dfsFinish.containsKey(n))
.sorted((l, r) -> dfsFinish.get(l) - dfsFinish.get(r))
.forEach(n -> f.println(dfsFinish.get(n) + ":" + n));
f.flush();
});
// edge files
class EdgeProcessor {
void doit(BiFunction<T,T,Void> edges) {
ipcfg.stream()
.filter(p -> dfsFinish.containsKey(p))
.sorted((l, r) -> dfsFinish.get(l) - dfsFinish.get(r))
.forEach(p -> {
ipcfg.getSuccNodes(p).forEachRemaining(s -> {
if (dfsFinish.containsKey(s) &&
!( (dfsStart.get(p) >= dfsStart.get(s)) &&
(dfsFinish.get(p) <= dfsFinish.get(s)) )) {
edges.apply(p, s);
}
});
});
}
};
withOutput("edge.csv", f -> {
new EdgeProcessor().doit((p, s) -> {
f.println(dfsFinish.get(p) + "," + dfsFinish.get(s));
return null;
});
});
if (edgeLabels != null) {
withOutput("edge_type", f -> {
new EdgeProcessor().doit((p, s) -> {
f.println(edgeLabels.apply(p, s));
return null;
});
});
}
withOutput("num-edge-list.csv", f -> {
class Box {
int i = 0;
}
Box b = new Box();
new EdgeProcessor().doit((p, s) -> {
b.i++;
return null;
});
f.println(b.i);
});
// graph files
withOutput("graph-label", f -> {
f.println("" + System.getProperty("graphLabel"));
});
}
}
}
| 15,496 | 32.326882 | 156 | java |

| null | Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/SPTGenerator.java |
package com.ibm.ai4code.parser;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.util.BitSet;
import java.util.List;
import javax.management.RuntimeErrorException;
import org.antlr.v4.runtime.BaseErrorListener;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.DefaultErrorStrategy;
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.Parser;
import org.antlr.v4.runtime.RecognitionException;
import org.antlr.v4.runtime.Recognizer;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.Vocabulary;
import org.antlr.v4.runtime.atn.ATNConfigSet;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.tree.ParseTree;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import com.ibm.ai4code.parser.c.*;
import com.ibm.ai4code.parser.c_multi.C11Lexer;
import com.ibm.ai4code.parser.c_multi.C11Parser;
import com.ibm.ai4code.parser.c_multi.C11ReservedWordDecider;
import com.ibm.ai4code.parser.c_multi.C11Tokens;
import com.ibm.ai4code.parser.cpp_multi.*;
import com.ibm.ai4code.parser.cymbol.CymbolLexer;
import com.ibm.ai4code.parser.cymbol.CymbolParser;
import com.ibm.ai4code.parser.cymbol.CymbolReservedWordDecider;
import com.ibm.ai4code.parser.java_multi.*;
import com.ibm.ai4code.parser.commons.Args;
import com.ibm.ai4code.parser.commons.JsonUtils;
import com.ibm.ai4code.parser.commons.ReservedWordDeciderI;
import com.ibm.ai4code.parser.commons.SPT;
import com.ibm.ai4code.parser.cpp.CPP14Lexer;
import com.ibm.ai4code.parser.cpp.CPP14Parser;
import com.ibm.ai4code.parser.cpp.CPPReservedWordDecider;
import com.ibm.ai4code.parser.java.JavaLexer;
import com.ibm.ai4code.parser.java.JavaParser;
import com.ibm.ai4code.parser.java.JavaReservedWordDecider;
import com.ibm.ai4code.parser.python.PythonLexer;
import com.ibm.ai4code.parser.python.PythonParser;
import com.ibm.ai4code.parser.python.PythonReservedWordDecider;
import com.ibm.ai4code.parser.cobol.*;
public class SPTGenerator {
/**
*
* @param srcFileName
* @param dstFileName
 * @return true when the SPT was successfully generated and written
* @throws IOException
*/
public static boolean generate(String srcFileName, String dstFileName) throws IOException {
String[] fileInfo = Utils.getFileInfo(srcFileName);
String fileType = fileInfo[1];
InputStream is = new FileInputStream(srcFileName);
CharStream input = CharStreams.fromStream(is);
ParseTree tree = null;
CommonTokenStream tokenStream = null;
ReservedWordDeciderI rwdi = null;
Lexer lexer = null;
String[] ruleNames = null;
Vocabulary vocabulary = null;
//System.err.println("[SPT Info] " + srcFileName + " started");
try {
if (fileType.equals("c")) {
/*lexer = new CLexer(input);
tokenStream = new CommonTokenStream(lexer);
CParser parser = new CParser(tokenStream);
parser.removeErrorListeners();
parser.addErrorListener(new SPTBaseErrorListener(srcFileName));
ruleNames = parser.getRuleNames();
tree = parser.compilationUnit();
rwdi = new CReservedWordDecider();*/
if(!Args.MULTI) {
lexer = new com.ibm.ai4code.parser.c_multi.C11Lexer(input);
lexer.removeErrorListeners(); // weiz 2021-03-07
lexer.addErrorListener(new SPTLexerBaseErrorListener(srcFileName)); // weiz 2021-03-07
tokenStream = new CommonTokenStream(lexer);
C11Parser parser = new com.ibm.ai4code.parser.c_multi.C11Parser(tokenStream);
parser.removeErrorListeners();
parser.addErrorListener(new SPTParserBaseErrorListener(srcFileName));
ruleNames = parser.getRuleNames();
tree = parser.compilationUnit();
rwdi = new com.ibm.ai4code.parser.c_multi.C11ReservedWordDecider();
}else {
lexer = new com.ibm.ai4code.parser.c.CLexer(input);
lexer.removeErrorListeners(); // weiz 2021-03-07
lexer.addErrorListener(new SPTLexerBaseErrorListener(srcFileName)); // weiz 2021-03-07
tokenStream = new CommonTokenStream(lexer);
com.ibm.ai4code.parser.c.CParser parser = new com.ibm.ai4code.parser.c.CParser(tokenStream);
parser.removeErrorListeners();
parser.addErrorListener(new SPTParserBaseErrorListener(srcFileName));
ruleNames = parser.getRuleNames();
tree = parser.compilationUnit();
rwdi = new com.ibm.ai4code.parser.c.CReservedWordDecider();
}
} else if (fileType.equals("cpp")) {
if(!Args.MULTI) {
lexer = new com.ibm.ai4code.parser.cpp.CPP14Lexer(input);
lexer.removeErrorListeners(); // weiz 2021-03-07
lexer.addErrorListener(new SPTLexerBaseErrorListener(srcFileName)); // weiz 2021-03-07
tokenStream = new CommonTokenStream(lexer);
CPP14Parser parser = new com.ibm.ai4code.parser.cpp.CPP14Parser(tokenStream);
parser.removeErrorListeners();
parser.addErrorListener(new SPTParserBaseErrorListener(srcFileName));
ruleNames = parser.getRuleNames();
tree = parser.translationUnit();
rwdi = new com.ibm.ai4code.parser.cpp.CPPReservedWordDecider();
}else {
lexer = new com.ibm.ai4code.parser.cpp_multi.CPP14Lexer(input);
lexer.removeErrorListeners(); // weiz 2021-03-07
lexer.addErrorListener(new SPTLexerBaseErrorListener(srcFileName)); // weiz 2021-03-07
tokenStream = new CommonTokenStream(lexer);
com.ibm.ai4code.parser.cpp_multi.CPP14Parser parser = new com.ibm.ai4code.parser.cpp_multi.CPP14Parser(tokenStream);
parser.removeErrorListeners();
parser.addErrorListener(new SPTParserBaseErrorListener(srcFileName));
ruleNames = parser.getRuleNames();
tree = parser.translationUnit();
rwdi = new com.ibm.ai4code.parser.cpp_multi.CPPReservedWordDecider();
}
} else if (fileType.equals("java")) {
if(!Args.MULTI) {
lexer = new com.ibm.ai4code.parser.java.JavaLexer(input);
lexer.removeErrorListeners(); // weiz 2021-03-07
lexer.addErrorListener(new SPTLexerBaseErrorListener(srcFileName)); // weiz 2021-03-07
tokenStream = new CommonTokenStream(lexer);
com.ibm.ai4code.parser.java.JavaParser parser = new com.ibm.ai4code.parser.java.JavaParser(tokenStream);
parser.removeErrorListeners();
parser.addErrorListener(new SPTParserBaseErrorListener(srcFileName));
ruleNames = parser.getRuleNames();
tree = parser.compilationUnit();
rwdi = new com.ibm.ai4code.parser.java.JavaReservedWordDecider();
}else {
lexer = new com.ibm.ai4code.parser.java_multi.JavaLexer(input);
lexer.removeErrorListeners(); // weiz 2021-03-07
lexer.addErrorListener(new SPTLexerBaseErrorListener(srcFileName)); // weiz 2021-03-07
tokenStream = new CommonTokenStream(lexer);
com.ibm.ai4code.parser.java_multi.JavaParser parser = new com.ibm.ai4code.parser.java_multi.JavaParser(tokenStream);
parser.removeErrorListeners();
parser.addErrorListener(new SPTParserBaseErrorListener(srcFileName));
ruleNames = parser.getRuleNames();
tree = parser.compilationUnit();
rwdi = new com.ibm.ai4code.parser.java_multi.JavaReservedWordDecider();
}
} else if (fileType.equals("py")) {
if(!Args.MULTI) {
lexer = new PythonLexer(input);
lexer.removeErrorListeners(); // weiz 2021-03-07
lexer.addErrorListener(new SPTLexerBaseErrorListener(srcFileName)); // weiz 2021-03-07
tokenStream = new CommonTokenStream(lexer);
PythonParser parser = new PythonParser(tokenStream);
parser.removeErrorListeners();
parser.addErrorListener(new SPTParserBaseErrorListener(srcFileName));
ruleNames = parser.getRuleNames();
tree = parser.root();
rwdi = new PythonReservedWordDecider();
}else {
lexer = new com.ibm.ai4code.parser.python_multi.PythonLexer(input);
lexer.removeErrorListeners(); // weiz 2021-03-07
lexer.addErrorListener(new SPTLexerBaseErrorListener(srcFileName)); // weiz 2021-03-07
tokenStream = new CommonTokenStream(lexer);
com.ibm.ai4code.parser.python_multi.PythonParser parser = new com.ibm.ai4code.parser.python_multi.PythonParser(tokenStream);
parser.removeErrorListeners();
parser.addErrorListener(new SPTParserBaseErrorListener(srcFileName));
ruleNames = parser.getRuleNames();
tree = parser.root();
rwdi = new PythonReservedWordDecider();
}
} else if (fileType.equals("cbl")) {
lexer = new Cobol85Lexer(input);
lexer.removeErrorListeners(); // weiz 2021-03-07
lexer.addErrorListener(new SPTLexerBaseErrorListener(srcFileName)); // weiz 2021-03-07
tokenStream = new CommonTokenStream(lexer);
Cobol85Parser parser = new Cobol85Parser(tokenStream);
parser.removeErrorListeners();
parser.addErrorListener(new SPTParserBaseErrorListener(srcFileName));
ruleNames = parser.getRuleNames();
tree = parser.compilationUnit();
rwdi = new CobolReservedWordDecider();
} else if(fileType.equals("c11")) {
lexer = new C11Lexer(input);
//lexer = new C11Tokens(input);
lexer.removeErrorListeners(); // weiz 2021-03-07
lexer.addErrorListener(new SPTLexerBaseErrorListener(srcFileName)); // weiz 2021-03-07
tokenStream = new CommonTokenStream(lexer);
C11Parser parser = new C11Parser(tokenStream);
parser.removeErrorListeners();
parser.addErrorListener(new SPTParserBaseErrorListener(srcFileName));
ruleNames = parser.getRuleNames();
tree = parser.compilationUnit();
rwdi = new C11ReservedWordDecider();
} else if(fileType.equals("cymbol")) {
if(!Args.MULTI) {
lexer = new CymbolLexer(input);
lexer.removeErrorListeners(); // weiz 2021-03-07
lexer.addErrorListener(new SPTLexerBaseErrorListener(srcFileName)); // weiz 2021-03-07
tokenStream = new CommonTokenStream(lexer);
CymbolParser parser = new CymbolParser(tokenStream);
//parser.removeErrorListeners();
//parser.addErrorListener(new SPTBaseErrorListener(srcFileName));
ruleNames = parser.getRuleNames();
tree = parser.file();
rwdi = new CymbolReservedWordDecider();
}else {
lexer = new com.ibm.ai4code.parser.cymbol_multi.CymbolLexer(input);
lexer.removeErrorListeners(); // weiz 2021-03-07
lexer.addErrorListener(new SPTLexerBaseErrorListener(srcFileName)); // weiz 2021-03-07
tokenStream = new CommonTokenStream(lexer);
com.ibm.ai4code.parser.cymbol_multi.CymbolParser parser = new com.ibm.ai4code.parser.cymbol_multi.CymbolParser(tokenStream);
//parser.removeErrorListeners();
//parser.addErrorListener(new SPTBaseErrorListener(srcFileName));
ruleNames = parser.getRuleNames();
tree = parser.file();
rwdi = new com.ibm.ai4code.parser.cymbol_multi.CymbolReservedWordDecider();
}
}
else {
throw new RuntimeErrorException(new Error("Unknow file type " + fileType));
}
// step 3 build PT and SPT
vocabulary = lexer.getVocabulary();
SPT spt = new SPT(tree, ruleNames, vocabulary);
spt.setSrcFileName(srcFileName); //weiz 2021-03-06
spt.simplify();
spt.indexing();
if (rwdi != null) {
spt.labeling(rwdi); // weiz 2020-10-29, labeling
}
JsonUtils.serializeSPT(spt, dstFileName);
System.out.println(dstFileName + " is generated!");
//System.err.println("[SPT Info] " + srcFileName + " finished");
return true;
} catch (RuntimeException re) {
System.err.println("[SPT Warning] " + srcFileName + " cannot be processed.");
return false;
}
}
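	// A minimal usage sketch (file names below are hypothetical). The language is chosen from the
	// source file extension, and the simplified parse tree is written as JSON to dstFileName:
	//
	//   boolean ok = SPTGenerator.generate("examples/java/Hello.java", "out/Hello.json");
	//   if (!ok) { /* an error was reported on stderr and no JSON was written */ }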
static class SPTParserBaseErrorListener extends BaseErrorListener{
String srcFileName;
public SPTParserBaseErrorListener(String srcFileName) {
this.srcFileName = srcFileName;
}
@Override
public void syntaxError(Recognizer<?, ?> recognizer,
Object offendingSymbol,
int line,
int charPositionInLine,
String msg,
RecognitionException e)
{
String errMsg = "[SPT Error] " + srcFileName + " parser syntax error!";
System.err.println(errMsg);
throw new RuntimeException(errMsg);
}
}
static class SPTLexerBaseErrorListener extends BaseErrorListener{
String srcFileName;
public SPTLexerBaseErrorListener(String srcFileName) {
this.srcFileName = srcFileName;
}
@Override
public void syntaxError(Recognizer<?, ?> recognizer,
Object offendingSymbol,
int line,
int charPositionInLine,
String msg,
RecognitionException e)
{
String errMsg = "[SPT Error] " + srcFileName + " lexer syntax error!";
System.err.println(errMsg);
throw new RuntimeException(errMsg);
}
}
class BailErrorStrategy extends DefaultErrorStrategy{
String srcFileName;
public BailErrorStrategy(String srcFileName) {
this.srcFileName = srcFileName;
}
@Override
public void recover(Parser recognizer, RecognitionException e) {
throw new RuntimeException("recover exception");
}
@Override
public Token recoverInline(Parser recognizer) throws RecognitionException {
throw new RuntimeException("recoverInline exception");
}
/** Make sure we don't attempt to recover from problems in subrules. */
@Override
public void sync(Parser recognizer) throws RecognitionException {
// TODO Auto-generated method stub
//super.sync(recognizer);
}
}
public static void handleSingle(String srcFileName, String dstDir) throws IOException {
// always first spt-ize
String [] info = Utils.getFileInfo(srcFileName);
String dstFileName = dstDir + "/" + info[0] + ".json";
boolean sptSuccess = SPTGenerator.generate(srcFileName, dstFileName);
if(!sptSuccess) { // if spt not successful, then forgo the tokenize part
return;
}
// tokenize
dstFileName = dstDir + "/" +info[0] +".csv"; // weiz 2020-11-20, use csv file to represent tokens
Tokenizer.tokenize(srcFileName, dstFileName);
}
public static void handleBatch(String srcBatchFileName) throws IOException{
try (BufferedReader br = new BufferedReader(new FileReader(srcBatchFileName))) {
String line;
while ((line = br.readLine()) != null) {
String [] lines = line.split(" |\t");
assert(lines.length == 2);
String srcFileName = lines[0];
String dstDir = lines[1];
handleSingle(srcFileName, dstDir);
}
}
}
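	// The batch input file is expected to hold one entry per line: the source file and the destination
	// directory, separated by a space or a tab (paths below are hypothetical):
	//
	//   examples/java/Hello.java out/java
	//   examples/c/helloworld.c out/c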
public static void handleArgs(String[] args) throws ParseException, IOException {
CommandLineParser parser = new DefaultParser();
Options options = new Options();
Option helpOpt = Option.builder("h").longOpt("help").desc("Usage").build();
Option multiOpt = Option.builder("multi").longOpt("multi").desc("Use multiple g4 files to have less fine grained toke type").build();
Option dstOpt = Option.builder("d").longOpt("dest").desc("Destination directory ").hasArg().build();
Option batchOpt = Option.builder("b").longOpt("batch").desc("Batch processing").build();
options.addOption(helpOpt);
options.addOption(multiOpt);
options.addOption(dstOpt);
options.addOption(batchOpt);
CommandLine cli = parser.parse(options, args);
Args.parse(cli); // weiz 2021-02-15, add options so it takes the multi option
if(cli.hasOption("h")) {
HelpFormatter formatter = new HelpFormatter();
formatter.printHelp( "SPTGenerator input_file(either source code or batch processing input)", options );
return;
}
List<String> leftoverArgs = cli.getArgList();
assert(leftoverArgs.size() == 1); // We should always get one input file (either it is src code or the batch input file)
String srcFileName = leftoverArgs.get(0);
if(cli.hasOption("b")) { // batch procesing
String srcBatchFileName = srcFileName;
handleBatch(srcBatchFileName);
}else { // single file processing
String dstDir = null;
String src = srcFileName;
if(cli.hasOption("d")) {
dstDir = cli.getOptionValue("d");
}
handleSingle(src, dstDir);
}
}
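	// Example invocations (class path and file names are hypothetical):
	//
	//   java com.ibm.ai4code.parser.SPTGenerator -d out examples/java/Hello.java   (single file)
	//   java com.ibm.ai4code.parser.SPTGenerator -b batch.txt                      (batch mode)
	//   java com.ibm.ai4code.parser.SPTGenerator -multi -d out examples/c/prog.c   (coarser token types)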
public static void main(String[] args) throws IOException, ParseException {
handleArgs(args);
}
}
| 16,234
| 39.185644
| 135
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/Tokenizer.java
|
package com.ibm.ai4code.parser;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Path;
import java.util.List;
import javax.management.RuntimeErrorException;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.Vocabulary;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import com.ibm.ai4code.parser.c.*;
import com.ibm.ai4code.parser.c_multi.C11Tokens;
import com.ibm.ai4code.parser.cobol.Cobol85Lexer;
import com.ibm.ai4code.parser.commons.CSVUtils;
import com.ibm.ai4code.parser.cpp.CPP14Lexer;
import com.ibm.ai4code.parser.cymbol.CymbolLexer;
import com.ibm.ai4code.parser.java.JavaLexer;
import com.ibm.ai4code.parser.python.PythonLexer;
import com.ibm.ai4code.parser.commons.Args;
public class Tokenizer {
public static void tokenize(String srcFileName, String dstFileName) throws IOException {
String[] fileInfo = Utils.getFileInfo(srcFileName);
String fileType = fileInfo[1];
InputStream is = new FileInputStream(srcFileName);
CharStream input = CharStreams.fromStream(is);
Lexer lexer = null;
CommonTokenStream tokenStream = null;
if (fileType.equals("c")) {
/*lexer = new CLexer(input);
tokenStream = new CommonTokenStream(lexer);*/
if(!Args.MULTI) {
lexer = new com.ibm.ai4code.parser.c.CLexer(input);
}else {
lexer = new com.ibm.ai4code.parser.c_multi.C11Tokens(input);
}
tokenStream = new CommonTokenStream(lexer);
} else if (fileType.equals("cpp")) {
if(!Args.MULTI) {
lexer = new com.ibm.ai4code.parser.cpp.CPP14Lexer(input);
}else {
lexer = new com.ibm.ai4code.parser.cpp_multi.CPP14Tokens(input);
}
tokenStream = new CommonTokenStream(lexer);
} else if (fileType.equals("java")) {
if(!Args.MULTI){
lexer = new com.ibm.ai4code.parser.java.JavaLexer(input);
}else {
lexer = new com.ibm.ai4code.parser.java_multi.JavaTokens(input);
}
tokenStream = new CommonTokenStream(lexer);
} else if (fileType.equals("py")) {
if(!Args.MULTI) {
lexer = new PythonLexer(input);
tokenStream = new CommonTokenStream(lexer);
}else {
lexer = new com.ibm.ai4code.parser.python_multi.PythonTokens(input);
tokenStream = new CommonTokenStream(lexer);
}
}else if (fileType.equals("cbl")) {
lexer = new Cobol85Lexer(input);
tokenStream = new CommonTokenStream(lexer);
}else if (fileType.equals("c11")) { // weiz 2020-12-08, Geert's split c grammar
lexer = new C11Tokens(input);
tokenStream = new CommonTokenStream(lexer);
} else if (fileType.equals("cymbol")) {
if(!Args.MULTI) {
lexer = new CymbolLexer(input);
tokenStream = new CommonTokenStream(lexer);
}else {
lexer = new com.ibm.ai4code.parser.cymbol_multi.CymbolTokens(input);
tokenStream = new CommonTokenStream(lexer);
}
}
else {
			throw new RuntimeErrorException(new Error("Unknown file type " + fileType));
}
tokenStream.fill(); // weiz 2020-11-16, we need to fill the tokenStream so that token index can be generated.
List<Token> tokens = tokenStream.getTokens();
Vocabulary vocabulary = lexer.getVocabulary();
// “CSV header: seqnr, start, stop, text, class, channel, line, column”
CSVUtils.openFile(dstFileName, "seqnr", "start", "stop",
"text", "class", "channel", "line", "column");
for(Token token:tokens) {
//Utils.showHierarchy(token.getClass());
int tokenIdx=token.getTokenIndex();
int startIdx = token.getStartIndex();
int stopIdx = token.getStopIndex();
String txt = token.getText();
if ( txt!=null ) { // weiz 2020-11-17, this logic is copied from CommonToken.toString() method
txt = txt.replace("\n","\\n");
txt = txt.replace("\r","\\r");
txt = txt.replace("\t","\\t");
}
else {
txt = "<no text>";
}
String tokenSymbolicName = vocabulary.getSymbolicName(token.getType());
String displayName = vocabulary.getDisplayName(token.getType()).toLowerCase();
//System.out.println(tokenSymbolicName+"-----"+displayName);
int line = token.getLine();
int channel = token.getChannel(); // weiz 2020-12-10
int positionInLine = token.getCharPositionInLine();
//CSVUtils.writeRecord("@"+tokenIdx, ""+startIdx+":"+stopIdx, txt, tokenSymbolicName, ""+line+":"+positionInLine);
// “CSV header: seqnr, start, stop, text, class, channel, line, column”
CSVUtils.writeRecord(tokenIdx,
startIdx, stopIdx, txt, displayName, channel, line, positionInLine);
}
if(dstFileName != null) {
System.out.println(dstFileName + " is generated.");
}
CSVUtils.closeFile();
}
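	// A sketch of the produced CSV, one record per token in token-stream order (values are
	// illustrative only; "class" holds the lower-cased display name from the lexer vocabulary):
	//
	//   seqnr,start,stop,text,class,channel,line,column
	//   0,0,5,public,'public',0,1,0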
public static void handleSingle(String srcFileName, String dstDir) throws IOException {
String [] info = Utils.getFileInfo(srcFileName);
String dstFileName = (dstDir == null) ? null:(dstDir+ "/"+ info[0] + ".csv");
Tokenizer.tokenize(srcFileName, dstFileName);
}
public static void handleBatch(String srcBatchFileName) throws IOException{
try (BufferedReader br = new BufferedReader(new FileReader(srcBatchFileName))) {
String line;
while ((line = br.readLine()) != null) {
String [] lines = line.split(" ");
assert(lines.length == 2);
String srcFileName = lines[0];
String dstDir = lines[1];
handleSingle(srcFileName, dstDir);
}
}
}
public static void handleArgs(String[] args) throws ParseException, IOException {
CommandLineParser parser = new DefaultParser();
Options options = new Options();
Option helpOpt = Option.builder("h").longOpt("help").desc("Usage").build();
		Option multiOpt = Option.builder("multi").longOpt("multi").desc("Use multiple g4 files to have a less fine-grained token type").build();
Option dstOpt = Option.builder("d").longOpt("dest").desc("Destination directory ").hasArg().build();
Option batchOpt = Option.builder("b").longOpt("batch").desc("Batch processing").build();
options.addOption(helpOpt);
options.addOption(multiOpt);
options.addOption(dstOpt);
options.addOption(batchOpt);
CommandLine cli = parser.parse(options, args);
Args.parse(cli); // weiz 2021-02-15, add options so it takes the multi option
if(cli.hasOption("h")) {
HelpFormatter formatter = new HelpFormatter();
formatter.printHelp( "Tokenize input_file(either source code or batch processing input)", options );
return;
}
List<String> leftoverArgs = cli.getArgList();
assert(leftoverArgs.size() == 1); // We should always get one input file (either it is src code or the batch input file)
String srcFileName = leftoverArgs.get(0);
if(cli.hasOption("b")) { // batch procesing
String srcBatchFileName = srcFileName;
handleBatch(srcBatchFileName);
}else { // single file processing
String dstDir = null;
String src = srcFileName;
if(cli.hasOption("d")) {
dstDir = cli.getOptionValue("d");
}
handleSingle(src, dstDir);
}
}
public static void main(String[] args) throws IOException, ParseException {
handleArgs(args);
}
}
| 7,468
| 35.434146
| 135
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/Utils.java
|
package com.ibm.ai4code.parser;
import javax.management.RuntimeErrorException;
public class Utils {
/**
 * Assumes there is at least one period separating the base name from the file extension.
 * Returns the base name and the file type.
 * @param fileName
 * @return results[]; results[0]: base name (file name without extension), results[1]: file type (file extension)
*/
public static String [] getFileInfo(String fileName) throws RuntimeException{
int len = fileName.length();
int periodIdx = -1;
		int fwdSlashIdx = -1; // really only works for Linux-style (forward-slash) paths
int i = 0;
for(i = len -1; i >= 0; i--) {
if(fileName.charAt(i) == '.') {
periodIdx = i;
break;
}
}
if(periodIdx == -1) {
throw new RuntimeException(fileName+ " periodIdx == -1");
}
for(; i>=0; i--) {
if(fileName.charAt(i) == '/') {
fwdSlashIdx = i;
break;
}
}
fwdSlashIdx++;
String baseName = fileName.substring(fwdSlashIdx, periodIdx);
String fileNameType = fileName.substring(periodIdx+1);
String results[] = {baseName, fileNameType};
return results;
}
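	// For example (following the splitting logic above):
	//   getFileInfo("./examples/c/helloworld.c") returns {"helloworld", "c"}
	//   getFileInfo("Hello.java") returns {"Hello", "java"}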
public static void showHierarchy(Class<?> c) {
if (c.getSuperclass() == null) {
System.out.println(c.getName());
return;
}
showHierarchy(c.getSuperclass());
System.out.println(c.getName());
}
public static void main(String[] args) {
//String fileName = "./examples/c/helloworld.c";
String fileName = "helloworld.c";
String [] results = getFileInfo(fileName);
System.out.println(results[0]);
System.out.println(results[1]);
}
}
| 1,575
| 26.649123
| 117
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/c/CReservedWordDecider.java
|
package com.ibm.ai4code.parser.c;
import java.util.HashSet;
import com.ibm.ai4code.parser.commons.ReservedWordDeciderI;
public class CReservedWordDecider implements ReservedWordDeciderI{
// cat ckws_orig.txt | grep ":" | cut -d':' -f2 | sed "s/'/\"/g" | sed "s/;/,/g"
public static final String [] keywords = {
"auto",
"break",
"case",
"char",
"const",
"continue",
"default",
"do",
"double",
"else",
"enum",
"extern",
"float",
"for",
"goto",
"if",
"inline",
"int",
"long",
"register",
"restrict",
"return",
"short",
"signed",
"sizeof",
"static",
"struct",
"switch",
"typedef",
"union",
"unsigned",
"void",
"volatile",
"while",
"_Alignas",
"_Alignof",
"_Atomic",
"_Bool",
"_Complex",
"_Generic",
"_Imaginary",
"_Noreturn",
"_Static_assert",
"_Thread_local"
};
// cat cops_orig.txt | grep ":" | cut -d':' -f2 | sed "s/'/\"/g" | sed "s/;/,/g"
public static final String [] ops = {
"(",
")",
"[",
"]",
"{",
"}",
"<",
"<=",
">",
">=",
"<<",
">>",
"+",
"++",
"-",
"--",
"*",
"/",
"%",
"&",
"|",
"&&",
"||",
"^",
"!",
"~",
"?",
":", // weiz 2020-10-29
";", // weiz 2020-10-29
",",
"=",
"*=",
"/=",
"%=",
"+=",
"-=",
"<<=",
">>=",
"&=",
"^=",
"|=",
"==",
"!=",
"->",
".",
"..."
};
@Override
public boolean isReserved(String word) {
return (keywordsHashSet.contains(word) || opsHashSet.contains(word));
}
HashSet<String> keywordsHashSet=new HashSet<String>();
HashSet<String> opsHashSet = new HashSet<String>();
public void buildKeyWordsHashSet() {
for(String keyword: keywords) {
keywordsHashSet.add(keyword);
}
}
public void buildOPsHashSet() {
for(String op: ops) {
opsHashSet.add(op);
}
}
public CReservedWordDecider() {
buildKeyWordsHashSet();
buildOPsHashSet();
}
}
| 2,071
| 14.816794
| 81
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/c_multi/C11ReservedWordDecider.java
|
package com.ibm.ai4code.parser.c_multi;
import java.util.HashSet;
import com.ibm.ai4code.parser.commons.ReservedWordDeciderI;
public class C11ReservedWordDecider implements ReservedWordDeciderI{
// cat ckws_orig.txt | grep ":" | cut -d':' -f2 | sed "s/'/\"/g" | sed "s/;/,/g"
public static final String [] keywords = {
"auto",
"break",
"case",
"char",
"const",
"continue",
"default",
"do",
"double",
"else",
"enum",
"extern",
"float",
"for",
"goto",
"if",
"inline",
"int",
"long",
"register",
"restrict",
"return",
"short",
"signed",
"sizeof",
"static",
"struct",
"switch",
"typedef",
"union",
"unsigned",
"void",
"volatile",
"while",
"_Alignas",
"_Alignof",
"_Atomic",
"_Bool",
"_Complex",
"_Generic",
"_Imaginary",
"_Noreturn",
"_Static_assert",
"_Thread_local"
};
// cat cops_orig.txt | grep ":" | cut -d':' -f2 | sed "s/'/\"/g" | sed "s/;/,/g"
public static final String [] ops = {
"(",
")",
"[",
"]",
"{",
"}",
"<",
"<=",
">",
">=",
"<<",
">>",
"+",
"++",
"-",
"--",
"*",
"/",
"%",
"&",
"|",
"&&",
"||",
"^",
"!",
"~",
"?",
":", // weiz 2020-10-29
";", // weiz 2020-10-29
",",
"=",
"*=",
"/=",
"%=",
"+=",
"-=",
"<<=",
">>=",
"&=",
"^=",
"|=",
"==",
"!=",
"->",
".",
"..."
};
@Override
public boolean isReserved(String word) {
return (keywordsHashSet.contains(word) || opsHashSet.contains(word));
}
HashSet<String> keywordsHashSet=new HashSet<String>();
HashSet<String> opsHashSet = new HashSet<String>();
public void buildKeyWordsHashSet() {
for(String keyword: keywords) {
keywordsHashSet.add(keyword);
}
}
public void buildOPsHashSet() {
for(String op: ops) {
opsHashSet.add(op);
}
}
public C11ReservedWordDecider() {
buildKeyWordsHashSet();
buildOPsHashSet();
}
}
| 2,081
| 14.89313
| 81
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/cobol/CobolReservedWordDecider.java
|
package com.ibm.ai4code.parser.cobol;
import java.util.HashSet;
import com.ibm.ai4code.parser.commons.ReservedWordDeciderI;
public class CobolReservedWordDecider implements ReservedWordDeciderI{
// grep ":" cobolkws_orig.txt | cut -d':' -f1 | awk '{ print "\""$0"\","}' | sed 's/ //g' | sed 's/_/-/g'
// note we replace all _ with -, thanks to David!
public static final String [] keywords = {
"ABORT",
"ACCEPT",
"ACCESS",
"ADD",
"ADDRESS",
"ADVANCING",
"AFTER",
"ALIGNED",
"ALL",
"ALPHABET",
"ALPHABETIC",
"ALPHABETIC-LOWER",
"ALPHABETIC-UPPER",
"ALPHANUMERIC",
"ALPHANUMERIC-EDITED",
"ALSO",
"ALTER",
"ALTERNATE",
"AND",
"ANY",
"ARE",
"AREA",
"AREAS",
"AS",
"ASCENDING",
"ASCII",
"ASSIGN",
"ASSOCIATED-DATA",
"ASSOCIATED-DATA-LENGTH",
"AT",
"ATTRIBUTE",
"AUTHOR",
"AUTO",
"AUTO-SKIP",
"BACKGROUND-COLOR",
"BACKGROUND-COLOUR",
"BASIS",
"BEEP",
"BEFORE",
"BEGINNING",
"BELL",
"BINARY",
"BIT",
"BLANK",
"BLINK",
"BLOCK",
"BOUNDS",
"BOTTOM",
"BY",
"BYFUNCTION",
"BYTITLE",
"CALL",
"CANCEL",
"CAPABLE",
"CCSVERSION",
"CD",
"CF",
"CH",
"CHAINING",
"CHANGED",
"CHANNEL",
"CHARACTER",
"CHARACTERS",
"CLASS",
"CLASS-ID",
"CLOCK-UNITS",
"CLOSE",
"CLOSE-DISPOSITION",
"COBOL",
"CODE",
"CODE-SET",
"COLLATING",
"COL",
"COLUMN",
"COM-REG",
"COMMA",
"COMMITMENT",
"COMMON",
"COMMUNICATION",
"COMP",
"COMP-1",
"COMP-2",
"COMP-3",
"COMP-4",
"COMP-5",
"COMPUTATIONAL",
"COMPUTATIONAL-1",
"COMPUTATIONAL-2",
"COMPUTATIONAL-3",
"COMPUTATIONAL-4",
"COMPUTATIONAL-5",
"COMPUTE",
"CONFIGURATION",
"CONTAINS",
"CONTENT",
"CONTINUE",
"CONTROL",
"CONTROL-POINT",
"CONTROLS",
"CONVENTION",
"CONVERTING",
"COPY",
"CORR",
"CORRESPONDING",
"COUNT",
"CRUNCH",
"CURRENCY",
"CURSOR",
"DATA",
"DATA-BASE",
"DATE",
"DATE-COMPILED",
"DATE-WRITTEN",
"DAY",
"DAY-OF-WEEK",
"DBCS",
"DE",
"DEBUG-CONTENTS",
"DEBUG-ITEM",
"DEBUG-LINE",
"DEBUG-NAME",
"DEBUG-SUB-1",
"DEBUG-SUB-2",
"DEBUG-SUB-3",
"DEBUGGING",
"DECIMAL-POINT",
"DECLARATIVES",
"DEFAULT",
"DEFAULT-DISPLAY",
"DEFINITION",
"DELETE",
"DELIMITED",
"DELIMITER",
"DEPENDING",
"DESCENDING",
"DESTINATION",
"DETAIL",
"DFHRESP",
"DFHVALUE",
"DISABLE",
"DISK",
"DISPLAY",
"DISPLAY-1",
"DIVIDE",
"DIVISION",
"DONTCARE",
"DOUBLE",
"DOWN",
"DUPLICATES",
"DYNAMIC",
"EBCDIC",
"EGCS",
"EGI",
"ELSE",
"EMI",
"EMPTY-CHECK",
"ENABLE",
"END",
"END-ACCEPT",
"END-ADD",
"END-CALL",
"END-COMPUTE",
"END-DELETE",
"END-DIVIDE",
"END-EVALUATE",
"END-IF",
"END-MULTIPLY",
"END-OF-PAGE",
"END-PERFORM",
"END-READ",
"END-RECEIVE",
"END-RETURN",
"END-REWRITE",
"END-SEARCH",
"END-START",
"END-STRING",
"END-SUBTRACT",
"END-UNSTRING",
"END-WRITE",
"ENDING",
"ENTER",
"ENTRY",
"ENTRY-PROCEDURE",
"ENVIRONMENT",
"EOP",
"EQUAL",
"ERASE",
"ERROR",
"EOL",
"EOS",
"ESCAPE",
"ESI",
"EVALUATE",
"EVENT",
"EVERY",
"EXCEPTION",
"EXCLUSIVE",
"EXHIBIT",
"EXIT",
"EXPORT",
"EXTEND",
"EXTENDED",
"EXTERNAL",
"FALSE",
"FD",
"FILE",
"FILE-CONTROL",
"FILLER",
"FINAL",
"FIRST",
"FOOTING",
"FOR",
"FOREGROUND-COLOR",
"FOREGROUND-COLOUR",
"FROM",
"FULL",
"FUNCTION",
"FUNCTIONNAME",
"FUNCTION-POINTER",
"GENERATE",
"GOBACK",
"GIVING",
"GLOBAL",
"GO",
"GREATER",
"GRID",
"GROUP",
"HEADING",
"HIGHLIGHT",
"HIGH-VALUE",
"HIGH-VALUES",
"I-O",
"I-O-CONTROL",
"ID",
"IDENTIFICATION",
"IF",
"IMPLICIT",
"IMPORT",
"IN",
"INDEX",
"INDEXED",
"INDICATE",
"INITIAL",
"INITIALIZE",
"INITIATE",
"INPUT",
"INPUT-OUTPUT",
"INSPECT",
"INSTALLATION",
"INTEGER",
"INTO",
"INVALID",
"INVOKE",
"IS",
"JUST",
"JUSTIFIED",
"KANJI",
"KEPT",
"KEY",
"KEYBOARD",
"LABEL",
"LANGUAGE",
"LAST",
"LB",
"LD",
"LEADING",
"LEFT",
"LEFTLINE",
"LENGTH",
"LENGTH-CHECK",
"LESS",
"LIBACCESS",
"LIBPARAMETER",
"LIBRARY",
"LIMIT",
"LIMITS",
"LINAGE",
"LINAGE-COUNTER",
"LINE",
"LINES",
"LINE-COUNTER",
"LINKAGE",
"LIST",
"LOCAL",
"LOCAL-STORAGE",
"LOCK",
"LONG-DATE",
"LONG-TIME",
"LOWER",
"LOWLIGHT",
"LOW-VALUE",
"LOW-VALUES",
"MEMORY",
"MERGE",
"MESSAGE",
"MMDDYYYY",
"MODE",
"MODULES",
"MORE-LABELS",
"MOVE",
"MULTIPLE",
"MULTIPLY",
"NAMED",
"NATIONAL",
"NATIONAL-EDITED",
"NATIVE",
"NEGATIVE",
"NETWORK",
"NEXT",
"NO",
"NO-ECHO",
"NOT",
"NULL",
"NULLS",
"NUMBER",
"NUMERIC",
"NUMERIC-DATE",
"NUMERIC-EDITED",
"NUMERIC-TIME",
"OBJECT-COMPUTER",
"OCCURS",
"ODT",
"OF",
"OFF",
"OMITTED",
"ON",
"OPEN",
"OPTIONAL",
"OR",
"ORDER",
"ORDERLY",
"ORGANIZATION",
"OTHER",
"OUTPUT",
"OVERFLOW",
"OVERLINE",
"OWN",
"PACKED-DECIMAL",
"PADDING",
"PAGE",
"PAGE-COUNTER",
"PASSWORD",
"PERFORM",
"PF",
"PH",
"PIC",
"PICTURE",
"PLUS",
"POINTER",
"POSITION",
"POSITIVE",
"PORT",
"PRINTER",
"PRINTING",
"PRIVATE",
"PROCEDURE",
"PROCEDURE-POINTER",
"PROCEDURES",
"PROCEED",
"PROCESS",
"PROGRAM",
"PROGRAM-ID",
"PROGRAM-LIBRARY",
"PROMPT",
"PURGE",
"QUEUE",
"QUOTE",
"QUOTES",
"RANDOM",
"READER",
"REMOTE",
"RD",
"REAL",
"READ",
"RECEIVE",
"RECEIVED",
"RECORD",
"RECORDING",
"RECORDS",
"RECURSIVE",
"REDEFINES",
"REEL",
"REF",
"REFERENCE",
"REFERENCES",
"RELATIVE",
"RELEASE",
"REMAINDER",
"REMARKS",
"REMOVAL",
"REMOVE",
"RENAMES",
"REPLACE",
"REPLACING",
"REPORT",
"REPORTING",
"REPORTS",
"REQUIRED",
"RERUN",
"RESERVE",
"REVERSE-VIDEO",
"RESET",
"RETURN",
"RETURN-CODE",
"RETURNING",
"REVERSED",
"REWIND",
"REWRITE",
"RF",
"RH",
"RIGHT",
"ROUNDED",
"RUN",
"SAME",
"SAVE",
"SCREEN",
"SD",
"SEARCH",
"SECTION",
"SECURE",
"SECURITY",
"SEGMENT",
"SEGMENT-LIMIT",
"SELECT",
"SEND",
"SENTENCE",
"SEPARATE",
"SEQUENCE",
"SEQUENTIAL",
"SET",
"SHARED",
"SHAREDBYALL",
"SHAREDBYRUNUNIT",
"SHARING",
"SHIFT-IN",
"SHIFT-OUT",
"SHORT-DATE",
"SIGN",
"SIZE",
"SORT",
"SORT-CONTROL",
"SORT-CORE-SIZE",
"SORT-FILE-SIZE",
"SORT-MERGE",
"SORT-MESSAGE",
"SORT-MODE-SIZE",
"SORT-RETURN",
"SOURCE",
"SOURCE-COMPUTER",
"SPACE",
"SPACES",
"SPECIAL-NAMES",
"STANDARD",
"STANDARD-1",
"STANDARD-2",
"START",
"STATUS",
"STOP",
"STRING",
"SUB-QUEUE-1",
"SUB-QUEUE-2",
"SUB-QUEUE-3",
"SUBTRACT",
"SUM",
"SUPPRESS",
"SYMBOL",
"SYMBOLIC",
"SYNC",
"SYNCHRONIZED",
"TABLE",
"TALLY",
"TALLYING",
"TASK",
"TAPE",
"TERMINAL",
"TERMINATE",
"TEST",
"TEXT",
"THAN",
"THEN",
"THREAD",
"THREAD-LOCAL",
"THROUGH",
"THRU",
"TIME",
"TIMER",
"TIMES",
"TITLE",
"TO",
"TODAYS-DATE",
"TODAYS-NAME",
"TOP",
"TRAILING",
"TRUE",
"TRUNCATED",
"TYPE",
"TYPEDEF",
"UNDERLINE",
"UNIT",
"UNSTRING",
"UNTIL",
"UP",
"UPON",
"USAGE",
"USE",
"USING",
"VALUE",
"VALUES",
"VARYING",
"VIRTUAL",
"WAIT",
"WHEN",
"WHEN-COMPILED",
"WITH",
"WORDS",
"WORKING-STORAGE",
"WRITE",
"YEAR",
"YYYYMMDD",
"YYYYDDD",
"ZERO",
"ZERO-FILL",
"ZEROS",
"ZEROES"
};
// cat cobolops_orig.txt | cut -d':' -f2 | sed "s/'/\"/g" | sed "s/;/,/g"
public static final String [] ops = {
"&",
"*",
"**",
":", // weiz
",",
"*>CE",
"*>",
"$",
"\"",
//"." ("\r" | "\n" | "\f" | "\t" | " ")+ | "." EOF, // weiz 2020-11-19, not sure how to handle this
".",
"=",
"*>EXECCICS",
"*>EXECSQL",
"*>EXECSQLIMS",
"<",
"<=",
"(",
"-",
">",
">=",
"<>",
"+",
"\'",
")",
"/",
};
@Override
public boolean isReserved(String word) {
return (keywordsHashSet.contains(word) || opsHashSet.contains(word));
}
HashSet<String> keywordsHashSet=new HashSet<String>();
HashSet<String> opsHashSet = new HashSet<String>();
public void buildKeyWordsHashSet() {
for(String keyword: keywords) {
keywordsHashSet.add(keyword);
}
}
public void buildOPsHashSet() {
for(String op: ops) {
opsHashSet.add(op);
}
}
public CobolReservedWordDecider() {
buildKeyWordsHashSet();
buildOPsHashSet();
}
}
| 8,909
| 14.101695
| 106
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/commons/Args.java
|
package com.ibm.ai4code.parser.commons;
import org.apache.commons.cli.*;
public class Args {
	public static boolean MULTI = false; // use "single" g4 file, or use "multi" g4 file, where we have different token types
public static void parse(CommandLine cmd) {
if(cmd.hasOption("multi")) {
MULTI = true;
}
}
}
| 319
| 25.666667
| 121
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/commons/CSVUtils.java
|
package com.ibm.ai4code.parser.commons;
import java.io.FileWriter;
import java.io.IOException;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;
public class CSVUtils {
private static CSVPrinter printer = null;
public static void openFile(String fileName, String ... header) throws IOException {
if(fileName == null) {
printer = new CSVPrinter(System.out, CSVFormat.RFC4180.withHeader(header).withRecordSeparator("\n")); // weiz 2020-12-13, support output to stdout for csv file
}else {
			printer = new CSVPrinter(new FileWriter(fileName), CSVFormat.RFC4180.withHeader(header).withRecordSeparator("\n")); // use \n instead of \r\n (DOS) as line separator
}
}
public static void writeRecord(Object... arg0 ) throws IOException {
printer.printRecord(arg0);
}
public static void closeFile() throws IOException {
assert(printer != null);
printer.close();
}
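	// A minimal usage sketch (file name and values are hypothetical; all three calls declare IOException):
	//
	//   CSVUtils.openFile("tokens.csv", "seqnr", "text");
	//   CSVUtils.writeRecord(0, "public");
	//   CSVUtils.writeRecord(1, "class");
	//   CSVUtils.closeFile();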
}
| 926
| 28.903226
| 169
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/commons/JsonUtils.java
|
package com.ibm.ai4code.parser.commons;
import java.io.File;
import java.util.ArrayList;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.Queue;
import javax.json.JsonReader;
import javax.json.JsonValue;
import javax.json.JsonWriter;
import javax.json.JsonWriterFactory;
import javax.json.stream.JsonGenerator;
import javax.json.Json;
import javax.json.JsonArray;
import javax.json.JsonArrayBuilder;
import javax.json.JsonNumber;
import javax.json.JsonObject;
import javax.json.JsonObjectBuilder;
public class JsonUtils {
public static void readJson(String fileName) throws FileNotFoundException {
File jsonInputFile = new File(fileName);
InputStream is;
is = new FileInputStream(jsonInputFile);
JsonReader reader = Json.createReader(is);
JsonObject empObj = reader.readObject();
reader.close();
JsonObject graphObj = empObj.getJsonObject("graph");
System.out.println(graphObj.get("version"));
JsonArray nodes = graphObj.getJsonArray("nodes");
//System.out.println(nodes);
for(JsonValue jv: nodes) {
//System.err.println(jv.getValueType());
JsonObject jo = (JsonObject) jv;
//System.out.println(jo);
System.out.println(jo.getInt("id"));
System.out.println(jo.getString("label"));
}
JsonArray edges = graphObj.getJsonArray("edges");
for(JsonValue jv: edges) {
//System.err.println(jv.getValueType());
JsonObject jo = (JsonObject) jv;
JsonArray jarr = jo.getJsonArray("between");
for(JsonValue jarrV: jarr) {
//System.err.println(jarrV.getValueType());
JsonNumber _jo = (JsonNumber) jarrV;
System.out.print(_jo+" ");
}
System.out.println();
}
}
public static void writeJson(String fileName) throws IOException {
// step 1 build graph object
JsonObjectBuilder jGraphBuilder = Json.createObjectBuilder();
jGraphBuilder.add("version", "1.0");
jGraphBuilder.add("type", "tree");
jGraphBuilder.add("directed", true);
jGraphBuilder.add("root", 0);
//step 2 build nodes array
JsonArrayBuilder nodesArrBuilder = Json.createArrayBuilder();
nodesArrBuilder.add(Json.createObjectBuilder().add("id", 0).add("label", "Top"));
nodesArrBuilder.add(Json.createObjectBuilder().add("id", 1).add("label", "Child1"));
nodesArrBuilder.add(Json.createObjectBuilder().add("id", 2).add("label", "Child2"));
nodesArrBuilder.add(Json.createObjectBuilder().add("id", 3).add("label", "Grandchild"));
jGraphBuilder.add("nodes", nodesArrBuilder);
// step 3 builds edges array
JsonArrayBuilder edgesArrBuilder = Json.createArrayBuilder();
edgesArrBuilder.add(Json.createObjectBuilder().add("between", Json.createArrayBuilder().add(0).add(1)));
edgesArrBuilder.add(Json.createObjectBuilder().add("between", Json.createArrayBuilder().add(0).add(2)));
edgesArrBuilder.add(Json.createObjectBuilder().add("between", Json.createArrayBuilder().add(2).add(3)));
jGraphBuilder.add("edges", edgesArrBuilder);
// step 4 build top object
JsonObjectBuilder jTopBuilder = Json.createObjectBuilder();
jTopBuilder.add("graph", jGraphBuilder);
// step 5 write to disk
Map<String, Object> properties = new HashMap<>(1);
properties.put(JsonGenerator.PRETTY_PRINTING, true);
JsonWriterFactory writerFactory = Json.createWriterFactory(properties);
JsonWriter writer = writerFactory.createWriter(new FileWriter(new File(fileName)));
writer.writeObject(jTopBuilder.build());
writer.close();
}
public static void serializeSPT(SPT tree, String fileName) throws IOException {
// step 1 build graph object
JsonObjectBuilder jGraphBuilder = Json.createObjectBuilder();
jGraphBuilder.add("version", "1.0");
jGraphBuilder.add("src-file", tree.getSrcFileName()); // weiz 2021-03-06
jGraphBuilder.add("type", "tree");
jGraphBuilder.add("directed", true);
jGraphBuilder.add("order", "bfs");
jGraphBuilder.add("num-of-nodes", tree.getNumNodes());
jGraphBuilder.add("num-of-edges", tree.getNumEdges());
jGraphBuilder.add("root", tree.getRoot().getBFSIdx()); // weiz 2021-01-19 revert back to the idea that uses
// traversal id (BFS) as the ID, thus id of the root should always be 0
// jGraphBuilder.add("root", tree.getRoot().getTokenIdx()); // weiz 2020-11-18 , use the token index as "id"
// step 1.5 get the layerwise traversal queue
Queue<SPTNode> queue = tree.getLayerWiseTraversalQueue();
assert(queue.size() == tree.getNumNodes());
// step 2 build nodes array
JsonArrayBuilder nodesArrBuilder = Json.createArrayBuilder();
for(SPTNode n: queue) {
//nodesArrBuilder.add(Json.createObjectBuilder().add("id", n.getIdx()).add("label", n.toString()));// TODO maybe implement getLabel instead of toString
if(n.getType().equals("Token")) {
nodesArrBuilder.add(Json.createObjectBuilder().add("id", n.getBFSIdx()).add("label", n.getLabel()).
add("node-type", n.getType()).add("type-rule-name", n.getRuleName())
.add("type-rule-index", n.getRuleIndex()).add("reserved-word-flag", n.getReservedWordFlag())
.add("dfs-index", n.getDFSIndex()).add("depth", n.getDepth()) // weiz 2021-03-05, add more fields
.add("token-id", n.getTokenIdx()));// weiz 2020-11-18, use token index as "id"
}else {
nodesArrBuilder.add(Json.createObjectBuilder().add("id", n.getBFSIdx()).add("label", n.getLabel()).
add("node-type", n.getType()).add("type-rule-name", n.getRuleName()) // weiz 2021-01-19, for non-leaf nodes, we don't output token_id
.add("type-rule-index", n.getRuleIndex()).add("reserved-word-flag", n.getReservedWordFlag())
.add("dfs-index", n.getDFSIndex()).add("depth", n.getDepth())); // weiz 2021-03-05, add more fields
}
/* nodesArrBuilder.add(Json.createObjectBuilder().add("id", n.getTokenIdx()).add("label", n.getLabel()).
add("node-type", n.getType()).add("type-rule-name", n.getRuleName()).add("traversal_id", n.getIdx())); // weiz 2020-11-18, use token index as "id"
*/
}
jGraphBuilder.add("nodes", nodesArrBuilder);
// step 3 builds edges array
int edgeNum =0;
JsonArrayBuilder edgesArrBuilder = Json.createArrayBuilder();
for(SPTNode n: queue) {
ArrayList<SPTNode> children = n.getChildren();
if(children != null) {
int source = n.getBFSIdx(); // weiz 2021-01-19 revert it back to use BFS id
//int source = n.getTokenIdx(); // weiz 2020-11-18 use token index as index
for(SPTNode c: children) {
int dst = c.getBFSIdx(); // weiz 2020-01-19 revert it back to use BFS id
//int dst = c.getTokenIdx(); // weiz 2020-11-18 use token index as index
edgesArrBuilder.add(Json.createObjectBuilder().add("between", Json.createArrayBuilder().add(source).add(dst)));
edgeNum++;
}
}
}
assert(tree.getNumEdges() == edgeNum);
jGraphBuilder.add("edges", edgesArrBuilder);
// step 4 build top object
JsonObjectBuilder jTopBuilder = Json.createObjectBuilder();
jTopBuilder.add("graph", jGraphBuilder);
// step 5 write to disk
Map<String, Object> properties = new HashMap<>(1);
properties.put(JsonGenerator.PRETTY_PRINTING, true);
JsonWriterFactory writerFactory = Json.createWriterFactory(properties);
//JsonWriter writer = writerFactory.createWriter(new FileWriter(new File(fileName)));
FileWriter rawFileWriter = new FileWriter(new File(fileName));
JsonWriter writer = writerFactory.createWriter(rawFileWriter);
writer.writeObject(jTopBuilder.build());
rawFileWriter.write(System.lineSeparator());
writer.close();
}
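	// A sketch of the JSON produced by serializeSPT (values are illustrative only; leaf "Token"
	// nodes additionally carry a "token-id" field):
	//
	//   { "graph": { "version": "1.0", "src-file": "...", "type": "tree", "directed": true,
	//                "order": "bfs", "num-of-nodes": 42, "num-of-edges": 41, "root": 0,
	//                "nodes": [ { "id": 0, "label": "...", "node-type": "Rule", "type-rule-name": "...",
	//                             "type-rule-index": 0, "reserved-word-flag": false,
	//                             "dfs-index": 0, "depth": 0 }, ... ],
	//                "edges": [ { "between": [ 0, 1 ] }, ... ] } }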
public static void main(String [] args) throws IOException {
//String fileName = "/Users/weiz/eclipse-workspace/ai4code/resources/tree.json";
//readJson(fileName);
String fileName = "/Users/weiz/eclipse-workspace/ai4code/resources/tree_weiz.json";
writeJson(fileName);
}
}
| 7,941
| 41.698925
| 154
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/commons/ReservedWordDeciderI.java
|
package com.ibm.ai4code.parser.commons;
public interface ReservedWordDeciderI {
public boolean isReserved(String word);
}
| 125
| 17
| 40
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/commons/SPT.java
|
package com.ibm.ai4code.parser.commons;
import java.util.Queue;
import org.antlr.v4.runtime.Vocabulary;
import org.antlr.v4.runtime.tree.ParseTree;
/**
* Simplified ParseTree
* @author weiz
*
*/
public class SPT {
private SPTNode root;
private Queue<SPTNode> shadowQueue = null;
private int numNodes; // weiz 2021-03-05, for GNN training
private int numEdges; // weiz 2021-03-05
private String srcFileName; // weiz 2021-03-06, add file name
public SPT() {
root = null;
}
public SPT(SPTNode root){
this.root = root;
}
// weiz 2020-11-17 add support for ruleNames and vocabulary
public SPT(ParseTree tree, String [] ruleNames, Vocabulary vocabulary) {
SPTNode.resetStaticFields(); // weiz 2020-11-18, add the reset static fields (clear out tokenIdxMax)
this.root = SPTNode.buildSPTNode(tree, null, ruleNames, vocabulary);
}
public SPTNode getRoot() {
return this.root;
}
public void simplify() {
SPTNode.simplify(root, null);
}
public int count() {
return SPTNode.count(root);
}
public void indexing() {
this.shadowQueue = SPTNode.bfsIndexing(root);
this.numNodes = SPTNode.dfsIndexing(root); // weiz, add dfs index
this.numEdges = this.numNodes - 1;
}
public int getNumNodes() {
return this.numNodes;
}
public int getNumEdges() {
return this.numEdges;
}
public void setSrcFileName(String srcFileName) {
this.srcFileName = srcFileName;
}
public String getSrcFileName() {
return this.srcFileName;
}
public void labeling(ReservedWordDeciderI rwdi) {
SPTNode.aromaLabeling(root, rwdi);
}
public Queue<SPTNode> getLayerWiseTraversalQueue(){
		assert(shadowQueue != null); // when this method is called, indexing() must already have been called
return shadowQueue;
}
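	// Typical usage, mirroring SPTGenerator.generate (tree, ruleNames, vocabulary and rwdi come from
	// the ANTLR parser for the source language):
	//
	//   SPT spt = new SPT(tree, ruleNames, vocabulary);
	//   spt.setSrcFileName(srcFileName);
	//   spt.simplify();
	//   spt.indexing();              // must run before getLayerWiseTraversalQueue()/serialization
	//   spt.labeling(rwdi);
	//   JsonUtils.serializeSPT(spt, dstFileName);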
}
| 1,757
| 21.831169
| 102
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/commons/SPTNode.java
|
package com.ibm.ai4code.parser.commons;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
import java.util.Stack;
import org.antlr.v4.runtime.tree.TerminalNodeImpl;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.Vocabulary;
import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.runtime.tree.RuleNode;
public class SPTNode implements TreeNodeIntf<SPTNode>{
private String name="";
private String label=""; // weiz 2020-10-29, the label that we will use to build trees similar to the ones in Aroma
private String ruleName="";
private int ruleIndex=-1; // weiz 2021-03-05, this is used to encode rule name so that it can be consumed by NN
private boolean isLeaf = true;
private boolean reservedWordFlag=false; // weiz 2020-10-29, used updated when labeling
private int bfsIdx = 0; // weiz 2021-03-04, renamed from idx to bfsIdx, as we will also add dfsIdx later
private int dfsIdx = 0; // weiz 2021-03-04, required for the DAGNN model
private int tokenIdx = 0; // weiz 2020-11-18 add the token index, so users can use .json file to locate parent-children relationship
private int depth = 0;
private ArrayList<SPTNode> children = null;
private SPTNode parentNode = null;
// static fields
	private static int tokenIdxMax=-1; // weiz 2020-11-18 remember the largest token index seen so far
public SPTNode() {
}
public SPTNode (String name, boolean isLeaf) {
this.name = name;
this.label = name;
this.isLeaf = isLeaf;
}
public SPTNode(String name, boolean isLeaf, ArrayList<SPTNode> children) {
this.name = name;
this.label = name;
this.isLeaf = isLeaf;
this.children = children;
if(children == null) {
assert(this.isLeaf);
}else {
assert(!this.isLeaf);
assert(children.size() > 0);
}
}
public void setParentNode(SPTNode parentNode) {
this.parentNode = parentNode;
}
public void addChildNode(SPTNode node) {
children.add(node);
this.isLeaf = false;
}
public int getChildCnt() {
if(null == this.children) {
return 0;
}
return children.size();
}
public ArrayList<SPTNode> getChildren(){
return this.children;
}
public SPTNode getChildAtIdx(int idx) {
return this.children.get(idx);
}
public void setChildren(ArrayList<SPTNode> children) {
this.children = children;
}
public void setBFSIdx(int idx) {
this.bfsIdx = idx;
}
public int getBFSIdx() {
return this.bfsIdx;
}
public void setDFSIndex(int idx) {
this.dfsIdx = idx;
}
public int getDFSIndex() {
return this.dfsIdx;
}
public void setRuleIndex(int ruleIndex) {
this.ruleIndex = ruleIndex;
}
public int getRuleIndex() {
return this.ruleIndex;
}
public void setTokenIdx(int tIdx) {
this.tokenIdx = tIdx;
}
public int getTokenIdx() {
return this.tokenIdx;
}
public void setDepth(int depth) {
this.depth = depth;
}
public int getDepth() {
return this.depth;
}
public void setReservedWordFlag(boolean flag) {
this.reservedWordFlag = flag;
}
public boolean getReservedWordFlag() {
return this.reservedWordFlag;
}
/**
	 * Simplifies a non-leaf node: if it has only one child, it becomes the simplified version of that child.
	 * This is applied recursively, so only leaf nodes and non-leaf nodes with more than one child survive, with parent links updated correctly.
	 * After simplifying, two invariants hold:
	 * (1) The out-degree of each non-leaf node is greater than 1.
	 * (2) The out-degree of each surviving non-leaf node is the same as before simplification.
*/
public static SPTNode simplify(SPTNode self, SPTNode parent) {
if(self.isLeaf) {
return self;
}else {
if(self.getChildCnt() == 1) {
SPTNode childNode = self.getChildAtIdx(0);
SPTNode simplifiedNode = simplify(childNode, self);
simplifiedNode.setParentNode(self); // recursively makes the surviving nodes update its parent
return simplifiedNode;
}else {
ArrayList<SPTNode> simplifiedChildren = new ArrayList<SPTNode>();
for(SPTNode childNode : self.children) {
SPTNode simplifiedChildNode = simplify(childNode, self);
simplifiedChildNode.setParentNode(self);
simplifiedChildren.add(simplifiedChildNode); // recursively makes the surviving nodes update its parent
}
self.setChildren(simplifiedChildren);
return self;
}
}
}
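	/*
	 * Illustration (rule names are hypothetical): a unary chain
	 *   expression -> term -> factor -> ID
	 * collapses so that the parent of "expression" ends up pointing directly at the ID leaf, while a
	 * node with two or more children keeps all of its (recursively simplified) children.
	 */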
/**
	 * Builds the SPT node tree depth-first from an ANTLR parse tree
* @param tree
* @param parent
* @param ruleNames
* @param vocabulary
* @return
*/
public static SPTNode buildSPTNode(ParseTree tree, SPTNode parent, String [] ruleNames, Vocabulary vocabulary) {
if (tree instanceof TerminalNodeImpl) {
TerminalNodeImpl tni = (TerminalNodeImpl) tree;
String name = tni.getText(); // weiz 2020-10-29, get the text, which we hope will be compared against the reserved words
// TODO need to check if it is in the reserved word list
SPTNode self = new SPTNode(name, true, null);
Token token = tni.getSymbol(); // weiz 2020-11-17, get token name
self.tokenIdx = token.getTokenIndex(); // weiz 2021-01-19, token idx is only meaningful for tokens
if(tokenIdxMax < self.tokenIdx) {
tokenIdxMax = self.tokenIdx;
}
//self.ruleName = vocabulary.getSymbolicName(token.getType()); // leaf node's rule name is the type name
self.ruleIndex = token.getType(); // weiz 2021-03-04 type of token
self.ruleName = vocabulary.getDisplayName(token.getType()); // leaf node's rule name is the type name
self.setParentNode(parent);
return self;
}else {
assert (tree instanceof RuleNode);
RuleNode rn = (RuleNode) tree;
//Utils.showHierarchy(rn.getClass());
/*System.out.println(rn.getClass());
System.out.println(ruleNames[rn.getRuleContext().getRuleIndex()]);
System.out.println("----------");*/
SPTNode self = new SPTNode("nonleaf", false, new ArrayList<SPTNode>());
self.ruleIndex = rn.getRuleContext().getRuleIndex(); // weiz 2021-03-04, add the rule index
self.ruleName = ruleNames[rn.getRuleContext().getRuleIndex()]; // weiz 2020-11-16, add rule names for non-leaf node
int childCnt = tree.getChildCount();
for(int i = 0; i < childCnt; ++i) {
SPTNode childNode = buildSPTNode(tree.getChild(i), self, ruleNames, vocabulary);
self.addChildNode(childNode);
}
self.setParentNode(parent);
return self;
}
}
/**
* Count how many nodes (including leaves and non-leaves) in the tree
* @param root
* @return
*/
public static int count(SPTNode root) {
if(root.isLeaf) {
return 1;
}else {
int cnt=1; // already included itself
ArrayList<SPTNode> children = root.getChildren();
for(SPTNode child: children) {
cnt+=count(child);
}
return cnt;
}
}
/**
* index the tree via layer-wise traversal (i.e. BFS)
* @param root
*/
public static Queue<SPTNode> bfsIndexing(SPTNode root) {
Queue<SPTNode> queue = new LinkedList<SPTNode>();
Queue<SPTNode> shadowQueue = new LinkedList<SPTNode>();
queue.add(root);
// step1, layer-wise traversal and build the printQueue
while (!queue.isEmpty()) {
SPTNode node = queue.remove();
int idx = shadowQueue.size();
node.bfsIdx = idx; // BFS index
if(!node.isLeaf) {
node.tokenIdx = ++(tokenIdxMax); // weiz 2020-11-18, generate tokenIdx for json file so that user can locate parent idx for token
// weiz 2021-01-19, for non-terminal node, this tokenIdx doesn't make much sense except it carries
// the relative BFS position to other non-terminal nodes in the parse tree. In the output of Json file, we
// don't output tokenIdx for non-terminal nodes (i.e. rule nodes)
}
shadowQueue.add(node);
int depth = node.depth;
ArrayList<SPTNode> children = node.getChildren();
if (children != null) { // weiz 2020-10-29 the children could be null, because it could be a leaf node
for (SPTNode child : children) {
child.depth = depth+1;
queue.add(child);
}
}
}
//System.out.println("Queue Size: " + shadowQueue.size());
return shadowQueue;
}
/**
*
* @param root
* @return number of nodes
*/
public static int dfsIndexing(SPTNode root) {
Stack<SPTNode> stack = new Stack<SPTNode>();
stack.push(root);
int index = 0;
while(!stack.isEmpty()) {
SPTNode node = stack.pop();
node.dfsIdx = index++;
if(!node.isLeaf) {
ArrayList<SPTNode> children = node.getChildren();
for(int idx = children.size()-1; idx>=0;idx--) { // note we push children from right to left in the stack, so when it pops, it pops from right to left
stack.push(children.get(idx));
}
}
}
return index;
}
/**
* A Facebook Aroma type labeling scheme
* @param root
* @param rwdi
*/
public static void aromaLabeling(SPTNode root, ReservedWordDeciderI rwdi) {
// step 1, just to figure out which ones that are in the reserved word list
aromaLabelingStageOne(root, rwdi);
// step 2, to label each non-leaf node
aromaLabelingStageTwo(root);
}
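	/*
	 * Illustration (assuming "if", "(" and ")" are reserved words and the condition/statement
	 * subtrees are not): a node with children [if, (, <cond>, ), <stmt>] gets the label "if(#)#",
	 * i.e. reserved children contribute their own label and every other child contributes "#".
	 */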
/**
* AromaLabeling stage one, just find out all the reserved words
* @param root
* @param rwdi
*/
private static void aromaLabelingStageOne(SPTNode root, ReservedWordDeciderI rwdi) {
if(root.isLeaf) {
if(rwdi.isReserved(root.name)) {
root.reservedWordFlag = true;
}
root.label = root.name; // a leaf node (i.e., token) always gets to keep its name as label
}else {
ArrayList<SPTNode> children = root.getChildren();
for(SPTNode child: children) {
aromaLabelingStageOne(child, rwdi);
}
}
}
private static void aromaLabelingStageTwo(SPTNode root) {
if(root.isLeaf) {
return;
}else {
String label="";
ArrayList<SPTNode> children = root.getChildren();
for(SPTNode child : children) {
if(child.reservedWordFlag) {
label+=child.label;
}else {
label +="#";
}
}
root.label = label;
for(SPTNode child:children) {
aromaLabelingStageTwo(child);
}
}
}
/**
* reset static fields (e.g., tokenIdxMax) before building SPT tree
*/
public static void resetStaticFields() {
tokenIdxMax = -1;
}
public String getLabel() {
return this.label;
//return this.label+ " ("+this.dfsIdx + ","+ this.depth+")";
/*if(this.isLeaf) {
return this.label;
}else {
return this.ruleName;
}*/
}
public String getRuleName() {
return this.ruleName;
}
public String getType() {
if(this.isLeaf) {
return "Token";
}else {
return "Rule";
}
}
public String toString(){
//return this.name+"(" + idx+","+ depth+")";
//return this.name;
if(this.isLeaf) {
return this.label+":" + this.tokenIdx+ ":"+this.ruleName;
}
return this.label+":"+this.tokenIdx+":"+this.ruleName;
}
}
| 10,642
| 27.687332
| 154
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/commons/TreeNodeIntf.java
|
package com.ibm.ai4code.parser.commons;
import java.util.ArrayList;
public interface TreeNodeIntf <T>{
ArrayList<T> getChildren();
}
| 139
| 13
| 39
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/commons/TreePrinter.java
|
package com.ibm.ai4code.parser.commons;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.Queue;
public class TreePrinter<T extends TreeNodeIntf<T>> {
private T root;
private Queue<TreePrinterNode> queue;
private Queue<TreePrinterNode> printQueue;
public TreePrinter(T root) {
this.root = root;
TreePrinterNode tpn= new TreePrinterNode(root, 0);
this.queue = new LinkedList<TreePrinterNode>();
this.printQueue = new LinkedList<TreePrinterNode>();
queue.add(tpn);
}
public void print() {
// step1, layer-wise traversal and build the printQueue
while(!queue.isEmpty()) {
TreePrinterNode tpn = queue.remove();
printQueue.add(tpn);
int level = tpn.getLevel();
ArrayList<T> children = tpn.node.getChildren();
if(children != null) {
for(T child: children) {
tpn = new TreePrinterNode(child, level+1);
queue.add(tpn);
}
}
}
System.out.println("Queue Size: " + printQueue.size());
// step 2, print elements in printQueue
int level=0;
while(!printQueue.isEmpty()) {
TreePrinterNode tpn = printQueue.remove();
if(tpn.level > level) {
level = tpn.level;
System.out.println();
}
System.out.print(tpn.node+"\t");
}
}
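	// A minimal usage sketch: SPTNode implements TreeNodeIntf<SPTNode>, so a simplified parse tree
	// can be printed layer by layer with, e.g., new TreePrinter<SPTNode>(spt.getRoot()).print();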
private class TreePrinterNode{
T node;
int level;
public TreePrinterNode() {
this.node = null;
this.level = -1;
}
public TreePrinterNode(T node, int level) {
this.node = node;
this.level = level;
}
public int getLevel() {
return this.level;
}
public void setLevel(int level) {
this.level = level;
}
}
}
| 1,572
| 22.477612
| 57
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/cpp/CPPReservedWordDecider.java
|
package com.ibm.ai4code.parser.cpp;
import java.util.HashSet;
import com.ibm.ai4code.parser.commons.ReservedWordDeciderI;
public class CPPReservedWordDecider implements ReservedWordDeciderI{
// cat cpp14kws_orig.txt | grep ":" | cut -d':' -f2 | sed "s/'/\"/g" | sed "s/;/,/g"
public static final String [] keywords=
{
"alignas",
"alignof",
"asm",
"auto",
"bool",
"break",
"case",
"catch",
"char",
"char16_t",
"char32_t",
"class",
"const",
"constexpr",
"const_cast",
"continue",
"decltype",
"default",
"delete",
"do",
"double",
"dynamic_cast",
"else",
"enum",
"explicit",
"export",
"extern",
"false",
"final",
"float",
"for",
"friend",
"goto",
"if",
"inline",
"int",
"long",
"mutable",
"namespace",
"new",
"noexcept",
"nullptr",
"operator",
"override",
"private",
"protected",
"public",
"register",
"reinterpret_cast",
"return",
"short",
"signed",
"sizeof",
"static",
"static_assert",
"static_cast",
"struct",
"switch",
"template",
"this",
"thread_local",
"throw",
"true",
"try",
"typedef",
"typeid",
"typename",
"union",
"unsigned",
"using",
"virtual",
"void",
"volatile",
"wchar_t",
"while"
};
// cat cpp14ops_orig.txt | grep ":" | cut -d':' -f2 | sed "s/'/\"/g" | sed "s/;/,/g"
public static final String[] ops = {
"(",
")",
"[",
"]",
"{",
"}",
"+",
"-",
"*",
"/",
"%",
"^",
"&",
"|",
"~",
"!" ,
"not",
"=",
"<",
">",
"+=",
"-=",
"*=",
"/=",
"%=",
"^=",
"&=",
"|=",
"<<",
">>",
"<<=",
">>=",
"==",
"!=",
"<=",
">=",
"&&",
"and",
"||" ,
"or",
"++",
"--",
",",
"->*",
"->",
"?",
":", // weiz 2020-10-29, this is a tricky one, as my grep cut is based on ':'
"::", // weiz 2020-10-29, this is a tricky one, as my grep cut is based on ':'
";", //weiz 2020-10-29, because in my shell script, i did s/;/,/g" to make java happy with its array declaration thus, i need to put ; back
".",
".*",
"..."
};
HashSet<String> keywordsHashSet=new HashSet<String>();
HashSet<String> opsHashSet = new HashSet<String>();
public void buildKeyWordsHashSet() {
for(String keyword: keywords) {
keywordsHashSet.add(keyword);
}
}
public void buildOPsHashSet() {
for(String op: ops) {
opsHashSet.add(op);
}
}
@Override
public boolean isReserved(String word) {
return (keywordsHashSet.contains(word) || opsHashSet.contains(word));
}
public CPPReservedWordDecider() {
buildKeyWordsHashSet();
buildOPsHashSet();
}
}
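A brief, hypothetical usage sketch (the class name and sample lexemes below are assumptions): the decider simply answers whether a given lexeme appears in the keyword or operator sets built above.

package com.ibm.ai4code.parser.cpp;

public class CPPReservedWordDeciderDemo {
	public static void main(String[] args) {
		CPPReservedWordDecider decider = new CPPReservedWordDecider();
		System.out.println(decider.isReserved("while"));   // true  -- C++14 keyword
		System.out.println(decider.isReserved("->"));      // true  -- operator
		System.out.println(decider.isReserved("counter")); // false -- ordinary identifier
	}
}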
| 2,925
| 16.520958
| 143
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/cpp_multi/CPPReservedWordDecider.java
|
package com.ibm.ai4code.parser.cpp_multi;
import java.util.HashSet;
import com.ibm.ai4code.parser.commons.ReservedWordDeciderI;
public class CPPReservedWordDecider implements ReservedWordDeciderI{
// cat cpp14kws_orig.txt | grep ":" | cut -d':' -f2 | sed "s/'/\"/g" | sed "s/;/,/g"
public static final String [] keywords=
{
"alignas",
"alignof",
"asm",
"auto",
"bool",
"break",
"case",
"catch",
"char",
"char16_t",
"char32_t",
"class",
"const",
"constexpr",
"const_cast",
"continue",
"decltype",
"default",
"delete",
"do",
"double",
"dynamic_cast",
"else",
"enum",
"explicit",
"export",
"extern",
"false",
"final",
"float",
"for",
"friend",
"goto",
"if",
"inline",
"int",
"long",
"mutable",
"namespace",
"new",
"noexcept",
"nullptr",
"operator",
"override",
"private",
"protected",
"public",
"register",
"reinterpret_cast",
"return",
"short",
"signed",
"sizeof",
"static",
"static_assert",
"static_cast",
"struct",
"switch",
"template",
"this",
"thread_local",
"throw",
"true",
"try",
"typedef",
"typeid",
"typename",
"union",
"unsigned",
"using",
"virtual",
"void",
"volatile",
"wchar_t",
"while"
};
// cat cpp14ops_orig.txt | grep ":" | cut -d':' -f2 | sed "s/'/\"/g" | sed "s/;/,/g"
public static final String[] ops = {
"(",
")",
"[",
"]",
"{",
"}",
"+",
"-",
"*",
"/",
"%",
"^",
"&",
"|",
"~",
"!" ,
"not",
"=",
"<",
">",
"+=",
"-=",
"*=",
"/=",
"%=",
"^=",
"&=",
"|=",
"<<",
">>",
"<<=",
">>=",
"==",
"!=",
"<=",
">=",
"&&",
"and",
"||" ,
"or",
"++",
"--",
",",
"->*",
"->",
"?",
":", // weiz 2020-10-29, this is a tricky one, as my grep cut is based on ':'
"::", // weiz 2020-10-29, this is a tricky one, as my grep cut is based on ':'
";", //weiz 2020-10-29, because in my shell script, i did s/;/,/g" to make java happy with its array declaration thus, i need to put ; back
".",
".*",
"..."
};
HashSet<String> keywordsHashSet=new HashSet<String>();
HashSet<String> opsHashSet = new HashSet<String>();
public void buildKeyWordsHashSet() {
for(String keyword: keywords) {
keywordsHashSet.add(keyword);
}
}
public void buildOPsHashSet() {
for(String op: ops) {
opsHashSet.add(op);
}
}
@Override
public boolean isReserved(String word) {
return (keywordsHashSet.contains(word) || opsHashSet.contains(word));
}
public CPPReservedWordDecider() {
buildKeyWordsHashSet();
buildOPsHashSet();
}
}
| 2,931
| 16.556886
| 143
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/cymbol/CymbolBaseListener.java
|
// Generated from Cymbol.g4 by ANTLR 4.8
package com.ibm.ai4code.parser.cymbol;
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.tree.ErrorNode;
import org.antlr.v4.runtime.tree.TerminalNode;
/**
* This class provides an empty implementation of {@link CymbolListener},
* which can be extended to create a listener which only needs to handle a subset
* of the available methods.
*/
public class CymbolBaseListener implements CymbolListener {
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterFile(CymbolParser.FileContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitFile(CymbolParser.FileContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterVarDecl(CymbolParser.VarDeclContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitVarDecl(CymbolParser.VarDeclContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterType(CymbolParser.TypeContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitType(CymbolParser.TypeContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterFunctionDecl(CymbolParser.FunctionDeclContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitFunctionDecl(CymbolParser.FunctionDeclContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterFormalParameters(CymbolParser.FormalParametersContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitFormalParameters(CymbolParser.FormalParametersContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterFormalParameter(CymbolParser.FormalParameterContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitFormalParameter(CymbolParser.FormalParameterContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterBlock(CymbolParser.BlockContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitBlock(CymbolParser.BlockContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterStat(CymbolParser.StatContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitStat(CymbolParser.StatContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpr(CymbolParser.ExprContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpr(CymbolParser.ExprContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExprList(CymbolParser.ExprListContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExprList(CymbolParser.ExprListContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterEveryRule(ParserRuleContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitEveryRule(ParserRuleContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void visitTerminal(TerminalNode node) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void visitErrorNode(ErrorNode node) { }
}
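The empty callbacks above exist so that a concrete listener only overrides the rules it cares about. The subclass below is a hypothetical example (its name and purpose are assumptions): it counts functionDecl rules and would be driven by an org.antlr.v4.runtime.tree.ParseTreeWalker over a parse tree produced by CymbolParser, e.g. new ParseTreeWalker().walk(listener, parser.file()).

package com.ibm.ai4code.parser.cymbol;

public class FunctionCounterListener extends CymbolBaseListener {
	private int functionCount = 0;

	@Override
	public void enterFunctionDecl(CymbolParser.FunctionDeclContext ctx) {
		functionCount++; // invoked once for every functionDecl node the walker enters
	}

	public int getFunctionCount() {
		return functionCount;
	}
}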
| 4,224
| 25.572327
| 90
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/cymbol/CymbolLexer.java
|
// Generated from Cymbol.g4 by ANTLR 4.8
package com.ibm.ai4code.parser.cymbol;
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.*;
@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
public class CymbolLexer extends Lexer {
static { RuntimeMetaData.checkVersion("4.8", RuntimeMetaData.VERSION); }
protected static final DFA[] _decisionToDFA;
protected static final PredictionContextCache _sharedContextCache =
new PredictionContextCache();
public static final int
T__0=1, ASSIGN=2, SEMICOLON=3, LPAREN=4, RPAREN=5, LBRACK=6, RBRACK=7,
LBRACE=8, RBRACE=9, COMMA=10, BANG=11, ADD=12, SUB=13, MUL=14, EQUAL=15,
FLOAT=16, INTEGER=17, VOID=18, IF=19, THEN=20, RETURN=21, ID=22, INT=23,
WS=24, SL_COMMENT=25;
public static String[] channelNames = {
"DEFAULT_TOKEN_CHANNEL", "HIDDEN"
};
public static String[] modeNames = {
"DEFAULT_MODE"
};
private static String[] makeRuleNames() {
return new String[] {
"T__0", "ASSIGN", "SEMICOLON", "LPAREN", "RPAREN", "LBRACK", "RBRACK",
"LBRACE", "RBRACE", "COMMA", "BANG", "ADD", "SUB", "MUL", "EQUAL", "FLOAT",
"INTEGER", "VOID", "IF", "THEN", "RETURN", "ID", "LETTER", "INT", "WS",
"SL_COMMENT"
};
}
public static final String[] ruleNames = makeRuleNames();
private static String[] makeLiteralNames() {
return new String[] {
null, "'then'", "'='", "';'", "'('", "')'", "'['", "']'", "'{'", "'}'",
"','", "'!'", "'+'", "'-'", "'*'", "'=='", "'float'", "'int'", "'void'",
"'if'", "'else'", "'return'"
};
}
private static final String[] _LITERAL_NAMES = makeLiteralNames();
private static String[] makeSymbolicNames() {
return new String[] {
null, null, "ASSIGN", "SEMICOLON", "LPAREN", "RPAREN", "LBRACK", "RBRACK",
"LBRACE", "RBRACE", "COMMA", "BANG", "ADD", "SUB", "MUL", "EQUAL", "FLOAT",
"INTEGER", "VOID", "IF", "THEN", "RETURN", "ID", "INT", "WS", "SL_COMMENT"
};
}
private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames();
public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES);
/**
* @deprecated Use {@link #VOCABULARY} instead.
*/
@Deprecated
public static final String[] tokenNames;
static {
tokenNames = new String[_SYMBOLIC_NAMES.length];
for (int i = 0; i < tokenNames.length; i++) {
tokenNames[i] = VOCABULARY.getLiteralName(i);
if (tokenNames[i] == null) {
tokenNames[i] = VOCABULARY.getSymbolicName(i);
}
if (tokenNames[i] == null) {
tokenNames[i] = "<INVALID>";
}
}
}
@Override
@Deprecated
public String[] getTokenNames() {
return tokenNames;
}
@Override
public Vocabulary getVocabulary() {
return VOCABULARY;
}
public CymbolLexer(CharStream input) {
super(input);
_interp = new LexerATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache);
}
@Override
public String getGrammarFileName() { return "Cymbol.g4"; }
@Override
public String[] getRuleNames() { return ruleNames; }
@Override
public String getSerializedATN() { return _serializedATN; }
@Override
public String[] getChannelNames() { return channelNames; }
@Override
public String[] getModeNames() { return modeNames; }
@Override
public ATN getATN() { return _ATN; }
public static final String _serializedATN =
"\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\33\u009a\b\1\4\2"+
"\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4"+
"\13\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22"+
"\t\22\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31"+
"\t\31\4\32\t\32\4\33\t\33\3\2\3\2\3\2\3\2\3\2\3\3\3\3\3\4\3\4\3\5\3\5"+
"\3\6\3\6\3\7\3\7\3\b\3\b\3\t\3\t\3\n\3\n\3\13\3\13\3\f\3\f\3\r\3\r\3\16"+
"\3\16\3\17\3\17\3\20\3\20\3\20\3\21\3\21\3\21\3\21\3\21\3\21\3\22\3\22"+
"\3\22\3\22\3\23\3\23\3\23\3\23\3\23\3\24\3\24\3\24\3\25\3\25\3\25\3\25"+
"\3\25\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\27\3\27\3\27\7\27{\n\27\f\27"+
"\16\27~\13\27\3\30\3\30\3\31\6\31\u0083\n\31\r\31\16\31\u0084\3\32\6\32"+
"\u0088\n\32\r\32\16\32\u0089\3\32\3\32\3\33\3\33\3\33\3\33\7\33\u0092"+
"\n\33\f\33\16\33\u0095\13\33\3\33\3\33\3\33\3\33\3\u0093\2\34\3\3\5\4"+
"\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f\27\r\31\16\33\17\35\20\37\21!\22"+
"#\23%\24\'\25)\26+\27-\30/\2\61\31\63\32\65\33\3\2\5\3\2\62;\4\2C\\c|"+
"\5\2\13\f\17\17\"\"\2\u009d\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3"+
"\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2"+
"\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3\2\2\2\2\33\3\2\2\2\2\35\3\2\2\2\2\37"+
"\3\2\2\2\2!\3\2\2\2\2#\3\2\2\2\2%\3\2\2\2\2\'\3\2\2\2\2)\3\2\2\2\2+\3"+
"\2\2\2\2-\3\2\2\2\2\61\3\2\2\2\2\63\3\2\2\2\2\65\3\2\2\2\3\67\3\2\2\2"+
"\5<\3\2\2\2\7>\3\2\2\2\t@\3\2\2\2\13B\3\2\2\2\rD\3\2\2\2\17F\3\2\2\2\21"+
"H\3\2\2\2\23J\3\2\2\2\25L\3\2\2\2\27N\3\2\2\2\31P\3\2\2\2\33R\3\2\2\2"+
"\35T\3\2\2\2\37V\3\2\2\2!Y\3\2\2\2#_\3\2\2\2%c\3\2\2\2\'h\3\2\2\2)k\3"+
"\2\2\2+p\3\2\2\2-w\3\2\2\2/\177\3\2\2\2\61\u0082\3\2\2\2\63\u0087\3\2"+
"\2\2\65\u008d\3\2\2\2\678\7v\2\289\7j\2\29:\7g\2\2:;\7p\2\2;\4\3\2\2\2"+
"<=\7?\2\2=\6\3\2\2\2>?\7=\2\2?\b\3\2\2\2@A\7*\2\2A\n\3\2\2\2BC\7+\2\2"+
"C\f\3\2\2\2DE\7]\2\2E\16\3\2\2\2FG\7_\2\2G\20\3\2\2\2HI\7}\2\2I\22\3\2"+
"\2\2JK\7\177\2\2K\24\3\2\2\2LM\7.\2\2M\26\3\2\2\2NO\7#\2\2O\30\3\2\2\2"+
"PQ\7-\2\2Q\32\3\2\2\2RS\7/\2\2S\34\3\2\2\2TU\7,\2\2U\36\3\2\2\2VW\7?\2"+
"\2WX\7?\2\2X \3\2\2\2YZ\7h\2\2Z[\7n\2\2[\\\7q\2\2\\]\7c\2\2]^\7v\2\2^"+
"\"\3\2\2\2_`\7k\2\2`a\7p\2\2ab\7v\2\2b$\3\2\2\2cd\7x\2\2de\7q\2\2ef\7"+
"k\2\2fg\7f\2\2g&\3\2\2\2hi\7k\2\2ij\7h\2\2j(\3\2\2\2kl\7g\2\2lm\7n\2\2"+
"mn\7u\2\2no\7g\2\2o*\3\2\2\2pq\7t\2\2qr\7g\2\2rs\7v\2\2st\7w\2\2tu\7t"+
"\2\2uv\7p\2\2v,\3\2\2\2w|\5/\30\2x{\5/\30\2y{\t\2\2\2zx\3\2\2\2zy\3\2"+
"\2\2{~\3\2\2\2|z\3\2\2\2|}\3\2\2\2}.\3\2\2\2~|\3\2\2\2\177\u0080\t\3\2"+
"\2\u0080\60\3\2\2\2\u0081\u0083\t\2\2\2\u0082\u0081\3\2\2\2\u0083\u0084"+
"\3\2\2\2\u0084\u0082\3\2\2\2\u0084\u0085\3\2\2\2\u0085\62\3\2\2\2\u0086"+
"\u0088\t\4\2\2\u0087\u0086\3\2\2\2\u0088\u0089\3\2\2\2\u0089\u0087\3\2"+
"\2\2\u0089\u008a\3\2\2\2\u008a\u008b\3\2\2\2\u008b\u008c\b\32\2\2\u008c"+
"\64\3\2\2\2\u008d\u008e\7\61\2\2\u008e\u008f\7\61\2\2\u008f\u0093\3\2"+
"\2\2\u0090\u0092\13\2\2\2\u0091\u0090\3\2\2\2\u0092\u0095\3\2\2\2\u0093"+
"\u0094\3\2\2\2\u0093\u0091\3\2\2\2\u0094\u0096\3\2\2\2\u0095\u0093\3\2"+
"\2\2\u0096\u0097\7\f\2\2\u0097\u0098\3\2\2\2\u0098\u0099\b\33\2\2\u0099"+
"\66\3\2\2\2\b\2z|\u0084\u0089\u0093\3\b\2\2";
public static final ATN _ATN =
new ATNDeserializer().deserialize(_serializedATN.toCharArray());
static {
_decisionToDFA = new DFA[_ATN.getNumberOfDecisions()];
for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) {
_decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i);
}
}
}
| 7,028
| 41.08982
| 97
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/cymbol/CymbolListener.java
|
// Generated from Cymbol.g4 by ANTLR 4.8
package com.ibm.ai4code.parser.cymbol;
import org.antlr.v4.runtime.tree.ParseTreeListener;
/**
* This interface defines a complete listener for a parse tree produced by
* {@link CymbolParser}.
*/
public interface CymbolListener extends ParseTreeListener {
/**
* Enter a parse tree produced by {@link CymbolParser#file}.
* @param ctx the parse tree
*/
void enterFile(CymbolParser.FileContext ctx);
/**
* Exit a parse tree produced by {@link CymbolParser#file}.
* @param ctx the parse tree
*/
void exitFile(CymbolParser.FileContext ctx);
/**
* Enter a parse tree produced by {@link CymbolParser#varDecl}.
* @param ctx the parse tree
*/
void enterVarDecl(CymbolParser.VarDeclContext ctx);
/**
* Exit a parse tree produced by {@link CymbolParser#varDecl}.
* @param ctx the parse tree
*/
void exitVarDecl(CymbolParser.VarDeclContext ctx);
/**
* Enter a parse tree produced by {@link CymbolParser#type}.
* @param ctx the parse tree
*/
void enterType(CymbolParser.TypeContext ctx);
/**
* Exit a parse tree produced by {@link CymbolParser#type}.
* @param ctx the parse tree
*/
void exitType(CymbolParser.TypeContext ctx);
/**
* Enter a parse tree produced by {@link CymbolParser#functionDecl}.
* @param ctx the parse tree
*/
void enterFunctionDecl(CymbolParser.FunctionDeclContext ctx);
/**
* Exit a parse tree produced by {@link CymbolParser#functionDecl}.
* @param ctx the parse tree
*/
void exitFunctionDecl(CymbolParser.FunctionDeclContext ctx);
/**
* Enter a parse tree produced by {@link CymbolParser#formalParameters}.
* @param ctx the parse tree
*/
void enterFormalParameters(CymbolParser.FormalParametersContext ctx);
/**
* Exit a parse tree produced by {@link CymbolParser#formalParameters}.
* @param ctx the parse tree
*/
void exitFormalParameters(CymbolParser.FormalParametersContext ctx);
/**
* Enter a parse tree produced by {@link CymbolParser#formalParameter}.
* @param ctx the parse tree
*/
void enterFormalParameter(CymbolParser.FormalParameterContext ctx);
/**
* Exit a parse tree produced by {@link CymbolParser#formalParameter}.
* @param ctx the parse tree
*/
void exitFormalParameter(CymbolParser.FormalParameterContext ctx);
/**
* Enter a parse tree produced by {@link CymbolParser#block}.
* @param ctx the parse tree
*/
void enterBlock(CymbolParser.BlockContext ctx);
/**
* Exit a parse tree produced by {@link CymbolParser#block}.
* @param ctx the parse tree
*/
void exitBlock(CymbolParser.BlockContext ctx);
/**
* Enter a parse tree produced by {@link CymbolParser#stat}.
* @param ctx the parse tree
*/
void enterStat(CymbolParser.StatContext ctx);
/**
* Exit a parse tree produced by {@link CymbolParser#stat}.
* @param ctx the parse tree
*/
void exitStat(CymbolParser.StatContext ctx);
/**
* Enter a parse tree produced by {@link CymbolParser#expr}.
* @param ctx the parse tree
*/
void enterExpr(CymbolParser.ExprContext ctx);
/**
* Exit a parse tree produced by {@link CymbolParser#expr}.
* @param ctx the parse tree
*/
void exitExpr(CymbolParser.ExprContext ctx);
/**
* Enter a parse tree produced by {@link CymbolParser#exprList}.
* @param ctx the parse tree
*/
void enterExprList(CymbolParser.ExprListContext ctx);
/**
* Exit a parse tree produced by {@link CymbolParser#exprList}.
* @param ctx the parse tree
*/
void exitExprList(CymbolParser.ExprListContext ctx);
}
| 3,496
| 30.790909
| 74
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/cymbol/CymbolParser.java
|
// Generated from Cymbol.g4 by ANTLR 4.8
package com.ibm.ai4code.parser.cymbol;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.misc.*;
import org.antlr.v4.runtime.tree.*;
import java.util.List;
import java.util.Iterator;
import java.util.ArrayList;
@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
public class CymbolParser extends Parser {
static { RuntimeMetaData.checkVersion("4.8", RuntimeMetaData.VERSION); }
protected static final DFA[] _decisionToDFA;
protected static final PredictionContextCache _sharedContextCache =
new PredictionContextCache();
public static final int
T__0=1, ASSIGN=2, SEMICOLON=3, LPAREN=4, RPAREN=5, LBRACK=6, RBRACK=7,
LBRACE=8, RBRACE=9, COMMA=10, BANG=11, ADD=12, SUB=13, MUL=14, EQUAL=15,
FLOAT=16, INTEGER=17, VOID=18, IF=19, THEN=20, RETURN=21, ID=22, INT=23,
WS=24, SL_COMMENT=25;
public static final int
RULE_file = 0, RULE_varDecl = 1, RULE_type = 2, RULE_functionDecl = 3,
RULE_formalParameters = 4, RULE_formalParameter = 5, RULE_block = 6, RULE_stat = 7,
RULE_expr = 8, RULE_exprList = 9;
private static String[] makeRuleNames() {
return new String[] {
"file", "varDecl", "type", "functionDecl", "formalParameters", "formalParameter",
"block", "stat", "expr", "exprList"
};
}
public static final String[] ruleNames = makeRuleNames();
private static String[] makeLiteralNames() {
return new String[] {
null, "'then'", "'='", "';'", "'('", "')'", "'['", "']'", "'{'", "'}'",
"','", "'!'", "'+'", "'-'", "'*'", "'=='", "'float'", "'int'", "'void'",
"'if'", "'else'", "'return'"
};
}
private static final String[] _LITERAL_NAMES = makeLiteralNames();
private static String[] makeSymbolicNames() {
return new String[] {
null, null, "ASSIGN", "SEMICOLON", "LPAREN", "RPAREN", "LBRACK", "RBRACK",
"LBRACE", "RBRACE", "COMMA", "BANG", "ADD", "SUB", "MUL", "EQUAL", "FLOAT",
"INTEGER", "VOID", "IF", "THEN", "RETURN", "ID", "INT", "WS", "SL_COMMENT"
};
}
private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames();
public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES);
/**
* @deprecated Use {@link #VOCABULARY} instead.
*/
@Deprecated
public static final String[] tokenNames;
static {
tokenNames = new String[_SYMBOLIC_NAMES.length];
for (int i = 0; i < tokenNames.length; i++) {
tokenNames[i] = VOCABULARY.getLiteralName(i);
if (tokenNames[i] == null) {
tokenNames[i] = VOCABULARY.getSymbolicName(i);
}
if (tokenNames[i] == null) {
tokenNames[i] = "<INVALID>";
}
}
}
@Override
@Deprecated
public String[] getTokenNames() {
return tokenNames;
}
@Override
public Vocabulary getVocabulary() {
return VOCABULARY;
}
@Override
public String getGrammarFileName() { return "Cymbol.g4"; }
@Override
public String[] getRuleNames() { return ruleNames; }
@Override
public String getSerializedATN() { return _serializedATN; }
@Override
public ATN getATN() { return _ATN; }
public CymbolParser(TokenStream input) {
super(input);
_interp = new ParserATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache);
}
public static class FileContext extends ParserRuleContext {
public List<FunctionDeclContext> functionDecl() {
return getRuleContexts(FunctionDeclContext.class);
}
public FunctionDeclContext functionDecl(int i) {
return getRuleContext(FunctionDeclContext.class,i);
}
public List<VarDeclContext> varDecl() {
return getRuleContexts(VarDeclContext.class);
}
public VarDeclContext varDecl(int i) {
return getRuleContext(VarDeclContext.class,i);
}
public FileContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_file; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).enterFile(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).exitFile(this);
}
}
public final FileContext file() throws RecognitionException {
FileContext _localctx = new FileContext(_ctx, getState());
enterRule(_localctx, 0, RULE_file);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(22);
_errHandler.sync(this);
_la = _input.LA(1);
do {
{
setState(22);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,0,_ctx) ) {
case 1:
{
setState(20);
functionDecl();
}
break;
case 2:
{
setState(21);
varDecl();
}
break;
}
}
setState(24);
_errHandler.sync(this);
_la = _input.LA(1);
} while ( (((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << FLOAT) | (1L << INTEGER) | (1L << VOID))) != 0) );
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
public static class VarDeclContext extends ParserRuleContext {
public TypeContext type() {
return getRuleContext(TypeContext.class,0);
}
public TerminalNode ID() { return getToken(CymbolParser.ID, 0); }
public TerminalNode SEMICOLON() { return getToken(CymbolParser.SEMICOLON, 0); }
public TerminalNode ASSIGN() { return getToken(CymbolParser.ASSIGN, 0); }
public ExprContext expr() {
return getRuleContext(ExprContext.class,0);
}
public VarDeclContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_varDecl; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).enterVarDecl(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).exitVarDecl(this);
}
}
public final VarDeclContext varDecl() throws RecognitionException {
VarDeclContext _localctx = new VarDeclContext(_ctx, getState());
enterRule(_localctx, 2, RULE_varDecl);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(26);
type();
setState(27);
match(ID);
setState(30);
_errHandler.sync(this);
_la = _input.LA(1);
if (_la==ASSIGN) {
{
setState(28);
match(ASSIGN);
setState(29);
expr(0);
}
}
setState(32);
match(SEMICOLON);
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
public static class TypeContext extends ParserRuleContext {
public TerminalNode FLOAT() { return getToken(CymbolParser.FLOAT, 0); }
public TerminalNode INTEGER() { return getToken(CymbolParser.INTEGER, 0); }
public TerminalNode VOID() { return getToken(CymbolParser.VOID, 0); }
public TypeContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_type; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).enterType(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).exitType(this);
}
}
public final TypeContext type() throws RecognitionException {
TypeContext _localctx = new TypeContext(_ctx, getState());
enterRule(_localctx, 4, RULE_type);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(34);
_la = _input.LA(1);
if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << FLOAT) | (1L << INTEGER) | (1L << VOID))) != 0)) ) {
_errHandler.recoverInline(this);
}
else {
if ( _input.LA(1)==Token.EOF ) matchedEOF = true;
_errHandler.reportMatch(this);
consume();
}
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
public static class FunctionDeclContext extends ParserRuleContext {
public TypeContext type() {
return getRuleContext(TypeContext.class,0);
}
public TerminalNode ID() { return getToken(CymbolParser.ID, 0); }
public TerminalNode LPAREN() { return getToken(CymbolParser.LPAREN, 0); }
public TerminalNode RPAREN() { return getToken(CymbolParser.RPAREN, 0); }
public BlockContext block() {
return getRuleContext(BlockContext.class,0);
}
public FormalParametersContext formalParameters() {
return getRuleContext(FormalParametersContext.class,0);
}
public FunctionDeclContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_functionDecl; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).enterFunctionDecl(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).exitFunctionDecl(this);
}
}
public final FunctionDeclContext functionDecl() throws RecognitionException {
FunctionDeclContext _localctx = new FunctionDeclContext(_ctx, getState());
enterRule(_localctx, 6, RULE_functionDecl);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(36);
type();
setState(37);
match(ID);
setState(38);
match(LPAREN);
setState(40);
_errHandler.sync(this);
_la = _input.LA(1);
if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << FLOAT) | (1L << INTEGER) | (1L << VOID))) != 0)) {
{
setState(39);
formalParameters();
}
}
setState(42);
match(RPAREN);
setState(43);
block();
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
public static class FormalParametersContext extends ParserRuleContext {
public List<FormalParameterContext> formalParameter() {
return getRuleContexts(FormalParameterContext.class);
}
public FormalParameterContext formalParameter(int i) {
return getRuleContext(FormalParameterContext.class,i);
}
public List<TerminalNode> COMMA() { return getTokens(CymbolParser.COMMA); }
public TerminalNode COMMA(int i) {
return getToken(CymbolParser.COMMA, i);
}
public FormalParametersContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_formalParameters; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).enterFormalParameters(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).exitFormalParameters(this);
}
}
public final FormalParametersContext formalParameters() throws RecognitionException {
FormalParametersContext _localctx = new FormalParametersContext(_ctx, getState());
enterRule(_localctx, 8, RULE_formalParameters);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(45);
formalParameter();
setState(50);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==COMMA) {
{
{
setState(46);
match(COMMA);
setState(47);
formalParameter();
}
}
setState(52);
_errHandler.sync(this);
_la = _input.LA(1);
}
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
public static class FormalParameterContext extends ParserRuleContext {
public TypeContext type() {
return getRuleContext(TypeContext.class,0);
}
public TerminalNode ID() { return getToken(CymbolParser.ID, 0); }
public FormalParameterContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_formalParameter; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).enterFormalParameter(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).exitFormalParameter(this);
}
}
public final FormalParameterContext formalParameter() throws RecognitionException {
FormalParameterContext _localctx = new FormalParameterContext(_ctx, getState());
enterRule(_localctx, 10, RULE_formalParameter);
try {
enterOuterAlt(_localctx, 1);
{
setState(53);
type();
setState(54);
match(ID);
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
public static class BlockContext extends ParserRuleContext {
public TerminalNode LBRACE() { return getToken(CymbolParser.LBRACE, 0); }
public TerminalNode RBRACE() { return getToken(CymbolParser.RBRACE, 0); }
public List<StatContext> stat() {
return getRuleContexts(StatContext.class);
}
public StatContext stat(int i) {
return getRuleContext(StatContext.class,i);
}
public BlockContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_block; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).enterBlock(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).exitBlock(this);
}
}
public final BlockContext block() throws RecognitionException {
BlockContext _localctx = new BlockContext(_ctx, getState());
enterRule(_localctx, 12, RULE_block);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(56);
match(LBRACE);
setState(60);
_errHandler.sync(this);
_la = _input.LA(1);
while ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LPAREN) | (1L << LBRACE) | (1L << BANG) | (1L << SUB) | (1L << FLOAT) | (1L << INTEGER) | (1L << VOID) | (1L << IF) | (1L << RETURN) | (1L << ID) | (1L << INT))) != 0)) {
{
{
setState(57);
stat();
}
}
setState(62);
_errHandler.sync(this);
_la = _input.LA(1);
}
setState(63);
match(RBRACE);
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
public static class StatContext extends ParserRuleContext {
public BlockContext block() {
return getRuleContext(BlockContext.class,0);
}
public VarDeclContext varDecl() {
return getRuleContext(VarDeclContext.class,0);
}
public TerminalNode IF() { return getToken(CymbolParser.IF, 0); }
public List<ExprContext> expr() {
return getRuleContexts(ExprContext.class);
}
public ExprContext expr(int i) {
return getRuleContext(ExprContext.class,i);
}
public List<StatContext> stat() {
return getRuleContexts(StatContext.class);
}
public StatContext stat(int i) {
return getRuleContext(StatContext.class,i);
}
public TerminalNode THEN() { return getToken(CymbolParser.THEN, 0); }
public TerminalNode RETURN() { return getToken(CymbolParser.RETURN, 0); }
public TerminalNode SEMICOLON() { return getToken(CymbolParser.SEMICOLON, 0); }
public TerminalNode ASSIGN() { return getToken(CymbolParser.ASSIGN, 0); }
public StatContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_stat; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).enterStat(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).exitStat(this);
}
}
public final StatContext stat() throws RecognitionException {
StatContext _localctx = new StatContext(_ctx, getState());
enterRule(_localctx, 14, RULE_stat);
int _la;
try {
setState(88);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,8,_ctx) ) {
case 1:
enterOuterAlt(_localctx, 1);
{
setState(65);
block();
}
break;
case 2:
enterOuterAlt(_localctx, 2);
{
setState(66);
varDecl();
}
break;
case 3:
enterOuterAlt(_localctx, 3);
{
setState(67);
match(IF);
setState(68);
expr(0);
setState(69);
match(T__0);
setState(70);
stat();
setState(73);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,6,_ctx) ) {
case 1:
{
setState(71);
match(THEN);
setState(72);
stat();
}
break;
}
}
break;
case 4:
enterOuterAlt(_localctx, 4);
{
setState(75);
match(RETURN);
setState(77);
_errHandler.sync(this);
_la = _input.LA(1);
if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LPAREN) | (1L << BANG) | (1L << SUB) | (1L << ID) | (1L << INT))) != 0)) {
{
setState(76);
expr(0);
}
}
setState(79);
match(SEMICOLON);
}
break;
case 5:
enterOuterAlt(_localctx, 5);
{
setState(80);
expr(0);
setState(81);
match(ASSIGN);
setState(82);
expr(0);
setState(83);
match(SEMICOLON);
}
break;
case 6:
enterOuterAlt(_localctx, 6);
{
setState(85);
expr(0);
setState(86);
match(SEMICOLON);
}
break;
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
public static class ExprContext extends ParserRuleContext {
public TerminalNode ID() { return getToken(CymbolParser.ID, 0); }
public TerminalNode LPAREN() { return getToken(CymbolParser.LPAREN, 0); }
public TerminalNode RPAREN() { return getToken(CymbolParser.RPAREN, 0); }
public ExprListContext exprList() {
return getRuleContext(ExprListContext.class,0);
}
public TerminalNode LBRACK() { return getToken(CymbolParser.LBRACK, 0); }
public List<ExprContext> expr() {
return getRuleContexts(ExprContext.class);
}
public ExprContext expr(int i) {
return getRuleContext(ExprContext.class,i);
}
public TerminalNode RBRACK() { return getToken(CymbolParser.RBRACK, 0); }
public TerminalNode SUB() { return getToken(CymbolParser.SUB, 0); }
public TerminalNode BANG() { return getToken(CymbolParser.BANG, 0); }
public TerminalNode INT() { return getToken(CymbolParser.INT, 0); }
public TerminalNode MUL() { return getToken(CymbolParser.MUL, 0); }
public TerminalNode ADD() { return getToken(CymbolParser.ADD, 0); }
public TerminalNode EQUAL() { return getToken(CymbolParser.EQUAL, 0); }
public ExprContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_expr; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).enterExpr(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).exitExpr(this);
}
}
public final ExprContext expr() throws RecognitionException {
return expr(0);
}
private ExprContext expr(int _p) throws RecognitionException {
ParserRuleContext _parentctx = _ctx;
int _parentState = getState();
ExprContext _localctx = new ExprContext(_ctx, _parentState);
ExprContext _prevctx = _localctx;
int _startState = 16;
enterRecursionRule(_localctx, 16, RULE_expr, _p);
int _la;
try {
int _alt;
enterOuterAlt(_localctx, 1);
{
setState(112);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,10,_ctx) ) {
case 1:
{
setState(91);
match(ID);
setState(92);
match(LPAREN);
setState(94);
_errHandler.sync(this);
_la = _input.LA(1);
if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LPAREN) | (1L << BANG) | (1L << SUB) | (1L << ID) | (1L << INT))) != 0)) {
{
setState(93);
exprList();
}
}
setState(96);
match(RPAREN);
}
break;
case 2:
{
setState(97);
match(ID);
setState(98);
match(LBRACK);
setState(99);
expr(0);
setState(100);
match(RBRACK);
}
break;
case 3:
{
setState(102);
match(SUB);
setState(103);
expr(8);
}
break;
case 4:
{
setState(104);
match(BANG);
setState(105);
expr(7);
}
break;
case 5:
{
setState(106);
match(ID);
}
break;
case 6:
{
setState(107);
match(INT);
}
break;
case 7:
{
setState(108);
match(LPAREN);
setState(109);
expr(0);
setState(110);
match(RPAREN);
}
break;
}
_ctx.stop = _input.LT(-1);
setState(125);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,12,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
if ( _alt==1 ) {
if ( _parseListeners!=null ) triggerExitRuleEvent();
_prevctx = _localctx;
{
setState(123);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) {
case 1:
{
_localctx = new ExprContext(_parentctx, _parentState);
pushNewRecursionContext(_localctx, _startState, RULE_expr);
setState(114);
if (!(precpred(_ctx, 6))) throw new FailedPredicateException(this, "precpred(_ctx, 6)");
setState(115);
match(MUL);
setState(116);
expr(7);
}
break;
case 2:
{
_localctx = new ExprContext(_parentctx, _parentState);
pushNewRecursionContext(_localctx, _startState, RULE_expr);
setState(117);
if (!(precpred(_ctx, 5))) throw new FailedPredicateException(this, "precpred(_ctx, 5)");
setState(118);
_la = _input.LA(1);
if ( !(_la==ADD || _la==SUB) ) {
_errHandler.recoverInline(this);
}
else {
if ( _input.LA(1)==Token.EOF ) matchedEOF = true;
_errHandler.reportMatch(this);
consume();
}
setState(119);
expr(6);
}
break;
case 3:
{
_localctx = new ExprContext(_parentctx, _parentState);
pushNewRecursionContext(_localctx, _startState, RULE_expr);
setState(120);
if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)");
setState(121);
match(EQUAL);
setState(122);
expr(5);
}
break;
}
}
}
setState(127);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,12,_ctx);
}
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
unrollRecursionContexts(_parentctx);
}
return _localctx;
}
public static class ExprListContext extends ParserRuleContext {
public List<ExprContext> expr() {
return getRuleContexts(ExprContext.class);
}
public ExprContext expr(int i) {
return getRuleContext(ExprContext.class,i);
}
public List<TerminalNode> COMMA() { return getTokens(CymbolParser.COMMA); }
public TerminalNode COMMA(int i) {
return getToken(CymbolParser.COMMA, i);
}
public ExprListContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_exprList; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).enterExprList(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).exitExprList(this);
}
}
public final ExprListContext exprList() throws RecognitionException {
ExprListContext _localctx = new ExprListContext(_ctx, getState());
enterRule(_localctx, 18, RULE_exprList);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(128);
expr(0);
setState(133);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==COMMA) {
{
{
setState(129);
match(COMMA);
setState(130);
expr(0);
}
}
setState(135);
_errHandler.sync(this);
_la = _input.LA(1);
}
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) {
switch (ruleIndex) {
case 8:
return expr_sempred((ExprContext)_localctx, predIndex);
}
return true;
}
private boolean expr_sempred(ExprContext _localctx, int predIndex) {
switch (predIndex) {
case 0:
return precpred(_ctx, 6);
case 1:
return precpred(_ctx, 5);
case 2:
return precpred(_ctx, 4);
}
return true;
}
public static final String _serializedATN =
"\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3\33\u008b\4\2\t\2"+
"\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13"+
"\t\13\3\2\3\2\6\2\31\n\2\r\2\16\2\32\3\3\3\3\3\3\3\3\5\3!\n\3\3\3\3\3"+
"\3\4\3\4\3\5\3\5\3\5\3\5\5\5+\n\5\3\5\3\5\3\5\3\6\3\6\3\6\7\6\63\n\6\f"+
"\6\16\6\66\13\6\3\7\3\7\3\7\3\b\3\b\7\b=\n\b\f\b\16\b@\13\b\3\b\3\b\3"+
"\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\5\tL\n\t\3\t\3\t\5\tP\n\t\3\t\3\t\3\t\3"+
"\t\3\t\3\t\3\t\3\t\3\t\5\t[\n\t\3\n\3\n\3\n\3\n\5\na\n\n\3\n\3\n\3\n\3"+
"\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\5\ns\n\n\3\n\3\n\3"+
"\n\3\n\3\n\3\n\3\n\3\n\3\n\7\n~\n\n\f\n\16\n\u0081\13\n\3\13\3\13\3\13"+
"\7\13\u0086\n\13\f\13\16\13\u0089\13\13\3\13\2\3\22\f\2\4\6\b\n\f\16\20"+
"\22\24\2\4\3\2\22\24\3\2\16\17\2\u0098\2\30\3\2\2\2\4\34\3\2\2\2\6$\3"+
"\2\2\2\b&\3\2\2\2\n/\3\2\2\2\f\67\3\2\2\2\16:\3\2\2\2\20Z\3\2\2\2\22r"+
"\3\2\2\2\24\u0082\3\2\2\2\26\31\5\b\5\2\27\31\5\4\3\2\30\26\3\2\2\2\30"+
"\27\3\2\2\2\31\32\3\2\2\2\32\30\3\2\2\2\32\33\3\2\2\2\33\3\3\2\2\2\34"+
"\35\5\6\4\2\35 \7\30\2\2\36\37\7\4\2\2\37!\5\22\n\2 \36\3\2\2\2 !\3\2"+
"\2\2!\"\3\2\2\2\"#\7\5\2\2#\5\3\2\2\2$%\t\2\2\2%\7\3\2\2\2&\'\5\6\4\2"+
"\'(\7\30\2\2(*\7\6\2\2)+\5\n\6\2*)\3\2\2\2*+\3\2\2\2+,\3\2\2\2,-\7\7\2"+
"\2-.\5\16\b\2.\t\3\2\2\2/\64\5\f\7\2\60\61\7\f\2\2\61\63\5\f\7\2\62\60"+
"\3\2\2\2\63\66\3\2\2\2\64\62\3\2\2\2\64\65\3\2\2\2\65\13\3\2\2\2\66\64"+
"\3\2\2\2\678\5\6\4\289\7\30\2\29\r\3\2\2\2:>\7\n\2\2;=\5\20\t\2<;\3\2"+
"\2\2=@\3\2\2\2><\3\2\2\2>?\3\2\2\2?A\3\2\2\2@>\3\2\2\2AB\7\13\2\2B\17"+
"\3\2\2\2C[\5\16\b\2D[\5\4\3\2EF\7\25\2\2FG\5\22\n\2GH\7\3\2\2HK\5\20\t"+
"\2IJ\7\26\2\2JL\5\20\t\2KI\3\2\2\2KL\3\2\2\2L[\3\2\2\2MO\7\27\2\2NP\5"+
"\22\n\2ON\3\2\2\2OP\3\2\2\2PQ\3\2\2\2Q[\7\5\2\2RS\5\22\n\2ST\7\4\2\2T"+
"U\5\22\n\2UV\7\5\2\2V[\3\2\2\2WX\5\22\n\2XY\7\5\2\2Y[\3\2\2\2ZC\3\2\2"+
"\2ZD\3\2\2\2ZE\3\2\2\2ZM\3\2\2\2ZR\3\2\2\2ZW\3\2\2\2[\21\3\2\2\2\\]\b"+
"\n\1\2]^\7\30\2\2^`\7\6\2\2_a\5\24\13\2`_\3\2\2\2`a\3\2\2\2ab\3\2\2\2"+
"bs\7\7\2\2cd\7\30\2\2de\7\b\2\2ef\5\22\n\2fg\7\t\2\2gs\3\2\2\2hi\7\17"+
"\2\2is\5\22\n\njk\7\r\2\2ks\5\22\n\tls\7\30\2\2ms\7\31\2\2no\7\6\2\2o"+
"p\5\22\n\2pq\7\7\2\2qs\3\2\2\2r\\\3\2\2\2rc\3\2\2\2rh\3\2\2\2rj\3\2\2"+
"\2rl\3\2\2\2rm\3\2\2\2rn\3\2\2\2s\177\3\2\2\2tu\f\b\2\2uv\7\20\2\2v~\5"+
"\22\n\twx\f\7\2\2xy\t\3\2\2y~\5\22\n\bz{\f\6\2\2{|\7\21\2\2|~\5\22\n\7"+
"}t\3\2\2\2}w\3\2\2\2}z\3\2\2\2~\u0081\3\2\2\2\177}\3\2\2\2\177\u0080\3"+
"\2\2\2\u0080\23\3\2\2\2\u0081\177\3\2\2\2\u0082\u0087\5\22\n\2\u0083\u0084"+
"\7\f\2\2\u0084\u0086\5\22\n\2\u0085\u0083\3\2\2\2\u0086\u0089\3\2\2\2"+
"\u0087\u0085\3\2\2\2\u0087\u0088\3\2\2\2\u0088\25\3\2\2\2\u0089\u0087"+
"\3\2\2\2\20\30\32 *\64>KOZ`r}\177\u0087";
public static final ATN _ATN =
new ATNDeserializer().deserialize(_serializedATN.toCharArray());
static {
_decisionToDFA = new DFA[_ATN.getNumberOfDecisions()];
for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) {
_decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i);
}
}
}
| 28,956
| 28.368154
| 228
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/cymbol/CymbolReservedWordDecider.java
|
package com.ibm.ai4code.parser.cymbol;
import java.util.HashSet;
import com.ibm.ai4code.parser.commons.ReservedWordDeciderI;
public class CymbolReservedWordDecider implements ReservedWordDeciderI{
public static final String [] keywords = {
"float",
"int",
"void",
"if",
"then",
"else",
"return"
};
public static final String [] ops = {
"(",
")",
"[",
"]",
"-",
"!",
"*",
"+",
"-",
"=="
};
@Override
public boolean isReserved(String word) {
return (keywordsHashSet.contains(word) || opsHashSet.contains(word));
}
HashSet<String> keywordsHashSet=new HashSet<String>();
HashSet<String> opsHashSet = new HashSet<String>();
public void buildKeyWordsHashSet() {
for(String keyword: keywords) {
keywordsHashSet.add(keyword);
}
}
public void buildOPsHashSet() {
for(String op: ops) {
opsHashSet.add(op);
}
}
public CymbolReservedWordDecider() {
buildKeyWordsHashSet();
buildOPsHashSet();
}
}
| 985
| 16.607143
| 71
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/cymbol/Main.java
|
package com.ibm.ai4code.parser.cymbol;
import org.antlr.v4.runtime.CommonTokenStream;
import com.ibm.ai4code.parser.c_multi.C11Parser;
public class Main {
// public static void tokenize() {
// lexer = new CymbolLexer(input);
// tokenStream = new CommonTokenStream(lexer);
// C11Parser parser = new com.ibm.ai4code.parser.c11.C11Parser(tokenStream);
// //parser.removeErrorListeners();
// //parser.addErrorListener(new SPTBaseErrorListener(srcFileName));
// ruleNames = parser.getRuleNames();
// tree = parser.compilationUnit();
// rwdi = new com.ibm.ai4code.parser.c11.C11ReservedWordDecider();
// }
public static void main(String[] args) {
}
}
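The commented-out tokenize() above pairs CymbolLexer with the C11 parser, so the intended wiring is unclear; the sketch below is one plausible, self-contained way to drive the Cymbol lexer/parser pair from a source string (the class name and sample input are assumptions).

package com.ibm.ai4code.parser.cymbol;

import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.tree.ParseTree;

public class CymbolParseDemo {
	public static void main(String[] args) {
		// Lex and parse a tiny Cymbol program, starting from the grammar's 'file' rule.
		CymbolLexer lexer = new CymbolLexer(CharStreams.fromString("int x = 1;"));
		CymbolParser parser = new CymbolParser(new CommonTokenStream(lexer));
		ParseTree tree = parser.file();
		System.out.println(tree.toStringTree(parser)); // LISP-style rendering of the parse tree
	}
}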
| 664
| 26.708333
| 77
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/cymbol_multi/CymbolBaseListener.java
|
// Generated from Cymbol.g4 by ANTLR 4.8
package com.ibm.ai4code.parser.cymbol_multi;
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.tree.ErrorNode;
import org.antlr.v4.runtime.tree.TerminalNode;
/**
* This class provides an empty implementation of {@link CymbolListener},
* which can be extended to create a listener which only needs to handle a subset
* of the available methods.
*/
public class CymbolBaseListener implements CymbolListener {
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterFile(CymbolParser.FileContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitFile(CymbolParser.FileContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterVarDecl(CymbolParser.VarDeclContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitVarDecl(CymbolParser.VarDeclContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterType(CymbolParser.TypeContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitType(CymbolParser.TypeContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterFunctionDecl(CymbolParser.FunctionDeclContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitFunctionDecl(CymbolParser.FunctionDeclContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterFormalParameters(CymbolParser.FormalParametersContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitFormalParameters(CymbolParser.FormalParametersContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterFormalParameter(CymbolParser.FormalParameterContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitFormalParameter(CymbolParser.FormalParameterContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterBlock(CymbolParser.BlockContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitBlock(CymbolParser.BlockContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterStat(CymbolParser.StatContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitStat(CymbolParser.StatContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpr(CymbolParser.ExprContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpr(CymbolParser.ExprContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExprList(CymbolParser.ExprListContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExprList(CymbolParser.ExprListContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterEveryRule(ParserRuleContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitEveryRule(ParserRuleContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void visitTerminal(TerminalNode node) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void visitErrorNode(ErrorNode node) { }
}
| 4,230
| 25.610063
| 90
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/cymbol_multi/CymbolLexer.java
|
// Generated from Cymbol.g4 by ANTLR 4.8
package com.ibm.ai4code.parser.cymbol_multi;
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.*;
@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
public class CymbolLexer extends Lexer {
static { RuntimeMetaData.checkVersion("4.8", RuntimeMetaData.VERSION); }
protected static final DFA[] _decisionToDFA;
protected static final PredictionContextCache _sharedContextCache =
new PredictionContextCache();
public static final int
T__0=1, ASSIGN=2, SEMICOLON=3, LPAREN=4, RPAREN=5, LBRACK=6, RBRACK=7,
LBRACE=8, RBRACE=9, COMMA=10, BANG=11, ADD=12, SUB=13, MUL=14, EQUAL=15,
FLOAT=16, INTEGER=17, VOID=18, IF=19, THEN=20, RETURN=21, ID=22, INT=23,
WS=24, SL_COMMENT=25;
public static String[] channelNames = {
"DEFAULT_TOKEN_CHANNEL", "HIDDEN"
};
public static String[] modeNames = {
"DEFAULT_MODE"
};
private static String[] makeRuleNames() {
return new String[] {
"T__0", "ASSIGN", "SEMICOLON", "LPAREN", "RPAREN", "LBRACK", "RBRACK",
"LBRACE", "RBRACE", "COMMA", "BANG", "ADD", "SUB", "MUL", "EQUAL", "FLOAT",
"INTEGER", "VOID", "IF", "THEN", "RETURN", "ID", "LETTER", "INT", "WS",
"SL_COMMENT"
};
}
public static final String[] ruleNames = makeRuleNames();
private static String[] makeLiteralNames() {
return new String[] {
null, "'then'", "'='", "';'", "'('", "')'", "'['", "']'", "'{'", "'}'",
"','", "'!'", "'+'", "'-'", "'*'", "'=='", "'float'", "'int'", "'void'",
"'if'", "'else'", "'return'"
};
}
private static final String[] _LITERAL_NAMES = makeLiteralNames();
private static String[] makeSymbolicNames() {
return new String[] {
null, null, "ASSIGN", "SEMICOLON", "LPAREN", "RPAREN", "LBRACK", "RBRACK",
"LBRACE", "RBRACE", "COMMA", "BANG", "ADD", "SUB", "MUL", "EQUAL", "FLOAT",
"INTEGER", "VOID", "IF", "THEN", "RETURN", "ID", "INT", "WS", "SL_COMMENT"
};
}
private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames();
public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES);
/**
* @deprecated Use {@link #VOCABULARY} instead.
*/
@Deprecated
public static final String[] tokenNames;
static {
tokenNames = new String[_SYMBOLIC_NAMES.length];
for (int i = 0; i < tokenNames.length; i++) {
tokenNames[i] = VOCABULARY.getLiteralName(i);
if (tokenNames[i] == null) {
tokenNames[i] = VOCABULARY.getSymbolicName(i);
}
if (tokenNames[i] == null) {
tokenNames[i] = "<INVALID>";
}
}
}
@Override
@Deprecated
public String[] getTokenNames() {
return tokenNames;
}
@Override
public Vocabulary getVocabulary() {
return VOCABULARY;
}
public CymbolLexer(CharStream input) {
super(input);
_interp = new LexerATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache);
}
@Override
public String getGrammarFileName() { return "Cymbol.g4"; }
@Override
public String[] getRuleNames() { return ruleNames; }
@Override
public String getSerializedATN() { return _serializedATN; }
@Override
public String[] getChannelNames() { return channelNames; }
@Override
public String[] getModeNames() { return modeNames; }
@Override
public ATN getATN() { return _ATN; }
public static final String _serializedATN =
"\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\33\u009a\b\1\4\2"+
"\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4"+
"\13\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22"+
"\t\22\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31"+
"\t\31\4\32\t\32\4\33\t\33\3\2\3\2\3\2\3\2\3\2\3\3\3\3\3\4\3\4\3\5\3\5"+
"\3\6\3\6\3\7\3\7\3\b\3\b\3\t\3\t\3\n\3\n\3\13\3\13\3\f\3\f\3\r\3\r\3\16"+
"\3\16\3\17\3\17\3\20\3\20\3\20\3\21\3\21\3\21\3\21\3\21\3\21\3\22\3\22"+
"\3\22\3\22\3\23\3\23\3\23\3\23\3\23\3\24\3\24\3\24\3\25\3\25\3\25\3\25"+
"\3\25\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\27\3\27\3\27\7\27{\n\27\f\27"+
"\16\27~\13\27\3\30\3\30\3\31\6\31\u0083\n\31\r\31\16\31\u0084\3\32\6\32"+
"\u0088\n\32\r\32\16\32\u0089\3\32\3\32\3\33\3\33\3\33\3\33\7\33\u0092"+
"\n\33\f\33\16\33\u0095\13\33\3\33\3\33\3\33\3\33\3\u0093\2\34\3\3\5\4"+
"\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f\27\r\31\16\33\17\35\20\37\21!\22"+
"#\23%\24\'\25)\26+\27-\30/\2\61\31\63\32\65\33\3\2\5\3\2\62;\4\2C\\c|"+
"\5\2\13\f\17\17\"\"\2\u009d\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3"+
"\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2"+
"\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3\2\2\2\2\33\3\2\2\2\2\35\3\2\2\2\2\37"+
"\3\2\2\2\2!\3\2\2\2\2#\3\2\2\2\2%\3\2\2\2\2\'\3\2\2\2\2)\3\2\2\2\2+\3"+
"\2\2\2\2-\3\2\2\2\2\61\3\2\2\2\2\63\3\2\2\2\2\65\3\2\2\2\3\67\3\2\2\2"+
"\5<\3\2\2\2\7>\3\2\2\2\t@\3\2\2\2\13B\3\2\2\2\rD\3\2\2\2\17F\3\2\2\2\21"+
"H\3\2\2\2\23J\3\2\2\2\25L\3\2\2\2\27N\3\2\2\2\31P\3\2\2\2\33R\3\2\2\2"+
"\35T\3\2\2\2\37V\3\2\2\2!Y\3\2\2\2#_\3\2\2\2%c\3\2\2\2\'h\3\2\2\2)k\3"+
"\2\2\2+p\3\2\2\2-w\3\2\2\2/\177\3\2\2\2\61\u0082\3\2\2\2\63\u0087\3\2"+
"\2\2\65\u008d\3\2\2\2\678\7v\2\289\7j\2\29:\7g\2\2:;\7p\2\2;\4\3\2\2\2"+
"<=\7?\2\2=\6\3\2\2\2>?\7=\2\2?\b\3\2\2\2@A\7*\2\2A\n\3\2\2\2BC\7+\2\2"+
"C\f\3\2\2\2DE\7]\2\2E\16\3\2\2\2FG\7_\2\2G\20\3\2\2\2HI\7}\2\2I\22\3\2"+
"\2\2JK\7\177\2\2K\24\3\2\2\2LM\7.\2\2M\26\3\2\2\2NO\7#\2\2O\30\3\2\2\2"+
"PQ\7-\2\2Q\32\3\2\2\2RS\7/\2\2S\34\3\2\2\2TU\7,\2\2U\36\3\2\2\2VW\7?\2"+
"\2WX\7?\2\2X \3\2\2\2YZ\7h\2\2Z[\7n\2\2[\\\7q\2\2\\]\7c\2\2]^\7v\2\2^"+
"\"\3\2\2\2_`\7k\2\2`a\7p\2\2ab\7v\2\2b$\3\2\2\2cd\7x\2\2de\7q\2\2ef\7"+
"k\2\2fg\7f\2\2g&\3\2\2\2hi\7k\2\2ij\7h\2\2j(\3\2\2\2kl\7g\2\2lm\7n\2\2"+
"mn\7u\2\2no\7g\2\2o*\3\2\2\2pq\7t\2\2qr\7g\2\2rs\7v\2\2st\7w\2\2tu\7t"+
"\2\2uv\7p\2\2v,\3\2\2\2w|\5/\30\2x{\5/\30\2y{\t\2\2\2zx\3\2\2\2zy\3\2"+
"\2\2{~\3\2\2\2|z\3\2\2\2|}\3\2\2\2}.\3\2\2\2~|\3\2\2\2\177\u0080\t\3\2"+
"\2\u0080\60\3\2\2\2\u0081\u0083\t\2\2\2\u0082\u0081\3\2\2\2\u0083\u0084"+
"\3\2\2\2\u0084\u0082\3\2\2\2\u0084\u0085\3\2\2\2\u0085\62\3\2\2\2\u0086"+
"\u0088\t\4\2\2\u0087\u0086\3\2\2\2\u0088\u0089\3\2\2\2\u0089\u0087\3\2"+
"\2\2\u0089\u008a\3\2\2\2\u008a\u008b\3\2\2\2\u008b\u008c\b\32\2\2\u008c"+
"\64\3\2\2\2\u008d\u008e\7\61\2\2\u008e\u008f\7\61\2\2\u008f\u0093\3\2"+
"\2\2\u0090\u0092\13\2\2\2\u0091\u0090\3\2\2\2\u0092\u0095\3\2\2\2\u0093"+
"\u0094\3\2\2\2\u0093\u0091\3\2\2\2\u0094\u0096\3\2\2\2\u0095\u0093\3\2"+
"\2\2\u0096\u0097\7\f\2\2\u0097\u0098\3\2\2\2\u0098\u0099\b\33\2\2\u0099"+
"\66\3\2\2\2\b\2z|\u0084\u0089\u0093\3\b\2\2";
public static final ATN _ATN =
new ATNDeserializer().deserialize(_serializedATN.toCharArray());
static {
_decisionToDFA = new DFA[_ATN.getNumberOfDecisions()];
for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) {
_decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i);
}
}
}
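// Illustrative only: a minimal sketch of driving the generated CymbolLexer above
// from the standard ANTLR 4.8 runtime. The demo class name and the sample input
// are assumptions, not part of the generated sources; runtime types are fully
// qualified so the sketch stays self-contained.
class CymbolLexerDemo {
	public static void main(String[] args) {
		// Tokenize a small Cymbol snippet and print every token the lexer emits.
		CymbolLexer lexer = new CymbolLexer(
				org.antlr.v4.runtime.CharStreams.fromString("int x = 1;"));
		org.antlr.v4.runtime.CommonTokenStream tokens =
				new org.antlr.v4.runtime.CommonTokenStream(lexer);
		tokens.fill(); // force the lexer to run to EOF
		for (org.antlr.v4.runtime.Token t : tokens.getTokens()) {
			System.out.println(t);
		}
	}
}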
| 7,034
| 41.125749
| 97
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/cymbol_multi/CymbolListener.java
|
// Generated from Cymbol.g4 by ANTLR 4.8
package com.ibm.ai4code.parser.cymbol_multi;
import org.antlr.v4.runtime.tree.ParseTreeListener;
/**
* This interface defines a complete listener for a parse tree produced by
* {@link CymbolParser}.
*/
public interface CymbolListener extends ParseTreeListener {
/**
* Enter a parse tree produced by {@link CymbolParser#file}.
* @param ctx the parse tree
*/
void enterFile(CymbolParser.FileContext ctx);
/**
* Exit a parse tree produced by {@link CymbolParser#file}.
* @param ctx the parse tree
*/
void exitFile(CymbolParser.FileContext ctx);
/**
* Enter a parse tree produced by {@link CymbolParser#varDecl}.
* @param ctx the parse tree
*/
void enterVarDecl(CymbolParser.VarDeclContext ctx);
/**
* Exit a parse tree produced by {@link CymbolParser#varDecl}.
* @param ctx the parse tree
*/
void exitVarDecl(CymbolParser.VarDeclContext ctx);
/**
* Enter a parse tree produced by {@link CymbolParser#type}.
* @param ctx the parse tree
*/
void enterType(CymbolParser.TypeContext ctx);
/**
* Exit a parse tree produced by {@link CymbolParser#type}.
* @param ctx the parse tree
*/
void exitType(CymbolParser.TypeContext ctx);
/**
* Enter a parse tree produced by {@link CymbolParser#functionDecl}.
* @param ctx the parse tree
*/
void enterFunctionDecl(CymbolParser.FunctionDeclContext ctx);
/**
* Exit a parse tree produced by {@link CymbolParser#functionDecl}.
* @param ctx the parse tree
*/
void exitFunctionDecl(CymbolParser.FunctionDeclContext ctx);
/**
* Enter a parse tree produced by {@link CymbolParser#formalParameters}.
* @param ctx the parse tree
*/
void enterFormalParameters(CymbolParser.FormalParametersContext ctx);
/**
* Exit a parse tree produced by {@link CymbolParser#formalParameters}.
* @param ctx the parse tree
*/
void exitFormalParameters(CymbolParser.FormalParametersContext ctx);
/**
* Enter a parse tree produced by {@link CymbolParser#formalParameter}.
* @param ctx the parse tree
*/
void enterFormalParameter(CymbolParser.FormalParameterContext ctx);
/**
* Exit a parse tree produced by {@link CymbolParser#formalParameter}.
* @param ctx the parse tree
*/
void exitFormalParameter(CymbolParser.FormalParameterContext ctx);
/**
* Enter a parse tree produced by {@link CymbolParser#block}.
* @param ctx the parse tree
*/
void enterBlock(CymbolParser.BlockContext ctx);
/**
* Exit a parse tree produced by {@link CymbolParser#block}.
* @param ctx the parse tree
*/
void exitBlock(CymbolParser.BlockContext ctx);
/**
* Enter a parse tree produced by {@link CymbolParser#stat}.
* @param ctx the parse tree
*/
void enterStat(CymbolParser.StatContext ctx);
/**
* Exit a parse tree produced by {@link CymbolParser#stat}.
* @param ctx the parse tree
*/
void exitStat(CymbolParser.StatContext ctx);
/**
* Enter a parse tree produced by {@link CymbolParser#expr}.
* @param ctx the parse tree
*/
void enterExpr(CymbolParser.ExprContext ctx);
/**
* Exit a parse tree produced by {@link CymbolParser#expr}.
* @param ctx the parse tree
*/
void exitExpr(CymbolParser.ExprContext ctx);
/**
* Enter a parse tree produced by {@link CymbolParser#exprList}.
* @param ctx the parse tree
*/
void enterExprList(CymbolParser.ExprListContext ctx);
/**
* Exit a parse tree produced by {@link CymbolParser#exprList}.
* @param ctx the parse tree
*/
void exitExprList(CymbolParser.ExprListContext ctx);
}
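// Illustrative only: one way the listener interface above is typically consumed.
// This sketch assumes the companion CymbolBaseListener that ANTLR 4.8 generates
// next to the interface by default (it is not shown in this dump); without it,
// every method of CymbolListener would have to be implemented explicitly.
class FunctionNamePrinter extends CymbolBaseListener {
	@Override
	public void enterFunctionDecl(CymbolParser.FunctionDeclContext ctx) {
		// FunctionDeclContext exposes the function name through its ID() terminal.
		System.out.println("function: " + ctx.ID().getText());
	}
	public static void main(String[] args) {
		CymbolLexer lexer = new CymbolLexer(
				org.antlr.v4.runtime.CharStreams.fromString(
						"int add(int a, int b) { return a + b; }"));
		CymbolParser parser = new CymbolParser(
				new org.antlr.v4.runtime.CommonTokenStream(lexer));
		// Walk the parse tree of the start rule and fire the enter/exit callbacks.
		new org.antlr.v4.runtime.tree.ParseTreeWalker()
				.walk(new FunctionNamePrinter(), parser.file());
	}
}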
| 3,502
| 30.845455
| 74
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/cymbol_multi/CymbolParser.java
|
// Generated from Cymbol.g4 by ANTLR 4.8
package com.ibm.ai4code.parser.cymbol_multi;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.misc.*;
import org.antlr.v4.runtime.tree.*;
import java.util.List;
import java.util.Iterator;
import java.util.ArrayList;
@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
public class CymbolParser extends Parser {
static { RuntimeMetaData.checkVersion("4.8", RuntimeMetaData.VERSION); }
protected static final DFA[] _decisionToDFA;
protected static final PredictionContextCache _sharedContextCache =
new PredictionContextCache();
public static final int
T__0=1, ASSIGN=2, SEMICOLON=3, LPAREN=4, RPAREN=5, LBRACK=6, RBRACK=7,
LBRACE=8, RBRACE=9, COMMA=10, BANG=11, ADD=12, SUB=13, MUL=14, EQUAL=15,
FLOAT=16, INTEGER=17, VOID=18, IF=19, THEN=20, RETURN=21, ID=22, INT=23,
WS=24, SL_COMMENT=25;
public static final int
RULE_file = 0, RULE_varDecl = 1, RULE_type = 2, RULE_functionDecl = 3,
RULE_formalParameters = 4, RULE_formalParameter = 5, RULE_block = 6, RULE_stat = 7,
RULE_expr = 8, RULE_exprList = 9;
private static String[] makeRuleNames() {
return new String[] {
"file", "varDecl", "type", "functionDecl", "formalParameters", "formalParameter",
"block", "stat", "expr", "exprList"
};
}
public static final String[] ruleNames = makeRuleNames();
private static String[] makeLiteralNames() {
return new String[] {
null, "'then'", "'='", "';'", "'('", "')'", "'['", "']'", "'{'", "'}'",
"','", "'!'", "'+'", "'-'", "'*'", "'=='", "'float'", "'int'", "'void'",
"'if'", "'else'", "'return'"
};
}
private static final String[] _LITERAL_NAMES = makeLiteralNames();
private static String[] makeSymbolicNames() {
return new String[] {
null, null, "ASSIGN", "SEMICOLON", "LPAREN", "RPAREN", "LBRACK", "RBRACK",
"LBRACE", "RBRACE", "COMMA", "BANG", "ADD", "SUB", "MUL", "EQUAL", "FLOAT",
"INTEGER", "VOID", "IF", "THEN", "RETURN", "ID", "INT", "WS", "SL_COMMENT"
};
}
private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames();
public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES);
/**
* @deprecated Use {@link #VOCABULARY} instead.
*/
@Deprecated
public static final String[] tokenNames;
static {
tokenNames = new String[_SYMBOLIC_NAMES.length];
for (int i = 0; i < tokenNames.length; i++) {
tokenNames[i] = VOCABULARY.getLiteralName(i);
if (tokenNames[i] == null) {
tokenNames[i] = VOCABULARY.getSymbolicName(i);
}
if (tokenNames[i] == null) {
tokenNames[i] = "<INVALID>";
}
}
}
@Override
@Deprecated
public String[] getTokenNames() {
return tokenNames;
}
@Override
public Vocabulary getVocabulary() {
return VOCABULARY;
}
@Override
public String getGrammarFileName() { return "Cymbol.g4"; }
@Override
public String[] getRuleNames() { return ruleNames; }
@Override
public String getSerializedATN() { return _serializedATN; }
@Override
public ATN getATN() { return _ATN; }
public CymbolParser(TokenStream input) {
super(input);
_interp = new ParserATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache);
}
public static class FileContext extends ParserRuleContext {
public List<FunctionDeclContext> functionDecl() {
return getRuleContexts(FunctionDeclContext.class);
}
public FunctionDeclContext functionDecl(int i) {
return getRuleContext(FunctionDeclContext.class,i);
}
public List<VarDeclContext> varDecl() {
return getRuleContexts(VarDeclContext.class);
}
public VarDeclContext varDecl(int i) {
return getRuleContext(VarDeclContext.class,i);
}
public FileContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_file; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).enterFile(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).exitFile(this);
}
}
public final FileContext file() throws RecognitionException {
FileContext _localctx = new FileContext(_ctx, getState());
enterRule(_localctx, 0, RULE_file);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(22);
_errHandler.sync(this);
_la = _input.LA(1);
do {
{
setState(22);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,0,_ctx) ) {
case 1:
{
setState(20);
functionDecl();
}
break;
case 2:
{
setState(21);
varDecl();
}
break;
}
}
setState(24);
_errHandler.sync(this);
_la = _input.LA(1);
} while ( (((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << FLOAT) | (1L << INTEGER) | (1L << VOID))) != 0) );
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
public static class VarDeclContext extends ParserRuleContext {
public TypeContext type() {
return getRuleContext(TypeContext.class,0);
}
public TerminalNode ID() { return getToken(CymbolParser.ID, 0); }
public TerminalNode SEMICOLON() { return getToken(CymbolParser.SEMICOLON, 0); }
public TerminalNode ASSIGN() { return getToken(CymbolParser.ASSIGN, 0); }
public ExprContext expr() {
return getRuleContext(ExprContext.class,0);
}
public VarDeclContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_varDecl; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).enterVarDecl(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).exitVarDecl(this);
}
}
public final VarDeclContext varDecl() throws RecognitionException {
VarDeclContext _localctx = new VarDeclContext(_ctx, getState());
enterRule(_localctx, 2, RULE_varDecl);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(26);
type();
setState(27);
match(ID);
setState(30);
_errHandler.sync(this);
_la = _input.LA(1);
if (_la==ASSIGN) {
{
setState(28);
match(ASSIGN);
setState(29);
expr(0);
}
}
setState(32);
match(SEMICOLON);
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
public static class TypeContext extends ParserRuleContext {
public TerminalNode FLOAT() { return getToken(CymbolParser.FLOAT, 0); }
public TerminalNode INTEGER() { return getToken(CymbolParser.INTEGER, 0); }
public TerminalNode VOID() { return getToken(CymbolParser.VOID, 0); }
public TypeContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_type; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).enterType(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).exitType(this);
}
}
public final TypeContext type() throws RecognitionException {
TypeContext _localctx = new TypeContext(_ctx, getState());
enterRule(_localctx, 4, RULE_type);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(34);
_la = _input.LA(1);
if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << FLOAT) | (1L << INTEGER) | (1L << VOID))) != 0)) ) {
_errHandler.recoverInline(this);
}
else {
if ( _input.LA(1)==Token.EOF ) matchedEOF = true;
_errHandler.reportMatch(this);
consume();
}
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
public static class FunctionDeclContext extends ParserRuleContext {
public TypeContext type() {
return getRuleContext(TypeContext.class,0);
}
public TerminalNode ID() { return getToken(CymbolParser.ID, 0); }
public TerminalNode LPAREN() { return getToken(CymbolParser.LPAREN, 0); }
public TerminalNode RPAREN() { return getToken(CymbolParser.RPAREN, 0); }
public BlockContext block() {
return getRuleContext(BlockContext.class,0);
}
public FormalParametersContext formalParameters() {
return getRuleContext(FormalParametersContext.class,0);
}
public FunctionDeclContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_functionDecl; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).enterFunctionDecl(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).exitFunctionDecl(this);
}
}
public final FunctionDeclContext functionDecl() throws RecognitionException {
FunctionDeclContext _localctx = new FunctionDeclContext(_ctx, getState());
enterRule(_localctx, 6, RULE_functionDecl);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(36);
type();
setState(37);
match(ID);
setState(38);
match(LPAREN);
setState(40);
_errHandler.sync(this);
_la = _input.LA(1);
if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << FLOAT) | (1L << INTEGER) | (1L << VOID))) != 0)) {
{
setState(39);
formalParameters();
}
}
setState(42);
match(RPAREN);
setState(43);
block();
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
public static class FormalParametersContext extends ParserRuleContext {
public List<FormalParameterContext> formalParameter() {
return getRuleContexts(FormalParameterContext.class);
}
public FormalParameterContext formalParameter(int i) {
return getRuleContext(FormalParameterContext.class,i);
}
public List<TerminalNode> COMMA() { return getTokens(CymbolParser.COMMA); }
public TerminalNode COMMA(int i) {
return getToken(CymbolParser.COMMA, i);
}
public FormalParametersContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_formalParameters; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).enterFormalParameters(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).exitFormalParameters(this);
}
}
public final FormalParametersContext formalParameters() throws RecognitionException {
FormalParametersContext _localctx = new FormalParametersContext(_ctx, getState());
enterRule(_localctx, 8, RULE_formalParameters);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(45);
formalParameter();
setState(50);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==COMMA) {
{
{
setState(46);
match(COMMA);
setState(47);
formalParameter();
}
}
setState(52);
_errHandler.sync(this);
_la = _input.LA(1);
}
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
public static class FormalParameterContext extends ParserRuleContext {
public TypeContext type() {
return getRuleContext(TypeContext.class,0);
}
public TerminalNode ID() { return getToken(CymbolParser.ID, 0); }
public FormalParameterContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_formalParameter; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).enterFormalParameter(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).exitFormalParameter(this);
}
}
public final FormalParameterContext formalParameter() throws RecognitionException {
FormalParameterContext _localctx = new FormalParameterContext(_ctx, getState());
enterRule(_localctx, 10, RULE_formalParameter);
try {
enterOuterAlt(_localctx, 1);
{
setState(53);
type();
setState(54);
match(ID);
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
public static class BlockContext extends ParserRuleContext {
public TerminalNode LBRACE() { return getToken(CymbolParser.LBRACE, 0); }
public TerminalNode RBRACE() { return getToken(CymbolParser.RBRACE, 0); }
public List<StatContext> stat() {
return getRuleContexts(StatContext.class);
}
public StatContext stat(int i) {
return getRuleContext(StatContext.class,i);
}
public BlockContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_block; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).enterBlock(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).exitBlock(this);
}
}
public final BlockContext block() throws RecognitionException {
BlockContext _localctx = new BlockContext(_ctx, getState());
enterRule(_localctx, 12, RULE_block);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(56);
match(LBRACE);
setState(60);
_errHandler.sync(this);
_la = _input.LA(1);
while ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LPAREN) | (1L << LBRACE) | (1L << BANG) | (1L << SUB) | (1L << FLOAT) | (1L << INTEGER) | (1L << VOID) | (1L << IF) | (1L << RETURN) | (1L << ID) | (1L << INT))) != 0)) {
{
{
setState(57);
stat();
}
}
setState(62);
_errHandler.sync(this);
_la = _input.LA(1);
}
setState(63);
match(RBRACE);
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
public static class StatContext extends ParserRuleContext {
public BlockContext block() {
return getRuleContext(BlockContext.class,0);
}
public VarDeclContext varDecl() {
return getRuleContext(VarDeclContext.class,0);
}
public TerminalNode IF() { return getToken(CymbolParser.IF, 0); }
public List<ExprContext> expr() {
return getRuleContexts(ExprContext.class);
}
public ExprContext expr(int i) {
return getRuleContext(ExprContext.class,i);
}
public List<StatContext> stat() {
return getRuleContexts(StatContext.class);
}
public StatContext stat(int i) {
return getRuleContext(StatContext.class,i);
}
public TerminalNode THEN() { return getToken(CymbolParser.THEN, 0); }
public TerminalNode RETURN() { return getToken(CymbolParser.RETURN, 0); }
public TerminalNode SEMICOLON() { return getToken(CymbolParser.SEMICOLON, 0); }
public TerminalNode ASSIGN() { return getToken(CymbolParser.ASSIGN, 0); }
public StatContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_stat; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).enterStat(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).exitStat(this);
}
}
public final StatContext stat() throws RecognitionException {
StatContext _localctx = new StatContext(_ctx, getState());
enterRule(_localctx, 14, RULE_stat);
int _la;
try {
setState(88);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,8,_ctx) ) {
case 1:
enterOuterAlt(_localctx, 1);
{
setState(65);
block();
}
break;
case 2:
enterOuterAlt(_localctx, 2);
{
setState(66);
varDecl();
}
break;
case 3:
enterOuterAlt(_localctx, 3);
{
setState(67);
match(IF);
setState(68);
expr(0);
setState(69);
match(T__0);
setState(70);
stat();
setState(73);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,6,_ctx) ) {
case 1:
{
setState(71);
match(THEN);
setState(72);
stat();
}
break;
}
}
break;
case 4:
enterOuterAlt(_localctx, 4);
{
setState(75);
match(RETURN);
setState(77);
_errHandler.sync(this);
_la = _input.LA(1);
if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LPAREN) | (1L << BANG) | (1L << SUB) | (1L << ID) | (1L << INT))) != 0)) {
{
setState(76);
expr(0);
}
}
setState(79);
match(SEMICOLON);
}
break;
case 5:
enterOuterAlt(_localctx, 5);
{
setState(80);
expr(0);
setState(81);
match(ASSIGN);
setState(82);
expr(0);
setState(83);
match(SEMICOLON);
}
break;
case 6:
enterOuterAlt(_localctx, 6);
{
setState(85);
expr(0);
setState(86);
match(SEMICOLON);
}
break;
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
public static class ExprContext extends ParserRuleContext {
public TerminalNode ID() { return getToken(CymbolParser.ID, 0); }
public TerminalNode LPAREN() { return getToken(CymbolParser.LPAREN, 0); }
public TerminalNode RPAREN() { return getToken(CymbolParser.RPAREN, 0); }
public ExprListContext exprList() {
return getRuleContext(ExprListContext.class,0);
}
public TerminalNode LBRACK() { return getToken(CymbolParser.LBRACK, 0); }
public List<ExprContext> expr() {
return getRuleContexts(ExprContext.class);
}
public ExprContext expr(int i) {
return getRuleContext(ExprContext.class,i);
}
public TerminalNode RBRACK() { return getToken(CymbolParser.RBRACK, 0); }
public TerminalNode SUB() { return getToken(CymbolParser.SUB, 0); }
public TerminalNode BANG() { return getToken(CymbolParser.BANG, 0); }
public TerminalNode INT() { return getToken(CymbolParser.INT, 0); }
public TerminalNode MUL() { return getToken(CymbolParser.MUL, 0); }
public TerminalNode ADD() { return getToken(CymbolParser.ADD, 0); }
public TerminalNode EQUAL() { return getToken(CymbolParser.EQUAL, 0); }
public ExprContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_expr; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).enterExpr(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).exitExpr(this);
}
}
public final ExprContext expr() throws RecognitionException {
return expr(0);
}
private ExprContext expr(int _p) throws RecognitionException {
ParserRuleContext _parentctx = _ctx;
int _parentState = getState();
ExprContext _localctx = new ExprContext(_ctx, _parentState);
ExprContext _prevctx = _localctx;
int _startState = 16;
enterRecursionRule(_localctx, 16, RULE_expr, _p);
int _la;
try {
int _alt;
enterOuterAlt(_localctx, 1);
{
setState(112);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,10,_ctx) ) {
case 1:
{
setState(91);
match(ID);
setState(92);
match(LPAREN);
setState(94);
_errHandler.sync(this);
_la = _input.LA(1);
if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LPAREN) | (1L << BANG) | (1L << SUB) | (1L << ID) | (1L << INT))) != 0)) {
{
setState(93);
exprList();
}
}
setState(96);
match(RPAREN);
}
break;
case 2:
{
setState(97);
match(ID);
setState(98);
match(LBRACK);
setState(99);
expr(0);
setState(100);
match(RBRACK);
}
break;
case 3:
{
setState(102);
match(SUB);
setState(103);
expr(8);
}
break;
case 4:
{
setState(104);
match(BANG);
setState(105);
expr(7);
}
break;
case 5:
{
setState(106);
match(ID);
}
break;
case 6:
{
setState(107);
match(INT);
}
break;
case 7:
{
setState(108);
match(LPAREN);
setState(109);
expr(0);
setState(110);
match(RPAREN);
}
break;
}
_ctx.stop = _input.LT(-1);
setState(125);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,12,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
if ( _alt==1 ) {
if ( _parseListeners!=null ) triggerExitRuleEvent();
_prevctx = _localctx;
{
setState(123);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) {
case 1:
{
_localctx = new ExprContext(_parentctx, _parentState);
pushNewRecursionContext(_localctx, _startState, RULE_expr);
setState(114);
if (!(precpred(_ctx, 6))) throw new FailedPredicateException(this, "precpred(_ctx, 6)");
setState(115);
match(MUL);
setState(116);
expr(7);
}
break;
case 2:
{
_localctx = new ExprContext(_parentctx, _parentState);
pushNewRecursionContext(_localctx, _startState, RULE_expr);
setState(117);
if (!(precpred(_ctx, 5))) throw new FailedPredicateException(this, "precpred(_ctx, 5)");
setState(118);
_la = _input.LA(1);
if ( !(_la==ADD || _la==SUB) ) {
_errHandler.recoverInline(this);
}
else {
if ( _input.LA(1)==Token.EOF ) matchedEOF = true;
_errHandler.reportMatch(this);
consume();
}
setState(119);
expr(6);
}
break;
case 3:
{
_localctx = new ExprContext(_parentctx, _parentState);
pushNewRecursionContext(_localctx, _startState, RULE_expr);
setState(120);
if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)");
setState(121);
match(EQUAL);
setState(122);
expr(5);
}
break;
}
}
}
setState(127);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,12,_ctx);
}
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
unrollRecursionContexts(_parentctx);
}
return _localctx;
}
public static class ExprListContext extends ParserRuleContext {
public List<ExprContext> expr() {
return getRuleContexts(ExprContext.class);
}
public ExprContext expr(int i) {
return getRuleContext(ExprContext.class,i);
}
public List<TerminalNode> COMMA() { return getTokens(CymbolParser.COMMA); }
public TerminalNode COMMA(int i) {
return getToken(CymbolParser.COMMA, i);
}
public ExprListContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_exprList; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).enterExprList(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof CymbolListener ) ((CymbolListener)listener).exitExprList(this);
}
}
public final ExprListContext exprList() throws RecognitionException {
ExprListContext _localctx = new ExprListContext(_ctx, getState());
enterRule(_localctx, 18, RULE_exprList);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(128);
expr(0);
setState(133);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==COMMA) {
{
{
setState(129);
match(COMMA);
setState(130);
expr(0);
}
}
setState(135);
_errHandler.sync(this);
_la = _input.LA(1);
}
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) {
switch (ruleIndex) {
case 8:
return expr_sempred((ExprContext)_localctx, predIndex);
}
return true;
}
private boolean expr_sempred(ExprContext _localctx, int predIndex) {
switch (predIndex) {
case 0:
return precpred(_ctx, 6);
case 1:
return precpred(_ctx, 5);
case 2:
return precpred(_ctx, 4);
}
return true;
}
public static final String _serializedATN =
"\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3\33\u008b\4\2\t\2"+
"\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13"+
"\t\13\3\2\3\2\6\2\31\n\2\r\2\16\2\32\3\3\3\3\3\3\3\3\5\3!\n\3\3\3\3\3"+
"\3\4\3\4\3\5\3\5\3\5\3\5\5\5+\n\5\3\5\3\5\3\5\3\6\3\6\3\6\7\6\63\n\6\f"+
"\6\16\6\66\13\6\3\7\3\7\3\7\3\b\3\b\7\b=\n\b\f\b\16\b@\13\b\3\b\3\b\3"+
"\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\5\tL\n\t\3\t\3\t\5\tP\n\t\3\t\3\t\3\t\3"+
"\t\3\t\3\t\3\t\3\t\3\t\5\t[\n\t\3\n\3\n\3\n\3\n\5\na\n\n\3\n\3\n\3\n\3"+
"\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\5\ns\n\n\3\n\3\n\3"+
"\n\3\n\3\n\3\n\3\n\3\n\3\n\7\n~\n\n\f\n\16\n\u0081\13\n\3\13\3\13\3\13"+
"\7\13\u0086\n\13\f\13\16\13\u0089\13\13\3\13\2\3\22\f\2\4\6\b\n\f\16\20"+
"\22\24\2\4\3\2\22\24\3\2\16\17\2\u0098\2\30\3\2\2\2\4\34\3\2\2\2\6$\3"+
"\2\2\2\b&\3\2\2\2\n/\3\2\2\2\f\67\3\2\2\2\16:\3\2\2\2\20Z\3\2\2\2\22r"+
"\3\2\2\2\24\u0082\3\2\2\2\26\31\5\b\5\2\27\31\5\4\3\2\30\26\3\2\2\2\30"+
"\27\3\2\2\2\31\32\3\2\2\2\32\30\3\2\2\2\32\33\3\2\2\2\33\3\3\2\2\2\34"+
"\35\5\6\4\2\35 \7\30\2\2\36\37\7\4\2\2\37!\5\22\n\2 \36\3\2\2\2 !\3\2"+
"\2\2!\"\3\2\2\2\"#\7\5\2\2#\5\3\2\2\2$%\t\2\2\2%\7\3\2\2\2&\'\5\6\4\2"+
"\'(\7\30\2\2(*\7\6\2\2)+\5\n\6\2*)\3\2\2\2*+\3\2\2\2+,\3\2\2\2,-\7\7\2"+
"\2-.\5\16\b\2.\t\3\2\2\2/\64\5\f\7\2\60\61\7\f\2\2\61\63\5\f\7\2\62\60"+
"\3\2\2\2\63\66\3\2\2\2\64\62\3\2\2\2\64\65\3\2\2\2\65\13\3\2\2\2\66\64"+
"\3\2\2\2\678\5\6\4\289\7\30\2\29\r\3\2\2\2:>\7\n\2\2;=\5\20\t\2<;\3\2"+
"\2\2=@\3\2\2\2><\3\2\2\2>?\3\2\2\2?A\3\2\2\2@>\3\2\2\2AB\7\13\2\2B\17"+
"\3\2\2\2C[\5\16\b\2D[\5\4\3\2EF\7\25\2\2FG\5\22\n\2GH\7\3\2\2HK\5\20\t"+
"\2IJ\7\26\2\2JL\5\20\t\2KI\3\2\2\2KL\3\2\2\2L[\3\2\2\2MO\7\27\2\2NP\5"+
"\22\n\2ON\3\2\2\2OP\3\2\2\2PQ\3\2\2\2Q[\7\5\2\2RS\5\22\n\2ST\7\4\2\2T"+
"U\5\22\n\2UV\7\5\2\2V[\3\2\2\2WX\5\22\n\2XY\7\5\2\2Y[\3\2\2\2ZC\3\2\2"+
"\2ZD\3\2\2\2ZE\3\2\2\2ZM\3\2\2\2ZR\3\2\2\2ZW\3\2\2\2[\21\3\2\2\2\\]\b"+
"\n\1\2]^\7\30\2\2^`\7\6\2\2_a\5\24\13\2`_\3\2\2\2`a\3\2\2\2ab\3\2\2\2"+
"bs\7\7\2\2cd\7\30\2\2de\7\b\2\2ef\5\22\n\2fg\7\t\2\2gs\3\2\2\2hi\7\17"+
"\2\2is\5\22\n\njk\7\r\2\2ks\5\22\n\tls\7\30\2\2ms\7\31\2\2no\7\6\2\2o"+
"p\5\22\n\2pq\7\7\2\2qs\3\2\2\2r\\\3\2\2\2rc\3\2\2\2rh\3\2\2\2rj\3\2\2"+
"\2rl\3\2\2\2rm\3\2\2\2rn\3\2\2\2s\177\3\2\2\2tu\f\b\2\2uv\7\20\2\2v~\5"+
"\22\n\twx\f\7\2\2xy\t\3\2\2y~\5\22\n\bz{\f\6\2\2{|\7\21\2\2|~\5\22\n\7"+
"}t\3\2\2\2}w\3\2\2\2}z\3\2\2\2~\u0081\3\2\2\2\177}\3\2\2\2\177\u0080\3"+
"\2\2\2\u0080\23\3\2\2\2\u0081\177\3\2\2\2\u0082\u0087\5\22\n\2\u0083\u0084"+
"\7\f\2\2\u0084\u0086\5\22\n\2\u0085\u0083\3\2\2\2\u0086\u0089\3\2\2\2"+
"\u0087\u0085\3\2\2\2\u0087\u0088\3\2\2\2\u0088\25\3\2\2\2\u0089\u0087"+
"\3\2\2\2\20\30\32 *\64>KOZ`r}\177\u0087";
public static final ATN _ATN =
new ATNDeserializer().deserialize(_serializedATN.toCharArray());
static {
_decisionToDFA = new DFA[_ATN.getNumberOfDecisions()];
for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) {
_decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i);
}
}
}
| 28,962
| 28.374239
| 228
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/cymbol_multi/CymbolReservedWordDecider.java
|
package com.ibm.ai4code.parser.cymbol_multi;
import java.util.HashSet;
import com.ibm.ai4code.parser.commons.ReservedWordDeciderI;
public class CymbolReservedWordDecider implements ReservedWordDeciderI{
public static final String [] keywords = {
"float",
"int",
"void",
"if",
"then",
"else",
"return"
};
public static final String [] ops = {
"(",
")",
"[",
"]",
"-",
"!",
"*",
"+",
"-",
"=="
};
@Override
public boolean isReserved(String word) {
return (keywordsHashSet.contains(word) || opsHashSet.contains(word));
}
HashSet<String> keywordsHashSet=new HashSet<String>();
HashSet<String> opsHashSet = new HashSet<String>();
public void buildKeyWordsHashSet() {
for(String keyword: keywords) {
keywordsHashSet.add(keyword);
}
}
public void buildOPsHashSet() {
for(String op: ops) {
opsHashSet.add(op);
}
}
public CymbolReservedWordDecider() {
buildKeyWordsHashSet();
buildOPsHashSet();
}
}
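// Illustrative only: a small usage sketch for the decider above (the demo class
// itself is not part of the original sources).
class CymbolReservedWordDeciderDemo {
	public static void main(String[] args) {
		CymbolReservedWordDecider decider = new CymbolReservedWordDecider();
		System.out.println(decider.isReserved("return"));  // true  - keyword
		System.out.println(decider.isReserved("=="));      // true  - operator
		System.out.println(decider.isReserved("counter")); // false - ordinary identifier
	}
}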
| 991
| 16.714286
| 71
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/cymbol_multi/CymbolTokens.java
|
// Generated from CymbolTokens.g4 by ANTLR 4.8
package com.ibm.ai4code.parser.cymbol_multi;
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.*;
@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
public class CymbolTokens extends Lexer {
static { RuntimeMetaData.checkVersion("4.8", RuntimeMetaData.VERSION); }
protected static final DFA[] _decisionToDFA;
protected static final PredictionContextCache _sharedContextCache =
new PredictionContextCache();
public static final int
KEYWORDS=1, PUNCTUATORS=2, ASSIGN=3, SEMICOLON=4, LPAREN=5, RPAREN=6,
LBRACK=7, RBRACK=8, LBRACE=9, RBRACE=10, COMMA=11, BANG=12, ADD=13, SUB=14,
MUL=15, EQUAL=16, FLOAT=17, INTEGER=18, VOID=19, IF=20, THEN=21, RETURN=22,
ID=23, INT=24, WS=25, SL_COMMENT=26;
public static String[] channelNames = {
"DEFAULT_TOKEN_CHANNEL", "HIDDEN"
};
public static String[] modeNames = {
"DEFAULT_MODE"
};
private static String[] makeRuleNames() {
return new String[] {
"KEYWORDS", "PUNCTUATORS", "ASSIGN", "SEMICOLON", "LPAREN", "RPAREN",
"LBRACK", "RBRACK", "LBRACE", "RBRACE", "COMMA", "BANG", "ADD", "SUB",
"MUL", "EQUAL", "FLOAT", "INTEGER", "VOID", "IF", "THEN", "RETURN", "ID",
"LETTER", "INT", "WS", "SL_COMMENT"
};
}
public static final String[] ruleNames = makeRuleNames();
private static String[] makeLiteralNames() {
return new String[] {
null, null, null, "'='", "';'", "'('", "')'", "'['", "']'", "'{'", "'}'",
"','", "'!'", "'+'", "'-'", "'*'", "'=='", "'float'", "'int'", "'void'",
"'if'", "'else'", "'return'"
};
}
private static final String[] _LITERAL_NAMES = makeLiteralNames();
private static String[] makeSymbolicNames() {
return new String[] {
null, "KEYWORDS", "PUNCTUATORS", "ASSIGN", "SEMICOLON", "LPAREN", "RPAREN",
"LBRACK", "RBRACK", "LBRACE", "RBRACE", "COMMA", "BANG", "ADD", "SUB",
"MUL", "EQUAL", "FLOAT", "INTEGER", "VOID", "IF", "THEN", "RETURN", "ID",
"INT", "WS", "SL_COMMENT"
};
}
private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames();
public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES);
/**
* @deprecated Use {@link #VOCABULARY} instead.
*/
@Deprecated
public static final String[] tokenNames;
static {
tokenNames = new String[_SYMBOLIC_NAMES.length];
for (int i = 0; i < tokenNames.length; i++) {
tokenNames[i] = VOCABULARY.getLiteralName(i);
if (tokenNames[i] == null) {
tokenNames[i] = VOCABULARY.getSymbolicName(i);
}
if (tokenNames[i] == null) {
tokenNames[i] = "<INVALID>";
}
}
}
@Override
@Deprecated
public String[] getTokenNames() {
return tokenNames;
}
@Override
public Vocabulary getVocabulary() {
return VOCABULARY;
}
public CymbolTokens(CharStream input) {
super(input);
_interp = new LexerATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache);
}
@Override
public String getGrammarFileName() { return "CymbolTokens.g4"; }
@Override
public String[] getRuleNames() { return ruleNames; }
@Override
public String getSerializedATN() { return _serializedATN; }
@Override
public String[] getChannelNames() { return channelNames; }
@Override
public String[] getModeNames() { return modeNames; }
@Override
public ATN getATN() { return _ATN; }
public static final String _serializedATN =
"\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\34\u00af\b\1\4\2"+
"\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4"+
"\13\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22"+
"\t\22\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31"+
"\t\31\4\32\t\32\4\33\t\33\4\34\t\34\3\2\3\2\3\2\3\2\3\2\3\2\5\2@\n\2\3"+
"\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\5\3P\n\3\3\4\3"+
"\4\3\5\3\5\3\6\3\6\3\7\3\7\3\b\3\b\3\t\3\t\3\n\3\n\3\13\3\13\3\f\3\f\3"+
"\r\3\r\3\16\3\16\3\17\3\17\3\20\3\20\3\21\3\21\3\21\3\22\3\22\3\22\3\22"+
"\3\22\3\22\3\23\3\23\3\23\3\23\3\24\3\24\3\24\3\24\3\24\3\25\3\25\3\25"+
"\3\26\3\26\3\26\3\26\3\26\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\30\3\30"+
"\3\30\7\30\u0090\n\30\f\30\16\30\u0093\13\30\3\31\3\31\3\32\6\32\u0098"+
"\n\32\r\32\16\32\u0099\3\33\6\33\u009d\n\33\r\33\16\33\u009e\3\33\3\33"+
"\3\34\3\34\3\34\3\34\7\34\u00a7\n\34\f\34\16\34\u00aa\13\34\3\34\3\34"+
"\3\34\3\34\3\u00a8\2\35\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f"+
"\27\r\31\16\33\17\35\20\37\21!\22#\23%\24\'\25)\26+\27-\30/\31\61\2\63"+
"\32\65\33\67\34\3\2\5\3\2\62;\4\2C\\c|\5\2\13\f\17\17\"\"\2\u00c4\2\3"+
"\3\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2"+
"\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27\3\2\2\2\2\31"+
"\3\2\2\2\2\33\3\2\2\2\2\35\3\2\2\2\2\37\3\2\2\2\2!\3\2\2\2\2#\3\2\2\2"+
"\2%\3\2\2\2\2\'\3\2\2\2\2)\3\2\2\2\2+\3\2\2\2\2-\3\2\2\2\2/\3\2\2\2\2"+
"\63\3\2\2\2\2\65\3\2\2\2\2\67\3\2\2\2\3?\3\2\2\2\5O\3\2\2\2\7Q\3\2\2\2"+
"\tS\3\2\2\2\13U\3\2\2\2\rW\3\2\2\2\17Y\3\2\2\2\21[\3\2\2\2\23]\3\2\2\2"+
"\25_\3\2\2\2\27a\3\2\2\2\31c\3\2\2\2\33e\3\2\2\2\35g\3\2\2\2\37i\3\2\2"+
"\2!k\3\2\2\2#n\3\2\2\2%t\3\2\2\2\'x\3\2\2\2)}\3\2\2\2+\u0080\3\2\2\2-"+
"\u0085\3\2\2\2/\u008c\3\2\2\2\61\u0094\3\2\2\2\63\u0097\3\2\2\2\65\u009c"+
"\3\2\2\2\67\u00a2\3\2\2\29@\5#\22\2:@\5%\23\2;@\5\'\24\2<@\5)\25\2=@\5"+
"+\26\2>@\5-\27\2?9\3\2\2\2?:\3\2\2\2?;\3\2\2\2?<\3\2\2\2?=\3\2\2\2?>\3"+
"\2\2\2@\4\3\2\2\2AP\5\7\4\2BP\5\t\5\2CP\5\13\6\2DP\5\r\7\2EP\5\17\b\2"+
"FP\5\21\t\2GP\5\23\n\2HP\5\25\13\2IP\5\27\f\2JP\5\31\r\2KP\5\33\16\2L"+
"P\5\35\17\2MP\5\37\20\2NP\5!\21\2OA\3\2\2\2OB\3\2\2\2OC\3\2\2\2OD\3\2"+
"\2\2OE\3\2\2\2OF\3\2\2\2OG\3\2\2\2OH\3\2\2\2OI\3\2\2\2OJ\3\2\2\2OK\3\2"+
"\2\2OL\3\2\2\2OM\3\2\2\2ON\3\2\2\2P\6\3\2\2\2QR\7?\2\2R\b\3\2\2\2ST\7"+
"=\2\2T\n\3\2\2\2UV\7*\2\2V\f\3\2\2\2WX\7+\2\2X\16\3\2\2\2YZ\7]\2\2Z\20"+
"\3\2\2\2[\\\7_\2\2\\\22\3\2\2\2]^\7}\2\2^\24\3\2\2\2_`\7\177\2\2`\26\3"+
"\2\2\2ab\7.\2\2b\30\3\2\2\2cd\7#\2\2d\32\3\2\2\2ef\7-\2\2f\34\3\2\2\2"+
"gh\7/\2\2h\36\3\2\2\2ij\7,\2\2j \3\2\2\2kl\7?\2\2lm\7?\2\2m\"\3\2\2\2"+
"no\7h\2\2op\7n\2\2pq\7q\2\2qr\7c\2\2rs\7v\2\2s$\3\2\2\2tu\7k\2\2uv\7p"+
"\2\2vw\7v\2\2w&\3\2\2\2xy\7x\2\2yz\7q\2\2z{\7k\2\2{|\7f\2\2|(\3\2\2\2"+
"}~\7k\2\2~\177\7h\2\2\177*\3\2\2\2\u0080\u0081\7g\2\2\u0081\u0082\7n\2"+
"\2\u0082\u0083\7u\2\2\u0083\u0084\7g\2\2\u0084,\3\2\2\2\u0085\u0086\7"+
"t\2\2\u0086\u0087\7g\2\2\u0087\u0088\7v\2\2\u0088\u0089\7w\2\2\u0089\u008a"+
"\7t\2\2\u008a\u008b\7p\2\2\u008b.\3\2\2\2\u008c\u0091\5\61\31\2\u008d"+
"\u0090\5\61\31\2\u008e\u0090\t\2\2\2\u008f\u008d\3\2\2\2\u008f\u008e\3"+
"\2\2\2\u0090\u0093\3\2\2\2\u0091\u008f\3\2\2\2\u0091\u0092\3\2\2\2\u0092"+
"\60\3\2\2\2\u0093\u0091\3\2\2\2\u0094\u0095\t\3\2\2\u0095\62\3\2\2\2\u0096"+
"\u0098\t\2\2\2\u0097\u0096\3\2\2\2\u0098\u0099\3\2\2\2\u0099\u0097\3\2"+
"\2\2\u0099\u009a\3\2\2\2\u009a\64\3\2\2\2\u009b\u009d\t\4\2\2\u009c\u009b"+
"\3\2\2\2\u009d\u009e\3\2\2\2\u009e\u009c\3\2\2\2\u009e\u009f\3\2\2\2\u009f"+
"\u00a0\3\2\2\2\u00a0\u00a1\b\33\2\2\u00a1\66\3\2\2\2\u00a2\u00a3\7\61"+
"\2\2\u00a3\u00a4\7\61\2\2\u00a4\u00a8\3\2\2\2\u00a5\u00a7\13\2\2\2\u00a6"+
"\u00a5\3\2\2\2\u00a7\u00aa\3\2\2\2\u00a8\u00a9\3\2\2\2\u00a8\u00a6\3\2"+
"\2\2\u00a9\u00ab\3\2\2\2\u00aa\u00a8\3\2\2\2\u00ab\u00ac\7\f\2\2\u00ac"+
"\u00ad\3\2\2\2\u00ad\u00ae\b\34\2\2\u00ae8\3\2\2\2\n\2?O\u008f\u0091\u0099"+
"\u009e\u00a8\3\b\2\2";
public static final ATN _ATN =
new ATNDeserializer().deserialize(_serializedATN.toCharArray());
static {
_decisionToDFA = new DFA[_ATN.getNumberOfDecisions()];
for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) {
_decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i);
}
}
}
| 7,940
| 43.363128
| 97
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/cymbol_multi/Cymbol_lexer_common.java
|
// Generated from Cymbol_lexer_common.g4 by ANTLR 4.8
package com.ibm.ai4code.parser.cymbol_multi;
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.*;
@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
public class Cymbol_lexer_common extends Lexer {
static { RuntimeMetaData.checkVersion("4.8", RuntimeMetaData.VERSION); }
protected static final DFA[] _decisionToDFA;
protected static final PredictionContextCache _sharedContextCache =
new PredictionContextCache();
public static final int
ASSIGN=1, SEMICOLON=2, LPAREN=3, RPAREN=4, LBRACK=5, RBRACK=6, LBRACE=7,
RBRACE=8, COMMA=9, BANG=10, ADD=11, SUB=12, MUL=13, EQUAL=14, FLOAT=15,
INTEGER=16, VOID=17, IF=18, THEN=19, RETURN=20, ID=21, INT=22, WS=23,
SL_COMMENT=24;
public static String[] channelNames = {
"DEFAULT_TOKEN_CHANNEL", "HIDDEN"
};
public static String[] modeNames = {
"DEFAULT_MODE"
};
private static String[] makeRuleNames() {
return new String[] {
"ASSIGN", "SEMICOLON", "LPAREN", "RPAREN", "LBRACK", "RBRACK", "LBRACE",
"RBRACE", "COMMA", "BANG", "ADD", "SUB", "MUL", "EQUAL", "FLOAT", "INTEGER",
"VOID", "IF", "THEN", "RETURN", "ID", "LETTER", "INT", "WS", "SL_COMMENT"
};
}
public static final String[] ruleNames = makeRuleNames();
private static String[] makeLiteralNames() {
return new String[] {
null, "'='", "';'", "'('", "')'", "'['", "']'", "'{'", "'}'", "','",
"'!'", "'+'", "'-'", "'*'", "'=='", "'float'", "'int'", "'void'", "'if'",
"'else'", "'return'"
};
}
private static final String[] _LITERAL_NAMES = makeLiteralNames();
private static String[] makeSymbolicNames() {
return new String[] {
null, "ASSIGN", "SEMICOLON", "LPAREN", "RPAREN", "LBRACK", "RBRACK",
"LBRACE", "RBRACE", "COMMA", "BANG", "ADD", "SUB", "MUL", "EQUAL", "FLOAT",
"INTEGER", "VOID", "IF", "THEN", "RETURN", "ID", "INT", "WS", "SL_COMMENT"
};
}
private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames();
public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES);
/**
* @deprecated Use {@link #VOCABULARY} instead.
*/
@Deprecated
public static final String[] tokenNames;
static {
tokenNames = new String[_SYMBOLIC_NAMES.length];
for (int i = 0; i < tokenNames.length; i++) {
tokenNames[i] = VOCABULARY.getLiteralName(i);
if (tokenNames[i] == null) {
tokenNames[i] = VOCABULARY.getSymbolicName(i);
}
if (tokenNames[i] == null) {
tokenNames[i] = "<INVALID>";
}
}
}
@Override
@Deprecated
public String[] getTokenNames() {
return tokenNames;
}
@Override
public Vocabulary getVocabulary() {
return VOCABULARY;
}
public Cymbol_lexer_common(CharStream input) {
super(input);
_interp = new LexerATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache);
}
@Override
public String getGrammarFileName() { return "Cymbol_lexer_common.g4"; }
@Override
public String[] getRuleNames() { return ruleNames; }
@Override
public String getSerializedATN() { return _serializedATN; }
@Override
public String[] getChannelNames() { return channelNames; }
@Override
public String[] getModeNames() { return modeNames; }
@Override
public ATN getATN() { return _ATN; }
public static final String _serializedATN =
"\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\32\u0093\b\1\4\2"+
"\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4"+
"\13\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22"+
"\t\22\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31"+
"\t\31\4\32\t\32\3\2\3\2\3\3\3\3\3\4\3\4\3\5\3\5\3\6\3\6\3\7\3\7\3\b\3"+
"\b\3\t\3\t\3\n\3\n\3\13\3\13\3\f\3\f\3\r\3\r\3\16\3\16\3\17\3\17\3\17"+
"\3\20\3\20\3\20\3\20\3\20\3\20\3\21\3\21\3\21\3\21\3\22\3\22\3\22\3\22"+
"\3\22\3\23\3\23\3\23\3\24\3\24\3\24\3\24\3\24\3\25\3\25\3\25\3\25\3\25"+
"\3\25\3\25\3\26\3\26\3\26\7\26t\n\26\f\26\16\26w\13\26\3\27\3\27\3\30"+
"\6\30|\n\30\r\30\16\30}\3\31\6\31\u0081\n\31\r\31\16\31\u0082\3\31\3\31"+
"\3\32\3\32\3\32\3\32\7\32\u008b\n\32\f\32\16\32\u008e\13\32\3\32\3\32"+
"\3\32\3\32\3\u008c\2\33\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f"+
"\27\r\31\16\33\17\35\20\37\21!\22#\23%\24\'\25)\26+\27-\2/\30\61\31\63"+
"\32\3\2\5\3\2\62;\4\2C\\c|\5\2\13\f\17\17\"\"\2\u0096\2\3\3\2\2\2\2\5"+
"\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2\17\3\2\2\2"+
"\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3\2\2\2\2\33"+
"\3\2\2\2\2\35\3\2\2\2\2\37\3\2\2\2\2!\3\2\2\2\2#\3\2\2\2\2%\3\2\2\2\2"+
"\'\3\2\2\2\2)\3\2\2\2\2+\3\2\2\2\2/\3\2\2\2\2\61\3\2\2\2\2\63\3\2\2\2"+
"\3\65\3\2\2\2\5\67\3\2\2\2\79\3\2\2\2\t;\3\2\2\2\13=\3\2\2\2\r?\3\2\2"+
"\2\17A\3\2\2\2\21C\3\2\2\2\23E\3\2\2\2\25G\3\2\2\2\27I\3\2\2\2\31K\3\2"+
"\2\2\33M\3\2\2\2\35O\3\2\2\2\37R\3\2\2\2!X\3\2\2\2#\\\3\2\2\2%a\3\2\2"+
"\2\'d\3\2\2\2)i\3\2\2\2+p\3\2\2\2-x\3\2\2\2/{\3\2\2\2\61\u0080\3\2\2\2"+
"\63\u0086\3\2\2\2\65\66\7?\2\2\66\4\3\2\2\2\678\7=\2\28\6\3\2\2\29:\7"+
"*\2\2:\b\3\2\2\2;<\7+\2\2<\n\3\2\2\2=>\7]\2\2>\f\3\2\2\2?@\7_\2\2@\16"+
"\3\2\2\2AB\7}\2\2B\20\3\2\2\2CD\7\177\2\2D\22\3\2\2\2EF\7.\2\2F\24\3\2"+
"\2\2GH\7#\2\2H\26\3\2\2\2IJ\7-\2\2J\30\3\2\2\2KL\7/\2\2L\32\3\2\2\2MN"+
"\7,\2\2N\34\3\2\2\2OP\7?\2\2PQ\7?\2\2Q\36\3\2\2\2RS\7h\2\2ST\7n\2\2TU"+
"\7q\2\2UV\7c\2\2VW\7v\2\2W \3\2\2\2XY\7k\2\2YZ\7p\2\2Z[\7v\2\2[\"\3\2"+
"\2\2\\]\7x\2\2]^\7q\2\2^_\7k\2\2_`\7f\2\2`$\3\2\2\2ab\7k\2\2bc\7h\2\2"+
"c&\3\2\2\2de\7g\2\2ef\7n\2\2fg\7u\2\2gh\7g\2\2h(\3\2\2\2ij\7t\2\2jk\7"+
"g\2\2kl\7v\2\2lm\7w\2\2mn\7t\2\2no\7p\2\2o*\3\2\2\2pu\5-\27\2qt\5-\27"+
"\2rt\t\2\2\2sq\3\2\2\2sr\3\2\2\2tw\3\2\2\2us\3\2\2\2uv\3\2\2\2v,\3\2\2"+
"\2wu\3\2\2\2xy\t\3\2\2y.\3\2\2\2z|\t\2\2\2{z\3\2\2\2|}\3\2\2\2}{\3\2\2"+
"\2}~\3\2\2\2~\60\3\2\2\2\177\u0081\t\4\2\2\u0080\177\3\2\2\2\u0081\u0082"+
"\3\2\2\2\u0082\u0080\3\2\2\2\u0082\u0083\3\2\2\2\u0083\u0084\3\2\2\2\u0084"+
"\u0085\b\31\2\2\u0085\62\3\2\2\2\u0086\u0087\7\61\2\2\u0087\u0088\7\61"+
"\2\2\u0088\u008c\3\2\2\2\u0089\u008b\13\2\2\2\u008a\u0089\3\2\2\2\u008b"+
"\u008e\3\2\2\2\u008c\u008d\3\2\2\2\u008c\u008a\3\2\2\2\u008d\u008f\3\2"+
"\2\2\u008e\u008c\3\2\2\2\u008f\u0090\7\f\2\2\u0090\u0091\3\2\2\2\u0091"+
"\u0092\b\32\2\2\u0092\64\3\2\2\2\b\2su}\u0082\u008c\3\b\2\2";
public static final ATN _ATN =
new ATNDeserializer().deserialize(_serializedATN.toCharArray());
static {
_decisionToDFA = new DFA[_ATN.getNumberOfDecisions()];
for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) {
_decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i);
}
}
}
| 6,823
| 40.865031
| 97
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/java/JavaReservedWordDecider.java
|
package com.ibm.ai4code.parser.java;
import java.util.HashSet;
import com.ibm.ai4code.parser.commons.ReservedWordDeciderI;
public class JavaReservedWordDecider implements ReservedWordDeciderI{
//cat *kws_orig.txt | grep ":" | cut -d':' -f2 | sed "s/'/\"/g" | sed "s/;/,/g"
public static final String [] keywords= {
"abstract",
"assert",
"boolean",
"break",
"byte",
"case",
"catch",
"char",
"class",
"const",
"continue",
"default",
"do",
"double",
"else",
"enum",
"extends",
"final",
"finally",
"float",
"for",
"if",
"goto",
"implements",
"import",
"instanceof",
"int",
"interface",
"long",
"native",
"new",
"package",
"private",
"protected",
"public",
"return",
"short",
"static",
"strictfp",
"super",
"switch",
"synchronized",
"this",
"throw",
"throws",
"transient",
"try",
"void",
"volatile",
"while"
};
//cat *ops_orig.txt | grep ":" | cut -d':' -f2 | sed "s/'/\"/g" | sed "s/;/,/g"
public static final String [] ops = {
"(",
")",
"{",
"}",
"[",
"]",
";", // weiz 2020-1029
",",
".",
"=",
">",
"<",
"!",
"~",
"?",
":", // weiz 2020-10-29
"==",
"<=",
">=",
"!=",
"&&",
"||",
"++",
"--",
"+",
"-",
"*",
"/",
"&",
"|",
"^",
"%",
"+=",
"-=",
"*=",
"/=",
"&=",
"|=",
"^=",
"%=",
"<<=",
">>=",
">>>=",
"->",
"::" // weiz 2020-10-29
};
HashSet<String> keywordsHashSet=new HashSet<String>();
HashSet<String> opsHashSet = new HashSet<String>();
public void buildKeyWordsHashSet() {
for(String keyword: keywords) {
keywordsHashSet.add(keyword);
}
}
public void buildOPsHashSet() {
for(String op: ops) {
opsHashSet.add(op);
}
}
public JavaReservedWordDecider() {
buildKeyWordsHashSet();
buildOPsHashSet();
}
@Override
public boolean isReserved(String word) {
return (keywordsHashSet.contains(word) || opsHashSet.contains(word));
}
}
| 2,998
| 21.380597
| 80
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/java_multi/JavaReservedWordDecider.java
|
package com.ibm.ai4code.parser.java_multi;
import java.util.HashSet;
import com.ibm.ai4code.parser.commons.ReservedWordDeciderI;
public class JavaReservedWordDecider implements ReservedWordDeciderI{
//cat *kws_orig.txt | grep ":" | cut -d':' -f2 | sed "s/'/\"/g" | sed "s/;/,/g"
public static final String [] keywords= {
"abstract",
"assert",
"boolean",
"break",
"byte",
"case",
"catch",
"char",
"class",
"const",
"continue",
"default",
"do",
"double",
"else",
"enum",
"extends",
"final",
"finally",
"float",
"for",
"if",
"goto",
"implements",
"import",
"instanceof",
"int",
"interface",
"long",
"native",
"new",
"package",
"private",
"protected",
"public",
"return",
"short",
"static",
"strictfp",
"super",
"switch",
"synchronized",
"this",
"throw",
"throws",
"transient",
"try",
"void",
"volatile",
"while"
};
//cat *ops_orig.txt | grep ":" | cut -d':' -f2 | sed "s/'/\"/g" | sed "s/;/,/g"
public static final String [] ops = {
"(",
")",
"{",
"}",
"[",
"]",
";", // weiz 2020-1029
",",
".",
"=",
">",
"<",
"!",
"~",
"?",
":", // weiz 2020-10-29
"==",
"<=",
">=",
"!=",
"&&",
"||",
"++",
"--",
"+",
"-",
"*",
"/",
"&",
"|",
"^",
"%",
"+=",
"-=",
"*=",
"/=",
"&=",
"|=",
"^=",
"%=",
"<<=",
">>=",
">>>=",
"->",
"::" // weiz 2020-10-29
};
HashSet<String> keywordsHashSet=new HashSet<String>();
HashSet<String> opsHashSet = new HashSet<String>();
public void buildKeyWordsHashSet() {
for(String keyword: keywords) {
keywordsHashSet.add(keyword);
}
}
public void buildOPsHashSet() {
for(String op: ops) {
opsHashSet.add(op);
}
}
public JavaReservedWordDecider() {
buildKeyWordsHashSet();
buildOPsHashSet();
}
@Override
public boolean isReserved(String word) {
return (keywordsHashSet.contains(word) || opsHashSet.contains(word));
}
}
| 3,004
| 21.425373
| 80
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/python/PythonLexerBase.java
|
package com.ibm.ai4code.parser.python;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.CommonToken;
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.Token;
import java.util.Stack;
public abstract class PythonLexerBase extends Lexer {
public static int TabSize = 8;
// The amount of opened braces, brackets and parenthesis.
private int _opened;
// The stack that keeps track of the indentation level.
private Stack<Integer> _indents = new Stack<>();
// A circular buffer where extra tokens are pushed on (see the NEWLINE and WS lexer rules).
private int _firstTokensInd;
private int _lastTokenInd;
private Token[] _buffer = new Token[32];
private Token _lastToken;
protected PythonLexerBase(CharStream input) {
super(input);
}
@Override
public void emit(Token token) {
super.setToken(token);
if (_buffer[_firstTokensInd] != null)
{
_lastTokenInd = IncTokenInd(_lastTokenInd);
if (_lastTokenInd == _firstTokensInd)
{
// Enlarge buffer
Token[] newArray = new Token[_buffer.length * 2];
int destInd = newArray.length - (_buffer.length - _firstTokensInd);
System.arraycopy(_buffer, 0, newArray, 0, _firstTokensInd);
System.arraycopy(_buffer, _firstTokensInd, newArray, destInd, _buffer.length - _firstTokensInd);
_firstTokensInd = destInd;
_buffer = newArray;
}
}
_buffer[_lastTokenInd] = token;
_lastToken = token;
}
@Override
public Token nextToken() {
// Check if the end-of-file is ahead and there are still some DEDENTS expected.
if (_input.LA(1) == EOF && _indents.size() > 0)
{
if (_buffer[_lastTokenInd] == null || _buffer[_lastTokenInd].getType() != PythonLexer.LINE_BREAK)
{
// First emit an extra line break that serves as the end of the statement.
emit(PythonLexer.LINE_BREAK);
}
// Now emit as much DEDENT tokens as needed.
while (_indents.size() != 0)
{
emit(PythonLexer.DEDENT);
_indents.pop();
}
}
Token next = super.nextToken();
if (_buffer[_firstTokensInd] == null)
{
return next;
}
Token result = _buffer[_firstTokensInd];
_buffer[_firstTokensInd] = null;
if (_firstTokensInd != _lastTokenInd)
{
_firstTokensInd = IncTokenInd(_firstTokensInd);
}
return result;
}
protected void HandleNewLine() {
emit(PythonLexer.NEWLINE, HIDDEN, getText());
char next = (char) _input.LA(1);
// Process whitespaces in HandleSpaces
if (next != ' ' && next != '\t' && IsNotNewLineOrComment(next))
{
ProcessNewLine(0);
}
}
protected void HandleSpaces() {
char next = (char) _input.LA(1);
if ((_lastToken == null || _lastToken.getType() == PythonLexer.NEWLINE) && IsNotNewLineOrComment(next))
{
// Calculates the indentation of the provided spaces, taking the
// following rules into account:
//
// "Tabs are replaced (from left to right) by one to eight spaces
// such that the total number of characters up to and including
// the replacement is a multiple of eight [...]"
//
// -- https://docs.python.org/3.1/reference/lexical_analysis.html#indentation
int indent = 0;
String text = getText();
for (int i = 0; i < text.length(); i++) {
indent += text.charAt(i) == '\t' ? TabSize - indent % TabSize : 1;
}
ProcessNewLine(indent);
}
emit(PythonLexer.WS, HIDDEN, getText());
}
protected void IncIndentLevel() {
_opened++;
}
protected void DecIndentLevel() {
if (_opened > 0) {
--_opened;
}
}
private boolean IsNotNewLineOrComment(char next) {
return _opened == 0 && next != '\r' && next != '\n' && next != '\f' && next != '#';
}
private void ProcessNewLine(int indent) {
emit(PythonLexer.LINE_BREAK);
int previous = _indents.size() == 0 ? 0 : _indents.peek();
if (indent > previous)
{
_indents.push(indent);
emit(PythonLexer.INDENT);
}
else
{
// Possibly emit more than 1 DEDENT token.
while (_indents.size() != 0 && _indents.peek() > indent)
{
emit(PythonLexer.DEDENT);
_indents.pop();
}
}
}
private int IncTokenInd(int ind) {
return (ind + 1) % _buffer.length;
}
private void emit(int tokenType) {
emit(tokenType, DEFAULT_TOKEN_CHANNEL, "");
}
private void emit(int tokenType, int channel, String text) {
int charIndex = getCharIndex();
CommonToken token = new CommonToken(_tokenFactorySourcePair, tokenType, channel, charIndex - text.length(), charIndex);
token.setLine(getLine());
token.setCharPositionInLine(getCharPositionInLine());
token.setText(text);
emit(token);
}
}
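// Illustrative only: a standalone restatement of the tab-expansion rule documented
// inside HandleSpaces() above (the helper and class names here are assumptions,
// not part of the original file). Each tab advances the column to the next
// multiple of TabSize, so with TabSize = 8: "\t" -> 8, "  \t" -> 8, "        \t" -> 16.
class IndentWidthSketch {
	static int measure(String leadingWhitespace, int tabSize) {
		int indent = 0;
		for (int i = 0; i < leadingWhitespace.length(); i++) {
			indent += leadingWhitespace.charAt(i) == '\t' ? tabSize - indent % tabSize : 1;
		}
		return indent;
	}
	public static void main(String[] args) {
		System.out.println(measure("\t", 8));         // 8
		System.out.println(measure("  \t", 8));       // 8
		System.out.println(measure("        \t", 8)); // 16
	}
}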
| 5,439
| 28.405405
| 127
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/python/PythonParserBase.java
|
package com.ibm.ai4code.parser.python;
import org.antlr.v4.runtime.Parser;
import org.antlr.v4.runtime.TokenStream;
public abstract class PythonParserBase extends Parser
{
public PythonVersion Version = PythonVersion.Autodetect;
protected PythonParserBase(TokenStream input) {
super(input);
}
protected boolean CheckVersion(int version) {
return Version == PythonVersion.Autodetect || version == Version.getValue();
}
protected void SetVersion(int requiredVersion) {
if (requiredVersion == 2) {
Version = PythonVersion.Python2;
} else if (requiredVersion == 3) {
Version = PythonVersion.Python3;
}
}
}
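// Illustrative only: how the Version switch above is meant to be used. The sketch
// assumes the generated PythonParser (referenced by the listener interface in this
// dump) declares PythonParserBase as its superclass via the grammar's superClass
// option; that wiring is not visible here, so the lines stay as comments.
// PythonParser parser = new PythonParser(tokens);
// parser.Version = PythonVersion.Python3;  // pin the dialect up front, or leave
//                                          // Autodetect and let version-specific
//                                          // grammar actions call SetVersion().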
| 700
| 24.962963
| 84
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/python/PythonParserListener.java
|
// Generated from PythonParser.g4 by ANTLR 4.8
package com.ibm.ai4code.parser.python;
import org.antlr.v4.runtime.tree.ParseTreeListener;
/**
* This interface defines a complete listener for a parse tree produced by
* {@link PythonParser}.
*/
public interface PythonParserListener extends ParseTreeListener {
/**
* Enter a parse tree produced by {@link PythonParser#root}.
* @param ctx the parse tree
*/
void enterRoot(PythonParser.RootContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#root}.
* @param ctx the parse tree
*/
void exitRoot(PythonParser.RootContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#single_input}.
* @param ctx the parse tree
*/
void enterSingle_input(PythonParser.Single_inputContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#single_input}.
* @param ctx the parse tree
*/
void exitSingle_input(PythonParser.Single_inputContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#file_input}.
* @param ctx the parse tree
*/
void enterFile_input(PythonParser.File_inputContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#file_input}.
* @param ctx the parse tree
*/
void exitFile_input(PythonParser.File_inputContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#eval_input}.
* @param ctx the parse tree
*/
void enterEval_input(PythonParser.Eval_inputContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#eval_input}.
* @param ctx the parse tree
*/
void exitEval_input(PythonParser.Eval_inputContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#stmt}.
* @param ctx the parse tree
*/
void enterStmt(PythonParser.StmtContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#stmt}.
* @param ctx the parse tree
*/
void exitStmt(PythonParser.StmtContext ctx);
/**
* Enter a parse tree produced by the {@code if_stmt}
* labeled alternative in {@link PythonParser#compound_stmt}.
* @param ctx the parse tree
*/
void enterIf_stmt(PythonParser.If_stmtContext ctx);
/**
* Exit a parse tree produced by the {@code if_stmt}
* labeled alternative in {@link PythonParser#compound_stmt}.
* @param ctx the parse tree
*/
void exitIf_stmt(PythonParser.If_stmtContext ctx);
/**
* Enter a parse tree produced by the {@code while_stmt}
* labeled alternative in {@link PythonParser#compound_stmt}.
* @param ctx the parse tree
*/
void enterWhile_stmt(PythonParser.While_stmtContext ctx);
/**
* Exit a parse tree produced by the {@code while_stmt}
* labeled alternative in {@link PythonParser#compound_stmt}.
* @param ctx the parse tree
*/
void exitWhile_stmt(PythonParser.While_stmtContext ctx);
/**
* Enter a parse tree produced by the {@code for_stmt}
* labeled alternative in {@link PythonParser#compound_stmt}.
* @param ctx the parse tree
*/
void enterFor_stmt(PythonParser.For_stmtContext ctx);
/**
* Exit a parse tree produced by the {@code for_stmt}
* labeled alternative in {@link PythonParser#compound_stmt}.
* @param ctx the parse tree
*/
void exitFor_stmt(PythonParser.For_stmtContext ctx);
/**
* Enter a parse tree produced by the {@code try_stmt}
* labeled alternative in {@link PythonParser#compound_stmt}.
* @param ctx the parse tree
*/
void enterTry_stmt(PythonParser.Try_stmtContext ctx);
/**
* Exit a parse tree produced by the {@code try_stmt}
* labeled alternative in {@link PythonParser#compound_stmt}.
* @param ctx the parse tree
*/
void exitTry_stmt(PythonParser.Try_stmtContext ctx);
/**
* Enter a parse tree produced by the {@code with_stmt}
* labeled alternative in {@link PythonParser#compound_stmt}.
* @param ctx the parse tree
*/
void enterWith_stmt(PythonParser.With_stmtContext ctx);
/**
* Exit a parse tree produced by the {@code with_stmt}
* labeled alternative in {@link PythonParser#compound_stmt}.
* @param ctx the parse tree
*/
void exitWith_stmt(PythonParser.With_stmtContext ctx);
/**
* Enter a parse tree produced by the {@code class_or_func_def_stmt}
* labeled alternative in {@link PythonParser#compound_stmt}.
* @param ctx the parse tree
*/
void enterClass_or_func_def_stmt(PythonParser.Class_or_func_def_stmtContext ctx);
/**
* Exit a parse tree produced by the {@code class_or_func_def_stmt}
* labeled alternative in {@link PythonParser#compound_stmt}.
* @param ctx the parse tree
*/
void exitClass_or_func_def_stmt(PythonParser.Class_or_func_def_stmtContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#suite}.
* @param ctx the parse tree
*/
void enterSuite(PythonParser.SuiteContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#suite}.
* @param ctx the parse tree
*/
void exitSuite(PythonParser.SuiteContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#decorator}.
* @param ctx the parse tree
*/
void enterDecorator(PythonParser.DecoratorContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#decorator}.
* @param ctx the parse tree
*/
void exitDecorator(PythonParser.DecoratorContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#elif_clause}.
* @param ctx the parse tree
*/
void enterElif_clause(PythonParser.Elif_clauseContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#elif_clause}.
* @param ctx the parse tree
*/
void exitElif_clause(PythonParser.Elif_clauseContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#else_clause}.
* @param ctx the parse tree
*/
void enterElse_clause(PythonParser.Else_clauseContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#else_clause}.
* @param ctx the parse tree
*/
void exitElse_clause(PythonParser.Else_clauseContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#finally_clause}.
* @param ctx the parse tree
*/
void enterFinally_clause(PythonParser.Finally_clauseContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#finally_clause}.
* @param ctx the parse tree
*/
void exitFinally_clause(PythonParser.Finally_clauseContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#with_item}.
* @param ctx the parse tree
*/
void enterWith_item(PythonParser.With_itemContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#with_item}.
* @param ctx the parse tree
*/
void exitWith_item(PythonParser.With_itemContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#except_clause}.
* @param ctx the parse tree
*/
void enterExcept_clause(PythonParser.Except_clauseContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#except_clause}.
* @param ctx the parse tree
*/
void exitExcept_clause(PythonParser.Except_clauseContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#classdef}.
* @param ctx the parse tree
*/
void enterClassdef(PythonParser.ClassdefContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#classdef}.
* @param ctx the parse tree
*/
void exitClassdef(PythonParser.ClassdefContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#funcdef}.
* @param ctx the parse tree
*/
void enterFuncdef(PythonParser.FuncdefContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#funcdef}.
* @param ctx the parse tree
*/
void exitFuncdef(PythonParser.FuncdefContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#typedargslist}.
* @param ctx the parse tree
*/
void enterTypedargslist(PythonParser.TypedargslistContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#typedargslist}.
* @param ctx the parse tree
*/
void exitTypedargslist(PythonParser.TypedargslistContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#args}.
* @param ctx the parse tree
*/
void enterArgs(PythonParser.ArgsContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#args}.
* @param ctx the parse tree
*/
void exitArgs(PythonParser.ArgsContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#kwargs}.
* @param ctx the parse tree
*/
void enterKwargs(PythonParser.KwargsContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#kwargs}.
* @param ctx the parse tree
*/
void exitKwargs(PythonParser.KwargsContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#def_parameters}.
* @param ctx the parse tree
*/
void enterDef_parameters(PythonParser.Def_parametersContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#def_parameters}.
* @param ctx the parse tree
*/
void exitDef_parameters(PythonParser.Def_parametersContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#def_parameter}.
* @param ctx the parse tree
*/
void enterDef_parameter(PythonParser.Def_parameterContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#def_parameter}.
* @param ctx the parse tree
*/
void exitDef_parameter(PythonParser.Def_parameterContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#named_parameter}.
* @param ctx the parse tree
*/
void enterNamed_parameter(PythonParser.Named_parameterContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#named_parameter}.
* @param ctx the parse tree
*/
void exitNamed_parameter(PythonParser.Named_parameterContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#simple_stmt}.
* @param ctx the parse tree
*/
void enterSimple_stmt(PythonParser.Simple_stmtContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#simple_stmt}.
* @param ctx the parse tree
*/
void exitSimple_stmt(PythonParser.Simple_stmtContext ctx);
/**
* Enter a parse tree produced by the {@code expr_stmt}
* labeled alternative in {@link PythonParser#small_stmt}.
* @param ctx the parse tree
*/
void enterExpr_stmt(PythonParser.Expr_stmtContext ctx);
/**
* Exit a parse tree produced by the {@code expr_stmt}
* labeled alternative in {@link PythonParser#small_stmt}.
* @param ctx the parse tree
*/
void exitExpr_stmt(PythonParser.Expr_stmtContext ctx);
/**
* Enter a parse tree produced by the {@code print_stmt}
* labeled alternative in {@link PythonParser#small_stmt}.
* @param ctx the parse tree
*/
void enterPrint_stmt(PythonParser.Print_stmtContext ctx);
/**
* Exit a parse tree produced by the {@code print_stmt}
* labeled alternative in {@link PythonParser#small_stmt}.
* @param ctx the parse tree
*/
void exitPrint_stmt(PythonParser.Print_stmtContext ctx);
/**
* Enter a parse tree produced by the {@code del_stmt}
* labeled alternative in {@link PythonParser#small_stmt}.
* @param ctx the parse tree
*/
void enterDel_stmt(PythonParser.Del_stmtContext ctx);
/**
* Exit a parse tree produced by the {@code del_stmt}
* labeled alternative in {@link PythonParser#small_stmt}.
* @param ctx the parse tree
*/
void exitDel_stmt(PythonParser.Del_stmtContext ctx);
/**
* Enter a parse tree produced by the {@code pass_stmt}
* labeled alternative in {@link PythonParser#small_stmt}.
* @param ctx the parse tree
*/
void enterPass_stmt(PythonParser.Pass_stmtContext ctx);
/**
* Exit a parse tree produced by the {@code pass_stmt}
* labeled alternative in {@link PythonParser#small_stmt}.
* @param ctx the parse tree
*/
void exitPass_stmt(PythonParser.Pass_stmtContext ctx);
/**
* Enter a parse tree produced by the {@code break_stmt}
* labeled alternative in {@link PythonParser#small_stmt}.
* @param ctx the parse tree
*/
void enterBreak_stmt(PythonParser.Break_stmtContext ctx);
/**
* Exit a parse tree produced by the {@code break_stmt}
* labeled alternative in {@link PythonParser#small_stmt}.
* @param ctx the parse tree
*/
void exitBreak_stmt(PythonParser.Break_stmtContext ctx);
/**
* Enter a parse tree produced by the {@code continue_stmt}
* labeled alternative in {@link PythonParser#small_stmt}.
* @param ctx the parse tree
*/
void enterContinue_stmt(PythonParser.Continue_stmtContext ctx);
/**
* Exit a parse tree produced by the {@code continue_stmt}
* labeled alternative in {@link PythonParser#small_stmt}.
* @param ctx the parse tree
*/
void exitContinue_stmt(PythonParser.Continue_stmtContext ctx);
/**
* Enter a parse tree produced by the {@code return_stmt}
* labeled alternative in {@link PythonParser#small_stmt}.
* @param ctx the parse tree
*/
void enterReturn_stmt(PythonParser.Return_stmtContext ctx);
/**
* Exit a parse tree produced by the {@code return_stmt}
* labeled alternative in {@link PythonParser#small_stmt}.
* @param ctx the parse tree
*/
void exitReturn_stmt(PythonParser.Return_stmtContext ctx);
/**
* Enter a parse tree produced by the {@code raise_stmt}
* labeled alternative in {@link PythonParser#small_stmt}.
* @param ctx the parse tree
*/
void enterRaise_stmt(PythonParser.Raise_stmtContext ctx);
/**
* Exit a parse tree produced by the {@code raise_stmt}
* labeled alternative in {@link PythonParser#small_stmt}.
* @param ctx the parse tree
*/
void exitRaise_stmt(PythonParser.Raise_stmtContext ctx);
/**
* Enter a parse tree produced by the {@code yield_stmt}
* labeled alternative in {@link PythonParser#small_stmt}.
* @param ctx the parse tree
*/
void enterYield_stmt(PythonParser.Yield_stmtContext ctx);
/**
* Exit a parse tree produced by the {@code yield_stmt}
* labeled alternative in {@link PythonParser#small_stmt}.
* @param ctx the parse tree
*/
void exitYield_stmt(PythonParser.Yield_stmtContext ctx);
/**
* Enter a parse tree produced by the {@code import_stmt}
* labeled alternative in {@link PythonParser#small_stmt}.
* @param ctx the parse tree
*/
void enterImport_stmt(PythonParser.Import_stmtContext ctx);
/**
* Exit a parse tree produced by the {@code import_stmt}
* labeled alternative in {@link PythonParser#small_stmt}.
* @param ctx the parse tree
*/
void exitImport_stmt(PythonParser.Import_stmtContext ctx);
/**
* Enter a parse tree produced by the {@code from_stmt}
* labeled alternative in {@link PythonParser#small_stmt}.
* @param ctx the parse tree
*/
void enterFrom_stmt(PythonParser.From_stmtContext ctx);
/**
* Exit a parse tree produced by the {@code from_stmt}
* labeled alternative in {@link PythonParser#small_stmt}.
* @param ctx the parse tree
*/
void exitFrom_stmt(PythonParser.From_stmtContext ctx);
/**
* Enter a parse tree produced by the {@code global_stmt}
* labeled alternative in {@link PythonParser#small_stmt}.
* @param ctx the parse tree
*/
void enterGlobal_stmt(PythonParser.Global_stmtContext ctx);
/**
* Exit a parse tree produced by the {@code global_stmt}
* labeled alternative in {@link PythonParser#small_stmt}.
* @param ctx the parse tree
*/
void exitGlobal_stmt(PythonParser.Global_stmtContext ctx);
/**
* Enter a parse tree produced by the {@code exec_stmt}
* labeled alternative in {@link PythonParser#small_stmt}.
* @param ctx the parse tree
*/
void enterExec_stmt(PythonParser.Exec_stmtContext ctx);
/**
* Exit a parse tree produced by the {@code exec_stmt}
* labeled alternative in {@link PythonParser#small_stmt}.
* @param ctx the parse tree
*/
void exitExec_stmt(PythonParser.Exec_stmtContext ctx);
/**
* Enter a parse tree produced by the {@code assert_stmt}
* labeled alternative in {@link PythonParser#small_stmt}.
* @param ctx the parse tree
*/
void enterAssert_stmt(PythonParser.Assert_stmtContext ctx);
/**
* Exit a parse tree produced by the {@code assert_stmt}
* labeled alternative in {@link PythonParser#small_stmt}.
* @param ctx the parse tree
*/
void exitAssert_stmt(PythonParser.Assert_stmtContext ctx);
/**
* Enter a parse tree produced by the {@code nonlocal_stmt}
* labeled alternative in {@link PythonParser#small_stmt}.
* @param ctx the parse tree
*/
void enterNonlocal_stmt(PythonParser.Nonlocal_stmtContext ctx);
/**
* Exit a parse tree produced by the {@code nonlocal_stmt}
* labeled alternative in {@link PythonParser#small_stmt}.
* @param ctx the parse tree
*/
void exitNonlocal_stmt(PythonParser.Nonlocal_stmtContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#testlist_star_expr}.
* @param ctx the parse tree
*/
void enterTestlist_star_expr(PythonParser.Testlist_star_exprContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#testlist_star_expr}.
* @param ctx the parse tree
*/
void exitTestlist_star_expr(PythonParser.Testlist_star_exprContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#star_expr}.
* @param ctx the parse tree
*/
void enterStar_expr(PythonParser.Star_exprContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#star_expr}.
* @param ctx the parse tree
*/
void exitStar_expr(PythonParser.Star_exprContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#assign_part}.
* @param ctx the parse tree
*/
void enterAssign_part(PythonParser.Assign_partContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#assign_part}.
* @param ctx the parse tree
*/
void exitAssign_part(PythonParser.Assign_partContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#exprlist}.
* @param ctx the parse tree
*/
void enterExprlist(PythonParser.ExprlistContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#exprlist}.
* @param ctx the parse tree
*/
void exitExprlist(PythonParser.ExprlistContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#import_as_names}.
* @param ctx the parse tree
*/
void enterImport_as_names(PythonParser.Import_as_namesContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#import_as_names}.
* @param ctx the parse tree
*/
void exitImport_as_names(PythonParser.Import_as_namesContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#import_as_name}.
* @param ctx the parse tree
*/
void enterImport_as_name(PythonParser.Import_as_nameContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#import_as_name}.
* @param ctx the parse tree
*/
void exitImport_as_name(PythonParser.Import_as_nameContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#dotted_as_names}.
* @param ctx the parse tree
*/
void enterDotted_as_names(PythonParser.Dotted_as_namesContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#dotted_as_names}.
* @param ctx the parse tree
*/
void exitDotted_as_names(PythonParser.Dotted_as_namesContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#dotted_as_name}.
* @param ctx the parse tree
*/
void enterDotted_as_name(PythonParser.Dotted_as_nameContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#dotted_as_name}.
* @param ctx the parse tree
*/
void exitDotted_as_name(PythonParser.Dotted_as_nameContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#test}.
* @param ctx the parse tree
*/
void enterTest(PythonParser.TestContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#test}.
* @param ctx the parse tree
*/
void exitTest(PythonParser.TestContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#varargslist}.
* @param ctx the parse tree
*/
void enterVarargslist(PythonParser.VarargslistContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#varargslist}.
* @param ctx the parse tree
*/
void exitVarargslist(PythonParser.VarargslistContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#vardef_parameters}.
* @param ctx the parse tree
*/
void enterVardef_parameters(PythonParser.Vardef_parametersContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#vardef_parameters}.
* @param ctx the parse tree
*/
void exitVardef_parameters(PythonParser.Vardef_parametersContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#vardef_parameter}.
* @param ctx the parse tree
*/
void enterVardef_parameter(PythonParser.Vardef_parameterContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#vardef_parameter}.
* @param ctx the parse tree
*/
void exitVardef_parameter(PythonParser.Vardef_parameterContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#varargs}.
* @param ctx the parse tree
*/
void enterVarargs(PythonParser.VarargsContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#varargs}.
* @param ctx the parse tree
*/
void exitVarargs(PythonParser.VarargsContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#varkwargs}.
* @param ctx the parse tree
*/
void enterVarkwargs(PythonParser.VarkwargsContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#varkwargs}.
* @param ctx the parse tree
*/
void exitVarkwargs(PythonParser.VarkwargsContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#logical_test}.
* @param ctx the parse tree
*/
void enterLogical_test(PythonParser.Logical_testContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#logical_test}.
* @param ctx the parse tree
*/
void exitLogical_test(PythonParser.Logical_testContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#comparison}.
* @param ctx the parse tree
*/
void enterComparison(PythonParser.ComparisonContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#comparison}.
* @param ctx the parse tree
*/
void exitComparison(PythonParser.ComparisonContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#expr}.
* @param ctx the parse tree
*/
void enterExpr(PythonParser.ExprContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#expr}.
* @param ctx the parse tree
*/
void exitExpr(PythonParser.ExprContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#atom}.
* @param ctx the parse tree
*/
void enterAtom(PythonParser.AtomContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#atom}.
* @param ctx the parse tree
*/
void exitAtom(PythonParser.AtomContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#dictorsetmaker}.
* @param ctx the parse tree
*/
void enterDictorsetmaker(PythonParser.DictorsetmakerContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#dictorsetmaker}.
* @param ctx the parse tree
*/
void exitDictorsetmaker(PythonParser.DictorsetmakerContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#testlist_comp}.
* @param ctx the parse tree
*/
void enterTestlist_comp(PythonParser.Testlist_compContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#testlist_comp}.
* @param ctx the parse tree
*/
void exitTestlist_comp(PythonParser.Testlist_compContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#testlist}.
* @param ctx the parse tree
*/
void enterTestlist(PythonParser.TestlistContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#testlist}.
* @param ctx the parse tree
*/
void exitTestlist(PythonParser.TestlistContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#dotted_name}.
* @param ctx the parse tree
*/
void enterDotted_name(PythonParser.Dotted_nameContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#dotted_name}.
* @param ctx the parse tree
*/
void exitDotted_name(PythonParser.Dotted_nameContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#name}.
* @param ctx the parse tree
*/
void enterName(PythonParser.NameContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#name}.
* @param ctx the parse tree
*/
void exitName(PythonParser.NameContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#number}.
* @param ctx the parse tree
*/
void enterNumber(PythonParser.NumberContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#number}.
* @param ctx the parse tree
*/
void exitNumber(PythonParser.NumberContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#integer}.
* @param ctx the parse tree
*/
void enterInteger(PythonParser.IntegerContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#integer}.
* @param ctx the parse tree
*/
void exitInteger(PythonParser.IntegerContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#yield_expr}.
* @param ctx the parse tree
*/
void enterYield_expr(PythonParser.Yield_exprContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#yield_expr}.
* @param ctx the parse tree
*/
void exitYield_expr(PythonParser.Yield_exprContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#yield_arg}.
* @param ctx the parse tree
*/
void enterYield_arg(PythonParser.Yield_argContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#yield_arg}.
* @param ctx the parse tree
*/
void exitYield_arg(PythonParser.Yield_argContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#trailer}.
* @param ctx the parse tree
*/
void enterTrailer(PythonParser.TrailerContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#trailer}.
* @param ctx the parse tree
*/
void exitTrailer(PythonParser.TrailerContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#arguments}.
* @param ctx the parse tree
*/
void enterArguments(PythonParser.ArgumentsContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#arguments}.
* @param ctx the parse tree
*/
void exitArguments(PythonParser.ArgumentsContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#arglist}.
* @param ctx the parse tree
*/
void enterArglist(PythonParser.ArglistContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#arglist}.
* @param ctx the parse tree
*/
void exitArglist(PythonParser.ArglistContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#argument}.
* @param ctx the parse tree
*/
void enterArgument(PythonParser.ArgumentContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#argument}.
* @param ctx the parse tree
*/
void exitArgument(PythonParser.ArgumentContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#subscriptlist}.
* @param ctx the parse tree
*/
void enterSubscriptlist(PythonParser.SubscriptlistContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#subscriptlist}.
* @param ctx the parse tree
*/
void exitSubscriptlist(PythonParser.SubscriptlistContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#subscript}.
* @param ctx the parse tree
*/
void enterSubscript(PythonParser.SubscriptContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#subscript}.
* @param ctx the parse tree
*/
void exitSubscript(PythonParser.SubscriptContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#sliceop}.
* @param ctx the parse tree
*/
void enterSliceop(PythonParser.SliceopContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#sliceop}.
* @param ctx the parse tree
*/
void exitSliceop(PythonParser.SliceopContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#comp_for}.
* @param ctx the parse tree
*/
void enterComp_for(PythonParser.Comp_forContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#comp_for}.
* @param ctx the parse tree
*/
void exitComp_for(PythonParser.Comp_forContext ctx);
/**
* Enter a parse tree produced by {@link PythonParser#comp_iter}.
* @param ctx the parse tree
*/
void enterComp_iter(PythonParser.Comp_iterContext ctx);
/**
* Exit a parse tree produced by {@link PythonParser#comp_iter}.
* @param ctx the parse tree
*/
void exitComp_iter(PythonParser.Comp_iterContext ctx);
}
| 28,334
| 33.05649
| 82
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/python/PythonReservedWordDecider.java
|
package com.ibm.ai4code.parser.python;
import java.util.HashSet;
import com.ibm.ai4code.parser.commons.ReservedWordDeciderI;
public class PythonReservedWordDecider implements ReservedWordDeciderI{
// cat *kws_orig.txt | grep ":" | cut -d':' -f2 | sed "s/'/\"/g" | sed "s/;/,/g"
public static final String [] keywords = {
"def",
"return",
"raise",
"from",
"import",
"nonlocal",
"as",
"global",
"assert",
"if",
"elif",
"else",
"while",
"for",
"in",
"try",
"None",
"finally",
"with",
"except",
"lambda",
"or",
"and",
"not",
"is",
"class",
"yield",
"del",
"pass",
"continue",
"break",
"async",
"await",
"print",
"exec",
"True",
"False"
};
// cat *ops_orig.txt | grep ":" | cut -d':' -f2 | sed "s/'/\"/g" | sed "s/;/,/g"
public static final String [] ops = {
".",
"...",
"`",
"*",
",",
":", // weiz 2020-10-29
";", // weiz 2020-10-29
"**",
"=",
"|",
"^",
"&",
"<<",
">>",
"+",
"-",
"/",
"%",
"//",
"~",
"<",
">",
"==",
">=",
"<=",
"<>",
"!=",
"@",
"->",
"+=",
"-=",
"*=",
"@=",
"/=",
"%=",
"&=",
"|=",
"^=",
"<<=",
">>=",
"**=",
"//=",
"(", // weiz 2020-10-29 {IncIndentLevel(),},
")", // weiz 2020-10-29 {DecIndentLevel(),},
"{", // weiz 2020-10-29 {IncIndentLevel(),},
"}", // weiz 2020-10-29 {DecIndentLevel(),},
"[", // weiz 2020-10-29 {IncIndentLevel(),},
"]" // weiz 2020-10-29 {DecIndentLevel(),},
};
HashSet<String> keywordsHashSet=new HashSet<String>();
HashSet<String> opsHashSet = new HashSet<String>();
public void buildKeyWordsHashSet() {
for(String keyword: keywords) {
keywordsHashSet.add(keyword);
}
}
public void buildOPsHashSet() {
for(String op: ops) {
opsHashSet.add(op);
}
}
public PythonReservedWordDecider() {
buildKeyWordsHashSet();
buildOPsHashSet();
}
@Override
public boolean isReserved(String word) {
return (keywordsHashSet.contains(word) || opsHashSet.contains(word));
}
}
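// Illustrative usage sketch (hypothetical, not part of the original sources):
// shows how the decider classifies keywords, operators and plain identifiers.
// The demo class name and the sample strings are assumptions made for this example.
class PythonReservedWordDeciderDemo {
    public static void main(String[] args) {
        PythonReservedWordDecider decider = new PythonReservedWordDecider();
        System.out.println(decider.isReserved("def"));  // true: Python keyword
        System.out.println(decider.isReserved("**="));  // true: operator
        System.out.println(decider.isReserved("foo"));  // false: ordinary identifier
    }
}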
| 2,185
| 16.488
| 81
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/python/PythonVersion.java
|
package com.ibm.ai4code.parser.python;
public enum PythonVersion {
Autodetect(0),
Python2(2),
Python3(3);
private final int value;
PythonVersion(int value) {
this.value = value;
}
public int getValue() {
return value;
}
}
| 274
| 14.277778
| 38
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/python_multi/PythonLexerBase.java
|
package com.ibm.ai4code.parser.python_multi;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.CommonToken;
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.Token;
import java.util.Stack;
public abstract class PythonLexerBase extends Lexer {
public static int TabSize = 8;
// The number of currently open braces, brackets and parentheses.
private int _opened;
// The stack that keeps track of the indentation level.
private Stack<Integer> _indents = new Stack<>();
// A circular buffer where extra tokens are pushed on (see the NEWLINE and WS lexer rules).
private int _firstTokensInd;
private int _lastTokenInd;
private Token[] _buffer = new Token[32];
private Token _lastToken;
protected PythonLexerBase(CharStream input) {
super(input);
}
@Override
public void emit(Token token) {
super.setToken(token);
if (_buffer[_firstTokensInd] != null)
{
_lastTokenInd = IncTokenInd(_lastTokenInd);
if (_lastTokenInd == _firstTokensInd)
{
// Enlarge buffer
Token[] newArray = new Token[_buffer.length * 2];
int destInd = newArray.length - (_buffer.length - _firstTokensInd);
System.arraycopy(_buffer, 0, newArray, 0, _firstTokensInd);
System.arraycopy(_buffer, _firstTokensInd, newArray, destInd, _buffer.length - _firstTokensInd);
_firstTokensInd = destInd;
_buffer = newArray;
}
}
_buffer[_lastTokenInd] = token;
_lastToken = token;
}
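    // Illustrative walk-through of the enlargement branch above (hypothetical
    // values): with _buffer.length == 4, _firstTokensInd == 2 and _lastTokenInd
    // wrapping back to 2, a Token[8] is allocated, slots 0..1 are copied to 0..1,
    // slots 2..3 are copied to 6..7, and _firstTokensInd becomes 6; the token
    // being emitted then lands at index 2, so pending tokens keep FIFO order.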
@Override
public Token nextToken() {
// Check if the end-of-file is ahead and there are still some DEDENTS expected.
if (_input.LA(1) == EOF && _indents.size() > 0)
{
if (_buffer[_lastTokenInd] == null || _buffer[_lastTokenInd].getType() != PythonLexer.LINE_BREAK)
{
// First emit an extra line break that serves as the end of the statement.
emit(PythonLexer.LINE_BREAK);
}
// Now emit as many DEDENT tokens as needed.
while (_indents.size() != 0)
{
emit(PythonLexer.DEDENT);
_indents.pop();
}
}
Token next = super.nextToken();
if (_buffer[_firstTokensInd] == null)
{
return next;
}
Token result = _buffer[_firstTokensInd];
_buffer[_firstTokensInd] = null;
if (_firstTokensInd != _lastTokenInd)
{
_firstTokensInd = IncTokenInd(_firstTokensInd);
}
return result;
}
protected void HandleNewLine() {
emit(PythonLexer.NEWLINE, HIDDEN, getText());
char next = (char) _input.LA(1);
// Process whitespaces in HandleSpaces
if (next != ' ' && next != '\t' && IsNotNewLineOrComment(next))
{
ProcessNewLine(0);
}
}
protected void HandleSpaces() {
char next = (char) _input.LA(1);
if ((_lastToken == null || _lastToken.getType() == PythonLexer.NEWLINE) && IsNotNewLineOrComment(next))
{
// Calculates the indentation of the provided spaces, taking the
// following rules into account:
//
// "Tabs are replaced (from left to right) by one to eight spaces
// such that the total number of characters up to and including
// the replacement is a multiple of eight [...]"
//
// -- https://docs.python.org/3.1/reference/lexical_analysis.html#indentation
int indent = 0;
String text = getText();
for (int i = 0; i < text.length(); i++) {
indent += text.charAt(i) == '\t' ? TabSize - indent % TabSize : 1;
}
ProcessNewLine(indent);
}
emit(PythonLexer.WS, HIDDEN, getText());
}
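    // Illustrative example of the indentation computation above (hypothetical
    // input): with TabSize == 8, the leading whitespace "\t  \t" yields
    // indent = 8 (tab) + 1 + 1 (spaces) + 6 (tab padding 10 up to 16) = 16,
    // matching the "multiple of eight" rule quoted from the Python reference.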
protected void IncIndentLevel() {
_opened++;
}
protected void DecIndentLevel() {
if (_opened > 0) {
--_opened;
}
}
private boolean IsNotNewLineOrComment(char next) {
return _opened == 0 && next != '\r' && next != '\n' && next != '\f' && next != '#';
}
private void ProcessNewLine(int indent) {
emit(PythonLexer.LINE_BREAK);
int previous = _indents.size() == 0 ? 0 : _indents.peek();
if (indent > previous)
{
_indents.push(indent);
emit(PythonLexer.INDENT);
}
else
{
// Possibly emit more than 1 DEDENT token.
while (_indents.size() != 0 && _indents.peek() > indent)
{
emit(PythonLexer.DEDENT);
_indents.pop();
}
}
}
private int IncTokenInd(int ind) {
return (ind + 1) % _buffer.length;
}
private void emit(int tokenType) {
emit(tokenType, DEFAULT_TOKEN_CHANNEL, "");
}
private void emit(int tokenType, int channel, String text) {
int charIndex = getCharIndex();
CommonToken token = new CommonToken(_tokenFactorySourcePair, tokenType, channel, charIndex - text.length(), charIndex);
token.setLine(getLine());
token.setCharPositionInLine(getCharPositionInLine());
token.setText(text);
emit(token);
}
}
| 5,445
| 28.437838
| 127
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/python_multi/PythonParserBase.java
|
package com.ibm.ai4code.parser.python_multi;
import org.antlr.v4.runtime.Parser;
import org.antlr.v4.runtime.TokenStream;
public abstract class PythonParserBase extends Parser
{
public PythonVersion Version = PythonVersion.Autodetect;
protected PythonParserBase(TokenStream input) {
super(input);
}
protected boolean CheckVersion(int version) {
return Version == PythonVersion.Autodetect || version == Version.getValue();
}
protected void SetVersion(int requiredVersion) {
if (requiredVersion == 2) {
Version = PythonVersion.Python2;
} else if (requiredVersion == 3) {
Version = PythonVersion.Python3;
}
}
}
| 706
| 25.185185
| 84
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/python_multi/PythonTokensLexerBase.java
|
package com.ibm.ai4code.parser.python_multi;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.CommonToken;
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.Token;
import java.util.Stack;
public abstract class PythonTokensLexerBase extends Lexer {
public static int TabSize = 8;
// The number of currently open braces, brackets and parentheses.
private int _opened;
// The stack that keeps track of the indentation level.
private Stack<Integer> _indents = new Stack<>();
// A circular buffer where extra tokens are pushed on (see the NEWLINE and WS lexer rules).
private int _firstTokensInd;
private int _lastTokenInd;
private Token[] _buffer = new Token[32];
private Token _lastToken;
protected PythonTokensLexerBase(CharStream input) {
super(input);
}
@Override
public void emit(Token token) {
super.setToken(token);
if (_buffer[_firstTokensInd] != null)
{
_lastTokenInd = IncTokenInd(_lastTokenInd);
if (_lastTokenInd == _firstTokensInd)
{
// Enlarge buffer
Token[] newArray = new Token[_buffer.length * 2];
int destInd = newArray.length - (_buffer.length - _firstTokensInd);
System.arraycopy(_buffer, 0, newArray, 0, _firstTokensInd);
System.arraycopy(_buffer, _firstTokensInd, newArray, destInd, _buffer.length - _firstTokensInd);
_firstTokensInd = destInd;
_buffer = newArray;
}
}
_buffer[_lastTokenInd] = token;
_lastToken = token;
}
@Override
public Token nextToken() {
// Check if the end-of-file is ahead and there are still some DEDENTS expected.
if (_input.LA(1) == EOF && _indents.size() > 0)
{
if (_buffer[_lastTokenInd] == null || _buffer[_lastTokenInd].getType() != PythonTokens.LINE_BREAK)
{
// First emit an extra line break that serves as the end of the statement.
emit(PythonTokens.LINE_BREAK); // weiz 2021-02-22, we need to use PythonTokens
// instead of PythonLexer here to emit the right token.
}
// Now emit as many DEDENT tokens as needed.
while (_indents.size() != 0)
{
emit(PythonTokens.DEDENT);// weiz 2021-02-22, we need to use PythonTokens
// instead of PythonLexer here to emit the right token.
_indents.pop();
}
}
Token next = super.nextToken();
if (_buffer[_firstTokensInd] == null)
{
return next;
}
Token result = _buffer[_firstTokensInd];
_buffer[_firstTokensInd] = null;
if (_firstTokensInd != _lastTokenInd)
{
_firstTokensInd = IncTokenInd(_firstTokensInd);
}
return result;
}
protected void HandleNewLine() {
emit(PythonTokens.NEWLINE, HIDDEN, getText());// weiz 2021-02-22, we need to use PythonTokens
// instead of PythonLexer here to emit the right token.
char next = (char) _input.LA(1);
// Process whitespaces in HandleSpaces
if (next != ' ' && next != '\t' && IsNotNewLineOrComment(next))
{
ProcessNewLine(0);
}
}
protected void HandleSpaces() {
char next = (char) _input.LA(1);
// weiz 2021-02-22, we need to use PythonTokens
// instead of PythonLexer here to emit the right token.
if ((_lastToken == null || _lastToken.getType() == PythonTokens.NEWLINE) && IsNotNewLineOrComment(next))
{
// Calculates the indentation of the provided spaces, taking the
// following rules into account:
//
// "Tabs are replaced (from left to right) by one to eight spaces
// such that the total number of characters up to and including
// the replacement is a multiple of eight [...]"
//
// -- https://docs.python.org/3.1/reference/lexical_analysis.html#indentation
int indent = 0;
String text = getText();
for (int i = 0; i < text.length(); i++) {
indent += text.charAt(i) == '\t' ? TabSize - indent % TabSize : 1;
}
ProcessNewLine(indent);
}
emit(PythonTokens.WS, HIDDEN, getText()); // weiz 2021-02-22, we need to use PythonTokens
// instead of PythonLexer here to emit the right token.
}
protected void IncIndentLevel() {
_opened++;
}
protected void DecIndentLevel() {
if (_opened > 0) {
--_opened;
}
}
private boolean IsNotNewLineOrComment(char next) {
return _opened == 0 && next != '\r' && next != '\n' && next != '\f' && next != '#';
}
private void ProcessNewLine(int indent) {
emit(PythonTokens.LINE_BREAK);// weiz 2021-02-22, we need to use PythonTokens
// instead of PythonLexer here to emit the right token.
int previous = _indents.size() == 0 ? 0 : _indents.peek();
if (indent > previous)
{
_indents.push(indent);
emit(PythonTokens.INDENT);// weiz 2021-02-22, we need to use PythonTokens
// instead of PythonLexer here to emit the right token.
}
else
{
// Possibly emit more than 1 DEDENT token.
while (_indents.size() != 0 && _indents.peek() > indent)
{
emit(PythonTokens.DEDENT);// weiz 2021-02-22, we need to use PythonTokens
// instead of PythonLexer here to emit the right token.
_indents.pop();
}
}
}
private int IncTokenInd(int ind) {
return (ind + 1) % _buffer.length;
}
private void emit(int tokenType) {
emit(tokenType, DEFAULT_TOKEN_CHANNEL, "");
}
private void emit(int tokenType, int channel, String text) {
int charIndex = getCharIndex();
CommonToken token = new CommonToken(_tokenFactorySourcePair, tokenType, channel, charIndex - text.length(), charIndex);
token.setLine(getLine());
token.setCharPositionInLine(getCharPositionInLine());
token.setText(text);
emit(token);
}
}
| 6,392
| 32.124352
| 127
|
java
|
null |
Project_CodeNet-main/tools/spt-generator/src/com/ibm/ai4code/parser/python_multi/PythonVersion.java
|
package com.ibm.ai4code.parser.python_multi;
public enum PythonVersion {
Autodetect(0),
Python2(2),
Python3(3);
private final int value;
PythonVersion(int value) {
this.value = value;
}
public int getValue() {
return value;
}
}
| 280
| 14.611111
| 44
|
java
|
or-tools
|
or-tools-master/cmake/samples/java/src/main/java/com/google/ortools/App.java
|
package com.google.ortools;
/**
* Hello world!
*
*/
public class App
{
public static void main( String[] args )
{
System.out.println( "Hello World!" );
}
}
| 181
| 12
| 45
|
java
|
or-tools
|
or-tools-master/cmake/samples/java/src/test/java/com/google/ortools/AppTest.java
|
package com.google.ortools;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
/**
* Unit test for simple App.
*/
public class AppTest
extends TestCase
{
/**
* Create the test case
*
* @param testName name of the test case
*/
public AppTest( String testName )
{
super( testName );
}
/**
* @return the suite of tests being tested
*/
public static Test suite()
{
return new TestSuite( AppTest.class );
}
/**
* Rigourous Test :-)
*/
public void testApp()
{
assertTrue( true );
}
}
| 646
| 15.589744
| 46
|
java
|
or-tools
|
or-tools-master/examples/contrib/AllDifferentExcept0.java
|
// Copyright 2011 Hakan Kjellerstrand hakank@gmail.com
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.constraintsolver.DecisionBuilder;
import com.google.ortools.constraintsolver.IntVar;
import com.google.ortools.constraintsolver.Solver;
import java.io.*;
import java.text.*;
import java.util.*;
public class AllDifferentExcept0 {
// alldifferent_except_0(solver, x)
// A decomposition of the global constraint
// alldifferent_except_0, i.e. all values
// must be either distinct, or 0.
public static void alldifferent_except_0(Solver solver, IntVar[] a) {
int n = a.length;
for (int i = 0; i < n; i++) {
for (int j = 0; j < i; j++) {
IntVar bi = solver.makeIsDifferentCstVar(a[i], 0);
IntVar bj = solver.makeIsDifferentCstVar(a[j], 0);
IntVar bij = solver.makeIsDifferentCstVar(a[i], a[j]);
solver.addConstraint(solver.makeLessOrEqual(solver.makeProd(bi, bj).var(), bij));
}
}
}
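  // Illustrative note (hypothetical values): for a pair (a[i], a[j]) the
  // constraint bi * bj <= bij works as follows:
  //   a[i] = 0, a[j] = 7 -> bi = 0, the product is 0 and the pair is unconstrained;
  //   a[i] = 7, a[j] = 7 -> bi = bj = 1, so bij must be 1, but a[i] == a[j]
  //                         forces bij = 0, and the assignment is rejected;
  //   a[i] = 5, a[j] = 7 -> bi = bj = 1 and bij = 1, so the constraint holds.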
/**
* Implements a (decomposition) of global constraint alldifferent_except_0. See
* http://www.hakank.org/google_or_tools/circuit.py
*/
private static void solve() {
Solver solver = new Solver("AllDifferentExcept0");
//
// data
//
int n = 5;
//
// variables
//
IntVar[] x = solver.makeIntVarArray(n, 0, n - 1, "x");
//
// constraints
//
alldifferent_except_0(solver, x);
// we also require at least 2 0's
IntVar[] z_tmp = solver.makeBoolVarArray(n, "z_tmp");
for (int i = 0; i < n; i++) {
solver.addConstraint(solver.makeIsEqualCstCt(x[i], 0, z_tmp[i]));
}
IntVar z = solver.makeSum(z_tmp).var();
solver.addConstraint(solver.makeEquality(z, 2));
//
// search
//
DecisionBuilder db = solver.makePhase(x, solver.INT_VAR_DEFAULT, solver.INT_VALUE_DEFAULT);
solver.newSearch(db);
//
// output
//
while (solver.nextSolution()) {
System.out.print("x: ");
for (int i = 0; i < n; i++) {
System.out.print(x[i].value() + " ");
}
System.out.println(" z: " + z.value());
}
solver.endSearch();
// Statistics
System.out.println();
System.out.println("Solutions: " + solver.solutions());
System.out.println("Failures: " + solver.failures());
System.out.println("Branches: " + solver.branches());
System.out.println("Wall time: " + solver.wallTime() + "ms");
}
public static void main(String[] args) throws Exception {
Loader.loadNativeLibraries();
AllDifferentExcept0.solve();
}
}
| 3,141
| 29.803922
| 95
|
java
|
or-tools
|
or-tools-master/examples/contrib/AllInterval.java
|
// Copyright 2011 Hakan Kjellerstrand hakank@gmail.com
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.constraintsolver.DecisionBuilder;
import com.google.ortools.constraintsolver.IntVar;
import com.google.ortools.constraintsolver.Solver;
import java.io.*;
import java.text.*;
import java.util.*;
public class AllInterval {
/**
* Implements the all interval problem. See http://www.hakank.org/google_or_tools/all_interval.py
*/
private static void solve(int n) {
Solver solver = new Solver("AllInterval");
//
// variables
//
IntVar[] x = solver.makeIntVarArray(n, 0, n - 1, "x");
IntVar[] diffs = solver.makeIntVarArray(n - 1, 1, n - 1, "diffs");
//
// constraints
//
solver.addConstraint(solver.makeAllDifferent(x));
solver.addConstraint(solver.makeAllDifferent(diffs));
for (int k = 0; k < n - 1; k++) {
solver.addConstraint(solver.makeEquality(
diffs[k], solver.makeAbs(solver.makeDifference(x[k + 1], x[k])).var()));
}
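    // Illustrative example (hypothetical assignment): for n = 4,
    // x = [1, 2, 0, 3] gives diffs = [|2-1|, |0-2|, |3-0|] = [1, 2, 3];
    // both arrays are all-different, and the symmetry-breaking constraints
    // below (x[0] < x[n-1], diffs[0] < diffs[1]) are satisfied as well.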
// symmetry breaking
solver.addConstraint(solver.makeLess(x[0], x[n - 1]));
solver.addConstraint(solver.makeLess(diffs[0], diffs[1]));
//
// search
//
DecisionBuilder db = solver.makePhase(x, solver.CHOOSE_FIRST_UNBOUND, solver.ASSIGN_MIN_VALUE);
solver.newSearch(db);
//
// output
//
while (solver.nextSolution()) {
System.out.print("x : ");
for (int i = 0; i < n; i++) {
System.out.print(x[i].value() + " ");
}
System.out.print("\ndiffs: ");
for (int i = 0; i < n - 1; i++) {
System.out.print(diffs[i].value() + " ");
}
System.out.println("\n");
}
solver.endSearch();
// Statistics
System.out.println();
System.out.println("Solutions: " + solver.solutions());
System.out.println("Failures: " + solver.failures());
System.out.println("Branches: " + solver.branches());
System.out.println("Wall time: " + solver.wallTime() + "ms");
}
public static void main(String[] args) throws Exception {
Loader.loadNativeLibraries();
int n = 12;
if (args.length > 0) {
n = Integer.parseInt(args[0]);
}
AllInterval.solve(n);
}
}
| 2,807
| 28.87234
| 99
|
java
|
or-tools
|
or-tools-master/examples/contrib/Circuit.java
|
// Copyright 2011 Hakan Kjellerstrand hakank@gmail.com
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.constraintsolver.DecisionBuilder;
import com.google.ortools.constraintsolver.IntVar;
import com.google.ortools.constraintsolver.Solver;
import java.io.*;
import java.text.*;
import java.util.*;
public class Circuit {
/**
* circuit(solver, x)
*
* <p>A decomposition of the global constraint circuit, based on some observation of the orbits in
* an array.
*
* <p>Note: The domain of x must be 0..n-1 (not 1..n) since Java is 0-based.
*/
public static void circuit(Solver solver, IntVar[] x) {
int n = x.length;
IntVar[] z = solver.makeIntVarArray(n, 0, n - 1, "z");
solver.addConstraint(solver.makeAllDifferent(x));
solver.addConstraint(solver.makeAllDifferent(z));
// put the orbit of x[0] in z[0..n-1]
solver.addConstraint(solver.makeEquality(z[0], x[0]));
for (int i = 1; i < n - 1; i++) {
solver.addConstraint(solver.makeEquality(z[i], solver.makeElement(x, z[i - 1]).var()));
}
// z may not be 0 for i < n-1
for (int i = 1; i < n - 1; i++) {
solver.addConstraint(solver.makeNonEquality(z[i], 0));
}
// when i = n-1 it must be 0
solver.addConstraint(solver.makeEquality(z[n - 1], 0));
}
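  // Illustrative example (hypothetical assignment): for n = 5 the successor
  // array x = [2, 0, 3, 4, 1] encodes the single circuit 0 -> 2 -> 3 -> 4 -> 1 -> 0.
  // The orbit variables become z = [2, 3, 4, 1, 0]: z[0] = x[0] = 2,
  // z[1] = x[z[0]] = 3, z[2] = x[z[1]] = 4, z[3] = x[z[2]] = 1, and z[4] is
  // forced to 0, which is only consistent when the orbit of node 0 visits
  // every other node exactly once before returning to 0.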
/**
* Implements a (decomposition) of the global constraint circuit. See
* http://www.hakank.org/google_or_tools/circuit.py
*/
private static void solve(int n) {
Solver solver = new Solver("Circuit");
//
// variables
//
IntVar[] x = solver.makeIntVarArray(n, 0, n - 1, "x");
//
// constraints
//
circuit(solver, x);
//
// search
//
DecisionBuilder db = solver.makePhase(x, solver.INT_VAR_DEFAULT, solver.INT_VALUE_DEFAULT);
solver.newSearch(db);
//
// output
//
while (solver.nextSolution()) {
for (int i = 0; i < n; i++) {
System.out.print(x[i].value() + " ");
}
System.out.println();
}
solver.endSearch();
// Statistics
System.out.println();
System.out.println("Solutions: " + solver.solutions());
System.out.println("Failures: " + solver.failures());
System.out.println("Branches: " + solver.branches());
System.out.println("Wall time: " + solver.wallTime() + "ms");
}
public static void main(String[] args) throws Exception {
Loader.loadNativeLibraries();
int n = 5;
if (args.length > 0) {
n = Integer.parseInt(args[0]);
}
Circuit.solve(n);
}
}
| 3,124
| 28.481132
| 100
|
java
|
or-tools
|
or-tools-master/examples/contrib/CoinsGrid.java
|
// Copyright 2011 Hakan Kjellerstrand hakank@gmail.com
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.constraintsolver.DecisionBuilder;
import com.google.ortools.constraintsolver.IntVar;
import com.google.ortools.constraintsolver.OptimizeVar;
import com.google.ortools.constraintsolver.Solver;
import java.io.*;
import java.text.*;
import java.util.*;
public class CoinsGrid {
/** Solves the Coins Grid problem. See http://www.hakank.org/google_or_tools/coins_grid.py */
private static void solve() {
Solver solver = new Solver("CoinsGrid");
// data
int n = 5; // 31;
int c = 2; // 14;
// variables
IntVar[][] x = new IntVar[n][n];
IntVar[] x_flat = new IntVar[n * n];
for (int i = 0; i < n; i++) {
for (int j = 0; j < n; j++) {
x[i][j] = solver.makeIntVar(0, 1, "x[" + i + "," + j + "]");
x_flat[i * n + j] = x[i][j];
}
}
// constraints
// sum row/columns == c
for (int i = 0; i < n; i++) {
IntVar[] row = new IntVar[n];
IntVar[] col = new IntVar[n];
for (int j = 0; j < n; j++) {
row[j] = x[i][j];
col[j] = x[j][i];
}
solver.addConstraint(solver.makeSumEquality(row, c));
solver.addConstraint(solver.makeSumEquality(col, c));
}
// quadratic horizontal distance
IntVar[] obj_tmp = new IntVar[n * n];
for (int i = 0; i < n; i++) {
for (int j = 0; j < n; j++) {
obj_tmp[i * n + j] = solver.makeProd(x[i][j], (i - j) * (i - j)).var();
}
}
IntVar obj_var = solver.makeSum(obj_tmp).var();
// objective
OptimizeVar obj = solver.makeMinimize(obj_var, 1);
// search
DecisionBuilder db =
solver.makePhase(x_flat, solver.CHOOSE_FIRST_UNBOUND, solver.ASSIGN_MAX_VALUE);
solver.newSearch(db, obj);
// output
while (solver.nextSolution()) {
System.out.println("obj_var: " + obj_var.value());
for (int i = 0; i < n; i++) {
for (int j = 0; j < n; j++) {
System.out.print(x[i][j].value() + " ");
}
System.out.println();
}
System.out.println();
}
solver.endSearch();
// Statistics
System.out.println();
System.out.println("Solutions: " + solver.solutions());
System.out.println("Failures: " + solver.failures());
System.out.println("Branches: " + solver.branches());
System.out.println("Wall time: " + solver.wallTime() + "ms");
}
public static void main(String[] args) throws Exception {
Loader.loadNativeLibraries();
CoinsGrid.solve();
}
}
| 3,159
| 29.980392
| 95
|
java
|
or-tools
|
or-tools-master/examples/contrib/CoinsGridMIP.java
|
/*
* Copyright 2017 Darian Sastre darian.sastre@minimaxlabs.com
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* ************************************************************************
*
* Coins grid problem in Google CP Solver.
*
* Problem from
* Tony Hurlimann: "A coin puzzle - SVOR-contest 2007"
* http://www.svor.ch/competitions/competition2007/AsroContestSolution.pdf
* "
* In a quadratic grid (or a larger chessboard) with 31x31 cells, one should
* place coins in such a way that the following conditions are fulfilled:
* 1. In each row exactly 14 coins must be placed.
* 2. In each column exactly 14 coins must be placed.
* 3. The sum of the quadratic horizontal distance from the main diagonal
* of all cells containing a coin must be as small as possible.
* 4. In each cell at most one coin can be placed.
* The description says to place 14x31 = 434 coins on the chessboard each row
* containing 14 coins and each column also containing 14 coins.
* "
*
* This is a Java MIP version of
* http://www.hakank.org/google_or_tools/coins_grid_mip.py
*
* which is the MIP version of
* http://www.hakank.org/google_or_tools/coins_grid.py
*
* by Hakan Kjellerstrand (hakank@gmail.com).
*
* Java version by Darian Sastre (darian.sastre@minimaxlabs.com)
*/
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.linearsolver.*;
public class CoinsGridMIP {
private static void solve(String solverType) {
System.out.println("---- CoinsGridMIP with " + solverType);
MPSolver solver = MPSolver.createSolver(solverType);
if (solver == null)
return;
/** invariants */
int n = 31;
int c = 14;
/** variables */
MPVariable[][] x = new MPVariable[n][n];
for (int i = 0; i < n; i++) {
x[i] = solver.makeBoolVarArray(n);
}
/** constraints & objective */
MPConstraint[] constraints = new MPConstraint[2 * n];
MPObjective obj = solver.objective();
for (int i = 0; i < n; i++) {
constraints[2 * i] = solver.makeConstraint(c, c);
constraints[2 * i + 1] = solver.makeConstraint(c, c);
for (int j = 0; j < n; j++) {
constraints[2 * i].setCoefficient(x[i][j], 1);
constraints[2 * i + 1].setCoefficient(x[j][i], 1);
obj.setCoefficient(x[i][j], (i - j) * (i - j)); // squared distance from the main diagonal
}
}
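    // Explanatory note (example value is hypothetical): constraints[2*i] forces
    // row i to hold exactly c = 14 coins and constraints[2*i + 1] does the same
    // for column i, while the objective weights a coin at cell (i, j) by its
    // squared horizontal distance (i - j)^2 from the main diagonal; a coin at
    // (0, 3), for instance, contributes 9 to the minimized objective.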
solver.solve();
System.out.println("Problem solved in " + solver.wallTime() + "ms");
for (int i = 0; i < n; i++) {
for (int j = 0; j < n; j++) {
System.out.print((int) x[i][j].solutionValue() + " ");
}
System.out.println();
}
}
public static void main(String[] args) {
Loader.loadNativeLibraries();
solve("SCIP");
solve("CBC");
solve("GLPK");
solve("SAT");
}
}
| 3,337
| 31.407767
| 78
|
java
|
or-tools
|
or-tools-master/examples/contrib/ColoringMIP.java
|
/*
* Copyright 2017 Darian Sastre darian.sastre@minimaxlabs.com
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* ************************************************************************
*
* This model was created by Hakan Kjellerstrand (hakank@gmail.com)
*
* Java version by Darian Sastre (darian.sastre@minimaxlabs.com)
*/
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.linearsolver.MPConstraint;
import com.google.ortools.linearsolver.MPObjective;
import com.google.ortools.linearsolver.MPSolver;
import com.google.ortools.linearsolver.MPVariable;
public class ColoringMIP {
public static class Edge {
public int a, b;
public Edge(int a, int b) {
this.a = a;
this.b = b;
}
}
private static void solve(String solverType) {
System.out.println("---- CoinsGridMIP with " + solverType);
MPSolver solver = MPSolver.createSolver(solverType);
if (solver == null)
return;
double infinity = MPSolver.infinity();
/** invariants */
int noCols = 5; // number of colors
int noNodes = 11; // number of nodes (vertices)
Edge[] edges = {new Edge(1, 2), new Edge(1, 4), new Edge(1, 7), new Edge(1, 9), new Edge(2, 3),
new Edge(2, 6), new Edge(2, 8), new Edge(3, 5), new Edge(3, 7), new Edge(3, 10),
new Edge(4, 5), new Edge(4, 6), new Edge(4, 10), new Edge(5, 8), new Edge(5, 9),
new Edge(6, 11), new Edge(7, 11), new Edge(8, 11), new Edge(9, 11), new Edge(10, 11)};
/** variables */
MPVariable[][] x = new MPVariable[noNodes][noCols];
for (int i = 0; i < noNodes; i++) {
x[i] = solver.makeBoolVarArray(noCols);
}
MPVariable[] colUsed = solver.makeBoolVarArray(noCols);
MPObjective obj = solver.objective();
for (MPVariable objVar : colUsed) {
obj.setCoefficient(objVar, 1);
}
/** Bound each vertex to only one color */
MPConstraint[] constraints = new MPConstraint[noNodes];
for (int i = 0; i < noNodes; i++) {
constraints[i] = solver.makeConstraint(1, 1);
for (int j = 0; j < noCols; j++) {
constraints[i].setCoefficient(x[i][j], 1);
}
}
/** Set adjacent nodes to have different colors */
MPConstraint[][] adjacencies = new MPConstraint[edges.length][noCols];
for (int i = 0; i < edges.length; i++) {
for (int j = 0; j < noCols; j++) {
adjacencies[i][j] = solver.makeConstraint(-infinity, 0);
adjacencies[i][j].setCoefficient(x[edges[i].a - 1][j], 1);
adjacencies[i][j].setCoefficient(x[edges[i].b - 1][j], 1);
adjacencies[i][j].setCoefficient(colUsed[j], -1);
}
}
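    // Explanatory note: each adjacency constraint reads
    // x[a][j] + x[b][j] - colUsed[j] <= 0, so if colUsed[j] = 0 neither endpoint
    // of the edge may take color j, and if colUsed[j] = 1 at most one endpoint
    // may; minimizing the sum of colUsed then drives the indicator of every
    // unused color down to 0.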
/** Minimize by default */
final MPSolver.ResultStatus resultStatus = solver.solve();
/** printing */
if (resultStatus != MPSolver.ResultStatus.OPTIMAL) {
System.err.println("The problem does not have an optimal solution!");
return;
} else {
System.out.println("Problem solved in " + solver.wallTime() + "ms");
System.out.print("Colors used: ");
for (MPVariable var : colUsed) {
System.out.print((int) var.solutionValue() + " ");
}
System.out.println("\n");
for (int i = 0; i < noNodes; i++) {
System.out.print("Col of vertex " + i + " : ");
for (int j = 0; j < noCols; j++) {
if (x[i][j].solutionValue() > 0) {
System.out.println(j);
}
}
}
}
}
public static void main(String[] args) {
Loader.loadNativeLibraries();
solve("SCIP");
solve("CBC");
solve("GLPK");
solve("SAT");
}
}
| 4,116
| 31.936
| 99
|
java
|
or-tools
|
or-tools-master/examples/contrib/CoveringOpl.java
|
// Copyright 2011 Hakan Kjellerstrand hakank@gmail.com
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.constraintsolver.DecisionBuilder;
import com.google.ortools.constraintsolver.IntVar;
import com.google.ortools.constraintsolver.OptimizeVar;
import com.google.ortools.constraintsolver.Solver;
import java.io.*;
import java.text.*;
import java.util.*;
public class CoveringOpl {
/** Solves a set covering problem. See http://www.hakank.org/google_or_tools/covering_opl.py */
private static void solve() {
Solver solver = new Solver("CoveringOpl");
//
// data
//
int num_workers = 32;
int num_tasks = 15;
// Which worker is qualified for each task.
// Note: This is 1-based and will be made 0-base below.
int[][] qualified = {{1, 9, 19, 22, 25, 28, 31}, {2, 12, 15, 19, 21, 23, 27, 29, 30, 31, 32},
{3, 10, 19, 24, 26, 30, 32}, {4, 21, 25, 28, 32}, {5, 11, 16, 22, 23, 27, 31},
{6, 20, 24, 26, 30, 32}, {7, 12, 17, 25, 30, 31}, {8, 17, 20, 22, 23},
{9, 13, 14, 26, 29, 30, 31}, {10, 21, 25, 31, 32}, {14, 15, 18, 23, 24, 27, 30, 32},
{18, 19, 22, 24, 26, 29, 31}, {11, 20, 25, 28, 30, 32}, {16, 19, 23, 31},
{9, 18, 26, 28, 31, 32}};
int[] cost = {1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 6,
6, 6, 7, 8, 9};
//
// variables
//
IntVar[] hire = solver.makeIntVarArray(num_workers, 0, 1, "workers");
IntVar total_cost = solver.makeScalProd(hire, cost).var();
//
// constraints
//
for (int j = 0; j < num_tasks; j++) {
      // Require that at least one qualified worker is hired for this task
      // (the qualified lists are 1-based and are made 0-based below).
int len = qualified[j].length;
IntVar[] tmp = new IntVar[len];
for (int c = 0; c < len; c++) {
tmp[c] = hire[qualified[j][c] - 1];
}
IntVar b = solver.makeSum(tmp).var();
solver.addConstraint(solver.makeGreaterOrEqual(b, 1));
}
// Objective: Minimize total cost
OptimizeVar objective = solver.makeMinimize(total_cost, 1);
//
// search
//
DecisionBuilder db =
solver.makePhase(hire, solver.CHOOSE_FIRST_UNBOUND, solver.ASSIGN_MIN_VALUE);
solver.newSearch(db, objective);
//
// output
//
while (solver.nextSolution()) {
System.out.println("Cost: " + total_cost.value());
System.out.print("Hire: ");
for (int i = 0; i < num_workers; i++) {
if (hire[i].value() == 1) {
System.out.print(i + " ");
}
}
System.out.println("\n");
}
solver.endSearch();
// Statistics
System.out.println();
System.out.println("Solutions: " + solver.solutions());
System.out.println("Failures: " + solver.failures());
System.out.println("Branches: " + solver.branches());
System.out.println("Wall time: " + solver.wallTime() + "ms");
}
public static void main(String[] args) throws Exception {
Loader.loadNativeLibraries();
CoveringOpl.solve();
}
}
| 3,605
| 32.388889
| 98
|
java
|
or-tools
|
or-tools-master/examples/contrib/Crossword.java
|
// Copyright 2011 Hakan Kjellerstrand hakank@gmail.com
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.constraintsolver.DecisionBuilder;
import com.google.ortools.constraintsolver.IntVar;
import com.google.ortools.constraintsolver.Solver;
import java.io.*;
import java.text.*;
import java.util.*;
public class Crossword {
/** Solving a simple crossword. See http://www.hakank.org/google_or_tools/crossword2.py */
private static void solve() {
Solver solver = new Solver("Crossword");
//
// data
//
String[] alpha = {"_", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n",
"o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z"};
int a = 1;
int b = 2;
int c = 3;
int d = 4;
int e = 5;
int f = 6;
int g = 7;
int h = 8;
int i = 9;
int j = 10;
int k = 11;
int l = 12;
int m = 13;
int n = 14;
int o = 15;
int p = 16;
int q = 17;
int r = 18;
int s = 19;
int t = 20;
int u = 21;
int v = 22;
int w = 23;
int x = 24;
int y = 25;
int z = 26;
int num_words = 15;
int word_len = 5;
int[][] AA = {{h, o, s, e, s}, // HOSES
{l, a, s, e, r}, // LASER
{s, a, i, l, s}, // SAILS
{s, h, e, e, t}, // SHEET
{s, t, e, e, r}, // STEER
{h, e, e, l, 0}, // HEEL
{h, i, k, e, 0}, // HIKE
{k, e, e, l, 0}, // KEEL
{k, n, o, t, 0}, // KNOT
{l, i, n, e, 0}, // LINE
{a, f, t, 0, 0}, // AFT
{a, l, e, 0, 0}, // ALE
{e, e, l, 0, 0}, // EEL
{l, e, e, 0, 0}, // LEE
{t, i, e, 0, 0}}; // TIE
int num_overlapping = 12;
int[][] overlapping = {{0, 2, 1, 0}, // s
{0, 4, 2, 0}, // s
{3, 1, 1, 2}, // i
{3, 2, 4, 0}, // k
{3, 3, 2, 2}, // e
{6, 0, 1, 3}, // l
{6, 1, 4, 1}, // e
{6, 2, 2, 3}, // e
{7, 0, 5, 1}, // l
{7, 2, 1, 4}, // s
{7, 3, 4, 2}, // e
{7, 4, 2, 4}}; // r
int N = 8;
//
// variables
//
IntVar[][] A = new IntVar[num_words][word_len];
IntVar[] A_flat = new IntVar[num_words * word_len];
// for labeling on A and E
IntVar[] all = new IntVar[(num_words * word_len) + N];
for (int I = 0; I < num_words; I++) {
for (int J = 0; J < word_len; J++) {
A[I][J] = solver.makeIntVar(0, 26, "A[" + I + "," + J + "]");
A_flat[I * word_len + J] = A[I][J];
all[I * word_len + J] = A[I][J];
}
}
IntVar[] E = solver.makeIntVarArray(N, 0, num_words, "E");
for (int I = 0; I < N; I++) {
all[num_words * word_len + I] = E[I];
}
//
// constraints
//
solver.addConstraint(solver.makeAllDifferent(E));
for (int I = 0; I < num_words; I++) {
for (int J = 0; J < word_len; J++) {
solver.addConstraint(solver.makeEquality(A[I][J], AA[I][J]));
}
}
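    // Each overlapping entry (s1, p1, s2, p2) forces position p1 of the word
    // placed in slot s1 (chosen by E[s1]) to carry the same letter as position
    // p2 of the word placed in slot s2; the element expressions below index the
    // flattened letter matrix A_flat at E[..] * word_len + position.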
for (int I = 0; I < num_overlapping; I++) {
solver.addConstraint(solver.makeEquality(
solver
.makeElement(A_flat,
solver
.makeSum(
solver.makeProd(E[overlapping[I][0]], word_len).var(), overlapping[I][1])
.var())
.var(),
solver
.makeElement(A_flat,
solver
.makeSum(
solver.makeProd(E[overlapping[I][2]], word_len).var(), overlapping[I][3])
.var())
.var()));
}
//
// search
//
DecisionBuilder db = solver.makePhase(all, solver.INT_VAR_DEFAULT, solver.INT_VALUE_DEFAULT);
solver.newSearch(db);
//
// output
//
while (solver.nextSolution()) {
System.out.println("E:");
for (int ee = 0; ee < N; ee++) {
int e_val = (int) E[ee].value();
System.out.print(ee + ": (" + e_val + ") ");
for (int ii = 0; ii < word_len; ii++) {
System.out.print(alpha[(int) A[ee][ii].value()]);
}
System.out.println();
}
System.out.println();
}
solver.endSearch();
// Statistics
System.out.println();
System.out.println("Solutions: " + solver.solutions());
System.out.println("Failures: " + solver.failures());
System.out.println("Branches: " + solver.branches());
System.out.println("Wall time: " + solver.wallTime() + "ms");
}
public static void main(String[] args) throws Exception {
Loader.loadNativeLibraries();
Crossword.solve();
}
}
| 5,203
| 27.12973
| 99
|
java
|
or-tools
|
or-tools-master/examples/contrib/DeBruijn.java
|
// Copyright 2011 Hakan Kjellerstrand hakank@gmail.com
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.constraintsolver.DecisionBuilder;
import com.google.ortools.constraintsolver.IntVar;
import com.google.ortools.constraintsolver.Solver;
import java.io.*;
import java.text.*;
import java.util.*;
public class DeBruijn {
/**
* toNum(solver, a, num, base)
*
* <p>channelling between the array a and the number num
*/
private static void toNum(Solver solver, IntVar[] a, IntVar num, int base) {
int len = a.length;
IntVar[] tmp = new IntVar[len];
for (int i = 0; i < len; i++) {
tmp[i] = solver.makeProd(a[i], (int) Math.pow(base, (len - i - 1))).var();
}
solver.addConstraint(solver.makeEquality(solver.makeSum(tmp).var(), num));
}
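  // Illustrative sketch (not part of the original model): with base = 2 and a
  // digit array [1, 0, 1], the weights above are 2^2, 2^1, 2^0, so the
  // channelling constraint pins num to 1*4 + 0*2 + 1*1 = 5.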
/**
* Implements "arbitrary" de Bruijn sequences. See
* http://www.hakank.org/google_or_tools/debruijn_binary.py
*/
private static void solve(int base, int n, int m) {
Solver solver = new Solver("DeBruijn");
System.out.println("base: " + base + " n: " + n + " m: " + m);
// Ensure that the number of each digit in bin_code is
// the same. Nice feature, but it can slow things down...
boolean check_same_gcc = false; // true;
//
// variables
//
IntVar[] x = solver.makeIntVarArray(m, 0, (int) Math.pow(base, n) - 1, "x");
IntVar[][] binary = new IntVar[m][n];
for (int i = 0; i < m; i++) {
for (int j = 0; j < n; j++) {
binary[i][j] = solver.makeIntVar(0, base - 1, "binary[" + i + "," + j + "]");
}
}
// this is the de Bruijn sequence
IntVar[] bin_code = solver.makeIntVarArray(m, 0, base - 1, "bin_code");
    // occurrences of each number in bin_code
IntVar[] gcc = solver.makeIntVarArray(base, 0, m, "gcc");
// for the branching
IntVar[] all = new IntVar[2 * m + base];
for (int i = 0; i < m; i++) {
all[i] = x[i];
all[m + i] = bin_code[i];
}
for (int i = 0; i < base; i++) {
all[2 * m + i] = gcc[i];
}
//
// constraints
//
solver.addConstraint(solver.makeAllDifferent(x));
// converts x <-> binary
for (int i = 0; i < m; i++) {
IntVar[] t = new IntVar[n];
for (int j = 0; j < n; j++) {
t[j] = binary[i][j];
}
toNum(solver, t, x[i], base);
}
    // the de Bruijn condition:
    // the first n-1 elements of binary[i] are the same as the last
    // n-1 elements of binary[i-1]
for (int i = 1; i < m; i++) {
for (int j = 1; j < n; j++) {
solver.addConstraint(solver.makeEquality(binary[i - 1][j], binary[i][j - 1]));
}
}
// ... and around the corner
for (int j = 1; j < n; j++) {
solver.addConstraint(solver.makeEquality(binary[m - 1][j], binary[0][j - 1]));
}
// converts binary -> bin_code (de Bruijn sequence)
for (int i = 0; i < m; i++) {
solver.addConstraint(solver.makeEquality(bin_code[i], binary[i][0]));
}
    // extra: ensure that all the numbers in the de Bruijn sequence
    // (bin_code) have the same number of occurrences (if check_same_gcc
    // is true and mathematically possible)
solver.addConstraint(solver.makeDistribute(bin_code, gcc));
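    // The distribute constraint above is the global cardinality constraint:
    // it channels gcc[v] to the number of times the value v occurs in bin_code.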
if (check_same_gcc && m % base == 0) {
for (int i = 1; i < base; i++) {
solver.addConstraint(solver.makeEquality(gcc[i], gcc[i - 1]));
}
}
// symmetry breaking:
// the minimum value of x should be first
solver.addConstraint(solver.makeEquality(x[0], solver.makeMin(x).var()));
//
// search
//
DecisionBuilder db =
solver.makePhase(all, solver.CHOOSE_MIN_SIZE_LOWEST_MAX, solver.ASSIGN_MIN_VALUE);
solver.newSearch(db);
//
// output
//
while (solver.nextSolution()) {
System.out.print("x: ");
for (int i = 0; i < m; i++) {
System.out.print(x[i].value() + " ");
}
System.out.print("\nde Bruijn sequence:");
for (int i = 0; i < m; i++) {
System.out.print(bin_code[i].value() + " ");
}
System.out.print("\ngcc: ");
for (int i = 0; i < base; i++) {
System.out.print(gcc[i].value() + " ");
}
System.out.println("\n");
// for debugging etc: show the full binary table
/*
System.out.println("binary:");
for(int i = 0; i < m; i++) {
for(int j = 0; j < n; j++) {
System.out.print(binary[i][j].value() + " ");
}
System.out.println();
}
System.out.println();
*/
}
solver.endSearch();
// Statistics
System.out.println();
System.out.println("Solutions: " + solver.solutions());
System.out.println("Failures: " + solver.failures());
System.out.println("Branches: " + solver.branches());
System.out.println("Wall time: " + solver.wallTime() + "ms");
}
public static void main(String[] args) throws Exception {
Loader.loadNativeLibraries();
int base = 2;
int n = 3;
int m = 8;
if (args.length > 0) {
base = Integer.parseInt(args[0]);
}
if (args.length > 1) {
n = Integer.parseInt(args[1]);
m = (int) Math.pow(base, n);
}
if (args.length > 2) {
int m_max = (int) Math.pow(base, n);
m = Integer.parseInt(args[2]);
if (m > m_max) {
System.out.println("m(" + m + ") is too large. Set m to " + m_max + ".");
m = m_max;
}
}
DeBruijn.solve(base, n, m);
}
}
| 6,031
| 28.281553
| 90
|
java
|
or-tools
|
or-tools-master/examples/contrib/Diet.java
|
// Copyright 2011 Hakan Kjellerstrand hakank@gmail.com
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.constraintsolver.*;
import com.google.ortools.constraintsolver.DecisionBuilder;
import com.google.ortools.constraintsolver.IntVar;
import com.google.ortools.constraintsolver.Solver;
import java.io.*;
import java.text.*;
import java.util.*;
public class Diet {
/** Solves the Diet problem. See http://www.hakank.org/google_or_tools/diet1.py */
private static void solve() {
Solver solver = new Solver("Diet");
int n = 4;
int[] price = {50, 20, 30, 80}; // in cents
// requirements for each nutrition type
int[] limits = {500, 6, 10, 8};
// nutritions for each product
int[] calories = {400, 200, 150, 500};
int[] chocolate = {3, 2, 0, 0};
int[] sugar = {2, 2, 4, 4};
int[] fat = {2, 4, 1, 5};
//
// Variables
//
IntVar[] x = solver.makeIntVarArray(n, 0, 100, "x");
IntVar cost = solver.makeScalProd(x, price).var();
//
// Constraints
//
solver.addConstraint(solver.makeScalProdGreaterOrEqual(x, calories, limits[0]));
solver.addConstraint(solver.makeScalProdGreaterOrEqual(x, chocolate, limits[1]));
solver.addConstraint(solver.makeScalProdGreaterOrEqual(x, sugar, limits[2]));
solver.addConstraint(solver.makeScalProdGreaterOrEqual(x, fat, limits[3]));
//
// Objective
//
OptimizeVar obj = solver.makeMinimize(cost, 1);
//
// Search
//
DecisionBuilder db = solver.makePhase(x, solver.CHOOSE_PATH, solver.ASSIGN_MIN_VALUE);
solver.newSearch(db, obj);
while (solver.nextSolution()) {
System.out.println("cost: " + cost.value());
System.out.print("x: ");
for (int i = 0; i < n; i++) {
System.out.print(x[i].value() + " ");
}
System.out.println();
}
solver.endSearch();
// Statistics
System.out.println();
System.out.println("Solutions: " + solver.solutions());
System.out.println("Failures: " + solver.failures());
System.out.println("Branches: " + solver.branches());
System.out.println("Wall time: " + solver.wallTime() + "ms");
}
public static void main(String[] args) throws Exception {
Loader.loadNativeLibraries();
Diet.solve();
}
}
| 2,873
| 30.23913
| 90
|
java
|
or-tools
|
or-tools-master/examples/contrib/DietMIP.java
|
/*
* Copyright 2017 Darian Sastre darian.sastre@minimaxlabs.com
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* ************************************************************************
*
* This model was created by Hakan Kjellerstrand (hakank@gmail.com)
*/
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.linearsolver.MPConstraint;
import com.google.ortools.linearsolver.MPObjective;
import com.google.ortools.linearsolver.MPSolver;
import com.google.ortools.linearsolver.MPVariable;
public class DietMIP {
private static void solve(String solverType) {
System.out.println("---- DietMIP with " + solverType);
MPSolver solver = MPSolver.createSolver(solverType);
if (solver == null)
return;
double infinity = MPSolver.infinity();
int n = 4; // variables number
int m = 4; // constraints number
int[] price = {50, 20, 30, 80};
int[] limits = {500, 6, 10, 8};
int[] calories = {400, 200, 150, 500};
int[] chocolate = {3, 2, 0, 0};
int[] sugar = {2, 2, 4, 4};
int[] fat = {2, 4, 1, 5};
int[][] values = {calories, chocolate, sugar, fat};
MPVariable[] x = solver.makeIntVarArray(n, 0, 100, "x");
MPObjective objective = solver.objective();
MPConstraint[] targets = new MPConstraint[4];
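    // Row i of `values` holds nutrient i (calories, chocolate, sugar, fat) per
    // unit of each product; constraint i forces the purchased quantities to
    // supply at least limits[i] of that nutrient.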
for (int i = 0; i < n; i++) {
objective.setCoefficient(x[i], price[i]);
// constraints
targets[i] = solver.makeConstraint(limits[i], infinity);
for (int j = 0; j < m; j++) {
targets[i].setCoefficient(x[j], values[i][j]);
}
}
final MPSolver.ResultStatus resultStatus = solver.solve();
/** printing */
if (resultStatus != MPSolver.ResultStatus.OPTIMAL) {
System.err.println("The problem does not have an optimal solution!");
return;
} else {
System.out.println("Optimal objective value = " + solver.objective().value());
System.out.print("Item quantities: ");
System.out.print((int) x[0].solutionValue() + " ");
System.out.print((int) x[1].solutionValue() + " ");
System.out.print((int) x[2].solutionValue() + " ");
System.out.print((int) x[3].solutionValue() + " ");
}
}
public static void main(String[] args) throws Exception {
Loader.loadNativeLibraries();
solve("SCIP");
solve("CBC");
}
}
| 2,858
| 31.123596
| 84
|
java
|
or-tools
|
or-tools-master/examples/contrib/DivisibleBy9Through1.java
|
// Copyright 2011 Hakan Kjellerstrand hakank@gmail.com
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.constraintsolver.DecisionBuilder;
import com.google.ortools.constraintsolver.IntVar;
import com.google.ortools.constraintsolver.Solver;
import java.io.*;
import java.text.*;
import java.util.*;
public class DivisibleBy9Through1 {
/**
* A simple propagator for modulo constraint.
*
* <p>This implementation is based on the ECLiPSe version mentioned in "A Modulo propagator for
* ECLiPSE"
* http://www.hakank.org/constraint_programming_blog/2010/05/a_modulo_propagator_for_eclips.html
* The ECLiPSe Prolog source code: http://www.hakank.org/eclipse/modulo_propagator.ecl
*/
public static void my_mod(Solver solver, IntVar x, IntVar y, IntVar r) {
long lbx = x.min();
long ubx = x.max();
long ubx_neg = -ubx;
long lbx_neg = -lbx;
int min_x = (int) Math.min(lbx, ubx_neg);
int max_x = (int) Math.max(ubx, lbx_neg);
IntVar d = solver.makeIntVar(min_x, max_x, "d");
// r >= 0
solver.addConstraint(solver.makeGreaterOrEqual(r, 0));
// x*r >= 0
solver.addConstraint(solver.makeGreaterOrEqual(solver.makeProd(x, r).var(), 0));
// -abs(y) < r
solver.addConstraint(solver.makeLess(solver.makeOpposite(solver.makeAbs(y).var()).var(), r));
// r < abs(y)
    solver.addConstraint(solver.makeLess(r, solver.makeAbs(y).var()));
    // d > min_x
solver.addConstraint(solver.makeGreater(d, min_x));
// d <= max_x
solver.addConstraint(solver.makeLessOrEqual(d, max_x));
// x == y*d+r
solver.addConstraint(
solver.makeEquality(x, solver.makeSum(solver.makeProd(y, d).var(), r).var()));
}
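  // A brief reading of the decomposition above: r == x mod y is encoded via
  // x == y*d + r with 0 <= r < |y|. Passing r == 0, as solve() does below,
  // simply forces x to be a multiple of y.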
/**
* toNum(solver, a, num, base)
*
* <p>channelling between the array a and the number num
*/
private static void toNum(Solver solver, IntVar[] a, IntVar num, int base) {
int len = a.length;
IntVar[] tmp = new IntVar[len];
for (int i = 0; i < len; i++) {
tmp[i] = solver.makeProd(a[i], (int) Math.pow(base, (len - i - 1))).var();
}
solver.addConstraint(solver.makeEquality(solver.makeSum(tmp).var(), num));
}
/**
* Solves the divisible by 9 through 1 problem. See
* http://www.hakank.org/google_or_tools/divisible_by_9_through_1.py
*/
private static void solve(int base) {
Solver solver = new Solver("DivisibleBy9Through1");
//
// data
//
int m = (int) Math.pow(base, (base - 1)) - 1;
int n = base - 1;
String[] digits_str = {"_", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9"};
System.out.println("base: " + base);
//
// variables
//
// digits
IntVar[] x = solver.makeIntVarArray(n, 1, base - 1, "x");
    // the numbers. t[0] contains the answer
IntVar[] t = solver.makeIntVarArray(n, 0, m, "t");
//
// constraints
//
solver.addConstraint(solver.makeAllDifferent(x));
// Ensure the divisibility of base .. 1
IntVar zero = solver.makeIntConst(0);
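    // t[i] is the number formed by the first (base - 1 - i) digits of x, and
    // it must be divisible by (base - 1 - i).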
for (int i = 0; i < n; i++) {
int mm = base - i - 1;
IntVar[] tt = new IntVar[mm];
for (int j = 0; j < mm; j++) {
tt[j] = x[j];
}
toNum(solver, tt, t[i], base);
IntVar mm_const = solver.makeIntConst(mm);
my_mod(solver, t[i], mm_const, zero);
}
//
// search
//
DecisionBuilder db = solver.makePhase(x, solver.INT_VAR_DEFAULT, solver.INT_VALUE_DEFAULT);
solver.newSearch(db);
//
// output
//
while (solver.nextSolution()) {
System.out.print("x: ");
for (int i = 0; i < n; i++) {
System.out.print(x[i].value() + " ");
}
System.out.println("\nt: ");
for (int i = 0; i < n; i++) {
System.out.print(t[i].value() + " ");
}
System.out.println();
if (base != 10) {
System.out.print("Number base 10: " + t[0].value());
System.out.print(" Base " + base + ": ");
for (int i = 0; i < n; i++) {
System.out.print(digits_str[(int) x[i].value() + 1]);
}
System.out.println("\n");
}
}
solver.endSearch();
// Statistics
System.out.println();
System.out.println("Solutions: " + solver.solutions());
System.out.println("Failures: " + solver.failures());
System.out.println("Branches: " + solver.branches());
System.out.println("Wall time: " + solver.wallTime() + "ms");
}
public static void main(String[] args) throws Exception {
Loader.loadNativeLibraries();
int base = 10;
if (args.length > 0) {
int new_base = Integer.parseInt(args[0]);
if (new_base > 10) {
// Note: The next valid base after 10 is 14 and
// the number 559922224824157, which is too large in this model.
System.out.println("Sorry, max allowed base is 10. Setting base to 10.");
} else if (new_base < 2) {
System.out.println("Sorry, min allowed base is 2. Setting base to 2.");
base = 2;
} else {
base = new_base;
}
}
DivisibleBy9Through1.solve(base);
}
}
| 5,678
| 29.697297
| 98
|
java
|
or-tools
|
or-tools-master/examples/contrib/GolombRuler.java
|
/**
* Copyright (c) 1999-2011, Ecole des Mines de Nantes All rights reserved. Redistribution and use in
* source and binary forms, with or without modification, are permitted provided that the following
* conditions are met:
*
* <p>* Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer. * Redistributions in binary form must reproduce the
* above copyright notice, this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution. * Neither the name of the Ecole des Mines
* de Nantes nor the names of its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* <p>THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.constraintsolver.DecisionBuilder;
import com.google.ortools.constraintsolver.IntVar;
import com.google.ortools.constraintsolver.OptimizeVar;
import com.google.ortools.constraintsolver.SearchMonitor;
import com.google.ortools.constraintsolver.SolutionCollector;
import com.google.ortools.constraintsolver.Solver;
/**
* Golomb ruler problem <br>
*
* @author Charles Prud'homme
* @since 17/03/11
*/
public class GolombRuler {
/** Golomb Ruler Problem. */
private static void solve(int m) {
Solver solver = new Solver("GR " + m);
IntVar[] ticks = solver.makeIntVarArray(m, 0, ((m < 31) ? (1 << (m + 1)) - 1 : 9999), "ticks");
solver.addConstraint(solver.makeEquality(ticks[0], 0));
for (int i = 0; i < ticks.length - 1; i++) {
solver.addConstraint(solver.makeLess(ticks[i], ticks[i + 1]));
}
IntVar[] diff = new IntVar[(m * m - m) / 2];
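    // diff[k] holds ticks[j] - ticks[i]. The j - i consecutive gaps inside that
    // span are distinct positive integers, so the difference is at least
    // 1 + 2 + ... + (j - i) = (j - i) * (j - i + 1) / 2, the bound used below.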
for (int k = 0, i = 0; i < m - 1; i++) {
for (int j = i + 1; j < m; j++, k++) {
diff[k] = solver.makeDifference(ticks[j], ticks[i]).var();
solver.addConstraint(solver.makeGreaterOrEqual(diff[k], (j - i) * (j - i + 1) / 2));
}
}
solver.addConstraint(solver.makeAllDifferent(diff));
    // break symmetries
if (m > 2) {
solver.addConstraint(solver.makeLess(diff[0], diff[diff.length - 1]));
}
OptimizeVar opt = solver.makeMinimize(ticks[m - 1], 1);
DecisionBuilder db =
solver.makePhase(ticks, solver.CHOOSE_MIN_SIZE_LOWEST_MIN, solver.ASSIGN_MIN_VALUE);
SolutionCollector collector = solver.makeLastSolutionCollector();
collector.add(ticks);
collector.addObjective(ticks[m - 1]);
SearchMonitor log = solver.makeSearchLog(10000, opt);
solver.solve(db, opt, log, collector);
System.out.println("Optimal solution = " + collector.objectiveValue(0));
for (int i = 0; i < m; ++i) {
System.out.print("[" + collector.value(0, ticks[i]) + "] ");
}
System.out.println();
}
public static void main(String[] args) throws Exception {
Loader.loadNativeLibraries();
GolombRuler.solve(8);
}
}
| 3,737
| 41.965517
| 100
|
java
|
or-tools
|
or-tools-master/examples/contrib/Issue173.java
|
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.linearsolver.MPConstraint;
import com.google.ortools.linearsolver.MPObjective;
import com.google.ortools.linearsolver.MPSolver;
import com.google.ortools.linearsolver.MPVariable;
public class Issue173 {
public static void breakit() {
for (int i = 0; i < 50000; i++) {
solveLP();
}
}
private static void solveLP() {
MPSolver solver = MPSolver.createSolver("CBC");
if (solver == null) {
System.out.println("Could not create solver CBC");
return;
}
MPVariable x = solver.makeNumVar(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, "x");
final MPObjective objective = solver.objective();
objective.setMaximization();
objective.setCoefficient(x, 1);
MPConstraint constraint = solver.makeConstraint(0, 5);
constraint.setCoefficient(x, 1);
solver.solve();
}
public static void main(String[] args) throws Exception {
Loader.loadNativeLibraries();
breakit();
}
}
| 1,046
| 25.846154
| 94
|
java
|
or-tools
|
or-tools-master/examples/contrib/KnapsackMIP.java
|
/*
* Copyright 2017 Darian Sastre darian.sastre@minimaxlabs.com
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* ************************************************************************
*
* Each knapsack perceives a different weight for each item. Item values are
* the same across knapsacks. Optimizing constrains the count of each item such
* that all knapsack capacities are respected, and their values are maximized.
*
* This model was created by Hakan Kjellerstrand (hakank@gmail.com)
*/
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.linearsolver.*;
public class KnapsackMIP {
private static void solve(String solverType) {
MPSolver solver = MPSolver.createSolver(solverType);
if (solver == null) {
System.out.println("Could not create solver");
return;
}
/** variables */
int itemCount = 12;
int capacityCount = 7;
int[] capacity = {18209, 7692, 1333, 924, 26638, 61188, 13360};
int[] value = {96, 76, 56, 11, 86, 10, 66, 86, 83, 12, 9, 81};
int[][] weights = {{19, 1, 10, 1, 1, 14, 152, 11, 1, 1, 1, 1},
{0, 4, 53, 0, 0, 80, 0, 4, 5, 0, 0, 0}, {4, 660, 3, 0, 30, 0, 3, 0, 4, 90, 0, 0},
{7, 0, 18, 6, 770, 330, 7, 0, 0, 6, 0, 0}, {0, 20, 0, 4, 52, 3, 0, 0, 0, 5, 4, 0},
{0, 0, 40, 70, 4, 63, 0, 0, 60, 0, 4, 0}, {0, 32, 0, 0, 0, 5, 0, 3, 0, 660, 0, 9}};
int maxCapacity = -1;
for (int c : capacity) {
if (c > maxCapacity) {
maxCapacity = c;
}
}
MPVariable[] taken = solver.makeIntVarArray(itemCount, 0, maxCapacity);
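    // taken[j] is the quantity of item j to pack; the same quantities must fit
    // within every one of the capacityCount capacity constraints, each with its
    // own weights.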
/** constraints */
MPConstraint constraints[] = new MPConstraint[capacityCount];
for (int i = 0; i < capacityCount; i++) {
constraints[i] = solver.makeConstraint(0, capacity[i]);
for (int j = 0; j < itemCount; j++) {
constraints[i].setCoefficient(taken[j], weights[i][j]);
}
}
/** objective */
MPObjective obj = solver.objective();
obj.setMaximization();
for (int i = 0; i < itemCount; i++) {
obj.setCoefficient(taken[i], value[i]);
}
solver.solve();
/** printing */
System.out.println("Max cost: " + obj.value());
System.out.print("Item quantities: ");
for (MPVariable var : taken) {
System.out.print((int) var.solutionValue() + " ");
}
}
public static void main(String[] args) {
Loader.loadNativeLibraries();
solve("CBC_MIXED_INTEGER_PROGRAMMING");
}
}
| 2,981
| 33.275862
| 91
|
java
|
or-tools
|
or-tools-master/examples/contrib/LeastDiff.java
|
// Copyright 2011 Hakan Kjellerstrand hakank@gmail.com
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.constraintsolver.*;
import java.io.*;
import java.text.*;
import java.util.*;
public class LeastDiff {
/** Solves the Least Diff problem. See http://www.hakank.org/google_or_tools/least_diff.py */
private static void solve() {
final int base = 10;
Solver solver = new Solver("LeastDiff");
//
// Variables
//
IntVar a = solver.makeIntVar(0, base - 1, "a");
IntVar b = solver.makeIntVar(0, base - 1, "b");
IntVar c = solver.makeIntVar(0, base - 1, "c");
IntVar d = solver.makeIntVar(0, base - 1, "d");
IntVar e = solver.makeIntVar(0, base - 1, "e");
IntVar f = solver.makeIntVar(0, base - 1, "f");
IntVar g = solver.makeIntVar(0, base - 1, "g");
IntVar h = solver.makeIntVar(0, base - 1, "h");
IntVar i = solver.makeIntVar(0, base - 1, "i");
IntVar j = solver.makeIntVar(0, base - 1, "j");
IntVar[] all = {a, b, c, d, e, f, g, h, i, j};
//
// Constraints
//
int[] coeffs = {10000, 1000, 100, 10, 1};
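    // coeffs are the base-10 place values, so x = abcde and y = fghij read as
    // five-digit numbers; minimizing diff = x - y over distinct digits solves
    // the least-difference puzzle.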
IntVar x = solver.makeScalProd(new IntVar[] {a, b, c, d, e}, coeffs).var();
x.setName("x");
IntVar y = solver.makeScalProd(new IntVar[] {f, g, h, i, j}, coeffs).var();
y.setName("y");
// a > 0
solver.addConstraint(solver.makeGreater(a, 0));
// f > 0
solver.addConstraint(solver.makeGreater(f, 0));
// diff = x - y
IntVar diff = solver.makeDifference(x, y).var();
diff.setName("diff");
solver.addConstraint(solver.makeAllDifferent(all));
//
// Objective
//
OptimizeVar obj = solver.makeMinimize(diff, 1);
//
// Search
//
DecisionBuilder db = solver.makePhase(all, solver.CHOOSE_PATH, solver.ASSIGN_MIN_VALUE);
solver.newSearch(db, obj);
while (solver.nextSolution()) {
System.out.println("" + x.value() + " - " + y.value() + " = " + diff.value());
}
solver.endSearch();
// Statistics
System.out.println();
System.out.println("Solutions: " + solver.solutions());
System.out.println("Failures: " + solver.failures());
System.out.println("Branches: " + solver.branches());
System.out.println("Wall time: " + solver.wallTime() + "ms");
}
public static void main(String[] args) throws Exception {
Loader.loadNativeLibraries();
LeastDiff.solve();
}
}
| 2,986
| 31.11828
| 95
|
java
|
or-tools
|
or-tools-master/examples/contrib/MagicSquare.java
|
// Copyright 2011 Hakan Kjellerstrand hakank@gmail.com
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.constraintsolver.DecisionBuilder;
import com.google.ortools.constraintsolver.IntVar;
import com.google.ortools.constraintsolver.Solver;
import java.io.*;
import java.text.*;
import java.util.*;
public class MagicSquare {
/** Solves the Magic Square problem. See http://www.hakank.org/google_or_tools/magic_square.py */
private static void solve(int n, int num) {
Solver solver = new Solver("MagicSquare");
System.out.println("n: " + n);
//
// variables
//
IntVar[][] x = new IntVar[n][n];
// for the branching
IntVar[] x_flat = new IntVar[n * n];
//
// constraints
//
final long s = (n * (n * n + 1)) / 2;
System.out.println("s: " + s);
// IntVar s = solver.makeIntVar(0, n*n*n, "s");
IntVar[] diag1 = new IntVar[n];
IntVar[] diag2 = new IntVar[n];
for (int i = 0; i < n; i++) {
IntVar[] row = new IntVar[n];
for (int j = 0; j < n; j++) {
x[i][j] = solver.makeIntVar(1, n * n, "x[" + i + "," + j + "]");
x_flat[i * n + j] = x[i][j];
row[j] = x[i][j];
}
// sum row to s
solver.addConstraint(solver.makeSumEquality(row, s));
diag1[i] = x[i][i];
diag2[i] = x[i][n - i - 1];
}
// sum diagonals to s
solver.addConstraint(solver.makeSumEquality(diag1, s));
solver.addConstraint(solver.makeSumEquality(diag2, s));
// sum columns to s
for (int j = 0; j < n; j++) {
IntVar[] col = new IntVar[n];
for (int i = 0; i < n; i++) {
col[i] = x[i][j];
}
solver.addConstraint(solver.makeSumEquality(col, s));
}
// all are different
solver.addConstraint(solver.makeAllDifferent(x_flat));
// symmetry breaking: upper left is 1
// solver.addConstraint(solver.makeEquality(x[0][0], 1));
//
// Solve
//
DecisionBuilder db =
solver.makePhase(x_flat, solver.CHOOSE_FIRST_UNBOUND, solver.ASSIGN_CENTER_VALUE);
solver.newSearch(db);
int c = 0;
while (solver.nextSolution()) {
for (int i = 0; i < n; i++) {
for (int j = 0; j < n; j++) {
System.out.print(x[i][j].value() + " ");
}
System.out.println();
}
System.out.println();
c++;
if (num > 0 && c >= num) {
break;
}
}
solver.endSearch();
// Statistics
System.out.println();
System.out.println("Solutions: " + solver.solutions());
System.out.println("Failures: " + solver.failures());
System.out.println("Branches: " + solver.branches());
System.out.println("Wall time: " + solver.wallTime() + "ms");
}
public static void main(String[] args) throws Exception {
Loader.loadNativeLibraries();
int n = 4;
int num = 0;
if (args.length > 0) {
n = Integer.parseInt(args[0]);
}
if (args.length > 1) {
num = Integer.parseInt(args[1]);
}
MagicSquare.solve(n, num);
}
}
| 3,615
| 27.928
| 99
|
java
|
or-tools
|
or-tools-master/examples/contrib/Map.java
|
// Copyright 2011 Hakan Kjellerstrand hakank@gmail.com
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.constraintsolver.*;
import com.google.ortools.constraintsolver.DecisionBuilder;
import com.google.ortools.constraintsolver.IntVar;
import com.google.ortools.constraintsolver.Solver;
import java.io.*;
import java.text.*;
import java.util.*;
public class Map {
/** Solves a simple map coloring problem. See http://www.hakank.org/google_or_tools/map.py */
private static void solve() {
Solver solver = new Solver("Map");
//
// data
//
int Belgium = 0;
int Denmark = 1;
int France = 2;
int Germany = 3;
int Netherlands = 4;
int Luxembourg = 5;
int n = 6;
int max_num_colors = 4;
//
// Variables
//
IntVar[] color = solver.makeIntVarArray(n, 1, max_num_colors, "x");
//
// Constraints
//
solver.addConstraint(solver.makeNonEquality(color[France], color[Belgium]));
solver.addConstraint(solver.makeNonEquality(color[France], color[Luxembourg]));
solver.addConstraint(solver.makeNonEquality(color[France], color[Germany]));
solver.addConstraint(solver.makeNonEquality(color[Luxembourg], color[Germany]));
solver.addConstraint(solver.makeNonEquality(color[Luxembourg], color[Belgium]));
solver.addConstraint(solver.makeNonEquality(color[Belgium], color[Netherlands]));
solver.addConstraint(solver.makeNonEquality(color[Belgium], color[Germany]));
solver.addConstraint(solver.makeNonEquality(color[Germany], color[Netherlands]));
solver.addConstraint(solver.makeNonEquality(color[Germany], color[Denmark]));
// Symmetry breaking
solver.addConstraint(solver.makeEquality(color[Belgium], 1));
//
// Search
//
DecisionBuilder db =
solver.makePhase(color, solver.CHOOSE_FIRST_UNBOUND, solver.ASSIGN_MIN_VALUE);
solver.newSearch(db);
while (solver.nextSolution()) {
System.out.print("Colors: ");
for (int i = 0; i < n; i++) {
System.out.print(color[i].value() + " ");
}
System.out.println();
}
solver.endSearch();
// Statistics
System.out.println();
System.out.println("Solutions: " + solver.solutions());
System.out.println("Failures: " + solver.failures());
System.out.println("Branches: " + solver.branches());
System.out.println("Wall time: " + solver.wallTime() + "ms");
}
public static void main(String[] args) throws Exception {
Loader.loadNativeLibraries();
Map.solve();
}
}
| 3,120
| 32.923913
| 95
|
java
|
or-tools
|
or-tools-master/examples/contrib/Map2.java
|
// Copyright 2011 Hakan Kjellerstrand hakank@gmail.com
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.constraintsolver.*;
import com.google.ortools.constraintsolver.DecisionBuilder;
import com.google.ortools.constraintsolver.IntVar;
import com.google.ortools.constraintsolver.Solver;
import java.io.*;
import java.text.*;
import java.util.*;
public class Map2 {
/**
* Solves a simple map coloring problem, take II. See http://www.hakank.org/google_or_tools/map.py
*/
private static void solve() {
Solver solver = new Solver("Map2");
//
// data
//
int Belgium = 0;
int Denmark = 1;
int France = 2;
int Germany = 3;
int Netherlands = 4;
int Luxembourg = 5;
int n = 6;
int max_num_colors = 4;
int[][] neighbours = {{France, Belgium}, {France, Luxembourg}, {France, Germany},
{Luxembourg, Germany}, {Luxembourg, Belgium}, {Belgium, Netherlands}, {Belgium, Germany},
{Germany, Netherlands}, {Germany, Denmark}};
//
// Variables
//
IntVar[] color = solver.makeIntVarArray(n, 1, max_num_colors, "x");
//
// Constraints
//
for (int i = 0; i < neighbours.length; i++) {
solver.addConstraint(
solver.makeNonEquality(color[neighbours[i][0]], color[neighbours[i][1]]));
}
// Symmetry breaking
solver.addConstraint(solver.makeEquality(color[Belgium], 1));
//
// Search
//
DecisionBuilder db =
solver.makePhase(color, solver.CHOOSE_FIRST_UNBOUND, solver.ASSIGN_MIN_VALUE);
solver.newSearch(db);
while (solver.nextSolution()) {
System.out.print("Colors: ");
for (int i = 0; i < n; i++) {
System.out.print(color[i].value() + " ");
}
System.out.println();
}
solver.endSearch();
// Statistics
System.out.println();
System.out.println("Solutions: " + solver.solutions());
System.out.println("Failures: " + solver.failures());
System.out.println("Branches: " + solver.branches());
System.out.println("Wall time: " + solver.wallTime() + "ms");
}
public static void main(String[] args) throws Exception {
Loader.loadNativeLibraries();
Map2.solve();
}
}
| 2,795
| 29.064516
| 100
|
java
|
or-tools
|
or-tools-master/examples/contrib/Minesweeper.java
|
// Copyright 2011 Hakan Kjellerstrand hakank@gmail.com
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.constraintsolver.*;
import com.google.ortools.constraintsolver.DecisionBuilder;
import com.google.ortools.constraintsolver.IntVar;
import com.google.ortools.constraintsolver.Solver;
import java.io.*;
import java.text.*;
import java.util.*;
public class Minesweeper {
static int X = -1;
//
// Default problem.
// It has 4 solutions.
//
static int default_r = 8;
static int default_c = 8;
static int[][] default_game = {{2, 3, X, 2, 2, X, 2, 1}, {X, X, 4, X, X, 4, X, 2},
{X, X, X, X, X, X, 4, X}, {X, 5, X, 6, X, X, X, 2}, {2, X, X, X, 5, 5, X, 2},
{1, 3, 4, X, X, X, 4, X}, {0, 1, X, 4, X, X, X, 3}, {0, 1, 2, X, 2, 3, X, 2}};
// for the actual problem
static int r;
static int c;
static int[][] game;
/** Solves the Minesweeper problems. See http://www.hakank.org/google_or_tools/minesweeper.py */
private static void solve() {
Solver solver = new Solver("Minesweeper");
int[] S = {-1, 0, 1};
//
// data
//
System.out.println("Problem:");
for (int i = 0; i < r; i++) {
for (int j = 0; j < c; j++) {
if (game[i][j] > X) {
System.out.print(game[i][j] + " ");
} else {
System.out.print("X ");
}
}
System.out.println();
}
System.out.println();
//
// Variables
//
IntVar[][] mines = new IntVar[r][c];
IntVar[] mines_flat = new IntVar[r * c]; // for branching
for (int i = 0; i < r; i++) {
for (int j = 0; j < c; j++) {
mines[i][j] = solver.makeIntVar(0, 1, "mines[" + i + ", " + j + "]");
mines_flat[i * c + j] = mines[i][j];
}
}
//
// Constraints
//
for (int i = 0; i < r; i++) {
for (int j = 0; j < c; j++) {
if (game[i][j] >= 0) {
solver.addConstraint(solver.makeEquality(mines[i][j], 0));
          // the clue in this cell equals the number of mines among its neighbours
ArrayList<IntVar> neighbours = new ArrayList<IntVar>();
for (int a : S) {
for (int b : S) {
if (i + a >= 0 && j + b >= 0 && i + a < r && j + b < c) {
neighbours.add(mines[i + a][j + b]);
}
}
}
solver.addConstraint(
solver.makeSumEquality(neighbours.toArray(new IntVar[1]), game[i][j]));
}
if (game[i][j] > X) {
// This cell cannot be a mine since it
// has some value assigned to it
solver.addConstraint(solver.makeEquality(mines[i][j], 0));
}
}
}
//
// Search
//
DecisionBuilder db =
solver.makePhase(mines_flat, solver.INT_VAR_SIMPLE, solver.ASSIGN_MIN_VALUE);
solver.newSearch(db);
int sol = 0;
while (solver.nextSolution()) {
sol++;
System.out.println("Solution #" + sol + ":");
for (int i = 0; i < r; i++) {
for (int j = 0; j < c; j++) {
System.out.print(mines[i][j].value() + " ");
}
System.out.println();
}
System.out.println();
}
solver.endSearch();
// Statistics
System.out.println();
System.out.println("Solutions: " + solver.solutions());
System.out.println("Failures: " + solver.failures());
System.out.println("Branches: " + solver.branches());
System.out.println("Wall time: " + solver.wallTime() + "ms");
}
/**
   * Reads a minesweeper file. File format: "#" and "%" start comment lines, which are
   * ignored; then comes the number of rows, the number of columns, and one line of
   * neighbour counts per row.
   *
   * <p>0..8 means the number of neighbouring mines, "." means unknown (may be a mine)
   *
   * <p>Example (from minesweeper0.txt): # Problem from Gecode/examples/minesweeper.cc problem 0
   * 6 6 ..2.3. 2..... ..24.3 1.34.. .....3 .3.3..
*/
private static void readFile(String file) {
System.out.println("readFile(" + file + ")");
int lineCount = 0;
try {
BufferedReader inr = new BufferedReader(new FileReader(file));
String str;
while ((str = inr.readLine()) != null && str.length() > 0) {
str = str.trim();
// ignore comments
if (str.startsWith("#") || str.startsWith("%")) {
continue;
}
System.out.println(str);
if (lineCount == 0) {
r = Integer.parseInt(str); // number of rows
} else if (lineCount == 1) {
c = Integer.parseInt(str); // number of columns
game = new int[r][c];
} else {
// the problem matrix
          // Note: on Java 8+ String.split("") no longer yields a leading empty
          // element, so index the row's characters directly.
          for (int j = 0; j < c; j++) {
            String s = String.valueOf(str.charAt(j));
            if (s.equals(".")) {
              game[lineCount - 2][j] = -1;
            } else {
              game[lineCount - 2][j] = Integer.parseInt(s);
            }
          }
}
lineCount++;
} // end while
inr.close();
} catch (IOException e) {
System.out.println(e);
}
} // end readFile
public static void main(String[] args) throws Exception {
Loader.loadNativeLibraries();
String file = "";
if (args.length > 0) {
file = args[0];
Minesweeper.readFile(file);
} else {
game = default_game;
r = default_r;
c = default_c;
}
Minesweeper.solve();
}
}
| 5,971
| 28.564356
| 99
|
java
|
or-tools
|
or-tools-master/examples/contrib/MultiThreadTest.java
|
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.linearsolver.MPConstraint;
import com.google.ortools.linearsolver.MPObjective;
import com.google.ortools.linearsolver.MPSolver;
import com.google.ortools.linearsolver.MPSolver.OptimizationProblemType;
import com.google.ortools.linearsolver.MPSolver.ResultStatus;
import com.google.ortools.linearsolver.MPVariable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
public class MultiThreadTest {
private static final boolean verboseOutput = false; // To enable Cbc logging
public static void main(String[] args) throws Exception {
Loader.loadNativeLibraries();
launchProtocol(10, 8, true);
System.out.println("Cbc multi thread test successful");
return;
}
public static void launchProtocol(int wholeLoopAttmpts, int threadPoolSize, boolean runInParallel)
throws Exception {
for (int noAttmpt = 0; noAttmpt < wholeLoopAttmpts; noAttmpt++) {
System.out.println(String.format("Attempt %d", noAttmpt));
int maxThreads = threadPoolSize;
List<SolverThread> threadList = new ArrayList<SolverThread>();
for (int i = 0; i < maxThreads; i++) {
SolverThread thread = new SolverThread();
threadList.add(thread);
}
ExecutorService executor = Executors.newFixedThreadPool(maxThreads);
if (runInParallel) {
System.out.println("Launching thread pool");
executor.invokeAll(threadList);
for (SolverThread thread : threadList) {
System.out.println(thread.getStatusSolver().toString());
}
} else {
for (SolverThread thread : threadList) {
System.out.println("Launching single thread");
executor.invokeAll(Arrays.asList(thread));
System.out.println(thread.getStatusSolver().toString());
}
}
System.out.println("Attempt finalized!");
executor.shutdown();
}
System.out.println("Now exiting multi thread execution");
}
private static MPSolver makeProblem() {
MPSolver solver = MPSolver.createSolver("CBC");
if (solver == null) {
System.out.println("Could not create solver CBC");
return solver;
}
double infinity = MPSolver.infinity();
// x1 and x2 are integer non-negative variables.
MPVariable x1 = solver.makeIntVar(0.0, infinity, "x1");
MPVariable x2 = solver.makeIntVar(0.0, infinity, "x2");
// Minimize x1 + 2 * x2.
MPObjective objective = solver.objective();
objective.setCoefficient(x1, 1);
objective.setCoefficient(x2, 2);
// 2 * x2 + 3 * x1 >= 17.
MPConstraint ct = solver.makeConstraint(17, infinity);
ct.setCoefficient(x1, 3);
ct.setCoefficient(x2, 2);
if (verboseOutput) {
solver.enableOutput();
}
return solver;
}
private static final class SolverThread implements Callable<MPSolver.ResultStatus> {
private MPSolver.ResultStatus statusSolver;
public SolverThread() {}
@Override
public ResultStatus call() throws Exception {
MPSolver solver = makeProblem();
if (solver == null) {
statusSolver = MPSolver.ResultStatus.NOT_SOLVED;
} else {
statusSolver = solver.solve();
// Check that the problem has an optimal solution.
        if (!MPSolver.ResultStatus.OPTIMAL.equals(statusSolver)) {
throw new RuntimeException("Non OPTIMAL status after solve.");
}
}
return statusSolver;
}
public MPSolver.ResultStatus getStatusSolver() {
return statusSolver;
}
}
}
| 3,747
| 30.233333
| 100
|
java
|
or-tools
|
or-tools-master/examples/contrib/NQueens.java
|
// Copyright 2011 Hakan Kjellerstrand hakank@gmail.com
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.constraintsolver.DecisionBuilder;
import com.google.ortools.constraintsolver.IntVar;
import com.google.ortools.constraintsolver.Solver;
import java.io.*;
import java.text.*;
import java.util.*;
public class NQueens {
/** Solves the N Queens problem. See http://www.hakank.org/google_or_tools/nqueens2.py */
private static void solve(int n, int num, int print) {
Solver solver = new Solver("NQueens");
System.out.println("n: " + n);
//
// variables
//
IntVar[] q = solver.makeIntVarArray(n, 0, n - 1, "q");
//
// constraints
//
solver.addConstraint(solver.makeAllDifferent(q));
for (int i = 0; i < n; i++) {
for (int j = 0; j < i; j++) {
        // q[i]+i != q[j]+j
solver.addConstraint(
solver.makeNonEquality(solver.makeSum(q[i], i).var(), solver.makeSum(q[j], j).var()));
// q[i]-i != q[j]-j
solver.addConstraint(
solver.makeNonEquality(solver.makeSum(q[i], -i).var(), solver.makeSum(q[j], -j).var()));
}
}
//
// Solve
//
DecisionBuilder db =
solver.makePhase(q, solver.CHOOSE_MIN_SIZE_LOWEST_MAX, solver.ASSIGN_CENTER_VALUE);
solver.newSearch(db);
int c = 0;
while (solver.nextSolution()) {
if (print != 0) {
for (int i = 0; i < n; i++) {
System.out.print(q[i].value() + " ");
}
System.out.println();
}
c++;
if (num > 0 && c >= num) {
break;
}
}
solver.endSearch();
// Statistics
System.out.println();
System.out.println("Solutions: " + solver.solutions());
System.out.println("Failures: " + solver.failures());
System.out.println("Branches: " + solver.branches());
System.out.println("Wall time: " + solver.wallTime() + "ms");
}
public static void main(String[] args) throws Exception {
Loader.loadNativeLibraries();
int n = 8;
int num = 0;
int print = 1;
if (args.length > 0) {
n = Integer.parseInt(args[0]);
}
if (args.length > 1) {
num = Integer.parseInt(args[1]);
}
if (args.length > 2) {
print = Integer.parseInt(args[2]);
}
NQueens.solve(n, num, print);
}
}
| 3,022
| 27.790476
| 100
|
java
|
or-tools
|
or-tools-master/examples/contrib/NQueens2.java
|
// Copyright 2011 Hakan Kjellerstrand hakank@gmail.com
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.constraintsolver.DecisionBuilder;
import com.google.ortools.constraintsolver.IntVar;
import com.google.ortools.constraintsolver.Solver;
import java.io.*;
import java.text.*;
import java.util.*;
public class NQueens2 {
/** Solves the N Queens problem. See http://www.hakank.org/google_or_tools/nqueens2.py */
private static void solve(int n, int num, int print) {
Solver solver = new Solver("NQueens");
System.out.println("n: " + n);
//
// variables
//
IntVar[] q = solver.makeIntVarArray(n, 0, n - 1, "q");
//
// constraints
//
solver.addConstraint(solver.makeAllDifferent(q));
IntVar[] q1 = new IntVar[n];
IntVar[] q2 = new IntVar[n];
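    // q[i] + i is constant along one diagonal direction and q[i] - i along the
    // other, so making each family of expressions pairwise distinct forbids two
    // queens from sharing a diagonal.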
for (int i = 0; i < n; i++) {
q1[i] = solver.makeSum(q[i], i).var();
q2[i] = solver.makeSum(q[i], -i).var();
}
solver.addConstraint(solver.makeAllDifferent(q1));
solver.addConstraint(solver.makeAllDifferent(q2));
//
// Solve
//
DecisionBuilder db =
solver.makePhase(q, solver.CHOOSE_MIN_SIZE_LOWEST_MAX, solver.ASSIGN_CENTER_VALUE);
solver.newSearch(db);
int c = 0;
while (solver.nextSolution()) {
if (print != 0) {
for (int i = 0; i < n; i++) {
System.out.print(q[i].value() + " ");
}
System.out.println();
}
c++;
if (num > 0 && c >= num) {
break;
}
}
solver.endSearch();
// Statistics
System.out.println();
System.out.println("Solutions: " + solver.solutions());
System.out.println("Failures: " + solver.failures());
System.out.println("Branches: " + solver.branches());
System.out.println("Wall time: " + solver.wallTime() + "ms");
}
public static void main(String[] args) throws Exception {
Loader.loadNativeLibraries();
int n = 8;
int num = 0;
int print = 1;
if (args.length > 0) {
n = Integer.parseInt(args[0]);
}
if (args.length > 1) {
num = Integer.parseInt(args[1]);
}
if (args.length > 2) {
print = Integer.parseInt(args[2]);
}
NQueens2.solve(n, num, print);
}
}
| 2,816
| 27.454545
| 91
|
java
|
or-tools
|
or-tools-master/examples/contrib/Partition.java
|
/**
* Copyright (c) 1999-2011, Ecole des Mines de Nantes All rights reserved. Redistribution and use in
* source and binary forms, with or without modification, are permitted provided that the following
* conditions are met:
*
* <p>* Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer. * Redistributions in binary form must reproduce the
* above copyright notice, this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution. * Neither the name of the Ecole des Mines
* de Nantes nor the names of its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* <p>THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.constraintsolver.*;
/**
* Partition n numbers into two groups, so that - the sum of the first group equals the sum of the
* second, - and the sum of the squares of the first group equals the sum of the squares of the
* second <br>
*
* @author Charles Prud'homme
* @since 18/03/11
*/
public class Partition {
/** Partition Problem. */
private static void solve(int m) {
Solver solver = new Solver("Partition " + m);
IntVar[] x, y;
x = solver.makeIntVarArray(m, 1, 2 * m, "x");
y = solver.makeIntVarArray(m, 1, 2 * m, "y");
// break symmetries
for (int i = 0; i < m - 1; i++) {
solver.addConstraint(solver.makeLess(x[i], x[i + 1]));
solver.addConstraint(solver.makeLess(y[i], y[i + 1]));
}
solver.addConstraint(solver.makeLess(x[0], y[0]));
IntVar[] xy = new IntVar[2 * m];
for (int i = m - 1; i >= 0; i--) {
xy[i] = x[i];
xy[m + i] = y[i];
}
solver.addConstraint(solver.makeAllDifferent(xy));
int[] coeffs = new int[2 * m];
for (int i = m - 1; i >= 0; i--) {
coeffs[i] = 1;
coeffs[m + i] = -1;
}
solver.addConstraint(solver.makeScalProdEquality(xy, coeffs, 0));
IntVar[] sxy, sx, sy;
sxy = new IntVar[2 * m];
sx = new IntVar[m];
sy = new IntVar[m];
for (int i = m - 1; i >= 0; i--) {
sx[i] = solver.makeSquare(x[i]).var();
sxy[i] = sx[i];
sy[i] = solver.makeSquare(y[i]).var();
sxy[m + i] = sy[i];
}
solver.addConstraint(solver.makeScalProdEquality(sxy, coeffs, 0));
solver.addConstraint(solver.makeSumEquality(x, 2 * m * (2 * m + 1) / 4));
solver.addConstraint(solver.makeSumEquality(y, 2 * m * (2 * m + 1) / 4));
solver.addConstraint(solver.makeSumEquality(sx, 2 * m * (2 * m + 1) * (4 * m + 1) / 12));
solver.addConstraint(solver.makeSumEquality(sy, 2 * m * (2 * m + 1) * (4 * m + 1) / 12));
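// The right-hand sides follow from closed forms over 1..2m: the total sum is
// 2m(2m+1)/2, so each half must sum to 2m(2m+1)/4, and the total sum of squares is
// 2m(2m+1)(4m+1)/6, so each half must sum to 2m(2m+1)(4m+1)/12. Note these integer
// divisions are only exact for suitable m (they are for the m = 32 instance in main).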
DecisionBuilder db = solver.makeDefaultPhase(xy);
SolutionCollector collector = solver.makeFirstSolutionCollector();
collector.add(xy);
SearchMonitor log = solver.makeSearchLog(10000);
solver.newSearch(db, log, collector);
solver.nextSolution();
System.out.println("Solution solution");
for (int i = 0; i < m; ++i) {
System.out.print("[" + collector.value(0, xy[i]) + "] ");
}
System.out.printf("\n");
for (int i = 0; i < m; ++i) {
System.out.print("[" + collector.value(0, xy[m + i]) + "] ");
}
System.out.println();
}
public static void main(String[] args) throws Exception {
Loader.loadNativeLibraries();
Partition.solve(32);
}
}
| 4,248
| 39.466667
| 100
|
java
|
or-tools
|
or-tools-master/examples/contrib/QuasigroupCompletion.java
|
// Copyright 2011 Hakan Kjellerstrand hakank@gmail.com
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.constraintsolver.*;
import com.google.ortools.constraintsolver.DecisionBuilder;
import com.google.ortools.constraintsolver.IntVar;
import com.google.ortools.constraintsolver.Solver;
import java.io.*;
import java.text.*;
import java.util.*;
public class QuasigroupCompletion {
static int X = 0;
/*
* default problem
*
* Example from Ruben Martins and Ines Lynce
* Breaking Local Symmetries in Quasigroup Completion Problems, page 3
* The solution is unique:
*
* 1 3 2 5 4
* 2 5 4 1 3
* 4 1 3 2 5
* 5 4 1 3 2
* 3 2 5 4 1
*/
static int default_n = 5;
static int[][] default_problem = {
{1, X, X, X, 4}, {X, 5, X, X, X}, {4, X, X, 2, X}, {X, 4, X, X, X}, {X, X, 5, X, 1}};
// for the actual problem
static int n;
static int[][] problem;
/**
* Solves the Quasigroup Completion problem. See
* http://www.hakank.org/google_or_tools/quasigroup_completion.py
*/
private static void solve() {
Solver solver = new Solver("QuasigroupCompletion");
//
// data
//
System.out.println("Problem:");
for (int i = 0; i < n; i++) {
for (int j = 0; j < n; j++) {
System.out.print(problem[i][j] + " ");
}
System.out.println();
}
System.out.println();
//
// Variables
//
IntVar[][] x = new IntVar[n][n];
IntVar[] x_flat = new IntVar[n * n]; // for branching
for (int i = 0; i < n; i++) {
for (int j = 0; j < n; j++) {
x[i][j] = solver.makeIntVar(1, n, "x[" + i + "," + j + "]");
x_flat[i * n + j] = x[i][j];
}
}
//
// Constraints
//
for (int i = 0; i < n; i++) {
for (int j = 0; j < n; j++) {
if (problem[i][j] > X) {
solver.addConstraint(solver.makeEquality(x[i][j], problem[i][j]));
}
}
}
//
// rows and columns must be different
//
// rows
for (int i = 0; i < n; i++) {
IntVar[] row = new IntVar[n];
for (int j = 0; j < n; j++) {
row[j] = x[i][j];
}
solver.addConstraint(solver.makeAllDifferent(row));
}
// columns
for (int j = 0; j < n; j++) {
IntVar[] col = new IntVar[n];
for (int i = 0; i < n; i++) {
col[i] = x[i][j];
}
solver.addConstraint(solver.makeAllDifferent(col));
}
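// Together, the row and column allDifferent constraints make the grid a Latin square:
// every value 1..n occurs exactly once per row and per column, while the equality
// constraints above pin the pre-filled cells. Completing such a partially filled
// square is exactly the quasigroup completion problem.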
//
// Search
//
DecisionBuilder db = solver.makePhase(x_flat, solver.INT_VAR_SIMPLE, solver.ASSIGN_MIN_VALUE);
solver.newSearch(db);
int sol = 0;
while (solver.nextSolution()) {
sol++;
System.out.println("Solution #" + sol + ":");
for (int i = 0; i < n; i++) {
for (int j = 0; j < n; j++) {
System.out.print(x[i][j].value() + " ");
}
System.out.println();
}
System.out.println();
}
solver.endSearch();
// Statistics
System.out.println();
System.out.println("Solutions: " + solver.solutions());
System.out.println("Failures: " + solver.failures());
System.out.println("Branches: " + solver.branches());
System.out.println("Wall time: " + solver.wallTime() + "ms");
}
/**
* Reads a Quasigroup completion file.
*
* <p>File format: lines starting with "#" or "%" are comments and are ignored; the
* first non-comment line gives the number of rows (n); each following line is a row
* of space separated entries.
*
* <p>"." or "0" means unknown, integer 1..n means known value
*
* <p>Example:
* <pre>
* 5
* 1 . . . 4
* . 5 . . .
* 4 . . 2 .
* . 4 . . .
* . . 5 . 1
* </pre>
*/
private static void readFile(String file) {
System.out.println("readFile(" + file + ")");
int lineCount = 0;
try {
BufferedReader inr = new BufferedReader(new FileReader(file));
String str;
while ((str = inr.readLine()) != null && str.length() > 0) {
str = str.trim();
// ignore comments
if (str.startsWith("#") || str.startsWith("%")) {
continue;
}
System.out.println(str);
if (lineCount == 0) {
n = Integer.parseInt(str); // number of rows
problem = new int[n][n];
} else {
// the problem matrix
String[] row = str.split(" ");
for (int i = 0; i < n; i++) {
String s = row[i];
if (s.equals(".")) {
problem[lineCount - 1][i] = 0;
} else {
problem[lineCount - 1][i] = Integer.parseInt(s);
}
}
}
lineCount++;
} // end while
inr.close();
} catch (IOException e) {
System.out.println(e);
}
} // end readFile
public static void main(String[] args) throws Exception {
Loader.loadNativeLibraries();
if (args.length > 0) {
String file = "";
file = args[0];
QuasigroupCompletion.readFile(file);
} else {
problem = default_problem;
n = default_n;
}
QuasigroupCompletion.solve();
}
}
| 5,574
| 26.195122
| 100
|
java
|
or-tools
|
or-tools-master/examples/contrib/SendMoreMoney.java
|
// Copyright 2011 Hakan Kjellerstrand hakank@gmail.com
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.constraintsolver.DecisionBuilder;
import com.google.ortools.constraintsolver.IntVar;
import com.google.ortools.constraintsolver.Solver;
import java.io.*;
import java.text.*;
import java.util.*;
public class SendMoreMoney {
/** Solves the SEND+MORE=MONEY problem. */
private static void solve() {
int base = 10;
Solver solver = new Solver("SendMoreMoney");
IntVar s = solver.makeIntVar(0, base - 1, "s");
IntVar e = solver.makeIntVar(0, base - 1, "e");
IntVar n = solver.makeIntVar(0, base - 1, "n");
IntVar d = solver.makeIntVar(0, base - 1, "d");
IntVar m = solver.makeIntVar(0, base - 1, "m");
IntVar o = solver.makeIntVar(0, base - 1, "o");
IntVar r = solver.makeIntVar(0, base - 1, "r");
IntVar y = solver.makeIntVar(0, base - 1, "y");
IntVar[] x = {s, e, n, d, m, o, r, y};
IntVar[] eq = {s, e, n, d, m, o, r, e, m, o, n, e, y};
int[] coeffs = {
1000, 100, 10,
1, // S E N D +
1000, 100, 10,
1, // M O R E
-10000, -1000, -100, -10,
-1 // == M O N E Y
};
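// Arithmetic sanity check on the coefficients (illustrative only, not a constraint):
// with the classic solution S=9, E=5, N=6, D=7, M=1, O=0, R=8, Y=2 we get
// 9567 + 1085 - 10652 = 0, which is what makeScalProdEquality(eq, coeffs, 0) encodes.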
solver.addConstraint(solver.makeScalProdEquality(eq, coeffs, 0));
// alternative:
solver.addConstraint(solver.makeScalProdEquality(
new IntVar[] {s, e, n, d, m, o, r, e, m, o, n, e, y}, coeffs, 0));
// s > 0
solver.addConstraint(solver.makeGreater(s, 0));
// m > 0
solver.addConstraint(solver.makeGreater(m, 0));
solver.addConstraint(solver.makeAllDifferent(x));
DecisionBuilder db = solver.makePhase(x, solver.INT_VAR_DEFAULT, solver.INT_VALUE_DEFAULT);
solver.newSearch(db);
while (solver.nextSolution()) {
for (int i = 0; i < 8; i++) {
System.out.print(x[i].toString() + " ");
}
System.out.println();
}
solver.endSearch();
// Statistics
System.out.println();
System.out.println("Solutions: " + solver.solutions());
System.out.println("Failures: " + solver.failures());
System.out.println("Branches: " + solver.branches());
System.out.println("Wall time: " + solver.wallTime() + "ms");
}
public static void main(String[] args) throws Exception {
Loader.loadNativeLibraries();
SendMoreMoney.solve();
}
}
| 2,919
| 33.352941
| 95
|
java
|
or-tools
|
or-tools-master/examples/contrib/SendMoreMoney2.java
|
// Copyright 2011 Hakan Kjellerstrand hakank@gmail.com
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.constraintsolver.*;
import java.io.*;
import java.text.*;
import java.util.*;
public class SendMoreMoney2 {
static Solver sol;
// Some helper methods
static IntExpr p(IntExpr a, int b, IntExpr c) {
return sol.makeSum(sol.makeProd(a, b), c);
}
static IntExpr p(IntVar a, int b) {
return sol.makeProd(a, b);
}
// a slightly more intelligent scalar product
static IntExpr sp(IntVar[] a) {
int len = a.length;
int c = 1;
int[] t = new int[len];
for (int i = len - 1; i >= 0; i--) {
t[i] = c;
c *= 10;
}
return sol.makeScalProd(a, t);
}
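// sp builds the positional weights on the fly: for a k-letter word it multiplies the
// last variable by 1, the next by 10, and so on, so sp({s, e, n, d}) is the same
// expression as 1000*s + 100*e + 10*n + 1*d.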
/** Solves the SEND+MORE=MONEY problem with different approaches. */
private static void solve(int alt) {
sol = new Solver("SendMoreMoney");
int base = 10;
//
// variables
//
IntVar s = sol.makeIntVar(0, base - 1, "s");
IntVar e = sol.makeIntVar(0, base - 1, "e");
IntVar n = sol.makeIntVar(0, base - 1, "n");
IntVar d = sol.makeIntVar(0, base - 1, "d");
IntVar m = sol.makeIntVar(0, base - 1, "m");
IntVar o = sol.makeIntVar(0, base - 1, "o");
IntVar r = sol.makeIntVar(0, base - 1, "r");
IntVar y = sol.makeIntVar(0, base - 1, "y");
IntVar[] x = {s, e, n, d, m, o, r, y};
//
// Constraints
//
/*
*
* Below are some alternative encodings of the
* same idea:
*
* 1000*s + 100*e + 10*n + d +
* 1000*m + 100*o + 10*r + e ==
* 10000*m + 1000*o + 100*n + 10*e + y
*
*/
if (alt == 0) {
//
// First, a version which is just too noisy.
//
sol.addConstraint(sol.makeEquality(
sol.makeSum(sol.makeSum(sol.makeProd(s, 1000),
sol.makeSum(sol.makeProd(e, 100),
sol.makeSum(sol.makeProd(n, 10), sol.makeProd(d, 1)))),
sol.makeSum(sol.makeProd(m, 1000),
sol.makeSum(sol.makeProd(o, 100),
sol.makeSum(sol.makeProd(r, 10), sol.makeProd(e, 1)))))
.var(),
sol.makeSum(sol.makeProd(m, 10000),
sol.makeSum(sol.makeProd(o, 1000),
sol.makeSum(sol.makeProd(n, 100),
sol.makeSum(sol.makeProd(e, 10), sol.makeProd(y, 1)))))
.var()));
} else if (alt == 1) {
//
// Alternative 1, using the helper methods
//
// p(IntExpr, int, IntExpr) and
// p(IntVar, int)
//
sol.addConstraint(sol.makeEquality(sol.makeSum(p(s, 1000, p(e, 100, p(n, 10, p(d, 1)))),
p(m, 1000, p(o, 100, p(r, 10, p(e, 1)))))
.var(),
p(m, 10000, p(o, 1000, p(n, 100, p(e, 10, p(y, 1))))).var()));
} else if (alt == 2) {
//
// Alternative 2
//
sol.addConstraint(sol.makeEquality(
sol.makeSum(sol.makeScalProd(new IntVar[] {s, e, n, d}, new int[] {1000, 100, 10, 1}),
sol.makeScalProd(new IntVar[] {m, o, r, e}, new int[] {1000, 100, 10, 1}))
.var(),
sol.makeScalProd(new IntVar[] {m, o, n, e, y}, new int[] {10000, 1000, 100, 10, 1})
.var()));
} else if (alt == 3) {
//
// alternative 3: same approach as 2, with some helper methods
//
sol.addConstraint(sol.makeEquality(
sol.makeSum(sp(new IntVar[] {s, e, n, d}), sp(new IntVar[] {m, o, r, e})).var(),
sp(new IntVar[] {m, o, n, e, y}).var()));
} else if (alt == 4) {
//
// Alternative 4, using explicit variables
//
IntExpr send = sol.makeScalProd(new IntVar[] {s, e, n, d}, new int[] {1000, 100, 10, 1});
IntExpr more = sol.makeScalProd(new IntVar[] {m, o, r, e}, new int[] {1000, 100, 10, 1});
IntExpr money =
sol.makeScalProd(new IntVar[] {m, o, n, e, y}, new int[] {10000, 1000, 100, 10, 1});
sol.addConstraint(sol.makeEquality(sol.makeSum(send, more).var(), money.var()));
}
// s > 0
sol.addConstraint(sol.makeGreater(s, 0));
// m > 0
sol.addConstraint(sol.makeGreater(m, 0));
sol.addConstraint(sol.makeAllDifferent(x));
//
// Search
//
DecisionBuilder db = sol.makePhase(x, sol.INT_VAR_DEFAULT, sol.INT_VALUE_DEFAULT);
sol.newSearch(db);
while (sol.nextSolution()) {
for (int i = 0; i < 8; i++) {
System.out.print(x[i].toString() + " ");
}
System.out.println();
}
sol.endSearch();
//
// Statistics
//
System.out.println();
System.out.println("Solutions: " + sol.solutions());
System.out.println("Failures: " + sol.failures());
System.out.println("Branches: " + sol.branches());
System.out.println("Wall time: " + sol.wallTime() + "ms");
}
public static void main(String[] args) throws Exception {
Loader.loadNativeLibraries();
for (int i = 0; i < 5; i++) {
System.out.println("\nalternative #" + i);
SendMoreMoney2.solve(i);
}
}
}
| 5,758
| 31.536723
| 96
|
java
|
or-tools
|
or-tools-master/examples/contrib/SendMostMoney.java
|
// Copyright 2011 Hakan Kjellerstrand hakank@gmail.com
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.constraintsolver.*;
import com.google.ortools.constraintsolver.DecisionBuilder;
import com.google.ortools.constraintsolver.IntVar;
import com.google.ortools.constraintsolver.Solver;
import java.io.*;
import java.text.*;
import java.util.*;
public class SendMostMoney {
/**
* Solves the SEND+MOST=MONEY problem, where we maximize MONEY. See
* http://www.hakank.org/google_or_tools/send_more_money.py
*/
private static long solve(long MONEY) {
Solver solver = new Solver("SendMostMoney");
//
// data
//
final int base = 10;
//
// variables
//
IntVar s = solver.makeIntVar(0, base - 1, "s");
IntVar e = solver.makeIntVar(0, base - 1, "e");
IntVar n = solver.makeIntVar(0, base - 1, "n");
IntVar d = solver.makeIntVar(0, base - 1, "d");
IntVar m = solver.makeIntVar(0, base - 1, "m");
IntVar o = solver.makeIntVar(0, base - 1, "o");
IntVar t = solver.makeIntVar(0, base - 1, "t");
IntVar y = solver.makeIntVar(0, base - 1, "y");
IntVar[] x = {s, e, n, d, m, o, t, y};
IntVar[] eq = {s, e, n, d, m, o, s, t, m, o, n, e, y};
int[] coeffs = {
1000, 100, 10,
1, // S E N D +
1000, 100, 10,
1, // M O S T
-10000, -1000, -100, -10,
-1 // == M O N E Y
};
solver.addConstraint(solver.makeScalProdEquality(eq, coeffs, 0));
IntVar money =
solver.makeScalProd(new IntVar[] {m, o, n, e, y}, new int[] {10000, 1000, 100, 10, 1})
.var();
//
// constraints
//
// s > 0
solver.addConstraint(solver.makeGreater(s, 0));
// m > 0
solver.addConstraint(solver.makeGreater(m, 0));
solver.addConstraint(solver.makeAllDifferent(x));
if (MONEY > 0) {
// Search for all solutions.
solver.addConstraint(solver.makeEquality(money, MONEY));
}
//
// search
//
DecisionBuilder db = solver.makePhase(x, solver.CHOOSE_FIRST_UNBOUND, solver.ASSIGN_MAX_VALUE);
if (MONEY == 0) {
// first round: get the optimal value
OptimizeVar obj = solver.makeMaximize(money, 1);
solver.newSearch(db, obj);
} else {
// search for all solutions
solver.newSearch(db);
}
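// The method is therefore used in two phases (see main): a first call with MONEY == 0
// maximizes the money variable, and a second call with that optimal value passed back
// in fixes money to it and enumerates every assignment achieving the maximum.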
long money_ret = 0;
while (solver.nextSolution()) {
System.out.println("money: " + money.value());
money_ret = money.value();
for (int i = 0; i < x.length; i++) {
System.out.print(x[i].value() + " ");
}
System.out.println();
}
solver.endSearch();
// Statistics
System.out.println();
System.out.println("Solutions: " + solver.solutions());
System.out.println("Failures: " + solver.failures());
System.out.println("Branches: " + solver.branches());
System.out.println("Wall time: " + solver.wallTime() + "ms");
return money_ret;
}
public static void main(String[] args) throws Exception {
Loader.loadNativeLibraries();
System.out.println("Get the max value of money:");
long this_money = SendMostMoney.solve(0);
System.out.println("\nThen find all solutions with this value:");
long tmp = SendMostMoney.solve(this_money);
}
}
| 3,855
| 29.848
| 99
|
java
|
or-tools
|
or-tools-master/examples/contrib/Seseman.java
|
// Copyright 2011 Hakan Kjellerstrand hakank@gmail.com
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.constraintsolver.DecisionBuilder;
import com.google.ortools.constraintsolver.IntVar;
import com.google.ortools.constraintsolver.Solver;
import java.io.*;
import java.text.*;
import java.util.*;
public class Seseman {
/** Solves the Seseman convent problem. See http://www.hakank.org/google_or_tools/seseman.py */
private static void solve(int n) {
Solver solver = new Solver("Seseman");
//
// data
//
final int border_sum = n * n;
//
// variables
//
IntVar[][] x = new IntVar[n][n];
IntVar[] x_flat = new IntVar[n * n];
for (int i = 0; i < n; i++) {
for (int j = 0; j < n; j++) {
x[i][j] = solver.makeIntVar(0, n * n);
x_flat[i * n + j] = x[i][j];
}
}
IntVar total_sum = solver.makeSum(x_flat).var();
//
// constraints
//
// zero in all middle cells
for (int i = 1; i < n - 1; i++) {
for (int j = 1; j < n - 1; j++) {
solver.addConstraint(solver.makeEquality(x[i][j], 0));
}
}
// all borders must be >= 1
for (int i = 0; i < n; i++) {
for (int j = 0; j < n; j++) {
if (i == 0 || j == 0 || i == n - 1 || j == n - 1) {
solver.addConstraint(solver.makeGreaterOrEqual(x[i][j], 1));
}
}
}
// sum the four borders
IntVar[] border1 = new IntVar[n];
IntVar[] border2 = new IntVar[n];
IntVar[] border3 = new IntVar[n];
IntVar[] border4 = new IntVar[n];
for (int i = 0; i < n; i++) {
border1[i] = x[i][0];
border2[i] = x[i][n - 1];
border3[i] = x[0][i];
border4[i] = x[n - 1][i];
}
solver.addConstraint(solver.makeSumEquality(border1, border_sum));
solver.addConstraint(solver.makeSumEquality(border2, border_sum));
solver.addConstraint(solver.makeSumEquality(border3, border_sum));
solver.addConstraint(solver.makeSumEquality(border4, border_sum));
//
// search
//
DecisionBuilder db = solver.makePhase(x_flat, solver.INT_VAR_DEFAULT, solver.INT_VALUE_DEFAULT);
solver.newSearch(db);
while (solver.nextSolution()) {
System.out.println("total_sum: " + total_sum.value());
for (int i = 0; i < n; i++) {
for (int j = 0; j < n; j++) {
System.out.print(x[i][j].value() + " ");
}
System.out.println();
}
System.out.println();
}
solver.endSearch();
// Statistics
System.out.println();
System.out.println("Solutions: " + solver.solutions());
System.out.println("Failures: " + solver.failures());
System.out.println("Branches: " + solver.branches());
System.out.println("Wall time: " + solver.wallTime() + "ms");
}
public static void main(String[] args) throws Exception {
Loader.loadNativeLibraries();
int n = 3;
if (args.length > 0) {
n = Integer.parseInt(args[0]);
}
Seseman.solve(n);
}
}
| 3,589
| 28.42623
| 100
|
java
|
or-tools
|
or-tools-master/examples/contrib/SetCovering.java
|
// Copyright 2011 Hakan Kjellerstrand hakank@gmail.com
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.constraintsolver.DecisionBuilder;
import com.google.ortools.constraintsolver.IntVar;
import com.google.ortools.constraintsolver.OptimizeVar;
import com.google.ortools.constraintsolver.Solver;
import java.io.*;
import java.text.*;
import java.util.*;
public class SetCovering {
/** Solves a set covering problem. See http://www.hakank.org/google_or_tools/set_covering.py */
private static void solve() {
Solver solver = new Solver("SetCovering");
//
// data
//
// Placing of firestations, from Winston 'Operations Research',
// page 486.
int min_distance = 15;
int num_cities = 6;
int[][] distance = {{0, 10, 20, 30, 30, 20}, {10, 0, 25, 35, 20, 10}, {20, 25, 0, 15, 30, 20},
{30, 35, 15, 0, 15, 25}, {30, 20, 30, 15, 0, 14}, {20, 10, 20, 25, 14, 0}};
//
// variables
//
IntVar[] x = solver.makeIntVarArray(num_cities, 0, 1, "x");
IntVar z = solver.makeSum(x).var();
//
// constraints
//
// ensure that all cities are covered
for (int i = 0; i < num_cities; i++) {
ArrayList<IntVar> b = new ArrayList<IntVar>();
for (int j = 0; j < num_cities; j++) {
if (distance[i][j] <= min_distance) {
b.add(x[j]);
}
}
solver.addConstraint(solver.makeSumGreaterOrEqual(b.toArray(new IntVar[1]), 1));
}
//
// objective
//
OptimizeVar objective = solver.makeMinimize(z, 1);
//
// search
//
DecisionBuilder db = solver.makePhase(x, solver.INT_VAR_DEFAULT, solver.INT_VALUE_DEFAULT);
solver.newSearch(db, objective);
//
// output
//
while (solver.nextSolution()) {
System.out.println("z: " + z.value());
System.out.print("x: ");
for (int i = 0; i < num_cities; i++) {
System.out.print(x[i].value() + " ");
}
System.out.println();
}
solver.endSearch();
// Statistics
System.out.println();
System.out.println("Solutions: " + solver.solutions());
System.out.println("Failures: " + solver.failures());
System.out.println("Branches: " + solver.branches());
System.out.println("Wall time: " + solver.wallTime() + "ms");
}
public static void main(String[] args) throws Exception {
Loader.loadNativeLibraries();
SetCovering.solve();
}
}
| 3,007
| 29.693878
| 98
|
java
|
or-tools
|
or-tools-master/examples/contrib/SetCovering2.java
|
// Copyright 2011 Hakan Kjellerstrand hakank@gmail.com
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.constraintsolver.DecisionBuilder;
import com.google.ortools.constraintsolver.IntVar;
import com.google.ortools.constraintsolver.OptimizeVar;
import com.google.ortools.constraintsolver.Solver;
import java.io.*;
import java.text.*;
import java.util.*;
public class SetCovering2 {
/** Solves a set covering problem. See http://www.hakank.org/google_or_tools/set_covering2.py */
private static void solve() {
Solver solver = new Solver("SetCovering2");
//
// data
//
// Example 9.1-2 from
// Taha "Operations Research - An Introduction",
// page 354ff.
// Minimize the number of security telephones in street
// corners on a campus.
int n = 8; // maximum number of corners
int num_streets = 11; // number of connected streets
// corners of each street
// Note: 1-based (handled below)
int[][] corner = {
{1, 2}, {2, 3}, {4, 5}, {7, 8}, {6, 7}, {2, 6}, {1, 6}, {4, 7}, {2, 4}, {5, 8}, {3, 5}};
//
// variables
//
IntVar[] x = solver.makeIntVarArray(n, 0, 1, "x");
// number of telephones, to be minimized
IntVar z = solver.makeSum(x).var();
//
// constraints
//
// ensure that every street is covered by a telephone at one of its corners
for (int i = 0; i < num_streets; i++) {
IntVar[] b = new IntVar[2];
b[0] = x[corner[i][0] - 1];
b[1] = x[corner[i][1] - 1];
solver.addConstraint(solver.makeSumGreaterOrEqual(b, 1));
}
//
// objective
//
OptimizeVar objective = solver.makeMinimize(z, 1);
//
// search
//
DecisionBuilder db = solver.makePhase(x, solver.INT_VAR_DEFAULT, solver.INT_VALUE_DEFAULT);
solver.newSearch(db, objective);
//
// output
//
while (solver.nextSolution()) {
System.out.println("z: " + z.value());
System.out.print("x: ");
for (int i = 0; i < n; i++) {
System.out.print(x[i].value() + " ");
}
System.out.println();
}
solver.endSearch();
// Statistics
System.out.println();
System.out.println("Solutions: " + solver.solutions());
System.out.println("Failures: " + solver.failures());
System.out.println("Branches: " + solver.branches());
System.out.println("Wall time: " + solver.wallTime() + "ms");
}
public static void main(String[] args) throws Exception {
Loader.loadNativeLibraries();
SetCovering2.solve();
}
}
| 3,090
| 28.721154
| 98
|
java
|
or-tools
|
or-tools-master/examples/contrib/SetCovering3.java
|
// Copyright 2011 Hakan Kjellerstrand hakank@gmail.com
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.constraintsolver.DecisionBuilder;
import com.google.ortools.constraintsolver.IntVar;
import com.google.ortools.constraintsolver.OptimizeVar;
import com.google.ortools.constraintsolver.Solver;
import java.io.*;
import java.text.*;
import java.util.*;
public class SetCovering3 {
/** Solves a set covering problem. See http://www.hakank.org/google_or_tools/set_covering3.py */
private static void solve() {
Solver solver = new Solver("SetCovering3");
//
// data
//
// Set covering problem from
// Katta G. Murty: 'Optimization Models for Decision Making',
// page 302f
// http://ioe.engin.umich.edu/people/fac/books/murty/opti_model/junior-7.pdf
int num_groups = 6;
int num_senators = 10;
// which group does a senator belong to?
int[][] belongs = {{1, 1, 1, 1, 1, 0, 0, 0, 0, 0}, // 1 southern
{0, 0, 0, 0, 0, 1, 1, 1, 1, 1}, // 2 northern
{0, 1, 1, 0, 0, 0, 0, 1, 1, 1}, // 3 liberals
{1, 0, 0, 0, 1, 1, 1, 0, 0, 0}, // 4 conservative
{0, 0, 1, 1, 1, 1, 1, 0, 1, 0}, // 5 democrats
{1, 1, 0, 0, 0, 0, 0, 1, 0, 1}}; // 6 republicans
//
// variables
//
IntVar[] x = solver.makeIntVarArray(num_senators, 0, 1, "x");
// number of assigned senators, to be minimized
IntVar z = solver.makeSum(x).var();
//
// constraints
//
// ensure that each group is covered by at least
// one senator
for (int i = 0; i < num_groups; i++) {
IntVar[] b = new IntVar[num_senators];
for (int j = 0; j < num_senators; j++) {
b[j] = solver.makeProd(x[j], belongs[i][j]).var();
}
solver.addConstraint(solver.makeSumGreaterOrEqual(b, 1));
}
//
// objective
//
OptimizeVar objective = solver.makeMinimize(z, 1);
//
// search
//
DecisionBuilder db = solver.makePhase(x, solver.INT_VAR_DEFAULT, solver.INT_VALUE_DEFAULT);
solver.newSearch(db, objective);
//
// output
//
while (solver.nextSolution()) {
System.out.println("z: " + z.value());
System.out.print("x: ");
for (int j = 0; j < num_senators; j++) {
System.out.print(x[j].value() + " ");
}
System.out.println();
// More details
for (int j = 0; j < num_senators; j++) {
if (x[j].value() == 1) {
System.out.print("Senator " + (1 + j) + " belongs to these groups: ");
for (int i = 0; i < num_groups; i++) {
if (belongs[i][j] == 1) {
System.out.print((1 + i) + " ");
}
}
System.out.println();
}
}
}
solver.endSearch();
// Statistics
System.out.println();
System.out.println("Solutions: " + solver.solutions());
System.out.println("Failures: " + solver.failures());
System.out.println("Branches: " + solver.branches());
System.out.println("Wall time: " + solver.wallTime() + "ms");
}
public static void main(String[] args) throws Exception {
Loader.loadNativeLibraries();
SetCovering3.solve();
}
}
| 3,765
| 30.383333
| 98
|
java
|
or-tools
|
or-tools-master/examples/contrib/SetCovering4.java
|
// Copyright 2011 Hakan Kjellerstrand hakank@gmail.com
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.constraintsolver.DecisionBuilder;
import com.google.ortools.constraintsolver.IntVar;
import com.google.ortools.constraintsolver.OptimizeVar;
import com.google.ortools.constraintsolver.Solver;
import java.io.*;
import java.text.*;
import java.util.*;
public class SetCovering4 {
/** Solves a set covering problem. See http://www.hakank.org/google_or_tools/set_covering4.py */
private static void solve(int set_partition) {
Solver solver = new Solver("SetCovering4");
//
// data
//
// Set partition and set covering problem from
// Example from the Swedish book
// Lundgren, Roennqvist, Vaebrand
// 'Optimeringslaera' (translation: 'Optimization theory'),
// page 408.
int num_alternatives = 10;
int num_objects = 8;
// costs for the alternatives
int[] costs = {19, 16, 18, 13, 15, 19, 15, 17, 16, 15};
// the alternatives, and their objects
int[][] a = {// 1 2 3 4 5 6 7 8 the objects
{1, 0, 0, 0, 0, 1, 0, 0}, // alternative 1
{0, 1, 0, 0, 0, 1, 0, 1}, // alternative 2
{1, 0, 0, 1, 0, 0, 1, 0}, // alternative 3
{0, 1, 1, 0, 1, 0, 0, 0}, // alternative 4
{0, 1, 0, 0, 1, 0, 0, 0}, // alternative 5
{0, 1, 1, 0, 0, 0, 0, 0}, // alternative 6
{0, 1, 1, 1, 0, 0, 0, 0}, // alternative 7
{0, 0, 0, 1, 1, 0, 0, 1}, // alternative 8
{0, 0, 1, 0, 0, 1, 0, 1}, // alternative 9
{1, 0, 0, 0, 0, 1, 1, 0}}; // alternative 10
//
// variables
//
IntVar[] x = solver.makeIntVarArray(num_alternatives, 0, 1, "x");
// total cost of the selected alternatives, to be minimized
IntVar z = solver.makeScalProd(x, costs).var();
//
// constraints
//
for (int j = 0; j < num_objects; j++) {
IntVar[] b = new IntVar[num_alternatives];
for (int i = 0; i < num_alternatives; i++) {
b[i] = solver.makeProd(x[i], a[i][j]).var();
}
if (set_partition == 1) {
solver.addConstraint(solver.makeSumGreaterOrEqual(b, 1));
} else {
solver.addConstraint(solver.makeSumEquality(b, 1));
}
}
//
// objective
//
OptimizeVar objective = solver.makeMinimize(z, 1);
//
// search
//
DecisionBuilder db = solver.makePhase(x, solver.INT_VAR_DEFAULT, solver.INT_VALUE_DEFAULT);
solver.newSearch(db, objective);
//
// output
//
while (solver.nextSolution()) {
System.out.println("z: " + z.value());
System.out.print("Selected alternatives: ");
for (int i = 0; i < num_alternatives; i++) {
if (x[i].value() == 1) {
System.out.print((1 + i) + " ");
}
}
System.out.println("\n");
}
solver.endSearch();
// Statistics
System.out.println();
System.out.println("Solutions: " + solver.solutions());
System.out.println("Failures: " + solver.failures());
System.out.println("Branches: " + solver.branches());
System.out.println("Wall time: " + solver.wallTime() + "ms");
}
public static void main(String[] args) throws Exception {
Loader.loadNativeLibraries();
System.out.println("Set partition:");
SetCovering4.solve(1);
System.out.println("\nSet covering:");
SetCovering4.solve(0);
}
}
| 3,957
| 30.919355
| 98
|
java
|
or-tools
|
or-tools-master/examples/contrib/SetCoveringDeployment.java
|
// Copyright 2011 Hakan Kjellerstrand hakank@gmail.com
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.constraintsolver.DecisionBuilder;
import com.google.ortools.constraintsolver.IntVar;
import com.google.ortools.constraintsolver.OptimizeVar;
import com.google.ortools.constraintsolver.Solver;
import java.io.*;
import java.text.*;
import java.util.*;
public class SetCoveringDeployment {
/**
* Solves a set covering deployment problem. See
* http://www.hakank.org/google_or_tools/set_covering_deployment.py
*/
private static void solve() {
Solver solver = new Solver("SetCoveringDeployment");
//
// data
//
// From http://mathworld.wolfram.com/SetCoveringDeployment.html
String[] countries = {
"Alexandria", "Asia Minor", "Britain", "Byzantium", "Gaul", "Iberia", "Rome", "Tunis"};
int n = countries.length;
// the incidence matrix (neighbours)
int[][] mat = {{0, 1, 0, 1, 0, 0, 1, 1}, {1, 0, 0, 1, 0, 0, 0, 0}, {0, 0, 0, 0, 1, 1, 0, 0},
{1, 1, 0, 0, 0, 0, 1, 0}, {0, 0, 1, 0, 0, 1, 1, 0}, {0, 0, 1, 0, 1, 0, 1, 1},
{1, 0, 0, 1, 1, 1, 0, 1}, {1, 0, 0, 0, 0, 1, 1, 0}};
//
// variables
//
// First army
IntVar[] x = solver.makeIntVarArray(n, 0, 1, "x");
// Second (reserve) army
IntVar[] y = solver.makeIntVarArray(n, 0, 1, "y");
// total number of armies
IntVar num_armies = solver.makeSum(solver.makeSum(x), solver.makeSum(y)).var();
//
// constraints
//
//
// Constraint 1: There is always an army in a city
// (+ maybe a backup)
// Or rather: if there is a backup, there
// must also be an army
//
for (int i = 0; i < n; i++) {
solver.addConstraint(solver.makeGreaterOrEqual(x[i], y[i]));
}
//
// Constraint 2: There should always be a backup
// army near every city
//
for (int i = 0; i < n; i++) {
ArrayList<IntVar> count_neighbours = new ArrayList<IntVar>();
for (int j = 0; j < n; j++) {
if (mat[i][j] == 1) {
count_neighbours.add(y[j]);
}
}
solver.addConstraint(solver.makeGreaterOrEqual(
solver.makeSum(x[i], solver.makeSum(count_neighbours.toArray(new IntVar[1])).var()), 1));
}
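// In other words, for every country i the loop above posts
//   x[i] + sum of y[j] over neighbours j of i  >=  1,
// so each country is protected either by its own field army or by a reserve army
// stationed in an adjacent country, which is the set covering deployment condition.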
//
// objective
//
OptimizeVar objective = solver.makeMinimize(num_armies, 1);
//
// search
//
DecisionBuilder db = solver.makePhase(x, solver.INT_VAR_DEFAULT, solver.INT_VALUE_DEFAULT);
solver.newSearch(db, objective);
//
// output
//
while (solver.nextSolution()) {
System.out.println("num_armies: " + num_armies.value());
for (int i = 0; i < n; i++) {
if (x[i].value() == 1) {
System.out.print("Army: " + countries[i] + " ");
}
if (y[i].value() == 1) {
System.out.println("Reserve army: " + countries[i]);
}
}
}
solver.endSearch();
// Statistics
System.out.println();
System.out.println("Solutions: " + solver.solutions());
System.out.println("Failures: " + solver.failures());
System.out.println("Branches: " + solver.branches());
System.out.println("Wall time: " + solver.wallTime() + "ms");
}
public static void main(String[] args) throws Exception {
Loader.loadNativeLibraries();
SetCoveringDeployment.solve();
}
}
| 4,001
| 30.023256
| 99
|
java
|
or-tools
|
or-tools-master/examples/contrib/SimpleRoutingTest.java
|
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.constraintsolver.Assignment;
import com.google.ortools.constraintsolver.FirstSolutionStrategy;
import com.google.ortools.constraintsolver.RoutingIndexManager;
import com.google.ortools.constraintsolver.RoutingModel;
import com.google.ortools.constraintsolver.RoutingSearchParameters;
import com.google.ortools.constraintsolver.main;
import java.util.ArrayList;
import java.util.function.LongBinaryOperator;
public class SimpleRoutingTest {
// Static Add Library
private ArrayList<Integer> globalRes;
private long globalResCost;
private int[][] costMatrix;
public ArrayList<Integer> getGlobalRes() {
return globalRes;
}
public void setGlobalRes(ArrayList<Integer> globalRes) {
this.globalRes = globalRes;
}
public long getGlobalResCost() {
return globalResCost;
}
public void setGlobalResCost(long globalResCost) {
this.globalResCost = globalResCost;
}
public int[][] getCostMatrix() {
return costMatrix;
}
public void setCostMatrix(int[][] costMatrix) {
this.costMatrix = costMatrix;
}
public SimpleRoutingTest(int[][] costMatrix) {
super();
this.costMatrix = costMatrix;
globalRes = new ArrayList<>();
}
// Node Distance Evaluation
public static class NodeDistance implements LongBinaryOperator {
private int[][] costMatrix;
private RoutingIndexManager indexManager;
public NodeDistance(RoutingIndexManager manager, int[][] costMatrix) {
this.costMatrix = costMatrix;
this.indexManager = manager;
}
@Override
public long applyAsLong(long firstIndex, long secondIndex) {
final int firstNode = indexManager.indexToNode(firstIndex);
final int secondNode = indexManager.indexToNode(secondIndex);
return costMatrix[firstNode][secondNode];
}
}
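// The callback above is what gets registered as the transit (arc cost) evaluator:
// the routing model works with internal long indices, so applyAsLong first maps them
// back to the original node numbers via the RoutingIndexManager before reading the
// cost matrix.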
// Solve Method
public void solve() {
RoutingIndexManager manager = new RoutingIndexManager(costMatrix.length, 1, 0);
RoutingModel routing = new RoutingModel(manager);
RoutingSearchParameters parameters =
RoutingSearchParameters.newBuilder()
.mergeFrom(main.defaultRoutingSearchParameters())
.setFirstSolutionStrategy(FirstSolutionStrategy.Value.PATH_CHEAPEST_ARC)
.build();
NodeDistance distances = new NodeDistance(manager, costMatrix);
routing.setArcCostEvaluatorOfAllVehicles(routing.registerTransitCallback(distances));
Assignment solution = routing.solve();
if (solution != null) {
int route_number = 0;
for (long node = routing.start(route_number); !routing.isEnd(node);
node = solution.value(routing.nextVar(node))) {
globalRes.add((int) node);
}
// read the objective inside the null check to avoid a NullPointerException
globalResCost = solution.objectiveValue();
}
System.out.println("cost = " + globalResCost);
}
public static void main(String[] args) throws Exception {
Loader.loadNativeLibraries();
int[][] values = new int[4][4];
values[0][0] = 0;
values[0][1] = 5;
values[0][2] = 3;
values[0][3] = 6;
values[1][0] = 5;
values[1][1] = 0;
values[1][2] = 8;
values[1][3] = 1;
values[2][0] = 3;
values[2][1] = 8;
values[2][2] = 0;
values[2][3] = 4;
values[3][0] = 6;
values[3][1] = 1;
values[3][2] = 4;
values[3][3] = 0;
SimpleRoutingTest model = new SimpleRoutingTest(values);
model.solve();
}
}
| 3,427
| 29.070175
| 89
|
java
|
or-tools
|
or-tools-master/examples/contrib/StableMarriage.java
|
// Copyright 2011 Hakan Kjellerstrand hakank@gmail.com
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.constraintsolver.DecisionBuilder;
import com.google.ortools.constraintsolver.IntVar;
import com.google.ortools.constraintsolver.Solver;
import java.io.*;
import java.text.*;
import java.util.*;
public class StableMarriage {
/**
* Solves some stable marriage problems. See
* http://www.hakank.org/google_or_tools/stable_marriage.py
*/
private static void solve(long[][][] ranks, String problem_name) {
Solver solver = new Solver("StableMarriage");
//
// data
//
System.out.println("\n#####################");
System.out.println("Problem: " + problem_name);
long[][] rankWomen = ranks[0];
long[][] rankMen = ranks[1];
int n = rankWomen.length;
//
// variables
//
IntVar[] wife = solver.makeIntVarArray(n, 0, n - 1, "wife");
IntVar[] husband = solver.makeIntVarArray(n, 0, n - 1, "husband");
//
// constraints
// (the comments are the Comet code)
// forall(m in Men)
// cp.post(husband[wife[m]] == m);
for (int m = 0; m < n; m++) {
solver.addConstraint(solver.makeEquality(solver.makeElement(husband, wife[m]), m));
}
// forall(w in Women)
// cp.post(wife[husband[w]] == w);
for (int w = 0; w < n; w++) {
solver.addConstraint(solver.makeEquality(solver.makeElement(wife, husband[w]), w));
}
// forall(m in Men, o in Women)
// cp.post(rankMen[m,o] < rankMen[m, wife[m]] =>
// rankWomen[o,husband[o]] < rankWomen[o,m]);
for (int m = 0; m < n; m++) {
for (int o = 0; o < n; o++) {
IntVar b1 = solver.makeIsGreaterCstVar(
solver.makeElement(rankMen[m], wife[m]).var(), rankMen[m][o]);
IntVar b2 = solver.makeIsLessCstVar(
solver.makeElement(rankWomen[o], husband[o]).var(), rankWomen[o][m]);
solver.addConstraint(solver.makeLessOrEqual(solver.makeDifference(b1, b2), 0));
}
}
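// Each iteration above encodes an implication through 0/1 variables (assuming a lower
// rank number means more preferred): b1 is 1 when man m would prefer woman o to his
// current wife, b2 is 1 when woman o prefers her current husband to m, and
// b1 - b2 <= 0 (i.e. b1 <= b2) forces b2 to be 1 whenever b1 is, which is the
// "no blocking pair" stability condition seen from m's side.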
// forall(w in Women, o in Men)
// cp.post(rankWomen[w,o] < rankWomen[w,husband[w]] =>
// rankMen[o,wife[o]] < rankMen[o,w]);
for (int w = 0; w < n; w++) {
for (int o = 0; o < n; o++) {
IntVar b1 = solver.makeIsGreaterCstVar(
solver.makeElement(rankWomen[w], husband[w]).var(), rankWomen[w][o]);
IntVar b2 =
solver.makeIsLessCstVar(solver.makeElement(rankMen[o], wife[o]).var(), rankMen[o][w]);
solver.addConstraint(solver.makeLessOrEqual(solver.makeDifference(b1, b2), 0));
}
}
//
// search
//
DecisionBuilder db = solver.makePhase(wife, solver.INT_VAR_DEFAULT, solver.INT_VALUE_DEFAULT);
solver.newSearch(db);
//
// output
//
while (solver.nextSolution()) {
System.out.print("wife : ");
for (int i = 0; i < n; i++) {
System.out.print(wife[i].value() + " ");
}
System.out.print("\nhusband: ");
for (int i = 0; i < n; i++) {
System.out.print(husband[i].value() + " ");
}
System.out.println("\n");
}
solver.endSearch();
// Statistics
// System.out.println();
System.out.println("Solutions: " + solver.solutions());
System.out.println("Failures: " + solver.failures());
System.out.println("Branches: " + solver.branches());
System.out.println("Wall time: " + solver.wallTime() + "ms");
}
public static void main(String[] args) throws Exception {
Loader.loadNativeLibraries();
//
// From Pascal Van Hentenryck's OPL book
//
long[][][] van_hentenryck = {// rankWomen
{{1, 2, 4, 3, 5}, {3, 5, 1, 2, 4}, {5, 4, 2, 1, 3}, {1, 3, 5, 4, 2}, {4, 2, 3, 5, 1}},
// rankMen
{{5, 1, 2, 4, 3}, {4, 1, 3, 2, 5}, {5, 3, 2, 4, 1}, {1, 5, 4, 3, 2}, {4, 3, 2, 1, 5}}};
//
// Data from MathWorld
// http://mathworld.wolfram.com/StableMarriageProblem.html
//
long[][][] mathworld = {// rankWomen
{{3, 1, 5, 2, 8, 7, 6, 9, 4}, {9, 4, 8, 1, 7, 6, 3, 2, 5}, {3, 1, 8, 9, 5, 4, 2, 6, 7},
{8, 7, 5, 3, 2, 6, 4, 9, 1}, {6, 9, 2, 5, 1, 4, 7, 3, 8}, {2, 4, 5, 1, 6, 8, 3, 9, 7},
{9, 3, 8, 2, 7, 5, 4, 6, 1}, {6, 3, 2, 1, 8, 4, 5, 9, 7}, {8, 2, 6, 4, 9, 1, 3, 7, 5}},
// rankMen
{{7, 3, 8, 9, 6, 4, 2, 1, 5}, {5, 4, 8, 3, 1, 2, 6, 7, 9}, {4, 8, 3, 9, 7, 5, 6, 1, 2},
{9, 7, 4, 2, 5, 8, 3, 1, 6}, {2, 6, 4, 9, 8, 7, 5, 1, 3}, {2, 7, 8, 6, 5, 3, 4, 1, 9},
{1, 6, 2, 3, 8, 5, 4, 9, 7}, {5, 6, 9, 1, 2, 8, 4, 3, 7}, {6, 1, 4, 7, 5, 8, 3, 9, 2}}};
//
// Data from
// http://www.csee.wvu.edu/~ksmani/courses/fa01/random/lecnotes/lecture5.pdf
//
long[][][] problem3 = {// rankWomen
{{1, 2, 3, 4}, {4, 3, 2, 1}, {1, 2, 3, 4}, {3, 4, 1, 2}},
// rankMen"
{{1, 2, 3, 4}, {2, 1, 3, 4}, {1, 4, 3, 2}, {4, 3, 1, 2}}};
//
// Data from
// http://www.comp.rgu.ac.uk/staff/ha/ZCSP/additional_problems/stable_marriage/stable_marriage.pdf
// page 4
//
long[][][] problem4 = {// rankWomen
{{1, 5, 4, 6, 2, 3}, {4, 1, 5, 2, 6, 3}, {6, 4, 2, 1, 5, 3}, {1, 5, 2, 4, 3, 6},
{4, 2, 1, 5, 6, 3}, {2, 6, 3, 5, 1, 4}},
// rankMen
{{1, 4, 2, 5, 6, 3}, {3, 4, 6, 1, 5, 2}, {1, 6, 4, 2, 3, 5}, {6, 5, 3, 4, 2, 1},
{3, 1, 2, 4, 5, 6}, {2, 3, 1, 6, 5, 4}}};
StableMarriage.solve(van_hentenryck, "Van Hentenryck");
StableMarriage.solve(mathworld, "MathWorld");
StableMarriage.solve(problem3, "Problem 3");
StableMarriage.solve(problem4, "Problem 4");
}
}
| 6,232
| 34.214689
| 102
|
java
|
or-tools
|
or-tools-master/examples/contrib/StiglerMIP.java
|
/*
* Copyright 2017 Darian Sastre darian.sastre@minimaxlabs.com
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* ************************************************************************
*
* This model was created by Hakan Kjellerstrand (hakank@gmail.com)
*
* Java version by Darian Sastre (darian.sastre@minimaxlabs.com)
*/
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.linearsolver.MPConstraint;
import com.google.ortools.linearsolver.MPObjective;
import com.google.ortools.linearsolver.MPSolver;
import com.google.ortools.linearsolver.MPVariable;
import java.math.RoundingMode;
import java.text.DecimalFormat;
public class StiglerMIP {
private static void solve(String solverType) {
System.out.println("---- StiglerMIP with " + solverType);
MPSolver solver = MPSolver.createSolver(solverType);
if (solver == null)
return;
double infinity = MPSolver.infinity();
/** invariants */
double days = 365.25;
int nutrientsCount = 9;
int commoditiesCount = 77;
String[] nutrients = {
"calories", // Calories, unit = 1000
"protein", // Protein, unit = grams
"calcium", // Calcium, unit = grams
"iron", // Iron, unit = milligrams
"vitaminA", // Vitamin A, unit = 1000 International Units
"thiamine", // Thiamine, Vit. B1, unit = milligrams
"riboflavin", // Riboflavin, Vit. B2, unit = milligrams
"niacin", // Niacin (Nicotinic Acid), unit = milligrams
"ascorbicAcid" // Ascorbic Acid, Vit. C, unit = milligrams
};
String[] commodities = {"Wheat Flour (Enriched), 10 lb.", "Macaroni, 1 lb.",
"Wheat Cereal (Enriched), 28 oz.", "Corn Flakes, 8 oz.", "Corn Meal, 1 lb.",
"Hominy Grits, 24 oz.", "Rice, 1 lb.", "Rolled Oats, 1 lb.",
"White Bread (Enriched), 1 lb.", "Whole Wheat Bread, 1 lb.", "Rye Bread, 1 lb.",
"Pound Cake, 1 lb.", "Soda Crackers, 1 lb.", "Milk, 1 qt.",
"Evaporated Milk (can), 14.5 oz.", "Butter, 1 lb.", "Oleomargarine, 1 lb.", "Eggs, 1 doz.",
"Cheese (Cheddar), 1 lb.", "Cream, 1/2 pt.", "Peanut Butter, 1 lb.", "Mayonnaise, 1/2 pt.",
"Crisco, 1 lb.", "Lard, 1 lb.", "Sirloin Steak, 1 lb.", "Round Steak, 1 lb.",
"Rib Roast, 1 lb.", "Chuck Roast, 1 lb.", "Plate, 1 lb.", "Liver (Beef), 1 lb.",
"Leg of Lamb, 1 lb.", "Lamb Chops (Rib), 1 lb.", "Pork Chops, 1 lb.",
"Pork Loin Roast, 1 lb.", "Bacon, 1 lb.", "Ham - smoked, 1 lb.", "Salt Pork, 1 lb.",
"Roasting Chicken, 1 lb.", "Veal Cutlets, 1 lb.", "Salmon, Pink (can), 16 oz.",
"Apples, 1 lb.", "Bananas, 1 lb.", "Lemons, 1 doz.", "Oranges, 1 doz.",
"Green Beans, 1 lb.", "Cabbage, 1 lb.", "Carrots, 1 bunch", "Celery, 1 stalk",
"Lettuce, 1 head", "Onions, 1 lb.", "Potatoes, 15 lb.", "Spinach, 1 lb.",
"Sweet Potatoes, 1 lb.", "Peaches (can), No. 2 1/2", "Pears (can), No. 2 1/2,",
"Pineapple (can), No. 2 1/2", "Asparagus (can), No. 2", "Grean Beans (can), No. 2",
"Pork and Beans (can), 16 oz.", "Corn (can), No. 2", "Peas (can), No. 2",
"Tomatoes (can), No. 2", "Tomato Soup (can), 10 1/2 oz.", "Peaches, Dried, 1 lb.",
"Prunes, Dried, 1 lb.", "Raisins, Dried, 15 oz.", "Peas, Dried, 1 lb.",
"Lima Beans, Dried, 1 lb.", "Navy Beans, Dried, 1 lb.", "Coffee, 1 lb.", "Tea, 1/4 lb.",
"Cocoa, 8 oz.", "Chocolate, 8 oz.", "Sugar, 10 lb.", "Corn Sirup, 24 oz.",
"Molasses, 18 oz.", "Strawberry Preserve, 1 lb."};
// price and weight per unit correspond to the first two columns
double[][] data = {{36.0, 12600.0, 44.7, 1411.0, 2.0, 365.0, 0.0, 55.4, 33.3, 441.0, 0.0},
{14.1, 3217.0, 11.6, 418.0, 0.7, 54.0, 0.0, 3.2, 1.9, 68.0, 0.0},
{24.2, 3280.0, 11.8, 377.0, 14.4, 175.0, 0.0, 14.4, 8.8, 114.0, 0.0},
{7.1, 3194.0, 11.4, 252.0, 0.1, 56.0, 0.0, 13.5, 2.3, 68.0, 0.0},
{4.6, 9861.0, 36.0, 897.0, 1.7, 99.0, 30.9, 17.4, 7.9, 106.0, 0.0},
{8.5, 8005.0, 28.6, 680.0, 0.8, 80.0, 0.0, 10.6, 1.6, 110.0, 0.0},
{7.5, 6048.0, 21.2, 460.0, 0.6, 41.0, 0.0, 2.0, 4.8, 60.0, 0.0},
{7.1, 6389.0, 25.3, 907.0, 5.1, 341.0, 0.0, 37.1, 8.9, 64.0, 0.0},
{7.9, 5742.0, 15.6, 488.0, 2.5, 115.0, 0.0, 13.8, 8.5, 126.0, 0.0},
{9.1, 4985.0, 12.2, 484.0, 2.7, 125.0, 0.0, 13.9, 6.4, 160.0, 0.0},
{9.2, 4930.0, 12.4, 439.0, 1.1, 82.0, 0.0, 9.9, 3.0, 66.0, 0.0},
{24.8, 1829.0, 8.0, 130.0, 0.4, 31.0, 18.9, 2.8, 3.0, 17.0, 0.0},
{15.1, 3004.0, 12.5, 288.0, 0.5, 50.0, 0.0, 0.0, 0.0, 0.0, 0.0},
{11.0, 8867.0, 6.1, 310.0, 10.5, 18.0, 16.8, 4.0, 16.0, 7.0, 177.0},
{6.7, 6035.0, 8.4, 422.0, 15.1, 9.0, 26.0, 3.0, 23.5, 11.0, 60.0},
{20.8, 1473.0, 10.8, 9.0, 0.2, 3.0, 44.2, 0.0, 0.2, 2.0, 0.0},
{16.1, 2817.0, 20.6, 17.0, 0.6, 6.0, 55.8, 0.2, 0.0, 0.0, 0.0},
{32.6, 1857.0, 2.9, 238.0, 1.0, 52.0, 18.6, 2.8, 6.5, 1.0, 0.0},
{24.2, 1874.0, 7.4, 448.0, 16.4, 19.0, 28.1, 0.8, 10.3, 4.0, 0.0},
{14.1, 1689.0, 3.5, 49.0, 1.7, 3.0, 16.9, 0.6, 2.5, 0.0, 17.0},
{17.9, 2534.0, 15.7, 661.0, 1.0, 48.0, 0.0, 9.6, 8.1, 471.0, 0.0},
{16.7, 1198.0, 8.6, 18.0, 0.2, 8.0, 2.7, 0.4, 0.5, 0.0, 0.0},
{20.3, 2234.0, 20.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0},
{9.8, 4628.0, 41.7, 0.0, 0.0, 0.0, 0.2, 0.0, 0.5, 5.0, 0.0},
{39.6, 1145.0, 2.9, 166.0, 0.1, 34.0, 0.2, 2.1, 2.9, 69.0, 0.0},
{36.4, 1246.0, 2.2, 214.0, 0.1, 32.0, 0.4, 2.5, 2.4, 87.0, 0.0},
{29.2, 1553.0, 3.4, 213.0, 0.1, 33.0, 0.0, 0.0, 2.0, 0.0, 0.0},
{22.6, 2007.0, 3.6, 309.0, 0.2, 46.0, 0.4, 1.0, 4.0, 120.0, 0.0},
{14.6, 3107.0, 8.5, 404.0, 0.2, 62.0, 0.0, 0.9, 0.0, 0.0, 0.0},
{26.8, 1692.0, 2.2, 333.0, 0.2, 139.0, 169.2, 6.4, 50.8, 316.0, 525.0},
{27.6, 1643.0, 3.1, 245.0, 0.1, 20.0, 0.0, 2.8, 3.0, 86.0, 0.0},
{36.6, 1239.0, 3.3, 140.0, 0.1, 15.0, 0.0, 1.7, 2.7, 54.0, 0.0},
{30.7, 1477.0, 3.5, 196.0, 0.2, 80.0, 0.0, 17.4, 2.7, 60.0, 0.0},
{24.2, 1874.0, 4.4, 249.0, 0.3, 37.0, 0.0, 18.2, 3.6, 79.0, 0.0},
{25.6, 1772.0, 10.4, 152.0, 0.2, 23.0, 0.0, 1.8, 1.8, 71.0, 0.0},
{27.4, 1655.0, 6.7, 212.0, 0.2, 31.0, 0.0, 9.9, 3.3, 50.0, 0.0},
{16.0, 2835.0, 18.8, 164.0, 0.1, 26.0, 0.0, 1.4, 1.8, 0.0, 0.0},
{30.3, 1497.0, 1.8, 184.0, 0.1, 30.0, 0.1, 0.9, 1.8, 68.0, 46.0},
{42.3, 1072.0, 1.7, 156.0, 0.1, 24.0, 0.0, 1.4, 2.4, 57.0, 0.0},
{13.0, 3489.0, 5.8, 705.0, 6.8, 45.0, 3.5, 1.0, 4.9, 209.0, 0.0},
{4.4, 9072.0, 5.8, 27.0, 0.5, 36.0, 7.3, 3.6, 2.7, 5.0, 544.0},
{6.1, 4982.0, 4.9, 60.0, 0.4, 30.0, 17.4, 2.5, 3.5, 28.0, 498.0},
{26.0, 2380.0, 1.0, 21.0, 0.5, 14.0, 0.0, 0.5, 0.0, 4.0, 952.0},
{30.9, 4439.0, 2.2, 40.0, 1.1, 18.0, 11.1, 3.6, 1.3, 10.0, 1993.0},
{7.1, 5750.0, 2.4, 138.0, 3.7, 80.0, 69.0, 4.3, 5.8, 37.0, 862.0},
{3.7, 8949.0, 2.6, 125.0, 4.0, 36.0, 7.2, 9.0, 4.5, 26.0, 5369.0},
{4.7, 6080.0, 2.7, 73.0, 2.8, 43.0, 188.5, 6.1, 4.3, 89.0, 608.0},
{7.3, 3915.0, 0.9, 51.0, 3.0, 23.0, 0.9, 1.4, 1.4, 9.0, 313.0},
{8.2, 2247.0, 0.4, 27.0, 1.1, 22.0, 112.4, 1.8, 3.4, 11.0, 449.0},
{3.6, 11844.0, 5.8, 166.0, 3.8, 59.0, 16.6, 4.7, 5.9, 21.0, 1184.0},
{34.0, 16810.0, 14.3, 336.0, 1.8, 118.0, 6.7, 29.4, 7.1, 198.0, 2522.0},
{8.1, 4592.0, 1.1, 106.0, 0.0, 138.0, 918.4, 5.7, 13.8, 33.0, 2755.0},
{5.1, 7649.0, 9.6, 138.0, 2.7, 54.0, 290.7, 8.4, 5.4, 83.0, 1912.0},
{16.8, 4894.0, 3.7, 20.0, 0.4, 10.0, 21.5, 0.5, 1.0, 31.0, 196.0},
{20.4, 4030.0, 3.0, 8.0, 0.3, 8.0, 0.8, 0.8, 0.8, 5.0, 81.0},
{21.3, 3993.0, 2.4, 16.0, 0.4, 8.0, 2.0, 2.8, 0.8, 7.0, 399.0},
{27.7, 1945.0, 0.4, 33.0, 0.3, 12.0, 16.3, 1.4, 2.1, 17.0, 272.0},
{10.0, 5386.0, 1.0, 54.0, 2.0, 65.0, 53.9, 1.6, 4.3, 32.0, 431.0},
{7.1, 6389.0, 7.5, 364.0, 4.0, 134.0, 3.5, 8.3, 7.7, 56.0, 0.0},
{10.4, 5452.0, 5.2, 136.0, 0.2, 16.0, 12.0, 1.6, 2.7, 42.0, 218.0},
{13.8, 4109.0, 2.3, 136.0, 0.6, 45.0, 34.9, 4.9, 2.5, 37.0, 370.0},
{8.6, 6263.0, 1.3, 63.0, 0.7, 38.0, 53.2, 3.4, 2.5, 36.0, 1253.0},
{7.6, 3917.0, 1.6, 71.0, 0.6, 43.0, 57.9, 3.5, 2.4, 67.0, 862.0},
{15.7, 2889.0, 8.5, 87.0, 1.7, 173.0, 86.8, 1.2, 4.3, 55.0, 57.0},
{9.0, 4284.0, 12.8, 99.0, 2.5, 154.0, 85.7, 3.9, 4.3, 65.0, 257.0},
{9.4, 4524.0, 13.5, 104.0, 2.5, 136.0, 4.5, 6.3, 1.4, 24.0, 136.0},
{7.9, 5742.0, 20.0, 1367.0, 4.2, 345.0, 2.9, 28.7, 18.4, 162.0, 0.0},
{8.9, 5097.0, 17.4, 1055.0, 3.7, 459.0, 5.1, 26.9, 38.2, 93.0, 0.0},
{5.9, 7688.0, 26.9, 1691.0, 11.4, 792.0, 0.0, 38.4, 24.6, 217.0, 0.0},
{22.4, 2025.0, 0.0, 0.0, 0.0, 0.0, 0.0, 4.0, 5.1, 50.0, 0.0},
{17.4, 652.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 2.3, 42.0, 0.0},
{8.6, 2637.0, 8.7, 237.0, 3.0, 72.0, 0.0, 2.0, 11.9, 40.0, 0.0},
{16.2, 1400.0, 8.0, 77.0, 1.3, 39.0, 0.0, 0.9, 3.4, 14.0, 0.0},
{51.7, 8773.0, 34.9, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0},
{13.7, 4996.0, 14.7, 0.0, 0.5, 74.0, 0.0, 0.0, 0.0, 5.0, 0.0},
{13.6, 3752.0, 9.0, 0.0, 10.3, 244.0, 0.0, 1.9, 7.5, 146.0, 0.0},
{20.5, 2213.0, 6.4, 11.0, 0.4, 7.0, 0.2, 0.2, 0.4, 3.0, 0.0}};
// recommended daily nutritional allowance
double[] allowance = {3.0, 70.0, 0.8, 12.0, 5.0, 1.8, 2.7, 18.0, 75.0};
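// Data layout (inferred from the code below, not stated in the original):
// data[i][0] appears to be the unit price in cents -- it is only used to report purchased
// quantities -- and data[i][1] is not referenced in this part of the code. Columns 2..10 hold
// the nine nutrient values that are matched one-to-one against the allowance[] entries.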
/** variables */
MPVariable[] x = solver.makeNumVarArray(commoditiesCount, 0, 1000);
MPVariable[] xCost = solver.makeNumVarArray(commoditiesCount, 0, 1000);
MPVariable[] quant = solver.makeNumVarArray(commoditiesCount, 0, 1000);
MPVariable totalCost = solver.makeNumVar(0, 1000, "total_cost");
/** constraints & objective */
MPObjective obj = solver.objective();
MPConstraint[] costConstraint = new MPConstraint[2 * commoditiesCount];
MPConstraint[] quantConstraint = new MPConstraint[2 * commoditiesCount];
MPConstraint totalCostConstraint = solver.makeConstraint(0, 0);
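// The loop below ties the bookkeeping variables to the daily spend x[i]:
//   xCost[i]  = days * x[i]                     (money spent on commodity i over the horizon)
//   quant[i]  = days * 100 * x[i] / data[i][0]  (quantity purchased)
//   totalCost = days * sum_i x[i]               (closed by setCoefficient(totalCost, -1) after the loop)
// while the objective simply accumulates the daily spend x[i].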
for (int i = 0; i < commoditiesCount; i++) {
totalCostConstraint.setCoefficient(x[i], days);
costConstraint[i] = solver.makeConstraint(0, 0);
costConstraint[i].setCoefficient(x[i], days);
costConstraint[i].setCoefficient(xCost[i], -1);
quantConstraint[i] = solver.makeConstraint(0, 0);
quantConstraint[i].setCoefficient(x[i], days * 100 / data[i][0]);
quantConstraint[i].setCoefficient(quant[i], -1);
obj.setCoefficient(x[i], 1);
}
totalCostConstraint.setCoefficient(totalCost, -1);
MPConstraint[] nutrientConstraint = new MPConstraint[nutrientsCount];
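// Each nutrient i must reach its recommended allowance:
//   sum_j x[j] * data[j][i + 2] >= allowance[i]
// (only columns 2..10 of data carry nutrient values; the first two columns are skipped).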
for (int i = 0; i < nutrientsCount; i++) {
nutrientConstraint[i] = solver.makeConstraint(allowance[i], infinity);
for (int j = 0; j < commoditiesCount; j++) {
nutrientConstraint[i].setCoefficient(x[j], data[j][i + 2]);
}
}
final MPSolver.ResultStatus resultStatus = solver.solve();
/** printing */
if (resultStatus != MPSolver.ResultStatus.OPTIMAL) {
System.err.println("The problem does not have an optimal solution!");
return; // no point in printing solution values that may not exist
}
DecimalFormat df = new DecimalFormat("#.##");
df.setRoundingMode(RoundingMode.CEILING);
System.out.println("Min cost: " + df.format(obj.value()));
System.out.println("Total cost: " + df.format(totalCost.solutionValue()));
for (int i = 0; i < commoditiesCount; i++) {
if (x[i].solutionValue() > 0) {
System.out.println(commodities[i] + ": " + df.format(xCost[i].solutionValue()) + " "
+ df.format(quant[i].solutionValue()));
}
}
}
public static void main(String[] args) {
Loader.loadNativeLibraries();
solve("SCIP");
solve("CBC");
solve("GLPK");
}
}
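// A minimal sketch, not part of the original example: MPSolver.createSolver(...) returns null
// when a backend (e.g. GLPK) is missing from the local or-tools build, so callers can probe the
// backends used above before running the model. The class name DietBackendProbe is illustrative.
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.linearsolver.MPSolver;
public class DietBackendProbe {
  public static void main(String[] args) {
    Loader.loadNativeLibraries();
    for (String backend : new String[] {"SCIP", "CBC", "GLPK"}) {
      if (MPSolver.createSolver(backend) == null) {
        System.err.println(backend + " is not available in this build, skipping.");
      } else {
        System.out.println(backend + " is available.");
      }
    }
  }
}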
| 12,315 | 52.547826 | 99 | java |
or-tools | or-tools-master/examples/contrib/Strimko2.java |
// Copyright 2011 Hakan Kjellerstrand hakank@gmail.com
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.constraintsolver.DecisionBuilder;
import com.google.ortools.constraintsolver.IntVar;
import com.google.ortools.constraintsolver.Solver;
import java.io.*;
import java.text.*;
import java.util.*;
public class Strimko2 {
/** Solves a Strimko problem. See http://www.hakank.org/google_or_tools/strimko2.py */
private static void solve() {
Solver solver = new Solver("Strimko2");
//
// data
//
int[][] streams = {{1, 1, 2, 2, 2, 2, 2}, {1, 1, 2, 3, 3, 3, 2}, {1, 4, 1, 3, 3, 5, 5},
{4, 4, 3, 1, 3, 5, 5}, {4, 6, 6, 6, 7, 7, 5}, {6, 4, 6, 4, 5, 5, 7}, {6, 6, 4, 7, 7, 7, 7}};
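// streams[i][j] is the stream (region) id of cell (i,j); the "streams" constraint further down
// forces the values inside each stream to be pairwise distinct, just like the rows and columns
// of the Latin square.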
// Note: This is 1-based
int[][] placed = {{2, 1, 1}, {2, 3, 7}, {2, 5, 6}, {2, 7, 4}, {3, 2, 7}, {3, 6, 1}, {4, 1, 4},
{4, 7, 5}, {5, 2, 2}, {5, 6, 6}};
int n = streams.length;
int num_placed = placed.length;
//
// variables
//
IntVar[][] x = new IntVar[n][n];
IntVar[] x_flat = new IntVar[n * n];
for (int i = 0; i < n; i++) {
for (int j = 0; j < n; j++) {
x[i][j] = solver.makeIntVar(1, n, "x[" + i + "," + j + "]");
x_flat[i * n + j] = x[i][j];
}
}
//
// constraints
//
// all rows and columns must be unique, i.e. a Latin Square
for (int i = 0; i < n; i++) {
IntVar[] row = new IntVar[n];
IntVar[] col = new IntVar[n];
for (int j = 0; j < n; j++) {
row[j] = x[i][j];
col[j] = x[j][i];
}
solver.addConstraint(solver.makeAllDifferent(row));
solver.addConstraint(solver.makeAllDifferent(col));
}
// streams
for (int s = 1; s <= n; s++) {
ArrayList<IntVar> tmp = new ArrayList<IntVar>();
for (int i = 0; i < n; i++) {
for (int j = 0; j < n; j++) {
if (streams[i][j] == s) {
tmp.add(x[i][j]);
}
}
}
solver.addConstraint(solver.makeAllDifferent(tmp.toArray(new IntVar[1])));
}
// placed
for (int i = 0; i < num_placed; i++) {
// note: also adjust to 0-based
solver.addConstraint(
solver.makeEquality(x[placed[i][0] - 1][placed[i][1] - 1], placed[i][2]));
}
//
// search
//
DecisionBuilder db = solver.makePhase(x_flat, solver.INT_VAR_DEFAULT, solver.INT_VALUE_DEFAULT);
solver.newSearch(db);
//
// output
//
while (solver.nextSolution()) {
for (int i = 0; i < n; i++) {
for (int j = 0; j < n; j++) {
System.out.print(x[i][j].value() + " ");
}
System.out.println();
}
System.out.println();
}
solver.endSearch();
// Statistics
System.out.println();
System.out.println("Solutions: " + solver.solutions());
System.out.println("Failures: " + solver.failures());
System.out.println("Branches: " + solver.branches());
System.out.println("Wall time: " + solver.wallTime() + "ms");
}
public static void main(String[] args) throws Exception {
Loader.loadNativeLibraries();
Strimko2.solve();
}
}
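// A minimal sketch, not part of the original file: the stream constraint above generalizes to
// any region map (Strimko streams, Sudoku boxes, ...), so it can be factored into a reusable
// helper. The class and method names below are illustrative, not part of or-tools.
package com.google.ortools.contrib;
import com.google.ortools.constraintsolver.IntVar;
import com.google.ortools.constraintsolver.Solver;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class RegionAllDifferent {
  /** Posts one AllDifferent constraint per distinct region id found in {@code regions}. */
  public static void post(Solver solver, IntVar[][] x, int[][] regions) {
    Map<Integer, List<IntVar>> byRegion = new HashMap<>();
    for (int i = 0; i < regions.length; i++) {
      for (int j = 0; j < regions[i].length; j++) {
        byRegion.computeIfAbsent(regions[i][j], k -> new ArrayList<>()).add(x[i][j]);
      }
    }
    for (List<IntVar> cells : byRegion.values()) {
      solver.addConstraint(solver.makeAllDifferent(cells.toArray(new IntVar[0])));
    }
  }
}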
| 3,716 | 28.975806 | 100 | java |
or-tools | or-tools-master/examples/contrib/Sudoku.java |
// Copyright 2011 Hakan Kjellerstrand hakank@gmail.com
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.constraintsolver.DecisionBuilder;
import com.google.ortools.constraintsolver.IntVar;
import com.google.ortools.constraintsolver.Solver;
import java.io.*;
import java.text.*;
import java.util.*;
public class Sudoku {
/** Solves a Sudoku problem. */
private static void solve() {
Solver solver = new Solver("Sudoku");
int cell_size = 3;
int n = cell_size * cell_size;
// 0 marks an unknown value
int[][] initial_grid = new int[][] {{0, 6, 0, 0, 5, 0, 0, 2, 0}, {0, 0, 0, 3, 0, 0, 0, 9, 0},
{7, 0, 0, 6, 0, 0, 0, 1, 0}, {0, 0, 6, 0, 3, 0, 4, 0, 0}, {0, 0, 4, 0, 7, 0, 1, 0, 0},
{0, 0, 5, 0, 9, 0, 8, 0, 0}, {0, 4, 0, 0, 0, 1, 0, 0, 6}, {0, 3, 0, 0, 0, 8, 0, 0, 0},
{0, 2, 0, 0, 4, 0, 0, 5, 0}};
//
// variables
//
IntVar[][] grid = new IntVar[n][n];
IntVar[] grid_flat = new IntVar[n * n];
for (int i = 0; i < n; i++) {
for (int j = 0; j < n; j++) {
grid[i][j] = solver.makeIntVar(1, 9, "grid[" + i + "," + j + "]");
grid_flat[i * n + j] = grid[i][j];
}
}
//
// constraints
//
// init and rows
for (int i = 0; i < n; i++) {
IntVar[] row = new IntVar[n];
for (int j = 0; j < n; j++) {
if (initial_grid[i][j] > 0) {
solver.addConstraint(solver.makeEquality(grid[i][j], initial_grid[i][j]));
}
row[j] = grid[i][j];
}
solver.addConstraint(solver.makeAllDifferent(row));
}
// columns
for (int j = 0; j < n; j++) {
IntVar[] col = new IntVar[n];
for (int i = 0; i < n; i++) {
col[i] = grid[i][j];
}
solver.addConstraint(solver.makeAllDifferent(col));
}
// cells
for (int i = 0; i < cell_size; i++) {
for (int j = 0; j < cell_size; j++) {
IntVar[] cell = new IntVar[n];
for (int di = 0; di < cell_size; di++) {
for (int dj = 0; dj < cell_size; dj++) {
cell[di * cell_size + dj] = grid[i * cell_size + di][j * cell_size + dj];
}
}
solver.addConstraint(solver.makeAllDifferent(cell));
}
}
//
// Search
//
DecisionBuilder db =
solver.makePhase(grid_flat, solver.INT_VAR_SIMPLE, solver.INT_VALUE_SIMPLE);
solver.newSearch(db);
while (solver.nextSolution()) {
for (int i = 0; i < n; i++) {
for (int j = 0; j < n; j++) {
System.out.print(grid[i][j].value() + " ");
}
System.out.println();
}
System.out.println();
}
solver.endSearch();
// Statistics
System.out.println();
System.out.println("Solutions: " + solver.solutions());
System.out.println("Failures: " + solver.failures());
System.out.println("Branches: " + solver.branches());
System.out.println("Wall time: " + solver.wallTime() + "ms");
}
public static void main(String[] args) throws Exception {
Loader.loadNativeLibraries();
Sudoku.solve();
}
}
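// A minimal sketch, not part of the original file: the same puzzle modeled with the newer
// CP-SAT solver instead of the classic constraint solver. The CP-SAT class and method names
// used here (CpModel, CpSolver, CpSolverStatus, newIntVar, addEquality, addAllDifferent) are
// assumed to match recent or-tools releases; treat this as an illustration, not the official example.
package com.google.ortools.contrib;
import com.google.ortools.Loader;
import com.google.ortools.sat.CpModel;
import com.google.ortools.sat.CpSolver;
import com.google.ortools.sat.CpSolverStatus;
import com.google.ortools.sat.IntVar;
public class SudokuCpSat {
  public static void main(String[] args) {
    Loader.loadNativeLibraries();
    final int cell = 3;
    final int n = cell * cell;
    // Same clue grid as above; 0 marks an unknown value.
    int[][] given = {{0, 6, 0, 0, 5, 0, 0, 2, 0}, {0, 0, 0, 3, 0, 0, 0, 9, 0},
        {7, 0, 0, 6, 0, 0, 0, 1, 0}, {0, 0, 6, 0, 3, 0, 4, 0, 0}, {0, 0, 4, 0, 7, 0, 1, 0, 0},
        {0, 0, 5, 0, 9, 0, 8, 0, 0}, {0, 4, 0, 0, 0, 1, 0, 0, 6}, {0, 3, 0, 0, 0, 8, 0, 0, 0},
        {0, 2, 0, 0, 4, 0, 0, 5, 0}};
    CpModel model = new CpModel();
    IntVar[][] grid = new IntVar[n][n];
    for (int i = 0; i < n; i++) {
      for (int j = 0; j < n; j++) {
        grid[i][j] = model.newIntVar(1, n, "grid_" + i + "_" + j);
        if (given[i][j] > 0) {
          model.addEquality(grid[i][j], given[i][j]); // pin the clues
        }
      }
    }
    // Rows and columns must each hold distinct values.
    for (int i = 0; i < n; i++) {
      IntVar[] row = new IntVar[n];
      IntVar[] col = new IntVar[n];
      for (int j = 0; j < n; j++) {
        row[j] = grid[i][j];
        col[j] = grid[j][i];
      }
      model.addAllDifferent(row);
      model.addAllDifferent(col);
    }
    // Each 3x3 box must hold distinct values.
    for (int bi = 0; bi < cell; bi++) {
      for (int bj = 0; bj < cell; bj++) {
        IntVar[] box = new IntVar[n];
        for (int di = 0; di < cell; di++) {
          for (int dj = 0; dj < cell; dj++) {
            box[di * cell + dj] = grid[bi * cell + di][bj * cell + dj];
          }
        }
        model.addAllDifferent(box);
      }
    }
    CpSolver cpSolver = new CpSolver();
    CpSolverStatus status = cpSolver.solve(model);
    if (status == CpSolverStatus.OPTIMAL || status == CpSolverStatus.FEASIBLE) {
      for (int i = 0; i < n; i++) {
        StringBuilder line = new StringBuilder();
        for (int j = 0; j < n; j++) {
          line.append(cpSolver.value(grid[i][j])).append(' ');
        }
        System.out.println(line);
      }
    }
  }
}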
| 3,659 | 29.5 | 97 | java |