repo
stringlengths 1
191
⌀ | file
stringlengths 23
351
| code
stringlengths 0
5.32M
| file_length
int64 0
5.32M
| avg_line_length
float64 0
2.9k
| max_line_length
int64 0
288k
| extension_type
stringclasses 1
value |
|---|---|---|---|---|---|---|
soot
|
soot-master/src/main/java/soot/jimple/internal/JOrExpr.java
|
package soot.jimple.internal;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 1999 Patrick Lam
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.Type;
import soot.Unit;
import soot.Value;
import soot.baf.Baf;
import soot.jimple.ExprSwitch;
import soot.jimple.Jimple;
import soot.jimple.OrExpr;
import soot.util.Switch;
/**
 * Jimple representation of a bitwise OR ({@code |}) expression over int-like or long operands.
 */
public class JOrExpr extends AbstractJimpleIntLongBinopExpr implements OrExpr {

  /**
   * Builds an OR expression combining the two given operands.
   *
   * @param op1 the left operand
   * @param op2 the right operand
   */
  public JOrExpr(Value op1, Value op2) {
    super(op1, op2);
  }

  /** Returns the operator token used when printing this expression. */
  @Override
  public String getSymbol() {
    return " | ";
  }

  /** Visitor dispatch to {@link ExprSwitch#caseOrExpr}. */
  @Override
  public void apply(Switch sw) {
    ((ExprSwitch) sw).caseOrExpr(this);
  }

  /** Emits the Baf-level OR instruction, typed by the first operand's type. */
  @Override
  protected Unit makeBafInst(Type opType) {
    // NOTE(review): the opType parameter is unused; op1's type is taken instead — confirm intended.
    return Baf.v().newOrInst(getOp1().getType());
  }

  /** Returns a deep copy of this expression, cloning operands where necessary. */
  @Override
  public Object clone() {
    Value left = Jimple.cloneIfNecessary(getOp1());
    Value right = Jimple.cloneIfNecessary(getOp2());
    return new JOrExpr(left, right);
  }
}
| 1,571
| 25.2
| 93
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/internal/JRemExpr.java
|
package soot.jimple.internal;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 1999 Patrick Lam
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.Type;
import soot.Unit;
import soot.Value;
import soot.ValueBox;
import soot.baf.Baf;
import soot.jimple.ExprSwitch;
import soot.jimple.Jimple;
import soot.jimple.RemExpr;
import soot.util.Switch;
/**
 * Jimple representation of a remainder ({@code %}) expression.
 */
public class JRemExpr extends AbstractJimpleFloatBinopExpr implements RemExpr {

  /** Creates a remainder expression, wrapping both operands in fresh boxes. */
  public JRemExpr(Value op1, Value op2) {
    super(op1, op2);
  }

  /** Creates a remainder expression reusing the supplied operand boxes. */
  public JRemExpr(ValueBox op1, ValueBox op2) {
    super(op1, op2);
  }

  /** Returns the operator token used when printing this expression. */
  @Override
  public String getSymbol() {
    return " % ";
  }

  /** Visitor dispatch to {@link ExprSwitch#caseRemExpr}. */
  @Override
  public void apply(Switch sw) {
    ((ExprSwitch) sw).caseRemExpr(this);
  }

  /** Emits the Baf-level rem instruction, typed by the first operand's type. */
  @Override
  protected Unit makeBafInst(Type opType) {
    return Baf.v().newRemInst(getOp1().getType());
  }

  /** Returns a deep copy of this expression, cloning operands where necessary. */
  @Override
  public Object clone() {
    Value left = Jimple.cloneIfNecessary(getOp1());
    Value right = Jimple.cloneIfNecessary(getOp2());
    return new JRemExpr(left, right);
  }
}
| 1,672
| 24.738462
| 94
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/internal/JRetStmt.java
|
package soot.jimple.internal;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 1999 Patrick Lam
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.ArrayList;
import java.util.List;
import soot.UnitPrinter;
import soot.Value;
import soot.ValueBox;
import soot.jimple.Jimple;
import soot.jimple.RetStmt;
import soot.jimple.StmtSwitch;
import soot.util.Switch;
/**
 * Jimple {@code ret} statement: returns from a JSR subroutine to the address
 * held in a local variable.
 */
public class JRetStmt extends AbstractStmt implements RetStmt {

  // Box holding the local that contains the return address.
  protected final ValueBox stmtAddressBox;

  /** Creates a ret statement for the given address value, wrapped in a local box. */
  public JRetStmt(Value stmtAddress) {
    this(Jimple.v().newLocalBox(stmtAddress));
  }

  /** Creates a ret statement reusing an existing address box. */
  protected JRetStmt(ValueBox stmtAddressBox) {
    this.stmtAddressBox = stmtAddressBox;
  }

  /** Returns a deep copy of this statement. */
  @Override
  public Object clone() {
    return new JRetStmt(Jimple.cloneIfNecessary(getStmtAddress()));
  }

  @Override
  public String toString() {
    return Jimple.RET + " " + stmtAddressBox.getValue().toString();
  }

  @Override
  public void toString(UnitPrinter up) {
    up.literal(Jimple.RET + " ");
    stmtAddressBox.toString(up);
  }

  /** Returns the address value this statement jumps to. */
  @Override
  public Value getStmtAddress() {
    return stmtAddressBox.getValue();
  }

  /** Returns the box holding the address value. */
  @Override
  public ValueBox getStmtAddressBox() {
    return stmtAddressBox;
  }

  /** Replaces the address value inside the box. */
  @Override
  public void setStmtAddress(Value stmtAddress) {
    stmtAddressBox.setValue(stmtAddress);
  }

  /** Use boxes are those of the address value plus the address box itself. */
  @Override
  public List<ValueBox> getUseBoxes() {
    Value address = stmtAddressBox.getValue();
    List<ValueBox> result = new ArrayList<ValueBox>(address.getUseBoxes());
    result.add(stmtAddressBox);
    return result;
  }

  /** Visitor dispatch to {@link StmtSwitch#caseRetStmt}. */
  @Override
  public void apply(Switch sw) {
    ((StmtSwitch) sw).caseRetStmt(this);
  }

  @Override
  public boolean fallsThrough() {
    return true;
  }

  @Override
  public boolean branches() {
    return false;
  }
}
| 2,418
| 22.715686
| 95
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/internal/JReturnStmt.java
|
package soot.jimple.internal;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 1999 Patrick Lam
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.List;
import soot.Unit;
import soot.UnitPrinter;
import soot.Value;
import soot.ValueBox;
import soot.baf.Baf;
import soot.jimple.ConvertToBaf;
import soot.jimple.Jimple;
import soot.jimple.JimpleToBafContext;
import soot.jimple.ReturnStmt;
import soot.jimple.StmtSwitch;
import soot.util.Switch;
/**
 * Jimple {@code return <value>} statement.
 */
public class JReturnStmt extends AbstractOpStmt implements ReturnStmt {

  /** Creates a return statement for the given returned value. */
  public JReturnStmt(Value returnValue) {
    this(Jimple.v().newImmediateBox(returnValue));
  }

  /** Creates a return statement reusing an existing operand box. */
  protected JReturnStmt(ValueBox returnValueBox) {
    super(returnValueBox);
  }

  /** Returns a deep copy of this statement. */
  @Override
  public Object clone() {
    return new JReturnStmt(Jimple.cloneIfNecessary(getOp()));
  }

  @Override
  public String toString() {
    return Jimple.RETURN + " " + opBox.getValue().toString();
  }

  @Override
  public void toString(UnitPrinter up) {
    up.literal(Jimple.RETURN + " ");
    opBox.toString(up);
  }

  /** Visitor dispatch to {@link StmtSwitch#caseReturnStmt}. */
  @Override
  public void apply(Switch sw) {
    ((StmtSwitch) sw).caseReturnStmt(this);
  }

  /** Converts to Baf: first the operand, then a return instruction typed by it. */
  @Override
  public void convertToBaf(JimpleToBafContext context, List<Unit> out) {
    ((ConvertToBaf) getOp()).convertToBaf(context, out);
    Unit inst = Baf.v().newReturnInst(getOp().getType());
    inst.addAllTagsOf(this);
    out.add(inst);
  }

  /** A value return never falls through to the next statement. */
  @Override
  public boolean fallsThrough() {
    return false;
  }

  @Override
  public boolean branches() {
    return false;
  }
}
| 2,205
| 23.786517
| 72
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/internal/JReturnVoidStmt.java
|
package soot.jimple.internal;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 1999 Patrick Lam
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.List;
import soot.Unit;
import soot.UnitPrinter;
import soot.baf.Baf;
import soot.jimple.Jimple;
import soot.jimple.JimpleToBafContext;
import soot.jimple.ReturnVoidStmt;
import soot.jimple.StmtSwitch;
import soot.util.Switch;
/**
 * Jimple {@code return} statement for void methods; carries no operand.
 */
public class JReturnVoidStmt extends AbstractStmt implements ReturnVoidStmt {

  public JReturnVoidStmt() {
  }

  /** Returns a fresh instance; there is no state to copy. */
  @Override
  public Object clone() {
    return new JReturnVoidStmt();
  }

  @Override
  public String toString() {
    return Jimple.RETURN;
  }

  @Override
  public void toString(UnitPrinter up) {
    up.literal(Jimple.RETURN);
  }

  /** Visitor dispatch to {@link StmtSwitch#caseReturnVoidStmt}. */
  @Override
  public void apply(Switch sw) {
    ((StmtSwitch) sw).caseReturnVoidStmt(this);
  }

  /** Converts to a single Baf return-void instruction, copying this unit's tags. */
  @Override
  public void convertToBaf(JimpleToBafContext context, List<Unit> out) {
    Unit inst = Baf.v().newReturnVoidInst();
    inst.addAllTagsOf(this);
    out.add(inst);
  }

  /** A void return never falls through to the next statement. */
  @Override
  public boolean fallsThrough() {
    return false;
  }

  @Override
  public boolean branches() {
    return false;
  }
}
| 1,831
| 22.487179
| 77
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/internal/JShlExpr.java
|
package soot.jimple.internal;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 1999 Patrick Lam
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.IntType;
import soot.LongType;
import soot.Type;
import soot.Unit;
import soot.UnknownType;
import soot.Value;
import soot.baf.Baf;
import soot.jimple.ExprSwitch;
import soot.jimple.Jimple;
import soot.jimple.ShlExpr;
import soot.util.Switch;
/**
 * Jimple representation of a left shift ({@code <<}) expression.
 */
public class JShlExpr extends AbstractJimpleIntLongBinopExpr implements ShlExpr {

  /** Builds a left-shift expression from the value and the shift amount. */
  public JShlExpr(Value op1, Value op2) {
    super(op1, op2);
  }

  /** Returns the operator token used when printing this expression. */
  @Override
  public String getSymbol() {
    return " << ";
  }

  /** Visitor dispatch to {@link ExprSwitch#caseShlExpr}. */
  @Override
  public void apply(Switch sw) {
    ((ExprSwitch) sw).caseShlExpr(this);
  }

  /** Emits the Baf-level shl instruction, typed by the first operand's type. */
  @Override
  protected Unit makeBafInst(Type opType) {
    return Baf.v().newShlInst(getOp1().getType());
  }

  /**
   * Result type follows the left operand (int for int-like, long for long),
   * provided the shift amount is int-like; otherwise unknown.
   */
  @Override
  public Type getType() {
    if (!isIntLikeType(op2Box.getValue().getType())) {
      return UnknownType.v();
    }
    final Type leftType = op1Box.getValue().getType();
    if (isIntLikeType(leftType)) {
      return IntType.v();
    }
    final LongType longTy = LongType.v();
    return longTy.equals(leftType) ? longTy : UnknownType.v();
  }

  /** Returns a deep copy of this expression, cloning operands where necessary. */
  @Override
  public Object clone() {
    Value left = Jimple.cloneIfNecessary(getOp1());
    Value right = Jimple.cloneIfNecessary(getOp2());
    return new JShlExpr(left, right);
  }
}
| 2,002
| 24.679487
| 94
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/internal/JShrExpr.java
|
package soot.jimple.internal;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 1999 Patrick Lam
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.IntType;
import soot.LongType;
import soot.Type;
import soot.Unit;
import soot.UnknownType;
import soot.Value;
import soot.baf.Baf;
import soot.jimple.ExprSwitch;
import soot.jimple.Jimple;
import soot.jimple.ShrExpr;
import soot.util.Switch;
/**
 * Jimple representation of a signed right shift ({@code >>}) expression.
 */
public class JShrExpr extends AbstractJimpleIntLongBinopExpr implements ShrExpr {

  /** Builds a signed right-shift expression from the value and the shift amount. */
  public JShrExpr(Value op1, Value op2) {
    super(op1, op2);
  }

  /** Returns the operator token used when printing this expression. */
  @Override
  public String getSymbol() {
    return " >> ";
  }

  /** Visitor dispatch to {@link ExprSwitch#caseShrExpr}. */
  @Override
  public void apply(Switch sw) {
    ((ExprSwitch) sw).caseShrExpr(this);
  }

  /** Emits the Baf-level shr instruction, typed by the first operand's type. */
  @Override
  protected Unit makeBafInst(Type opType) {
    return Baf.v().newShrInst(getOp1().getType());
  }

  /**
   * Result type follows the left operand (int for int-like, long for long),
   * provided the shift amount is int-like; otherwise unknown.
   */
  @Override
  public Type getType() {
    if (!isIntLikeType(op2Box.getValue().getType())) {
      return UnknownType.v();
    }
    final Type leftType = op1Box.getValue().getType();
    if (isIntLikeType(leftType)) {
      return IntType.v();
    }
    final LongType longTy = LongType.v();
    return longTy.equals(leftType) ? longTy : UnknownType.v();
  }

  /** Returns a deep copy of this expression, cloning operands where necessary. */
  @Override
  public Object clone() {
    Value left = Jimple.cloneIfNecessary(getOp1());
    Value right = Jimple.cloneIfNecessary(getOp2());
    return new JShrExpr(left, right);
  }
}
| 2,002
| 24.679487
| 94
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/internal/JSpecialInvokeExpr.java
|
package soot.jimple.internal;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 1999 Patrick Lam
* Copyright (C) 2004 Ondrej Lhotak
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.ArrayList;
import java.util.List;
import java.util.ListIterator;
import soot.Local;
import soot.SootMethodRef;
import soot.Value;
import soot.jimple.Jimple;
/**
 * Jimple {@code specialinvoke} expression (constructors, private methods, super calls).
 */
public class JSpecialInvokeExpr extends AbstractSpecialInvokeExpr {

  /**
   * Creates a special-invoke expression on the given base local, wrapping every
   * argument in a fresh immediate box.
   */
  public JSpecialInvokeExpr(Local base, SootMethodRef methodRef, List<? extends Value> args) {
    super(Jimple.v().newLocalBox(base), methodRef, new ImmediateBox[args.size()]);
    final Jimple jimp = Jimple.v();
    int idx = 0;
    for (Value arg : args) {
      this.argBoxes[idx++] = jimp.newImmediateBox(arg);
    }
  }

  /** Returns a deep copy with each argument cloned where necessary. */
  @Override
  public Object clone() {
    final int n = getArgCount();
    List<Value> argCopies = new ArrayList<Value>(n);
    for (int i = 0; i < n; i++) {
      argCopies.add(Jimple.cloneIfNecessary(getArg(i)));
    }
    return new JSpecialInvokeExpr((Local) getBase(), methodRef, argCopies);
  }
}
| 1,828
| 31.087719
| 94
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/internal/JStaticInvokeExpr.java
|
package soot.jimple.internal;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 1999 Patrick Lam
* Copyright (C) 2004 Ondrej Lhotak
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.ArrayList;
import java.util.List;
import java.util.ListIterator;
import soot.SootMethodRef;
import soot.Value;
import soot.ValueBox;
import soot.jimple.Jimple;
/**
 * Jimple {@code staticinvoke} expression; has no base object.
 */
public class JStaticInvokeExpr extends AbstractStaticInvokeExpr {

  /**
   * Creates a static-invoke expression, wrapping every argument in a fresh
   * immediate box.
   */
  public JStaticInvokeExpr(SootMethodRef methodRef, List<? extends Value> args) {
    super(methodRef, new ValueBox[args.size()]);
    final Jimple jimp = Jimple.v();
    int idx = 0;
    for (Value arg : args) {
      this.argBoxes[idx++] = jimp.newImmediateBox(arg);
    }
  }

  /** Returns a deep copy with each argument cloned where necessary. */
  @Override
  public Object clone() {
    final int n = getArgCount();
    List<Value> argCopies = new ArrayList<Value>(n);
    for (int i = 0; i < n; i++) {
      argCopies.add(Jimple.cloneIfNecessary(getArg(i)));
    }
    return new JStaticInvokeExpr(methodRef, argCopies);
  }
}
| 1,762
| 29.929825
| 81
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/internal/JSubExpr.java
|
package soot.jimple.internal;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 1999 Patrick Lam
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.Type;
import soot.Unit;
import soot.Value;
import soot.baf.Baf;
import soot.jimple.ExprSwitch;
import soot.jimple.Jimple;
import soot.jimple.SubExpr;
import soot.util.Switch;
/**
 * Jimple representation of a subtraction ({@code -}) expression.
 */
public class JSubExpr extends AbstractJimpleFloatBinopExpr implements SubExpr {

  /** Builds a subtraction expression from the two operands. */
  public JSubExpr(Value op1, Value op2) {
    super(op1, op2);
  }

  /** Returns the operator token used when printing this expression. */
  @Override
  public final String getSymbol() {
    return " - ";
  }

  /** Visitor dispatch to {@link ExprSwitch#caseSubExpr}. */
  @Override
  public void apply(Switch sw) {
    ((ExprSwitch) sw).caseSubExpr(this);
  }

  /** Emits the Baf-level sub instruction, typed by the first operand's type. */
  @Override
  protected Unit makeBafInst(Type opType) {
    return Baf.v().newSubInst(getOp1().getType());
  }

  /** Returns a deep copy of this expression, cloning operands where necessary. */
  @Override
  public Object clone() {
    Value left = Jimple.cloneIfNecessary(getOp1());
    Value right = Jimple.cloneIfNecessary(getOp2());
    return new JSubExpr(left, right);
  }
}
| 1,582
| 25.383333
| 94
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/internal/JTableSwitchStmt.java
|
package soot.jimple.internal;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 1999 Patrick Lam
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.ArrayList;
import java.util.List;
import soot.Unit;
import soot.UnitBox;
import soot.UnitPrinter;
import soot.Value;
import soot.ValueBox;
import soot.baf.Baf;
import soot.baf.PlaceholderInst;
import soot.jimple.ConvertToBaf;
import soot.jimple.Jimple;
import soot.jimple.JimpleToBafContext;
import soot.jimple.StmtSwitch;
import soot.jimple.TableSwitchStmt;
import soot.util.Switch;
/**
 * Jimple representation of a {@code tableswitch} statement: a dense switch over the
 * contiguous key range [lowIndex, highIndex] (both inclusive) with one branch target
 * per key value plus a default target.
 */
public class JTableSwitchStmt extends AbstractSwitchStmt implements TableSwitchStmt {
  // Inclusive bounds of the key range. The protected constructor enforces
  // lowIndex <= highIndex, but the setters below do not re-check the invariant.
  protected int lowIndex;
  protected int highIndex;

  /** Convenience constructor: wraps the key and every target unit in fresh boxes. */
  public JTableSwitchStmt(Value key, int lowIndex, int highIndex, List<? extends Unit> targets, Unit defaultTarget) {
    this(Jimple.v().newImmediateBox(key), lowIndex, highIndex, getTargetBoxesArray(targets, Jimple.v()::newStmtBox),
        Jimple.v().newStmtBox(defaultTarget));
  }

  /** Constructor accepting pre-existing unit boxes for the targets. */
  public JTableSwitchStmt(Value key, int lowIndex, int highIndex, List<? extends UnitBox> targets, UnitBox defaultTarget) {
    this(Jimple.v().newImmediateBox(key), lowIndex, highIndex, targets.toArray(new UnitBox[targets.size()]), defaultTarget);
  }

  /**
   * Core constructor.
   *
   * @throws RuntimeException if {@code lowIndex > highIndex}
   */
  protected JTableSwitchStmt(ValueBox keyBox, int lowIndex, int highIndex, UnitBox[] targetBoxes, UnitBox defaultTargetBox) {
    super(keyBox, defaultTargetBox, targetBoxes);
    if (lowIndex > highIndex) {
      throw new RuntimeException(
          "Error creating tableswitch: lowIndex(" + lowIndex + ") can't be greater than highIndex(" + highIndex + ").");
    }
    this.lowIndex = lowIndex;
    this.highIndex = highIndex;
  }

  /** Returns a deep copy; target boxes are re-wrapped around the same target units. */
  @Override
  public Object clone() {
    return new JTableSwitchStmt(Jimple.cloneIfNecessary(getKey()), lowIndex, highIndex, getTargets(), getDefaultTarget());
  }

  @Override
  public String toString() {
    final char endOfLine = ' ';
    StringBuilder buf = new StringBuilder(Jimple.TABLESWITCH + "(");
    buf.append(keyBox.getValue().toString()).append(')').append(endOfLine);
    buf.append('{').append(endOfLine);
    // In this for-loop, we cannot use "<=" since 'i' would wrap around.
    // The case for "i == highIndex" is handled separately after the loop.
    final int low = lowIndex, high = highIndex;
    for (int i = low; i < high; i++) {
      buf.append(" " + Jimple.CASE + " ").append(i).append(": " + Jimple.GOTO + " ");
      Unit target = getTarget(i - low);
      buf.append(target == this ? "self" : target).append(';').append(endOfLine);
    }
    {
      // Final case (i == highIndex), printed outside the loop to avoid int overflow
      // when highIndex == Integer.MAX_VALUE.
      buf.append(" " + Jimple.CASE + " ").append(high).append(": " + Jimple.GOTO + " ");
      Unit target = getTarget(high - low);
      buf.append(target == this ? "self" : target).append(';').append(endOfLine);
    }
    {
      // Default branch.
      Unit target = getDefaultTarget();
      buf.append(" " + Jimple.DEFAULT + ": " + Jimple.GOTO + " ");
      buf.append(target == this ? "self" : target).append(';').append(endOfLine);
    }
    buf.append('}');
    return buf.toString();
  }

  @Override
  public void toString(UnitPrinter up) {
    up.literal(Jimple.TABLESWITCH);
    up.literal("(");
    keyBox.toString(up);
    up.literal(")");
    up.newline();
    up.literal("{");
    up.newline();
    // In this for-loop, we cannot use "<=" since 'i' would wrap around.
    // The case for "i == highIndex" is handled separately after the loop.
    final int high = highIndex;
    for (int i = lowIndex; i < high; i++) {
      printCaseTarget(up, i);
    }
    printCaseTarget(up, high);
    up.literal("    " + Jimple.DEFAULT + ": " + Jimple.GOTO + " ");
    defaultTargetBox.toString(up);
    up.literal(";");
    up.newline();
    up.literal("}");
  }

  // Prints one "case N: goto <target>;" line for the given key value.
  private void printCaseTarget(UnitPrinter up, int targetIndex) {
    up.literal("    " + Jimple.CASE + " ");
    up.literal(Integer.toString(targetIndex));
    up.literal(": " + Jimple.GOTO + " ");
    targetBoxes[targetIndex - lowIndex].toString(up);
    up.literal(";");
    up.newline();
  }

  @Override
  public void setLowIndex(int lowIndex) {
    this.lowIndex = lowIndex;
  }

  @Override
  public void setHighIndex(int highIndex) {
    this.highIndex = highIndex;
  }

  @Override
  public int getLowIndex() {
    return lowIndex;
  }

  @Override
  public int getHighIndex() {
    return highIndex;
  }

  /** Visitor dispatch to {@link StmtSwitch#caseTableSwitchStmt}. */
  @Override
  public void apply(Switch sw) {
    ((StmtSwitch) sw).caseTableSwitchStmt(this);
  }

  /**
   * Converts to Baf: emits the key computation, then a tableswitch instruction
   * whose targets are placeholder instructions to be patched later.
   */
  @Override
  public void convertToBaf(JimpleToBafContext context, List<Unit> out) {
    ((ConvertToBaf) getKey()).convertToBaf(context, out);
    final Baf vaf = Baf.v();
    final List<Unit> targets = getTargets();
    List<PlaceholderInst> targetPlaceholders = new ArrayList<PlaceholderInst>(targets.size());
    for (Unit target : targets) {
      targetPlaceholders.add(vaf.newPlaceholderInst(target));
    }
    Unit u = vaf.newTableSwitchInst(vaf.newPlaceholderInst(getDefaultTarget()), lowIndex, highIndex, targetPlaceholders);
    u.addAllTagsOf(this);
    out.add(u);
  }
}
| 5,686
| 31.129944
| 125
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/internal/JThrowStmt.java
|
package soot.jimple.internal;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 1999 Patrick Lam
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.List;
import soot.Unit;
import soot.UnitPrinter;
import soot.Value;
import soot.ValueBox;
import soot.baf.Baf;
import soot.jimple.ConvertToBaf;
import soot.jimple.Jimple;
import soot.jimple.JimpleToBafContext;
import soot.jimple.StmtSwitch;
import soot.jimple.ThrowStmt;
import soot.util.Switch;
/**
 * Jimple {@code throw} statement.
 */
public class JThrowStmt extends AbstractOpStmt implements ThrowStmt {

  /** Creates a throw statement for the given thrown value. */
  public JThrowStmt(Value op) {
    this(Jimple.v().newImmediateBox(op));
  }

  /** Creates a throw statement reusing an existing operand box. */
  protected JThrowStmt(ValueBox opBox) {
    super(opBox);
  }

  /** Returns a deep copy of this statement. */
  @Override
  public Object clone() {
    return new JThrowStmt(Jimple.cloneIfNecessary(getOp()));
  }

  @Override
  public String toString() {
    return Jimple.THROW + " " + opBox.getValue().toString();
  }

  @Override
  public void toString(UnitPrinter up) {
    up.literal(Jimple.THROW + " ");
    opBox.toString(up);
  }

  /** Visitor dispatch to {@link StmtSwitch#caseThrowStmt}. */
  @Override
  public void apply(Switch sw) {
    ((StmtSwitch) sw).caseThrowStmt(this);
  }

  /** Converts to Baf: first the operand, then a throw instruction. */
  @Override
  public void convertToBaf(JimpleToBafContext context, List<Unit> out) {
    ((ConvertToBaf) getOp()).convertToBaf(context, out);
    Unit inst = Baf.v().newThrowInst();
    inst.addAllTagsOf(this);
    out.add(inst);
  }

  /** A throw never falls through to the next statement. */
  @Override
  public boolean fallsThrough() {
    return false;
  }

  @Override
  public boolean branches() {
    return false;
  }
}
| 2,140
| 23.05618
| 72
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/internal/JTrap.java
|
package soot.jimple.internal;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 1999 Patrick Lam
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.AbstractTrap;
import soot.SootClass;
import soot.Unit;
import soot.UnitBox;
import soot.jimple.Jimple;
/**
 * Jimple exception handler entry (trap): an exception class guarded over a
 * begin/end unit range with an associated handler unit.
 */
public class JTrap extends AbstractTrap {

  /** Creates a trap, wrapping each unit in a fresh statement box. */
  public JTrap(SootClass exception, Unit beginStmt, Unit endStmt, Unit handlerStmt) {
    super(exception, Jimple.v().newStmtBox(beginStmt), Jimple.v().newStmtBox(endStmt), Jimple.v().newStmtBox(handlerStmt));
  }

  /** Creates a trap reusing the supplied unit boxes. */
  public JTrap(SootClass exception, UnitBox beginStmt, UnitBox endStmt, UnitBox handlerStmt) {
    super(exception, beginStmt, endStmt, handlerStmt);
  }

  /** Returns a copy referring to the same exception class and units (fresh boxes). */
  @Override
  public Object clone() {
    return new JTrap(exception, getBeginUnit(), getEndUnit(), getHandlerUnit());
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("Trap :");
    sb.append("\nbegin : ").append(getBeginUnit());
    sb.append("\nend : ").append(getEndUnit());
    sb.append("\nhandler: ").append(getHandlerUnit());
    return sb.toString();
  }
}
| 1,769
| 31.181818
| 123
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/internal/JUshrExpr.java
|
package soot.jimple.internal;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 1999 Patrick Lam
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.IntType;
import soot.LongType;
import soot.Type;
import soot.Unit;
import soot.UnknownType;
import soot.Value;
import soot.baf.Baf;
import soot.jimple.ExprSwitch;
import soot.jimple.Jimple;
import soot.jimple.UshrExpr;
import soot.util.Switch;
/**
 * Jimple representation of an unsigned right shift ({@code >>>}) expression.
 */
public class JUshrExpr extends AbstractJimpleIntLongBinopExpr implements UshrExpr {

  /** Builds an unsigned right-shift expression from the value and the shift amount. */
  public JUshrExpr(Value op1, Value op2) {
    super(op1, op2);
  }

  /** Returns the operator token used when printing this expression. */
  @Override
  public final String getSymbol() {
    return " >>> ";
  }

  /** Visitor dispatch to {@link ExprSwitch#caseUshrExpr}. */
  @Override
  public void apply(Switch sw) {
    ((ExprSwitch) sw).caseUshrExpr(this);
  }

  /** Emits the Baf-level ushr instruction, typed by the first operand's type. */
  @Override
  protected Unit makeBafInst(Type opType) {
    return Baf.v().newUshrInst(this.getOp1().getType());
  }

  /**
   * Result type follows the left operand (JLS §15.19): int for int-like, long
   * for long, provided the shift amount is int-like; otherwise unknown.
   */
  @Override
  public Type getType() {
    if (isIntLikeType(op2Box.getValue().getType())) {
      final Type t1 = op1Box.getValue().getType();
      if (isIntLikeType(t1)) {
        return IntType.v();
      }
      final LongType tyLong = LongType.v();
      if (tyLong.equals(t1)) {
        // Reuse the already-fetched instance instead of calling LongType.v()
        // a second time, consistent with the sibling JShlExpr/JShrExpr.
        return tyLong;
      }
    }
    return UnknownType.v();
  }

  /** Returns a deep copy of this expression, cloning operands where necessary. */
  @Override
  public Object clone() {
    return new JUshrExpr(Jimple.cloneIfNecessary(getOp1()), Jimple.cloneIfNecessary(getOp2()));
  }
}
| 2,022
| 24.935897
| 95
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/internal/JVirtualInvokeExpr.java
|
package soot.jimple.internal;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 1999 Patrick Lam
* Copyright (C) 2004 Ondrej Lhotak
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.ArrayList;
import java.util.List;
import java.util.ListIterator;
import soot.SootClass;
import soot.SootMethodRef;
import soot.Value;
import soot.ValueBox;
import soot.jimple.Jimple;
import soot.options.Options;
import soot.tagkit.SourceFileTag;
/**
 * Jimple {@code virtualinvoke} expression: a virtually-dispatched call on a base object.
 */
public class JVirtualInvokeExpr extends AbstractVirtualInvokeExpr {

  /**
   * Creates a virtual-invoke expression on the given base, wrapping every
   * argument in a fresh immediate box. Unless resolution errors are ignored,
   * the target method's declaring class must not be an interface.
   */
  public JVirtualInvokeExpr(Value base, SootMethodRef methodRef, List<? extends Value> args) {
    super(Jimple.v().newLocalBox(base), methodRef, new ValueBox[args.size()]);
    if (!Options.v().ignore_resolution_errors()) {
      rejectInterfaceTarget(methodRef);
    }
    final Jimple jimp = Jimple.v();
    int idx = 0;
    for (Value arg : args) {
      this.argBoxes[idx++] = jimp.newImmediateBox(arg);
    }
  }

  // Fails fast when the declaring class is an interface — such calls need
  // JInterfaceInvokeExpr instead.
  private static void rejectInterfaceTarget(SootMethodRef methodRef) {
    final SootClass sc = methodRef.declaringClass();
    // The class must be resolved at least to HIERARCHY level before isInterface() is meaningful.
    sc.checkLevelIgnoreResolving(SootClass.HIERARCHY);
    if (sc.isInterface()) {
      SourceFileTag tag = (SourceFileTag) sc.getTag(SourceFileTag.NAME);
      throw new RuntimeException("Trying to create virtual invoke expression for interface type (" + sc.getName()
          + " in file " + (tag != null ? tag.getAbsolutePath() : "unknown") + "). Use JInterfaceInvokeExpr instead!");
    }
  }

  /** Returns a deep copy with each argument cloned where necessary. */
  @Override
  public Object clone() {
    final int n = getArgCount();
    List<Value> argCopies = new ArrayList<Value>(n);
    for (int i = 0; i < n; i++) {
      argCopies.add(Jimple.cloneIfNecessary(getArg(i)));
    }
    return new JVirtualInvokeExpr(getBase(), methodRef, argCopies);
  }
}
| 2,524
| 34.069444
| 120
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/internal/JXorExpr.java
|
package soot.jimple.internal;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 1999 Patrick Lam
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.Type;
import soot.Unit;
import soot.Value;
import soot.baf.Baf;
import soot.jimple.ExprSwitch;
import soot.jimple.Jimple;
import soot.jimple.XorExpr;
import soot.util.Switch;
/**
 * A Jimple bitwise-exclusive-or expression ({@code op1 ^ op2}).
 */
public class JXorExpr extends AbstractJimpleIntLongBinopExpr implements XorExpr {

  public JXorExpr(Value op1, Value op2) {
    super(op1, op2);
  }

  /** Returns the printable operator, surrounded by single spaces. */
  @Override
  public final String getSymbol() {
    return " ^ ";
  }

  @Override
  public void apply(Switch sw) {
    // Dispatch to the XOR-specific case of the expression visitor.
    ((ExprSwitch) sw).caseXorExpr(this);
  }

  @Override
  protected Unit makeBafInst(Type opType) {
    // The Baf instruction is typed from the first operand, matching the sibling
    // int/long binop expressions in this package.
    Type operandType = this.getOp1().getType();
    return Baf.v().newXorInst(operandType);
  }

  @Override
  public Object clone() {
    Value leftCopy = Jimple.cloneIfNecessary(getOp1());
    Value rightCopy = Jimple.cloneIfNecessary(getOp2());
    return new JXorExpr(leftCopy, rightCopy);
  }
}
| 1,584
| 25.416667
| 94
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/internal/JimpleLocal.java
|
package soot.jimple.internal;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 1999 Patrick Lam
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.Collections;
import java.util.List;
import soot.Local;
import soot.Scene;
import soot.Type;
import soot.Unit;
import soot.UnitPrinter;
import soot.ValueBox;
import soot.baf.Baf;
import soot.jimple.ConvertToBaf;
import soot.jimple.JimpleToBafContext;
import soot.jimple.JimpleValueSwitch;
import soot.util.Numberer;
import soot.util.Switch;
public class JimpleLocal implements Local, ConvertToBaf {
  // The local's name; setName() interns it, so equal names share one String instance.
  protected String name;
  // The declared Jimple type of this local.
  protected Type type;
  // Slot assigned by the scene's local numberer; volatile for cross-thread visibility.
  private volatile int number = 0;

  /** Constructs a JimpleLocal of the given name and type. */
  public JimpleLocal(String name, Type type) {
    setName(name);
    setType(type);
    // NOTE(review): overridable method called from the constructor; a subclass override
    // runs before the subclass's own fields are initialized.
    addToNumberer();
  }

  // Registers this local with the scene-wide local numberer, if one is installed.
  protected void addToNumberer() {
    Numberer<Local> numberer = Scene.v().getLocalNumberer();
    if (numberer != null) {
      numberer.add(this);
    }
  }

  /** Returns true if the given object is structurally equal to this one. */
  // NOTE(review): equals() is not overridden, so "structural" equality here is in fact
  // reference equality, while equivHashCode() below hashes name and type.
  @Override
  public boolean equivTo(Object o) {
    return this.equals(o);
  }

  /**
   * Returns a hash code for this object, consistent with structural equality.
   */
  @Override
  public int equivHashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + ((name == null) ? 0 : name.hashCode());
    result = prime * result + ((type == null) ? 0 : type.hashCode());
    return result;
  }

  /** Returns a clone of the current JimpleLocal. */
  @Override
  public Object clone() {
    // do not intern the name again: pass null through the constructor (which would
    // intern), then copy the already-interned name field directly.
    JimpleLocal local = new JimpleLocal(null, type);
    local.name = name;
    return local;
  }

  /** Returns the name of this object. */
  @Override
  public String getName() {
    return name;
  }

  /** Sets the name of this object as given. */
  @Override
  public void setName(String name) {
    // Interned so that identity comparison of names is meaningful elsewhere.
    this.name = (name == null) ? null : name.intern();
  }

  /** Returns the type of this local. */
  @Override
  public Type getType() {
    return type;
  }

  /** Sets the type of this local. */
  @Override
  public void setType(Type t) {
    this.type = t;
  }

  @Override
  public String toString() {
    return getName();
  }

  @Override
  public void toString(UnitPrinter up) {
    up.local(this);
  }

  /** A local is a leaf value: it has no use boxes of its own. */
  @Override
  public final List<ValueBox> getUseBoxes() {
    return Collections.emptyList();
  }

  @Override
  public void apply(Switch sw) {
    ((JimpleValueSwitch) sw).caseLocal(this);
  }

  /**
   * Emits a Baf load instruction for this local's Baf counterpart, copying all tags of
   * the unit currently being converted.
   */
  @Override
  public void convertToBaf(JimpleToBafContext context, List<Unit> out) {
    Unit u = Baf.v().newLoadInst(getType(), context.getBafLocalOfJimpleLocal(this));
    u.addAllTagsOf(context.getCurrentUnit());
    out.add(u);
  }

  @Override
  public final int getNumber() {
    return number;
  }

  @Override
  public void setNumber(int number) {
    this.number = number;
  }

  /** A local counts as a stack local when its name uses the '$' prefix convention. */
  @Override
  public boolean isStackLocal() {
    String n = getName();
    return n != null && n.charAt(0) == '$';
  }
}
| 3,709
| 22.935484
| 84
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/internal/JimpleLocalBox.java
|
package soot.jimple.internal;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 1999 Patrick Lam
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.AbstractValueBox;
import soot.Value;
/**
 * A ValueBox that may hold only a {@link JimpleLocal}.
 */
public class JimpleLocalBox extends AbstractValueBox {

  public JimpleLocalBox(Value value) {
    setValue(value);
  }

  /** Accepts JimpleLocal instances and nothing else. */
  @Override
  public boolean canContainValue(Value v) {
    return (v instanceof JimpleLocal);
  }
}
| 1,105
| 27.358974
| 71
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/internal/RValueBox.java
|
package soot.jimple.internal;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 1999 Patrick Lam
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.AbstractValueBox;
import soot.Immediate;
import soot.Value;
import soot.jimple.ConcreteRef;
import soot.jimple.Expr;
/**
 * A ValueBox that can hold anything legal on the right-hand side of an assignment:
 * an immediate, a concrete reference, or an expression.
 */
public class RValueBox extends AbstractValueBox {

  public RValueBox(Value value) {
    setValue(value);
  }

  /** Accepts immediates, concrete references and expressions. */
  @Override
  public boolean canContainValue(Value v) {
    if (v instanceof Immediate) {
      return true;
    }
    return (v instanceof ConcreteRef) || (v instanceof Expr);
  }
}
| 1,230
| 28.309524
| 95
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/internal/StmtBox.java
|
package soot.jimple.internal;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 1999 Patrick Lam
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.AbstractUnitBox;
import soot.Unit;
import soot.jimple.Stmt;
/**
 * A UnitBox restricted to Jimple statements.
 */
public class StmtBox extends AbstractUnitBox {

  public StmtBox(Stmt s) {
    setUnit(s);
  }

  /** Accepts any Stmt; null is also allowed (an explicitly empty box). */
  @Override
  public boolean canContainUnit(Unit unit) {
    return (unit == null) || (unit instanceof Stmt);
  }
}
| 1,099
| 26.5
| 71
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/internal/VariableBox.java
|
package soot.jimple.internal;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 1999 Patrick Lam
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.AbstractValueBox;
import soot.Local;
import soot.Value;
import soot.jimple.ConcreteRef;
/**
 * A ValueBox for assignable targets: a local or a concrete reference.
 */
public class VariableBox extends AbstractValueBox {

  public VariableBox(Value value) {
    setValue(value);
  }

  /** Accepts locals and concrete references only. */
  @Override
  public boolean canContainValue(Value v) {
    if (v instanceof Local) {
      return true;
    }
    return v instanceof ConcreteRef;
  }
}
| 1,176
| 27.707317
| 71
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/paddle/IPaddleTransformer.java
|
package soot.jimple.paddle;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2005 Ondrej Lhotak
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.SceneTransformer;
/**
* Soot interface to the PaddleTransformer.
*
* @author Ondrej Lhotak
*/
public abstract class IPaddleTransformer extends SceneTransformer {
  // The concrete implementation ("soot.jimple.paddle.PaddleTransformer") is loaded
  // reflectively by PaddleHook, since Paddle ships separately from Soot.
  /**
   * This is called when Soot finishes executing all interprocedural phases. Paddle uses it to stop profiling if profiling is
   * enabled.
   */
  public abstract void finishPhases();
}
| 1,199
| 29.769231
| 125
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/paddle/PaddleField.java
|
package soot.jimple.paddle;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2002 - 2003 Ondrej Lhotak
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.util.Numberable;
/**
* Represents a field.
*
* @author Ondrej Lhotak
*/
public interface PaddleField extends Numberable {
  // Marker interface: declares no methods of its own. Extending Numberable lets
  // implementations be stored in Paddle's numbered data structures.
}
| 976
| 27.735294
| 71
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/paddle/PaddleHook.java
|
package soot.jimple.paddle;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2004 - 2005 Ondrej Lhotak
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.Map;
import soot.G;
import soot.SceneTransformer;
import soot.Singletons;
/**
* Loads the Paddle plugin.
*
* @author Ondrej Lhotak
*/
public class PaddleHook extends SceneTransformer {
public PaddleHook(Singletons.Global g) {
}
public static PaddleHook v() {
return G.v().soot_jimple_paddle_PaddleHook();
}
private IPaddleTransformer paddleTransformer;
public IPaddleTransformer paddleTransformer() {
if (paddleTransformer == null) {
paddleTransformer = (IPaddleTransformer) instantiate("soot.jimple.paddle.PaddleTransformer");
}
return paddleTransformer;
}
protected void internalTransform(String phaseName, Map<String, String> options) {
paddleTransformer().transform(phaseName, options);
}
public Object instantiate(String className) {
Object ret;
try {
ret = Class.forName(className).newInstance();
} catch (ClassNotFoundException e) {
throw new RuntimeException("Could not find " + className + ". Did you include Paddle on your Java classpath?");
} catch (InstantiationException e) {
throw new RuntimeException("Could not instantiate " + className + ": " + e);
} catch (IllegalAccessException e) {
throw new RuntimeException("Could not instantiate " + className + ": " + e);
}
return ret;
}
private Object paddleG;
public Object paddleG() {
if (paddleG == null) {
paddleG = instantiate("soot.PaddleG");
}
return paddleG;
}
/**
* This is called when Soot finishes executing all interprocedural phases. Paddle uses it to stop profiling if profiling is
* enabled.
*/
public void finishPhases() {
if (paddleTransformer != null) {
paddleTransformer().finishPhases();
}
}
}
| 2,591
| 27.8
| 125
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/parser/BodyExtractorWalker.java
|
package soot.jimple.parser;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2000 Patrice Pominville
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import soot.SootClass;
import soot.SootMethod;
import soot.SootResolver;
import soot.Type;
import soot.jimple.JimpleBody;
import soot.jimple.parser.node.AFieldMember;
import soot.jimple.parser.node.AFile;
import soot.jimple.parser.node.AFullMethodBody;
import soot.jimple.parser.node.AMethodMember;
import soot.jimple.parser.node.PModifier;
import soot.options.Options;
/**
* Walks a jimple AST and constructs the method bodies for all the methods of the SootClass associated with this walker (see
* constructor). note: Contrary to the plain "Walker", this walker does not create a SootClass, or interact with the scene.
* It merely adds method bodies for each of the methods of the SootClass it was initialized with.
*/
/* Modified By Marc Berndl May 17th */
public class BodyExtractorWalker extends Walker {
  private static final Logger logger = LoggerFactory.getLogger(BodyExtractorWalker.class);

  // Receives the parsed body of every concrete method of mSootClass.
  Map<SootMethod, JimpleBody> methodToParsedBodyMap;

  /**
   * Constructs a walker, and attaches it to the given SootClass, sending bodies to the given methodToParsedBodyMap.
   */
  public BodyExtractorWalker(SootClass sc, SootResolver resolver, Map<SootMethod, JimpleBody> methodToParsedBodyMap) {
    super(sc, resolver);
    this.methodToParsedBodyMap = methodToParsedBodyMap;
  }

  /*
   * file = modifier* file_type class_name extends_clause? implements_clause? file_body;
   */
  public void caseAFile(AFile node) {
    inAFile(node);
    {
      Object temp[] = node.getModifier().toArray();
      for (Object element : temp) {
        ((PModifier) element).apply(this);
      }
    }
    if (node.getFileType() != null) {
      node.getFileType().apply(this);
    }
    if (node.getClassName() != null) {
      node.getClassName().apply(this);
    }
    // This walker never creates a class; the parsed class name must match the class it
    // was constructed for.
    String className = (String) mProductions.removeLast();
    if (!className.equals(mSootClass.getName())) {
      throw new RuntimeException("expected: " + className + ", but got: " + mSootClass.getName());
    }
    if (node.getExtendsClause() != null) {
      node.getExtendsClause().apply(this);
    }
    if (node.getImplementsClause() != null) {
      node.getImplementsClause().apply(this);
    }
    if (node.getFileBody() != null) {
      node.getFileBody().apply(this);
    }
    outAFile(node);
  }

  public void outAFile(AFile node) {
    // Discard productions pushed by clauses that are irrelevant when only extracting bodies.
    if (node.getImplementsClause() != null) {
      mProductions.removeLast(); // implements_clause
    }
    if (node.getExtendsClause() != null) {
      mProductions.removeLast(); // extends_clause
    }
    mProductions.removeLast(); // file_type
    mProductions.addLast(mSootClass);
  }

  /*
   * member = {field} modifier* type name semicolon | {method} modifier* type name l_paren parameter_list? r_paren
   * throws_clause? method_body;
   */
  public void outAFieldMember(AFieldMember node) {
    mProductions.removeLast(); // name
    mProductions.removeLast(); // type
  }

  public void outAMethodMember(AMethodMember node) {
    // Pop productions in reverse order of how the grammar pushed them.
    JimpleBody methodBody = null;
    if (node.getMethodBody() instanceof AFullMethodBody) {
      methodBody = (JimpleBody) mProductions.removeLast();
    }
    List throwsClause = null;
    if (node.getThrowsClause() != null) {
      throwsClause = (List) mProductions.removeLast();
    }
    List<Type> parameterList = new ArrayList<Type>();
    if (node.getParameterList() != null) {
      parameterList = (List) mProductions.removeLast();
    }
    String name = (String) mProductions.removeLast(); // name
    Type type = (Type) mProductions.removeLast(); // type

    String subSignature = SootMethod.getSubSignature(name, parameterList, type);
    SootMethod sm = mSootClass.getMethodUnsafe(subSignature);
    if (sm == null) {
      logger.debug("[!!! Couldn't parse !!] " + subSignature);
      logger.debug("[!] Methods in class are:");
      for (SootMethod next : mSootClass.getMethods()) {
        logger.debug("" + next.getSubSignature());
      }
      // Bug fix: previously execution fell through after this diagnostic and
      // dereferenced the null 'sm' below, causing a NullPointerException.
      return;
    }
    if (Options.v().verbose()) {
      logger.debug("[Jimple parser] " + subSignature);
    }
    if (sm.isConcrete() && methodBody != null) {
      if (Options.v().verbose()) {
        logger.debug("[Parsed] " + sm.getDeclaration());
      }
      methodBody.setMethod(sm);
      methodToParsedBodyMap.put(sm, methodBody);
    } else if (node.getMethodBody() instanceof AFullMethodBody) {
      if (sm.isPhantom() && Options.v().verbose()) {
        logger.debug("[jimple parser] phantom method!");
      }
      throw new RuntimeException("Impossible: !concrete => ! instanceof " + sm.getName());
    }
  }
}
| 5,596
| 31.352601
| 124
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/parser/CstPoolExtractor.java
|
package soot.jimple.parser;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2000 Patrice Pominville
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.HashSet;
import java.util.Set;
import soot.Scene;
import soot.jimple.parser.analysis.DepthFirstAdapter;
import soot.jimple.parser.node.AFullIdentClassName;
import soot.jimple.parser.node.AFullIdentNonvoidType;
import soot.jimple.parser.node.AIdentClassName;
import soot.jimple.parser.node.AIdentNonvoidType;
import soot.jimple.parser.node.AQuotedClassName;
import soot.jimple.parser.node.AQuotedNonvoidType;
import soot.jimple.parser.node.Start;
import soot.util.StringTools;
/**
* Walks a jimple AST, extracting all the contained reference type names.
*/
class CstPoolExtractor {

  // Collected reference-type names; built lazily on the first getCstPool() call.
  private Set<String> mRefTypes = null;
  private Start mParseTree;

  public CstPoolExtractor(Start parseTree) {
    mParseTree = parseTree;
  }

  /**
   * Returns the set of reference-type names contained in the parse tree. The walk runs
   * once and the result is cached; the tree reference is dropped afterwards so it can
   * be garbage collected.
   */
  public Set<String> getCstPool() {
    if (mRefTypes == null) {
      mRefTypes = new HashSet<String>();
      mParseTree.apply(new CstPoolExtractorWalker());
      mParseTree = null; // allow garbage collection
    }
    return mRefTypes;
  }

  private class CstPoolExtractorWalker extends DepthFirstAdapter {
    CstPoolExtractorWalker() {
    }

    public void inStart(Start node) {
      defaultIn(node);
    }

    // Strips the surrounding quote characters, un-escapes the remainder and records it.
    private void recordQuoted(String token) {
      String inner = token.substring(1, token.length() - 1);
      mRefTypes.add(StringTools.getUnEscapedStringOf(inner));
    }

    // Applies Scene-level name un-escaping before the generic string un-escaping.
    private void recordFullIdent(String token) {
      String unescaped = Scene.v().unescapeName(token);
      mRefTypes.add(StringTools.getUnEscapedStringOf(unescaped));
    }

    // Records a plain identifier after generic string un-escaping.
    private void recordIdent(String token) {
      mRefTypes.add(StringTools.getUnEscapedStringOf(token));
    }

    public void outAQuotedClassName(AQuotedClassName node) {
      recordQuoted(node.getQuotedName().getText());
    }

    public void outAIdentClassName(AIdentClassName node) {
      recordIdent(node.getIdentifier().getText());
    }

    public void outAFullIdentClassName(AFullIdentClassName node) {
      recordFullIdent(node.getFullIdentifier().getText());
    }

    public void outAQuotedNonvoidType(AQuotedNonvoidType node) {
      recordQuoted(node.getQuotedName().getText());
    }

    public void outAFullIdentNonvoidType(AFullIdentNonvoidType node) {
      recordFullIdent(node.getFullIdentifier().getText());
    }

    public void outAIdentNonvoidType(AIdentNonvoidType node) {
      recordIdent(node.getIdentifier().getText());
    }
  }
}
| 3,714
| 30.483051
| 73
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/parser/JimpleAST.java
|
package soot.jimple.parser;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2000 Patrice Pominville
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PushbackReader;
import java.util.HashMap;
import java.util.Set;
import soot.Body;
import soot.Scene;
import soot.SootClass;
import soot.SootMethod;
import soot.SootResolver;
import soot.jimple.JimpleBody;
import soot.jimple.parser.lexer.Lexer;
import soot.jimple.parser.lexer.LexerException;
import soot.jimple.parser.node.Start;
import soot.jimple.parser.parser.Parser;
import soot.jimple.parser.parser.ParserException;
/**
* This class encapsulates a JimpleAST instance and provides methods to act on it.
*/
public class JimpleAST {
  private Start mTree = null;

  // Lazily built map from method to its parsed body. Bug fix: the field must be
  // volatile for the double-checked locking in getBody() to publish the fully
  // populated map safely across threads.
  private volatile HashMap<SootMethod, JimpleBody> methodToParsedBodyMap = null;

  /**
   * Constructs a JimpleAST and generates its parse tree from the given InputStream.
   *
   * @param aJIS
   *          The InputStream to parse. The caller remains responsible for closing it.
   */
  public JimpleAST(InputStream aJIS) throws ParserException, LexerException, IOException {
    // The 1024-char pushback buffer is required by the generated SableCC lexer.
    Parser p = new Parser(new Lexer(new PushbackReader(new BufferedReader(new InputStreamReader(aJIS)), 1024)));
    mTree = p.parse();
  }

  /**
   * Reads an entire class from jimple, creates the Soot objects & returns it.
   */
  public SootClass createSootClass() {
    Walker w = new Walker(SootResolver.v());
    mTree.apply(w);
    return w.getSootClass();
  }

  /**
   * Applies a SkeletonExtractorWalker to the given SootClass, using the given Resolver to resolve the reference types it
   * contains. The given SootClass instance will be filled to contain a class skeleton: that is no Body instances will be
   * created for the class' methods.
   *
   * @param sc
   *          a SootClass to fill in.
   */
  public void getSkeleton(SootClass sc) {
    Walker w = new SkeletonExtractorWalker(SootResolver.v(), sc);
    mTree.apply(w);
  }

  /**
   * Returns a body corresponding to the parsed jimple for m. If necessary, applies the BodyExtractorWalker to initialize the
   * bodies map.
   *
   * @param m
   *          the method we want to get a body for.
   * @return the actual body for the given method, or null if none was parsed.
   */
  public Body getBody(SootMethod m) {
    if (methodToParsedBodyMap == null) {
      synchronized (this) {
        if (methodToParsedBodyMap == null) {
          stashBodiesForClass(m.getDeclaringClass());
        }
      }
    }
    return methodToParsedBodyMap.get(m);
  }

  /**
   * Extracts the reference constant pool for this JimpleAST.
   *
   * @return the Set of type names for the reference types contained in this AST.
   */
  public Set<String> getCstPool() {
    CstPoolExtractor cpe = new CstPoolExtractor(mTree);
    return cpe.getCstPool();
  }

  /** Returns the SootResolver currently in use. */
  public SootResolver getResolver() {
    return SootResolver.v();
  }

  /*
   * Runs a BodyExtractorWalker over the parse tree, collecting a body for each concrete
   * method of the given SootClass, then publishes the completed map.
   */
  private void stashBodiesForClass(SootClass sc) {
    HashMap<SootMethod, JimpleBody> methodToBodyMap = new HashMap<SootMethod, JimpleBody>();
    Walker w = new BodyExtractorWalker(sc, SootResolver.v(), methodToBodyMap);
    boolean oldPhantomValue = Scene.v().getPhantomRefs();
    Scene.v().setPhantomRefs(true);
    try {
      mTree.apply(w);
    } finally {
      // Bug fix: restore the phantom-refs flag even if the walk throws; previously an
      // exception left the global flag stuck at true.
      Scene.v().setPhantomRefs(oldPhantomValue);
    }
    // Assign only after the map is fully populated (safe publication via volatile).
    methodToParsedBodyMap = methodToBodyMap;
  }
}
| 4,268
| 30.160584
| 125
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/parser/Parse.java
|
package soot.jimple.parser;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2000 Patrice Pominville
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PushbackReader;
import java.util.HashMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import soot.Scene;
import soot.SootClass;
import soot.SootMethod;
import soot.jimple.JimpleBody;
import soot.jimple.parser.lexer.Lexer;
import soot.jimple.parser.lexer.LexerException;
import soot.jimple.parser.node.Start;
import soot.jimple.parser.parser.Parser;
import soot.jimple.parser.parser.ParserException;
import soot.util.EscapedReader;
/** Provides a test-driver for the Jimple parser. */
@Deprecated
public class Parse {
  private static final Logger logger = LoggerFactory.getLogger(Parse.class);

  // Extension appended when a bare name given on the command line cannot be found.
  private static final String EXT = ".jimple";
  private static final String USAGE = "usage: java Parse [options] " + "jimple_file [jimple_file ...]";

  /*
   * Parses a jimple input stream. If you just want to get the method bodies for a SootClass, pass as the second argument the
   * SootClass whose method bodies you want filled in. If you want a new SootClass created for the input stream, pass null as
   * the 2nd argument.
   */
  static public SootClass parse(InputStream istream, SootClass sc) {
    Start tree = null;
    // The EscapedReader handles escape sequences in the input; the 1024-char pushback
    // buffer is required by the generated SableCC lexer.
    Parser p = new Parser(
        new Lexer(new PushbackReader(new EscapedReader(new BufferedReader(new InputStreamReader(istream))), 1024)));
    try {
      tree = p.parse();
    } catch (ParserException e) {
      throw new RuntimeException("Parser exception occurred: " + e);
    } catch (LexerException e) {
      throw new RuntimeException("Lexer exception occurred: " + e);
    } catch (IOException e) {
      throw new RuntimeException("IOException occurred: " + e);
    }
    // A plain Walker builds a fresh SootClass; a BodyExtractorWalker only fills bodies
    // into the supplied class.
    Walker w;
    if (sc == null) {
      w = new Walker(null);
    } else {
      w = new BodyExtractorWalker(sc, null, new HashMap<SootMethod, JimpleBody>());
    }
    tree.apply(w);
    return w.getSootClass();
  }

  public static void main(String args[]) throws java.lang.Exception
  {
    boolean verbose = false;
    InputStream inFile;
    // check arguments
    if (args.length < 1) {
      logger.debug("" + USAGE);
      System.exit(0);
    }
    Scene.v().setPhantomRefs(true);
    for (String arg : args) {
      if (arg.startsWith("-")) {
        // Option handling: strip the dash and check the flag letter.
        arg = arg.substring(1);
        if (arg.equals("d")) {
          // NOTE(review): "-d" is accepted but currently has no effect.
        } else if (arg.equals("v")) {
          verbose = true;
        }
      } else {
        try {
          if (verbose) {
            logger.debug(" ... looking for " + arg);
          }
          inFile = new FileInputStream(arg);
        } catch (FileNotFoundException e) {
          // Retry with the ".jimple" extension appended, unless it was already present.
          if (arg.endsWith(EXT)) {
            logger.debug(" *** can't find " + arg);
            continue;
          }
          arg = arg + EXT;
          try {
            if (verbose) {
              logger.debug(" ... looking for " + arg);
            }
            inFile = new BufferedInputStream(new FileInputStream(arg));
          } catch (FileNotFoundException ee) {
            logger.debug(" *** can't find " + arg);
            continue;
          }
        }
        // Parse the file and walk it; the walk exercises the parser and builds a class.
        Parser p = new Parser(new Lexer(new PushbackReader(new InputStreamReader(inFile), 1024)));
        Start tree = p.parse();
        tree.apply(new Walker(null));
      }
    }
  } // main
} // Parse
| 4,273
| 28.888112
| 125
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/parser/SkeletonExtractorWalker.java
|
package soot.jimple.parser;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2000 Patrice Pominville
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import soot.Modifier;
import soot.Scene;
import soot.SootClass;
import soot.SootMethod;
import soot.SootResolver;
import soot.Type;
import soot.jimple.parser.node.AFile;
import soot.jimple.parser.node.AMethodMember;
import soot.jimple.parser.node.AThrowsClause;
import soot.jimple.parser.node.PModifier;
/*
Walks a jimple AST and extracts the fields, and method signatures and produces
a new squeleton SootClass instance.
*/
/**
 * Walks a Jimple AST and extracts only the class signature: modifiers, superclass,
 * interfaces and method signatures. Method bodies are deliberately not visited, so the
 * result is a skeleton {@link SootClass} resolved to the SIGNATURES level.
 */
public class SkeletonExtractorWalker extends Walker {
  /** Creates a walker that fills the signature of {@code aSootClass} (may be null to create a new class). */
  public SkeletonExtractorWalker(SootResolver aResolver, SootClass aSootClass) {
    super(aSootClass, aResolver);
  }
  public SkeletonExtractorWalker(SootResolver aResolver) {
    super(aResolver);
  }
  /*
   * file = modifier* file_type class_name extends_clause? implements_clause? file_body;
   *
   * Overridden so that a missing target class is created at the SIGNATURES (not BODIES)
   * resolving level, and so that the class-name check happens before visiting the body.
   */
  public void caseAFile(AFile node) {
    inAFile(node);
    {
      // Copy to an array first: applying a child may mutate the node list (SableCC pattern).
      Object temp[] = node.getModifier().toArray();
      for (Object element : temp) {
        ((PModifier) element).apply(this);
      }
    }
    if (node.getFileType() != null) {
      node.getFileType().apply(this);
    }
    if (node.getClassName() != null) {
      node.getClassName().apply(this);
    }
    String className = (String) mProductions.removeLast();
    if (mSootClass == null) {
      mSootClass = new SootClass(className);
      mSootClass.setResolvingLevel(SootClass.SIGNATURES);
    } else {
      if (!className.equals(mSootClass.getName())) {
        throw new RuntimeException("expected: " + className + ", but got: " + mSootClass.getName());
      }
    }
    if (node.getExtendsClause() != null) {
      node.getExtendsClause().apply(this);
    }
    if (node.getImplementsClause() != null) {
      node.getImplementsClause().apply(this);
    }
    if (node.getFileBody() != null) {
      node.getFileBody().apply(this);
    }
    outAFile(node);
  }
  /**
   * Installs the class-level signature on mSootClass. Pops, in order: implements list
   * (if present), superclass name (if present), file type ("class"/"interface").
   * Finally pushes the finished class.
   */
  public void outAFile(AFile node) {
    List<String> implementsList = null;
    String superClass = null;
    if (node.getImplementsClause() != null) {
      implementsList = (List<String>) mProductions.removeLast();
    }
    if (node.getExtendsClause() != null) {
      superClass = (String) mProductions.removeLast();
    }
    String classType = (String) mProductions.removeLast();
    int modifierFlags = processModifiers(node.getModifier());
    if (classType.equals("interface")) {
      modifierFlags |= Modifier.INTERFACE;
    }
    mSootClass.setModifiers(modifierFlags);
    if (superClass != null) {
      mSootClass.setSuperclass(mResolver.makeClassRef(superClass));
    }
    if (implementsList != null) {
      for (String interfaceName : implementsList) {
        mSootClass.addInterface(mResolver.makeClassRef(interfaceName));
      }
    }
    mProductions.addLast(mSootClass);
  }
  /*
   * member = {field} modifier* type name semicolon | {method} modifier* type name l_paren parameter_list? r_paren
   * throws_clause? method_body;
   */
  public void caseAMethodMember(AMethodMember node) {
    inAMethodMember(node);
    {
      // Copy to an array first: applying a child may mutate the node list (SableCC pattern).
      Object temp[] = node.getModifier().toArray();
      for (Object element : temp) {
        ((PModifier) element).apply(this);
      }
    }
    if (node.getType() != null) {
      node.getType().apply(this);
    }
    if (node.getName() != null) {
      node.getName().apply(this);
    }
    if (node.getLParen() != null) {
      node.getLParen().apply(this);
    }
    if (node.getParameterList() != null) {
      node.getParameterList().apply(this);
    }
    if (node.getRParen() != null) {
      node.getRParen().apply(this);
    }
    if (node.getThrowsClause() != null) {
      node.getThrowsClause().apply(this);
    }
    // Deliberately do NOT visit the method body: only the signature is extracted.
    outAMethodMember(node);
  }
  /**
   * Builds a signature-only SootMethod. Pops, in order: throws clause (if present),
   * parameter types (if present), method name, return type.
   */
  public void outAMethodMember(AMethodMember node) {
    List<SootClass> throwsClause = null;
    if (node.getThrowsClause() != null) {
      throwsClause = (List<SootClass>) mProductions.removeLast();
    }
    List<Type> parameterList;
    if (node.getParameterList() != null) {
      parameterList = (List<Type>) mProductions.removeLast();
    } else {
      parameterList = new ArrayList<Type>();
    }
    String name = (String) mProductions.removeLast();
    Type type = (Type) mProductions.removeLast();
    int modifier = processModifiers(node.getModifier());
    SootMethod method;
    if (throwsClause != null) {
      method = Scene.v().makeSootMethod(name, parameterList, type, modifier, throwsClause);
    } else {
      method = Scene.v().makeSootMethod(name, parameterList, type, modifier);
    }
    mSootClass.addMethod(method);
  }
  /*
   * throws_clause = throws class_name_list;
   */
  public void outAThrowsClause(AThrowsClause node) {
    // Pops the class-name list and replaces it with the resolved SootClass list.
    List<String> classNames = (List<String>) mProductions.removeLast();
    List<SootClass> exceptionClasses = new ArrayList<SootClass>(classNames.size());
    for (String className : classNames) {
      exceptionClasses.add(mResolver.makeClassRef(className));
    }
    mProductions.addLast(exceptionClasses);
  }
}
| 6,025
| 26.642202
| 114
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/parser/Walker.java
|
package soot.jimple.parser;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2000 Patrice Pominville
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import soot.ArrayType;
import soot.BooleanType;
import soot.ByteType;
import soot.CharType;
import soot.DoubleType;
import soot.FloatType;
import soot.Immediate;
import soot.IntType;
import soot.Local;
import soot.LongType;
import soot.Modifier;
import soot.NullType;
import soot.RefType;
import soot.Scene;
import soot.ShortType;
import soot.SootClass;
import soot.SootField;
import soot.SootFieldRef;
import soot.SootMethod;
import soot.SootMethodRef;
import soot.SootResolver;
import soot.Trap;
import soot.Type;
import soot.Unit;
import soot.UnitBox;
import soot.UnknownType;
import soot.Value;
import soot.VoidType;
import soot.jimple.BinopExpr;
import soot.jimple.ClassConstant;
import soot.jimple.DoubleConstant;
import soot.jimple.Expr;
import soot.jimple.FloatConstant;
import soot.jimple.IntConstant;
import soot.jimple.Jimple;
import soot.jimple.JimpleBody;
import soot.jimple.LongConstant;
import soot.jimple.NullConstant;
import soot.jimple.Stmt;
import soot.jimple.StringConstant;
import soot.jimple.UnopExpr;
import soot.jimple.parser.analysis.DepthFirstAdapter;
import soot.jimple.parser.node.AAbstractModifier;
import soot.jimple.parser.node.AAndBinop;
import soot.jimple.parser.node.AAnnotationModifier;
import soot.jimple.parser.node.AArrayDescriptor;
import soot.jimple.parser.node.AArrayNewExpr;
import soot.jimple.parser.node.AArrayReference;
import soot.jimple.parser.node.AAssignStatement;
import soot.jimple.parser.node.ABaseNonvoidType;
import soot.jimple.parser.node.ABinopBoolExpr;
import soot.jimple.parser.node.ABinopExpr;
import soot.jimple.parser.node.ABooleanBaseType;
import soot.jimple.parser.node.ABooleanBaseTypeNoName;
import soot.jimple.parser.node.ABreakpointStatement;
import soot.jimple.parser.node.AByteBaseType;
import soot.jimple.parser.node.AByteBaseTypeNoName;
import soot.jimple.parser.node.ACaseStmt;
import soot.jimple.parser.node.ACastExpression;
import soot.jimple.parser.node.ACatchClause;
import soot.jimple.parser.node.ACharBaseType;
import soot.jimple.parser.node.ACharBaseTypeNoName;
import soot.jimple.parser.node.AClassFileType;
import soot.jimple.parser.node.AClassNameBaseType;
import soot.jimple.parser.node.AClassNameMultiClassNameList;
import soot.jimple.parser.node.AClassNameSingleClassNameList;
import soot.jimple.parser.node.AClzzConstant;
import soot.jimple.parser.node.ACmpBinop;
import soot.jimple.parser.node.ACmpeqBinop;
import soot.jimple.parser.node.ACmpgBinop;
import soot.jimple.parser.node.ACmpgeBinop;
import soot.jimple.parser.node.ACmpgtBinop;
import soot.jimple.parser.node.ACmplBinop;
import soot.jimple.parser.node.ACmpleBinop;
import soot.jimple.parser.node.ACmpltBinop;
import soot.jimple.parser.node.ACmpneBinop;
import soot.jimple.parser.node.AConstantCaseLabel;
import soot.jimple.parser.node.ADeclaration;
import soot.jimple.parser.node.ADivBinop;
import soot.jimple.parser.node.ADoubleBaseType;
import soot.jimple.parser.node.ADoubleBaseTypeNoName;
import soot.jimple.parser.node.ADynamicInvokeExpr;
import soot.jimple.parser.node.AEntermonitorStatement;
import soot.jimple.parser.node.AEnumModifier;
import soot.jimple.parser.node.AExitmonitorStatement;
import soot.jimple.parser.node.AFieldMember;
import soot.jimple.parser.node.AFieldSignature;
import soot.jimple.parser.node.AFile;
import soot.jimple.parser.node.AFinalModifier;
import soot.jimple.parser.node.AFloatBaseType;
import soot.jimple.parser.node.AFloatBaseTypeNoName;
import soot.jimple.parser.node.AFloatConstant;
import soot.jimple.parser.node.AFullIdentNonvoidType;
import soot.jimple.parser.node.AFullMethodBody;
import soot.jimple.parser.node.AGotoStatement;
import soot.jimple.parser.node.AIdentNonvoidType;
import soot.jimple.parser.node.AIdentityNoTypeStatement;
import soot.jimple.parser.node.AIdentityStatement;
import soot.jimple.parser.node.AIfStatement;
import soot.jimple.parser.node.AInstanceofExpression;
import soot.jimple.parser.node.AIntBaseType;
import soot.jimple.parser.node.AIntBaseTypeNoName;
import soot.jimple.parser.node.AIntegerConstant;
import soot.jimple.parser.node.AInterfaceFileType;
import soot.jimple.parser.node.AInterfaceNonstaticInvoke;
import soot.jimple.parser.node.AInvokeStatement;
import soot.jimple.parser.node.ALabelStatement;
import soot.jimple.parser.node.ALengthofUnop;
import soot.jimple.parser.node.ALocalFieldRef;
import soot.jimple.parser.node.ALocalImmediate;
import soot.jimple.parser.node.ALocalVariable;
import soot.jimple.parser.node.ALongBaseType;
import soot.jimple.parser.node.ALongBaseTypeNoName;
import soot.jimple.parser.node.ALookupswitchStatement;
import soot.jimple.parser.node.AMethodMember;
import soot.jimple.parser.node.AMethodSignature;
import soot.jimple.parser.node.AMinusBinop;
import soot.jimple.parser.node.AModBinop;
import soot.jimple.parser.node.AMultBinop;
import soot.jimple.parser.node.AMultiArgList;
import soot.jimple.parser.node.AMultiLocalNameList;
import soot.jimple.parser.node.AMultiNewExpr;
import soot.jimple.parser.node.AMultiParameterList;
import soot.jimple.parser.node.ANativeModifier;
import soot.jimple.parser.node.ANegUnop;
import soot.jimple.parser.node.ANonstaticInvokeExpr;
import soot.jimple.parser.node.ANopStatement;
import soot.jimple.parser.node.ANovoidType;
import soot.jimple.parser.node.ANullBaseType;
import soot.jimple.parser.node.ANullBaseTypeNoName;
import soot.jimple.parser.node.ANullConstant;
import soot.jimple.parser.node.AOrBinop;
import soot.jimple.parser.node.APlusBinop;
import soot.jimple.parser.node.APrivateModifier;
import soot.jimple.parser.node.AProtectedModifier;
import soot.jimple.parser.node.APublicModifier;
import soot.jimple.parser.node.AQuotedNonvoidType;
import soot.jimple.parser.node.ARetStatement;
import soot.jimple.parser.node.AReturnStatement;
import soot.jimple.parser.node.AShlBinop;
import soot.jimple.parser.node.AShortBaseType;
import soot.jimple.parser.node.AShortBaseTypeNoName;
import soot.jimple.parser.node.AShrBinop;
import soot.jimple.parser.node.ASigFieldRef;
import soot.jimple.parser.node.ASimpleNewExpr;
import soot.jimple.parser.node.ASingleArgList;
import soot.jimple.parser.node.ASingleLocalNameList;
import soot.jimple.parser.node.ASingleParameterList;
import soot.jimple.parser.node.ASpecialNonstaticInvoke;
import soot.jimple.parser.node.AStaticInvokeExpr;
import soot.jimple.parser.node.AStaticModifier;
import soot.jimple.parser.node.AStrictfpModifier;
import soot.jimple.parser.node.AStringConstant;
import soot.jimple.parser.node.ASynchronizedModifier;
import soot.jimple.parser.node.ATableswitchStatement;
import soot.jimple.parser.node.AThrowStatement;
import soot.jimple.parser.node.AThrowsClause;
import soot.jimple.parser.node.ATransientModifier;
import soot.jimple.parser.node.AUnknownJimpleType;
import soot.jimple.parser.node.AUnnamedMethodSignature;
import soot.jimple.parser.node.AUnopExpr;
import soot.jimple.parser.node.AUnopExpression;
import soot.jimple.parser.node.AUshrBinop;
import soot.jimple.parser.node.AVirtualNonstaticInvoke;
import soot.jimple.parser.node.AVoidType;
import soot.jimple.parser.node.AVolatileModifier;
import soot.jimple.parser.node.AXorBinop;
import soot.jimple.parser.node.Node;
import soot.jimple.parser.node.PModifier;
import soot.jimple.parser.node.Start;
import soot.jimple.parser.node.TAtIdentifier;
import soot.jimple.parser.node.TFloatConstant;
import soot.jimple.parser.node.TFullIdentifier;
import soot.jimple.parser.node.TIdentifier;
import soot.jimple.parser.node.TIntegerConstant;
import soot.jimple.parser.node.TQuotedName;
import soot.jimple.parser.node.TStringConstant;
import soot.jimple.parser.node.Token;
import soot.util.StringTools;
/*Modified By Marc Berndl 17th May */
public class Walker extends DepthFirstAdapter {
  private static final Logger logger = LoggerFactory.getLogger(Walker.class);
  // When true, every pop from mProductions is logged (see the Walker(SootResolver) constructor).
  boolean debug = false;
  // Work stack for the bottom-up AST walk: each out*() handler pops its children's
  // products and pushes its own result. Holds heterogeneous values (String, Type,
  // Value, Unit, List, ...), hence the raw LinkedList.
  LinkedList mProductions = new LinkedList();
  // The class being built/filled; created lazily in caseAFile when not supplied.
  SootClass mSootClass = null;
  // Locals of the method body currently being parsed, keyed by local name.
  Map<String, Local> mLocals = null;
  Value mValue = IntConstant.v(1);
  Map<Object, Unit> mLabelToStmtMap; // maps a label to the stmt following it
  // in the jimple source
  Map<String, List> mLabelToPatchList; // maps a label to the a list of stmts
  // that refer to the label (ie goto
  // lableX)
  protected final SootResolver mResolver;
  /**
   * Creates a walker that builds a brand-new SootClass from the parse tree.
   * When the debug flag is set, the production stack is replaced with a
   * LinkedList subclass that logs every pop, to trace the grammar actions.
   */
  public Walker(SootResolver resolver) {
    mResolver = resolver;
    if (debug) {
      mProductions = new LinkedList() {
        public Object removeLast() {
          Object o = super.removeLast();
          if (debug) {
            logger.debug("popped: " + o);
          }
          return o;
        }
      };
    }
  }
  /** Creates a walker that fills in the given existing SootClass (may be null). */
  public Walker(SootClass sc, SootResolver resolver) {
    mSootClass = sc;
    mResolver = resolver;
  }
public void outStart(Start node) {
SootClass c = (SootClass) mProductions.removeLast();
}
public SootClass getSootClass() {
if (mSootClass == null) {
throw new RuntimeException("did not parse class yet....");
}
return mSootClass;
}
/*
* file = modifier* file_type class_name extends_clause? implements_clause? file_body;
*/
  /** Entry hook for the file production; only logs when debug tracing is on. */
  public void inAFile(AFile node) {
    if (debug) {
      logger.debug("reading class " + node.getClassName());
    }
  }
  /**
   * file = modifier* file_type class_name extends_clause? implements_clause? file_body;
   *
   * Visits the children in grammar order so that outAFile can pop their products in
   * reverse order. Creates mSootClass (at BODIES level) as soon as the class name is
   * known, or validates it against a pre-supplied class.
   */
  public void caseAFile(AFile node) {
    inAFile(node);
    {
      // Copy to an array first: applying a child may mutate the node list.
      Object temp[] = node.getModifier().toArray();
      for (Object element : temp) {
        ((PModifier) element).apply(this);
      }
    }
    if (node.getFileType() != null) {
      node.getFileType().apply(this);
    }
    if (node.getClassName() != null) {
      node.getClassName().apply(this);
    }
    String className = (String) mProductions.removeLast();
    if (mSootClass == null) {
      mSootClass = new SootClass(className);
      mSootClass.setResolvingLevel(SootClass.BODIES);
    } else {
      if (!mSootClass.getName().equals(className)) {
        throw new RuntimeException("Invalid SootClass for this JimpleAST. The SootClass provided is of type: >"
            + mSootClass.getName() + "< whereas this parse tree is for type: >" + className + "<");
      }
    }
    if (node.getExtendsClause() != null) {
      node.getExtendsClause().apply(this);
    }
    if (node.getImplementsClause() != null) {
      node.getImplementsClause().apply(this);
    }
    if (node.getFileBody() != null) {
      node.getFileBody().apply(this);
    }
    outAFile(node);
  }
public void outAFile(AFile node) {
// not not pop members; they have been taken care of.
List implementsList = null;
String superClass = null;
String classType = null;
if (node.getImplementsClause() != null) {
implementsList = (List) mProductions.removeLast();
}
if (node.getExtendsClause() != null) {
superClass = (String) mProductions.removeLast();
}
classType = (String) mProductions.removeLast();
int modifierCount = node.getModifier().size();
int modifierFlags = processModifiers(node.getModifier());
if (classType.equals("interface")) {
modifierFlags |= Modifier.INTERFACE;
}
mSootClass.setModifiers(modifierFlags);
if (superClass != null) {
mSootClass.setSuperclass(mResolver.makeClassRef(superClass));
}
if (implementsList != null) {
Iterator implIt = implementsList.iterator();
while (implIt.hasNext()) {
SootClass interfaceClass = mResolver.makeClassRef((String) implIt.next());
mSootClass.addInterface(interfaceClass);
}
}
mProductions.addLast(mSootClass);
}
/*
* member = {field} modifier* type name semicolon | {method} modifier* type name l_paren parameter_list? r_paren
* throws_clause? method_body;
*/
public void outAFieldMember(AFieldMember node) {
int modifier = 0;
Type type = null;
String name = null;
name = (String) mProductions.removeLast();
type = (Type) mProductions.removeLast();
modifier = processModifiers(node.getModifier());
SootField f = Scene.v().makeSootField(name, type, modifier);
mSootClass.addField(f);
}
  /**
   * Builds a SootMethod and, for concrete methods, attaches the parsed JimpleBody.
   * Pops, in order: method body (only if the body production was a full body),
   * throws clause (if present), parameter types (if present), method name, return type.
   */
  public void outAMethodMember(AMethodMember node) {
    int modifier = 0;
    Type type;
    String name;
    List parameterList = null;
    List<SootClass> throwsClause = null;
    JimpleBody methodBody = null;
    if (node.getMethodBody() instanceof AFullMethodBody) {
      methodBody = (JimpleBody) mProductions.removeLast();
    }
    if (node.getThrowsClause() != null) {
      throwsClause = (List<SootClass>) mProductions.removeLast();
    }
    if (node.getParameterList() != null) {
      parameterList = (List) mProductions.removeLast();
    } else {
      parameterList = new ArrayList();
    }
    Object o = mProductions.removeLast();
    name = (String) o;
    type = (Type) mProductions.removeLast();
    modifier = processModifiers(node.getModifier());
    SootMethod method;
    if (throwsClause != null) {
      method = Scene.v().makeSootMethod(name, parameterList, type, modifier, throwsClause);
    } else {
      method = Scene.v().makeSootMethod(name, parameterList, type, modifier);
    }
    mSootClass.addMethod(method);
    // NOTE(review): if the method is concrete but the body was not an AFullMethodBody,
    // methodBody is null here and setMethod would NPE; the else-if below asserts the
    // inverse relationship instead — verify both invariants hold for all inputs.
    if (method.isConcrete()) {
      methodBody.setMethod(method);
      method.setActiveBody(methodBody);
    } else if (node.getMethodBody() instanceof AFullMethodBody) {
      throw new RuntimeException("Impossible: !concrete => ! instanceof");
    }
  }
/*
* type = {void} void | {novoid} nonvoid_type;
*/
  /** type = {void}: pushes the VoidType singleton. */
  public void outAVoidType(AVoidType node) {
    mProductions.addLast(VoidType.v());
  }
/*
* nonvoid_type = {base} base_type_no_name array_brackets*; {quoted} quoted_name array_brackets* | {ident} identifier
* array_brackets* | {full_ident} full_identifier array_brackets*;
*/
public void outABaseNonvoidType(ABaseNonvoidType node) {
Type t = (Type) mProductions.removeLast();
int dim = node.getArrayBrackets().size();
if (dim > 0) {
t = ArrayType.v(t, dim);
}
mProductions.addLast(t);
}
public void outAQuotedNonvoidType(AQuotedNonvoidType node) {
String typeName = (String) mProductions.removeLast();
Type t = RefType.v(typeName);
int dim = node.getArrayBrackets().size();
if (dim > 0) {
t = ArrayType.v(t, dim);
}
mProductions.addLast(t);
}
public void outAIdentNonvoidType(AIdentNonvoidType node) {
String typeName = (String) mProductions.removeLast();
Type t = RefType.v(typeName);
int dim = node.getArrayBrackets().size();
if (dim > 0) {
t = ArrayType.v(t, dim);
}
mProductions.addLast(t);
}
public void outAFullIdentNonvoidType(AFullIdentNonvoidType node) {
String typeName = (String) mProductions.removeLast();
Type t = RefType.v(typeName);
int dim = node.getArrayBrackets().size();
if (dim > 0) {
t = ArrayType.v(t, dim);
}
mProductions.addLast(t);
}
/*
* base_type_no_name = {boolean} boolean | {byte} byte | {char} char | {short} short | {int} int | {long} long | {float}
* float | {double} double | {null} null_type;
*/
  // Each handler below pushes the corresponding primitive (or null) Type singleton
  // for a base_type_no_name production.
  public void outABooleanBaseTypeNoName(ABooleanBaseTypeNoName node) {
    mProductions.addLast(BooleanType.v());
  }
  public void outAByteBaseTypeNoName(AByteBaseTypeNoName node) {
    mProductions.addLast(ByteType.v());
  }
  public void outACharBaseTypeNoName(ACharBaseTypeNoName node) {
    mProductions.addLast(CharType.v());
  }
  public void outAShortBaseTypeNoName(AShortBaseTypeNoName node) {
    mProductions.addLast(ShortType.v());
  }
  public void outAIntBaseTypeNoName(AIntBaseTypeNoName node) {
    mProductions.addLast(IntType.v());
  }
  public void outALongBaseTypeNoName(ALongBaseTypeNoName node) {
    mProductions.addLast(LongType.v());
  }
  public void outAFloatBaseTypeNoName(AFloatBaseTypeNoName node) {
    mProductions.addLast(FloatType.v());
  }
  public void outADoubleBaseTypeNoName(ADoubleBaseTypeNoName node) {
    mProductions.addLast(DoubleType.v());
  }
  public void outANullBaseTypeNoName(ANullBaseTypeNoName node) {
    mProductions.addLast(NullType.v());
  }
/*
* base_type = {boolean} boolean | {byte} byte | {char} char | {short} short | {int} int | {long} long | {float} float |
* {double} double | {null} null_type | {class_name} class_name;
*/
  // Each handler below pushes the corresponding primitive (or null) Type singleton
  // for a base_type production.
  public void outABooleanBaseType(ABooleanBaseType node) {
    mProductions.addLast(BooleanType.v());
  }
  public void outAByteBaseType(AByteBaseType node) {
    mProductions.addLast(ByteType.v());
  }
  public void outACharBaseType(ACharBaseType node) {
    mProductions.addLast(CharType.v());
  }
  public void outAShortBaseType(AShortBaseType node) {
    mProductions.addLast(ShortType.v());
  }
  public void outAIntBaseType(AIntBaseType node) {
    mProductions.addLast(IntType.v());
  }
  public void outALongBaseType(ALongBaseType node) {
    mProductions.addLast(LongType.v());
  }
  public void outAFloatBaseType(AFloatBaseType node) {
    mProductions.addLast(FloatType.v());
  }
  public void outADoubleBaseType(ADoubleBaseType node) {
    mProductions.addLast(DoubleType.v());
  }
  public void outANullBaseType(ANullBaseType node) {
    mProductions.addLast(NullType.v());
  }
public void outAClassNameBaseType(AClassNameBaseType node) {
String type = (String) mProductions.removeLast();
if (type.equals("int")) {
throw new RuntimeException();
}
mProductions.addLast(RefType.v(type));
}
/*
* method_body = {empty} semicolon | {full} l_brace declaration* statement* catch_clause* r_brace;
*/
  /** Resets the per-method-body state (locals, label maps) before walking a full body. */
  public void inAFullMethodBody(AFullMethodBody node) {
    mLocals = new HashMap<String, Local>();
    mLabelToStmtMap = new HashMap<Object, Unit>();
    mLabelToPatchList = new HashMap<String, List>();
  }
public void outAFullMethodBody(AFullMethodBody node) {
JimpleBody jBody = Jimple.v().newBody();
if (node.getCatchClause() != null) {
int size = node.getCatchClause().size();
for (int i = 0; i < size; i++) {
jBody.getTraps().addFirst((Trap) mProductions.removeLast());
}
}
if (node.getStatement() != null) {
int size = node.getStatement().size();
Unit lastStmt = null;
for (int i = 0; i < size; i++) {
Object o = mProductions.removeLast();
if (o instanceof Unit) {
jBody.getUnits().addFirst((Unit) o);
lastStmt = (Unit) o;
} else if (o instanceof String) {
if (lastStmt == null) {
throw new RuntimeException("impossible");
}
mLabelToStmtMap.put(o, lastStmt);
} else {
throw new RuntimeException("impossible");
}
}
}
if (node.getDeclaration() != null) {
int size = node.getDeclaration().size();
for (int i = 0; i < size; i++) {
List<Local> localList = (List<Local>) mProductions.removeLast();
jBody.getLocals().addAll(localList);
}
}
Iterator<String> it = mLabelToPatchList.keySet().iterator();
while (it.hasNext()) {
String label = it.next();
Unit target = mLabelToStmtMap.get(label);
Iterator patchIt = mLabelToPatchList.get(label).iterator();
while (patchIt.hasNext()) {
UnitBox box = (UnitBox) patchIt.next();
box.setUnit(target);
}
}
/*
* Iterator it = mLabelToStmtMap.keySet().iterator(); while(it.hasNext()) { String label = (String) it.next(); Unit
* target = (Unit) mLabelToStmtMap.get(label);
*
* List l = (List) mLabelToPatchList.get(label); if(l != null) { Iterator patchIt = l.iterator();
* while(patchIt.hasNext()) { UnitBox box = (UnitBox) patchIt.next(); box.setUnit(target); } } }
*/
mProductions.addLast(jBody);
}
  /** type = {novoid}: nothing to do — the nonvoid_type production already pushed the Type. */
  public void outANovoidType(ANovoidType node) {
  }
/*
* parameter_list = {single} parameter | {multi} parameter comma parameter_list;
*/
public void outASingleParameterList(ASingleParameterList node) {
List<Type> l = new ArrayList<Type>();
l.add((Type) mProductions.removeLast());
mProductions.addLast(l);
}
public void outAMultiParameterList(AMultiParameterList node) {
List<Type> l = (List<Type>) mProductions.removeLast();
l.add(0, (Type) mProductions.removeLast());
mProductions.addLast(l);
}
/*
* arg_list = {single} immediate | {multi} immediate comma arg_list;
*/
public void outASingleArgList(ASingleArgList node) {
List<Value> l = new ArrayList<Value>();
l.add((Value) mProductions.removeLast());
mProductions.addLast(l);
}
public void outAMultiArgList(AMultiArgList node) {
List<Value> l = (List<Value>) mProductions.removeLast();
l.add(0, (Value) mProductions.removeLast());
mProductions.addLast(l);
}
/*
* class_name_list = {class_name_single} class_name | {class_name_multi} class_name comma class_name_list;
*/
public void outAClassNameSingleClassNameList(AClassNameSingleClassNameList node) {
List<String> l = new ArrayList<String>();
l.add((String) mProductions.removeLast());
mProductions.addLast(l);
}
public void outAClassNameMultiClassNameList(AClassNameMultiClassNameList node) {
List<String> l = (List<String>) mProductions.removeLast();
l.add(0, (String) mProductions.removeLast());
mProductions.addLast(l);
}
/*
* file_type = {class} [theclass]:class | {interface} interface;
*/
  /** file_type = {class}: pushes the literal tag consumed later by outAFile. */
  public void outAClassFileType(AClassFileType node) {
    mProductions.addLast("class");
  }
  /** file_type = {interface}: pushes the literal tag consumed later by outAFile. */
  public void outAInterfaceFileType(AInterfaceFileType node) {
    mProductions.addLast("interface");
  }
/*
* catch_clause = catch [name]:class_name from [from_label]:label_name to [to_label]:label_name with
* [with_label]:label_name semicolon;
*/
// public void caseACatchClause(ACatchClause node){}
  /**
   * catch_clause = catch class_name from label to label with label semicolon.
   * The labels were pushed in source order, so they are popped in reverse:
   * handler ("with") label first, then "to", then "from", then the exception
   * class name. Each label gets an empty StmtBox registered for back-patching
   * once the labelled statements are known (see outAFullMethodBody).
   */
  public void outACatchClause(ACatchClause node) {
    String exceptionName;
    UnitBox withUnit, fromUnit, toUnit;
    withUnit = Jimple.v().newStmtBox(null);
    addBoxToPatch((String) mProductions.removeLast(), withUnit);
    toUnit = Jimple.v().newStmtBox(null);
    addBoxToPatch((String) mProductions.removeLast(), toUnit);
    fromUnit = Jimple.v().newStmtBox(null);
    addBoxToPatch((String) mProductions.removeLast(), fromUnit);
    exceptionName = (String) mProductions.removeLast();
    Trap trap = Jimple.v().newTrap(mResolver.makeClassRef(exceptionName), fromUnit, toUnit, withUnit);
    mProductions.addLast(trap);
  }
/*
* declaration = jimple_type local_name_list semicolon;
*/
public void outADeclaration(ADeclaration node) {
List localNameList = (List) mProductions.removeLast();
Type type = (Type) mProductions.removeLast();
Iterator it = localNameList.iterator();
List<Local> localList = new ArrayList<Local>();
while (it.hasNext()) {
Local l = Jimple.v().newLocal((String) it.next(), type);
mLocals.put(l.getName(), l);
localList.add(l);
}
mProductions.addLast(localList);
}
/*
* jimple_type = {unknown} unknown | {nonvoid} nonvoid_type;
*/
  /** jimple_type = {unknown}: pushes the UnknownType singleton. */
  public void outAUnknownJimpleType(AUnknownJimpleType node) {
    mProductions.addLast(UnknownType.v());
  }
/*
* local_name_list = {single} local_name | {multi} local_name comma local_name_list;
*/
public void outASingleLocalNameList(ASingleLocalNameList node) {
List<String> l = new ArrayList<String>();
l.add((String) mProductions.removeLast());
mProductions.addLast(l);
}
public void outAMultiLocalNameList(AMultiLocalNameList node) {
List<String> l = (List<String>) mProductions.removeLast();
l.add(0, (String) mProductions.removeLast());
mProductions.addLast(l);
}
/*
* statement = {label} label_name colon | {breakpoint} breakpoint semicolon | {entermonitor} entermonitor immediate
* semicolon | {exitmonitor} exitmonitor immediate semicolon | {switch} switch l_paren immediate r_paren l_brace case_stmt+
* r_brace semicolon | {identity} local_name colon_equals at_identifier type semicolon | {identity_no_type} local_name
* colon_equals at_identifier semicolon | {assign} variable equals expression semicolon | {if} if bool_expr goto_stmt |
* {goto} goto_stmt | {nop} nop semicolon | {ret} ret immediate? semicolon | {return} return immediate? semicolon | {throw}
* throw immediate semicolon | {invoke} invoke_expr semicolon;
*/
public void outALabelStatement(ALabelStatement node) {
}
public void outABreakpointStatement(ABreakpointStatement node) {
Unit u = Jimple.v().newBreakpointStmt();
mProductions.addLast(u);
}
public void outAEntermonitorStatement(AEntermonitorStatement node) {
Value op = (Value) mProductions.removeLast();
Unit u = Jimple.v().newEnterMonitorStmt(op);
mProductions.addLast(u);
}
public void outAExitmonitorStatement(AExitmonitorStatement node) {
Value op = (Value) mProductions.removeLast();
Unit u = Jimple.v().newExitMonitorStmt(op);
mProductions.addLast(u);
}
/*
* case_label = {constant} case minus? integer_constant | {default} default;
*/
/*
* case_stmt = case_label colon goto_stmt;
*/
  /**
   * case_stmt = case_label colon goto_stmt: pops the goto target label and, for a
   * constant case label, the case value. Pushes either the bare UnitBox (default
   * case) or a {value, box} pair; outATableswitchStatement/outALookupswitchStatement
   * distinguish the two by type.
   */
  public void outACaseStmt(ACaseStmt node) {
    String labelName = (String) mProductions.removeLast();
    UnitBox box = Jimple.v().newStmtBox(null);
    // Target stmt may not be parsed yet; record the box for later back-patching.
    addBoxToPatch(labelName, box);
    Value labelValue = null;
    if (node.getCaseLabel() instanceof AConstantCaseLabel) {
      labelValue = (Value) mProductions.removeLast();
    }
    // if labelValue == null, this is the default label.
    if (labelValue == null) {
      mProductions.addLast(box);
    } else {
      Object[] valueTargetPair = new Object[2];
      valueTargetPair[0] = labelValue;
      valueTargetPair[1] = box;
      mProductions.addLast(valueTargetPair);
    }
  }
  /**
   * Builds a TableSwitchStmt from the case products pushed by outACaseStmt.
   * Cases are popped in reverse source order: the first non-default pair popped is
   * the last (highest) case, the pair popped at i == size-1 is the first (lowest).
   * A bare UnitBox marks the default target; at most one is allowed.
   */
  public void outATableswitchStatement(ATableswitchStatement node) {
    List<UnitBox> targets = new ArrayList<UnitBox>();
    UnitBox defaultTarget = null;
    int lowIndex = 0, highIndex = 0;
    if (node.getCaseStmt() != null) {
      int size = node.getCaseStmt().size();
      for (int i = 0; i < size; i++) {
        Object valueTargetPair = mProductions.removeLast();
        if (valueTargetPair instanceof UnitBox) {
          if (defaultTarget != null) {
            throw new RuntimeException("error: can't ;have more than 1 default stmt");
          }
          defaultTarget = (UnitBox) valueTargetPair;
        } else {
          Object[] pair = (Object[]) valueTargetPair;
          // First non-default pair popped (i==0, or i==1 when the default came first)
          // carries the highest case value.
          if ((i == 0 && defaultTarget == null) || (i == 1 && defaultTarget != null)) {
            highIndex = ((IntConstant) pair[0]).value;
          }
          // NOTE(review): if the default case appears *first* in the source it is
          // popped last (i == size-1), and lowIndex would then never be assigned
          // from a real case — verify the grammar forbids that ordering.
          if (i == (size - 1)) {
            lowIndex = ((IntConstant) pair[0]).value;
          }
          targets.add(0, (UnitBox) pair[1]);
        }
      }
    } else {
      throw new RuntimeException("error: switch stmt has no case stmts");
    }
    Value key = (Value) mProductions.removeLast();
    Unit switchStmt = Jimple.v().newTableSwitchStmt(key, lowIndex, highIndex, targets, defaultTarget);
    mProductions.addLast(switchStmt);
  }
  /**
   * Builds a LookupSwitchStmt from the case products pushed by outACaseStmt.
   * Cases are popped in reverse source order and prepended so that lookupValues
   * and targets end up in source order. A bare UnitBox marks the default target;
   * at most one is allowed. Finally pops the switch key.
   */
  public void outALookupswitchStatement(ALookupswitchStatement node) {
    List<IntConstant> lookupValues = new ArrayList<IntConstant>();
    List<UnitBox> targets = new ArrayList<UnitBox>();
    UnitBox defaultTarget = null;
    if (node.getCaseStmt() != null) {
      int size = node.getCaseStmt().size();
      for (int i = 0; i < size; i++) {
        Object valueTargetPair = mProductions.removeLast();
        if (valueTargetPair instanceof UnitBox) {
          if (defaultTarget != null) {
            throw new RuntimeException("error: can't ;have more than 1 default stmt");
          }
          defaultTarget = (UnitBox) valueTargetPair;
        } else {
          Object[] pair = (Object[]) valueTargetPair;
          lookupValues.add(0, (IntConstant) pair[0]);
          targets.add(0, (UnitBox) pair[1]);
        }
      }
    } else {
      throw new RuntimeException("error: switch stmt has no case stmts");
    }
    Value key = (Value) mProductions.removeLast();
    Unit switchStmt = Jimple.v().newLookupSwitchStmt(key, lookupValues, targets, defaultTarget);
    mProductions.addLast(switchStmt);
  }
  /**
   * Builds a typed identity statement (local := @this: T or local :=
   * @parameterN: T). Pops the identity-ref type, the at-clause text, and the
   * local's identifier from the production stack, in that order.
   */
  public void outAIdentityStatement(AIdentityStatement node) {
    Type identityRefType = (Type) mProductions.removeLast();
    String atClause = (String) mProductions.removeLast();
    Value local = mLocals.get(mProductions.removeLast()); // the local ref
    // from it's
    // identifier
    Value ref = null;
    if (atClause.startsWith("@this")) {
      ref = Jimple.v().newThisRef((RefType) identityRefType);
    } else if (atClause.startsWith("@parameter")) {
      // The index digits sit between "@parameter" (10 chars) and a single
      // trailing character that is stripped here.
      int index = Integer.parseInt(atClause.substring(10, atClause.length() - 1));
      ref = Jimple.v().newParameterRef(identityRefType, index);
    } else {
      // @caughtexception has no type and is routed to
      // outAIdentityNoTypeStatement by the grammar.
      throw new RuntimeException("shouldn't @caughtexception be handled by outAIdentityNoTypeStatement: got" + atClause);
    }
    Unit u = Jimple.v().newIdentityStmt(local, ref);
    mProductions.addLast(u);
  }
  /**
   * Builds the untyped identity statement local := @caughtexception.
   */
  public void outAIdentityNoTypeStatement(AIdentityNoTypeStatement node) {
    mProductions.removeLast(); // get rid of @caughtexception string
    // presently on top of the stack
    Value local = mLocals.get(mProductions.removeLast()); // the local ref
    // from it's
    // identifier
    Unit u = Jimple.v().newIdentityStmt(local, Jimple.v().newCaughtExceptionRef());
    mProductions.addLast(u);
  }
public void outAAssignStatement(AAssignStatement node) {
Object removeLast = mProductions.removeLast();
Value rvalue = (Value) removeLast;
Value variable = (Value) mProductions.removeLast();
Unit u = Jimple.v().newAssignStmt(variable, rvalue);
mProductions.addLast(u);
}
  /**
   * Builds an if statement. The branch target label may not have been seen
   * yet, so an empty UnitBox is created and registered for later patching via
   * addBoxToPatch.
   */
  public void outAIfStatement(AIfStatement node) {
    String targetLabel = (String) mProductions.removeLast();
    Value condition = (Value) mProductions.removeLast();
    UnitBox box = Jimple.v().newStmtBox(null);
    Unit u = Jimple.v().newIfStmt(condition, box);
    addBoxToPatch(targetLabel, box);
    mProductions.addLast(u);
  }
  /**
   * Builds a return statement: 'return v' when an immediate was parsed,
   * otherwise 'return' (void).
   */
  public void outAReturnStatement(AReturnStatement node) {
    Immediate v;
    Stmt s = null;
    if (node.getImmediate() != null) {
      v = (Immediate) mProductions.removeLast();
      s = Jimple.v().newReturnStmt(v);
    } else {
      s = Jimple.v().newReturnVoidStmt();
    }
    mProductions.addLast(s);
  }
  /**
   * Builds a goto statement; the target label is resolved later through the
   * addBoxToPatch machinery.
   */
  public void outAGotoStatement(AGotoStatement node) {
    String targetLabel = (String) mProductions.removeLast();
    UnitBox box = Jimple.v().newStmtBox(null);
    Unit branch = Jimple.v().newGotoStmt(box);
    addBoxToPatch(targetLabel, box);
    mProductions.addLast(branch);
  }
  /** Builds a nop statement. */
  public void outANopStatement(ANopStatement node) {
    Unit u = Jimple.v().newNopStmt();
    mProductions.addLast(u);
  }
  /** The 'ret' (subroutine return) statement is not supported by this parser. */
  public void outARetStatement(ARetStatement node) {
    throw new RuntimeException("ret not yet implemented.");
  }
  /** Builds a throw statement from the operand on top of the stack. */
  public void outAThrowStatement(AThrowStatement node) {
    Value op = (Value) mProductions.removeLast();
    Unit u = Jimple.v().newThrowStmt(op);
    mProductions.addLast(u);
  }
  /** Wraps the invoke expression on top of the stack in an invoke statement. */
  public void outAInvokeStatement(AInvokeStatement node) {
    Value op = (Value) mProductions.removeLast();
    Unit u = Jimple.v().newInvokeStmt(op);
    mProductions.addLast(u);
  }
/*
* case_label = {constant} case minus? integer_constant | {default} default;
*/
  /**
   * Pushes the constant for a case label. A trailing 'L' denotes a long
   * constant. The bare literal "2147483648" is special-cased: it only fits an
   * int when negated, and sign * Integer.MIN_VALUE yields Integer.MIN_VALUE
   * for either sign thanks to two's-complement wrap-around.
   */
  public void outAConstantCaseLabel(AConstantCaseLabel node) {
    String s = (String) mProductions.removeLast();
    int sign = 1;
    if (node.getMinus() != null) {
      sign = -1;
    }
    if (s.endsWith("L")) {
      // Strip the 'L' suffix before parsing as long.
      mProductions.addLast(LongConstant.v(sign * Long.parseLong(s.substring(0, s.length() - 1))));
    } else if (s.equals("2147483648")) {
      mProductions.addLast(IntConstant.v(sign * Integer.MIN_VALUE));
    } else {
      mProductions.addLast(IntConstant.v(sign * Integer.parseInt(s)));
    }
  }
/*
* immediate = {local} local_name | {constant} constant;
*/
public void outALocalImmediate(ALocalImmediate node) {
String local = (String) mProductions.removeLast();
Local l = mLocals.get(local);
if (l == null) {
throw new RuntimeException("did not find local: " + local);
}
mProductions.addLast(l);
}
/*
* constant = {integer} minus? integer_constant | {float} minus? float_constant | {string} string_constant | {null} null;
*/
public void outANullConstant(ANullConstant node) {
mProductions.addLast(NullConstant.v());
}
  /**
   * Pushes an integer constant. A leading minus from the grammar is folded
   * into the string before parsing, so the "2147483648" special case only
   * matches the unnegated literal (which is out of int range and is mapped to
   * Integer.MIN_VALUE); "-2147483648" parses normally. A trailing 'L' denotes
   * a long constant.
   */
  public void outAIntegerConstant(AIntegerConstant node) {
    String s = (String) mProductions.removeLast();
    StringBuffer buf = new StringBuffer();
    if (node.getMinus() != null) {
      buf.append('-');
    }
    buf.append(s);
    s = buf.toString();
    if (s.endsWith("L")) {
      // Strip the 'L' suffix before parsing as long.
      mProductions.addLast(LongConstant.v(Long.parseLong(s.substring(0, s.length() - 1))));
    } else if (s.equals("2147483648")) {
      mProductions.addLast(IntConstant.v(Integer.MIN_VALUE));
    } else {
      mProductions.addLast(IntConstant.v(Integer.parseInt(s)));
    }
  }
  /**
   * Pushes a StringConstant for a string literal. Quote stripping and escape
   * handling already happened in defaultCase when the token was pushed.
   */
  public void outAStringConstant(AStringConstant node) {
    String s = (String) mProductions.removeLast();
    mProductions.addLast(StringConstant.v(s));
    /*
     * try { String t = StringTools.getUnEscapedStringOf(s);
     *
     * mProductions.push(StringConstant.v(t)); } catch(RuntimeException e) { logger.debug(""+s); throw e; }
     */
  }
  /** Pushes a ClassConstant for a class literal. */
  public void outAClzzConstant(AClzzConstant node) {
    String s = (String) mProductions.removeLast();
    mProductions.addLast(ClassConstant.v(s));
  }
/* ('#' (('-'? 'Infinity') | 'NaN') ('f' | 'F')? ) ; */
  /**
   * Pushes a float or double constant. A trailing 'f'/'F' selects float,
   * otherwise double. Literals starting with '#' encode the special IEEE
   * values (#Infinity, #-Infinity, #NaN) and are dispatched by their second
   * character; ordinary literals get an optional leading minus folded in and
   * are parsed normally.
   */
  public void outAFloatConstant(AFloatConstant node) {
    String s = (String) mProductions.removeLast();
    boolean isDouble = true;
    float value = 0;
    double dvalue = 0;
    if (s.endsWith("f") || s.endsWith("F")) {
      isDouble = false;
    }
    if (s.charAt(0) == '#') {
      // Special IEEE value: distinguish "-Infinity", "Infinity", "NaN" by the
      // character following the '#'.
      if (s.charAt(1) == '-') {
        if (isDouble) {
          dvalue = Double.NEGATIVE_INFINITY;
        } else {
          value = Float.NEGATIVE_INFINITY;
        }
      } else if (s.charAt(1) == 'I') {
        if (isDouble) {
          dvalue = Double.POSITIVE_INFINITY;
        } else {
          value = Float.POSITIVE_INFINITY;
        }
      } else {
        if (isDouble) {
          dvalue = Double.NaN;
        } else {
          value = Float.NaN;
        }
      }
    } else {
      StringBuffer buf = new StringBuffer();
      if (node.getMinus() != null) {
        buf.append('-');
      }
      buf.append(s);
      s = buf.toString();
      if (isDouble) {
        dvalue = Double.parseDouble(s);
      } else {
        value = Float.parseFloat(s);
      }
    }
    Object res;
    if (isDouble) {
      res = DoubleConstant.v(dvalue);
    } else {
      res = FloatConstant.v(value);
    }
    mProductions.addLast(res);
  }
/*
* binop_expr = [left]:immediate binop [right]:immediate;
*/
  /**
   * Patches the real operands into the placeholder binop expression: the
   * operator handlers (outAAndBinop etc.) push an expression built with dummy
   * operands, and this handler installs the actual left and right values.
   */
  public void outABinopExpr(ABinopExpr node) {
    Value right = (Value) mProductions.removeLast();
    BinopExpr expr = (BinopExpr) mProductions.removeLast();
    Value left = (Value) mProductions.removeLast();
    expr.setOp1(left);
    expr.setOp2(right);
    mProductions.addLast(expr);
  }
  // Intentionally empty: the nested productions already left the finished
  // value on the stack.
  public void outABinopBoolExpr(ABinopBoolExpr node) {
  }
  // Intentionally empty: see outABinopBoolExpr.
  public void outAUnopExpression(AUnopExpression node) {
  }
/*
* binop = {and} and | {or} or | {xor} xor | {mod} mod |
*
* {cmp} cmp | {cmpg} cmpg | {cmpl} cmpl | {cmpeq} cmpeq |
*
* {cmpne} cmpne | {cmpgt} cmpgt | {cmpge} cmpge | {cmplt} cmplt |
*
* {cmple} cmple | {shl} shl | {shr} shr | {ushr} ushr |
*
* {plus} plus | {minus} minus | {mult} mult | {div} div;
*/
  // Each binop alternative pushes a Jimple expression of the matching kind.
  // The two mValue arguments are placeholders; the real operands are patched
  // in afterwards by outABinopExpr via setOp1/setOp2.
  public void outAAndBinop(AAndBinop node) {
    mProductions.addLast(Jimple.v().newAndExpr(mValue, mValue));
  }
  public void outAOrBinop(AOrBinop node) {
    mProductions.addLast(Jimple.v().newOrExpr(mValue, mValue));
  }
  public void outAXorBinop(AXorBinop node) {
    mProductions.addLast(Jimple.v().newXorExpr(mValue, mValue));
  }
  // 'mod' in the grammar maps to Jimple's remainder expression.
  public void outAModBinop(AModBinop node) {
    mProductions.addLast(Jimple.v().newRemExpr(mValue, mValue));
  }
  public void outACmpBinop(ACmpBinop node) {
    mProductions.addLast(Jimple.v().newCmpExpr(mValue, mValue));
  }
  public void outACmpgBinop(ACmpgBinop node) {
    mProductions.addLast(Jimple.v().newCmpgExpr(mValue, mValue));
  }
  public void outACmplBinop(ACmplBinop node) {
    mProductions.addLast(Jimple.v().newCmplExpr(mValue, mValue));
  }
  public void outACmpeqBinop(ACmpeqBinop node) {
    mProductions.addLast(Jimple.v().newEqExpr(mValue, mValue));
  }
  public void outACmpneBinop(ACmpneBinop node) {
    mProductions.addLast(Jimple.v().newNeExpr(mValue, mValue));
  }
  public void outACmpgtBinop(ACmpgtBinop node) {
    mProductions.addLast(Jimple.v().newGtExpr(mValue, mValue));
  }
  public void outACmpgeBinop(ACmpgeBinop node) {
    mProductions.addLast(Jimple.v().newGeExpr(mValue, mValue));
  }
  public void outACmpltBinop(ACmpltBinop node) {
    mProductions.addLast(Jimple.v().newLtExpr(mValue, mValue));
  }
  public void outACmpleBinop(ACmpleBinop node) {
    mProductions.addLast(Jimple.v().newLeExpr(mValue, mValue));
  }
  public void outAShlBinop(AShlBinop node) {
    mProductions.addLast(Jimple.v().newShlExpr(mValue, mValue));
  }
  public void outAShrBinop(AShrBinop node) {
    mProductions.addLast(Jimple.v().newShrExpr(mValue, mValue));
  }
  public void outAUshrBinop(AUshrBinop node) {
    mProductions.addLast(Jimple.v().newUshrExpr(mValue, mValue));
  }
  public void outAPlusBinop(APlusBinop node) {
    mProductions.addLast(Jimple.v().newAddExpr(mValue, mValue));
  }
  public void outAMinusBinop(AMinusBinop node) {
    mProductions.addLast(Jimple.v().newSubExpr(mValue, mValue));
  }
  public void outAMultBinop(AMultBinop node) {
    mProductions.addLast(Jimple.v().newMulExpr(mValue, mValue));
  }
  public void outADivBinop(ADivBinop node) {
    mProductions.addLast(Jimple.v().newDivExpr(mValue, mValue));
  }
/*
* throws_clause = throws class_name_list;
*/
public void outAThrowsClause(AThrowsClause node) {
List l = (List) mProductions.removeLast();
Iterator it = l.iterator();
List<SootClass> exceptionClasses = new ArrayList<SootClass>(l.size());
while (it.hasNext()) {
String className = (String) it.next();
exceptionClasses.add(mResolver.makeClassRef(className));
}
mProductions.addLast(exceptionClasses);
}
/*
* variable = {reference} reference | {local} local_name;
*/
public void outALocalVariable(ALocalVariable node) {
String local = (String) mProductions.removeLast();
Local l = mLocals.get(local);
if (l == null) {
throw new RuntimeException("did not find local: " + local);
}
mProductions.addLast(l);
}
/*
* public void caseAReferenceVariable(AReferenceVariable node) { }
*/
/*
* array_ref = identifier fixed_array_descriptor;
*/
public void outAArrayReference(AArrayReference node) {
Value immediate = (Value) mProductions.removeLast();
String identifier = (String) mProductions.removeLast();
Local l = mLocals.get(identifier);
if (l == null) {
throw new RuntimeException("did not find local: " + identifier);
}
mProductions.addLast(Jimple.v().newArrayRef(l, immediate));
}
/*
* field_ref = {local} local_name dot field_signature | {sig} field_signature;
*/
  /**
   * Builds an instance field ref base.&lt;field&gt; from the local name and
   * the field signature on the production stack.
   */
  public void outALocalFieldRef(ALocalFieldRef node) {
    SootFieldRef field = (SootFieldRef) mProductions.removeLast();
    String local = (String) mProductions.removeLast();
    Local l = mLocals.get(local);
    if (l == null) {
      throw new RuntimeException("did not find local: " + local);
    }
    mProductions.addLast(Jimple.v().newInstanceFieldRef(l, field));
  }
  /**
   * Builds a static field ref. The signature handler creates refs with
   * isStatic = false, so the ref is re-made as static before wrapping.
   */
  public void outASigFieldRef(ASigFieldRef node) {
    SootFieldRef field = (SootFieldRef) mProductions.removeLast();
    field = Scene.v().makeFieldRef(field.declaringClass(), field.name(), field.type(), true);
    mProductions.addLast(Jimple.v().newStaticFieldRef(field));
  }
/*
* field_signature = cmplt [class_name]:class_name [first]:colon type [field_name]:name cmpgt;
*/
public void outAFieldSignature(AFieldSignature node) {
String className, fieldName;
Type t;
fieldName = (String) mProductions.removeLast();
t = (Type) mProductions.removeLast();
className = (String) mProductions.removeLast();
SootClass cl = mResolver.makeClassRef(className);
SootFieldRef field = Scene.v().makeFieldRef(cl, fieldName, t, false);
mProductions.addLast(field);
}
/*
* expression = {new} new_expr | {cast} l_paren nonvoid_type r_paren local_name | {instanceof} immediate instanceof
* nonvoid_type | {invoke} invoke_expr |
*
* {reference} reference | {binop} binop_expr | {unop} unop_expr | {immediate} immediate;
*/
public void outACastExpression(ACastExpression node) {
Value val = (Value) mProductions.removeLast();
Type type = (Type) mProductions.removeLast();
mProductions.addLast(Jimple.v().newCastExpr(val, type));
}
public void outAInstanceofExpression(AInstanceofExpression node) {
Type nonvoidType = (Type) mProductions.removeLast();
Value immediate = (Value) mProductions.removeLast();
mProductions.addLast(Jimple.v().newInstanceOfExpr(immediate, nonvoidType));
}
/*
* unop_expr = unop immediate;
*/
  /**
   * Patches the real operand into the placeholder unop expression pushed by
   * outALengthofUnop/outANegUnop.
   */
  public void outAUnopExpr(AUnopExpr node) {
    Value v = (Value) mProductions.removeLast();
    UnopExpr expr = (UnopExpr) mProductions.removeLast();
    expr.setOp(v);
    mProductions.addLast(expr);
  }
  /*
   * unop = {lengthof} lengthof | {neg} neg;
   */
  // The mValue argument is a placeholder; outAUnopExpr installs the operand.
  public void outALengthofUnop(ALengthofUnop node) {
    mProductions.addLast(Jimple.v().newLengthExpr(mValue));
  }
  public void outANegUnop(ANegUnop node) {
    mProductions.addLast(Jimple.v().newNegExpr(mValue));
  }
/*
* invoke_expr = {nonstatic} nonstatic_invoke local_name dot method_signature l_paren arg_list? r_paren | {static}
* staticinvoke method_signature l_paren arg_list? r_paren;
*/
  /**
   * Builds a special/virtual/interface invoke expression. Pops (top first)
   * the argument list (when present), the method signature, and the receiver
   * local's name; the invoke kind is selected by the concrete type of the
   * grammar's nonstatic_invoke alternative.
   */
  public void outANonstaticInvokeExpr(ANonstaticInvokeExpr node) {
    List args;
    if (node.getArgList() != null) {
      args = (List) mProductions.removeLast();
    } else {
      args = new ArrayList();
    }
    SootMethodRef method = (SootMethodRef) mProductions.removeLast();
    String local = (String) mProductions.removeLast();
    Local l = mLocals.get(local);
    if (l == null) {
      throw new RuntimeException("did not find local: " + local);
    }
    Node invokeType = node.getNonstaticInvoke();
    Expr invokeExpr;
    if (invokeType instanceof ASpecialNonstaticInvoke) {
      invokeExpr = Jimple.v().newSpecialInvokeExpr(l, method, args);
    } else if (invokeType instanceof AVirtualNonstaticInvoke) {
      invokeExpr = Jimple.v().newVirtualInvokeExpr(l, method, args);
    } else {
      // Only interfaceinvoke remains; the debug check guards against a new
      // grammar alternative being added without updating this dispatch.
      if (debug) {
        if (!(invokeType instanceof AInterfaceNonstaticInvoke)) {
          throw new RuntimeException("expected interface invoke.");
        }
      }
      invokeExpr = Jimple.v().newInterfaceInvokeExpr(l, method, args);
    }
    mProductions.addLast(invokeExpr);
  }
  /**
   * Builds a staticinvoke expression. The signature handler creates refs
   * with isStatic = false, so the popped method ref is re-made as static
   * before the expression is built.
   */
  public void outAStaticInvokeExpr(AStaticInvokeExpr node) {
    List args;
    if (node.getArgList() != null) {
      args = (List) mProductions.removeLast();
    } else {
      args = new ArrayList();
    }
    SootMethodRef method = (SootMethodRef) mProductions.removeLast();
    method = Scene.v().makeMethodRef(method.declaringClass(), method.name(), method.parameterTypes(), method.returnType(),
        true);
    mProductions.addLast(Jimple.v().newStaticInvokeExpr(method, args));
  }
  /**
   * Builds a dynamicinvoke expression. Pops (top first): the bootstrap
   * method's static arguments (when present), the bootstrap method ref, the
   * dynamic call's arguments (when present), and the dynamic method ref.
   */
  public void outADynamicInvokeExpr(ADynamicInvokeExpr node) {
    List<Value> bsmArgs;
    if (node.getStaticargs() != null) {
      bsmArgs = (List) mProductions.removeLast();
    } else {
      bsmArgs = Collections.emptyList();
    }
    SootMethodRef bsmMethodRef = (SootMethodRef) mProductions.removeLast();
    List<Value> dynArgs;
    if (node.getDynargs() != null) {
      dynArgs = (List) mProductions.removeLast();
    } else {
      dynArgs = Collections.emptyList();
    }
    SootMethodRef dynMethodRef = (SootMethodRef) mProductions.removeLast();
    mProductions.addLast(Jimple.v().newDynamicInvokeExpr(bsmMethodRef, bsmArgs, dynMethodRef, dynArgs));
  }
public void outAUnnamedMethodSignature(AUnnamedMethodSignature node) {
String className, methodName;
List parameterList = new ArrayList();
if (node.getParameterList() != null) {
parameterList = (List) mProductions.removeLast();
}
Type type = (Type) mProductions.removeLast();
String name = (String) mProductions.removeLast();
SootClass sootClass = mResolver.makeClassRef(SootClass.INVOKEDYNAMIC_DUMMY_CLASS_NAME);
SootMethodRef sootMethod = Scene.v().makeMethodRef(sootClass, name, parameterList, type, false);
mProductions.addLast(sootMethod);
}
/*
* method_signature = cmplt [class_name]:class_name [first]:colon type [method_name]:name l_paren parameter_list? r_paren
* cmpgt;
*/
  /**
   * Builds a (non-static) SootMethodRef from a full method signature. Pops
   * (top first) the optional parameter list, the method name, the return
   * type, and the declaring class name.
   */
  public void outAMethodSignature(AMethodSignature node) {
    String className, methodName;
    List parameterList = new ArrayList();
    if (node.getParameterList() != null) {
      parameterList = (List) mProductions.removeLast();
    }
    methodName = (String) mProductions.removeLast();
    Type type = (Type) mProductions.removeLast();
    className = (String) mProductions.removeLast();
    SootClass sootClass = mResolver.makeClassRef(className);
    SootMethodRef sootMethod = Scene.v().makeMethodRef(sootClass, methodName, parameterList, type, false);
    mProductions.addLast(sootMethod);
  }
/*
* new_expr = {simple} new base_type | {array} newarray l_paren nonvoid_type r_paren fixed_array_descriptor | {multi}
* newmultiarray l_paren base_type r_paren array_descriptor+;
*/
public void outASimpleNewExpr(ASimpleNewExpr node) {
mProductions.addLast(Jimple.v().newNewExpr((RefType) mProductions.removeLast()));
}
public void outAArrayNewExpr(AArrayNewExpr node) {
Value size = (Value) mProductions.removeLast();
Type type = (Type) mProductions.removeLast();
mProductions.addLast(Jimple.v().newNewArrayExpr(type, size));
}
  /**
   * Builds a multi-dimensional array allocation. Sizes were pushed one per
   * array descriptor that carried an immediate; they are popped in reverse
   * and prepended to restore source order, stopping at the first descriptor
   * without an immediate (trailing empty dimensions). The element base type
   * sits below the sizes.
   */
  public void outAMultiNewExpr(AMultiNewExpr node) {
    LinkedList arrayDesc = node.getArrayDescriptor();
    int descCnt = arrayDesc.size();
    List sizes = new LinkedList();
    Iterator it = arrayDesc.iterator();
    while (it.hasNext()) {
      AArrayDescriptor o = (AArrayDescriptor) it.next();
      if (o.getImmediate() != null) {
        sizes.add(0, mProductions.removeLast());
      } else {
        break;
      }
    }
    Type type = (Type) mProductions.removeLast();
    // The array type's dimension count covers all descriptors, sized or not.
    ArrayType arrayType = ArrayType.v(type, descCnt);
    mProductions.addLast(Jimple.v().newNewMultiArrayExpr(arrayType, sizes));
  }
  /**
   * Fallback for token nodes with no dedicated handler: pushes the token's
   * text onto the production stack. String constants and quoted names have
   * their surrounding quotes stripped, fully-qualified identifiers are
   * unescaped via the Scene, and identifier-like tokens get escape sequences
   * resolved (best-effort: an invalid escape keeps the raw text).
   */
  public void defaultCase(Node node) {
    if (node instanceof TQuotedName || node instanceof TFullIdentifier || node instanceof TIdentifier
        || node instanceof TStringConstant ||
        node instanceof TIntegerConstant || node instanceof TFloatConstant || node instanceof TAtIdentifier
    ) {
      if (debug) {
        logger.debug("Default case -pushing token:" + ((Token) node).getText());
      }
      String tokenString = ((Token) node).getText();
      if (node instanceof TStringConstant || node instanceof TQuotedName) {
        // Drop the surrounding quote characters.
        tokenString = tokenString.substring(1, tokenString.length() - 1);
      } else if (node instanceof TFullIdentifier) {
        tokenString = Scene.v().unescapeName(tokenString);
      }
      if (node instanceof TIdentifier || node instanceof TFullIdentifier || node instanceof TQuotedName
          || node instanceof TStringConstant) {
        try {
          tokenString = StringTools.getUnEscapedStringOf(tokenString);
        } catch (RuntimeException e) {
          logger.debug("Invalid escaped string: " + tokenString);
          // just used the unescaped string, better than nothing
        }
      }
      mProductions.addLast(tokenString);
    }
  }
  /**
   * Folds a list of parsed modifier AST nodes into the corresponding
   * soot.Modifier bit mask. An unrecognized node type is a hard error so that
   * new grammar modifiers cannot be silently dropped.
   *
   * @param l list of modifier AST nodes
   * @return the combined Modifier bits
   */
  protected int processModifiers(List l) {
    int modifier = 0;
    Iterator it = l.iterator();
    while (it.hasNext()) {
      Object t = it.next();
      if (t instanceof AAbstractModifier) {
        modifier |= Modifier.ABSTRACT;
      } else if (t instanceof AFinalModifier) {
        modifier |= Modifier.FINAL;
      } else if (t instanceof ANativeModifier) {
        modifier |= Modifier.NATIVE;
      } else if (t instanceof APublicModifier) {
        modifier |= Modifier.PUBLIC;
      } else if (t instanceof AProtectedModifier) {
        modifier |= Modifier.PROTECTED;
      } else if (t instanceof APrivateModifier) {
        modifier |= Modifier.PRIVATE;
      } else if (t instanceof AStaticModifier) {
        modifier |= Modifier.STATIC;
      } else if (t instanceof ASynchronizedModifier) {
        modifier |= Modifier.SYNCHRONIZED;
      } else if (t instanceof ATransientModifier) {
        modifier |= Modifier.TRANSIENT;
      } else if (t instanceof AVolatileModifier) {
        modifier |= Modifier.VOLATILE;
      } else if (t instanceof AStrictfpModifier) {
        modifier |= Modifier.STRICTFP;
      } else if (t instanceof AEnumModifier) {
        modifier |= Modifier.ENUM;
      } else if (t instanceof AAnnotationModifier) {
        modifier |= Modifier.ANNOTATION;
      } else {
        throw new RuntimeException(
            "Impossible: modifier unknown - Have you added a new modifier and not updated this file?");
      }
    }
    return modifier;
  }
private void addBoxToPatch(String aLabelName, UnitBox aUnitBox) {
List<UnitBox> patchList = mLabelToPatchList.get(aLabelName);
if (patchList == null) {
patchList = new ArrayList<UnitBox>();
mLabelToPatchList.put(aLabelName, patchList);
}
patchList.add(aUnitBox);
}
}
| 50,864
| 30.475866
| 125
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/SparkTransformer.java
|
package soot.jimple.spark;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2002 Ondrej Lhotak
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.Date;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import soot.G;
import soot.Local;
import soot.PointsToAnalysis;
import soot.Scene;
import soot.SceneTransformer;
import soot.Singletons;
import soot.SootClass;
import soot.SootMethod;
import soot.SourceLocator;
import soot.Unit;
import soot.Value;
import soot.jimple.DefinitionStmt;
import soot.jimple.FieldRef;
import soot.jimple.ReachingTypeDumper;
import soot.jimple.Stmt;
import soot.jimple.spark.builder.ContextInsensitiveBuilder;
import soot.jimple.spark.geom.geomPA.GeomPointsTo;
import soot.jimple.spark.ondemand.DemandCSPointsTo;
import soot.jimple.spark.pag.AllocDotField;
import soot.jimple.spark.pag.AllocNode;
import soot.jimple.spark.pag.Node;
import soot.jimple.spark.pag.PAG;
import soot.jimple.spark.pag.PAG2HTML;
import soot.jimple.spark.pag.PAGDumper;
import soot.jimple.spark.pag.VarNode;
import soot.jimple.spark.sets.P2SetVisitor;
import soot.jimple.spark.sets.PointsToSetInternal;
import soot.jimple.spark.solver.EBBCollapser;
import soot.jimple.spark.solver.PropAlias;
import soot.jimple.spark.solver.PropCycle;
import soot.jimple.spark.solver.PropIter;
import soot.jimple.spark.solver.PropMerge;
import soot.jimple.spark.solver.PropWorklist;
import soot.jimple.spark.solver.Propagator;
import soot.jimple.spark.solver.SCCCollapser;
import soot.jimple.toolkits.callgraph.CallGraphBuilder;
import soot.options.SparkOptions;
import soot.tagkit.Host;
import soot.tagkit.StringTag;
import soot.tagkit.Tag;
/**
* Main entry point for Spark.
*
* @author Ondrej Lhotak
*/
public class SparkTransformer extends SceneTransformer {
private static final Logger logger = LoggerFactory.getLogger(SparkTransformer.class);
public SparkTransformer(Singletons.Global g) {
}
public static SparkTransformer v() {
return G.v().soot_jimple_spark_SparkTransformer();
}
  /**
   * Runs the full Spark pipeline: builds the pointer assignment graph (PAG),
   * computes type masks, simplifies the graph, propagates points-to sets,
   * builds/updates the call graph, optionally dumps results, and installs the
   * resulting PAG as the scene's points-to analysis. Depending on options it
   * then hands over to the geometric or demand-driven analyses.
   */
  protected void internalTransform(String phaseName, Map<String, String> options) {
    SparkOptions opts = new SparkOptions(options);
    final String output_dir = SourceLocator.v().getOutputDir();
    // Build pointer assignment graph
    ContextInsensitiveBuilder b = new ContextInsensitiveBuilder();
    if (opts.pre_jimplify()) {
      b.preJimplify();
    }
    if (opts.force_gc()) {
      doGC();
    }
    Date startBuild = new Date();
    final PAG pag = b.setup(opts);
    b.build();
    Date endBuild = new Date();
    reportTime("Pointer Assignment Graph", startBuild, endBuild);
    if (opts.force_gc()) {
      doGC();
    }
    // Build type masks
    Date startTM = new Date();
    pag.getTypeManager().makeTypeMask();
    Date endTM = new Date();
    reportTime("Type masks", startTM, endTM);
    if (opts.force_gc()) {
      doGC();
    }
    if (opts.verbose()) {
      logger.debug("VarNodes: " + pag.getVarNodeNumberer().size());
      logger.debug("FieldRefNodes: " + pag.getFieldRefNodeNumberer().size());
      logger.debug("AllocNodes: " + pag.getAllocNodeNumberer().size());
    }
    // Simplify pag
    Date startSimplify = new Date();
    // We only simplify if on_fly_cg is false. But, if vta is true, it
    // overrides on_fly_cg, so we can still simplify. Something to handle
    // these option interdependencies more cleanly would be nice...
    if ((opts.simplify_sccs() && !opts.on_fly_cg()) || opts.vta()) {
      new SCCCollapser(pag, opts.ignore_types_for_sccs()).collapse();
    }
    if (opts.simplify_offline() && !opts.on_fly_cg()) {
      new EBBCollapser(pag).collapse();
    }
    // NOTE(review): the 'true ||' makes this branch unconditional, so merges
    // are always cleaned up regardless of the simplification options.
    if (true || opts.simplify_sccs() || opts.vta() || opts.simplify_offline()) {
      pag.cleanUpMerges();
    }
    Date endSimplify = new Date();
    reportTime("Pointer Graph simplified", startSimplify, endSimplify);
    if (opts.force_gc()) {
      doGC();
    }
    // Dump pag
    PAGDumper dumper = null;
    if (opts.dump_pag() || opts.dump_solution()) {
      dumper = new PAGDumper(pag, output_dir);
    }
    if (opts.dump_pag()) {
      dumper.dump();
    }
    // Propagate
    Date startProp = new Date();
    propagatePAG(opts, pag);
    Date endProp = new Date();
    reportTime("Propagation", startProp, endProp);
    reportTime("Solution found", startSimplify, endProp);
    if (opts.force_gc()) {
      doGC();
    }
    if (!opts.on_fly_cg() || opts.vta()) {
      CallGraphBuilder cgb = new CallGraphBuilder(pag);
      cgb.build();
    }
    if (opts.verbose()) {
      logger.debug("[Spark] Number of reachable methods: " + Scene.v().getReachableMethods().size());
    }
    if (opts.set_mass()) {
      findSetMass(pag);
    }
    if (opts.dump_answer()) {
      new ReachingTypeDumper(pag, output_dir).dump();
    }
    if (opts.dump_solution()) {
      dumper.dumpPointsToSets();
    }
    if (opts.dump_html()) {
      new PAG2HTML(pag, output_dir).dump();
    }
    Scene.v().setPointsToAnalysis(pag);
    if (opts.add_tags()) {
      addTags(pag);
    }
    if (opts.geom_pta()) {
      // The geometric analysis cannot run on a pre-simplified graph; fall
      // back to the plain SPARK result in that case.
      if (opts.simplify_offline() || opts.simplify_sccs()) {
        logger.debug("" + "Please turn off the simplify-offline and simplify-sccs to run the geometric points-to analysis");
        logger.debug("Now, we keep the SPARK result for querying.");
      } else {
        // We perform the geometric points-to analysis
        GeomPointsTo geomPTA = (GeomPointsTo) pag;
        geomPTA.parametrize(endProp.getTime() - startSimplify.getTime());
        geomPTA.solve();
      }
    }
    if (opts.cs_demand()) {
      // replace by demand-driven refinement-based context-sensitive analysis
      Date startOnDemand = new Date();
      PointsToAnalysis onDemandAnalysis = DemandCSPointsTo.makeWithBudget(opts.traversal(), opts.passes(), opts.lazy_pts());
      Date endOndemand = new Date();
      reportTime("Initialized on-demand refinement-based context-sensitive analysis", startOnDemand, endOndemand);
      Scene.v().setPointsToAnalysis(onDemandAnalysis);
    }
  }
protected void propagatePAG(SparkOptions opts, final PAG pag) {
Propagator propagator = null;
switch (opts.propagator()) {
case SparkOptions.propagator_iter:
propagator = new PropIter(pag);
break;
case SparkOptions.propagator_worklist:
propagator = new PropWorklist(pag);
break;
case SparkOptions.propagator_cycle:
propagator = new PropCycle(pag);
break;
case SparkOptions.propagator_merge:
propagator = new PropMerge(pag);
break;
case SparkOptions.propagator_alias:
propagator = new PropAlias(pag);
break;
case SparkOptions.propagator_none:
break;
default:
throw new RuntimeException();
}
if (propagator != null) {
propagator.propagate();
}
}
  /**
   * Attaches points-to tags to every definition statement in every reachable
   * concrete method body: for each statement defining a local or field, the
   * statement is tagged with the nodes in the target's points-to set and with
   * the source nodes of its incoming simple/alloc/load PAG edges. Nodes with
   * no recorded tag get the shared "Untagged Spark node" tag.
   */
  protected void addTags(PAG pag) {
    final Tag unknown = new StringTag("Untagged Spark node");
    final Map<Node, Tag> nodeToTag = pag.getNodeTags();
    for (final SootClass c : Scene.v().getClasses()) {
      for (final SootMethod m : c.getMethods()) {
        if (!m.isConcrete()) {
          continue;
        }
        if (!m.hasActiveBody()) {
          continue;
        }
        for (final Unit u : m.getActiveBody().getUnits()) {
          final Stmt s = (Stmt) u;
          if (s instanceof DefinitionStmt) {
            Value lhs = ((DefinitionStmt) s).getLeftOp();
            VarNode v = null;
            if (lhs instanceof Local) {
              v = pag.findLocalVarNode(lhs);
            } else if (lhs instanceof FieldRef) {
              v = pag.findGlobalVarNode(((FieldRef) lhs).getField());
            }
            if (v != null) {
              // Tag with every node the target may point to.
              PointsToSetInternal p2set = v.getP2Set();
              p2set.forall(new P2SetVisitor() {
                public final void visit(Node n) {
                  addTag(s, n, nodeToTag, unknown);
                }
              });
              // Also tag with the sources of incoming PAG edges.
              Node[] simpleSources = pag.simpleInvLookup(v);
              for (Node element : simpleSources) {
                addTag(s, element, nodeToTag, unknown);
              }
              simpleSources = pag.allocInvLookup(v);
              for (Node element : simpleSources) {
                addTag(s, element, nodeToTag, unknown);
              }
              simpleSources = pag.loadInvLookup(v);
              for (Node element : simpleSources) {
                addTag(s, element, nodeToTag, unknown);
              }
            }
          }
        }
      }
    }
  }
protected static void reportTime(String desc, Date start, Date end) {
long time = end.getTime() - start.getTime();
logger.debug("[Spark] " + desc + " in " + time / 1000 + "." + (time / 100) % 10 + " seconds.");
}
protected static void doGC() {
// Do 5 times because the garbage collector doesn't seem to always collect
// everything on the first try.
System.gc();
System.gc();
System.gc();
System.gc();
System.gc();
}
protected void addTag(Host h, Node n, Map<Node, Tag> nodeToTag, Tag unknown) {
if (nodeToTag.containsKey(n)) {
h.addTag(nodeToTag.get(n));
} else {
h.addTag(unknown);
}
}
protected void findSetMass(PAG pag) {
int mass = 0;
int varMass = 0;
int adfs = 0;
int scalars = 0;
for (final VarNode v : pag.getVarNodeNumberer()) {
scalars++;
PointsToSetInternal set = v.getP2Set();
if (set != null) {
mass += set.size();
}
if (set != null) {
varMass += set.size();
}
}
for (final AllocNode an : pag.allocSources()) {
for (final AllocDotField adf : an.getFields()) {
PointsToSetInternal set = adf.getP2Set();
if (set != null) {
mass += set.size();
}
if (set != null && set.size() > 0) {
adfs++;
}
}
}
logger.debug("Set mass: " + mass);
logger.debug("Variable mass: " + varMass);
logger.debug("Scalars: " + scalars);
logger.debug("adfs: " + adfs);
// Compute points-to set sizes of dereference sites BEFORE
// trimming sets by declared type
int[] deRefCounts = new int[30001];
for (VarNode v : pag.getDereferences()) {
PointsToSetInternal set = v.getP2Set();
int size = 0;
if (set != null) {
size = set.size();
}
deRefCounts[size]++;
}
int total = 0;
for (int element : deRefCounts) {
total += element;
}
logger.debug("Dereference counts BEFORE trimming (total = " + total + "):");
for (int i = 0; i < deRefCounts.length; i++) {
if (deRefCounts[i] > 0) {
logger.debug("" + i + " " + deRefCounts[i] + " " + (deRefCounts[i] * 100.0 / total) + "%");
}
}
}
}
| 11,479
| 30.36612
| 124
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/builder/ContextInsensitiveBuilder.java
|
package soot.jimple.spark.builder;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2002 Ondrej Lhotak
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.ArrayList;
import java.util.Iterator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import soot.Scene;
import soot.SootClass;
import soot.SootMethod;
import soot.jimple.spark.geom.geomPA.GeomPointsTo;
import soot.jimple.spark.internal.SparkNativeHelper;
import soot.jimple.spark.pag.MethodPAG;
import soot.jimple.spark.pag.PAG;
import soot.jimple.spark.solver.OnFlyCallGraph;
import soot.jimple.toolkits.callgraph.CallGraphBuilder;
import soot.jimple.toolkits.callgraph.Edge;
import soot.jimple.toolkits.callgraph.ReachableMethods;
import soot.jimple.toolkits.pointer.DumbPointerAnalysis;
import soot.jimple.toolkits.pointer.util.NativeMethodDriver;
import soot.options.SparkOptions;
import soot.util.queue.QueueReader;
/**
* A context insensitive pointer assignment graph builder.
*
* @author Ondrej Lhotak
*/
public class ContextInsensitiveBuilder {
  private static final Logger logger = LoggerFactory.getLogger(ContextInsensitiveBuilder.class);
  /**
   * Retrieves active bodies for every concrete, non-native, non-phantom
   * method in the scene, iterating until a fixed point is reached (the class
   * list is snapshotted each round because retrieving a body can load new
   * classes).
   */
  public void preJimplify() {
    boolean change = true;
    while (change) {
      change = false;
      for (Iterator<SootClass> cIt = new ArrayList<>(Scene.v().getClasses()).iterator(); cIt.hasNext();) {
        final SootClass c = cIt.next();
        for (final SootMethod m : c.getMethods()) {
          if (!m.isConcrete()) {
            continue;
          }
          if (m.isNative()) {
            continue;
          }
          if (m.isPhantom()) {
            continue;
          }
          if (!m.hasActiveBody()) {
            change = true;
            m.retrieveActiveBody();
          }
        }
      }
    }
  }
  /** Creates an empty pointer assignment graph. */
  public PAG setup(SparkOptions opts) {
    // GeomPointsTo extends PAG, so one graph object serves both modes.
    pag = opts.geom_pta() ? new GeomPointsTo(opts) : new PAG(opts);
    if (opts.simulate_natives()) {
      pag.nativeMethodDriver = new NativeMethodDriver(new SparkNativeHelper(pag));
    }
    // vta overrides on_fly_cg: with vta, a call graph is built up-front from
    // the dumb (type-based) points-to analysis instead of on the fly.
    if (opts.on_fly_cg() && !opts.vta()) {
      ofcg = new OnFlyCallGraph(pag, opts.apponly());
      pag.setOnFlyCallGraph(ofcg);
    } else {
      cgb = new CallGraphBuilder(DumbPointerAnalysis.v());
    }
    return pag;
  }
  /** Fills in the pointer assignment graph returned by setup. */
  public void build() {
    QueueReader<Edge> callEdges;
    // Listen on the call graph before building it so every edge is seen.
    if (ofcg != null) {
      callEdges = ofcg.callGraph().listener();
      ofcg.build();
      reachables = ofcg.reachableMethods();
      reachables.update();
    } else {
      callEdges = cgb.getCallGraph().listener();
      cgb.build();
      reachables = cgb.reachables();
    }
    for (final SootClass c : Scene.v().getClasses()) {
      handleClass(c);
    }
    // Wire inter-procedural edges for every valid call edge whose target's
    // class is concrete.
    while (callEdges.hasNext()) {
      Edge e = callEdges.next();
      if (!e.isInvalid() && e.getTgt().method().getDeclaringClass().isConcrete()) {
        if (e.tgt().isConcrete() || e.tgt().isNative()) {
          MethodPAG.v(pag, e.tgt()).addToPAG(null);
        }
        pag.addCallTarget(e);
      }
    }
    if (pag.getOpts().verbose()) {
      logger.debug("Total methods: " + totalMethods);
      logger.debug("Initially reachable methods: " + analyzedMethods);
      logger.debug("Classes with at least one reachable method: " + classes);
    }
  }
  /* End of public methods. */
  /* End of package methods. */
  /**
   * Builds and adds the intra-procedural PAG of every reachable concrete or
   * native method of c, updating the statistics counters as it goes.
   */
  protected void handleClass(SootClass c) {
    boolean incedClasses = false;
    if (c.isConcrete()) {
      for (SootMethod m : c.getMethods()) {
        if (!m.isConcrete() && !m.isNative()) {
          continue;
        }
        totalMethods++;
        if (reachables.contains(m)) {
          MethodPAG mpag = MethodPAG.v(pag, m);
          mpag.build();
          mpag.addToPAG(null);
          analyzedMethods++;
          if (!incedClasses) {
            incedClasses = true;
            classes++;
          }
        }
      }
    }
  }
  // The graph under construction and the call-graph machinery feeding it.
  protected PAG pag;
  protected CallGraphBuilder cgb;
  protected OnFlyCallGraph ofcg;
  protected ReachableMethods reachables;
  // Statistics reported by build() when verbose is enabled.
  int classes = 0;
  int totalMethods = 0;
  int analyzedMethods = 0;
  int stmts = 0;
}
| 4,856
| 29.167702
| 106
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/builder/GlobalNodeFactory.java
|
package soot.jimple.spark.builder;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2003 Ondrej Lhotak
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.AnySubType;
import soot.ArrayType;
import soot.PointsToAnalysis;
import soot.RefType;
import soot.Scene;
import soot.SootClass;
import soot.jimple.spark.pag.AllocNode;
import soot.jimple.spark.pag.ArrayElement;
import soot.jimple.spark.pag.ContextVarNode;
import soot.jimple.spark.pag.Node;
import soot.jimple.spark.pag.PAG;
import soot.jimple.spark.pag.VarNode;
import soot.toolkits.scalar.Pair;
/**
* Factory for nodes not specific to a given method.
*
* @author Ondrej Lhotak
*/
public class GlobalNodeFactory {
  // Frequently used RefTypes, cached once at construction time.
  protected final RefType rtObject;
  protected final RefType rtClassLoader;
  protected final RefType rtString;
  protected final RefType rtThread;
  protected final RefType rtThreadGroup;
  protected final RefType rtThrowable;

  public GlobalNodeFactory(PAG pag) {
    this.pag = pag;
    this.rtObject = RefType.v("java.lang.Object");
    this.rtClassLoader = RefType.v("java.lang.ClassLoader");
    this.rtString = RefType.v("java.lang.String");
    this.rtThread = RefType.v("java.lang.Thread");
    this.rtThreadGroup = RefType.v("java.lang.ThreadGroup");
    this.rtThrowable = RefType.v("java.lang.Throwable");
  }

  /**
   * Models the default class loader: an AnySubType(ClassLoader) allocation node flowing into a
   * global variable node, which is returned.
   */
  final public Node caseDefaultClassLoader() {
    AllocNode a = pag.makeAllocNode(PointsToAnalysis.DEFAULT_CLASS_LOADER, AnySubType.v(rtClassLoader), null);
    VarNode v = pag.makeGlobalVarNode(PointsToAnalysis.DEFAULT_CLASS_LOADER_LOCAL, rtClassLoader);
    pag.addEdge(a, v);
    return v;
  }

  /** Models the String holding the name of the main class (a String allocation into a global). */
  final public Node caseMainClassNameString() {
    AllocNode a = pag.makeAllocNode(PointsToAnalysis.MAIN_CLASS_NAME_STRING, rtString, null);
    VarNode v = pag.makeGlobalVarNode(PointsToAnalysis.MAIN_CLASS_NAME_STRING_LOCAL, rtString);
    pag.addEdge(a, v);
    return v;
  }

  /** Models the ThreadGroup of the main thread. */
  final public Node caseMainThreadGroup() {
    AllocNode threadGroupNode = pag.makeAllocNode(PointsToAnalysis.MAIN_THREAD_GROUP_NODE, rtThreadGroup, null);
    VarNode threadGroupNodeLocal = pag.makeGlobalVarNode(PointsToAnalysis.MAIN_THREAD_GROUP_NODE_LOCAL, rtThreadGroup);
    pag.addEdge(threadGroupNode, threadGroupNodeLocal);
    return threadGroupNodeLocal;
  }

  /** Models exceptions wrapped by java.security.PrivilegedActionException (any subtype). */
  final public Node casePrivilegedActionException() {
    AllocNode a = pag.makeAllocNode(PointsToAnalysis.PRIVILEGED_ACTION_EXCEPTION,
        AnySubType.v(RefType.v("java.security.PrivilegedActionException")), null);
    VarNode v = pag.makeGlobalVarNode(PointsToAnalysis.PRIVILEGED_ACTION_EXCEPTION_LOCAL,
        RefType.v("java.security.PrivilegedActionException"));
    pag.addEdge(a, v);
    return v;
  }

  /** Models a String returned as a canonical file path. */
  final public Node caseCanonicalPath() {
    AllocNode a = pag.makeAllocNode(PointsToAnalysis.CANONICAL_PATH, rtString, null);
    VarNode v = pag.makeGlobalVarNode(PointsToAnalysis.CANONICAL_PATH_LOCAL, rtString);
    pag.addEdge(a, v);
    return v;
  }

  /** Models the main Thread object. */
  final public Node caseMainThread() {
    AllocNode threadNode = pag.makeAllocNode(PointsToAnalysis.MAIN_THREAD_NODE, rtThread, null);
    VarNode threadNodeLocal = pag.makeGlobalVarNode(PointsToAnalysis.MAIN_THREAD_NODE_LOCAL, rtThread);
    pag.addEdge(threadNode, threadNodeLocal);
    return threadNodeLocal;
  }

  /** Global variable node representing objects waiting to be finalized (typed java.lang.Object). */
  final public Node caseFinalizeQueue() {
    return pag.makeGlobalVarNode(PointsToAnalysis.FINALIZE_QUEUE, rtObject);
  }

  /**
   * Models the String[] argument of main: a String[] allocation whose array element points to a
   * modelled String allocation. Returns the variable node holding the array.
   */
  final public Node caseArgv() {
    ArrayType strArray = ArrayType.v(rtString, 1);
    AllocNode argv = pag.makeAllocNode(PointsToAnalysis.STRING_ARRAY_NODE, strArray, null);
    VarNode sanl = pag.makeGlobalVarNode(PointsToAnalysis.STRING_ARRAY_NODE_LOCAL, strArray);
    AllocNode stringNode = pag.makeAllocNode(PointsToAnalysis.STRING_NODE, rtString, null);
    VarNode stringNodeLocal = pag.makeGlobalVarNode(PointsToAnalysis.STRING_NODE_LOCAL, rtString);
    pag.addEdge(argv, sanl);
    pag.addEdge(stringNode, stringNodeLocal);
    // Store the String into the array's element field.
    pag.addEdge(stringNodeLocal, pag.makeFieldRefNode(sanl, ArrayElement.v()));
    return sanl;
  }

  /**
   * Models Class.newInstance() on the given class variable: one allocation site per dynamic
   * class in the Scene, all flowing into a single global variable node that is returned.
   */
  final public Node caseNewInstance(VarNode cls) {
    // Strip context: look up the underlying context-free variable node.
    if (cls instanceof ContextVarNode) {
      cls = pag.findLocalVarNode(cls.getVariable());
    }
    VarNode local = pag.makeGlobalVarNode(cls, rtObject);
    for (SootClass cl : Scene.v().dynamicClasses()) {
      AllocNode site = pag.makeAllocNode(new Pair<VarNode, SootClass>(cls, cl), cl.getType(), null);
      pag.addEdge(site, local);
    }
    return local;
  }

  /**
   * The single global node through which all thrown exceptions flow; marked as both an
   * inter-procedural source and target.
   */
  public Node caseThrow() {
    VarNode ret = pag.makeGlobalVarNode(PointsToAnalysis.EXCEPTION_NODE, rtThrowable);
    ret.setInterProcTarget();
    ret.setInterProcSource();
    return ret;
  }

  /* End of public methods. */
  /* End of package methods. */

  protected PAG pag;
}
| 5,403
| 35.761905
| 119
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/builder/MethodNodeFactory.java
|
package soot.jimple.spark.builder;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2002 Ondrej Lhotak
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.ArrayType;
import soot.Local;
import soot.PointsToAnalysis;
import soot.RefLikeType;
import soot.RefType;
import soot.Scene;
import soot.SootClass;
import soot.SootFieldRef;
import soot.SootMethod;
import soot.SootMethodRef;
import soot.Type;
import soot.Value;
import soot.jimple.AbstractStmtSwitch;
import soot.jimple.ArrayRef;
import soot.jimple.AssignStmt;
import soot.jimple.CastExpr;
import soot.jimple.CaughtExceptionRef;
import soot.jimple.ClassConstant;
import soot.jimple.Expr;
import soot.jimple.IdentityRef;
import soot.jimple.IdentityStmt;
import soot.jimple.InstanceFieldRef;
import soot.jimple.InvokeExpr;
import soot.jimple.NewArrayExpr;
import soot.jimple.NewExpr;
import soot.jimple.NewMultiArrayExpr;
import soot.jimple.NullConstant;
import soot.jimple.ParameterRef;
import soot.jimple.ReturnStmt;
import soot.jimple.StaticFieldRef;
import soot.jimple.StaticInvokeExpr;
import soot.jimple.Stmt;
import soot.jimple.StringConstant;
import soot.jimple.ThisRef;
import soot.jimple.ThrowStmt;
import soot.jimple.VirtualInvokeExpr;
import soot.jimple.spark.internal.ClientAccessibilityOracle;
import soot.jimple.spark.internal.SparkLibraryHelper;
import soot.jimple.spark.pag.AllocNode;
import soot.jimple.spark.pag.ArrayElement;
import soot.jimple.spark.pag.MethodPAG;
import soot.jimple.spark.pag.NewInstanceNode;
import soot.jimple.spark.pag.Node;
import soot.jimple.spark.pag.PAG;
import soot.jimple.spark.pag.Parm;
import soot.jimple.spark.pag.VarNode;
import soot.options.CGOptions;
import soot.shimple.AbstractShimpleValueSwitch;
import soot.shimple.PhiExpr;
import soot.toolkits.scalar.Pair;
/**
* Class implementing builder parameters (this decides what kinds of nodes should be built for each kind of Soot value).
*
* @author Ondrej Lhotak
*/
public class MethodNodeFactory extends AbstractShimpleValueSwitch {
  // Frequently used RefTypes, cached once at construction time.
  protected final RefType rtClass;
  protected final RefType rtStringType;
  protected final RefType rtHashSet;
  protected final RefType rtHashMap;
  protected final RefType rtLinkedList;
  protected final RefType rtHashtableEmptyIterator;
  protected final RefType rtHashtableEmptyEnumerator;

  public MethodNodeFactory(PAG pag, MethodPAG mpag) {
    this.pag = pag;
    this.mpag = mpag;
    this.rtClass = RefType.v("java.lang.Class");
    this.rtStringType = RefType.v("java.lang.String");
    this.rtHashSet = RefType.v("java.util.HashSet");
    this.rtHashMap = RefType.v("java.util.HashMap");
    this.rtLinkedList = RefType.v("java.util.LinkedList");
    this.rtHashtableEmptyIterator = RefType.v("java.util.Hashtable$EmptyIterator");
    this.rtHashtableEmptyEnumerator = RefType.v("java.util.Hashtable$EmptyEnumerator");
    setCurrentMethod(mpag.getMethod());
  }

  /**
   * Sets the method for which a graph is currently being built and eagerly creates its this,
   * parameter and return nodes (reference-like types only).
   */
  private void setCurrentMethod(SootMethod m) {
    method = m;
    if (!m.isStatic()) {
      SootClass c = m.getDeclaringClass();
      if (c == null) {
        throw new RuntimeException("Method " + m + " has no declaring class");
      }
      caseThis();
    }
    for (int i = 0; i < m.getParameterCount(); i++) {
      if (m.getParameterType(i) instanceof RefLikeType) {
        caseParm(i);
      }
    }
    Type retType = m.getReturnType();
    if (retType instanceof RefLikeType) {
      caseRet();
    }
  }

  /** Returns the PAG node for the given Jimple value, creating it if necessary. */
  public Node getNode(Value v) {
    v.apply(this);
    return getNode();
  }

  /** Adds the edges required for this statement to the graph. */
  final public void handleStmt(Stmt s) {
    // We only consider reflective class creation when it is enabled
    if (s.containsInvokeExpr()) {
      if (!pag.getCGOpts().types_for_invoke()) {
        return;
      }
      InvokeExpr iexpr = s.getInvokeExpr();
      if (iexpr instanceof VirtualInvokeExpr) {
        if (!isReflectionNewInstance(iexpr)) {
          return;
        }
      } else if (!(iexpr instanceof StaticInvokeExpr)) {
        return;
      }
    }
    s.apply(new AbstractStmtSwitch() {
      @Override
      final public void caseAssignStmt(AssignStmt as) {
        Value l = as.getLeftOp();
        Value r = as.getRightOp();
        if (!(l.getType() instanceof RefLikeType)) {
          return;
        }
        assert r.getType() instanceof RefLikeType : "Type mismatch in assignment " + as + " in method "
            + method.getSignature();
        l.apply(MethodNodeFactory.this);
        Node dest = getNode();
        r.apply(MethodNodeFactory.this);
        Node src = getNode();
        // Record dereferences of the base locals of field accesses.
        if (l instanceof InstanceFieldRef) {
          ((InstanceFieldRef) l).getBase().apply(MethodNodeFactory.this);
          pag.addDereference((VarNode) getNode());
        }
        if (r instanceof InstanceFieldRef) {
          ((InstanceFieldRef) r).getBase().apply(MethodNodeFactory.this);
          pag.addDereference((VarNode) getNode());
        } else if (r instanceof StaticFieldRef) {
          StaticFieldRef sfr = (StaticFieldRef) r;
          // Renamed from 's' to avoid shadowing the statement parameter of handleStmt.
          SootFieldRef fieldRef = sfr.getFieldRef();
          if (pag.getOpts().empties_as_allocs()) {
            // Model reads of the shared empty-collection constants as fresh allocation
            // sites so they do not merge the points-to sets of all their clients.
            if (fieldRef.declaringClass().getName().equals("java.util.Collections")) {
              if (fieldRef.name().equals("EMPTY_SET")) {
                src = pag.makeAllocNode(rtHashSet, rtHashSet, method);
              } else if (fieldRef.name().equals("EMPTY_MAP")) {
                src = pag.makeAllocNode(rtHashMap, rtHashMap, method);
              } else if (fieldRef.name().equals("EMPTY_LIST")) {
                src = pag.makeAllocNode(rtLinkedList, rtLinkedList, method);
              }
            } else if (fieldRef.declaringClass().getName().equals("java.util.Hashtable")) {
              if (fieldRef.name().equals("emptyIterator")) {
                src = pag.makeAllocNode(rtHashtableEmptyIterator, rtHashtableEmptyIterator, method);
              } else if (fieldRef.name().equals("emptyEnumerator")) {
                src = pag.makeAllocNode(rtHashtableEmptyEnumerator, rtHashtableEmptyEnumerator, method);
              }
            }
          }
        }
        mpag.addInternalEdge(src, dest);
      }

      @Override
      final public void caseReturnStmt(ReturnStmt rs) {
        if (!(rs.getOp().getType() instanceof RefLikeType)) {
          return;
        }
        rs.getOp().apply(MethodNodeFactory.this);
        Node retNode = getNode();
        mpag.addInternalEdge(retNode, caseRet());
      }

      @Override
      final public void caseIdentityStmt(IdentityStmt is) {
        if (!(is.getLeftOp().getType() instanceof RefLikeType)) {
          return;
        }
        Value leftOp = is.getLeftOp();
        Value rightOp = is.getRightOp();
        leftOp.apply(MethodNodeFactory.this);
        Node dest = getNode();
        rightOp.apply(MethodNodeFactory.this);
        Node src = getNode();
        mpag.addInternalEdge(src, dest);
        // in case library mode is activated add allocations to any
        // possible type of this local and
        // parameters of accessible methods
        int libOption = pag.getCGOpts().library();
        if (libOption != CGOptions.library_disabled && (accessibilityOracle.isAccessible(method))) {
          if (rightOp instanceof IdentityRef) {
            Type rt = rightOp.getType();
            rt.apply(new SparkLibraryHelper(pag, src, method));
          }
        }
      }

      @Override
      final public void caseThrowStmt(ThrowStmt ts) {
        ts.getOp().apply(MethodNodeFactory.this);
        mpag.addOutEdge(getNode(), pag.nodeFactory().caseThrow());
      }
    });
  }

  /**
   * Checks whether the given invocation is for Class.newInstance()
   *
   * @param iexpr
   *          The invocation to check
   * @return True if the given invocation is for Class.newInstance(), otherwise false
   */
  private boolean isReflectionNewInstance(InvokeExpr iexpr) {
    if (iexpr instanceof VirtualInvokeExpr) {
      VirtualInvokeExpr vie = (VirtualInvokeExpr) iexpr;
      if (vie.getBase().getType() instanceof RefType) {
        RefType rt = (RefType) vie.getBase().getType();
        if (rt.getSootClass().getName().equals("java.lang.Class")) {
          if (vie.getMethodRef().name().equals("newInstance") && vie.getMethodRef().parameterTypes().size() == 0) {
            return true;
          }
        }
      }
    }
    return false;
  }

  /** Returns the node produced by the most recent visit. */
  final public Node getNode() {
    return (Node) getResult();
  }

  /** Variable node for the current method's this reference; marked as an inter-procedural target. */
  final public Node caseThis() {
    VarNode ret = pag.makeLocalVarNode(new Pair<SootMethod, String>(method, PointsToAnalysis.THIS_NODE),
        method.getDeclaringClass().getType(), method);
    ret.setInterProcTarget();
    return ret;
  }

  /**
   * Variable node for the parameter at the given index, or null when the index is out of range.
   */
  final public Node caseParm(int index) {
    // if we connect methods with different param counts in virtualedges.xml, we may be calling
    // caseParm with an out-of-bound index. see PAG.addCallTarget
    if (method.getParameterCount() < index + 1) {
      return null;
    }
    // Integer.valueOf instead of the deprecated new Integer(...) constructor.
    VarNode ret = pag.makeLocalVarNode(new Pair<SootMethod, Integer>(method, Integer.valueOf(index)),
        method.getParameterType(index), method);
    ret.setInterProcTarget();
    return ret;
  }

  @Override
  final public void casePhiExpr(PhiExpr e) {
    // One node per phi; every operand flows into it.
    Pair<Expr, String> phiPair = new Pair<Expr, String>(e, PointsToAnalysis.PHI_NODE);
    Node phiNode = pag.makeLocalVarNode(phiPair, e.getType(), method);
    for (Value op : e.getValues()) {
      op.apply(MethodNodeFactory.this);
      Node opNode = getNode();
      mpag.addInternalEdge(opNode, phiNode);
    }
    setResult(phiNode);
  }

  /** Variable node for the method's return value; marked as an inter-procedural source. */
  final public Node caseRet() {
    VarNode ret = pag.makeLocalVarNode(Parm.v(method, PointsToAnalysis.RETURN_NODE), method.getReturnType(), method);
    ret.setInterProcSource();
    return ret;
  }

  /** Field-reference node for the array-element pseudo-field of the given base. */
  final public Node caseArray(VarNode base) {
    return pag.makeFieldRefNode(base, ArrayElement.v());
  }

  /* End of public methods. */
  /* End of package methods. */

  // OK, these ones are public, but they really shouldn't be; it's just
  // that Java requires them to be, because they override those other
  // public methods.
  @Override
  final public void caseArrayRef(ArrayRef ar) {
    caseLocal((Local) ar.getBase());
    setResult(caseArray((VarNode) getNode()));
  }

  @Override
  final public void caseCastExpr(CastExpr ce) {
    Pair<Expr, String> castPair = new Pair<Expr, String>(ce, PointsToAnalysis.CAST_NODE);
    ce.getOp().apply(this);
    Node opNode = getNode();
    Node castNode = pag.makeLocalVarNode(castPair, ce.getCastType(), method);
    mpag.addInternalEdge(opNode, castNode);
    setResult(castNode);
  }

  @Override
  final public void caseCaughtExceptionRef(CaughtExceptionRef cer) {
    // All exceptions flow through the single global throw node.
    setResult(pag.nodeFactory().caseThrow());
  }

  @Override
  final public void caseInstanceFieldRef(InstanceFieldRef ifr) {
    // Field-based/VTA modes collapse all instances of a field into one global node.
    if (pag.getOpts().field_based() || pag.getOpts().vta()) {
      setResult(pag.makeGlobalVarNode(ifr.getField(), ifr.getField().getType()));
    } else {
      setResult(pag.makeLocalFieldRefNode(ifr.getBase(), ifr.getBase().getType(), ifr.getField(), method));
    }
  }

  @Override
  final public void caseLocal(Local l) {
    setResult(pag.makeLocalVarNode(l, l.getType(), method));
  }

  @Override
  final public void caseNewArrayExpr(NewArrayExpr nae) {
    setResult(pag.makeAllocNode(nae, nae.getType(), method));
  }

  /** Returns true iff the given type is java.lang.StringBuffer or java.lang.StringBuilder. */
  private boolean isStringBuffer(Type t) {
    if (!(t instanceof RefType)) {
      return false;
    }
    String s = t.toString();
    return s.equals("java.lang.StringBuffer") || s.equals("java.lang.StringBuilder");
  }

  @Override
  final public void caseNewExpr(NewExpr ne) {
    // Optionally merge all StringBuffer/StringBuilder allocations into one site per type.
    if (pag.getOpts().merge_stringbuffer() && isStringBuffer(ne.getType())) {
      setResult(pag.makeAllocNode(ne.getType(), ne.getType(), null));
    } else {
      setResult(pag.makeAllocNode(ne, ne.getType(), method));
    }
  }

  @Override
  final public void caseNewMultiArrayExpr(NewMultiArrayExpr nmae) {
    // One allocation site per dimension; each inner array is stored into the element
    // field of the enclosing one.
    ArrayType type = (ArrayType) nmae.getType();
    AllocNode prevAn
        = pag.makeAllocNode(new Pair<Expr, Integer>(nmae, Integer.valueOf(type.numDimensions)), type, method);
    VarNode prevVn = pag.makeLocalVarNode(prevAn, prevAn.getType(), method);
    mpag.addInternalEdge(prevAn, prevVn);
    setResult(prevAn);
    while (true) {
      Type t = type.getElementType();
      if (!(t instanceof ArrayType)) {
        break;
      }
      type = (ArrayType) t;
      AllocNode an
          = pag.makeAllocNode(new Pair<Expr, Integer>(nmae, Integer.valueOf(type.numDimensions)), type, method);
      VarNode vn = pag.makeLocalVarNode(an, an.getType(), method);
      mpag.addInternalEdge(an, vn);
      mpag.addInternalEdge(vn, pag.makeFieldRefNode(prevVn, ArrayElement.v()));
      prevAn = an;
      prevVn = vn;
    }
  }

  @Override
  final public void caseParameterRef(ParameterRef pr) {
    setResult(caseParm(pr.getIndex()));
  }

  @Override
  final public void caseStaticFieldRef(StaticFieldRef sfr) {
    setResult(pag.makeGlobalVarNode(sfr.getField(), sfr.getField().getType()));
  }

  @Override
  final public void caseStringConstant(StringConstant sc) {
    AllocNode stringConstant;
    // Keep the exact constant only when it may matter (string_constants option, class
    // names, array descriptors); otherwise merge into one generic String node.
    if (pag.getOpts().string_constants() || Scene.v().containsClass(sc.value)
        || (sc.value.length() > 0 && sc.value.charAt(0) == '[')) {
      stringConstant = pag.makeStringConstantNode(sc.value);
    } else {
      stringConstant = pag.makeAllocNode(PointsToAnalysis.STRING_NODE, rtStringType, null);
    }
    VarNode stringConstantLocal = pag.makeGlobalVarNode(stringConstant, rtStringType);
    pag.addEdge(stringConstant, stringConstantLocal);
    setResult(stringConstantLocal);
  }

  @Override
  final public void caseThisRef(ThisRef tr) {
    setResult(caseThis());
  }

  @Override
  final public void caseNullConstant(NullConstant nr) {
    // null points to nothing: no node.
    setResult(null);
  }

  @Override
  final public void caseClassConstant(ClassConstant cc) {
    AllocNode classConstant = pag.makeClassConstantNode(cc);
    VarNode classConstantLocal = pag.makeGlobalVarNode(classConstant, rtClass);
    pag.addEdge(classConstant, classConstantLocal);
    setResult(classConstantLocal);
  }

  @Override
  final public void defaultCase(Object v) {
    throw new RuntimeException("failed to handle " + v);
  }

  @Override
  public void caseStaticInvokeExpr(StaticInvokeExpr v) {
    SootMethodRef ref = v.getMethodRef();
    if (v.getArgCount() == 1 && v.getArg(0) instanceof StringConstant && ref.name().equals("forName")
        && ref.declaringClass().getName().equals("java.lang.Class") && ref.parameterTypes().size() == 1) {
      // This is a call to Class.forName
      StringConstant classNameConst = (StringConstant) v.getArg(0);
      caseClassConstant(ClassConstant.v("L" + classNameConst.value.replaceAll("\\.", "/") + ";"));
    }
  }

  @Override
  public void caseVirtualInvokeExpr(VirtualInvokeExpr v) {
    if (isReflectionNewInstance(v)) {
      // Model Class.newInstance() with a dedicated new-instance node fed by the Class object.
      NewInstanceNode newInstanceNode = pag.makeNewInstanceNode(v, Scene.v().getObjectType(), method);
      v.getBase().apply(this);
      Node srcNode = getNode();
      mpag.addInternalEdge(srcNode, newInstanceNode);
      setResult(newInstanceNode);
    } else {
      throw new RuntimeException("Unhandled case of VirtualInvokeExpr");
    }
  }

  protected final PAG pag;
  protected final MethodPAG mpag;
  protected SootMethod method;
  protected ClientAccessibilityOracle accessibilityOracle = Scene.v().getClientAccessibilityOracle();
}
| 16,235
| 33.181053
| 120
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/fieldrw/FieldRWTag.java
|
package soot.jimple.spark.fieldrw;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2003 Ondrej Lhotak
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.Set;
import soot.SootField;
import soot.tagkit.Tag;
/** Implements a tag that holds a list of fields read or written by a call. */
public abstract class FieldRWTag implements Tag {
  // '%'-separated list of "declaringClassName:fieldName" entries.
  private final String fieldNames;

  FieldRWTag(Set<SootField> fields) {
    StringBuilder encoded = new StringBuilder();
    for (SootField field : fields) {
      // Separator goes before every entry except the first.
      if (encoded.length() > 0) {
        encoded.append('%');
      }
      encoded.append(field.getDeclaringClass().getName()).append(':').append(field.getName());
    }
    this.fieldNames = encoded.toString();
  }

  @Override
  public byte[] getValue() {
    byte[] payload = fieldNames.getBytes();
    byte[] result = new byte[payload.length + 2];
    // Two-byte big-endian length prefix, followed by the encoded field list.
    result[0] = (byte) (payload.length >> 8);
    result[1] = (byte) (payload.length & 0xFF);
    System.arraycopy(payload, 0, result, 2, payload.length);
    return result;
  }

  @Override
  public String toString() {
    return getName() + fieldNames;
  }
}
| 1,832
| 26.772727
| 78
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/fieldrw/FieldReadTag.java
|
package soot.jimple.spark.fieldrw;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2003 Ondrej Lhotak
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.Set;
import soot.SootField;
/**
* Implements a tag that holds a list of fields read by a call.
*/
public class FieldReadTag extends FieldRWTag {
  // Name under which this tag is attached to host units.
  public static final String NAME = "FieldReadTag";

  // Package-private: instances are created within this package (see FieldTagger).
  FieldReadTag(Set<SootField> fields) {
    super(fields);
  }

  @Override
  public String getName() {
    return NAME;
  }
}
| 1,181
| 25.266667
| 71
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/fieldrw/FieldReadTagAggregator.java
|
package soot.jimple.spark.fieldrw;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2003 Ondrej Lhotak
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.G;
import soot.Singletons;
import soot.tagkit.ImportantTagAggregator;
import soot.tagkit.Tag;
public class FieldReadTagAggregator extends ImportantTagAggregator {
  /** Constructor for Soot's singleton machinery; use {@link #v()} to obtain the instance. */
  public FieldReadTagAggregator(Singletons.Global g) {
  }

  /** Returns the singleton instance managed by {@link G}. */
  public static FieldReadTagAggregator v() {
    return G.v().soot_jimple_spark_fieldrw_FieldReadTagAggregator();
  }

  /** Decide whether this tag should be aggregated by this aggregator: only FieldReadTags. */
  public boolean wantTag(Tag t) {
    return (t instanceof FieldReadTag);
  }

  /** Name of the aggregated attribute produced for the class file. */
  public String aggregatedName() {
    return "FieldRead";
  }
}
| 1,407
| 28.957447
| 73
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/fieldrw/FieldTagAggregator.java
|
package soot.jimple.spark.fieldrw;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2003 Ondrej Lhotak
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.LinkedList;
import java.util.Map;
import soot.Body;
import soot.G;
import soot.Singletons;
import soot.Unit;
import soot.tagkit.Tag;
import soot.tagkit.TagAggregator;
public class FieldTagAggregator extends TagAggregator {
  /** Constructor for Soot's singleton machinery; use {@link #v()} to obtain the instance. */
  public FieldTagAggregator(Singletons.Global g) {
  }

  /** Returns the singleton instance managed by {@link G}. */
  public static FieldTagAggregator v() {
    return G.v().soot_jimple_spark_fieldrw_FieldTagAggregator();
  }

  // This aggregator works purely by delegating to the read and write aggregators,
  // each of which produces its own attribute.
  protected void internalTransform(Body b, String phaseName, Map options) {
    FieldReadTagAggregator.v().transform(b, phaseName, options);
    FieldWriteTagAggregator.v().transform(b, phaseName, options);
  }

  /** Decide whether this tag should be aggregated by this aggregator. */
  public boolean wantTag(Tag t) {
    // Never called: internalTransform above delegates instead of using the base-class logic.
    throw new RuntimeException();
  }

  public void considerTag(Tag t, Unit u, LinkedList<Tag> tags, LinkedList<Unit> units) {
    // Never called: internalTransform above delegates instead of using the base-class logic.
    throw new RuntimeException();
  }

  public String aggregatedName() {
    // Never called: internalTransform above delegates instead of using the base-class logic.
    throw new RuntimeException();
  }
}
| 1,805
| 28.606557
| 88
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/fieldrw/FieldTagger.java
|
package soot.jimple.spark.fieldrw;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2003 Ondrej Lhotak
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import soot.Body;
import soot.BodyTransformer;
import soot.G;
import soot.MethodOrMethodContext;
import soot.PhaseOptions;
import soot.Scene;
import soot.Singletons;
import soot.SootMethod;
import soot.Value;
import soot.jimple.AssignStmt;
import soot.jimple.FieldRef;
import soot.jimple.Stmt;
import soot.jimple.toolkits.callgraph.CallGraph;
import soot.jimple.toolkits.callgraph.TransitiveTargets;
import soot.util.HashMultiMap;
public class FieldTagger extends BodyTransformer {
  /** Constructor for Soot's singleton machinery; use {@link #v()} to obtain the instance. */
  public FieldTagger(Singletons.Global g) {
  }

  /** Returns the singleton instance managed by {@link G}. */
  public static FieldTagger v() {
    return G.v().soot_jimple_spark_fieldrw_FieldTagger();
  }

  /** Methods whose direct field accesses have already been recorded. */
  private final HashSet<SootMethod> processedMethods = new HashSet<SootMethod>();
  /** Maps each processed method to the set of fields it writes directly. */
  private final HashMultiMap methodToWrite = new HashMultiMap();
  /** Maps each processed method to the set of fields it reads directly. */
  private final HashMultiMap methodToRead = new HashMultiMap();

  /**
   * Records, once per method, the fields directly read and written by the statements of the
   * given method's body. Non-concrete and phantom methods are skipped (after being marked
   * processed).
   */
  protected void ensureProcessed(SootMethod m) {
    // add() returns false when the method was already processed; no separate contains() needed.
    if (!processedMethods.add(m)) {
      return;
    }
    if (!m.isConcrete() || m.isPhantom()) {
      return;
    }
    for (Object unit : m.retrieveActiveBody().getUnits()) {
      final Stmt s = (Stmt) unit;
      if (s instanceof AssignStmt) {
        AssignStmt as = (AssignStmt) s;
        Value l = as.getLeftOp();
        if (l instanceof FieldRef) {
          methodToWrite.put(m, ((FieldRef) l).getField());
        }
        Value r = as.getRightOp();
        if (r instanceof FieldRef) {
          methodToRead.put(m, ((FieldRef) r).getField());
        }
      }
    }
  }

  /**
   * Tags every statement of the body with the sets of fields transitively read and written by
   * its call targets. A statement is left untagged when one of its targets is native or
   * phantom, or when the combined read+write set exceeds the "threshold" phase option.
   */
  protected void internalTransform(Body body, String phaseName, Map options) {
    int threshold = PhaseOptions.getInt(options, "threshold");
    ensureProcessed(body.getMethod());
    CallGraph cg = Scene.v().getCallGraph();
    TransitiveTargets tt = new TransitiveTargets(cg);
    statement: for (Object unit : body.getUnits()) {
      final Stmt s = (Stmt) unit;
      HashSet read = new HashSet();
      HashSet write = new HashSet();
      Iterator<MethodOrMethodContext> it = tt.iterator(s);
      while (it.hasNext()) {
        SootMethod target = (SootMethod) it.next();
        ensureProcessed(target);
        // A native or phantom target could touch arbitrary fields; give up on this statement.
        if (target.isNative() || target.isPhantom()) {
          continue statement;
        }
        read.addAll(methodToRead.get(target));
        write.addAll(methodToWrite.get(target));
        if (read.size() + write.size() > threshold) {
          continue statement;
        }
      }
      s.addTag(new FieldReadTag(read));
      s.addTag(new FieldWriteTag(write));
    }
  }
}
| 3,573
| 29.810345
| 87
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/fieldrw/FieldWriteTag.java
|
package soot.jimple.spark.fieldrw;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2003 Ondrej Lhotak
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.Set;
import soot.SootField;
/**
* Implements a tag that holds a list of fields read by a call.
*/
public class FieldWriteTag extends FieldRWTag {
  // Name under which this tag is attached to host units.
  public static final String NAME = "FieldWriteTag";

  // Package-private: instances are created within this package (see FieldTagger).
  FieldWriteTag(Set<SootField> fields) {
    super(fields);
  }

  @Override
  public String getName() {
    return NAME;
  }
}
| 1,184
| 25.333333
| 71
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/fieldrw/FieldWriteTagAggregator.java
|
package soot.jimple.spark.fieldrw;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2003 Ondrej Lhotak
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.G;
import soot.Singletons;
import soot.tagkit.ImportantTagAggregator;
import soot.tagkit.Tag;
public class FieldWriteTagAggregator extends ImportantTagAggregator {
public FieldWriteTagAggregator(Singletons.Global g) {
}
public static FieldWriteTagAggregator v() {
return G.v().soot_jimple_spark_fieldrw_FieldWriteTagAggregator();
}
/** Decide whether this tag should be aggregated by this aggregator. */
public boolean wantTag(Tag t) {
return (t instanceof FieldWriteTag);
}
public String aggregatedName() {
return "FieldWrite";
}
}
| 1,413
| 29.085106
| 73
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/geom/package-info.java
|
/**
* This package contains a context-sensitive points-to analysis using geometric encoding. The analysis extends the
* context-insensitive analysis from Spark.
*
* The analysis was presented in the following paper: Xiao Xiao and Charles Zhang. Geometric Encoding: Forging high
* performance context sensitive points-to analysis for Java. In ISSTA 2011: International Symposium on Software Testing and
* Analysis, Toronto, Canada, 2011
*/
package soot.jimple.spark.geom;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 1997 - 2018 Raja Vallée-Rai and others
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
| 1,297
| 39.5625
| 125
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/geom/dataMgr/ContextsCollector.java
|
package soot.jimple.spark.geom.dataMgr;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2013 Richard Xiao
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.ArrayList;
import java.util.List;
import soot.jimple.spark.geom.dataRep.SimpleInterval;
/**
* Manage context intervals.
*
* @author xiao
*
*/
/**
 * Manages a set of pairwise disjoint context intervals.
 *
 * @author xiao
 */
public class ContextsCollector {
  /** The disjoint context intervals collected so far. */
  public List<SimpleInterval> bars;

  // Scratch list swapped with bars on every insertion to avoid reallocating
  protected List<SimpleInterval> backupList;

  // Scratch interval used for merging, reused across insertions
  protected SimpleInterval tmp_si;

  /*
   * We are creating a performance-precision tunable container. When there are more than nBudget intervals in the
   * container, we merge them and create one covering interval. nBudget = -1 means the intervals are never merged.
   */
  protected int nBudget = -1;

  public ContextsCollector() {
    bars = new ArrayList<SimpleInterval>();
    backupList = new ArrayList<SimpleInterval>();
    tmp_si = new SimpleInterval();
  }

  public int getBudget() {
    return nBudget;
  }

  /** Sets a new merging budget and returns the previous one. */
  public int setBudget(int n) {
    int original = nBudget;
    nBudget = n;
    return original;
  }

  /**
   * Inserts the context interval [L, R), merging it with every existing interval it overlaps so the stored
   * intervals stay pairwise disjoint.
   *
   * @return false if [L, R) is already fully covered by an existing interval, true otherwise
   */
  public boolean insert(long L, long R) {
    backupList.clear();

    // We search the list and merge the intersected intervals into tmp_si
    tmp_si.L = L;
    tmp_si.R = R;
    long minL = L;
    long maxR = R;

    for (SimpleInterval old_si : bars) {
      if (old_si.contains(tmp_si)) {
        // We keep the context intervals disjoint
        return false;
      }

      if (!tmp_si.merge(old_si)) {
        // Disjoint interval: kept as-is, but tracked for the budget-collapse bounds
        if (old_si.L < minL) {
          minL = old_si.L;
        }
        if (old_si.R > maxR) {
          maxR = old_si.R;
        }
        backupList.add(old_si);
      }
    }

    // We switch the backup list with the original list
    List<SimpleInterval> tmpList = backupList;
    backupList = bars;
    bars = tmpList;

    SimpleInterval new_si = new SimpleInterval(tmp_si);
    bars.add(new_si);

    // Over budget: collapse everything into one covering interval
    if (nBudget != -1 && bars.size() > nBudget) {
      bars.clear();
      // BUG FIX: minL/maxR only track the non-merged intervals; an interval merged into tmp_si may extend
      // beyond them, so the covering interval must be the union with the merged interval's own bounds.
      new_si.L = Math.min(minL, new_si.L);
      new_si.R = Math.max(maxR, new_si.R);
      bars.add(new_si);
    }

    return true;
  }

  public void clear() {
    bars.clear();
  }
}
| 2,791
| 24.153153
| 123
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/geom/dataMgr/Obj_full_extractor.java
|
package soot.jimple.spark.geom.dataMgr;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2012 - 2013 Richard Xiao
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.ArrayList;
import java.util.List;
import soot.jimple.spark.geom.dataRep.IntervalContextVar;
import soot.jimple.spark.pag.Node;
/**
* Extracts the full context sensitive points-to result.
*
* @author xiao
*
*/
public class Obj_full_extractor extends PtSensVisitor<IntervalContextVar> {
  // Scratch list swapped with a node's result list during merging; reused across calls to avoid allocation
  private List<IntervalContextVar> backupList = new ArrayList<IntervalContextVar>();
  // Scratch interval used for merging; reused across calls
  private IntervalContextVar tmp_icv = new IntervalContextVar();

  /**
   * Records that {@code var} may point to its target under the context interval [L, R). The intervals stored for a
   * node are kept pairwise disjoint: the incoming interval is merged with every stored interval it overlaps.
   *
   * @return false if the visitor is already finished or [L, R) is covered by an existing interval, true otherwise
   */
  @Override
  public boolean visit(Node var, long L, long R, int sm_int) {
    if (readyToUse) {
      // finish() was already called; no further updates are accepted
      return false;
    }

    List<IntervalContextVar> resList = tableView.get(var);

    if (resList == null) {
      // The first time this object is inserted
      resList = new ArrayList<IntervalContextVar>();
    } else {
      // We search the list and merge the intersected context sensitive objects
      backupList.clear();
      tmp_icv.L = L;
      tmp_icv.R = R;

      for (IntervalContextVar old_cv : resList) {
        if (old_cv.contains(tmp_icv)) {
          /*
           * Because we keep the intervals disjoint, it is impossible that the passed in interval is both contained in
           * one interval and intersects with other intervals. In such a case, we can directly return.
           */
          return false;
        }
        if (!tmp_icv.merge(old_cv)) {
          // Disjoint from the incoming interval: keep unchanged
          backupList.add(old_cv);
        }
      }

      // We switch the backup list with the original list
      List<IntervalContextVar> tmpList = backupList;
      backupList = resList;
      resList = tmpList;

      // Write back the merged bounds
      L = tmp_icv.L;
      R = tmp_icv.R;
    }

    IntervalContextVar icv = new IntervalContextVar(L, R, var);
    resList.add(icv);
    tableView.put(var, resList);

    return true;
  }
}
| 2,591
| 28.793103
| 121
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/geom/dataMgr/PtSensVisitor.java
|
package soot.jimple.spark.geom.dataMgr;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2012 - 2013 Richard Xiao
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import soot.PointsToSet;
import soot.Scene;
import soot.jimple.spark.geom.dataRep.ContextVar;
import soot.jimple.spark.geom.geomPA.GeomPointsTo;
import soot.jimple.spark.pag.Node;
import soot.jimple.spark.pag.VarNode;
import soot.jimple.spark.sets.PointsToSetInternal;
/**
* A container for storing context sensitive querying result of geomPTA. Similar to the class PointsToSetInternal for SPARK.
*
* This class maintains two views for the results: 1. Table view: every object has a separate list of its context sensitive
* versions; 2. List view: all context sensitive objects are put in a single list.
*
*
* @author xiao
*/
/**
 * A container for storing context sensitive querying results of geomPTA, similar to PointsToSetInternal in SPARK.
 *
 * Two views of the result are maintained: a table view mapping each node to the list of its context sensitive
 * versions, and a flattened list view holding all context sensitive objects (built lazily by {@link #finish()}).
 *
 * @author xiao
 */
public abstract class PtSensVisitor<VarType extends ContextVar> {
  // Becomes true once finish() has flattened the table view into the list view
  protected boolean readyToUse = false;

  protected GeomPointsTo ptsProvider = (GeomPointsTo) Scene.v().getPointsToAnalysis();

  /** The list view: all context sensitive objects in one list; valid only after {@link #finish()}. */
  public List<VarType> outList = new ArrayList<VarType>();

  // The table view (cannot be accessed directly outside): per-node lists of context sensitive objects
  protected Map<Node, List<VarType>> tableView = new HashMap<Node, List<VarType>>();

  /**
   * Called before each round of collection.
   */
  public void prepare() {
    tableView.clear();
    readyToUse = false;
  }

  /**
   * Called after each round of collection; flattens the table view into the list view.
   */
  public void finish() {
    if (!readyToUse) {
      readyToUse = true;
      outList.clear();
      if (tableView.isEmpty()) {
        return;
      }
      for (List<VarType> resList : tableView.values()) {
        outList.addAll(resList);
      }
    }
  }

  /**
   * The visitor contains valid information only when this function returns true.
   *
   * @return
   */
  public boolean getUsageState() {
    return readyToUse;
  }

  /**
   * Return the number of different points-to targets.
   */
  public int numOfDiffObjects() {
    return readyToUse ? outList.size() : tableView.size();
  }

  /**
   * Tests if the two containers contain intersecting context sensitive objects. Can be used to answer alias queries.
   */
  public boolean hasNonEmptyIntersection(PtSensVisitor<VarType> other) {
    // Using the table view for comparison, that's faster
    for (Map.Entry<Node, List<VarType>> entry : tableView.entrySet()) {
      Node var = entry.getKey();
      List<VarType> list1 = entry.getValue();
      List<VarType> list2 = other.getCSList(var);
      // BUG FIX: getCSList returns null when the other visitor never saw this node; the original dereferenced
      // list2.size() without a null check and threw a NullPointerException in that case.
      if (list1 == null || list2 == null || list1.isEmpty() || list2.isEmpty()) {
        continue;
      }
      for (VarType cv1 : list1) {
        for (VarType cv2 : list2) {
          if (cv1.intersect(cv2)) {
            return true;
          }
        }
      }
    }

    return false;
  }

  /**
   * Obtain the list of context sensitive objects pointed to by var, or null if var was never visited.
   *
   * @param var
   * @return
   */
  public List<VarType> getCSList(Node var) {
    return tableView.get(var);
  }

  /**
   * Transform the result to SPARK style context insensitive points-to set. The transformed result is stored in the
   * points-to set of the querying pointer.
   *
   * @param vn:
   *          the querying pointer
   * @return
   */
  public PointsToSet toSparkCompatiableResult(VarNode vn) {
    if (!readyToUse) {
      finish();
    }

    PointsToSetInternal ptset = vn.makeP2Set();
    for (VarType cv : outList) {
      ptset.add(cv.var);
    }

    return ptset;
  }

  /**
   * Print the objects for debugging.
   */
  public void debugPrint() {
    if (!readyToUse) {
      finish();
    }

    for (VarType cv : outList) {
      System.out.printf("\t%s\n", cv.toString());
    }
  }

  /**
   * We use the visitor pattern to collect contexts. Derived classes decide how to deal with the variable with the
   * contexts [L, R). Returning false means this interval [L, R) is covered by other intervals.
   *
   * @param var
   * @param L
   * @param R
   * @param sm_int
   *          : the integer ID of the SootMethod
   */
  public abstract boolean visit(Node var, long L, long R, int sm_int);
}
| 4,940
| 25.708108
| 125
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/geom/dataRep/CallsiteContextVar.java
|
package soot.jimple.spark.geom.dataRep;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2012 Richard Xiao
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.jimple.spark.pag.Node;
/**
* A general interface for generating the traditional context sensitive variable representation.
*
* @author xiao
*
*/
/**
 * A general interface for generating the traditional context sensitive variable representation.
 *
 * @author xiao
 *
 */
public class CallsiteContextVar extends ContextVar {
  /*
   * If var is a local pointer or object, context is the callsite for the creation of the pointer or object. If var is a
   * instance field, context is the callsite for the creation of its base object.
   */
  public CgEdge context = null;

  public CallsiteContextVar() {
  }

  public CallsiteContextVar(CgEdge c, Node v) {
    context = c;
    var = v;
  }

  /** Copy constructor: shares the same context edge and node. */
  public CallsiteContextVar(CallsiteContextVar o) {
    context = o.context;
    var = o.var;
  }

  @Override
  public String toString() {
    return "<" + context.toString() + ", " + var.toString() + ">";
  }

  /**
   * Identity-based comparison of the context edge and the node.
   *
   * BUG FIX: the original cast the argument unconditionally, so comparing against null or an unrelated type threw
   * NullPointerException/ClassCastException instead of returning false as the Object.equals contract requires.
   */
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (!(o instanceof CallsiteContextVar)) {
      return false;
    }
    CallsiteContextVar other = (CallsiteContextVar) o;
    return (other.context == context) && (other.var == var);
  }

  @Override
  public int hashCode() {
    int ch = 0;
    if (context != null) {
      ch = context.hashCode();
    }
    // Clear the sign bit so the result is always non-negative; the original negated negative sums, which still
    // yields a negative value for Integer.MIN_VALUE.
    return (var.hashCode() + ch) & 0x7fffffff;
  }

  /** Two callsite-context variables "contain" each other exactly when they carry the same context edge. */
  @Override
  public boolean contains(ContextVar cv) {
    CallsiteContextVar ccv = (CallsiteContextVar) cv;
    return context == ccv.context;
  }

  @Override
  public boolean merge(ContextVar cv) {
    // The behavior of merging callsite context sensitive variables is undefined.
    return false;
  }

  @Override
  public boolean intersect(ContextVar cv) {
    return contains(cv);
  }
}
| 2,455
| 24.061224
| 120
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/geom/dataRep/CgEdge.java
|
package soot.jimple.spark.geom.dataRep;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2011 Richard Xiao
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.jimple.spark.pag.VarNode;
import soot.jimple.toolkits.callgraph.Edge;
/**
* The internal call graph edge representation.
*
* @author xiao
*
*/
public class CgEdge {
  // The edge structure in soot; may be null (toString falls back to the numeric form in that case)
  public Edge sootEdge;

  // The source/destination
  public int s, t;

  // The starting context of function t
  // Thus, the interval is: (1, |s|, map_offset + |s| - 1)
  public long map_offset;

  // Is this call edge a SCC edge, i.e two ends both in the same SCC?
  public boolean scc_edge = false;

  // Is this call edge still in service?
  public boolean is_obsoleted = false;

  // Base variable of this virtual call edge
  public VarNode base_var = null;

  // Next call edge (intrusive singly linked list)
  public CgEdge next = null;

  // cg_edge inv_next = null;

  public CgEdge(int ss, int tt, Edge se, CgEdge ne) {
    s = ss;
    t = tt;
    sootEdge = se;
    next = ne;
  }

  /**
   * Copy itself. The copy carries the same s, t, sootEdge, map_offset, scc_edge and base_var, but is detached from
   * the linked list (next == null) and does not copy is_obsoleted (it keeps the field's default, false).
   *
   * @return
   */
  public CgEdge duplicate() {
    CgEdge new_edge = new CgEdge(s, t, sootEdge, null);
    new_edge.map_offset = map_offset;
    new_edge.scc_edge = scc_edge;
    new_edge.base_var = base_var;
    return new_edge;
  }

  @Override
  public String toString() {
    if (sootEdge != null) {
      return sootEdge.toString();
    }
    return "(" + s + "->" + t + ", " + map_offset + ")";
  }
}
| 2,151
| 25.567901
| 71
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/geom/dataRep/ContextVar.java
|
package soot.jimple.spark.geom.dataRep;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 1997 - 2018 Raja Vallée-Rai and others
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.jimple.spark.pag.Node;
import soot.util.Numberable;
/**
* The root class for representing context sensitive pointer/object in explicit form.
*
* @author xiao
*
*/
/**
 * Root of the hierarchy that represents a context sensitive pointer/object in explicit form.
 *
 * @author xiao
 *
 */
public abstract class ContextVar implements Numberable {
  /** The underlying SPARK node, exposed directly so clients can reuse SPARK machinery. */
  public Node var = null;

  /** Numbering id; -1 until assigned via {@link #setNumber(int)}. */
  public int id = -1;

  /** Not directly instantiable — use one of the concrete subclasses. */
  protected ContextVar() {
  }

  @Override
  public int getNumber() {
    return id;
  }

  @Override
  public void setNumber(int number) {
    id = number;
  }

  /**
   * Tests whether this context variable subsumes the information carried by the given one.
   *
   * @param cv
   * @return
   */
  public abstract boolean contains(ContextVar cv);

  /**
   * Merges the given context variable into this one if possible; the merged information is written into this
   * variable.
   *
   * @param cv
   * @return true if mergeable
   */
  public abstract boolean merge(ContextVar cv);

  /**
   * Tests whether the two context sensitive variables have intersecting contexts.
   *
   * @param cv
   * @return
   */
  public abstract boolean intersect(ContextVar cv);
}
| 2,009
| 24.443038
| 97
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/geom/dataRep/IntervalContextVar.java
|
package soot.jimple.spark.geom.dataRep;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2013 Richard Xiao
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.jimple.spark.pag.Node;
/**
* A particular class to encode contexts in interval manner.
*
* @author xiao
*
*/
/**
 * A particular class to encode contexts in interval manner.
 *
 * @author xiao
 *
 */
public class IntervalContextVar extends ContextVar implements Comparable<IntervalContextVar> {
  // The interval is [L, R), which stands for a set of consecutive contexts
  public long L = 0, R = 0;

  public IntervalContextVar() {
  }

  public IntervalContextVar(long l, long r, Node v) {
    assert l < r;
    L = l;
    R = r;
    var = v;
  }

  /** Copy constructor. */
  public IntervalContextVar(IntervalContextVar o) {
    L = o.L;
    R = o.R;
    var = o.var;
  }

  @Override
  public String toString() {
    return "<" + var.toString() + ", " + L + ", " + R + ">";
  }

  /**
   * Field-wise equality on the interval bounds plus identity on the node.
   *
   * BUG FIX: the original cast the argument unconditionally, so comparing against null or an unrelated type threw
   * NullPointerException/ClassCastException instead of returning false as the Object.equals contract requires.
   */
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (!(o instanceof IntervalContextVar)) {
      return false;
    }
    IntervalContextVar other = (IntervalContextVar) o;
    return (other.L == L) && (other.R == R) && (other.var == var);
  }

  @Override
  public int hashCode() {
    int h = (var == null) ? 0 : var.hashCode();
    h = 31 * h + Long.hashCode(L);
    h = 31 * h + Long.hashCode(R);
    return h;
  }

  /**
   * Orders by L, breaking ties by R.
   *
   * BUG FIX: the original returned 1 when both endpoints were equal, so a.compareTo(b) and b.compareTo(a) could both
   * be positive — violating the antisymmetry requirement of Comparable and breaking sort/TreeSet usage.
   */
  @Override
  public int compareTo(IntervalContextVar o) {
    int c = Long.compare(L, o.L);
    return (c != 0) ? c : Long.compare(R, o.R);
  }

  /** This interval contains cv's interval iff [cv.L, cv.R) lies within [L, R). */
  @Override
  public boolean contains(ContextVar cv) {
    IntervalContextVar icv = (IntervalContextVar) cv;
    return L <= icv.L && R >= icv.R;
  }

  /**
   * Extends [L, R) to cover cv's interval when the two overlap or touch.
   *
   * @return true if the merge happened, false if the intervals are disjoint
   */
  @Override
  public boolean merge(ContextVar cv) {
    IntervalContextVar icv = (IntervalContextVar) cv;

    if (icv.L < L) {
      if (L <= icv.R) {
        L = icv.L;
        if (R < icv.R) {
          R = icv.R;
        }
        return true;
      }
    } else {
      if (icv.L <= R) {
        if (R < icv.R) {
          R = icv.R;
        }
        return true;
      }
    }

    return false;
  }

  /** Half-open interval overlap test. */
  @Override
  public boolean intersect(ContextVar cv) {
    IntervalContextVar icv = (IntervalContextVar) cv;
    return (L <= icv.L && icv.L < R) || (icv.L <= L && L < icv.R);
  }
}
| 2,902
| 21.330769
| 94
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/geom/dataRep/PlainConstraint.java
|
package soot.jimple.spark.geom.dataRep;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2011 Richard Xiao
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.Set;
import soot.jimple.spark.geom.geomPA.IVarAbstraction;
import soot.jimple.spark.pag.SparkField;
import soot.jimple.toolkits.callgraph.Edge;
import soot.toolkits.scalar.Pair;
import soot.util.Numberable;
/**
* The geometric encoding based constraint descriptor.
*
* @author xiao
*
*/
public class PlainConstraint implements Numberable {
  // Plain constraint descriptor
  // This is a full description that we can read/write without context
  // A constraint has the form : lhs -> rhs, which means lhs is assigned to rhs
  // lhs/rhs is a pointer p or a field p.f, which assigns the value of lhs to rhs

  /** The type of this constraint, e.g. allocation, assignment or complex */
  public int type;

  /** The two pointers involved in this constraint */
  public Pair<IVarAbstraction, IVarAbstraction> expr = new Pair<IVarAbstraction, IVarAbstraction>();

  /** Used in complex constraint. If this constraint is a store p.f = q, we say otherSide = q */
  public IVarAbstraction otherSide = null;

  /** Indicate the mapping relation between the two pointers, 1-1, 1-many, ... */
  public int code;

  /** The field that is involved in a complex constraint */
  public SparkField f = null;

  /** If this constraint represents a parameter passing or function return, the corresponding call edge is identified here */
  public Set<Edge> interCallEdges = null;

  /** To indicate if this constraint will be evaluated or not */
  public boolean isActive = true;

  // Numbering id for the Numberable interface; -1 until assigned
  private int id = -1;

  @Override
  public void setNumber(int number) {
    id = number;
  }

  @Override
  public int getNumber() {
    return id;
  }

  /** The left-hand side (source) of the assignment lhs -> rhs. */
  public IVarAbstraction getLHS() {
    return expr.getO1();
  }

  public void setLHS(IVarAbstraction newLHS) {
    expr.setO1(newLHS);
  }

  /** The right-hand side (target) of the assignment lhs -> rhs. */
  public IVarAbstraction getRHS() {
    return expr.getO2();
  }

  public void setRHS(IVarAbstraction newRHS) {
    expr.setO2(newRHS);
  }
}
| 2,838
| 30.544444
| 125
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/geom/dataRep/RectangleNode.java
|
package soot.jimple.spark.geom.dataRep;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2011 Richard Xiao
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
/**
* The rectangle figure for encoding the many-to-many relation.
*
* @author xiao
*
*/
/**
 * The rectangle figure for encoding the many-to-many relation.
 *
 * @author xiao
 *
 */
public class RectangleNode extends SegmentNode {
  // I1 : the starting x coordinate
  // I2 : the starting y coordinate
  // L : the length of the x-axis side
  // L_prime : the length of the y-axis side
  public long L_prime;

  public RectangleNode() {
  }

  public RectangleNode(RectangleNode other) {
    copyRectangle(other);
  }

  /** Copies all four geometric fields from other. */
  public void copyRectangle(RectangleNode other) {
    I1 = other.I1;
    I2 = other.I2;
    L = other.L;
    L_prime = other.L_prime;
  }

  public RectangleNode(long I1, long I2, long L, long LL) {
    super(I1, I2, L);
    L_prime = LL;
  }

  public boolean equals(RectangleNode other) {
    return I1 == other.I1 && I2 == other.I2 && L == other.L && L_prime == other.L_prime;
  }

  @Override
  public long yEnd() {
    return I2 + L_prime;
  }

  /**
   * Tests whether this rectangle intersects the figure q (a diagonal segment or another rectangle).
   *
   * BUG FIX: the original tested {@code q instanceof SegmentNode} first. Since RectangleNode extends SegmentNode,
   * that test is true for every non-null q, so the rectangle-rectangle branch was unreachable and a rectangle
   * argument was wrongly treated as a diagonal segment. We now dispatch on the more specific type first.
   */
  @Override
  public boolean intersect(SegmentNode q) {
    RectangleNode p = this;

    if (q instanceof RectangleNode) {
      RectangleNode rect_q = (RectangleNode) q;
      // Two rectangles intersect unless one is entirely above, below, to the left or to the right of the other
      if (p.I2 >= rect_q.I2 + rect_q.L_prime) {
        return false;
      }
      if (p.I2 + p.L_prime <= rect_q.I2) {
        return false;
      }
      if (p.I1 + p.L <= rect_q.I1) {
        return false;
      }
      if (p.I1 >= rect_q.I1 + rect_q.L) {
        return false;
      }
      return true;
    }

    // q is a plain segment, i.e. a diagonal line
    // If one of the end points is in the body of the rectangle
    if (point_within_rectangle(q.I1, q.I2, p) || point_within_rectangle(q.I1 + q.L - 1, q.I2 + q.L - 1, p)) {
      return true;
    }

    // Otherwise, the diagonal line must intersect with one of the boundary lines
    return diagonal_line_intersect_horizontal(q, p.I1, p.I2, p.L)
        || diagonal_line_intersect_horizontal(q, p.I1, p.I2 + p.L_prime - 1, p.L)
        || diagonal_line_intersect_vertical(q, p.I1, p.I2, p.L_prime)
        || diagonal_line_intersect_vertical(q, p.I1 + p.L - 1, p.I2, p.L_prime);
  }

  // Is point (x, y) within the half-open body of rect?
  private boolean point_within_rectangle(long x, long y, RectangleNode rect) {
    if (x >= rect.I1 && x < rect.I1 + rect.L) {
      if (y >= rect.I2 && y < rect.I2 + rect.L_prime) {
        return true;
      }
    }
    return false;
  }

  // Does the diagonal segment p cross the vertical boundary at x spanning [y, y + L)?
  private boolean diagonal_line_intersect_vertical(SegmentNode p, long x, long y, long L) {
    if (x >= p.I1 && x < (p.I1 + p.L)) {
      long y_cross = x - p.I1 + p.I2;
      if (y_cross >= y && y_cross < y + L) {
        return true;
      }
    }
    return false;
  }

  // Does the diagonal segment p cross the horizontal boundary at y spanning [x, x + L)?
  private boolean diagonal_line_intersect_horizontal(SegmentNode p, long x, long y, long L) {
    if (y >= p.I2 && y < (p.I2 + p.L)) {
      long x_cross = y - p.I2 + p.I1;
      if (x_cross >= x && x_cross < x + L) {
        return true;
      }
    }
    return false;
  }
}
| 3,846
| 24.993243
| 111
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/geom/dataRep/SegmentNode.java
|
package soot.jimple.spark.geom.dataRep;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2011 Richard Xiao
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
/**
* The segment figure for encoding the one-to-one relation.
*
* @author richardxx
*
*/
public class SegmentNode implements Comparable<SegmentNode> {
  // I1 : start interval of the pointer
  // I2 : start interval of the pointed to object
  // L : length of the interval
  // is_new : a flag to indicate that this interval has not been processed
  public long I1;
  public long I2;
  public long L;
  public SegmentNode next = null;
  public boolean is_new = true;

  public SegmentNode() {
  }

  // Copy constructor: copies only the geometric fields, not next/is_new
  public SegmentNode(SegmentNode other) {
    copySegment(other);
  }

  public void copySegment(SegmentNode other) {
    I1 = other.I1;
    I2 = other.I2;
    L = other.L;
  }

  public SegmentNode(long i1, long i2, long l) {
    I1 = i1;
    I2 = i2;
    L = l;
  }

  // Field-wise comparison. A RectangleNode is never equal to a plain segment.
  // NOTE(review): this overloads equals(SegmentNode) rather than overriding Object.equals(Object).
  public boolean equals(SegmentNode other) {
    if (other instanceof RectangleNode) {
      return false;
    }
    if (I1 == other.I1 && I2 == other.I2 && L == other.L) {
      return true;
    }
    return false;
  }

  // Orders by I1, then I2, then L; for two rectangles, additionally by L_prime.
  @Override
  public int compareTo(SegmentNode o) {
    long d;

    d = I1 - o.I1;
    if (d != 0) {
      return d < 0 ? -1 : 1;
    }
    d = I2 - o.I2;
    if (d != 0) {
      return d < 0 ? -1 : 1;
    }
    d = L - o.L;
    if (d != 0) {
      return d < 0 ? -1 : 1;
    }
    if (this instanceof RectangleNode && o instanceof RectangleNode) {
      d = ((RectangleNode) this).L_prime - ((RectangleNode) o).L_prime;
      if (d != 0) {
        return d < 0 ? -1 : 1;
      }
    }

    return 0;
  }

  // Exclusive end of the X projection
  public long xEnd() {
    return I1 + L;
  }

  // Exclusive end of the Y projection (overridden by RectangleNode to use L_prime)
  public long yEnd() {
    return I2 + L;
  }

  /**
   * Testing if two figures are intersected. This interface implements standard intersection testing that ignores the
   * semantics of the X- and Y- axis. Processing the semantics issues before calling this method. A sample usage, please @see
   * heap_sensitive_intersection
   *
   * @param q
   * @return
   */
  public boolean intersect(SegmentNode q) {
    // Intersection with a rectangle is tested in the overrode method
    if (q instanceof RectangleNode) {
      return q.intersect(this);
    }

    SegmentNode p = this;

    if ((p.I2 - p.I1) == (q.I2 - q.I1)) {
      // Two segments have the same offset, so they are on the same diagonal line;
      // they intersect iff their X projections overlap
      if (p.I1 <= q.I1) {
        return q.I1 < p.I1 + p.L;
      } else {
        return p.I1 < q.I1 + q.L;
      }
    }

    return false;
  }

  // Tests whether the projections of the two figures onto the Y axis overlap
  public boolean projYIntersect(SegmentNode q) {
    long py1 = this.I2;
    long py2 = yEnd();
    long qy1 = q.I2;
    long qy2 = q.yEnd();

    if (py1 <= qy1) {
      return qy1 < py2;
    }
    return py1 < qy2;
  }
}
| 3,464
| 21.796053
| 125
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/geom/dataRep/ShapeNode.java
|
package soot.jimple.spark.geom.dataRep;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2011 Richard Xiao
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.io.PrintStream;
/**
 * The abstract super type of the geometric figures and the manager of all the generated sub-type figures. It is not
 * used currently; we leave it here as an extension point for the future.
 *
 * @author xiao
 *
 */
public abstract class ShapeNode {
  // Common Instance Fields
  // I1 : the starting x coordinate
  // I2 : the starting y coordinate
  // E1 : the end coordinate of the X or Y axis depending on the value of I1 and I2 (I1 != 0, then E1 is associated with I1)
  public long I1;
  public long I2;
  public long E1;
  // Set to true on construction; presumably cleared once the owning manager has
  // processed this figure — TODO confirm against the concrete manager implementations.
  public boolean is_new;
  // Figures are chained into an intrusive singly-linked list via this field.
  public ShapeNode next;

  /** Creates an unprocessed figure that is not yet linked into any list. */
  public ShapeNode() {
    is_new = true;
    next = null;
  }

  /**
   * Clone itself and make a new instance.
   *
   * @return a fresh copy of this figure
   */
  public abstract ShapeNode makeDuplicate();

  /**
   * Test if the invoked figure contains the passed in figure.
   *
   * @param other the figure tested for inclusion
   * @return true iff {@code other} lies completely inside this figure
   */
  public abstract boolean inclusionTest(ShapeNode other);

  /**
   * Test if the input x parameter falls in the range of the X coordinates of this figure.
   *
   * @param x the X coordinate to test
   * @return true iff {@code x} is covered by this figure
   */
  public abstract boolean coverThisXValue(long x);

  /** Prints a human-readable description of this figure to the given stream. */
  public abstract void printSelf(PrintStream outPrintStream);

  /** Overwrites this figure's state with the state of {@code other}. */
  public abstract void copy(ShapeNode other);
}
| 2,105
| 26.350649
| 123
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/geom/dataRep/SimpleInterval.java
|
package soot.jimple.spark.geom.dataRep;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2013 Richard Xiao
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
/**
* The basic representation for an interval on the integer domain. A simple interval is a half-open structure [L, R).
*
* @author xiao
*
*/
public class SimpleInterval implements Comparable<SimpleInterval> {
public long L, R;
public SimpleInterval() {
L = 0;
R = 1;
}
public SimpleInterval(long l, long r) {
L = l;
R = r;
}
public SimpleInterval(SimpleInterval o) {
L = o.L;
R = o.R;
}
@Override
public String toString() {
return "[" + L + ", " + R + ")";
}
@Override
public boolean equals(Object o) {
SimpleInterval other = (SimpleInterval) o;
return (other.L == L) && (other.R == R);
}
@Override
public int hashCode() {
int ans = (int) ((L + R) % Integer.MAX_VALUE);
if (ans < 0) {
ans = -ans;
}
return ans;
}
@Override
public int compareTo(SimpleInterval o) {
if (L == o.L) {
return R < o.R ? -1 : 1;
}
return L < o.L ? -1 : 1;
}
public boolean contains(SimpleInterval o) {
SimpleInterval osi = (SimpleInterval) o;
if (L <= osi.L && R >= osi.R) {
return true;
}
return false;
}
public boolean merge(SimpleInterval o) {
SimpleInterval osi = (SimpleInterval) o;
if (osi.L < L) {
if (L <= osi.R) {
L = osi.L;
if (R < osi.R) {
R = osi.R;
}
return true;
}
} else {
if (osi.L <= R) {
if (R < osi.R) {
R = osi.R;
}
return true;
}
}
return false;
}
public boolean intersect(SimpleInterval o) {
SimpleInterval osi = (SimpleInterval) o;
if (L <= osi.L && osi.L < R) {
return true;
}
if (osi.L <= L && L < osi.R) {
return true;
}
return false;
}
}
| 2,605
| 20.53719
| 117
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/geom/geomE/FullSensitiveNode.java
|
package soot.jimple.spark.geom.geomE;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2011 Richard Xiao
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.io.PrintStream;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.Vector;
import soot.Hierarchy;
import soot.RefType;
import soot.Scene;
import soot.SootClass;
import soot.SootMethod;
import soot.jimple.spark.geom.dataMgr.PtSensVisitor;
import soot.jimple.spark.geom.dataRep.PlainConstraint;
import soot.jimple.spark.geom.dataRep.RectangleNode;
import soot.jimple.spark.geom.dataRep.SegmentNode;
import soot.jimple.spark.geom.geomPA.Constants;
import soot.jimple.spark.geom.geomPA.GeomPointsTo;
import soot.jimple.spark.geom.geomPA.IVarAbstraction;
import soot.jimple.spark.geom.geomPA.IWorklist;
import soot.jimple.spark.geom.geomPA.Parameters;
import soot.jimple.spark.pag.AllocNode;
import soot.jimple.spark.pag.ClassConstantNode;
import soot.jimple.spark.pag.LocalVarNode;
import soot.jimple.spark.pag.Node;
import soot.jimple.spark.pag.StringConstantNode;
import soot.jimple.spark.sets.P2SetVisitor;
/**
* This class defines an abstract pointer in the geometric points-to solver. All the points-to/flows-to information and the
* load/store constraints related to this pointer are stored here. The pointer assignment inference rules and the complex
* constraints initialization rules are also implemented here.
*
* @author xiao
*
*/
public class FullSensitiveNode extends IVarAbstraction {
// The targets of directed edges on the constraint graph
public Map<FullSensitiveNode, GeometricManager> flowto;
// The objects this variable points to
public Map<AllocNode, GeometricManager> pt_objs;
// Newly added points-to tuple
public Map<AllocNode, GeometricManager> new_pts;
// store/load complex constraints
public Vector<PlainConstraint> complex_cons;
// Symbolicize the 1-to-1 and many-to-many mappings
public static String symbols[] = { "/", "[]" };
static {
stubManager = new GeometricManager();
pres = new RectangleNode(1, 1, Constants.MAX_CONTEXTS, Constants.MAX_CONTEXTS);
stubManager.addNewFigure(GeometricManager.MANY_TO_MANY, pres);
deadManager = new GeometricManager();
}
public FullSensitiveNode(Node thisVar) {
me = thisVar;
}
@Override
public void deleteAll() {
flowto = null;
pt_objs = null;
new_pts = null;
complex_cons = null;
}
@Override
public void reconstruct() {
flowto = new HashMap<FullSensitiveNode, GeometricManager>();
pt_objs = new HashMap<AllocNode, GeometricManager>();
new_pts = new HashMap<AllocNode, GeometricManager>();
complex_cons = null;
lrf_value = 0;
}
@Override
public void keepPointsToOnly() {
flowto = null;
new_pts = null;
complex_cons = null;
}
@Override
public void do_before_propagation() {
// We first perform the geometric merging
do_pts_interval_merge();
do_flow_edge_interval_merge();
/*
* The following code eliminates the spurious points-to relation for THIS pointer. For example we have two classes A and
* B, B is a child class of A. We have a virtual function foo defined in both A and B. We have a pointer p in type A.
* pts(p) = { o1, o2 }, where o1 is in type A and o2 is in type B. Therefore, the call p.foo() will be resolved to call
* both A::foo and B::foo. Then, in the points-to analysis, we have two assignments: p -> A::foo.THIS, p -> B::foo.THIS
* At this time, obviously, although with the type filter, A::foo.THIS will receive the object o2, which is definitely a
* fake. Thus, we need a new filter to guarantee that A::foo.THIS only points to o1. We call this filter
* "this pointer filter".
*/
Node wrappedNode = getWrappedNode();
if (wrappedNode instanceof LocalVarNode && ((LocalVarNode) wrappedNode).isThisPtr()) {
SootMethod func = ((LocalVarNode) wrappedNode).getMethod();
if (!func.isConstructor()) {
// We don't process the specialinvoke call edge
SootClass defClass = func.getDeclaringClass();
Hierarchy typeHierarchy = Scene.v().getActiveHierarchy();
for (Iterator<AllocNode> it = new_pts.keySet().iterator(); it.hasNext();) {
AllocNode obj = it.next();
if (obj.getType() instanceof RefType) {
SootClass sc = ((RefType) obj.getType()).getSootClass();
if (defClass != sc) {
try {
SootMethod rt_func = typeHierarchy.resolveConcreteDispatch(sc, func);
if (rt_func != func) {
it.remove();
// Also preclude it from propagation again
pt_objs.put(obj, (GeometricManager) deadManager);
}
} catch (RuntimeException e) {
// If the input program has a wrong type cast, resolveConcreteDispatch fails and it goes here
// We simply ignore this error
}
}
}
}
}
}
}
@Override
public void do_after_propagation() {
if (new_pts.size() > 0) {
for (GeometricManager gm : new_pts.values()) {
gm.flush();
}
}
new_pts = new HashMap<AllocNode, GeometricManager>();
}
@Override
public int num_of_diff_objs() {
  // Merged pointers forward the query to their representative.
  if (parent != this) {
    return getRepresentative().num_of_diff_objs();
  }
  // -1 signals that the points-to map has not been built (or was discarded).
  return (pt_objs == null) ? -1 : pt_objs.size();
}
@Override
public int num_of_diff_edges() {
  // Merged pointers forward the query to their representative.
  // Bug fix: this previously delegated to num_of_diff_objs(), so a merged
  // pointer reported its points-to set size instead of its out-edge count.
  if (parent != this) {
    return getRepresentative().num_of_diff_edges();
  }
  // -1 signals that the flow-edge map has not been built (or was discarded).
  if (flowto == null) {
    return -1;
  }
  return flowto.size();
}
@Override
public boolean add_points_to_3(AllocNode obj, long I1, long I2, long L) {
pres.I1 = I1;
pres.I2 = I2;
pres.L = L;
return addPointsTo(GeometricManager.ONE_TO_ONE, obj);
}
@Override
public boolean add_points_to_4(AllocNode obj, long I1, long I2, long L1, long L2) {
pres.I1 = I1;
pres.I2 = I2;
pres.L = L1;
pres.L_prime = L2;
return addPointsTo(GeometricManager.MANY_TO_MANY, obj);
}
@Override
public boolean add_simple_constraint_3(IVarAbstraction qv, long I1, long I2, long L) {
pres.I1 = I1;
pres.I2 = I2;
pres.L = L;
return addFlowsTo(GeometricManager.ONE_TO_ONE, qv);
}
@Override
public boolean add_simple_constraint_4(IVarAbstraction qv, long I1, long I2, long L1, long L2) {
pres.I1 = I1;
pres.I2 = I2;
pres.L = L1;
pres.L_prime = L2;
return addFlowsTo(GeometricManager.MANY_TO_MANY, qv);
}
@Override
public void put_complex_constraint(PlainConstraint cons) {
if (complex_cons == null) {
complex_cons = new Vector<PlainConstraint>();
}
complex_cons.add(cons);
}
@Override
public void drop_duplicates() {
for (GeometricManager gm : pt_objs.values()) {
gm.removeUselessSegments();
}
}
/**
* The place where you implement the pointer assignment reasoning.
*/
@Override
public void propagate(GeomPointsTo ptAnalyzer, IWorklist worklist) {
int i, j;
AllocNode obj;
SegmentNode pts, pe, entry_pts[], entry_pe[];
GeometricManager gm1, gm2;
FullSensitiveNode qn, objn;
boolean added, hasNewPointsTo;
if (pt_objs.size() == 0) {
return;
// System.err.println("+++ Process assignment for: " + toString());
}
// We first build the flow edges that flow in to/out of object fields
if (complex_cons != null) {
for (Map.Entry<AllocNode, GeometricManager> entry : new_pts.entrySet()) {
obj = entry.getKey();
entry_pts = entry.getValue().getFigures();
for (PlainConstraint pcons : complex_cons) {
// For each newly points-to object, construct its instance field
objn = (FullSensitiveNode) ptAnalyzer.findInstanceField(obj, pcons.f);
if (objn == null) {
// This combination of allocdotfield must be invalid
// This expression p.f also renders that p cannot point to obj, so we remove it
// We label this event and sweep the garbage later
pt_objs.put(obj, (GeometricManager) deadManager);
entry.setValue((GeometricManager) deadManager);
break;
}
if (objn.willUpdate == false) {
// This must be a store constraint
// This object field is not need for computing
// the points-to information of the seed pointers
continue;
}
qn = (FullSensitiveNode) pcons.otherSide;
// System.err.println("--load/store-->: " + qn.toString());
for (i = 0; i < GeometricManager.Divisions; ++i) {
pts = entry_pts[i];
while (pts != null && pts.is_new == true) {
switch (pcons.type) {
case Constants.STORE_CONS:
// Store, qv -> pv.field
if (instantiateStoreConstraint(qn, objn, pts, (pcons.code << 8) | i)) {
worklist.push(qn);
}
break;
case Constants.LOAD_CONS:
// Load, pv.field -> qv
if (instantiateLoadConstraint(objn, qn, pts, (pcons.code << 8) | i)) {
worklist.push(objn);
}
break;
}
pts = pts.next;
}
}
}
}
}
if (flowto.size() == 0) {
return;
}
// Next, we process the assignments (e.g. p = q)
for (Map.Entry<FullSensitiveNode, GeometricManager> entry1 : flowto.entrySet()) {
added = false;
qn = entry1.getKey();
gm1 = entry1.getValue();
entry_pe = gm1.getFigures();
// System.err.println("-assign->: " + qn.toString());
// We specialize the two cases that we hope it running faster
// We have new flow-to edges
if (gm1.isThereUnprocessedFigures()) {
// Second, we get the points-to shapes
for (Map.Entry<AllocNode, GeometricManager> entry2 : pt_objs.entrySet()) {
obj = entry2.getKey();
gm2 = entry2.getValue();
// Avoid the garbage
if (gm2 == deadManager) {
continue;
}
// Type filtering and flow-to-this filtering, a simple approach
if (!ptAnalyzer.castNeverFails(obj.getType(), qn.getType())) {
continue;
}
entry_pts = gm2.getFigures();
hasNewPointsTo = gm2.isThereUnprocessedFigures();
// We pair up all the geometric points-to tuples and flow edges
for (j = 0; j < GeometricManager.Divisions; ++j) {
pe = entry_pe[j];
while (pe != null) {
if (pe.is_new == false && hasNewPointsTo == false) {
break;
}
for (i = 0; i < GeometricManager.Divisions; ++i) {
pts = entry_pts[i];
while (pts != null && (pts.is_new || pe.is_new)) {
// Propagate this object
if (reasonAndPropagate(qn, obj, pts, pe, (i << 8) | j)) {
added = true;
}
pts = pts.next;
}
}
pe = pe.next;
}
}
}
gm1.flush();
} else {
// We don't have new edges, thereby we can do the pairing up faster
for (Map.Entry<AllocNode, GeometricManager> entry2 : new_pts.entrySet()) {
obj = entry2.getKey();
gm2 = entry2.getValue();
// Avoid the garbage
if (gm2 == deadManager) {
continue;
}
// Type filtering and flow-to-this filtering, a simple approach
if (!ptAnalyzer.castNeverFails(obj.getType(), qn.getType())) {
continue;
}
entry_pts = gm2.getFigures();
// We pair up all the geometric points-to tuples and flow edges
for (i = 0; i < GeometricManager.Divisions; ++i) {
pts = entry_pts[i];
while (pts != null && pts.is_new == true) {
for (j = 0; j < GeometricManager.Divisions; ++j) {
pe = entry_pe[j];
while (pe != null) {
// Propagate this object
if (reasonAndPropagate(qn, obj, pts, pe, (i << 8) | j)) {
added = true;
}
pe = pe.next;
}
}
pts = pts.next;
}
}
}
}
if (added) {
worklist.push(qn);
}
}
// System.err.println();
}
@Override
public boolean isDeadObject(AllocNode obj) {
return pt_objs.get(obj) == deadManager;
}
@Override
public int count_pts_intervals(AllocNode obj) {
  // Count every segment in every division of the figures recorded for obj.
  int total = 0;
  SegmentNode[] figures = find_points_to(obj);
  for (int div = 0; div < GeometricManager.Divisions; ++div) {
    for (SegmentNode seg = figures[div]; seg != null; seg = seg.next) {
      ++total;
    }
  }
  return total;
}
@Override
public int count_flow_intervals(IVarAbstraction qv) {
  // Count every segment in every division of the flow edges recorded for qv.
  int total = 0;
  SegmentNode[] figures = find_flowto((FullSensitiveNode) qv);
  for (int div = 0; div < GeometricManager.Divisions; ++div) {
    for (SegmentNode seg = figures[div]; seg != null; seg = seg.next) {
      ++total;
    }
  }
  return total;
}
@Override
public boolean heap_sensitive_intersection(IVarAbstraction qv) {
int i, j;
FullSensitiveNode qn;
SegmentNode p, q, pt[], qt[];
boolean localToSameMethod;
qn = (FullSensitiveNode) qv;
localToSameMethod = (enclosingMethod() == qv.enclosingMethod());
for (Iterator<AllocNode> it = pt_objs.keySet().iterator(); it.hasNext();) {
AllocNode an = it.next();
if (an instanceof ClassConstantNode) {
continue;
}
if (an instanceof StringConstantNode) {
continue;
}
qt = qn.find_points_to(an);
if (qt == null) {
continue;
}
pt = find_points_to(an);
for (i = 0; i < GeometricManager.Divisions; ++i) {
p = pt[i];
while (p != null) {
for (j = 0; j < GeometricManager.Divisions; ++j) {
q = qt[j];
while (q != null) {
if (localToSameMethod) {
// We can use a more precise alias testing
if (p.intersect(q)) {
return true;
}
} else {
if (p.projYIntersect(q)) {
return true;
}
}
q = q.next;
}
}
p = p.next;
}
}
}
return false;
}
@Override
public Set<AllocNode> get_all_points_to_objects() {
// If this pointer is not a representative pointer
if (parent != this) {
return getRepresentative().get_all_points_to_objects();
}
return pt_objs.keySet();
}
@Override
public void print_context_sensitive_points_to(PrintStream outPrintStream) {
for (Iterator<AllocNode> it = pt_objs.keySet().iterator(); it.hasNext();) {
AllocNode obj = it.next();
SegmentNode[] int_entry = find_points_to(obj);
for (int j = 0; j < GeometricManager.Divisions; ++j) {
SegmentNode p = int_entry[j];
while (p != null) {
outPrintStream.print("(" + obj.toString() + ", " + p.I1 + ", " + p.I2 + ", " + p.L + ", ");
if (p instanceof RectangleNode) {
outPrintStream.print(((RectangleNode) p).L_prime + ", ");
}
outPrintStream.println(symbols[j] + ")");
p = p.next;
}
}
}
}
/**
* We transfer the SPARK results to current pointer if this pointer is not involved in the geometric analysis. Note that,
* the unreachable objects will not be inserted.
*/
@Override
public void injectPts() {
final GeomPointsTo geomPTA = (GeomPointsTo) Scene.v().getPointsToAnalysis();
pt_objs = new HashMap<AllocNode, GeometricManager>();
me.getP2Set().forall(new P2SetVisitor() {
@Override
public void visit(Node n) {
if (geomPTA.isValidGeometricNode(n)) {
pt_objs.put((AllocNode) n, (GeometricManager) stubManager);
}
}
});
new_pts = null;
}
@Override
public boolean pointer_interval_points_to(long l, long r, AllocNode obj) {
  SegmentNode[] figures = find_points_to(obj);
  for (int div = 0; div < GeometricManager.Divisions; ++div) {
    for (SegmentNode seg = figures[div]; seg != null; seg = seg.next) {
      long segEnd = seg.I1 + seg.L;
      // Do the half-open ranges [l, r) and [seg.I1, segEnd) overlap?
      if ((l <= seg.I1 && seg.I1 < r) || (seg.I1 <= l && l < segEnd)) {
        return true;
      }
    }
  }
  return false;
}
@Override
public void remove_points_to(AllocNode obj) {
pt_objs.remove(obj);
}
@Override
public void get_all_context_sensitive_objects(long l, long r, PtSensVisitor visitor) {
if (parent != this) {
getRepresentative().get_all_context_sensitive_objects(l, r, visitor);
return;
}
GeomPointsTo geomPTA = (GeomPointsTo) Scene.v().getPointsToAnalysis();
for (Map.Entry<AllocNode, GeometricManager> entry : pt_objs.entrySet()) {
AllocNode obj = entry.getKey();
SootMethod sm = obj.getMethod();
int sm_int = geomPTA.getIDFromSootMethod(sm);
if (sm_int == -1) {
continue;
}
GeometricManager gm = entry.getValue();
SegmentNode[] int_entry = gm.getFigures();
for (int i = 0; i < GeometricManager.Divisions; ++i) {
// We iterate all the figures
SegmentNode p = int_entry[i];
while (p != null) {
long L = p.I1;
long R = L + p.L;
long objL = -1, objR = -1;
// Now we compute which context sensitive objects are pointed to by this pointer
if (l <= L && L < r) {
// l----------r
// L----R or
// L------------R
if (i == GeometricManager.ONE_TO_ONE) {
long d = r - L;
if (R < r) {
d = p.L;
}
objL = p.I2;
objR = objL + d;
} else {
objL = p.I2;
objR = p.I2 + ((RectangleNode) p).L_prime;
}
} else if (L <= l && l < R) {
// l---------r
// L-------R or
// L--------------------R
if (i == GeometricManager.ONE_TO_ONE) {
long d = R - l;
if (R > r) {
d = r - l;
}
objL = p.I2 + l - L;
objR = objL + d;
} else {
objL = p.I2;
objR = p.I2 + ((RectangleNode) p).L_prime;
}
}
// Now we test which context versions this interval [objL, objR) maps to
if (objL != -1 && objR != -1) {
visitor.visit(obj, objL, objR, sm_int);
}
p = p.next;
}
}
}
}
@Override
public int count_new_pts_intervals() {
  // New segments are kept at the head of each chain, so stop at the first
  // already-processed one.
  int total = 0;
  for (GeometricManager gm : new_pts.values()) {
    SegmentNode[] figures = gm.getFigures();
    for (int div = 0; div < GeometricManager.Divisions; ++div) {
      for (SegmentNode seg = figures[div]; seg != null && seg.is_new; seg = seg.next) {
        ++total;
      }
    }
  }
  return total;
}
// -----------------------------------Private Functions---------------------------------------
/**
 * Adds the points-to tuple currently staged in {@code pres} to the geometric manager for
 * {@code obj}, creating the manager on first use.
 *
 * @param code the figure type of the staged tuple
 * @param obj the pointed-to object
 * @return true iff a new figure was actually recorded
 */
private boolean addPointsTo(int code, AllocNode obj) {
  GeometricManager gm = pt_objs.get(obj);
  if (gm == deadManager) {
    // Propagation of this object has been explicitly blocked.
    return false;
  }
  if (gm == null) {
    gm = new GeometricManager();
    pt_objs.put(obj, gm);
  }
  if (gm.addNewFigure(code, pres) == null) {
    return false;
  }
  new_pts.put(obj, gm);
  return true;
}
/**
 * Adds the flow-edge tuple currently staged in {@code pres} to the geometric manager for the edge
 * towards {@code qv}, creating the manager on first use.
 *
 * @param code the figure type of the staged tuple
 * @param qv the target pointer of the flow edge
 * @return true iff a new figure was actually recorded
 */
private boolean addFlowsTo(int code, IVarAbstraction qv) {
  FullSensitiveNode target = (FullSensitiveNode) qv;
  GeometricManager manager = flowto.get(target);
  if (manager == null) {
    manager = new GeometricManager();
    flowto.put(target, manager);
  }
  return manager.addNewFigure(code, pres) != null;
}
private void do_pts_interval_merge() {
for (GeometricManager gm : new_pts.values()) {
gm.mergeFigures(Parameters.max_pts_budget);
}
}
private void do_flow_edge_interval_merge() {
for (GeometricManager gm : flowto.values()) {
gm.mergeFigures(Parameters.max_cons_budget);
}
}
private SegmentNode[] find_flowto(FullSensitiveNode qv) {
GeometricManager im = flowto.get(qv);
return im == null ? null : im.getFigures();
}
private SegmentNode[] find_points_to(AllocNode obj) {
GeometricManager im = pt_objs.get(obj);
return im == null ? null : im.getFigures();
}
/**
* Implement the inference rules when the input points-to figure is a one-to-one mapping.
*/
private static int infer_pts_is_one_to_one(SegmentNode pts, SegmentNode pe, int code) {
long interI, interJ;
// The left-end is the larger one
interI = pe.I1 < pts.I1 ? pts.I1 : pe.I1;
// The right-end is the smaller one
interJ = (pe.I1 + pe.L < pts.I1 + pts.L ? pe.I1 + pe.L : pts.I1 + pts.L);
if (interI < interJ) {
switch (code) {
case GeometricManager.ONE_TO_ONE:
// assignment is a 1-1 mapping
pres.I1 = interI - pe.I1 + pe.I2;
pres.I2 = interI - pts.I1 + pts.I2;
pres.L = interJ - interI;
return GeometricManager.ONE_TO_ONE;
case GeometricManager.MANY_TO_MANY:
// assignment is a many-many mapping
pres.I1 = pe.I2;
pres.I2 = interI - pts.I1 + pts.I2;
pres.L = ((RectangleNode) pe).L_prime;
pres.L_prime = interJ - interI;
return GeometricManager.MANY_TO_MANY;
}
}
return GeometricManager.Undefined_Mapping;
}
/**
* Implement the inference rules when the input points-to figure is a many-to-many mapping.
*/
private static int infer_pts_is_many_to_many(RectangleNode pts, SegmentNode pe, int code) {
long interI, interJ;
// The left-end is the larger one
interI = pe.I1 < pts.I1 ? pts.I1 : pe.I1;
// The right-end is the smaller one
interJ = (pe.I1 + pe.L < pts.I1 + pts.L ? pe.I1 + pe.L : pts.I1 + pts.L);
if (interI < interJ) {
switch (code) {
case GeometricManager.ONE_TO_ONE:
// assignment is a 1-1 mapping
pres.I1 = interI - pe.I1 + pe.I2;
pres.I2 = pts.I2;
pres.L = interJ - interI;
pres.L_prime = pts.L_prime;
break;
case GeometricManager.MANY_TO_MANY:
// assignment is a many-many mapping
pres.I1 = pe.I2;
pres.I2 = pts.I2;
pres.L = ((RectangleNode) pe).L_prime;
pres.L_prime = pts.L_prime;
break;
}
return GeometricManager.MANY_TO_MANY;
}
return GeometricManager.Undefined_Mapping;
}
/**
 * Implements the pointer assignment inference rules. {@code pts} and {@code pe} are the points-to
 * tuple and the flow edge; the static field {@code pres} receives the computed result figure.
 * {@code code} encodes the figure type of {@code pts} in its high byte and of {@code pe} in its
 * low byte.
 *
 * @return true iff the inferred tuple was newly added to qn's points-to set
 */
private static boolean reasonAndPropagate(FullSensitiveNode qn, AllocNode obj, SegmentNode pts, SegmentNode pe, int code) {
  int ret_type = GeometricManager.Undefined_Mapping;
  switch (code >> 8) {
    case GeometricManager.ONE_TO_ONE:
      // points-to is a 1-1 mapping
      ret_type = infer_pts_is_one_to_one(pts, pe, code & 255);
      break;
    case GeometricManager.MANY_TO_MANY:
      // points-to is a many-many mapping
      ret_type = infer_pts_is_many_to_many((RectangleNode) pts, pe, code & 255);
      break;
  }
  if (ret_type != GeometricManager.Undefined_Mapping) {
    return qn.addPointsTo(ret_type, obj);
  }
  return false;
}
/**
* The last parameter code can only be 1-1 and many-1
*/
private static boolean instantiateLoadConstraint(FullSensitiveNode objn, FullSensitiveNode qn, SegmentNode pts, int code) {
int ret_type = GeometricManager.Undefined_Mapping;
if ((code >> 8) == GeometricManager.ONE_TO_ONE) {
// assignment is a 1-1 mapping
pres.I1 = pts.I2;
pres.I2 = pts.I1;
switch (code & 255) {
case GeometricManager.ONE_TO_ONE:
// points-to is a 1-1 mapping
pres.L = pts.L;
ret_type = GeometricManager.ONE_TO_ONE;
break;
case GeometricManager.MANY_TO_MANY:
// points-to is a many-many mapping
pres.L = ((RectangleNode) pts).L_prime;
pres.L_prime = pts.L;
ret_type = GeometricManager.MANY_TO_MANY;
break;
}
} else {
// The target pointer must be a global, in JIMPLE's case
pres.I1 = pts.I2;
pres.I2 = 1;
pres.L_prime = 1;
switch (code & 255) {
case GeometricManager.ONE_TO_ONE:
// points-to is a 1-1 mapping or 1-many mapping
pres.L = pts.L;
ret_type = GeometricManager.MANY_TO_MANY;
break;
case GeometricManager.MANY_TO_MANY:
// points-to is a many-many mapping
pres.L = ((RectangleNode) pts).L_prime;
ret_type = GeometricManager.MANY_TO_MANY;
break;
}
}
return objn.addFlowsTo(ret_type, qn);
}
// code can only be 1-1 and 1-many
private static boolean instantiateStoreConstraint(FullSensitiveNode qn, FullSensitiveNode objn, SegmentNode pts,
int code) {
int ret_type = GeometricManager.Undefined_Mapping;
if ((code >> 8) == GeometricManager.ONE_TO_ONE) {
// assignment is a 1-1 mapping
pres.I1 = pts.I1;
pres.I2 = pts.I2;
pres.L = pts.L;
switch (code & 255) {
case GeometricManager.ONE_TO_ONE:
// points-to is a 1-1 mapping
ret_type = GeometricManager.ONE_TO_ONE;
break;
case GeometricManager.MANY_TO_MANY:
// points-to is a many-many mapping
pres.L_prime = ((RectangleNode) pts).L_prime;
ret_type = GeometricManager.MANY_TO_MANY;
break;
}
} else {
// The source pointer must be a global, in JIMPLE's case
pres.I1 = 1;
pres.I2 = pts.I2;
pres.L = 1;
switch (code & 255) {
case GeometricManager.ONE_TO_ONE:
// points-to is a 1-1 mapping
pres.L_prime = pts.L;
ret_type = GeometricManager.MANY_TO_MANY;
break;
case GeometricManager.MANY_TO_MANY:
// points-to is a many-many mapping
pres.L_prime = ((RectangleNode) pts).L_prime;
ret_type = GeometricManager.MANY_TO_MANY;
break;
}
}
return qn.addFlowsTo(ret_type, objn);
}
}
| 28,006
| 28.605708
| 125
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/geom/geomE/FullSensitiveNodeGenerator.java
|
package soot.jimple.spark.geom.geomE;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2011 Richard Xiao
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.Iterator;
import soot.jimple.spark.geom.dataRep.CgEdge;
import soot.jimple.spark.geom.dataRep.PlainConstraint;
import soot.jimple.spark.geom.geomPA.Constants;
import soot.jimple.spark.geom.geomPA.DummyNode;
import soot.jimple.spark.geom.geomPA.GeomPointsTo;
import soot.jimple.spark.geom.geomPA.IEncodingBroker;
import soot.jimple.spark.geom.geomPA.IVarAbstraction;
import soot.jimple.spark.pag.AllocNode;
import soot.jimple.spark.pag.FieldRefNode;
import soot.jimple.spark.pag.Node;
import soot.jimple.toolkits.callgraph.Edge;
/**
* Build the initial encoding of the assignment graph in full geometric encoding.
*
* @author xiao
*
*/
public class FullSensitiveNodeGenerator extends IEncodingBroker {
private static final int full_convertor[] = { GeometricManager.ONE_TO_ONE, GeometricManager.MANY_TO_MANY,
GeometricManager.MANY_TO_MANY, GeometricManager.MANY_TO_MANY };
@Override
public void initFlowGraph(GeomPointsTo ptAnalyzer) {
int k;
int n_legal_cons;
int nf1, nf2;
int code;
IVarAbstraction my_lhs, my_rhs;
// Visit all the simple constraints
n_legal_cons = 0;
for (PlainConstraint cons : ptAnalyzer.constraints) {
if (!cons.isActive) {
continue;
}
my_lhs = cons.getLHS().getRepresentative();
my_rhs = cons.getRHS().getRepresentative();
nf1 = ptAnalyzer.getMethodIDFromPtr(my_lhs);
nf2 = ptAnalyzer.getMethodIDFromPtr(my_rhs);
// Test how many globals are in this constraint
code = ((nf1 == Constants.SUPER_MAIN ? 1 : 0) << 1) | (nf2 == Constants.SUPER_MAIN ? 1 : 0);
switch (cons.type) {
case Constants.NEW_CONS:
// We directly add the objects to the points-to set
if (code == 0) {
// the allocation result is assigned to a local variable
my_rhs.add_points_to_3((AllocNode) my_lhs.getWrappedNode(), 1, 1, ptAnalyzer.context_size[nf1]);
} else {
// Assigned to a global or the object itself is a global
my_rhs.add_points_to_4((AllocNode) my_lhs.getWrappedNode(), 1, 1, ptAnalyzer.context_size[nf2],
ptAnalyzer.context_size[nf1]);
}
// Enqueue to the worklist
ptAnalyzer.getWorklist().push(my_rhs);
break;
case Constants.ASSIGN_CONS:
// Assigning between two pointers
if (cons.interCallEdges != null) {
// Inter-procedural assignment (parameter passing, function return)
for (Iterator<Edge> it = cons.interCallEdges.iterator(); it.hasNext();) {
Edge sEdge = it.next();
CgEdge q = ptAnalyzer.getInternalEdgeFromSootEdge(sEdge);
if (q.is_obsoleted == true) {
continue;
}
// Parameter passing or not
if (nf2 == q.t) {
/*
* The receiver must be a local, while the sender is perhaps not (e.g. for handling reflection, see class
* PAG)
*/
// Handle the special case first
// In that case, nf1 is SUPER_MAIN.
if (nf1 == Constants.SUPER_MAIN) {
my_lhs.add_simple_constraint_4(my_rhs, 1, q.map_offset, 1, ptAnalyzer.max_context_size_block[q.s]);
} else {
// nf1 == q.s
// We should treat the self recursive calls specially
if (q.s == q.t) {
my_lhs.add_simple_constraint_3(my_rhs, 1, 1, ptAnalyzer.context_size[nf1]);
} else {
for (k = 0; k < ptAnalyzer.block_num[nf1]; ++k) {
my_lhs.add_simple_constraint_3(my_rhs, k * ptAnalyzer.max_context_size_block[nf1] + 1, q.map_offset,
ptAnalyzer.max_context_size_block[nf1]);
}
}
}
} else {
// nf2 == q.s
// Return value
// Both are locals
if (q.s == q.t) {
// Self-recursive calls may fall here, we handle them properly
my_lhs.add_simple_constraint_3(my_rhs, 1, 1, ptAnalyzer.context_size[nf2]);
} else {
for (k = 0; k < ptAnalyzer.block_num[nf2]; ++k) {
my_lhs.add_simple_constraint_3(my_rhs, q.map_offset, k * ptAnalyzer.max_context_size_block[nf2] + 1,
ptAnalyzer.max_context_size_block[nf2]);
}
}
}
}
} else {
// Intra-procedural assignment
// And, the assignments involving the global variables go here. By our definition, the global variables belong to
// SUPER_MAIN.
// And according to the Jimple IR, not both sides are global variables
if (code == 0) {
// local to local assignment
my_lhs.add_simple_constraint_3(my_rhs, 1, 1, ptAnalyzer.context_size[nf1]);
} else {
my_lhs.add_simple_constraint_4(my_rhs, 1, 1, ptAnalyzer.context_size[nf1], ptAnalyzer.context_size[nf2]);
}
}
break;
case Constants.LOAD_CONS:
// lhs is always a local
// rhs = lhs.f
cons.code = full_convertor[code];
cons.otherSide = my_rhs;
my_lhs.put_complex_constraint(cons);
break;
case Constants.STORE_CONS:
// rhs is always a local
// rhs.f = lhs
cons.code = full_convertor[code];
cons.otherSide = my_lhs;
my_rhs.put_complex_constraint(cons);
break;
default:
throw new RuntimeException("Invalid type");
}
++n_legal_cons;
}
ptAnalyzer.ps.printf("Only %d (%.1f%%) constraints are needed for this run.\n", n_legal_cons,
((double) n_legal_cons / ptAnalyzer.n_init_constraints) * 100);
}
@Override
public String getSignature() {
return Constants.geomE;
}
@Override
public IVarAbstraction generateNode(Node vNode) {
  // Allocation sites and field references get lightweight placeholder nodes;
  // every other pointer is modeled with full context sensitivity.
  if (vNode instanceof AllocNode || vNode instanceof FieldRefNode) {
    return new DummyNode(vNode);
  }
  return new FullSensitiveNode(vNode);
}
}
| 7,199
| 34.643564
| 125
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/geom/geomE/GeometricManager.java
|
package soot.jimple.spark.geom.geomE;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2011 Richard Xiao
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.jimple.spark.geom.dataRep.RectangleNode;
import soot.jimple.spark.geom.dataRep.SegmentNode;
import soot.jimple.spark.geom.geomPA.IFigureManager;
/**
* This class implements the figure manager.
*
* Currently, we apply a naive management strategy: For each type of object, we maintain a linked list. If we insert a new
* object, we don't test if all the geometric objects on the plane together can cover the new object. Instead, we test if
* there is one object already covers the new object.
*
* @author xiao
*
*/
public class GeometricManager extends IFigureManager {
  public static final int Divisions = 2;

  // The type ID for different figures
  public static final int ONE_TO_ONE = 0;
  public static final int MANY_TO_MANY = 1;
  public static final int Undefined_Mapping = -1;

  // Private fields for each instance
  // header[i] is the head of a singly linked list of figures of type i; size[i] is its length
  private SegmentNode header[] = { null, null };
  private int size[] = { 0, 0 };
  // True iff at least one figure was inserted since the last flush()
  private boolean hasNewFigure = false;

  public SegmentNode[] getFigures() {
    return header;
  }

  public int[] getSizes() {
    return size;
  }

  public boolean isThereUnprocessedFigures() {
    return hasNewFigure;
  }

  /**
   * Remove the new labels for all the figures.
   */
  public void flush() {
    hasNewFigure = false;
    for (int i = 0; i < Divisions; ++i) {
      SegmentNode p = header[i];
      // New figures are always prepended, so they form a prefix of the list;
      // we can stop at the first non-new node
      while (p != null && p.is_new) {
        p.is_new = false;
        p = p.next;
      }
    }
  }

  /**
   * Insert a new figure into this manager if it is not covered by any existing figure.
   *
   * @param code
   *          the figure type, ONE_TO_ONE or MANY_TO_MANY
   * @param pnew
   *          the candidate figure (a segment is also passed in as a rectangle)
   * @return the inserted copy, or null if the candidate was redundant
   */
  public SegmentNode addNewFigure(int code, RectangleNode pnew) {
    SegmentNode p;
    // We first check if there is an existing object contains this new object
    if (checkRedundancy(code, pnew)) {
      return null;
    }
    // Oppositely, we check if any existing objects are obsoleted
    filterOutDuplicates(code, pnew);
    // Ok, now we generate a copy
    if (code == GeometricManager.ONE_TO_ONE) {
      p = getSegmentNode();
      p.copySegment(pnew);
    } else {
      p = getRectangleNode();
      ((RectangleNode) p).copyRectangle(pnew);
    }
    hasNewFigure = true;
    p.next = header[code];
    header[code] = p;
    size[code]++;
    return p;
  }

  /**
   * Merge the set of objects in the same category into one bounding figure when the
   * per-category budget is exceeded.
   *
   * @param budget_size
   *          the maximum number of figures tolerated per category before merging
   */
  public void mergeFigures(int budget_size) {
    RectangleNode p;
    // We don't merge the figures if there are no new figures in this geometric manager
    if (!hasNewFigure) {
      return;
    }
    for (int i = 0; i < Divisions; ++i) {
      p = null;
      if (size[i] > budget_size && header[i].is_new) {
        // Merging is finding the bounding rectangles for every type of objects
        switch (i) {
          case GeometricManager.ONE_TO_ONE:
            p = mergeOneToOne();
            break;
          case GeometricManager.MANY_TO_MANY:
            p = mergeManyToMany();
            break;
        }
      }
      if (p != null) {
        if (i == GeometricManager.ONE_TO_ONE) {
          // The bound of the segments is a rectangle, so it competes with the
          // existing rectangles: drop it if covered, drop the ones it covers
          if (checkRedundancy(GeometricManager.MANY_TO_MANY, p)) {
            continue;
          }
          filterOutDuplicates(GeometricManager.MANY_TO_MANY, p);
        }
        p.next = header[GeometricManager.MANY_TO_MANY];
        header[GeometricManager.MANY_TO_MANY] = p;
        size[GeometricManager.MANY_TO_MANY]++;
      }
    }
  }

  /**
   * The lines that are included in some rectangles can be deleted.
   */
  public void removeUselessSegments() {
    SegmentNode p = header[GeometricManager.ONE_TO_ONE];
    SegmentNode q = null;
    int countAll = 0;
    // Rebuild the segment list (in reverse order), keeping only uncovered segments
    while (p != null) {
      SegmentNode temp = p.next;
      if (!isContainedInRectangles(p)) {
        p.next = q;
        q = p;
        ++countAll;
      } else {
        reclaimSegmentNode(p);
      }
      p = temp;
    }
    size[GeometricManager.ONE_TO_ONE] = countAll;
    header[GeometricManager.ONE_TO_ONE] = q;
  }

  /**
   * Is the input line covered by any rectangle?
   *
   * @param pnew,
   *          must be a line
   * @return
   */
  private boolean isContainedInRectangles(SegmentNode pnew) {
    SegmentNode p = header[GeometricManager.MANY_TO_MANY];
    while (p != null) {
      if (pnew.I1 >= p.I1 && pnew.I2 >= p.I2) {
        if ((pnew.I1 + pnew.L) <= (p.I1 + p.L) && (pnew.I2 + pnew.L) <= (p.I2 + ((RectangleNode) p).L_prime)) {
          return true;
        }
      }
      p = p.next;
    }
    return false;
  }

  /**
   * Judge if the newly added geometric shape is redundant, i.e. completely covered by an
   * existing figure of the same or a more general category.
   *
   * @param code
   * @param pnew
   * @return
   */
  private boolean checkRedundancy(int code, RectangleNode pnew) {
    // Expand it temporarily: treat a segment as a degenerate rectangle (L_prime == L)
    if (code == GeometricManager.ONE_TO_ONE) {
      pnew.L_prime = pnew.L;
    }
    // Check redundancy
    for (int i = code; i <= GeometricManager.MANY_TO_MANY; ++i) {
      SegmentNode p = header[i];
      while (p != null) {
        switch (i) {
          case GeometricManager.ONE_TO_ONE:
            if ((p.I2 - p.I1) == (pnew.I2 - pnew.I1)) {
              // Have the same intercept and it is completely contained in an existing segment
              if (pnew.I1 >= p.I1 && (pnew.I1 + pnew.L) <= (p.I1 + p.L)) {
                return true;
              }
            }
            break;

          case GeometricManager.MANY_TO_MANY:
            if (pnew.I1 >= p.I1 && pnew.I2 >= p.I2) {
              if ((pnew.I1 + pnew.L) <= (p.I1 + p.L) && (pnew.I2 + pnew.L_prime) <= (p.I2 + ((RectangleNode) p).L_prime)) {
                return true;
              }
            }
            break;
        }
        p = p.next;
      }
    }
    return false;
  }

  /**
   * Drop the redundant existing objects, i.e. those completely covered by the new figure p.
   *
   * @param code
   * @param p
   */
  private void filterOutDuplicates(int code, SegmentNode p) {
    boolean flag;
    SegmentNode q_head, q_tail;
    SegmentNode pold;
    int countAll;

    for (int i = code; i > -1; --i) {
      pold = header[i];
      q_head = null;
      q_tail = null;
      countAll = 0;
      while (pold != null) {
        flag = false;
        switch (i) {
          case GeometricManager.ONE_TO_ONE:
            if (code == GeometricManager.MANY_TO_MANY) {
              // A segment is covered by the new rectangle
              if (pold.I1 >= p.I1 && pold.I2 >= p.I2) {
                if ((pold.I1 + pold.L) <= (p.I1 + p.L) && (pold.I2 + pold.L) <= (p.I2 + ((RectangleNode) p).L_prime)) {
                  flag = true;
                }
              }
            } else {
              // A segment with the same intercept is covered by the new segment
              if ((p.I2 - p.I1) == (pold.I2 - pold.I1)) {
                if (pold.I1 >= p.I1 && (pold.I1 + pold.L) <= (p.I1 + p.L)) {
                  flag = true;
                }
              }
            }
            break;

          case GeometricManager.MANY_TO_MANY:
            if (pold.I1 >= p.I1 && pold.I2 >= p.I2) {
              if ((pold.I1 + pold.L) <= (p.I1 + p.L)
                  && (pold.I2 + ((RectangleNode) pold).L_prime) <= (p.I2 + ((RectangleNode) p).L_prime)) {
                flag = true;
              }
            }
            break;
        }
        if (!flag) {
          // We keep this figure
          if (q_head == null) {
            q_head = pold;
          } else {
            q_tail.next = pold;
          }
          q_tail = pold;
          ++countAll;
          pold = pold.next;
        } else {
          // We reclaim this figure
          if (i == GeometricManager.ONE_TO_ONE) {
            pold = reclaimSegmentNode(pold);
          } else {
            pold = reclaimRectangleNode(pold);
          }
        }
      }
      if (q_tail != null) {
        q_tail.next = null;
      }
      header[i] = q_head;
      size[i] = countAll;
    }
  }

  /**
   * Find the bounding rectangle for all the rectangle figures. All existing rectangles are
   * reclaimed and the MANY_TO_MANY list is emptied; the caller re-inserts the result.
   *
   * @return
   */
  private RectangleNode mergeManyToMany() {
    long x_min = Long.MAX_VALUE, y_min = Long.MAX_VALUE;
    long x_max = Long.MIN_VALUE, y_max = Long.MIN_VALUE;

    RectangleNode p = (RectangleNode) header[GeometricManager.MANY_TO_MANY];
    header[GeometricManager.MANY_TO_MANY] = null;
    size[GeometricManager.MANY_TO_MANY] = 0;

    while (p != null) {
      if (p.I1 < x_min) {
        x_min = p.I1;
      }
      if (p.I2 < y_min) {
        y_min = p.I2;
      }
      if (p.I1 + p.L > x_max) {
        x_max = p.I1 + p.L;
      }
      if (p.I2 + p.L_prime > y_max) {
        y_max = p.I2 + p.L_prime;
      }
      p = (RectangleNode) reclaimRectangleNode(p);
    }

    // We assume the list has at least one element
    p = getRectangleNode();
    p.I1 = x_min;
    p.I2 = y_min;
    p.L = x_max - x_min;
    p.L_prime = y_max - y_min;
    p.next = null;
    return p;
  }

  /**
   * Find the bounding rectangle for all segment figures. All existing segments are
   * reclaimed and the ONE_TO_ONE list is emptied; the caller re-inserts the result.
   *
   * @return
   */
  private RectangleNode mergeOneToOne() {
    long x_min = Long.MAX_VALUE, y_min = Long.MAX_VALUE;
    long x_max = Long.MIN_VALUE, y_max = Long.MIN_VALUE;

    SegmentNode p = header[GeometricManager.ONE_TO_ONE];
    header[GeometricManager.ONE_TO_ONE] = null;
    size[GeometricManager.ONE_TO_ONE] = 0;

    while (p != null) {
      if (p.I1 < x_min) {
        x_min = p.I1;
      }
      if (p.I2 < y_min) {
        y_min = p.I2;
      }
      if (p.I1 + p.L > x_max) {
        x_max = p.I1 + p.L;
      }
      if (p.I2 + p.L > y_max) {
        y_max = p.I2 + p.L;
      }
      p = reclaimSegmentNode(p);
    }

    RectangleNode q = getRectangleNode();
    q.I1 = x_min;
    q.I2 = y_min;
    q.L = x_max - x_min;
    q.L_prime = y_max - y_min;
    return q;
  }
}
| 10,334
| 24.8375
| 123
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/geom/geomPA/Constants.java
|
package soot.jimple.spark.geom.geomPA;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2011 Richard Xiao
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.RefType;
/**
* Named constants used in the geomPA.
*
* @author xiao
*
*/
public class Constants {
  // Available encodings
  public static final String geomE = "Geom";
  public static final String heapinsE = "HeapIns";
  public static final String ptinsE = "PtIns";

  // Evaluation level
  public static final int eval_nothing = 0;
  public static final int eval_basicInfo = 1;
  public static final int eval_simpleClients = 2;

  // The constants for the constraints type identification
  public static final int NEW_CONS = 0;
  public static final int ASSIGN_CONS = 1;
  public static final int LOAD_CONS = 2;
  public static final int STORE_CONS = 3;
  public static final int FIELD_ADDRESS = 4;

  // The constants for the call graph
  public static final int SUPER_MAIN = 0;
  public static final int UNKNOWN_FUNCTION = -1;

  // The number of contexts that is natively supported by Java (2^63)
  // Using big integer would not bring too many benefits.
  public static final long MAX_CONTEXTS = Long.MAX_VALUE - 1;

  // Some commonly referred to information
  // NOTE: the field name contains a historical typo ("exeception"); it is part of the
  // public API and therefore kept as-is
  public static final RefType exeception_type = RefType.v("java.lang.Throwable");

  // The seed pointers for running constraints distillation
  public static final int seedPts_allUser = 0x0000000f;
  public static final int seedPts_all = 0x7fffffff;

  // Pure constant holder; never instantiated
  private Constants() {
  }
}
| 2,174
| 31.954545
| 81
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/geom/geomPA/DummyNode.java
|
package soot.jimple.spark.geom.geomPA;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2011 Richard Xiao
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.io.PrintStream;
import java.util.Set;
import soot.jimple.spark.geom.dataMgr.PtSensVisitor;
import soot.jimple.spark.geom.dataRep.PlainConstraint;
import soot.jimple.spark.pag.AllocNode;
import soot.jimple.spark.pag.Node;
/**
* It is used to represent the non-pointer variables (e.g. heap variable) in the geometric encoding PAG.
*
* @author xiao
*
*/
public class DummyNode extends IVarAbstraction {

  /**
   * Wraps a SPARK node that is not a pointer (e.g. a heap object or a field reference).
   *
   * @param thisVarNode
   *          the SPARK node represented by this dummy
   */
  public DummyNode(Node thisVarNode) {
    me = thisVarNode;
  }

  // All operations below are deliberate no-ops (or return "nothing" values): a dummy
  // node never participates in points-to propagation, it only carries its SPARK node.

  @Override
  public void deleteAll() {
  }

  @Override
  public boolean add_points_to_3(AllocNode obj, long I1, long I2, long L) {
    // No points-to information is ever stored on a dummy node
    return false;
  }

  @Override
  public boolean add_points_to_4(AllocNode obj, long I1, long I2, long L1, long L2) {
    // No points-to information is ever stored on a dummy node
    return false;
  }

  @Override
  public boolean add_simple_constraint_3(IVarAbstraction qv, long I1, long I2, long L) {
    // Dummy nodes take part in no flow constraints
    return false;
  }

  @Override
  public boolean add_simple_constraint_4(IVarAbstraction qv, long I1, long I2, long L1, long L2) {
    // Dummy nodes take part in no flow constraints
    return false;
  }

  @Override
  public void put_complex_constraint(PlainConstraint cons) {
    // Intentionally ignored
  }

  @Override
  public void reconstruct() {
    // Nothing to rebuild
  }

  @Override
  public void do_before_propagation() {
    // Nothing to prepare
  }

  @Override
  public void do_after_propagation() {
    // Nothing to finalize
  }

  @Override
  public void propagate(GeomPointsTo ptAnalyzer, IWorklist worklist) {
    // Dummy nodes never propagate
  }

  @Override
  public void drop_duplicates() {
    // Nothing stored, nothing to deduplicate
  }

  @Override
  public void remove_points_to(AllocNode obj) {
    // Nothing stored, nothing to remove
  }

  @Override
  public int num_of_diff_objs() {
    // -1 signals "not applicable" for a non-pointer node
    return -1;
  }

  @Override
  public int num_of_diff_edges() {
    // -1 signals "not applicable" for a non-pointer node
    return -1;
  }

  @Override
  public int count_pts_intervals(AllocNode obj) {
    return 0;
  }

  @Override
  public int count_new_pts_intervals() {
    return 0;
  }

  @Override
  public int count_flow_intervals(IVarAbstraction qv) {
    return 0;
  }

  @Override
  public boolean heap_sensitive_intersection(IVarAbstraction qv) {
    // A dummy node aliases nothing
    return false;
  }

  @Override
  public boolean pointer_interval_points_to(long l, long r, AllocNode obj) {
    // A dummy node points to nothing
    return false;
  }

  @Override
  public Set<AllocNode> get_all_points_to_objects() {
    // NOTE(review): callers apparently must guard against a null points-to set here
    return null;
  }

  @Override
  public void print_context_sensitive_points_to(PrintStream outPrintStream) {
    // Nothing to print
  }

  @Override
  public void keepPointsToOnly() {
    // Nothing stored
  }

  @Override
  public void injectPts() {
    // Nothing to inject
  }

  @Override
  public boolean isDeadObject(AllocNode obj) {
    return false;
  }

  @Override
  public void get_all_context_sensitive_objects(long l, long r, PtSensVisitor visitor) {
    // No context-sensitive objects to report
  }
}
| 4,303
| 21.416667
| 104
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/geom/geomPA/FIFO_Worklist.java
|
package soot.jimple.spark.geom.geomPA;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2011 Richard Xiao
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.LinkedList;
/**
* Implementation of the worklist with FIFO strategy.
*
* @author xiao
*
*/
public class FIFO_Worklist implements IWorklist {

  // FIFO queue of pending pointer variables. A node's Qpos field doubles as an
  // "already enqueued" flag, so the queue never contains duplicates.
  Deque<IVarAbstraction> Q = null;

  /**
   * Allocates the backing deque. ArrayDeque is used instead of LinkedList so the
   * size hint (previously ignored) can presize the backing array.
   *
   * @param size
   *          expected number of queued elements
   */
  @Override
  public void initialize(int size) {
    Q = new ArrayDeque<IVarAbstraction>(size < 1 ? 16 : size);
  }

  @Override
  public boolean has_job() {
    return !Q.isEmpty();
  }

  /**
   * Removes and returns the oldest queued node, clearing its in-queue flag.
   */
  @Override
  public IVarAbstraction next() {
    IVarAbstraction t = Q.removeFirst();
    t.Qpos = 0;
    return t;
  }

  /**
   * Enqueues {@code pv} unless it is already queued (Qpos != 0).
   */
  @Override
  public void push(IVarAbstraction pv) {
    if (pv.Qpos == 0) {
      Q.addLast(pv);
      pv.Qpos = 1;
    }
  }

  @Override
  public int size() {
    return Q.size();
  }

  /**
   * Releases the queue so it can be garbage collected.
   */
  @Override
  public void clear() {
    Q = null;
  }
}
| 1,615
| 21.136986
| 71
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/geom/geomPA/GeomPointsTo.java
|
package soot.jimple.spark.geom.geomPA;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2011 Richard Xiao
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.PrintStream;
import java.util.Date;
import java.util.Deque;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.Vector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import soot.Context;
import soot.G;
import soot.Local;
import soot.MethodOrMethodContext;
import soot.PointsToSet;
import soot.RefType;
import soot.Scene;
import soot.SootClass;
import soot.SootField;
import soot.SootMethod;
import soot.Type;
import soot.Unit;
import soot.jimple.InstanceInvokeExpr;
import soot.jimple.Stmt;
import soot.jimple.spark.geom.dataRep.CgEdge;
import soot.jimple.spark.geom.dataRep.PlainConstraint;
import soot.jimple.spark.geom.geomE.FullSensitiveNodeGenerator;
import soot.jimple.spark.geom.heapinsE.HeapInsNodeGenerator;
import soot.jimple.spark.geom.helper.GeomEvaluator;
import soot.jimple.spark.geom.ptinsE.PtInsNodeGenerator;
import soot.jimple.spark.geom.utils.SootInfo;
import soot.jimple.spark.geom.utils.ZArrayNumberer;
import soot.jimple.spark.internal.TypeManager;
import soot.jimple.spark.pag.AllocDotField;
import soot.jimple.spark.pag.AllocNode;
import soot.jimple.spark.pag.ArrayElement;
import soot.jimple.spark.pag.ContextVarNode;
import soot.jimple.spark.pag.FieldRefNode;
import soot.jimple.spark.pag.LocalVarNode;
import soot.jimple.spark.pag.Node;
import soot.jimple.spark.pag.PAG;
import soot.jimple.spark.pag.SparkField;
import soot.jimple.spark.pag.VarNode;
import soot.jimple.spark.sets.EmptyPointsToSet;
import soot.jimple.spark.sets.P2SetVisitor;
import soot.jimple.spark.sets.PointsToSetInternal;
import soot.jimple.toolkits.callgraph.CallGraph;
import soot.jimple.toolkits.callgraph.Edge;
import soot.jimple.toolkits.callgraph.VirtualCalls;
import soot.options.SparkOptions;
import soot.toolkits.scalar.Pair;
import soot.util.NumberedString;
import soot.util.queue.ChunkedQueue;
import soot.util.queue.QueueReader;
/**
* The main interface for the points-to analysis with geometric encodings. Since we need SPARK to bootstrap our analysis,
* thus, we identify ourself to be a subclass of SPARK.
*
* @author xiao
*
*/
public class GeomPointsTo extends PAG {
private static final Logger logger = LoggerFactory.getLogger(GeomPointsTo.class);
// Worklist, the core data structure for fixed point computation
// Other choice, FIFO_Worklist
protected IWorklist worklist = null;
// The generator that is used to generate the internal representations for the
// pointers and objects
protected IEncodingBroker nodeGenerator = null;
// The same type manager used by SPARK
protected TypeManager typeManager = null;
// The offline processing strategies for the constraints
protected OfflineProcessor offlineProcessor = null;
// A table that maps the SPARK nodes to the geometric nodes
public Map<Node, IVarAbstraction> consG = null;
// Stores all the pointers including the instance fields
public ZArrayNumberer<IVarAbstraction> pointers = null;
// Stores all the symbolic objects
public ZArrayNumberer<IVarAbstraction> allocations = null;
// Store all the constraints, initially generated from SPARK
public ZArrayNumberer<PlainConstraint> constraints = null;
// All the callsites that spawn a new thread
public Set<Stmt> thread_run_callsites = null;
// The virtual callsites (and base pointers) that have multiple call targets
public Set<Stmt> multiCallsites = null;
/*
* Context size records the total number of instances for a function. max_context_size_block is the context size of the
* largest block for a function in cycle
*/
public long context_size[], max_context_size_block[];
// Number of context blocks for a function
public int block_num[];
// Analysis statistics
public int max_scc_size, max_scc_id;
public int n_func, n_calls;
public int n_reach_methods, n_reach_user_methods, n_reach_spark_user_methods;
public int n_init_constraints;
// Output options
public String dump_dir = null;
public PrintStream ps = null;
/*
* This container contains the methods that are considered "valid" by user. For example, we want to compare the geometric
* points-to result with 1-obj analysis. They may compute different set of reachable functions due to the different
* precision. To make the comparison fairly, we only evaluate the functions that are reachable in both analyses.
*/
protected Map<String, Boolean> validMethods = null;
// Call graph related components
protected CgEdge call_graph[];
// Only keep the obsoleted call edges decided in the last round
protected Vector<CgEdge> obsoletedEdges = null;
protected Map<Integer, LinkedList<CgEdge>> rev_call_graph = null;
protected Deque<Integer> queue_cg = null;
// Containers used for call graph traversal
protected int vis_cg[], low_cg[], rep_cg[], indeg_cg[], scc_size[];
protected int pre_cnt; // preorder time-stamp for constructing the SCC condensed call graph
// The mappings between Soot functions and call edges to our internal
// representations
protected Map<SootMethod, Integer> func2int = null;
protected Map<Integer, SootMethod> int2func = null;
protected Map<Edge, CgEdge> edgeMapping = null;
// Others
private boolean hasTransformed = false;
// Because we override the points-to query interface for SPARK, we need this
// flag to know how to answer queries
private boolean hasExecuted = false;
// Prepare necessary structures when first time ddSolve is called
private boolean ddPrepared = false;
// -------------------Constructors--------------------
  /**
   * Builds the solver on top of SPARK. Heavy initialization (containers, encoding,
   * worklist) is deferred until {@code parametrize} is called.
   *
   * @param opts
   *          the SPARK command line options, forwarded to the parent PAG
   */
  public GeomPointsTo(final SparkOptions opts) {
    super(opts);
  }
public String toString() {
return "Geometric Points-To Analysis";
}
/**
* Data structures that only specific to geometric solver are created here. The initialized container sizes are empirically
* chosen from the primes. We believe most of the machine today can afford the memory overhead.
*/
private void prepareContainers() {
// All kinds of variables
consG = new HashMap<Node, IVarAbstraction>(39341);
// Only the pointer variables
pointers = new ZArrayNumberer<IVarAbstraction>(25771);
// Only the heap variables
allocations = new ZArrayNumberer<IVarAbstraction>();
// The constraints extracted from code
constraints = new ZArrayNumberer<PlainConstraint>(25771);
// The statements that fork a new thread
thread_run_callsites = new HashSet<Stmt>(251);
// The virtual callsites that have multiple call targets
multiCallsites = new HashSet<Stmt>(251);
// The fake virtual call edges created by SPARK
// obsoletedEdges = new Vector<CgEdge>(4021);
// A linkedlist used for traversing the call graph
queue_cg = new LinkedList<Integer>();
// Containers for functions and call graph edges
func2int = new HashMap<SootMethod, Integer>(5011);
int2func = new HashMap<Integer, SootMethod>(5011);
edgeMapping = new HashMap<Edge, CgEdge>(19763);
consG.clear();
constraints.clear();
func2int.clear();
edgeMapping.clear();
}
/**
* Using the user specified arguments to parameterize the geometric points-to solver.
*
* @param spark_run_time
*/
public void parametrize(double spark_run_time) {
// We first setup the encoding methodology
int solver_encoding = opts.geom_encoding();
if (solver_encoding == SparkOptions.geom_encoding_Geom) {
nodeGenerator = new FullSensitiveNodeGenerator();
} else if (solver_encoding == SparkOptions.geom_encoding_HeapIns) {
nodeGenerator = new HeapInsNodeGenerator();
} else if (solver_encoding == SparkOptions.geom_encoding_PtIns) {
nodeGenerator = new PtInsNodeGenerator();
}
String encoding_name = nodeGenerator.getSignature();
if (encoding_name == null) {
throw new RuntimeException("No encoding given for geometric points-to analysis.");
}
if (nodeGenerator == null) {
throw new RuntimeException("The encoding " + encoding_name + " is unavailable for geometric points-to analysis.");
}
// Then, we set the worklist
switch (opts.geom_worklist()) {
case SparkOptions.geom_worklist_FIFO:
worklist = new FIFO_Worklist();
break;
case SparkOptions.geom_worklist_PQ:
worklist = new PQ_Worklist();
break;
}
// We dump the processing statistics to an external file if needed by the user
dump_dir = opts.geom_dump_verbose();
File dir = null;
if (!dump_dir.isEmpty()) {
// We create a new folder and put all the dump files in that folder
dir = new File(dump_dir);
if (!dir.exists()) {
dir.mkdirs();
}
// We create the log file
File log_file = new File(dump_dir, encoding_name + (opts.geom_blocking() == true ? "_blocked" : "_unblocked") + "_frac"
+ opts.geom_frac_base() + "_runs" + opts.geom_runs() + "_log.txt");
try {
ps = new PrintStream(log_file);
logger.debug("" + "[Geom] Analysis log can be found in: " + log_file.toString());
} catch (FileNotFoundException e) {
String msg = "[Geom] The dump file: " + log_file.toString() + " cannot be created. Abort.";
logger.debug("" + msg);
throw new RuntimeException(msg, e);
}
} else {
ps = G.v().out;
}
// Load the method signatures computed by other points-to analysis
// With these methods, we can compare the points-to results fairly.
String method_verify_file = opts.geom_verify_name();
if (method_verify_file != null) {
try {
FileReader fr = new FileReader(method_verify_file);
java.util.Scanner fin = new java.util.Scanner(fr);
validMethods = new HashMap<String, Boolean>();
while (fin.hasNextLine()) {
validMethods.put(fin.nextLine(), Boolean.FALSE);
}
fin.close();
fr.close();
logger.debug("" + "[Geom] Read in verification file successfully.\n");
} catch (FileNotFoundException e) {
validMethods = null;
} catch (IOException e) {
logger.debug(e.getMessage(), e);
}
}
// Set which pointers will be processed
Parameters.seedPts = opts.geom_app_only() ? Constants.seedPts_allUser : Constants.seedPts_all;
// Output the SPARK running information
double mem = Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory();
ps.println();
ps.printf("[Spark] Time: %.3f s\n", (double) spark_run_time / 1000);
ps.printf("[Spark] Memory: %.1f MB\n", mem / 1024 / 1024);
// Get type manager from SPARK
typeManager = getTypeManager();
// The tunable parameters
Parameters.max_cons_budget = opts.geom_frac_base();
Parameters.max_pts_budget = Parameters.max_cons_budget * 2;
Parameters.cg_refine_times = opts.geom_runs();
if (Parameters.cg_refine_times < 1) {
Parameters.cg_refine_times = 1;
}
// Prepare for the containers
prepareContainers();
// Now we start working
ps.println("[Geom]" + " Start working on <" + (dir == null ? "NoName" : dir.getName()) + "> with <" + encoding_name
+ "> encoding.");
}
/**
* Read in the program facts generated by SPARK. We also construct our own call graph and pointer variables.
*/
  /**
   * Read in the program facts generated by SPARK. We also construct our own call graph and pointer variables.
   *
   * <p>
   * In order: (1) number all reachable methods and rebuild the call graph as CgEdge lists
   * rooted at SUPER_MAIN; (2) wrap SPARK's variable, field and allocation nodes in
   * IVarAbstraction instances; (3) translate SPARK's edges into NEW/ASSIGN/LOAD/STORE
   * constraints; (4) allocate the per-function arrays used by the SCC traversal.
   */
  private void preprocess() {
    int id;
    int s, t;

    // Build the call graph
    // +1 reserves slot 0 for the artificial root SUPER_MAIN
    n_func = Scene.v().getReachableMethods().size() + 1;
    call_graph = new CgEdge[n_func];

    n_calls = 0;
    n_reach_spark_user_methods = 0;
    id = 1;

    // Phase 1: number every reachable method (ids start at 1; 0 is SUPER_MAIN)
    QueueReader<MethodOrMethodContext> smList = Scene.v().getReachableMethods().listener();
    CallGraph soot_callgraph = Scene.v().getCallGraph();

    while (smList.hasNext()) {
      final SootMethod func = smList.next().method();
      func2int.put(func, id);
      int2func.put(id, func);

      /*
       * We cannot identify all entry methods since some entry methods call themselves. In that case, the Soot
       * CallGraph.isEntryMethod() function returns false.
       */
      if (soot_callgraph.isEntryMethod(func) || func.isEntryMethod()) {
        // Link the entry method under the artificial root
        CgEdge p = new CgEdge(Constants.SUPER_MAIN, id, null, call_graph[Constants.SUPER_MAIN]);
        call_graph[Constants.SUPER_MAIN] = p;
        n_calls++;
      }

      if (!func.isJavaLibraryMethod()) {
        ++n_reach_spark_user_methods;
      }

      id++;
    }

    // Next, we scan all the call edges and rebuild the call graph in our own
    // vocabulary
    QueueReader<Edge> edgeList = Scene.v().getCallGraph().listener();
    while (edgeList.hasNext()) {
      Edge edge = edgeList.next();
      if (edge.isClinit()) {
        // Static initializer edges are ignored
        continue;
      }

      SootMethod src_func = edge.src();
      SootMethod tgt_func = edge.tgt();
      s = func2int.get(src_func);
      t = func2int.get(tgt_func);

      // Create a new call edge in our own format
      // Edges are prepended, so call_graph[s] heads s's adjacency list
      CgEdge p = new CgEdge(s, t, edge, call_graph[s]);
      call_graph[s] = p;
      edgeMapping.put(edge, p);

      // We collect callsite information
      Stmt callsite = edge.srcStmt();

      if (edge.isThreadRunCall() || edge.kind().isExecutor() || edge.kind().isAsyncTask()) {
        // We don't modify the treatment to the thread run() calls
        thread_run_callsites.add(callsite);
      } else if (edge.isInstance() && !edge.isSpecial()) {
        // We try to refine the virtual callsites (virtual + interface) with multiple
        // call targets
        InstanceInvokeExpr expr = (InstanceInvokeExpr) callsite.getInvokeExpr();
        if (expr.getMethodRef().getSignature().contains("<java.lang.Thread: void start()>")) {
          // It is a thread start function
          thread_run_callsites.add(callsite);
        } else {
          // Remember the receiver so the callsite can be refined later
          p.base_var = findLocalVarNode(expr.getBase());
          if (SootInfo.countCallEdgesForCallsite(callsite, true) > 1 && p.base_var != null) {
            multiCallsites.add(callsite);
          }
        }
      }

      ++n_calls;
    }

    // We build the wrappers for all the pointers built by SPARK
    for (Iterator<VarNode> it = getVarNodeNumberer().iterator(); it.hasNext();) {
      VarNode vn = it.next();
      IVarAbstraction pn = makeInternalNode(vn);
      pointers.add(pn);
    }

    for (Iterator<AllocDotField> it = getAllocDotFieldNodeNumberer().iterator(); it.hasNext();) {
      AllocDotField adf = it.next();

      // Some allocdotfield is invalid, we check and remove them
      SparkField field = adf.getField();
      if (field instanceof SootField) {
        // This is an instance field of a class
        Type decType = ((SootField) field).getDeclaringClass().getType();
        Type baseType = adf.getBase().getType();
        // baseType must be a sub type of decType
        if (!castNeverFails(baseType, decType)) {
          continue;
        }
      }

      IVarAbstraction pn = makeInternalNode(adf);
      pointers.add(pn);
    }

    for (Iterator<AllocNode> it = getAllocNodeNumberer().iterator(); it.hasNext();) {
      AllocNode obj = it.next();
      IVarAbstraction pn = makeInternalNode(obj);
      allocations.add(pn);
    }

    // Now we extract all the constraints from SPARK
    // The address constraints, new obj -> p
    for (Object object : allocSources()) {
      IVarAbstraction obj = makeInternalNode((AllocNode) object);
      Node[] succs = allocLookup((AllocNode) object);
      for (Node element0 : succs) {
        PlainConstraint cons = new PlainConstraint();
        IVarAbstraction p = makeInternalNode(element0);
        cons.expr.setPair(obj, p);
        cons.type = Constants.NEW_CONS;
        constraints.add(cons);
      }
    }

    // The assign constraints, p -> q
    Pair<Node, Node> intercall = new Pair<Node, Node>();
    for (Object object : simpleSources()) {
      IVarAbstraction p = makeInternalNode((VarNode) object);
      Node[] succs = simpleLookup((VarNode) object);
      for (Node element0 : succs) {
        PlainConstraint cons = new PlainConstraint();
        IVarAbstraction q = makeInternalNode(element0);
        cons.expr.setPair(p, q);
        cons.type = Constants.ASSIGN_CONS;
        // An assignment may correspond to parameter passing / returns; record the
        // call edges it crosses so the constraint can be context-mapped later
        intercall.setPair((VarNode) object, element0);
        cons.interCallEdges = lookupEdgesForAssignment(intercall);
        constraints.add(cons);
      }
    }

    intercall = null;
    assign2edges.clear();

    // The load constraints, p.f -> q
    for (Object object : loadSources()) {
      FieldRefNode frn = (FieldRefNode) object;
      IVarAbstraction p = makeInternalNode(frn.getBase());
      Node[] succs = loadLookup(frn);
      for (Node element0 : succs) {
        PlainConstraint cons = new PlainConstraint();
        IVarAbstraction q = makeInternalNode(element0);
        cons.f = frn.getField();
        cons.expr.setPair(p, q);
        cons.type = Constants.LOAD_CONS;
        constraints.add(cons);
      }
    }

    // The store constraints, p -> q.f
    for (Object object : storeSources()) {
      IVarAbstraction p = makeInternalNode((VarNode) object);
      Node[] succs = storeLookup((VarNode) object);
      for (Node element0 : succs) {
        PlainConstraint cons = new PlainConstraint();
        FieldRefNode frn = (FieldRefNode) element0;
        IVarAbstraction q = makeInternalNode(frn.getBase());
        cons.f = frn.getField();
        cons.expr.setPair(p, q);
        cons.type = Constants.STORE_CONS;
        constraints.add(cons);
      }
    }

    n_init_constraints = constraints.size();

    // Initialize other stuff
    // Per-function working arrays for the SCC-based call graph traversal
    low_cg = new int[n_func];
    vis_cg = new int[n_func];
    rep_cg = new int[n_func];
    indeg_cg = new int[n_func];
    scc_size = new int[n_func];
    block_num = new int[n_func];
    context_size = new long[n_func];
    max_context_size_block = new long[n_func];
  }
/**
* As pointed out by the single entry graph contraction, temporary variables incur high redundancy in points-to relations.
* Find and eliminate the redundancies as early as possible.
*
* Methodology: If q has unique incoming edge p -> q, p and q are both local to the same function, and they have the same
* type, we merge them.
*/
  private void mergeLocalVariables() {
    IVarAbstraction my_lhs, my_rhs;
    Node lhs, rhs;
    // count[id] = number of distinct ways the pointer with that internal id can be assigned
    int[] count = new int[pointers.size()];
    // First pass: we count how many ways a local pointer can be assigned
    for (PlainConstraint cons : constraints) {
      my_lhs = cons.getLHS();
      my_rhs = cons.getRHS();
      switch (cons.type) {
        case Constants.NEW_CONS:
        case Constants.ASSIGN_CONS:
          count[my_rhs.id]++;
          break;
        case Constants.LOAD_CONS:
          // A load can feed the target once per object the base currently points to
          lhs = my_lhs.getWrappedNode();
          count[my_rhs.id] += lhs.getP2Set().size();
          break;
      }
    }
    // Second pass: we delete those constraints that only duplicate points-to
    // information
    for (Iterator<PlainConstraint> cons_it = constraints.iterator(); cons_it.hasNext();) {
      PlainConstraint cons = cons_it.next();
      if (cons.type == Constants.ASSIGN_CONS) {
        my_lhs = cons.getLHS();
        my_rhs = cons.getRHS();
        lhs = my_lhs.getWrappedNode();
        rhs = my_rhs.getWrappedNode();
        if ((lhs instanceof LocalVarNode) && (rhs instanceof LocalVarNode)) {
          SootMethod sm1 = ((LocalVarNode) lhs).getMethod();
          SootMethod sm2 = ((LocalVarNode) rhs).getMethod();
          if (sm1 == sm2 && count[my_rhs.id] == 1 && lhs.getType() == rhs.getType()) {
            // They are local to the same function and the receiver pointer has a unique
            // incoming edge.
            // More importantly, they have the same type, so merging is type-safe.
            my_rhs.merge(my_lhs);
            cons_it.remove();
          }
        }
      }
    }
    // Third pass: rewrite the remaining constraints to refer to the merge representatives
    for (PlainConstraint cons : constraints) {
      my_lhs = cons.getLHS();
      my_rhs = cons.getRHS();
      switch (cons.type) {
        case Constants.NEW_CONS:
          // The LHS of a NEW constraint is an object, never merged
          cons.setRHS(my_rhs.getRepresentative());
          break;
        case Constants.ASSIGN_CONS:
        case Constants.LOAD_CONS:
        case Constants.STORE_CONS:
          cons.setLHS(my_lhs.getRepresentative());
          cons.setRHS(my_rhs.getRepresentative());
          break;
      }
    }
  }
/**
* Using Tarjan's algorithm to contract the SCCs.
*/
  private void callGraphDFS(int s) {
    int t;
    CgEdge p;
    // Tarjan bookkeeping: vis_cg[s] is the DFS discovery stamp, low_cg[s] the low-link value
    vis_cg[s] = low_cg[s] = pre_cnt++;
    queue_cg.addLast(s);
    p = call_graph[s];
    while (p != null) {
      t = p.t;
      if (vis_cg[t] == 0) {
        // Tree edge: recurse, then fold the callee's low-link into ours
        callGraphDFS(t);
        low_cg[s] = Math.min(low_cg[s], low_cg[t]);
      } else {
        // Already visited: fold its discovery stamp into our low-link
        low_cg[s] = Math.min(low_cg[s], vis_cg[t]);
      }
      p = p.next;
    }
    if (low_cg[s] < vis_cg[s]) {
      // s is not an SCC root; it will be popped later by its root
      scc_size[s] = 1;
      return;
    }
    // s is the root of an SCC: pop the whole component off the stack
    // and make s the representative of every member
    scc_size[s] = queue_cg.size();
    do {
      t = queue_cg.getLast();
      queue_cg.removeLast();
      rep_cg[t] = s;
      // Offset the low-link out of the valid stamp range once t's SCC is fixed
      low_cg[t] += n_func;
    } while (s != t);
    scc_size[s] -= queue_cg.size();
    // Track the largest SCC for the statistics printed by encodeContexts()
    if (scc_size[s] > max_scc_size) {
      max_scc_size = scc_size[s];
      max_scc_id = s;
    }
  }
/**
* Build a call graph, merge the SCCs and name the contexts. Also permit clients to decide whether to connect the disjoint
* parts in the call graph or not.
*/
  private void encodeContexts(boolean connectMissedEntries) {
    int i, j;
    int n_reachable = 0, n_scc_reachable = 0;
    int n_full = 0;
    long max_contexts = Long.MIN_VALUE;
    Random rGen = new Random();
    pre_cnt = 1;
    max_scc_size = 1;
    // Reset the per-function DFS/encoding state
    for (i = 0; i < n_func; ++i) {
      vis_cg[i] = 0;
      indeg_cg[i] = 0;
      max_context_size_block[i] = 0;
    }
    // We only consider all the methods which are reachable from SUPER_MAIN
    queue_cg.clear();
    callGraphDFS(Constants.SUPER_MAIN);
    if (connectMissedEntries) {
      // We also scan the rest of the functions so every method gets an SCC assignment
      for (i = Constants.SUPER_MAIN + 1; i < n_func; ++i) {
        if (vis_cg[i] == 0) {
          callGraphDFS(i);
        }
      }
    }
    // Then, we topologically number the contexts starting from the SUPER_MAIN
    // function.
    // We count the in-degree of each SCC representative,
    // and we classify the call edges into SCC (intra-component) / non-SCC edges
    for (i = 0; i < n_func; ++i) {
      if (vis_cg[i] == 0) {
        continue;
      }
      CgEdge p = call_graph[i];
      while (p != null) {
        // An edge whose endpoints share a representative stays inside one SCC;
        // only cross-SCC edges contribute to the in-degree used for the toposort
        if (rep_cg[i] == rep_cg[p.t]) {
          p.scc_edge = true;
        } else {
          p.scc_edge = false;
          ++indeg_cg[rep_cg[p.t]];
        }
        p = p.next;
      }
      // Do simple statistics
      ++n_reachable;
      if (rep_cg[i] == i) {
        ++n_scc_reachable;
      }
    }
    if (connectMissedEntries) {
      // The functions other than SUPER_MAIN that have zero in-degrees are missed
      // entry methods; wire them under SUPER_MAIN so the toposort reaches them
      for (i = Constants.SUPER_MAIN + 1; i < n_func; ++i) {
        int rep_node = rep_cg[i];
        if (indeg_cg[rep_node] == 0) {
          CgEdge p = new CgEdge(Constants.SUPER_MAIN, i, null, call_graph[Constants.SUPER_MAIN]);
          call_graph[Constants.SUPER_MAIN] = p;
          n_calls++;
        }
      }
    }
    // Next, we condense the SCCs: splice each member's edge list onto its
    // representative's list.
    // Later, we have to restore the call graph in order to serve the
    // context sensitive queries
    for (i = 0; i < n_func; ++i) {
      if (vis_cg[i] != 0 && rep_cg[i] != i) {
        // Any node in a SCC must have at least one outgoing edge
        CgEdge p = call_graph[i];
        while (p.next != null) {
          p = p.next;
        }
        p.next = call_graph[rep_cg[i]];
        // Note that, call_graph[i] is not cleared after merging
        call_graph[rep_cg[i]] = call_graph[i];
      }
    }
    // Now, we add all the source nodes to the queue and run the topological pass
    max_context_size_block[Constants.SUPER_MAIN] = 1;
    queue_cg.addLast(Constants.SUPER_MAIN);
    while (!queue_cg.isEmpty()) {
      i = queue_cg.getFirst();
      queue_cg.removeFirst();
      CgEdge p = call_graph[i];
      while (p != null) {
        if (p.scc_edge == false) {
          // Consider the representative only
          j = rep_cg[p.t];
          /*
           * We can control how many contexts created for a specified function. And, for any call edge, we can manually move
           * the mapping interval from caller to callee.
           */
          if (Constants.MAX_CONTEXTS - max_context_size_block[i] < max_context_size_block[j]) {
            // There are more than 2^63 - 1 execution paths, terrible!
            // We have to merge some contexts in order to make the analysis sound!
            // The merging starting context is randomly picked
            long start = rGen.nextLong();
            if (start < 0) {
              start = -start;
            }
            if (start > Constants.MAX_CONTEXTS - max_context_size_block[i]) {
              // We use the last max_context_size_block[i] bits for this mapping
              start = Constants.MAX_CONTEXTS - max_context_size_block[i];
              max_context_size_block[j] = Constants.MAX_CONTEXTS;
            } else {
              if (max_context_size_block[j] < start + max_context_size_block[i]) {
                // We compensate the difference
                max_context_size_block[j] = start + max_context_size_block[i];
              }
            }
            p.map_offset = start + 1;
          } else {
            // Accumulate the contexts: the callee's interval grows by the caller's size
            p.map_offset = max_context_size_block[j] + 1;
            max_context_size_block[j] += max_context_size_block[i];
          }
          // Add to the worklist once all cross-SCC predecessors are processed
          if (--indeg_cg[j] == 0) {
            queue_cg.addLast(j);
          }
        } else {
          // 0-CFA modeling for the SCC, the default mode
          p.map_offset = 1;
        }
        p = p.next;
      }
      if (max_context_size_block[i] > max_contexts) {
        max_contexts = max_context_size_block[i];
      }
    }
    // Now we restore the call graph (undo the SCC edge-list splicing above)
    for (i = n_func - 1; i > -1; --i) {
      if (vis_cg[i] == 0) {
        continue;
      }
      if (rep_cg[i] != i) {
        // All nodes in the same SCC have the same number of contexts
        max_context_size_block[i] = max_context_size_block[rep_cg[i]];
        // Put all the call edges back
        CgEdge p = call_graph[i];
        while (p.next.s == i) {
          // p.next.s may not be i because it would be linked to another scc member
          p = p.next;
        }
        call_graph[rep_cg[i]] = p.next;
        p.next = null;
      }
      if (max_context_size_block[i] == Constants.MAX_CONTEXTS) {
        ++n_full;
      }
      context_size[i] = max_context_size_block[i];
      block_num[i] = 1;
    }
    // Now we apply the blocking scheme if necessary
    // The implementation is slightly different from our paper (the non-SCC edges
    // are not moved, they still use their current context mappings)
    if (getOpts().geom_blocking()) {
      // We scan all the edges again, and tune the SCC related call edges
      // We don't manipulate the non-SCC edges, because they don't induce problems
      for (i = 0; i < n_func; ++i) {
        if (vis_cg[i] == 0) {
          continue;
        }
        CgEdge p = call_graph[i];
        while (p != null) {
          j = p.t;
          if (j != i // This is not a self-loop, and a self-loop is treated specially in the initial
              // encoding phase
              && p.scc_edge == true) {
            // max_context_size_block[i] == max_context_size_block[j]
            // So, we don't distinguish them
            if (context_size[j] <= Constants.MAX_CONTEXTS - max_context_size_block[i]) {
              p.map_offset = context_size[j] + 1;
              context_size[j] += max_context_size_block[i];
              ++block_num[j];
            } else {
              // We randomly pick a block for reuse (try best to avoid reusing the first
              // block)
              int iBlock = 0;
              if (block_num[j] > 1) {
                iBlock = rGen.nextInt(block_num[j] - 1) + 1;
              }
              p.map_offset = iBlock * max_context_size_block[j] + 1;
            }
          }
          p = p.next;
        }
      }
    }
    // Print debug info
    ps.printf("Reachable Methods = %d, in which #Condensed Nodes = %d, #Full Context Nodes = %d \n", n_reachable - 1,
        n_scc_reachable - 1, n_full);
    ps.printf("Maximum SCC = %d \n", max_scc_size);
    ps.printf("The maximum context size = %e\n", (double) max_contexts);
  }
/**
* We iteratively update the call graph and the constraints list until our demand is satisfied
*/
private void solveConstraints() {
IWorklist ptaList = worklist;
while (ptaList.has_job()) {
IVarAbstraction pn = ptaList.next();
pn.do_before_propagation();
pn.propagate(this, ptaList);
pn.do_after_propagation();
}
}
/**
* Obtain the set of possible call targets at given @param callsite.
*/
private void getCallTargets(IVarAbstraction pn, SootMethod src, Stmt callsite, ChunkedQueue<SootMethod> targetsQueue) {
InstanceInvokeExpr iie = (InstanceInvokeExpr) callsite.getInvokeExpr();
Local receiver = (Local) iie.getBase();
// We first build the set of possible call targets
for (AllocNode an : pn.get_all_points_to_objects()) {
Type type = an.getType();
if (type == null) {
continue;
}
VirtualCalls.v().resolve(type, receiver.getType(), iie.getMethodRef(), src, targetsQueue);
}
}
/**
* Remove unreachable call targets at the virtual callsites using the up-to-date points-to information.
*/
  private int updateCallGraph() {
    // Returns the number of call edges removed in this round (0 means convergence)
    int all_virtual_edges = 0, n_obsoleted = 0;
    CallGraph cg = Scene.v().getCallGraph();
    ChunkedQueue<SootMethod> targetsQueue = new ChunkedQueue<SootMethod>();
    QueueReader<SootMethod> targets = targetsQueue.reader();
    Set<SootMethod> resolvedMethods = new HashSet<SootMethod>();
    // obsoletedEdges.clear();
    // Phase 1: mark edges of multi-target virtual callsites that the new
    // points-to result proves spurious
    for (Iterator<Stmt> csIt = multiCallsites.iterator(); csIt.hasNext();) {
      Stmt callsite = csIt.next();
      Iterator<Edge> edges = cg.edgesOutOf(callsite);
      if (!edges.hasNext()) {
        csIt.remove();
        continue;
      }
      Edge anyEdge = edges.next();
      CgEdge p = edgeMapping.get(anyEdge);
      SootMethod src = anyEdge.src();
      if (!isReachableMethod(src)) {
        // The source method is no longer reachable
        // We remove this callsite from further consideration
        csIt.remove();
        continue;
      }
      if (!edges.hasNext()) {
        // We keep this resolved site for call graph profiling
        continue;
      }
      IVarAbstraction pn = consG.get(p.base_var);
      if (pn != null) {
        pn = pn.getRepresentative();
        // We resolve the call targets with the new points-to result
        getCallTargets(pn, src, callsite, targetsQueue);
        resolvedMethods.clear();
        while (targets.hasNext()) {
          resolvedMethods.add(targets.next());
        }
        // We mark the edges that are proven to be spurious
        while (true) {
          SootMethod tgt = anyEdge.tgt();
          if (!resolvedMethods.contains(tgt) && !anyEdge.kind().isFake()) {
            p = edgeMapping.get(anyEdge);
            p.is_obsoleted = true;
          }
          if (!edges.hasNext()) {
            break;
          }
          anyEdge = edges.next();
        }
      }
    }
    // Phase 2: rebuild each edge list, dropping the marked edges
    for (int i = 1; i < n_func; ++i) {
      // New outgoing edge list is pointed to by q (built in reverse order)
      CgEdge p = call_graph[i];
      CgEdge q = null;
      while (p != null) {
        if (vis_cg[i] == 0) {
          // If this method is unreachable, we delete all its outgoing edges
          p.is_obsoleted = true;
        }
        if (p.base_var != null) {
          ++all_virtual_edges;
        }
        CgEdge temp = p.next;
        if (p.is_obsoleted == false) {
          p.next = q;
          q = p;
        } else {
          // Update the corresponding SOOT call graph
          // ps.println("%%% Remove an call edge: " + p.toString());
          cg.removeEdge(p.sootEdge);
          // We record this obsoleted edge
          // obsoletedEdges.add(p);
          ++n_obsoleted;
        }
        p = temp;
      }
      call_graph[i] = q;
    }
    ps.printf("%d of %d virtual call edges are proved to be spurious.\n", n_obsoleted, all_virtual_edges);
    return n_obsoleted;
  }
/**
* Prepare for the next iteration.
*/
private void prepareNextRun() {
// Clean the context sensitive points-to results for the representative pointers
for (IVarAbstraction pn : pointers) {
if (pn.willUpdate == true) {
pn.reconstruct();
}
}
// Reclaim
System.gc();
}
/**
* Scan the call graph and mark the reachable methods.
*/
private void markReachableMethods() {
int ans = 0;
CgEdge p;
for (int i = 0; i < n_func; ++i) {
vis_cg[i] = 0;
}
queue_cg.clear();
queue_cg.add(Constants.SUPER_MAIN);
vis_cg[Constants.SUPER_MAIN] = 1;
while (queue_cg.size() > 0) {
int s = queue_cg.removeFirst();
p = call_graph[s];
while (p != null) {
int t = p.t;
if (vis_cg[t] == 0) {
queue_cg.add(t);
vis_cg[t] = 1;
++ans;
}
p = p.next;
}
}
n_reach_methods = ans;
// Scan again to remove unreachable methods
ans = 0;
for (int i = 1; i < n_func; ++i) {
SootMethod sm = int2func.get(i);
if (vis_cg[i] == 0) {
func2int.remove(sm);
int2func.remove(i);
} else {
if (!sm.isJavaLibraryMethod()) {
++ans;
}
}
}
n_reach_user_methods = ans;
}
  /**
   * The reversed call graph might be needed when evaluating queries.
   */
private void buildRevCallGraph() {
rev_call_graph = new HashMap<Integer, LinkedList<CgEdge>>();
for (int i = 0; i < n_func; ++i) {
CgEdge p = call_graph[i];
while (p != null) {
LinkedList<CgEdge> list = rev_call_graph.get(p.t);
if (list == null) {
list = new LinkedList<CgEdge>();
rev_call_graph.put(p.t, list);
}
list.add(p);
p = p.next;
}
}
}
/**
* 1. Update the call graph; 2. Eliminate the pointers, objects, and constraints related to the unreachable code.
*/
  private void finalizeInternalData() {
    // Compute the set of reachable functions after the points-to analysis
    markReachableMethods();
    // Clean the unreachable objects: an object allocated in a now-unreachable
    // method is dropped
    for (Iterator<IVarAbstraction> it = allocations.iterator(); it.hasNext();) {
      IVarAbstraction po = it.next();
      AllocNode obj = (AllocNode) po.getWrappedNode();
      SootMethod sm = obj.getMethod();
      if (sm != null && func2int.containsKey(sm) == false) {
        it.remove();
      }
    }
    // Clean the unreachable pointers
    final Vector<AllocNode> removeSet = new Vector<AllocNode>();
    for (Iterator<IVarAbstraction> it = pointers.iterator(); it.hasNext();) {
      IVarAbstraction pn = it.next();
      // Is this pointer obsoleted?
      Node vn = pn.getWrappedNode();
      SootMethod sm = null;
      if (vn instanceof LocalVarNode) {
        sm = ((LocalVarNode) vn).getMethod();
      } else if (vn instanceof AllocDotField) {
        sm = ((AllocDotField) vn).getBase().getMethod();
      }
      if (sm != null) {
        if (func2int.containsKey(sm) == false) {
          // The enclosing method became unreachable: drop all of its state
          pn.deleteAll();
          vn.discardP2Set();
          it.remove();
          continue;
        }
      }
      // Only representatives own points-to results
      if (pn.getRepresentative() != pn) {
        continue;
      }
      removeSet.clear();
      if (pn.hasPTResult()) {
        // We remove the useless shapes or objects
        Set<AllocNode> objSet = pn.get_all_points_to_objects();
        for (Iterator<AllocNode> oit = objSet.iterator(); oit.hasNext();) {
          AllocNode obj = oit.next();
          IVarAbstraction po = consG.get(obj);
          if (!po.reachable() || pn.isDeadObject(obj)) {
            removeSet.add(obj);
          }
        }
        for (AllocNode obj : removeSet) {
          pn.remove_points_to(obj);
        }
        pn.drop_duplicates();
      } else {
        // We also remove unreachable objects for SPARK nodes
        PointsToSetInternal pts = vn.getP2Set();
        pts.forall(new P2SetVisitor() {
          @Override
          public void visit(Node n) {
            IVarAbstraction pan = findInternalNode(n);
            // The removeSet is misused as a contains set: it collects survivors
            if (pan.reachable()) {
              removeSet.add((AllocNode) n);
            }
          }
        });
        // NOTE(review): the old set is not discarded before makeP2Set(); if
        // makeP2Set() returns the existing set, re-adding survivors has no
        // filtering effect — confirm against PointsToSetInternal semantics
        pts = vn.makeP2Set();
        for (AllocNode an : removeSet) {
          pts.add(an);
        }
      }
    }
    // Clean the useless constraints: both sides must stay reachable
    for (Iterator<PlainConstraint> cIt = constraints.iterator(); cIt.hasNext();) {
      PlainConstraint cons = cIt.next();
      IVarAbstraction lhs = cons.getLHS();
      IVarAbstraction rhs = cons.getRHS();
      if (!lhs.reachable() || !rhs.reachable() || getMethodIDFromPtr(lhs) == Constants.UNKNOWN_FUNCTION
          || getMethodIDFromPtr(rhs) == Constants.UNKNOWN_FUNCTION) {
        cIt.remove();
      }
    }
    // We reassign the IDs to the pointers, objects and constraints
    pointers.reassign();
    allocations.reassign();
    constraints.reassign();
  }
/**
* Stuff that is useless for querying is released.
*/
  private void releaseUselessResources() {
    // The offline optimizer is only needed while constraints are being solved
    offlineProcessor.destroy();
    offlineProcessor = null;
    // Drop the cached geometric figures used during propagation
    IFigureManager.cleanCache();
    System.gc();
  }
/**
* Update the reachable methods and SPARK points-to results.
*/
  private void finalizeSootData() {
    // We remove the unreachable functions from Soot internal structures
    Scene.v().releaseReachableMethods();
    // Then we rebuild it from the updated Soot call graph
    Scene.v().getReachableMethods();
    if (!opts.geom_trans()) {
      // We remove the SPARK points-to information for pointers that have geomPTA
      // results (willUpdate = true)
      // At querying time, the SPARK points-to container acts as a query cache
      for (IVarAbstraction pn : pointers) {
        // Keep only the points-to results for representatives
        if (pn != pn.getRepresentative()) {
          continue;
        }
        // Simplify
        if (pn.hasPTResult()) {
          pn.keepPointsToOnly();
          Node vn = pn.getWrappedNode();
          vn.discardP2Set();
        }
      }
    } else {
      // geom_trans is on: copy the context insensitive result back into SPARK
      transformToCIResult();
    }
  }
/**
* For many applications, they only need the context insensitive points-to result. We provide a way to transfer our result
* back to SPARK. After the transformation, we discard the context sensitive points-to information. Therefore, if context
* sensitive queries are needed in future, please call ddSolve() for queried pointers first.
*/
public void transformToCIResult() {
for (IVarAbstraction pn : pointers) {
if (pn.getRepresentative() != pn) {
continue;
}
Node node = pn.getWrappedNode();
node.discardP2Set();
PointsToSetInternal ptSet = node.makeP2Set();
for (AllocNode obj : pn.get_all_points_to_objects()) {
ptSet.add(obj);
}
pn.deleteAll();
}
hasTransformed = true;
}
/**
* The starting point of the geometric points-to analysis engine. This function computes the whole program points-to
* information.
*/
  public void solve() {
    long solve_time = 0, prepare_time = 0;
    long mem;
    int rounds;
    int n_obs;
    // Flush all accumulated outputs
    G.v().out.flush();
    // Collect and process the basic information from SPARK
    preprocess();
    mergeLocalVariables();
    worklist.initialize(pointers.size());
    offlineProcessor = new OfflineProcessor(this);
    IFigureManager.cleanCache();
    int evalLevel = opts.geom_eval();
    GeomEvaluator ge = new GeomEvaluator(this, ps);
    if (evalLevel == Constants.eval_basicInfo) {
      ge.profileSparkBasicMetrics();
    }
    // Start our constraints solving phase
    Date begin = new Date();
    // Main loop: re-run propagation until the call graph stops shrinking
    // (n_obs == 0) or the configured round limit is hit
    for (rounds = 0, n_obs = 1000; rounds < Parameters.cg_refine_times && n_obs > 0; ++rounds) {
      ps.println("\n" + "[Geom] Propagation Round " + rounds + " ==> ");
      // Encode the contexts; missed entry methods are only connected in round 0
      encodeContexts(rounds == 0);
      // Offline processing:
      // substantially use the points-to result for redundancy elimination prior to
      // the analysis
      Date prepare_begin = new Date();
      offlineProcessor.init();
      offlineProcessor.defaultFeedPtsRoutines();
      offlineProcessor.runOptimizations();
      Date prepare_end = new Date();
      prepare_time += prepare_end.getTime() - prepare_begin.getTime();
      if (rounds == 0) {
        if (evalLevel <= Constants.eval_basicInfo) {
          offlineProcessor.releaseSparkMem();
        }
      }
      // Clear the points-to results in previous runs
      prepareNextRun();
      // We construct the initial flow graph
      nodeGenerator.initFlowGraph(this);
      // Solve the constraints
      solveConstraints();
      // We update the call graph and other internal data when the new points-to
      // information is ready
      n_obs = updateCallGraph();
      finalizeInternalData();
    }
    if (rounds < Parameters.cg_refine_times) {
      ps.printf("\nThe points-to information has converged. We stop here.\n");
    }
    Date end = new Date();
    solve_time += end.getTime() - begin.getTime();
    mem = Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory();
    ps.println();
    ps.printf("[Geom] Preprocessing time: %.2f s\n", (double) prepare_time / 1000);
    ps.printf("[Geom] Total time: %.2f s\n", (double) solve_time / 1000);
    ps.printf("[Geom] Memory: %.1f MB\n", (double) (mem) / 1024 / 1024);
    // We perform a set of tests to assess the quality of the points-to results for
    // user pointers
    if (evalLevel != Constants.eval_nothing) {
      ge.profileGeomBasicMetrics(evalLevel > Constants.eval_basicInfo);
      if (evalLevel > Constants.eval_basicInfo) {
        ge.checkCallGraph();
        ge.checkCastsSafety();
        ge.checkAliasAnalysis();
        // ge.estimateHeapDefuseGraph();
      }
    }
    // Make changes available to Soot
    finalizeSootData();
    // Finish
    releaseUselessResources();
    hasExecuted = true;
  }
/**
* The demand-driven mode for precisely computing points-to information for given pointers. Call graph will not be updated
* in this mode.
*
* @param qryNodes:
* the set of nodes that would be refined by geomPA.
*/
  public void ddSolve(Set<Node> qryNodes) {
    long solve_time = 0, prepare_time = 0;
    // The whole-program analysis must have run before any demand-driven query
    if (hasExecuted == false) {
      solve();
    }
    if (ddPrepared == false || offlineProcessor == null) {
      // solve() released the offline processor; rebuild it lazily
      offlineProcessor = new OfflineProcessor(this);
      IFigureManager.cleanCache();
      ddPrepared = true;
      // First time entering into the demand-driven mode
      ps.println();
      ps.println("==> Entering demand-driven mode (experimental).");
    }
    int init_size = qryNodes.size();
    if (init_size == 0) {
      ps.println("Please provide at least one pointer.");
      return;
    }
    // We must not encode the contexts again,
    // otherwise the points-to information is invalid due to context mapping change
    // encodeContexts();
    // We first perform the offline optimizations, seeded with the queried pointers
    Date prepare_begin = new Date();
    offlineProcessor.init();
    offlineProcessor.addUserDefPts(qryNodes);
    offlineProcessor.runOptimizations();
    Date prepare_end = new Date();
    prepare_time += prepare_end.getTime() - prepare_begin.getTime();
    // Run geomPA again
    Date begin = new Date();
    prepareNextRun();
    nodeGenerator.initFlowGraph(this);
    solveConstraints();
    Date end = new Date();
    solve_time += end.getTime() - begin.getTime();
    ps.println();
    ps.printf("[ddGeom] Preprocessing time: %.2f seconds\n", (double) prepare_time / 1000);
    ps.printf("[ddGeom] Main propagation time: %.2f seconds\n", (double) solve_time / 1000);
  }
/**
* We thoroughly delete the geometric points-to result for space saving. Some applications such as those needing the call
* graph only may want to clean the points-to result.
*/
public void cleanResult() {
consG.clear();
pointers.clear();
allocations.clear();
constraints.clear();
func2int.clear();
int2func.clear();
edgeMapping.clear();
hasTransformed = false;
hasExecuted = false;
System.gc();
System.gc();
System.gc();
System.gc();
}
/**
* Keep only the pointers the users are interested in. Just used for reducing memory occupation.
*/
public void keepOnly(Set<IVarAbstraction> usefulPointers) {
Set<IVarAbstraction> reps = new HashSet<IVarAbstraction>();
for (IVarAbstraction pn : usefulPointers) {
reps.add(pn.getRepresentative());
}
usefulPointers.addAll(reps);
reps = null;
for (IVarAbstraction pn : pointers) {
if (!usefulPointers.contains(pn)) {
pn.deleteAll();
}
}
System.gc();
}
  /**
   * Get the internal ID for the given Soot method @param sm
   *
   * @return Constants.UNKNOWN_FUNCTION if the given method is not reachable
   */
public int getIDFromSootMethod(SootMethod sm) {
Integer ans = func2int.get(sm);
return ans == null ? Constants.UNKNOWN_FUNCTION : ans.intValue();
}
/**
* Get soot method from given internal ID @param fid
*
* @return null if such ID is illegal.
*/
  public SootMethod getSootMethodFromID(int fid) {
    // Returns null when fid is not a key of the reachable-method map
    return int2func.get(fid);
  }
/**
* Deciding if the given method represented by @param fid is reachable.
*/
public boolean isReachableMethod(int fid) {
return fid == Constants.UNKNOWN_FUNCTION ? false : vis_cg[fid] != 0;
}
/**
* Deciding if the given method represented by @param sm is reachable.
*/
public boolean isReachableMethod(SootMethod sm) {
int id = getIDFromSootMethod(sm);
return isReachableMethod(id);
}
/**
* Telling if the given method is in the file given by the option "cg.spark geom-verify-name".
*/
public boolean isValidMethod(SootMethod sm) {
if (validMethods != null) {
String sig = sm.toString();
if (!validMethods.containsKey(sig)) {
return false;
}
// We mark this method for future inspection
validMethods.put(sig, Boolean.TRUE);
}
return true;
}
public void outputNotEvaluatedMethods() {
if (validMethods != null) {
ps.println("\nThe following methods are not evaluated because they are unreachable:");
for (Map.Entry<String, Boolean> entry : validMethods.entrySet()) {
if (entry.getValue().equals(Boolean.FALSE)) {
ps.println(entry.getKey());
}
}
ps.println();
}
}
/**
* A replacement of the Scene.v().getReachableMethods.
*
* @return
*/
  public Set<SootMethod> getAllReachableMethods() {
    // The key set of func2int is exactly the set of geomPTA-reachable methods
    return func2int.keySet();
  }
/**
* Get the call edges calling from the method @param fid.
*/
  public CgEdge getCallEgesOutFrom(int fid) {
    // Head of the singly linked outgoing-edge list; iterate via CgEdge.next
    return call_graph[fid];
  }
/**
* Get the call edges calling into the method @param fid.
*/
  public LinkedList<CgEdge> getCallEdgesInto(int fid) {
    if (rev_call_graph == null) {
      // We build the reversed call graph on demand
      buildRevCallGraph();
    }
    // May be null when no caller targets fid
    return rev_call_graph.get(fid);
  }
/**
* Get the index of the enclosing function of the specified node.
*/
public int getMethodIDFromPtr(IVarAbstraction pn) {
SootMethod sm = null;
int ret = Constants.SUPER_MAIN;
Node node = pn.getWrappedNode();
if (node instanceof AllocNode) {
sm = ((AllocNode) node).getMethod();
} else if (node instanceof LocalVarNode) {
sm = ((LocalVarNode) node).getMethod();
} else if (node instanceof AllocDotField) {
sm = ((AllocDotField) node).getBase().getMethod();
}
if (sm != null && func2int.containsKey(sm)) {
int id = func2int.get(sm);
if (vis_cg[id] == 0) {
ret = Constants.UNKNOWN_FUNCTION;
} else {
ret = id;
}
}
return ret;
}
/**
* Transform the SPARK node @param v representation to our representation.
*/
public IVarAbstraction makeInternalNode(Node v) {
IVarAbstraction ret = consG.get(v);
if (ret == null) {
ret = nodeGenerator.generateNode(v);
consG.put(v, ret);
}
return ret;
}
/**
* Find our representation for the SPARK node @param v. We don't create a new node if nothing found.
*/
  public IVarAbstraction findInternalNode(Node v) {
    // Pure lookup: returns null when v has no geomPTA wrapper yet
    return consG.get(v);
  }
/**
* Type compatibility test.
*
* @param src
* @param dst
*/
  public boolean castNeverFails(Type src, Type dst) {
    // Delegates to SPARK's type manager
    return typeManager.castNeverFails(src, dst);
  }
/**
* Get the number of valid pointers currently reachable by geomPTA.
*/
  public int getNumberOfPointers() {
    // Counts both variable pointers and instance-field pointers (see preprocessing)
    return pointers.size();
  }
/**
* Get the number of valid objects current in the container.
*
* @return
*/
  public int getNumberOfObjects() {
    // Number of allocation-site abstractions currently tracked
    return allocations.size();
  }
/**
* Return the number of functions that are reachable by SPARK.
*/
  public int getNumberOfSparkMethods() {
    // n_func is fixed during preprocessing and not shrunk afterwards
    return n_func;
  }
/**
* Return the number of functions that are reachable after the geometric points-to analysis.
*/
  public int getNumberOfMethods() {
    // Recomputed by markReachableMethods() after every propagation round
    return n_reach_methods;
  }
  /**
   * Returns the worklist implementation used by the constraint solver.
   */
  public IWorklist getWorklist() {
    return worklist;
  }
/**
* Obtain the internal representation of an object field.
*/
public IVarAbstraction findInstanceField(AllocNode obj, SparkField field) {
AllocDotField af = findAllocDotField(obj, field);
return consG.get(af);
}
/**
* Obtain or create an internal representation of an object field.
*/
  public IVarAbstraction findAndInsertInstanceField(AllocNode obj, SparkField field) {
    AllocDotField af = findAllocDotField(obj, field);
    IVarAbstraction pn = null;
    if (af == null) {
      // We create a new instance field node w.r.t type compatiblity
      // NOTE(review): unconditional cast assumes field is always a SootField
      // here, unlike the instanceof-guarded sibling code — confirm callers
      Type decType = ((SootField) field).getDeclaringClass().getType();
      Type baseType = obj.getType();
      // baseType must be a sub type of decType
      if (typeManager.castNeverFails(baseType, decType)) {
        af = makeAllocDotField(obj, field);
        pn = makeInternalNode(af);
        pointers.add(pn);
      }
    } else {
      pn = consG.get(af);
    }
    // May be null when the field is not type compatible with the receiver object
    return pn;
  }
/**
* Obtain the edge representation internal to geomPTA.
*/
  public CgEdge getInternalEdgeFromSootEdge(Edge e) {
    // Returns null for Soot call edges unknown to geomPTA
    return edgeMapping.get(e);
  }
public boolean isExceptionPointer(Node v) {
if (v.getType() instanceof RefType) {
SootClass sc = ((RefType) v.getType()).getSootClass();
if (!sc.isInterface()
&& Scene.v().getActiveHierarchy().isClassSubclassOfIncluding(sc, Constants.exeception_type.getSootClass())) {
return true;
}
}
return false;
}
/**
* Given a valid SPARK node, we test if it is still valid after the geometric analysis.
*/
public boolean isValidGeometricNode(Node sparkNode) {
IVarAbstraction pNode = consG.get(sparkNode);
return pNode != null && pNode.reachable();
}
/**
* Is this a Spark or Geom?
*
* @return
*/
  public boolean hasGeomExecuted() {
    // True once solve() has completed
    return hasExecuted;
  }
/**
* Create all output files under the uniform location.
*
* @param file_name
* @return
* @throws FileNotFoundException
*/
  public FileOutputStream createOutputFile(String file_name) throws FileNotFoundException {
    // All dump files are rooted at the configured dump directory
    return new FileOutputStream(new File(dump_dir, file_name));
  }
// --------------------------------------------------------------------------------------------------------
// -------------------------------Soot Standard Points-to Query
// Interface----------------------------------
// --------------------------------------------------------------------------------------------------------
  /**
   * Collects the union of the points-to sets of o.f over every object o in the
   * given base set s, preferring the geomPTA result where one exists.
   */
  private PointsToSetInternal field_p2set(PointsToSet s, final SparkField f) {
    if (!(s instanceof PointsToSetInternal)) {
      throw new RuntimeException("Base pointers must be stored in *PointsToSetInternal*.");
    }
    PointsToSetInternal bases = (PointsToSetInternal) s;
    final PointsToSetInternal ret = getSetFactory().newSet(f.getType(), this);
    bases.forall(new P2SetVisitor() {
      public final void visit(Node n) {
        Node nDotF = ((AllocNode) n).dot(f);
        if (nDotF != null) {
          // nDotF.getP2Set() has been discarded in solve()
          IVarAbstraction pn = consG.get(nDotF);
          if (pn == null || hasTransformed || nDotF.getP2Set() != EmptyPointsToSet.v()) {
            // Fall back to (or reuse) the SPARK-side set
            ret.addAll(nDotF.getP2Set(), null);
            return;
          }
          pn = pn.getRepresentative();
          // PointsToSetInternal ptSet = nDotF.makeP2Set();
          for (AllocNode obj : pn.get_all_points_to_objects()) {
            ret.add(obj);
            // ptSet.add(obj);
          }
        }
      }
    });
    return ret;
  }
  /**
   * Context insensitive points-to query for a local. The SPARK points-to
   * container acts as a cache for the geomPTA answer.
   */
  @Override
  public PointsToSet reachingObjects(Local l) {
    // Fall back to SPARK's result when geomPTA has not run
    if (!hasExecuted) {
      return super.reachingObjects(l);
    }
    LocalVarNode vn = findLocalVarNode(l);
    if (vn == null) {
      return EmptyPointsToSet.v();
    }
    IVarAbstraction pn = consG.get(vn);
    // In case this pointer has no geomPTA result
    // This is perhaps a bug
    if (pn == null) {
      return vn.getP2Set();
    }
    // Return the cached result
    if (hasTransformed || vn.getP2Set() != EmptyPointsToSet.v()) {
      return vn.getP2Set();
    }
    // Compute and cache the result in the SPARK-side container
    pn = pn.getRepresentative();
    PointsToSetInternal ptSet = vn.makeP2Set();
    for (AllocNode obj : pn.get_all_points_to_objects()) {
      ptSet.add(obj);
    }
    return ptSet;
  }
  /*
   * Currently, we only accept one call unit context (1CFA). For querying K-CFA (K > 1), please see
   * GeomQueries.contextsByCallChain
   */
  @Override
  public PointsToSet reachingObjects(Context c, Local l) {
    if (!hasExecuted) {
      return super.reachingObjects(c, l);
    }
    // After transformation the context intervals are gone; also, only a Unit (call site)
    // is accepted as the 1-CFA context here.
    if (hasTransformed || !(c instanceof Unit)) {
      return reachingObjects(l);
    }
    LocalVarNode vn = findLocalVarNode(l);
    if (vn == null) {
      return EmptyPointsToSet.v();
    }
    // Lookup the context sensitive points-to information for this pointer
    IVarAbstraction pn = consG.get(vn);
    if (pn == null) {
      return vn.getP2Set();
    }
    pn = pn.getRepresentative();
    // Obtain the context sensitive points-to result
    SootMethod callee = vn.getMethod();
    Edge e = Scene.v().getCallGraph().findEdge((Unit) c, callee);
    if (e == null) {
      // The call site does not reach the enclosing method; degrade to the CI result.
      return vn.getP2Set();
    }
    // Compute the contexts interval [low, high) induced by this call edge
    CgEdge myEdge = getInternalEdgeFromSootEdge(e);
    if (myEdge == null) {
      return vn.getP2Set();
    }
    long low = myEdge.map_offset;
    long high = low + max_context_size_block[myEdge.s];
    // Lookup the cache
    ContextVarNode cvn = vn.context(c);
    if (cvn != null) {
      PointsToSetInternal ans = cvn.getP2Set();
      if (ans != EmptyPointsToSet.v()) {
        return ans;
      }
    } else {
      // Create a new context sensitive variable
      // The points-to vector is set to empty at start
      cvn = makeContextVarNode(vn, c);
    }
    // Fill the cache with the objects pointed to within the context interval
    PointsToSetInternal ptset = cvn.makeP2Set();
    for (AllocNode an : pn.get_all_points_to_objects()) {
      if (pn.pointer_interval_points_to(low, high, an)) {
        ptset.add(an);
      }
    }
    return ptset;
  }
@Override
public PointsToSet reachingObjects(SootField f) {
if (!hasExecuted) {
return super.reachingObjects(f);
}
if (!f.isStatic()) {
throw new RuntimeException("The parameter f must be a *static* field.");
}
VarNode vn = findGlobalVarNode(f);
if (vn == null) {
return EmptyPointsToSet.v();
}
IVarAbstraction pn = consG.get(vn);
if (pn == null) {
return vn.getP2Set();
}
// Lookup the cache
if (hasTransformed || vn.getP2Set() != EmptyPointsToSet.v()) {
return vn.getP2Set();
}
// We transform and cache the result for the next query
pn = pn.getRepresentative();
PointsToSetInternal ptSet = vn.makeP2Set();
for (AllocNode obj : pn.getRepresentative().get_all_points_to_objects()) {
ptSet.add(obj);
}
return ptSet;
}
@Override
public PointsToSet reachingObjects(PointsToSet s, final SootField f) {
if (!hasExecuted) {
return super.reachingObjects(s, f);
}
return field_p2set(s, f);
}
@Override
public PointsToSet reachingObjects(Local l, SootField f) {
if (!hasExecuted) {
return super.reachingObjects(l, f);
}
return reachingObjects(reachingObjects(l), f);
}
@Override
public PointsToSet reachingObjects(Context c, Local l, SootField f) {
if (!hasExecuted) {
return super.reachingObjects(c, l, f);
}
return reachingObjects(reachingObjects(c, l), f);
}
@Override
public PointsToSet reachingObjectsOfArrayElement(PointsToSet s) {
if (!hasExecuted) {
return super.reachingObjectsOfArrayElement(s);
}
return field_p2set(s, ArrayElement.v());
}
// An extra query interfaces not provided by SPARK
public PointsToSet reachingObjects(AllocNode an, SootField f) {
AllocDotField adf = an.dot(f);
IVarAbstraction pn = consG.get(adf);
// No such pointer seen by SPARK
if (adf == null) {
return EmptyPointsToSet.v();
}
// Not seen by geomPTA
if (pn == null) {
return adf.getP2Set();
}
if (hasTransformed || adf.getP2Set() != EmptyPointsToSet.v()) {
return adf.getP2Set();
}
// We transform and cache the result for the next query
pn = pn.getRepresentative();
PointsToSetInternal ptSet = adf.makeP2Set();
for (AllocNode obj : pn.getRepresentative().get_all_points_to_objects()) {
ptSet.add(obj);
}
return ptSet;
}
}
| 59,576
| 29.119818
| 125
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/geom/geomPA/GeomQueries.java
|
package soot.jimple.spark.geom.geomPA;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2013 Richard Xiao
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.Arrays;
import java.util.LinkedList;
import java.util.Queue;
import soot.Local;
import soot.PointsToSet;
import soot.SootMethod;
import soot.jimple.spark.geom.dataMgr.ContextsCollector;
import soot.jimple.spark.geom.dataMgr.Obj_full_extractor;
import soot.jimple.spark.geom.dataMgr.PtSensVisitor;
import soot.jimple.spark.geom.dataRep.CgEdge;
import soot.jimple.spark.geom.dataRep.IntervalContextVar;
import soot.jimple.spark.geom.dataRep.SimpleInterval;
import soot.jimple.spark.pag.AllocDotField;
import soot.jimple.spark.pag.AllocNode;
import soot.jimple.spark.pag.LocalVarNode;
import soot.jimple.spark.pag.SparkField;
import soot.jimple.spark.pag.VarNode;
import soot.jimple.toolkits.callgraph.Edge;
/**
 * A powerful interface for querying points-to results in many ways. It is an extension of the SPARK standard querying
 * system.
 *
 * @author xiao
 *
 */
public class GeomQueries {
  protected GeomPointsTo geomPTA = null;
  // Call graph information
  protected int n_func;
  // A reduced call graph that does not have SCC internal edges
  protected CgEdge call_graph[];
  // Basic call graph info copied from geomPTA
  protected int vis_cg[], rep_cg[], scc_size[];
  protected int block_num[];
  protected long max_context_size_block[];
  // Topological order of the call graph SCC representative nodes
  protected int top_rank[];
  // Temporary data structures reused across queries
  private boolean prop_initialized = false;
  private Queue<Integer> topQ;
  private int in_degree[];
  private ContextsCollector[] contextsForMethods;
  /**
   * We copy and make a condensed version of the call graph (SCC-internal edges removed).
   *
   * @param geom_pta the finished geometric points-to analysis to query
   */
  public GeomQueries(GeomPointsTo geom_pta) {
    geomPTA = geom_pta;
    n_func = geomPTA.n_func;
    vis_cg = geomPTA.vis_cg;
    rep_cg = geomPTA.rep_cg;
    scc_size = geomPTA.scc_size;
    block_num = geomPTA.block_num;
    max_context_size_block = geomPTA.max_context_size_block;
    // Initialize an empty call graph
    call_graph = new CgEdge[n_func];
    Arrays.fill(call_graph, null);
    // We duplicate a call graph without SCC edges
    in_degree = new int[n_func];
    Arrays.fill(in_degree, 0);
    CgEdge[] raw_call_graph = geomPTA.call_graph;
    for (int i = 0; i < n_func; ++i) {
      if (vis_cg[i] == 0) {
        continue;
      }
      CgEdge p = raw_call_graph[i];
      int rep = rep_cg[i];
      while (p != null) {
        // To speed up context searching, SCC edges are all removed
        if (p.scc_edge == false) {
          CgEdge q = p.duplicate();
          // The non-SCC edge is attached to the SCC representative
          q.next = call_graph[rep];
          call_graph[rep] = q;
          in_degree[rep_cg[q.t]]++;
        }
        p = p.next;
      }
    }
    // We also add the edges dropped in the last round of geomPTA
    // They are needed because the contexts mapping are built with them
    // for (CgEdge p : geomPts.obsoletedEdges) {
    // if ( p.scc_edge == true )
    // continue;
    //
    // // The non-SCC edge is attached to the SCC representative
    // int s = rep_cg[p.s];
    //
    // if ( vis_cg[s] != 0 ) {
    // CgEdge q = p.duplicate();
    // q.next = call_graph[s];
    // call_graph[s] = q;
    // in_degree[ rep_cg[q.t] ]++;
    // }
    // }
  }
  /**
   * Only needed by part of the queries. Therefore, it is called on demand.
   */
  private void prepareIntervalPropagations() {
    if (prop_initialized) {
      return;
    }
    // We layout the nodes hierarchically by topological sorting
    // The topological labels are used for speeding up reachability
    top_rank = new int[n_func];
    Arrays.fill(top_rank, 0);
    topQ = new LinkedList<Integer>();
    topQ.add(Constants.SUPER_MAIN);
    while (!topQ.isEmpty()) {
      int s = topQ.poll();
      CgEdge p = call_graph[s];
      while (p != null) {
        int t = p.t;
        int rep_t = rep_cg[t];
        int w = top_rank[s] + 1;
        if (top_rank[rep_t] < w) {
          top_rank[rep_t] = w;
        }
        if (--in_degree[rep_t] == 0) {
          topQ.add(rep_t);
        }
        p = p.next;
      }
    }
    // Prepare for querying artifacts
    contextsForMethods = new ContextsCollector[n_func];
    for (int i = 0; i < n_func; ++i) {
      ContextsCollector cc = new ContextsCollector();
      cc.setBudget(Parameters.qryBudgetSize);
      contextsForMethods[i] = cc;
    }
    prop_initialized = true;
  }
  /**
   * Retrieve the subgraph from s->target. An edge s->t is included in the subgraph iff target is reachable from t.
   * As a side effect, in_degree counts, for every node, the incident subgraph edges.
   *
   * @param s source method ID
   * @param target target method ID
   * @return true iff target is reachable from s
   */
  protected boolean dfsScanSubgraph(int s, int target) {
    int rep_s = rep_cg[s];
    int rep_target = rep_cg[target];
    if (rep_s == rep_target) {
      return true;
    }
    s = rep_s;
    boolean reachable = false;
    // We only traverse the SCC representatives
    CgEdge p = call_graph[s];
    while (p != null) {
      int t = p.t;
      int rep_t = rep_cg[t];
      if (in_degree[rep_t] != 0 || (top_rank[rep_t] <= top_rank[rep_target] && dfsScanSubgraph(t, target) == true)) {
        in_degree[rep_t]++;
        reachable = true;
      }
      p = p.next;
    }
    return reachable;
  }
  /**
   * Transfers the context interval [L, R) of method s to the context blocks of method t,
   * where s and t belong to the same SCC.
   */
  protected void transferInSCC(int s, int t, long L, long R, ContextsCollector tContexts) {
    if (s == t) {
      if (scc_size[s] == 1) {
        /*
         * If s is not a member of mutually recursive call SCC, it's unnecessary to pollute all blocks of t.
         */
        tContexts.insert(L, R);
        return;
      }
    }
    /*
     * We assume all blocks of target method are reachable for soundness and for simplicity.
     */
    int n_blocks = block_num[t];
    long block_size = max_context_size_block[rep_cg[s]];
    // Compute the offset to the nearest context block for s
    // We use (L - 1) because the contexts are numbered from 1
    long offset = (L - 1) % block_size;
    long ctxtLength = R - L;
    long block_offset = 0;
    long lEnd, rEnd;
    // We iterate all blocks of target method
    for (int i = 0; i < n_blocks; ++i) {
      lEnd = 1 + offset + block_offset;
      rEnd = lEnd + ctxtLength;
      tContexts.insert(lEnd, rEnd);
      block_offset += block_size;
    }
  }
  /**
   * Compute the mapping from interval [L, R) of method start to the intervals of method target. Return true if the mapping
   * is feasible.
   *
   * @param start source method ID
   * @param L left (inclusive) end of the context interval of start
   * @param R right (exclusive) end of the context interval of start
   * @param target target method ID; its intervals accumulate in contextsForMethods[target]
   * @return true iff target is reachable from start
   */
  protected boolean propagateIntervals(int start, long L, long R, int target) {
    // We first identify the subgraph, where all edges in the subgraph lead to the target
    if (!dfsScanSubgraph(start, target)) {
      return false;
    }
    // Now we prepare for iteration
    int rep_start = rep_cg[start];
    int rep_target = rep_cg[target];
    ContextsCollector targetContexts = contextsForMethods[target];
    if (rep_start == rep_target) {
      // Fast path for the special case
      transferInSCC(start, target, L, R, targetContexts);
    } else {
      // We start traversal from the representative method
      transferInSCC(start, rep_start, L, R, contextsForMethods[rep_start]);
      // Start topsort
      topQ.clear();
      topQ.add(rep_start);
      while (!topQ.isEmpty()) {
        // Every function in the queue is representative function
        int s = topQ.poll();
        ContextsCollector sContexts = contextsForMethods[s];
        // Loop over the edges
        CgEdge p = call_graph[s];
        while (p != null) {
          int t = p.t;
          int rep_t = rep_cg[t];
          if (in_degree[rep_t] != 0) {
            // This node has a path to target
            ContextsCollector reptContexts = contextsForMethods[rep_t];
            long block_size = max_context_size_block[s];
            for (SimpleInterval si : sContexts.bars) {
              // Compute the offset within the block for si
              long in_block_offset = (si.L - 1) % block_size;
              long newL = p.map_offset + in_block_offset;
              long newR = si.R - si.L + newL;
              if (rep_t == rep_target) {
                // t and target are in the same SCC
                // We directly transfer this context interval to target
                transferInSCC(t, target, newL, newR, targetContexts);
              } else {
                // We transfer this interval to its SCC representative
                // It might be t == rep_t
                transferInSCC(t, rep_t, newL, newR, reptContexts);
              }
            }
            if (--in_degree[rep_t] == 0 && rep_t != rep_target) {
              topQ.add(rep_t);
            }
          }
          p = p.next;
        }
        sContexts.clear();
      }
    }
    return true;
  }
  /**
   * Answer contexts-go-by query.
   *
   * Usually, users specify the last K paths as the context. We call it k-CFA context. However, k-CFA is too restrictive. In
   * contexts-go-by query, user specifies arbitrary call edge in the call graph. The query searches for all contexts induced
   * by the specified call edge and collect points-to results under these contexts.
   *
   * @param sootEdge:
   *          the specified context edge in soot edge format
   * @param l:
   *          the querying pointer
   * @param visitor:
   *          container for querying result
   * @return false, l does not have points-to information under the contexts induced by the given call edge
   */
  @SuppressWarnings("rawtypes")
  public boolean contextsGoBy(Edge sootEdge, Local l, PtSensVisitor visitor) {
    // Obtain the internal representation of specified context
    CgEdge ctxt = geomPTA.getInternalEdgeFromSootEdge(sootEdge);
    if (ctxt == null || ctxt.is_obsoleted == true) {
      return false;
    }
    // Obtain the internal representation for querying pointer
    LocalVarNode vn = geomPTA.findLocalVarNode(l);
    if (vn == null) {
      // Normally this could not happen, perhaps it's a bug
      return false;
    }
    IVarAbstraction pn = geomPTA.findInternalNode(vn);
    if (pn == null) {
      // This pointer is no longer reachable
      return false;
    }
    pn = pn.getRepresentative();
    if (!pn.hasPTResult()) {
      return false;
    }
    // Obtain the internal representation of the method that encloses the querying pointer
    SootMethod sm = vn.getMethod();
    int target = geomPTA.getIDFromSootMethod(sm);
    if (target == -1) {
      return false;
    }
    // Start call graph traversal
    long L = ctxt.map_offset;
    long R = L + max_context_size_block[rep_cg[ctxt.s]];
    assert L < R;
    visitor.prepare();
    prepareIntervalPropagations();
    if (propagateIntervals(ctxt.t, L, R, target)) {
      // We calculate the points-to results
      ContextsCollector targetContexts = contextsForMethods[target];
      for (SimpleInterval si : targetContexts.bars) {
        assert si.L < si.R;
        pn.get_all_context_sensitive_objects(si.L, si.R, visitor);
      }
      // Reset
      targetContexts.clear();
    }
    visitor.finish();
    return visitor.numOfDiffObjects() != 0;
  }
  /** @deprecated use {@link #contextsGoBy(Edge, Local, PtSensVisitor)} instead */
  @Deprecated
  @SuppressWarnings("rawtypes")
  public boolean contexsByAnyCallEdge(Edge sootEdge, Local l, PtSensVisitor visitor) {
    return contextsGoBy(sootEdge, l, visitor);
  }
  /**
   * Searching the points-to results for field expression such as p.f.
   *
   * @param sootEdge the specified context edge in soot edge format
   * @param l the base pointer
   * @param field the field dereferenced on l
   * @param visitor container for the querying result
   * @return false if l.f has no points-to information under the induced contexts
   */
  @SuppressWarnings("rawtypes")
  public boolean contextsGoBy(Edge sootEdge, Local l, SparkField field, PtSensVisitor visitor) {
    Obj_full_extractor pts_l = new Obj_full_extractor();
    if (contextsGoBy(sootEdge, l, pts_l) == false) {
      return false;
    }
    visitor.prepare();
    for (IntervalContextVar icv : pts_l.outList) {
      AllocNode obj = (AllocNode) icv.var;
      AllocDotField obj_f = geomPTA.findAllocDotField(obj, field);
      if (obj_f == null) {
        continue;
      }
      IVarAbstraction objField = geomPTA.findInternalNode(obj_f);
      if (objField == null) {
        continue;
      }
      long L = icv.L;
      long R = icv.R;
      assert L < R;
      objField.get_all_context_sensitive_objects(L, R, visitor);
    }
    pts_l = null;
    visitor.finish();
    return visitor.numOfDiffObjects() != 0;
  }
  /** @deprecated use {@link #contextsGoBy(Edge, Local, SparkField, PtSensVisitor)} instead */
  @Deprecated
  @SuppressWarnings("rawtypes")
  public boolean contextsByAnyCallEdge(Edge sootEdge, Local l, SparkField field, PtSensVisitor visitor) {
    // Fixed: the field argument was previously dropped, delegating to the field-free
    // overload contextsGoBy(sootEdge, l, visitor) and answering the wrong query.
    return contextsGoBy(sootEdge, l, field, visitor);
  }
  /**
   * Standard K-CFA querying for arbitrary K.
   *
   * @param callEdgeChain:
   *          last K call edges leading to the method that contains l. callEdgeChain[0] is the farthest call edge in the
   *          chain.
   * @param l:
   *          the querying pointer
   * @param visitor:
   *          the querying result container
   * @return false, l does not have points-to information under the given context
   */
  @SuppressWarnings("rawtypes")
  public boolean kCFA(Edge[] callEdgeChain, Local l, PtSensVisitor visitor) {
    // Prepare for initial contexts
    SootMethod firstMethod = callEdgeChain[0].src();
    int firstMethodID = geomPTA.getIDFromSootMethod(firstMethod);
    if (firstMethodID == -1) {
      return false;
    }
    // Obtain the internal representation for querying pointer
    LocalVarNode vn = geomPTA.findLocalVarNode(l);
    if (vn == null) {
      // Normally this could not happen, perhaps it's a bug
      return false;
    }
    IVarAbstraction pn = geomPTA.findInternalNode(vn);
    if (pn == null) {
      // This pointer is no longer reachable
      return false;
    }
    pn = pn.getRepresentative();
    if (!pn.hasPTResult()) {
      return false;
    }
    SootMethod sm = vn.getMethod();
    if (geomPTA.getIDFromSootMethod(sm) == -1) {
      return false;
    }
    // Iterate the call edges and compute the contexts mapping iteratively
    visitor.prepare();
    long L = 1;
    for (int i = 0; i < callEdgeChain.length; ++i) {
      Edge sootEdge = callEdgeChain[i];
      CgEdge ctxt = geomPTA.getInternalEdgeFromSootEdge(sootEdge);
      if (ctxt == null || ctxt.is_obsoleted == true) {
        return false;
      }
      // Following searching procedure works for both methods in SCC and out of SCC
      // with blocking scheme or without blocking scheme
      int caller = geomPTA.getIDFromSootMethod(sootEdge.src());
      // We obtain the block that contains current offset L
      long block_size = max_context_size_block[rep_cg[caller]];
      long in_block_offset = (L - 1) % block_size;
      // Transfer to the target block with the same in-block offset
      L = ctxt.map_offset + in_block_offset;
    }
    long ctxtLength = max_context_size_block[rep_cg[firstMethodID]];
    long R = L + ctxtLength;
    pn.get_all_context_sensitive_objects(L, R, visitor);
    visitor.finish();
    return visitor.numOfDiffObjects() != 0;
  }
  /** @deprecated use {@link #kCFA(Edge[], Local, PtSensVisitor)} instead */
  @Deprecated
  @SuppressWarnings("rawtypes")
  public boolean contextsByCallChain(Edge[] callEdgeChain, Local l, PtSensVisitor visitor) {
    return kCFA(callEdgeChain, l, visitor);
  }
  /**
   * Standard K-CFA querying for field expression.
   *
   * @param callEdgeChain:
   *          callEdgeChain[0] is the farthest call edge in the chain.
   * @param l the base pointer
   * @param field the field dereferenced on l
   * @param visitor the querying result container
   * @return false if l.f has no points-to information under the given context
   */
  @SuppressWarnings("rawtypes")
  public boolean kCFA(Edge[] callEdgeChain, Local l, SparkField field, PtSensVisitor visitor) {
    // We first obtain the points-to information for l
    Obj_full_extractor pts_l = new Obj_full_extractor();
    if (kCFA(callEdgeChain, l, pts_l) == false) {
      return false;
    }
    // We compute the points-to information for l.field
    visitor.prepare();
    for (IntervalContextVar icv : pts_l.outList) {
      AllocNode obj = (AllocNode) icv.var;
      AllocDotField obj_f = geomPTA.findAllocDotField(obj, field);
      if (obj_f == null) {
        continue;
      }
      IVarAbstraction objField = geomPTA.findInternalNode(obj_f);
      if (objField == null) {
        continue;
      }
      long L = icv.L;
      long R = icv.R;
      assert L < R;
      objField.get_all_context_sensitive_objects(L, R, visitor);
    }
    pts_l = null;
    visitor.finish();
    return visitor.numOfDiffObjects() != 0;
  }
  /** @deprecated use {@link #kCFA(Edge[], Local, SparkField, PtSensVisitor)} instead */
  @Deprecated
  @SuppressWarnings("rawtypes")
  public boolean contextsByCallChain(Edge[] callEdgeChain, Local l, SparkField field, PtSensVisitor visitor) {
    return kCFA(callEdgeChain, l, field, visitor);
  }
  /**
   * Are the two pointers an alias with context insensitive points-to information?
   */
  public boolean isAliasCI(Local l1, Local l2) {
    PointsToSet pts1 = geomPTA.reachingObjects(l1);
    PointsToSet pts2 = geomPTA.reachingObjects(l2);
    return pts1.hasNonEmptyIntersection(pts2);
  }
  /**
   * Test if two pointers given in geomPTA form are an alias under any contexts.
   *
   * @param pn1
   *          and @param pn2 cannot be null.
   */
  public boolean isAlias(IVarAbstraction pn1, IVarAbstraction pn2) {
    pn1 = pn1.getRepresentative();
    pn2 = pn2.getRepresentative();
    if (!pn1.hasPTResult() || !pn2.hasPTResult()) {
      // Degrade to the context-insensitive test when geomPTA has no result
      VarNode vn1 = (VarNode) pn1.getWrappedNode();
      VarNode vn2 = (VarNode) pn2.getWrappedNode();
      return isAliasCI((Local) vn1.getVariable(), (Local) vn2.getVariable());
    }
    return pn1.heap_sensitive_intersection(pn2);
  }
  /**
   * Decide if under any contexts, pointers @param l1 and @param l2 can be an alias.
   */
  public boolean isAlias(Local l1, Local l2) {
    // Obtain the internal representation for querying pointers
    LocalVarNode vn1 = geomPTA.findLocalVarNode(l1);
    LocalVarNode vn2 = geomPTA.findLocalVarNode(l2);
    if (vn1 == null || vn2 == null) {
      // Normally this could not happen, perhaps it's a bug
      return false;
    }
    IVarAbstraction pn1 = geomPTA.findInternalNode(vn1);
    IVarAbstraction pn2 = geomPTA.findInternalNode(vn2);
    if (pn1 == null || pn2 == null) {
      return isAliasCI(l1, l2);
    }
    pn1 = pn1.getRepresentative();
    pn2 = pn2.getRepresentative();
    if (!pn1.hasPTResult() || !pn2.hasPTResult()) {
      return isAliasCI(l1, l2);
    }
    return pn1.heap_sensitive_intersection(pn2);
  }
}
| 19,120
| 29.017268
| 124
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/geom/geomPA/IEncodingBroker.java
|
package soot.jimple.spark.geom.geomPA;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2011 Richard Xiao
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.jimple.spark.pag.Node;
/**
 * An abstract class for hiding different encoding methods, e.g. Geom, HeapIns, PtIns.
 *
 * @author xiao
 *
 */
public abstract class IEncodingBroker {
  /**
   * Generate a pointer/object abstraction of the kind appropriate for this encoding.
   *
   * @param v the SPARK node to wrap
   * @return the encoding-specific abstraction for v
   */
  public abstract IVarAbstraction generateNode(Node v);
  /**
   * Build the initial encoding of the pointer assignments and points-to facts.
   */
  public abstract void initFlowGraph(GeomPointsTo ptAnalyzer);
  /**
   * Return the signature of the implemented sub-class; may be useful elsewhere.
   *
   * @return a short identifying string for this encoding
   */
  public abstract String getSignature();
}
| 1,484
| 26.5
| 86
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/geom/geomPA/IFigureManager.java
|
package soot.jimple.spark.geom.geomPA;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2011 Richard Xiao
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.jimple.spark.geom.dataRep.RectangleNode;
import soot.jimple.spark.geom.dataRep.SegmentNode;
/**
 * An interface to standardize the functionality of a figure manager.
 *
 * @author xiao
 *
 */
public abstract class IFigureManager {
  // Internal free-list based memory pools, shared by all figure managers.
  private static SegmentNode segHeader = null;
  private static SegmentNode rectHeader = null;
  /**
   * Obtains a segment node, recycling one from the pool when available.
   *
   * @return a segment node marked as new
   */
  protected static SegmentNode getSegmentNode() {
    if (segHeader == null) {
      return new SegmentNode();
    }
    SegmentNode node = segHeader;
    segHeader = node.next;
    node.next = null;
    node.is_new = true;
    return node;
  }
  /**
   * Obtains a rectangle node, recycling one from the pool when available.
   *
   * @return a rectangle node marked as new
   */
  protected static RectangleNode getRectangleNode() {
    if (rectHeader == null) {
      return new RectangleNode();
    }
    // The rectangle pool only ever receives RectangleNode instances, so this cast is safe.
    RectangleNode node = (RectangleNode) rectHeader;
    rectHeader = node.next;
    node.next = null;
    node.is_new = true;
    return node;
  }
  /**
   * Returns a segment node to the pool.
   *
   * @param p the node to recycle
   * @return the successor of p in its original list
   */
  protected static SegmentNode reclaimSegmentNode(SegmentNode p) {
    SegmentNode rest = p.next;
    p.next = segHeader;
    segHeader = p;
    return rest;
  }
  /**
   * Returns a rectangle node to the pool.
   *
   * @param p the node to recycle
   * @return the successor of p in its original list
   */
  protected static SegmentNode reclaimRectangleNode(SegmentNode p) {
    SegmentNode rest = p.next;
    p.next = rectHeader;
    rectHeader = p;
    return rest;
  }
  /**
   * Drops the pooled nodes so the garbage collector can reclaim them.
   */
  public static void cleanCache() {
    segHeader = null;
    rectHeader = null;
  }
  // Get the information of the figures
  public abstract SegmentNode[] getFigures();
  public abstract int[] getSizes();
  public abstract boolean isThereUnprocessedFigures();
  public abstract void flush();
  // Deal with the figures
  public abstract SegmentNode addNewFigure(int code, RectangleNode pnew);
  public abstract void mergeFigures(int size);
  public abstract void removeUselessSegments();
}
| 3,030
| 22.315385
| 73
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/geom/geomPA/IVarAbstraction.java
|
package soot.jimple.spark.geom.geomPA;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2011 Richard Xiao
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.io.PrintStream;
import java.util.Set;
import soot.SootMethod;
import soot.Type;
import soot.jimple.spark.geom.dataMgr.Obj_full_extractor;
import soot.jimple.spark.geom.dataMgr.PtSensVisitor;
import soot.jimple.spark.geom.dataRep.PlainConstraint;
import soot.jimple.spark.geom.dataRep.RectangleNode;
import soot.jimple.spark.pag.AllocNode;
import soot.jimple.spark.pag.LocalVarNode;
import soot.jimple.spark.pag.Node;
import soot.util.Numberable;
/**
 * Pointer/object representation in geomPTA. This interface defines the operations needed for manipulating a pointer/object.
 *
 * @author xiao
 *
 */
public abstract class IVarAbstraction implements Numberable {
  // A shape manager that has only one all-map-to-all member, representing the context insensitive points-to info
  protected static IFigureManager stubManager = null;
  // This is used to indicate the corresponding object should be removed
  protected static IFigureManager deadManager = null;
  // A temporary rectangle that holds the candidate figure
  protected static RectangleNode pres = null;
  // Corresponding SPARK node
  public Node me;
  // The integer mapping for this node
  public int id = -1;
  // Position in the queue
  public int Qpos = 0;
  // Will we update the points-to information for this node in the geometric analysis?
  // Because of constraints distillation, not all the pointers will be updated.
  public boolean willUpdate = false;
  // top_value: the topological value for this node on the symbolic assignment graph
  // lrf_value: the number of processing times for this pointer
  // top_value will be modified in the offlineProcessor and every pointer has a different value
  public int top_value = 1, lrf_value = 0;
  // union-find tree link; a node is its own parent iff it is a representative
  protected IVarAbstraction parent;
  public IVarAbstraction() {
    parent = this;
  }
  /**
   * Used by ordering the nodes in priority worklist.
   * Least-recently-fired first; topological order breaks ties.
   */
  public boolean lessThan(IVarAbstraction other) {
    if (lrf_value != other.lrf_value) {
      return lrf_value < other.lrf_value;
    }
    return top_value < other.top_value;
  }
  // Union-find root lookup with path compression.
  public IVarAbstraction getRepresentative() {
    return parent == this ? this : (parent = parent.getRepresentative());
  }
  /**
   * Make the variable other be the parent of this variable.
   *
   * NOTE(review): this links *this* node (after path compression) to other's root; if this
   * node is itself a root with children, those children keep the old root — confirm callers
   * only merge representatives.
   *
   * @param other the variable whose representative becomes the new parent
   * @return the new representative
   */
  public IVarAbstraction merge(IVarAbstraction other) {
    getRepresentative();
    parent = other.getRepresentative();
    return parent;
  }
  @Override
  public void setNumber(int number) {
    id = number;
  }
  @Override
  public int getNumber() {
    return id;
  }
  @Override
  public String toString() {
    if (me != null) {
      return me.toString();
    }
    return super.toString();
  }
  /**
   * This pointer/object is reachable if its enclosing method is reachable. Please always call this method to check the
   * status before querying points-to information.
   */
  public boolean reachable() {
    return id != -1;
  }
  /**
   * Test if this pointer currently has points-to result. The result can be computed in the last iteration of geomPTA,
   * although its willUpdate = false this round.
   */
  public boolean hasPTResult() {
    return num_of_diff_objs() != -1;
  }
  /**
   * Returns the wrapped SPARK node.
   */
  public Node getWrappedNode() {
    return me;
  }
  // Type of the underlying SPARK node.
  public Type getType() {
    return me.getType();
  }
  // True iff the wrapped node is a method-local variable.
  public boolean isLocalPointer() {
    return me instanceof LocalVarNode;
  }
  // Enclosing method for local pointers; null for globals/objects.
  public SootMethod enclosingMethod() {
    if (me instanceof LocalVarNode) {
      return ((LocalVarNode) me).getMethod();
    }
    return null;
  }
  // Initiation: install initial points-to tuples and assignment constraints
  public abstract boolean add_points_to_3(AllocNode obj, long I1, long I2, long L);
  public abstract boolean add_points_to_4(AllocNode obj, long I1, long I2, long L1, long L2);
  public abstract boolean add_simple_constraint_3(IVarAbstraction qv, long I1, long I2, long L);
  public abstract boolean add_simple_constraint_4(IVarAbstraction qv, long I1, long I2, long L1, long L2);
  public abstract void put_complex_constraint(PlainConstraint cons);
  public abstract void reconstruct();
  // Points-to facts propagation
  public abstract void do_before_propagation();
  public abstract void do_after_propagation();
  public abstract void propagate(GeomPointsTo ptAnalyzer, IWorklist worklist);
  // Manipulate points-to results
  public abstract void drop_duplicates();
  public abstract void remove_points_to(AllocNode obj);
  public abstract void deleteAll();
  public abstract void keepPointsToOnly();
  public abstract void injectPts();
  // Obtaining points-to information statistics
  /**
   * Return -1 if this pointer does not have points-to information. This function can be used for testing if the pointer has
   * been processed by geomPTA.
   */
  public abstract int num_of_diff_objs();
  public abstract int num_of_diff_edges();
  public abstract int count_pts_intervals(AllocNode obj);
  public abstract int count_new_pts_intervals();
  public abstract int count_flow_intervals(IVarAbstraction qv);
  // Querying procedures
  /**
   * Perform context sensitive alias checking with qv.
   *
   * @param qv the other pointer to intersect with
   * @return true iff the two pointers may point to a common object under some common context
   */
  public abstract boolean heap_sensitive_intersection(IVarAbstraction qv);
  /**
   * Test if the pointer in the context range [l, r) points to object obj.
   *
   * @param l left (inclusive) end of the context interval
   * @param r right (exclusive) end of the context interval
   * @param obj the candidate target object
   * @return true iff obj is pointed to within [l, r)
   */
  public abstract boolean pointer_interval_points_to(long l, long r, AllocNode obj);
  /**
   * Test if the particular object has been obsoleted. It's mainly for points-to developer use.
   *
   * @param obj the object to check
   * @return true iff obj is marked dead
   */
  public abstract boolean isDeadObject(AllocNode obj);
  /**
   * Obtain context insensitive points-to result (by removing contexts).
   *
   * @return the set of all objects this pointer may point to, under any context
   */
  public abstract Set<AllocNode> get_all_points_to_objects();
  /**
   * Given the pointers falling in the context range [l, r), we compute the set of context sensitive objects pointed to by
   * those pointers. This function is designed in visitor pattern.
   *
   * @see Obj_1cfa_extractor
   * @see Obj_full_extractor
   */
  public abstract void get_all_context_sensitive_objects(long l, long r, PtSensVisitor visitor);
  // Debugging facilities
  public abstract void print_context_sensitive_points_to(PrintStream outPrintStream);
}
| 7,185
| 27.975806
| 125
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/geom/geomPA/IWorklist.java
|
package soot.jimple.spark.geom.geomPA;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2011 Richard Xiao
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
/**
* The worklist interface that abstracts the selection strategy.
*
* @author xiao
*
*/
public interface IWorklist {
  /**
   * Prepares the worklist for use. Some worklist implementations may need the
   * initial capacity; others ignore it.
   *
   * @param size a hint for the maximum number of elements
   */
  public void initialize(int size);

  /** @return true if at least one element is queued */
  public boolean has_job();

  /** Removes and returns the next pointer to process, per this list's selection strategy. */
  public IVarAbstraction next();

  /** Enqueues the pointer p (implementations may also re-prioritize an already queued p). */
  public void push(IVarAbstraction p);

  /** @return the number of queued elements */
  public int size();

  /** Removes all queued elements. */
  public void clear();
};
| 1,248
| 23.98
| 71
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/geom/geomPA/OfflineProcessor.java
|
package soot.jimple.spark.geom.geomPA;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2011 Richard Xiao
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.ArrayList;
import java.util.Deque;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.Set;
import soot.SootClass;
import soot.jimple.InstanceInvokeExpr;
import soot.jimple.Stmt;
import soot.jimple.spark.geom.dataRep.PlainConstraint;
import soot.jimple.spark.geom.utils.ZArrayNumberer;
import soot.jimple.spark.pag.AllocNode;
import soot.jimple.spark.pag.GlobalVarNode;
import soot.jimple.spark.pag.LocalVarNode;
import soot.jimple.spark.pag.Node;
import soot.jimple.spark.pag.SparkField;
import soot.jimple.spark.pag.VarNode;
import soot.jimple.spark.sets.P2SetVisitor;
/**
* Implementation of pre-processing algorithms performed prior to the pointer analysis.
*
* Currently supported techniques are: 1. Pointer distillation: the library code that does not impact the application code
* pointers is removed; 2. Pointer ranking for worklist prioritizing.
*
* @author xiao
*
*/
public class OfflineProcessor {
  /** An edge in the offline dependence/impact graph (singly linked adjacency-list node). */
  class off_graph_edge {
    // Start and end of this edge
    int s, t;
    // If this edge is created via complex constraint (e.g. p.f = q), base_var = p
    IVarAbstraction base_var;
    // Next edge in the adjacency list of node s
    off_graph_edge next;
  }

  // Used in anonymous class visitor
  private boolean visitedFlag;

  GeomPointsTo geomPTA;
  // Bijection between pointer ids and pointer abstractions
  ZArrayNumberer<IVarAbstraction> int2var;
  // varGraph.get(i) heads the edge list of the pointer with id i
  ArrayList<off_graph_edge> varGraph;
  // Per-pointer bookkeeping, all indexed by pointer id:
  // pre/low for Tarjan SCC, count for in-degrees,
  // rep/repsize for union-find (representative and set size)
  int pre[], low[], count[], rep[], repsize[];
  Deque<Integer> queue;
  int pre_cnt;
  // Number of pointers under consideration; may shrink between analysis rounds
  int n_var;

  /**
   * Builds a processor over the pointers of the given analysis instance and
   * pre-allocates all per-pointer arrays sized to the current pointer count.
   */
  public OfflineProcessor(GeomPointsTo pta) {
    int2var = pta.pointers;
    int size = int2var.size();
    varGraph = new ArrayList<off_graph_edge>(size);
    queue = new LinkedList<Integer>();
    pre = new int[size];
    low = new int[size];
    count = new int[size];
    rep = new int[size];
    repsize = new int[size];
    geomPTA = pta;
    for (int i = 0; i < size; ++i) {
      varGraph.add(null);
    }
  }

  /**
   * Call it before running the optimizations.
   */
  public void init() {
    // We prepare the essential data structures first
    // The size of the pointers may shrink after each round of analysis
    n_var = int2var.size();
    for (int i = 0; i < n_var; ++i) {
      varGraph.set(i, null);
      int2var.get(i).willUpdate = false;
    }
  }

  /**
   * Marks the default set of "seed" pointers (those whose points-to results must
   * be refined), according to the Parameters.seedPts policy. Callsites with
   * multiple call targets always have their base pointers added as seeds.
   */
  public void defaultFeedPtsRoutines() {
    switch (Parameters.seedPts) {
      case Constants.seedPts_allUser:
        setAllUserCodeVariablesUseful();
        break;

      case Constants.seedPts_all:
        // All pointers will be processed
        for (int i = 0; i < n_var; ++i) {
          IVarAbstraction pn = int2var.get(i);
          if (pn != null && pn.getRepresentative() == pn) {
            pn.willUpdate = true;
          }
        }
        return;
    }

    // We always refine the callsites that have multiple call targets
    Set<Node> multiBaseptrs = new HashSet<Node>();

    for (Stmt callsite : geomPTA.multiCallsites) {
      InstanceInvokeExpr iie = (InstanceInvokeExpr) callsite.getInvokeExpr();
      VarNode vn = geomPTA.findLocalVarNode(iie.getBase());
      multiBaseptrs.add(vn);
    }

    addUserDefPts(multiBaseptrs);
  }

  /**
   * Compute the refined points-to results for specified pointers.
   *
   * @param initVars
   *          SPARK nodes whose representative pointers should be marked as seeds
   */
  public void addUserDefPts(Set<Node> initVars) {
    for (Node vn : initVars) {
      IVarAbstraction pn = geomPTA.findInternalNode(vn);
      if (pn == null) {
        // I don't know where is this pointer
        continue;
      }

      pn = pn.getRepresentative();
      if (pn.reachable()) {
        pn.willUpdate = true;
      }
    }
  }

  /**
   * Discards SPARK points-to sets for representative pointers that geomPTA will
   * recompute, then prompts garbage collection to reclaim the memory.
   */
  public void releaseSparkMem() {
    for (int i = 0; i < n_var; ++i) {
      IVarAbstraction pn = int2var.get(i);
      // Keep only the points-to results for representatives
      if (pn != pn.getRepresentative()) {
        continue;
      }

      if (pn.willUpdate) {
        Node vn = pn.getWrappedNode();
        vn.discardP2Set();
      }
    }

    // Repeated System.gc() calls are a (best-effort) hint to actually reclaim
    // the large SPARK sets before the memory-hungry geometric phase starts.
    System.gc();
    System.gc();
    System.gc();
    System.gc();
  }

  /**
   * Preprocess the pointers and constraints before running geomPA.
   *
   * @param useSpark
   * @param multiCallsites
   */
  public void runOptimizations() {
    /*
     * Optimizations based on the dependence graph.
     */
    buildDependenceGraph();
    distillConstraints();

    /*
     * Optimizations based on the impact graph.
     */
    buildImpactGraph();
    computeWeightsForPts();
  }

  /** Drops all auxiliary arrays and lists so the memory can be reclaimed. */
  public void destroy() {
    pre = null;
    low = null;
    count = null;
    rep = null;
    repsize = null;
    varGraph = null;
    queue = null;
  }

  /**
   * The dependence graph reverses the assignment relations. E.g., p = q => p -> q Note that, the assignments that are
   * eliminated by local variable merging should be used here. Otherwise, the graph would be erroneously disconnected.
   */
  protected void buildDependenceGraph() {
    for (PlainConstraint cons : geomPTA.constraints) {
      // In our constraint representation, lhs -> rhs means rhs = lhs.
      final IVarAbstraction lhs = cons.getLHS();
      final IVarAbstraction rhs = cons.getRHS();
      final SparkField field = cons.f;
      IVarAbstraction rep;

      // Now we use this constraint for graph construction
      switch (cons.type) {

        // rhs = lhs
        case Constants.ASSIGN_CONS:
          add_graph_edge(rhs.id, lhs.id);
          break;

        // rhs = lhs.f
        case Constants.LOAD_CONS: {
          rep = lhs.getRepresentative();

          if (rep.hasPTResult() == false) {
            // Points-to set still lives in SPARK; enumerate it via a visitor
            lhs.getWrappedNode().getP2Set().forall(new P2SetVisitor() {
              @Override
              public void visit(Node n) {
                IVarAbstraction padf = geomPTA.findInstanceField((AllocNode) n, field);
                if (padf == null || padf.reachable() == false) {
                  return;
                }
                off_graph_edge e = add_graph_edge(rhs.id, padf.id);
                e.base_var = lhs;
              }
            });
          } else {
            // Use geom
            for (AllocNode o : rep.get_all_points_to_objects()) {
              IVarAbstraction padf = geomPTA.findInstanceField((AllocNode) o, field);
              if (padf == null || padf.reachable() == false) {
                continue;
              }
              off_graph_edge e = add_graph_edge(rhs.id, padf.id);
              e.base_var = lhs;
            }
          }
        }
          break;

        // rhs.f = lhs
        case Constants.STORE_CONS: {
          rep = rhs.getRepresentative();

          if (rep.hasPTResult() == false) {
            rhs.getWrappedNode().getP2Set().forall(new P2SetVisitor() {
              @Override
              public void visit(Node n) {
                IVarAbstraction padf = geomPTA.findInstanceField((AllocNode) n, field);
                if (padf == null || padf.reachable() == false) {
                  return;
                }
                off_graph_edge e = add_graph_edge(padf.id, lhs.id);
                e.base_var = rhs;
              }
            });
          } else {
            // use geom
            for (AllocNode o : rep.get_all_points_to_objects()) {
              IVarAbstraction padf = geomPTA.findInstanceField((AllocNode) o, field);
              if (padf == null || padf.reachable() == false) {
                continue;
              }
              off_graph_edge e = add_graph_edge(padf.id, lhs.id);
              e.base_var = rhs;
            }
          }
        }
          break;
      }
    }
  }

  /**
   * All the pointers that we need their points-to information are marked.
   *
   * @param virtualBaseSet
   */
  protected void setAllUserCodeVariablesUseful() {
    for (int i = 0; i < n_var; ++i) {
      IVarAbstraction pn = int2var.get(i);
      if (pn != pn.getRepresentative()) {
        continue;
      }

      Node node = pn.getWrappedNode();
      int sm_id = geomPTA.getMethodIDFromPtr(pn);
      if (!geomPTA.isReachableMethod(sm_id)) {
        continue;
      }

      if (node instanceof VarNode) {
        // flag == true if node is defined in the Java library
        boolean defined_in_lib = false;

        if (node instanceof LocalVarNode) {
          defined_in_lib = ((LocalVarNode) node).getMethod().isJavaLibraryMethod();
        } else if (node instanceof GlobalVarNode) {
          SootClass sc = ((GlobalVarNode) node).getDeclaringClass();
          if (sc != null) {
            defined_in_lib = sc.isJavaLibraryClass();
          }
        }

        if (!defined_in_lib && !geomPTA.isExceptionPointer(node)) {
          // Defined in the user code
          pn.willUpdate = true;
        }
      }
    }
  }

  /**
   * Compute a set of pointers that required to refine the seed pointers. Prerequisite: dependence graph
   */
  protected void computeReachablePts() {
    int i;
    IVarAbstraction pn;
    off_graph_edge p;

    // We first collect the initial seeds
    queue.clear();
    for (i = 0; i < n_var; ++i) {
      pn = int2var.get(i);
      if (pn.willUpdate == true) {
        queue.add(i);
      }
    }

    // Worklist based graph traversal
    while (!queue.isEmpty()) {
      i = queue.getFirst();
      queue.removeFirst();

      p = varGraph.get(i);
      while (p != null) {
        // Mark the dependence target ...
        pn = int2var.get(p.t);
        if (pn.willUpdate == false) {
          pn.willUpdate = true;
          queue.add(p.t);
        }

        // ... and the base pointer of a complex (load/store) constraint, if any
        pn = p.base_var;
        if (pn != null && pn.willUpdate == false) {
          pn.willUpdate = true;
          queue.add(pn.id);
        }

        p = p.next;
      }
    }
  }

  /**
   * Eliminate the constraints that do not contribute points-to information to the seed pointers. Prerequisite: dependence
   * graph
   */
  protected void distillConstraints() {
    IVarAbstraction pn;

    // Mark the pointers
    computeReachablePts();

    // Mark the constraints
    for (PlainConstraint cons : geomPTA.constraints) {
      // We only look at the receiver pointers
      pn = cons.getRHS();
      final SparkField field = cons.f;

      visitedFlag = false;

      switch (cons.type) {
        case Constants.NEW_CONS:
        case Constants.ASSIGN_CONS:
        case Constants.LOAD_CONS:
          visitedFlag = pn.willUpdate;
          break;

        case Constants.STORE_CONS:
          /**
           * Interesting point in store constraint p.f = q: For example, pts(p) = { o1, o2 }; If any of the o1.f and the o2.f
           * (e.g. o1.f) will be updated, this constraint should be kept. However, in the points-to analysis, we only assign
           * to o1.f.
           */
          pn = pn.getRepresentative();

          if (pn.hasPTResult() == false) {
            pn.getWrappedNode().getP2Set().forall(new P2SetVisitor() {
              @Override
              public void visit(Node n) {
                if (visitedFlag) {
                  return;
                }
                IVarAbstraction padf = geomPTA.findInstanceField((AllocNode) n, field);
                if (padf == null || padf.reachable() == false) {
                  return;
                }
                visitedFlag |= padf.willUpdate;
              }
            });
          } else {
            // Use the geometric points-to result
            for (AllocNode o : pn.get_all_points_to_objects()) {
              IVarAbstraction padf = geomPTA.findInstanceField((AllocNode) o, field);
              if (padf == null || padf.reachable() == false) {
                continue;
              }
              visitedFlag |= padf.willUpdate;
              if (visitedFlag) {
                break;
              }
            }
          }

          break;
      }

      cons.isActive = visitedFlag;
    }
  }

  /**
   * The dependence graph will be destroyed and the impact graph will be built. p = q means q impacts p. Therefore, we add en
   * edge q -> p in impact graph.
   */
  protected void buildImpactGraph() {
    for (int i = 0; i < n_var; ++i) {
      varGraph.set(i, null);
    }
    queue.clear();

    for (PlainConstraint cons : geomPTA.constraints) {
      if (!cons.isActive) {
        continue;
      }

      final IVarAbstraction lhs = cons.getLHS();
      final IVarAbstraction rhs = cons.getRHS();
      final SparkField field = cons.f;
      IVarAbstraction rep;

      switch (cons.type) {
        case Constants.NEW_CONS:
          // We enqueue the pointers that are allocation result receivers
          queue.add(rhs.id);
          break;

        case Constants.ASSIGN_CONS:
          add_graph_edge(lhs.id, rhs.id);
          break;

        case Constants.LOAD_CONS:
          rep = lhs.getRepresentative();

          if (rep.hasPTResult() == false) {
            lhs.getWrappedNode().getP2Set().forall(new P2SetVisitor() {
              @Override
              public void visit(Node n) {
                IVarAbstraction padf = geomPTA.findInstanceField((AllocNode) n, field);
                if (padf == null || padf.reachable() == false) {
                  return;
                }
                add_graph_edge(padf.id, rhs.id);
              }
            });
          } else {
            // use geomPA
            for (AllocNode o : rep.get_all_points_to_objects()) {
              IVarAbstraction padf = geomPTA.findInstanceField((AllocNode) o, field);
              if (padf == null || padf.reachable() == false) {
                continue;
              }
              add_graph_edge(padf.id, rhs.id);
            }
          }
          break;

        case Constants.STORE_CONS:
          rep = rhs.getRepresentative();

          if (rep.hasPTResult() == false) {
            rhs.getWrappedNode().getP2Set().forall(new P2SetVisitor() {
              @Override
              public void visit(Node n) {
                IVarAbstraction padf = geomPTA.findInstanceField((AllocNode) n, field);
                if (padf == null || padf.reachable() == false) {
                  return;
                }
                add_graph_edge(lhs.id, padf.id);
              }
            });
          } else {
            // use geomPA
            for (AllocNode o : rep.get_all_points_to_objects()) {
              IVarAbstraction padf = geomPTA.findInstanceField((AllocNode) o, field);
              if (padf == null || padf.reachable() == false) {
                continue;
              }
              add_graph_edge(lhs.id, padf.id);
            }
          }
          break;
      }
    }
  }

  /**
   * Prepare for a near optimal worklist selection strategy inspired by Ben's PLDI 07 work. Prerequisite: impact graph
   */
  protected void computeWeightsForPts() {
    int i;
    int s, t;
    off_graph_edge p;
    IVarAbstraction node;

    // prepare the data
    pre_cnt = 0;
    for (i = 0; i < n_var; ++i) {
      pre[i] = -1;
      count[i] = 0;
      rep[i] = i;
      repsize[i] = 1;
      node = int2var.get(i);
      node.top_value = Integer.MIN_VALUE;
    }

    // perform the SCC identification
    for (i = 0; i < n_var; ++i) {
      if (pre[i] == -1) {
        tarjan_scc(i);
      }
    }

    // In-degree counting
    for (i = 0; i < n_var; ++i) {
      p = varGraph.get(i);
      s = find_parent(i);
      while (p != null) {
        t = find_parent(p.t);
        if (t != s) {
          count[t]++;
        }
        p = p.next;
      }
    }

    // Reconstruct the graph with condensed cycles:
    // move every non-representative's edge list onto its SCC representative
    for (i = 0; i < n_var; ++i) {
      p = varGraph.get(i);
      if (p != null && rep[i] != i) {
        t = find_parent(i);
        while (p.next != null) {
          p = p.next;
        }
        p.next = varGraph.get(t);
        varGraph.set(t, varGraph.get(i));
        varGraph.set(i, null);
      }
    }

    // Seed the topological traversal with all zero in-degree representatives
    queue.clear();
    for (i = 0; i < n_var; ++i) {
      if (rep[i] == i && count[i] == 0) {
        queue.addLast(i);
      }
    }

    // Assign the topological value to every node
    // We also reserve space for the cycle members, i.e. linearize all the nodes not only the SCCs
    i = 0;
    while (!queue.isEmpty()) {
      s = queue.getFirst();
      queue.removeFirst();

      node = int2var.get(s);
      node.top_value = i;
      i += repsize[s];

      p = varGraph.get(s);
      while (p != null) {
        t = find_parent(p.t);
        if (t != s) {
          if (--count[t] == 0) {
            queue.addLast(t);
          }
        }
        p = p.next;
      }
    }

    // Assign the non-representative node with the reserved positions
    for (i = n_var - 1; i > -1; --i) {
      if (rep[i] != i) {
        node = int2var.get(find_parent(i));
        IVarAbstraction me = int2var.get(i);
        me.top_value = node.top_value + repsize[node.id] - 1;
        --repsize[node.id];
      }
    }
  }

  /** Creates an edge s -> t and prepends it to s's adjacency list. */
  private off_graph_edge add_graph_edge(int s, int t) {
    off_graph_edge e = new off_graph_edge();

    e.s = s;
    e.t = t;
    e.next = varGraph.get(s);
    varGraph.set(s, e);

    return e;
  }

  // Contract the graph
  // Recursive Tarjan SCC; members of a completed SCC are merged via find-union.
  private void tarjan_scc(int s) {
    int t;
    off_graph_edge p;

    pre[s] = low[s] = pre_cnt++;
    queue.addLast(s);
    p = varGraph.get(s);

    while (p != null) {
      t = p.t;
      if (pre[t] == -1) {
        tarjan_scc(t);
      }
      if (low[t] < low[s]) {
        low[s] = low[t];
      }
      p = p.next;
    }

    if (low[s] < pre[s]) {
      return;
    }

    // s is the root of an SCC: pop and merge all members still on the stack.
    // low[t] += n_var pushes t's low-link out of range, marking it off-stack.
    int w = s;
    do {
      t = queue.getLast();
      queue.removeLast();
      low[t] += n_var;
      w = merge_nodes(w, t);
    } while (t != s);
  }

  // Find-union
  // Find with path compression.
  private int find_parent(int v) {
    return v == rep[v] ? v : (rep[v] = find_parent(rep[v]));
  }

  // Find-union
  // Union by size; returns the surviving representative.
  private int merge_nodes(int v1, int v2) {
    v1 = find_parent(v1);
    v2 = find_parent(v2);

    if (v1 != v2) {
      // Select v1 as the representative
      if (repsize[v1] < repsize[v2]) {
        int t = v1;
        v1 = v2;
        v2 = t;
      }

      rep[v2] = v1;
      repsize[v1] += repsize[v2];
    }

    return v1;
  }
}
| 18,720
| 26.210756
| 125
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/geom/geomPA/PQ_Worklist.java
|
package soot.jimple.spark.geom.geomPA;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2011 Richard Xiao
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
/**
* The implementation of prioritized worklist. The priority is computed by two parameters: the Topsort order and the least
* recent fired (LRF) timestamp. For two pointers p and q p has higher priority than q iff: 1. topsort_order(p) <
* topsort_order(q) 2. topsort_order(p) == topsort_order(q) && LRF(p) < LRF(q)
*
* @author xiao
*
*/
public class PQ_Worklist implements IWorklist {
  // 1-based binary min-heap; heap[1] is the highest priority element.
  // Each queued element stores its own heap index in its Qpos field
  // (Qpos == 0 means "not currently in the queue").
  private IVarAbstraction[] heap = null;
  // Index one past the last used slot; the heap occupies heap[1 .. cur_tail-1]
  int cur_tail = 0;

  @Override
  public void initialize(int size) {
    heap = new IVarAbstraction[size];
    cur_tail = 1;
  }

  @Override
  public boolean has_job() {
    return cur_tail > 1;
  }

  /**
   * Pops the highest-priority pointer: removes heap[1], then re-seats the last
   * element by sifting it down from the root.
   */
  @Override
  public IVarAbstraction next() {
    IVarAbstraction ret = heap[1];
    --cur_tail;

    if (cur_tail > 1) {
      IVarAbstraction e = heap[cur_tail];
      int k = 1;

      // Sift down: follow the smaller child until e fits
      while ((k * 2) < cur_tail) {
        int kk = k * 2;
        if ((kk + 1) < cur_tail && heap[kk + 1].lessThan(heap[kk])) {
          kk++;
        }

        if (e.lessThan(heap[kk])) {
          break;
        }
        heap[k] = heap[kk];
        heap[k].Qpos = k;
        k = kk;
      }

      e.Qpos = k;
      heap[k] = e;
    }

    // Mark the popped element as no longer queued
    ret.Qpos = 0;
    return ret;
  }

  /**
   * Enqueues e, or re-prioritizes it in place if it is already queued.
   * Bumps e's least-recently-fired counter on every call.
   */
  @Override
  public void push(IVarAbstraction e) {
    e.lrf_value++;

    if (e.Qpos == 0) {
      // This element has not been inserted: append and sift up
      int k = cur_tail;
      while (k > 1) {
        int kk = k / 2;
        if (heap[kk].lessThan(e)) {
          break;
        }
        heap[k] = heap[kk];
        heap[k].Qpos = k;
        k /= 2;
      }

      e.Qpos = k;
      heap[k] = e;
      ++cur_tail;
    } else {
      // We decrease this element whenever possible (sift down from its slot)
      int k = e.Qpos;
      while ((k * 2) < cur_tail) {
        int kk = k * 2;
        if ((kk + 1) < cur_tail && heap[kk + 1].lessThan(heap[kk])) {
          kk++;
        }

        if (e.lessThan(heap[kk])) {
          break;
        }
        heap[k] = heap[kk];
        heap[kk].Qpos = k;
        k = kk;
      }

      e.Qpos = k;
      heap[k] = e;
    }
  }

  @Override
  public int size() {
    return cur_tail - 1;
  }

  // NOTE(review): clear() resets cur_tail but does not reset the Qpos of
  // elements still referenced by the array; pushing such an element afterwards
  // would take the "already queued" path. Confirm callers never reuse elements
  // across a clear().
  @Override
  public void clear() {
    cur_tail = 1;
  }
}
| 3,000
| 22.084615
| 122
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/geom/geomPA/Parameters.java
|
package soot.jimple.spark.geom.geomPA;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2011 - 2014 Richard Xiao
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
/**
* Parameters to control the behaviors of geom points-to solver.
*
* @author xiao
*
*/
public class Parameters {
  // The parameters that are used to tune the precision and performance tradeoff
  // Maximum number of geometric figures kept per flow edge
  public static int max_cons_budget = 40;
  // Maximum number of geometric figures kept per points-to tuple
  public static int max_pts_budget = 80;
  // Number of call-graph refinement rounds
  public static int cg_refine_times = 1;

  // Parameters for offline processing: which pointers are used as seeds
  // (see Constants.seedPts_allUser / Constants.seedPts_all)
  public static int seedPts = Constants.seedPts_allUser;

  // Querying parameters: budget size for collecting contexts intervals
  public static int qryBudgetSize = max_pts_budget / 2;
}
| 1,399
| 31.55814
| 80
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/geom/heapinsE/HeapInsIntervalManager.java
|
package soot.jimple.spark.geom.heapinsE;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2011 Richard Xiao
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.jimple.spark.geom.dataRep.RectangleNode;
import soot.jimple.spark.geom.dataRep.SegmentNode;
import soot.jimple.spark.geom.geomPA.Constants;
import soot.jimple.spark.geom.geomPA.IFigureManager;
/**
* An abstraction for the management of all the heap insensitive encoding figures. We employ the naive management strategy,
* which is a linked list based manager.
*
* For HeapIns analysis, we have four types of figures: Type | Index all-to-many | 0 many-to-all | 1 one-to-one | 2
* all-to-all all-to-all is special because whenever it was presented, all others can be deleted (its semantics is context
* insensitive). The corresponding index means header[0] stores all the all-to-one figures, and so on.
*
* @author xiao
*
*/
public class HeapInsIntervalManager extends IFigureManager {
  // Number of figure categories managed (one linked list per category)
  public static int Divisions = 3;

  // Type IDs for the figures
  public static final int ALL_TO_ALL = -1; // A special case
  public static final int ALL_TO_MANY = 0;
  public static final int MANY_TO_ALL = 1;
  public static final int ONE_TO_ONE = 2;

  // Recording the size of each type of figure
  private int size[] = { 0, 0, 0 };

  // Recording the geometric figures, categorized by the type IDs.
  private SegmentNode header[] = { null, null, null };

  // True while at least one inserted figure has not been flushed/propagated
  private boolean hasNewFigure = false;

  /** Returns the heads of the per-category figure lists (indexed by type ID). */
  public SegmentNode[] getFigures() {
    return header;
  }

  /** Returns the per-category figure counts (indexed by type ID). */
  public int[] getSizes() {
    return size;
  }

  public boolean isThereUnprocessedFigures() {
    return hasNewFigure;
  }

  /**
   * Marks all pending figures as processed. New figures are always prepended,
   * so each list's "new" prefix ends at the first non-new node.
   */
  public void flush() {
    hasNewFigure = false;

    for (int i = 0; i < Divisions; ++i) {
      SegmentNode p = header[i];
      while (p != null && p.is_new == true) {
        p.is_new = false;
        p = p.next;
      }
    }
  }

  /**
   * Delete all the shapes recorded.
   */
  public void clear() {
    for (int i = 0; i < Divisions; ++i) {
      size[i] = 0;
      header[i] = null;
    }

    hasNewFigure = false;
  }

  /*
   * pnew.L < 0 is a special case we used to indicate a square: L = L_prime This case is specially handled because it is very
   * common in the program. And, treating it as a MANY-TO-ALL is loss of precision.
   */
  // Inserts a figure unless it is already covered by an existing one;
  // returns the inserted node, or null if the figure was redundant.
  public SegmentNode addNewFigure(int code, RectangleNode pnew) {
    SegmentNode p;

    if (code == ALL_TO_ALL) {
      // The input figure is a all-to-all figure
      // Directly clean all the existing intervals unless the all-to-all figure is existing.
      if (header[ALL_TO_MANY] != null && header[ALL_TO_MANY].I2 == 0) {
        return null;
      }

      // Encode all-to-all as an ALL_TO_MANY figure spanning the full context range
      p = new SegmentNode();
      code = ALL_TO_MANY;
      p.I1 = p.I2 = 0;
      p.L = Constants.MAX_CONTEXTS;

      for (int i = 0; i < Divisions; ++i) {
        size[i] = 0;
        header[i] = null;
      }
    } else {
      // Before inserting into the figure list, we do duplicate testing

      // This is a all-to-many or one-to-one figure
      if (code == ALL_TO_MANY || code == ONE_TO_ONE) {
        p = header[ALL_TO_MANY];

        while (p != null) {
          if ((p.I2 <= pnew.I2) && (p.I2 + p.L >= pnew.I2 + pnew.L)) {
            return null;
          }
          p = p.next;
        }
      }

      // This is a many-to-all or one-to-one figure
      if (code == MANY_TO_ALL || code == ONE_TO_ONE) {
        p = header[MANY_TO_ALL];

        while (p != null) {
          if ((p.I1 <= pnew.I1) && (p.I1 + p.L >= pnew.I1 + pnew.L)) {
            return null;
          }
          p = p.next;
        }
      }

      // This is a one-to-one figure
      if (code == ONE_TO_ONE) {
        p = header[ONE_TO_ONE];

        while (p != null) {
          // We don't process the case: the input figure is a square but the tested figure is a segment
          if (p.I1 - p.I2 == pnew.I1 - pnew.I2) {
            // On the same line
            if (p.I1 <= pnew.I1 && p.I1 + p.L >= pnew.I1 + pnew.L) {
              return null;
            }
          }
          p = p.next;
        }
      }

      // Insert the new interval immediately, and we delay the merging until necessary
      p = new SegmentNode(pnew);

      if (code == ALL_TO_MANY) {
        clean_garbage_all_to_many(p);
      } else if (code == MANY_TO_ALL) {
        clean_garbage_many_to_all(p);
      } else {
        clean_garbage_one_to_one(p);
      }
    }

    hasNewFigure = true;
    size[code]++;
    p.next = header[code];
    header[code] = p;
    return p;
  }

  // This function tries to do the geometric merging
  // When a category exceeds upperSize, its figures are collapsed into a single
  // enclosing (less precise) figure.
  public void mergeFigures(int upperSize) {
    if (!hasNewFigure) {
      return;
    }

    /*
     * We start the merging from ONE_TO_ONE, because the generated figure may be merged with those figures in MANY_TO_ALL
     */
    if (size[ONE_TO_ONE] > upperSize && header[ONE_TO_ONE].is_new == true) {
      // We prefer to generate a heap insensitive figure
      SegmentNode p = generate_many_to_all(header[ONE_TO_ONE]);

      clean_garbage_many_to_all(p);
      p.next = header[MANY_TO_ALL];
      header[MANY_TO_ALL] = p;
      header[ONE_TO_ONE] = null;

      size[MANY_TO_ALL]++;
      size[ONE_TO_ONE] = 0;
    }

    if (size[MANY_TO_ALL] > upperSize && header[MANY_TO_ALL].is_new == true) {
      header[MANY_TO_ALL] = generate_many_to_all(header[MANY_TO_ALL]);
      size[MANY_TO_ALL] = 1;
    }

    if (size[ALL_TO_MANY] > upperSize && header[ALL_TO_MANY].is_new == true) {
      header[ALL_TO_MANY] = generate_all_to_many(header[ALL_TO_MANY]);
      size[ALL_TO_MANY] = 1;
    }
  }

  // Drops every ONE_TO_ONE segment that is already covered by an
  // ALL_TO_MANY or MANY_TO_ALL figure.
  public void removeUselessSegments() {
    int i;
    SegmentNode p, q, temp;

    p = header[ONE_TO_ONE];
    size[ONE_TO_ONE] = 0;
    q = null;

    while (p != null) {
      boolean contained = false;
      long L = p.L;

      for (i = 0; i < 2; ++i) {
        temp = header[i];
        while (temp != null) {
          // I1 == 0 (resp. I2 == 0) means the figure spans all contexts on that axis
          if (temp.I1 == 0 || ((temp.I1 <= p.I1) && (temp.I1 + temp.L >= p.I1 + L))) {
            if (temp.I2 == 0 || ((temp.I2 <= p.I2) && (temp.I2 + temp.L >= p.I2 + L))) {
              contained = true;
              break;
            }
          }
          temp = temp.next;
        }
      }

      temp = p.next;
      if (contained == false) {
        p.next = q;
        q = p;
        ++size[ONE_TO_ONE];
      }
      p = temp;
    }

    header[ONE_TO_ONE] = q;
  }

  /**
   * Merge all the ONE_TO_ONE figures pointed to by mp. The result is in the form (p, q, 0, I, L).
   */
  private SegmentNode generate_all_to_many(SegmentNode mp) {
    long left, right;
    SegmentNode p;

    // Compute the bounding interval [left, right) on the I2 axis
    left = mp.I2;
    right = left + mp.L;
    p = mp.next;

    while (p != null) {
      if (p.I2 < left) {
        left = p.I2;
      }
      long t = p.I2 + p.L;
      if (t > right) {
        right = t;
      }
      p = p.next;
    }

    // We reuse the first element in the list mp
    mp.I1 = 0;
    mp.I2 = left;
    mp.L = right - left;
    mp.next = null;

    return mp;
  }

  /**
   * The result is in the form: (p, q, I, 0, L)
   */
  private SegmentNode generate_many_to_all(SegmentNode mp) {
    long left, right;
    SegmentNode p;

    // Compute the bounding interval [left, right) on the I1 axis
    left = mp.I1;
    right = left + mp.L;
    p = mp.next;

    while (p != null) {
      if (p.I1 < left) {
        left = p.I1;
      }
      long t = p.I1 + p.L;
      if (t > right) {
        right = t;
      }
      p = p.next;
    }

    mp.I1 = left;
    mp.I2 = 0;
    mp.L = right - left;
    mp.next = null;

    return mp;
  }

  // Clean garbages in the MANY_TO_ALL list that the information is already covered by mp
  // BTW, we also do simple adjacent figures concatenation
  private void clean_garbage_many_to_all(SegmentNode predator) {
    SegmentNode p, q, list;
    int num;
    long right, left;

    list = header[MANY_TO_ALL];
    p = q = null;
    num = 0;
    left = predator.I1;
    right = left + predator.L;

    while (list != null) {
      // We first process the overlapped cases
      if (list.I1 >= left) {
        if (list.I1 <= right) {
          if (list.I1 + list.L > right) {
            // We extend predator to the right
            right = list.I1 + list.L;
          }
          // else, this figure is completely contained in predator, we swallow it
          list = list.next;
          continue;
        }
        // else, this figure has no overlap with the predator
      } else if (list.I1 + list.L >= left) {
        // We extend predator to the left
        left = list.I1;
        list = list.next;
        continue;
      }

      // No intersection, no overlap
      // Notice that, we have to preserve the order of the list
      // Because the newly inserted figures are headed at the list
      if (q == null) {
        p = q = list;
      } else {
        q.next = list;
        q = list;
      }

      ++num;
      list = list.next;
    }

    predator.I1 = left;
    predator.L = right - left;
    if (q != null) {
      q.next = null;
    }
    header[MANY_TO_ALL] = p;
    size[MANY_TO_ALL] = num;
  }

  // Clean the ALL_TO_MANY list
  // Same sweep as clean_garbage_many_to_all, but on the I2 axis.
  private void clean_garbage_all_to_many(SegmentNode predator) {
    SegmentNode p, q, list;
    int num;
    long right, left;

    list = header[ALL_TO_MANY];
    p = q = null;
    num = 0;
    left = predator.I2;
    right = predator.I2 + predator.L;

    while (list != null) {
      if (list.I2 >= left) {
        if (list.I2 <= right) {
          if (list.I2 + list.L > right) {
            // We extend predator to the right
            right = list.I2 + list.L;
          }
          list = list.next;
          continue;
        }
      } else if (list.I2 + list.L >= left) {
        // We extend predator to the left
        left = list.I2;
        list = list.next;
        continue;
      }

      // No intersection, no overlap
      // Notice that, we have to preserve the order of the list
      // Because the unprocessed points-to tuples are headed at the list
      if (q == null) {
        p = q = list;
      } else {
        q.next = list;
        q = list;
      }

      ++num;
      list = list.next;
    }

    predator.I2 = left;
    predator.L = right - left;
    if (q != null) {
      q.next = null;
    }
    header[ALL_TO_MANY] = p;
    size[ALL_TO_MANY] = num;
  }

  /*
   * Eliminate the redundant ONE_TO_ONE figures
   */
  private void clean_garbage_one_to_one(SegmentNode predator) {
    SegmentNode p, q, list;
    int num;

    list = header[ONE_TO_ONE];
    p = q = null;
    num = 0;

    while (list != null) {
      long L = list.L;
      // Same diagonal and fully inside predator's span => redundant
      if ((predator.I2 - predator.I1 == list.I2 - list.I1) && predator.I1 <= list.I1
          && (predator.I1 + predator.L >= list.I2 + L)) {
        // The checked figure is completely contained in the predator
        // So we ignore it
        ;
      } else {
        if (q == null) {
          p = q = list;
        } else {
          q.next = list;
          q = list;
        }
        ++num;
      }
      list = list.next;
    }

    if (q != null) {
      q.next = null;
    }
    header[ONE_TO_ONE] = p;
    size[ONE_TO_ONE] = num;
  }
}
| 11,760
| 24.679039
| 125
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/geom/heapinsE/HeapInsNode.java
|
/*
* Please attach the following author information if you would like to redistribute the source code:
* Developer: Xiao Xiao
* Address: Room 4208, Hong Kong University of Science and Technology
* Contact: frogxx@gmail.com
*/
package soot.jimple.spark.geom.heapinsE;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 1997 - 2018 Raja Vallée-Rai and others
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.io.PrintStream;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.Vector;
import soot.Hierarchy;
import soot.RefType;
import soot.Scene;
import soot.SootClass;
import soot.SootMethod;
import soot.jimple.spark.geom.dataMgr.PtSensVisitor;
import soot.jimple.spark.geom.dataRep.PlainConstraint;
import soot.jimple.spark.geom.dataRep.RectangleNode;
import soot.jimple.spark.geom.dataRep.SegmentNode;
import soot.jimple.spark.geom.geomE.GeometricManager;
import soot.jimple.spark.geom.geomPA.Constants;
import soot.jimple.spark.geom.geomPA.GeomPointsTo;
import soot.jimple.spark.geom.geomPA.IVarAbstraction;
import soot.jimple.spark.geom.geomPA.IWorklist;
import soot.jimple.spark.geom.geomPA.Parameters;
import soot.jimple.spark.pag.AllocNode;
import soot.jimple.spark.pag.ClassConstantNode;
import soot.jimple.spark.pag.LocalVarNode;
import soot.jimple.spark.pag.Node;
import soot.jimple.spark.pag.StringConstantNode;
import soot.jimple.spark.sets.P2SetVisitor;
/**
* This class defines a pointer variable for use in the HeapIns encoding based points-to solver. HeapIns is a simpler form of
* geometric encoding. HeapIns is faster and uses less memory, but also, it is less precise than geometric encoding. NOT
* recommended to use.
*
* @author xiao
*
*/
public class HeapInsNode extends IVarAbstraction {
  // The targets of directed edges on the constraint graph
  public HashMap<HeapInsNode, HeapInsIntervalManager> flowto;

  // The objects this variable points to
  public HashMap<AllocNode, HeapInsIntervalManager> pt_objs;

  // Newly added points-to tuple
  public Map<AllocNode, HeapInsIntervalManager> new_pts;

  // store/load complex constraints
  public Vector<PlainConstraint> complex_cons = null;

  static {
    // Shared managers inherited from IVarAbstraction:
    // stubManager carries a single ALL_TO_ALL figure that covers the whole context
    // range, and deadManager is a sentinel that marks objects whose propagation
    // has been precluded (see do_before_propagation / addPointsTo).
    stubManager = new HeapInsIntervalManager();
    pres = new RectangleNode(0, 0, Constants.MAX_CONTEXTS, Constants.MAX_CONTEXTS);
    stubManager.addNewFigure(HeapInsIntervalManager.ALL_TO_ALL, pres);
    deadManager = new HeapInsIntervalManager();
  }

  /**
   * Wraps the given SPARK node as a HeapIns pointer variable.
   *
   * @param thisVar
   *          the underlying SPARK pointer node
   */
  public HeapInsNode(Node thisVar) {
    me = thisVar;
  }

  /** Releases all internal data structures of this pointer. */
  @Override
  public void deleteAll() {
    flowto = null;
    pt_objs = null;
    new_pts = null;
    complex_cons = null;
  }

  /** (Re-)initializes the internal data structures for a fresh analysis run. */
  @Override
  public void reconstruct() {
    flowto = new HashMap<HeapInsNode, HeapInsIntervalManager>();
    pt_objs = new HashMap<AllocNode, HeapInsIntervalManager>();
    new_pts = new HashMap<AllocNode, HeapInsIntervalManager>();
    complex_cons = null;
    lrf_value = 0;
  }

  /**
   * Called before this pointer is processed by the solver: merges figures to stay
   * within budget, and filters infeasible objects for {@code this} pointers.
   */
  @Override
  public void do_before_propagation() {
    // if ( complex_cons == null )
    do_pts_interval_merge();

    // if ( !(me instanceof LocalVarNode) )
    do_flow_edge_interval_merge();

    // This pointer filter, please read the comments at this line in file FullSensitiveNode.java
    Node wrappedNode = getWrappedNode();
    if (wrappedNode instanceof LocalVarNode && ((LocalVarNode) wrappedNode).isThisPtr()) {
      SootMethod func = ((LocalVarNode) wrappedNode).getMethod();
      if (!func.isConstructor()) {
        // We don't process the specialinvoke call edge
        SootClass defClass = func.getDeclaringClass();
        Hierarchy typeHierarchy = Scene.v().getActiveHierarchy();

        for (Iterator<AllocNode> it = new_pts.keySet().iterator(); it.hasNext();) {
          AllocNode obj = it.next();
          if (obj.getType() instanceof RefType) {
            SootClass sc = ((RefType) obj.getType()).getSootClass();
            if (defClass != sc) {
              try {
                SootMethod rt_func = typeHierarchy.resolveConcreteDispatch(sc, func);
                if (rt_func != func) {
                  it.remove();
                  // Also preclude it from propagation again
                  pt_objs.put(obj, (HeapInsIntervalManager) deadManager);
                }
              } catch (RuntimeException e) {
                // If the input program has a wrong type cast, resolveConcreteDispatch fails and it goes here
                // We simply ignore this error
              }
            }
          }
        }
      }
    }
  }

  /**
   * Remember to clean the is_new flag
   */
  @Override
  public void do_after_propagation() {
    for (HeapInsIntervalManager im : new_pts.values()) {
      im.flush();
    }
    new_pts = new HashMap<AllocNode, HeapInsIntervalManager>();
  }

  /**
   * @return the number of distinct objects this pointer may point to, or -1 if the
   *         points-to data has been discarded
   */
  @Override
  public int num_of_diff_objs() {
    // If this pointer is not a representative pointer
    if (parent != this) {
      return getRepresentative().num_of_diff_objs();
    }

    if (pt_objs == null) {
      return -1;
    }

    return pt_objs.size();
  }

  /**
   * @return the number of distinct outgoing flow edges of this pointer, or -1 if
   *         the edge data has been discarded
   */
  @Override
  public int num_of_diff_edges() {
    // If this pointer is not a representative pointer, delegate to the
    // representative's EDGE count.
    // BUGFIX: this previously delegated to num_of_diff_objs(), returning the
    // representative's points-to object count instead of its edge count.
    if (parent != this) {
      return getRepresentative().num_of_diff_edges();
    }

    if (flowto == null) {
      return -1;
    }

    return flowto.size();
  }

  /**
   * Adds the points-to tuple (I1, I2, L): contexts [I1, I1+L) of this pointer
   * point to contexts [I2, I2+L) of obj. A zero interval start means "all contexts".
   */
  @Override
  public boolean add_points_to_3(AllocNode obj, long I1, long I2, long L) {
    int code = 0;

    pres.I1 = I1;
    pres.I2 = I2;
    pres.L = L;

    if (I1 == 0) {
      code = (I2 == 0 ? HeapInsIntervalManager.ALL_TO_ALL : HeapInsIntervalManager.ALL_TO_MANY);
    } else {
      code = (I2 == 0 ? HeapInsIntervalManager.MANY_TO_ALL : HeapInsIntervalManager.ONE_TO_ONE);
    }

    return addPointsTo(code, obj);
  }

  /** Rectangle figures are not supported by the HeapIns encoding. */
  @Override
  public boolean add_points_to_4(AllocNode obj, long I1, long I2, long L1, long L2) {
    return false;
  }

  /**
   * Adds the flow edge (I1, I2, L): contexts [I1, I1+L) of this pointer flow to
   * contexts [I2, I2+L) of qv. A zero interval start means "all contexts".
   */
  @Override
  public boolean add_simple_constraint_3(IVarAbstraction qv, long I1, long I2, long L) {
    int code = 0;

    pres.I1 = I1;
    pres.I2 = I2;
    pres.L = L;

    if (I1 == 0) {
      code = (I2 == 0 ? HeapInsIntervalManager.ALL_TO_ALL : HeapInsIntervalManager.ALL_TO_MANY);
    } else {
      code = (I2 == 0 ? HeapInsIntervalManager.MANY_TO_ALL : HeapInsIntervalManager.ONE_TO_ONE);
    }

    return addFlowsTo(code, (HeapInsNode) qv);
  }

  /** Rectangle figures are not supported by the HeapIns encoding. */
  @Override
  public boolean add_simple_constraint_4(IVarAbstraction qv, long I1, long I2, long L1, long L2) {
    return false;
  }

  /** Registers a load/store constraint whose base pointer is this variable. */
  @Override
  public void put_complex_constraint(PlainConstraint cons) {
    if (complex_cons == null) {
      complex_cons = new Vector<PlainConstraint>();
    }
    complex_cons.add(cons);
  }

  /**
   * Discard all the ONE_TO_ONE figures which are covered by the ALL_TO_MANY and MANY_TO_ALL figures
   */
  @Override
  public void drop_duplicates() {
    for (HeapInsIntervalManager im : pt_objs.values()) {
      im.removeUselessSegments();
    }
  }

  /**
   * An efficient implementation of differential propagation.
   */
  @Override
  public void propagate(GeomPointsTo ptAnalyzer, IWorklist worklist) {
    int i, j;
    AllocNode obj;
    SegmentNode pts, pe, int_entry1[], int_entry2[];
    HeapInsIntervalManager him1, him2;
    HeapInsNode qn, objn;
    boolean added, has_new_edges;

    // We first build the new flow edges via the field dereferences
    if (complex_cons != null) {
      for (Map.Entry<AllocNode, HeapInsIntervalManager> entry : new_pts.entrySet()) {
        obj = entry.getKey();
        int_entry1 = entry.getValue().getFigures();

        for (PlainConstraint pcons : complex_cons) {
          // Construct the two variables in assignment
          objn = (HeapInsNode) ptAnalyzer.findAndInsertInstanceField(obj, pcons.f);
          if (objn == null) {
            // This combination of allocdotfield must be invalid
            // This expression p.f also renders that p cannot point to obj, so we remove it
            // We label this event and sweep the garbage later
            pt_objs.put(obj, (HeapInsIntervalManager) deadManager);
            entry.setValue((HeapInsIntervalManager) deadManager);
            break;
          }

          if (objn.willUpdate == false) {
            // This must be a store constraint
            // This object field is not need for computing
            // the points-to information of the seed pointers
            continue;
          }

          qn = (HeapInsNode) pcons.otherSide;

          for (i = 0; i < HeapInsIntervalManager.Divisions; ++i) {
            pts = int_entry1[i];
            while (pts != null && pts.is_new) {
              switch (pcons.type) {
                case Constants.STORE_CONS:
                  // Store, qv -> pv.field
                  // pts.I2 may be zero, pts.L may be less than zero
                  if (qn.add_simple_constraint_3(objn, pcons.code == GeometricManager.ONE_TO_ONE ? pts.I1 : 0, pts.I2,
                      pts.L < 0 ? -pts.L : pts.L)) {
                    worklist.push(qn);
                  }
                  break;

                case Constants.LOAD_CONS:
                  // Load, pv.field -> qv
                  if (objn.add_simple_constraint_3(qn, pts.I2, pcons.code == GeometricManager.ONE_TO_ONE ? pts.I1 : 0,
                      pts.L < 0 ? -pts.L : pts.L)) {
                    worklist.push(objn);
                  }
                  break;
              }

              pts = pts.next;
            }
          }
        }
      }
    }

    for (Map.Entry<HeapInsNode, HeapInsIntervalManager> entry1 : flowto.entrySet()) {
      // First, we pick one flow-to figure
      added = false;
      qn = entry1.getKey();
      him1 = entry1.getValue();
      int_entry1 = him1.getFigures(); // Figure collection for the flows-to tuple
      has_new_edges = him1.isThereUnprocessedFigures();
      // If there are new edges, all points-to tuples must be re-paired;
      // otherwise only the newly discovered points-to tuples need pairing.
      Map<AllocNode, HeapInsIntervalManager> objs = (has_new_edges ? pt_objs : new_pts);

      for (Map.Entry<AllocNode, HeapInsIntervalManager> entry2 : objs.entrySet()) {
        // Second, we get the points-to intervals
        obj = entry2.getKey();
        him2 = entry2.getValue();

        if (him2 == deadManager) {
          continue;
        }
        if (!ptAnalyzer.castNeverFails(obj.getType(), qn.getWrappedNode().getType())) {
          continue;
        }

        // Figure collection for the points-to tuple
        int_entry2 = him2.getFigures();

        // We pair up all the interval points-to tuples and interval flow edges
        // Loop over all points-to figures
        for (i = 0; i < HeapInsIntervalManager.Divisions; ++i) {
          pts = int_entry2[i];
          while (pts != null) {
            if (!has_new_edges && !pts.is_new) {
              break;
            }

            // Loop over all flows-to figures
            for (j = 0; j < HeapInsIntervalManager.Divisions; ++j) {
              pe = int_entry1[j];
              while (pe != null) {
                if (pts.is_new || pe.is_new) {
                  // Propagate this object
                  if (add_new_points_to_tuple(pts, pe, obj, qn)) {
                    added = true;
                  }
                } else {
                  break;
                }

                pe = pe.next;
              }
            }

            pts = pts.next;
          }
        }
      }

      if (added) {
        worklist.push(qn);
      }

      // Now, we clean the new edges if necessary
      if (has_new_edges) {
        him1.flush();
      }
    }
  }

  /** Counts the interval figures recorded for the points-to tuple (this, obj). */
  @Override
  public int count_pts_intervals(AllocNode obj) {
    int ret = 0;
    SegmentNode[] int_entry = find_points_to(obj);

    for (int j = 0; j < HeapInsIntervalManager.Divisions; ++j) {
      SegmentNode p = int_entry[j];
      while (p != null) {
        ++ret;
        p = p.next;
      }
    }

    return ret;
  }

  /** Counts the interval figures recorded for the flow edge (this, qv). */
  @Override
  public int count_flow_intervals(IVarAbstraction qv) {
    int ret = 0;
    SegmentNode[] int_entry = find_flowto((HeapInsNode) qv);

    for (int j = 0; j < HeapInsIntervalManager.Divisions; ++j) {
      SegmentNode p = int_entry[j];
      while (p != null) {
        ++ret;
        p = p.next;
      }
    }

    return ret;
  }

  /**
   * Query if this pointer and qv could point to the same object under any contexts
   */
  @Override
  public boolean heap_sensitive_intersection(IVarAbstraction qv) {
    int i, j;
    HeapInsNode qn;
    SegmentNode p, q, pt[], qt[];

    qn = (HeapInsNode) qv;

    for (Iterator<AllocNode> it = pt_objs.keySet().iterator(); it.hasNext();) {
      AllocNode an = it.next();
      // Constant objects are not considered for aliasing
      if (an instanceof ClassConstantNode) {
        continue;
      }
      if (an instanceof StringConstantNode) {
        continue;
      }
      qt = qn.find_points_to(an);
      if (qt == null) {
        continue;
      }
      pt = find_points_to(an);

      for (i = 0; i < HeapInsIntervalManager.Divisions; ++i) {
        p = pt[i];
        while (p != null) {
          for (j = 0; j < HeapInsIntervalManager.Divisions; ++j) {
            q = qt[j];
            while (q != null) {
              if (quick_intersecting_test(p, q)) {
                return true;
              }
              q = q.next;
            }
          }
          p = p.next;
        }
      }
    }

    return false;
  }

  /** @return the set of all objects this pointer (or its representative) points to */
  @Override
  public Set<AllocNode> get_all_points_to_objects() {
    // If this pointer is not a representative pointer
    if (parent != this) {
      return getRepresentative().get_all_points_to_objects();
    }

    return pt_objs.keySet();
  }

  /** Dumps all (object, I1, I2, L) tuples of this pointer to the given stream. */
  @Override
  public void print_context_sensitive_points_to(PrintStream outPrintStream) {
    for (Iterator<AllocNode> it = pt_objs.keySet().iterator(); it.hasNext();) {
      AllocNode obj = it.next();
      SegmentNode[] int_entry = find_points_to(obj);

      for (int j = 0; j < HeapInsIntervalManager.Divisions; ++j) {
        SegmentNode p = int_entry[j];
        while (p != null) {
          outPrintStream.println("(" + obj.toString() + ", " + p.I1 + ", " + p.I2 + ", " + p.L + ")");
          p = p.next;
        }
      }
    }
  }

  /**
   * Tests whether this pointer, under any context in [l, r), may point to obj.
   */
  @Override
  public boolean pointer_interval_points_to(long l, long r, AllocNode obj) {
    SegmentNode[] int_entry = find_points_to(obj);
    if (int_entry == null) {
      return false;
    }

    // Check all-to-many figures: the pointer side covers all contexts
    if (int_entry[HeapInsIntervalManager.ALL_TO_MANY] != null) {
      return true;
    }

    for (int i = 1; i < HeapInsIntervalManager.Divisions; ++i) {
      SegmentNode p = int_entry[i];
      while (p != null) {
        long R = p.I1 + p.L;
        // Overlap test between [l, r) and [p.I1, R)
        if ((l <= p.I1 && p.I1 < r) || (p.I1 <= l && l < R)) {
          return true;
        }
        p = p.next;
      }
    }

    return false;
  }

  /** Drops the points-to relation between this pointer and obj. */
  @Override
  public void remove_points_to(AllocNode obj) {
    pt_objs.remove(obj);
  }

  /** Discards everything except the final points-to result. */
  @Override
  public void keepPointsToOnly() {
    flowto = null;
    new_pts = null;
    complex_cons = null;
  }

  /** Counts the not-yet-processed (is_new) points-to interval figures. */
  @Override
  public int count_new_pts_intervals() {
    int ans = 0;

    for (HeapInsIntervalManager im : new_pts.values()) {
      SegmentNode[] int_entry = im.getFigures();
      for (int i = 0; i < HeapInsIntervalManager.Divisions; ++i) {
        SegmentNode p = int_entry[i];
        while (p != null && p.is_new == true) {
          ++ans;
          p = p.next;
        }
      }
    }

    return ans;
  }

  /**
   * Reports to the visitor, for every pointed-to object, the context versions of
   * that object reachable when this pointer's context lies in [l, r).
   */
  @Override
  public void get_all_context_sensitive_objects(long l, long r, PtSensVisitor visitor) {
    if (parent != this) {
      getRepresentative().get_all_context_sensitive_objects(l, r, visitor);
      return;
    }

    GeomPointsTo geomPTA = (GeomPointsTo) Scene.v().getPointsToAnalysis();

    for (Map.Entry<AllocNode, HeapInsIntervalManager> entry : pt_objs.entrySet()) {
      AllocNode obj = entry.getKey();
      HeapInsIntervalManager im = entry.getValue();
      SegmentNode[] int_entry = im.getFigures();

      // We first get the 1-CFA contexts for the object
      SootMethod sm = obj.getMethod();
      int sm_int = 0;
      long n_contexts = 1;
      if (sm != null) {
        sm_int = geomPTA.getIDFromSootMethod(sm);
        n_contexts = geomPTA.context_size[sm_int];
      }

      // We search for all the pointers falling in the range [l, r) that may point to this object
      for (int i = 0; i < HeapInsIntervalManager.Divisions; ++i) {
        SegmentNode p = int_entry[i];
        while (p != null) {
          long R = p.I1 + p.L;
          long objL = -1, objR = -1;

          // Now we compute which context sensitive objects are pointed to by this pointer
          if (i == HeapInsIntervalManager.ALL_TO_MANY) {
            // all-to-many figures
            objL = p.I2;
            objR = p.I2 + p.L;
          } else {
            // We compute the intersection
            if (l <= p.I1 && p.I1 < r) {
              if (i != HeapInsIntervalManager.MANY_TO_ALL) {
                long d = r - p.I1;
                if (d > p.L) {
                  d = p.L;
                }
                objL = p.I2;
                objR = objL + d;
              } else {
                objL = 1;
                objR = 1 + n_contexts;
              }
            } else if (p.I1 <= l && l < R) {
              if (i != HeapInsIntervalManager.MANY_TO_ALL) {
                long d = R - l;
                if (R > r) {
                  d = r - l;
                }
                objL = p.I2 + l - p.I1;
                objR = objL + d;
              } else {
                objL = 1;
                objR = 1 + n_contexts;
              }
            }
          }

          // Now we test which context versions should this interval [objL, objR) maps to
          if (objL != -1 && objR != -1) {
            visitor.visit(obj, objL, objR, sm_int);
          }

          p = p.next;
        }
      }
    }
  }

  /**
   * Seeds this pointer's points-to set from the context-insensitive SPARK result,
   * using the shared stub manager for every object.
   */
  @Override
  public void injectPts() {
    final GeomPointsTo geomPTA = (GeomPointsTo) Scene.v().getPointsToAnalysis();
    pt_objs = new HashMap<AllocNode, HeapInsIntervalManager>();

    me.getP2Set().forall(new P2SetVisitor() {
      @Override
      public void visit(Node n) {
        if (geomPTA.isValidGeometricNode(n)) {
          pt_objs.put((AllocNode) n, (HeapInsIntervalManager) stubManager);
        }
      }
    });

    new_pts = null;
  }

  /** @return true if obj was precluded from this pointer's points-to set */
  @Override
  public boolean isDeadObject(AllocNode obj) {
    return pt_objs.get(obj) == deadManager;
  }

  // ---------------------------------Private Functions----------------------------------------

  // Looks up the figure array for the flow edge to qv, or null if absent.
  private SegmentNode[] find_flowto(HeapInsNode qv) {
    HeapInsIntervalManager im = flowto.get(qv);
    if (im == null) {
      return null;
    }
    return im.getFigures();
  }

  // Looks up the figure array for the points-to tuple with obj, or null if absent.
  private SegmentNode[] find_points_to(AllocNode obj) {
    HeapInsIntervalManager im = pt_objs.get(obj);
    if (im == null) {
      return null;
    }
    return im.getFigures();
  }

  /**
   * Merge the context sensitive tuples, and make a single insensitive tuple
   */
  private void do_pts_interval_merge() {
    for (HeapInsIntervalManager him : new_pts.values()) {
      him.mergeFigures(Parameters.max_pts_budget);
    }
  }

  // Same budget-driven merging, applied to the outgoing flow edges.
  private void do_flow_edge_interval_merge() {
    for (HeapInsIntervalManager him : flowto.values()) {
      him.mergeFigures(Parameters.max_cons_budget);
    }
  }

  // Inserts the figure currently stored in the shared 'pres' into the
  // points-to tuple with obj; returns true if it was not subsumed.
  private boolean addPointsTo(int code, AllocNode obj) {
    HeapInsIntervalManager im = pt_objs.get(obj);

    if (im == null) {
      im = new HeapInsIntervalManager();
      pt_objs.put(obj, im);
    } else if (im == deadManager) {
      // We preclude the propagation of this object
      return false;
    }

    // pres has been filled properly before calling this method
    if (im.addNewFigure(code, pres) != null) {
      new_pts.put(obj, im);
      return true;
    }

    return false;
  }

  // Inserts the figure currently stored in the shared 'pres' into the flow
  // edge to qv; returns true if it was not subsumed.
  private boolean addFlowsTo(int code, HeapInsNode qv) {
    HeapInsIntervalManager im = flowto.get(qv);

    if (im == null) {
      im = new HeapInsIntervalManager();
      flowto.put(qv, im);
    }

    // pres has been filled properly before calling this method
    return im.addNewFigure(code, pres) != null;
  }

  // Apply the inference rules: combine the points-to figure pts with the flow
  // edge figure pe, and add the deduced points-to tuple (obj) to qn.
  private static boolean add_new_points_to_tuple(SegmentNode pts, SegmentNode pe, AllocNode obj, HeapInsNode qn) {
    long interI, interJ;
    int code = 0;

    // Special Cases: one side is context insensitive (interval start == 0)
    if (pts.I1 == 0 || pe.I1 == 0) {

      if (pe.I2 != 0) {
        // pointer sensitive, heap insensitive
        pres.I1 = pe.I2;
        pres.I2 = 0;
        pres.L = pe.L;
        code = HeapInsIntervalManager.MANY_TO_ALL;
      } else {
        // pointer insensitive, heap sensitive
        pres.I1 = 0;
        pres.I2 = pts.I2;
        pres.L = pts.L;
        code = (pts.I2 == 0 ? HeapInsIntervalManager.ALL_TO_ALL : HeapInsIntervalManager.ALL_TO_MANY);
      }
    } else {
      // General case: intersect the source intervals of the two figures.
      // The left-end is the larger one
      interI = pe.I1 < pts.I1 ? pts.I1 : pe.I1;
      // The right-end is the smaller one
      interJ = (pe.I1 + pe.L < pts.I1 + pts.L ? pe.I1 + pe.L : pts.I1 + pts.L);

      if (interI >= interJ) {
        return false;
      }

      // The intersection is non-empty
      pres.I1 = (pe.I2 == 0 ? 0 : interI - pe.I1 + pe.I2);
      pres.I2 = (pts.I2 == 0 ? 0 : interI - pts.I1 + pts.I2);
      pres.L = interJ - interI;

      if (pres.I1 == 0) {
        code = (pres.I2 == 0 ? HeapInsIntervalManager.ALL_TO_ALL : HeapInsIntervalManager.ALL_TO_MANY);
      } else {
        code = (pres.I2 == 0 ? HeapInsIntervalManager.MANY_TO_ALL : HeapInsIntervalManager.ONE_TO_ONE);
      }
    }

    return qn.addPointsTo(code, obj);
  }

  // We only test if their points-to objects intersected under context
  // insensitive manner
  private static boolean quick_intersecting_test(SegmentNode p, SegmentNode q) {
    if (p.I2 == 0 || q.I2 == 0) {
      return true;
    }

    if (p.I2 >= q.I2) {
      return p.I2 < q.I2 + (q.L < 0 ? -q.L : q.L);
    }

    return q.I2 < p.I2 + (p.L < 0 ? -p.L : p.L);
  }
}
| 22,471
| 28.490814
| 125
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/geom/heapinsE/HeapInsNodeGenerator.java
|
package soot.jimple.spark.geom.heapinsE;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2011 Richard Xiao
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.Iterator;
import soot.jimple.spark.geom.dataRep.CgEdge;
import soot.jimple.spark.geom.dataRep.PlainConstraint;
import soot.jimple.spark.geom.geomE.GeometricManager;
import soot.jimple.spark.geom.geomPA.Constants;
import soot.jimple.spark.geom.geomPA.DummyNode;
import soot.jimple.spark.geom.geomPA.GeomPointsTo;
import soot.jimple.spark.geom.geomPA.IEncodingBroker;
import soot.jimple.spark.geom.geomPA.IVarAbstraction;
import soot.jimple.spark.pag.AllocNode;
import soot.jimple.spark.pag.FieldRefNode;
import soot.jimple.spark.pag.Node;
import soot.jimple.toolkits.callgraph.Edge;
/**
* Build the initial pointer assignment graph with the HeapIns encoding.
*
* @author xiao
*
*/
public class HeapInsNodeGenerator extends IEncodingBroker {
  // Maps the 2-bit "globals involved" code (lhs-global << 1 | rhs-global) to the
  // figure type used for complex (load/store) constraints.
  private static final int full_convertor[] = { GeometricManager.ONE_TO_ONE, GeometricManager.MANY_TO_MANY,
      GeometricManager.MANY_TO_MANY, GeometricManager.MANY_TO_MANY };

  /**
   * Translates the SPARK constraints into the initial HeapIns flow graph: object
   * allocations seed the points-to sets, assignments become (possibly
   * context-mapped) flow edges, and loads/stores are registered as complex
   * constraints on their base pointers.
   *
   * @param ptAnalyzer
   *          the geometric points-to solver owning the constraints and worklist
   */
  @Override
  public void initFlowGraph(GeomPointsTo ptAnalyzer) {
    int k;
    int n_legal_cons;
    int nf1, nf2;
    int code;
    IVarAbstraction my_lhs, my_rhs;

    // Visit all the simple constraints
    n_legal_cons = 0;

    for (PlainConstraint cons : ptAnalyzer.constraints) {
      if (!cons.isActive) {
        continue;
      }

      my_lhs = cons.getLHS().getRepresentative();
      my_rhs = cons.getRHS().getRepresentative();
      nf1 = ptAnalyzer.getMethodIDFromPtr(my_lhs);
      nf2 = ptAnalyzer.getMethodIDFromPtr(my_rhs);

      // Test how many globals are in this constraint
      code = ((nf1 == Constants.SUPER_MAIN ? 1 : 0) << 1) | (nf2 == Constants.SUPER_MAIN ? 1 : 0);

      switch (cons.type) {
        case Constants.NEW_CONS:
          // We directly add the objects to the points-to set
          my_rhs.add_points_to_3((AllocNode) my_lhs.getWrappedNode(), (code & 1) == 1 ? 0 : 1, // to decide if the receiver
              // is a global or not
              (code >> 1) == 1 ? 0 : 1, // if the object is a global or not
              (code & 1) == 1 ? ptAnalyzer.context_size[nf1] : ptAnalyzer.context_size[nf2]);

          // Enqueue to the worklist
          ptAnalyzer.getWorklist().push(my_rhs);
          break;

        case Constants.ASSIGN_CONS:
          // The core part of any context sensitive algorithms
          if (cons.interCallEdges != null) {
            // Inter-procedural assignment
            for (Iterator<Edge> it = cons.interCallEdges.iterator(); it.hasNext();) {
              Edge sEdge = it.next();
              CgEdge q = ptAnalyzer.getInternalEdgeFromSootEdge(sEdge);
              if (q.is_obsoleted == true) {
                continue;
              }

              // Parameter passing
              if (nf2 == q.t) {
                /*
                 * The receiver must be a local, while the sender is perhaps not (e.g. for handling reflection, see class
                 * PAG) In that case, nf1 is 0.
                 */
                if (nf1 == Constants.SUPER_MAIN) {
                  my_lhs.add_simple_constraint_3(my_rhs, 0, q.map_offset, ptAnalyzer.max_context_size_block[q.s]);
                } else {
                  // nf1 == q.s
                  // We should treat the self recursive calls specially
                  if (q.s == q.t) {
                    my_lhs.add_simple_constraint_3(my_rhs, 1, 1, ptAnalyzer.context_size[nf1]);
                  } else {
                    // One flow edge per context block of the caller
                    for (k = 0; k < ptAnalyzer.block_num[nf1]; ++k) {
                      my_lhs.add_simple_constraint_3(my_rhs, k * ptAnalyzer.max_context_size_block[nf1] + 1, q.map_offset,
                          ptAnalyzer.max_context_size_block[nf1]);
                    }
                  }
                }
              } else {
                // nf2 == q.s
                // Return value
                // Both are locals
                if (q.s == q.t) {
                  my_lhs.add_simple_constraint_3(my_rhs, 1, 1, ptAnalyzer.context_size[nf2]);
                } else {
                  // One flow edge per context block of the caller
                  for (k = 0; k < ptAnalyzer.block_num[nf2]; ++k) {
                    my_lhs.add_simple_constraint_3(my_rhs, q.map_offset, k * ptAnalyzer.max_context_size_block[nf2] + 1,
                        ptAnalyzer.max_context_size_block[nf2]);
                  }
                }
              }
            }
          } else {
            // Intraprocedural
            // And, assignment involves global variable goes here. By
            // definition, global variables belong to SUPER_MAIN.
            // By the Jimple IR, not both sides are global variables
            my_lhs.add_simple_constraint_3(my_rhs, nf1 == Constants.SUPER_MAIN ? 0 : 1, nf2 == Constants.SUPER_MAIN ? 0 : 1,
                nf1 == Constants.SUPER_MAIN ? ptAnalyzer.context_size[nf2] : ptAnalyzer.context_size[nf1]);
          }
          break;

        case Constants.LOAD_CONS:
          // lhs is always a local
          // rhs = lhs.f
          cons.code = full_convertor[code];
          cons.otherSide = my_rhs;
          my_lhs.put_complex_constraint(cons);
          break;

        case Constants.STORE_CONS:
          // rhs is always a local
          // rhs.f = lhs
          cons.code = full_convertor[code];
          cons.otherSide = my_lhs;
          my_rhs.put_complex_constraint(cons);
          break;

        default:
          // BUGFIX: the message previously said "Invalid node type", but this
          // switch dispatches on the CONSTRAINT type.
          throw new RuntimeException("Invalid constraint type");
      }

      ++n_legal_cons;
    }

    ptAnalyzer.ps.printf("Only %d (%.1f%%) constraints are needed for this run.\n", n_legal_cons,
        ((double) n_legal_cons / ptAnalyzer.n_init_constraints) * 100);
  }

  /**
   * Creates the HeapIns pointer abstraction for a SPARK node. Allocation sites and
   * field dereference nodes get a lightweight dummy wrapper; all other nodes get a
   * full {@link HeapInsNode}.
   */
  @Override
  public IVarAbstraction generateNode(Node vNode) {
    IVarAbstraction ret = null;

    if (vNode instanceof AllocNode || vNode instanceof FieldRefNode) {
      ret = new DummyNode(vNode);
    } else {
      ret = new HeapInsNode(vNode);
    }

    return ret;
  }

  /** @return the identifier of the HeapIns encoding */
  @Override
  public String getSignature() {
    return Constants.heapinsE;
  }
}
| 6,887
| 35.062827
| 124
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/geom/helper/GeomEvaluator.java
|
package soot.jimple.spark.geom.helper;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2011 Richard Xiao
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import soot.AnySubType;
import soot.ArrayType;
import soot.FastHierarchy;
import soot.Local;
import soot.RefLikeType;
import soot.RefType;
import soot.Scene;
import soot.SootField;
import soot.SootMethod;
import soot.Type;
import soot.Unit;
import soot.Value;
import soot.jimple.AssignStmt;
import soot.jimple.CastExpr;
import soot.jimple.InstanceFieldRef;
import soot.jimple.InvokeExpr;
import soot.jimple.Stmt;
import soot.jimple.spark.geom.dataRep.CgEdge;
import soot.jimple.spark.geom.geomPA.GeomPointsTo;
import soot.jimple.spark.geom.geomPA.IVarAbstraction;
import soot.jimple.spark.geom.utils.Histogram;
import soot.jimple.spark.pag.AllocDotField;
import soot.jimple.spark.pag.AllocNode;
import soot.jimple.spark.pag.LocalVarNode;
import soot.jimple.spark.pag.Node;
import soot.jimple.spark.pag.VarNode;
import soot.jimple.spark.sets.P2SetVisitor;
import soot.jimple.toolkits.callgraph.CallGraph;
import soot.jimple.toolkits.callgraph.Edge;
/**
* We provide a set of methods to evaluate the quality of geometric points-to analysis. The evaluation methods are:
*
* 1. Count the basic points-to information, such as average points-to set size, constraints evaluation graph size, etc; 2.
* Virtual function resolution comparison; 3. Static casts checking; 4. All pairs alias analysis; 5. Building heap graph (not
* used yet).
*
* @author xiao
*
*/
public class GeomEvaluator {
private static final Logger logger = LoggerFactory.getLogger(GeomEvaluator.class);
private GeomPointsTo ptsProvider;
private PrintStream outputer;
private EvalResults evalRes;
private boolean solved; // Used in the anonymous class visitor
public GeomEvaluator(GeomPointsTo gpts, PrintStream ps) {
ptsProvider = gpts;
outputer = ps;
evalRes = new EvalResults();
}
/**
* Collecting basic statistical information for SPARK.
*/
public void profileSparkBasicMetrics() {
int n_legal_var = 0;
int[] limits = new int[] { 1, 5, 10, 25, 50, 75, 100 };
evalRes.pts_size_bar_spark = new Histogram(limits);
for (IVarAbstraction pn : ptsProvider.pointers) {
// We don't consider exception pointers
Node var = pn.getWrappedNode();
if (ptsProvider.isExceptionPointer(var)) {
continue;
}
++n_legal_var;
int size = var.getP2Set().size();
evalRes.pts_size_bar_spark.addNumber(size);
evalRes.total_spark_pts += size;
if (size > evalRes.max_pts_spark) {
evalRes.max_pts_spark = size;
}
}
evalRes.avg_spark_pts = (double) evalRes.total_spark_pts / n_legal_var;
}
/**
* Summarize the geometric points-to analysis and report the basic metrics.
*/
  /**
   * Summarize the geometric points-to analysis and report the basic metrics.
   *
   * @param testSpark
   *          when true, the SPARK counters are recomputed alongside the geomPTA
   *          ones so both appear in the printed comparison
   */
  public void profileGeomBasicMetrics(boolean testSpark) {
    int n_legal_var = 0, n_alloc_dot_fields = 0;
    int[] limits = new int[] { 1, 5, 10, 25, 50, 75, 100 };
    evalRes.pts_size_bar_geom = new Histogram(limits);

    if (testSpark) {
      // Reset the SPARK counters so this pass recomputes them over the same
      // pointer population used for the geomPTA numbers
      evalRes.total_spark_pts = 0;
      evalRes.max_pts_spark = 0;
      evalRes.pts_size_bar_spark = new Histogram(limits);
    }

    // We first count the LOC (number of Jimple units over all reachable concrete methods)
    for (SootMethod sm : ptsProvider.getAllReachableMethods()) {
      if (!sm.isConcrete()) {
        continue;
      }
      if (!sm.hasActiveBody()) {
        sm.retrieveActiveBody();
      }
      evalRes.loc += sm.getActiveBody().getUnits().size();
    }

    for (IVarAbstraction pn : ptsProvider.pointers) {
      // We don't consider those un-processed pointers because their
      // points-to information is equivalent to SPARK
      if (!pn.hasPTResult()) {
        continue;
      }

      pn = pn.getRepresentative();
      Node var = pn.getWrappedNode();
      if (ptsProvider.isExceptionPointer(var)) {
        continue;
      }

      if (var instanceof AllocDotField) {
        ++n_alloc_dot_fields;
      }
      ++n_legal_var;

      // ...spark
      int size;
      if (testSpark) {
        size = var.getP2Set().size();
        evalRes.pts_size_bar_spark.addNumber(size);
        evalRes.total_spark_pts += size;
        if (size > evalRes.max_pts_spark) {
          evalRes.max_pts_spark = size;
        }
      }

      // ...geom: number of distinct objects after the geometric solving
      size = pn.num_of_diff_objs();
      evalRes.pts_size_bar_geom.addNumber(size);
      evalRes.total_geom_ins_pts += size;
      if (size > evalRes.max_pts_geom) {
        evalRes.max_pts_geom = size;
      }
    }

    evalRes.avg_geom_ins_pts = (double) evalRes.total_geom_ins_pts / n_legal_var;
    if (testSpark) {
      evalRes.avg_spark_pts = (double) evalRes.total_spark_pts / n_legal_var;
    }

    // Print the side-by-side report; format is "geomPTA (SPARK)"
    outputer.println("");
    outputer.println("----------Statistical Result of geomPTA <Data Format: geomPTA (SPARK)>----------");
    outputer.printf("Lines of code (jimple): %.1fK\n", (double) evalRes.loc / 1000);
    outputer.printf("Reachable Methods: %d (%d)\n", ptsProvider.getNumberOfMethods(), ptsProvider.getNumberOfSparkMethods());
    outputer.printf("Reachable User Methods: %d (%d)\n", ptsProvider.n_reach_user_methods,
        ptsProvider.n_reach_spark_user_methods);
    outputer.println("#All Pointers: " + ptsProvider.getNumberOfPointers());
    outputer.println("#Core Pointers: " + n_legal_var + ", in which #AllocDot Fields: " + n_alloc_dot_fields);
    outputer.printf("Total/Average Projected Points-to Tuples [core pointers]: %d (%d) / %.3f (%.3f) \n",
        evalRes.total_geom_ins_pts, evalRes.total_spark_pts, evalRes.avg_geom_ins_pts, evalRes.avg_spark_pts);
    outputer.println(
        "The largest points-to set size [core pointers]: " + evalRes.max_pts_geom + " (" + evalRes.max_pts_spark + ")");
    outputer.println();
    evalRes.pts_size_bar_geom.printResult(outputer, "Points-to Set Sizes Distribution [core pointers]:",
        evalRes.pts_size_bar_spark);
  }
/**
* We assess the quality of building the 1-cfa call graph with the geometric points-to result.
*/
  /**
   * We assess the quality of building the 1-cfa call graph with the geometric points-to result.
   *
   * For each call edge into {@code caller}, the base pointer's points-to set is
   * restricted to that edge's context interval and the virtual dispatch is
   * re-resolved; the number of distinct concrete targets is recorded in
   * {@code ce_range}.
   *
   * @param vn
   *          the base (receiver) variable of the virtual callsite
   * @param caller
   *          the method containing the callsite
   * @param callee_signature
   *          the statically invoked method used for concrete dispatch resolution
   * @param ce_range
   *          histogram collecting the per-context target counts
   */
  private void test_1cfa_call_graph(LocalVarNode vn, SootMethod caller, SootMethod callee_signature, Histogram ce_range) {
    long l, r;
    IVarAbstraction pn = ptsProvider.findInternalNode(vn);
    if (pn == null) {
      // The receiver pointer was not modeled by geomPTA; nothing to measure
      return;
    }

    pn = pn.getRepresentative();
    Set<SootMethod> tgts = new HashSet<SootMethod>();
    Set<AllocNode> set = pn.get_all_points_to_objects();

    LinkedList<CgEdge> list = ptsProvider.getCallEdgesInto(ptsProvider.getIDFromSootMethod(caller));
    FastHierarchy hierarchy = Scene.v().getOrMakeFastHierarchy();

    for (Iterator<CgEdge> it = list.iterator(); it.hasNext();) {
      CgEdge p = it.next();

      // [l, r) is the context interval assigned to this incoming call edge
      l = p.map_offset;
      r = l + ptsProvider.max_context_size_block[p.s];
      tgts.clear();

      for (AllocNode obj : set) {
        // Keep only objects reachable under the contexts of this call edge
        if (!pn.pointer_interval_points_to(l, r, obj)) {
          continue;
        }

        Type t = obj.getType();

        if (t == null) {
          continue;
        } else if (t instanceof AnySubType) {
          t = ((AnySubType) t).getBase();
        } else if (t instanceof ArrayType) {
          // Arrays dispatch through java.lang.Object
          t = RefType.v("java.lang.Object");
        }

        try {
          tgts.add(hierarchy.resolveConcreteDispatch(((RefType) t).getSootClass(), callee_signature));
        } catch (Exception e) {
          // Dispatch resolution can fail for ill-typed input; skip the object
          logger.debug(e.getMessage(), e);
        }
      }

      tgts.remove(null);
      ce_range.addNumber(tgts.size());
    }
  }
/**
* Report the virtual callsites resolution result for the user's code.
*/
  /**
   * Report the virtual callsites resolution result for the user's code.
   *
   * Walks every polymorphic callsite recorded by the solver, counts its outgoing
   * call-graph edges, and records how many sites geomPTA resolves to a single
   * target; unresolved application-code sites are additionally measured against
   * the 1-CFA call graph. Results are accumulated in {@code evalRes} and printed.
   */
  public void checkCallGraph() {
    int[] limits = new int[] { 1, 2, 4, 8 };
    evalRes.total_call_edges = new Histogram(limits);

    CallGraph cg = Scene.v().getCallGraph();

    for (Stmt callsite : ptsProvider.multiCallsites) {
      Iterator<Edge> edges = cg.edgesOutOf(callsite);
      if (!edges.hasNext()) {
        continue;
      }
      evalRes.n_callsites++;

      // get an edge
      Edge anyEdge = edges.next();
      SootMethod src = anyEdge.src();

      if (!ptsProvider.isReachableMethod(src) || !ptsProvider.isValidMethod(src)) {
        continue;
      }

      // get the base pointer
      CgEdge p = ptsProvider.getInternalEdgeFromSootEdge(anyEdge);
      LocalVarNode vn = (LocalVarNode) p.base_var;

      // test the call graph: count the remaining outgoing edges
      int edge_cnt = 1;
      while (edges.hasNext()) {
        ++edge_cnt;
        edges.next();
      }
      evalRes.n_geom_call_edges += edge_cnt;
      if (edge_cnt == 1) {
        // geomPTA fully resolved this callsite to one target
        ++evalRes.n_geom_solved_all;
      }

      // test app method: application (non-library) callsites get extra reporting
      if (!src.isJavaLibraryMethod()) {
        InvokeExpr ie = callsite.getInvokeExpr();

        if (edge_cnt == 1) {
          ++evalRes.n_geom_solved_app;

          if (ptsProvider.getOpts().verbose()) {
            outputer.println();
            outputer.println("<<<<<<<<< Additional Solved Call >>>>>>>>>>");
            outputer.println(src.toString());
            outputer.println(ie.toString());
          }
        } else {
          // We try to test if this callsite is solvable
          // under some contexts
          Histogram call_edges = new Histogram(limits);
          test_1cfa_call_graph(vn, src, ie.getMethod(), call_edges);
          evalRes.total_call_edges.merge(call_edges);
          call_edges = null;
        }

        evalRes.n_geom_user_edges += edge_cnt;
        evalRes.n_user_callsites++;
      }
    }

    ptsProvider.ps.println();
    ptsProvider.ps.println("--------> Virtual Callsites Evaluation <---------");
    ptsProvider.ps.printf("Total virtual callsites (app code): %d (%d)\n", evalRes.n_callsites, evalRes.n_user_callsites);
    ptsProvider.ps.printf("Total virtual call edges (app code): %d (%d)\n", evalRes.n_geom_call_edges,
        evalRes.n_geom_user_edges);
    ptsProvider.ps.printf("Virtual callsites additionally solved by geomPTA compared to SPARK (app code) = %d (%d)\n",
        evalRes.n_geom_solved_all, evalRes.n_geom_solved_app);
    evalRes.total_call_edges.printResult(ptsProvider.ps, "Testing of unsolved callsites on 1-CFA call graph: ");

    if (ptsProvider.getOpts().verbose()) {
      ptsProvider.outputNotEvaluatedMethods();
    }
  }
/**
 * Count how many aliased base pointers appeared in all user's functions.
 * Phase 1 collects every base pointer p occurring in an expression p.f inside a
 * reachable, concrete application method; phase 2 tests all pairs with both the
 * geometric (heap sensitive) and the SPARK (insensitive) result.
 */
public void checkAliasAnalysis() {
  Set<IVarAbstraction> access_expr = new HashSet<IVarAbstraction>();
  ArrayList<IVarAbstraction> al = new ArrayList<IVarAbstraction>();
  Value[] values = new Value[2];

  // Phase 1: gather all base pointers of instance field accesses.
  for (SootMethod sm : ptsProvider.getAllReachableMethods()) {
    if (sm.isJavaLibraryMethod()) {
      continue;
    }
    if (!sm.isConcrete()) {
      continue;
    }
    if (!sm.hasActiveBody()) {
      sm.retrieveActiveBody();
    }
    if (!ptsProvider.isValidMethod(sm)) {
      continue;
    }

    for (Iterator<Unit> stmts = sm.getActiveBody().getUnits().iterator(); stmts.hasNext();) {
      Stmt st = (Stmt) stmts.next();

      if (st instanceof AssignStmt) {
        AssignStmt a = (AssignStmt) st;
        values[0] = a.getLeftOp();
        values[1] = a.getRightOp();

        for (Value v : values) {
          // We only care about those pointers p involved in the expression p.f
          if (v instanceof InstanceFieldRef) {
            InstanceFieldRef ifr = (InstanceFieldRef) v;
            final SootField field = ifr.getField();
            if (!(field.getType() instanceof RefType)) {
              continue;
            }

            LocalVarNode vn = ptsProvider.findLocalVarNode((Local) ifr.getBase());
            if (vn == null) {
              continue;
            }
            if (ptsProvider.isExceptionPointer(vn)) {
              continue;
            }

            IVarAbstraction pn = ptsProvider.findInternalNode(vn);
            if (pn == null) {
              continue;
            }
            pn = pn.getRepresentative();
            if (pn.hasPTResult()) {
              access_expr.add(pn);
            }
          }
        }
      }
    }
  }

  access_expr.remove(null);
  al.addAll(access_expr);
  access_expr = null;

  // Phase 2: pair up all the pointers and query both analyses.
  Date begin = new Date();

  int size = al.size();
  for (int i = 0; i < size; ++i) {
    IVarAbstraction pn = al.get(i);
    VarNode n1 = (VarNode) pn.getWrappedNode();

    for (int j = i + 1; j < size; ++j) {
      IVarAbstraction qn = al.get(j);
      VarNode n2 = (VarNode) qn.getWrappedNode();

      if (pn.heap_sensitive_intersection(qn)) {
        evalRes.n_hs_alias++;
      }

      // We directly use the SPARK points-to sets
      if (n1.getP2Set().hasNonEmptyIntersection(n2.getP2Set())) {
        evalRes.n_hi_alias++;
      }
    }
  }

  // FIX: promote to long before multiplying. The previous int expression
  // size * (size - 1) overflows for size >= 65536 even though the target
  // field n_alias_pairs is a long.
  evalRes.n_alias_pairs = (long) size * (size - 1) / 2;
  Date end = new Date();

  ptsProvider.ps.println();
  ptsProvider.ps.println("--------> Alias Pairs Evaluation <---------");
  ptsProvider.ps.println("Number of pointer pairs in app code: " + evalRes.n_alias_pairs);
  ptsProvider.ps.printf("Heap sensitive alias pairs (by Geom): %d, Percentage = %.3f%%\n", evalRes.n_hs_alias,
      (double) evalRes.n_hs_alias / evalRes.n_alias_pairs * 100);
  ptsProvider.ps.printf("Heap insensitive alias pairs (by SPARK): %d, Percentage = %.3f%%\n", evalRes.n_hi_alias,
      (double) evalRes.n_hi_alias / evalRes.n_alias_pairs * 100);
  ptsProvider.ps.printf("Using time: %dms \n", end.getTime() - begin.getTime());
  ptsProvider.ps.println();
}
/**
 * Count how many static casts can be determined safe.
 * A cast (T) v is safe when every object v may point to is castable to T; the
 * question is answered once with the geometric result and once with SPARK.
 */
public void checkCastsSafety() {
  // Visit every reachable, concrete, valid application method.
  for (SootMethod sm : ptsProvider.getAllReachableMethods()) {
    if (sm.isJavaLibraryMethod()) {
      continue;
    }
    if (!sm.isConcrete()) {
      continue;
    }
    if (!sm.hasActiveBody()) {
      sm.retrieveActiveBody();
    }
    if (!ptsProvider.isValidMethod(sm)) {
      continue;
    }

    // All the statements in the method
    for (Iterator<Unit> stmts = sm.getActiveBody().getUnits().iterator(); stmts.hasNext();) {
      Stmt st = (Stmt) stmts.next();

      if (st instanceof AssignStmt) {
        Value rhs = ((AssignStmt) st).getRightOp();
        Value lhs = ((AssignStmt) st).getLeftOp();
        // Only reference-typed casts are of interest.
        if (rhs instanceof CastExpr && lhs.getType() instanceof RefLikeType) {

          Value v = ((CastExpr) rhs).getOp();
          VarNode node = ptsProvider.findLocalVarNode(v);
          if (node == null) {
            continue;
          }
          IVarAbstraction pn = ptsProvider.findInternalNode(node);
          if (pn == null) {
            continue;
          }

          pn = pn.getRepresentative();
          if (!pn.hasPTResult()) {
            continue;
          }

          evalRes.total_casts++;
          final Type targetType = (RefLikeType) ((CastExpr) rhs).getCastType();

          // We first use the geometric points-to result to evaluate:
          // safe iff castNeverFails holds for every pointed-to object.
          solved = true;
          Set<AllocNode> set = pn.get_all_points_to_objects();
          for (AllocNode obj : set) {
            solved = ptsProvider.castNeverFails(obj.getType(), targetType);
            if (solved == false) {
              break;
            }
          }

          if (solved) {
            evalRes.geom_solved_casts++;
          }

          // Second is the SPARK result. NOTE: 'solved' is an instance field
          // (not a local) so the anonymous visitor below can mutate it.
          solved = true;
          node.getP2Set().forall(new P2SetVisitor() {

            public void visit(Node arg0) {
              // Once falsified, skip the remaining objects.
              if (solved == false) {
                return;
              }
              solved = ptsProvider.castNeverFails(arg0.getType(), targetType);
            }
          });

          if (solved) {
            evalRes.spark_solved_casts++;
          }
        }
      }
    }
  }

  ptsProvider.ps.println();
  ptsProvider.ps.println("-----------> Static Casts Safety Evaluation <------------");
  ptsProvider.ps.println("Total casts (app code): " + evalRes.total_casts);
  ptsProvider.ps.println("Safe casts: Geom = " + evalRes.geom_solved_casts + ", SPARK = " + evalRes.spark_solved_casts);
}
/**
 * Estimate the size of the def-use graph for the heap memory. The heap graph is estimated without context information.
 * For every alloc.field location we count defs and uses separately; an edge
 * exists for each (def, use) pair, hence the final defs * uses products.
 */
public void estimateHeapDefuseGraph() {
  // Per-(object, field) counters: index 0 = writes (defs), index 1 = reads (uses).
  final Map<IVarAbstraction, int[]> defUseCounterForGeom = new HashMap<IVarAbstraction, int[]>();
  final Map<AllocDotField, int[]> defUseCounterForSpark = new HashMap<AllocDotField, int[]>();

  Date begin = new Date();

  for (SootMethod sm : ptsProvider.getAllReachableMethods()) {
    if (sm.isJavaLibraryMethod()) {
      continue;
    }
    if (!sm.isConcrete()) {
      continue;
    }
    if (!sm.hasActiveBody()) {
      sm.retrieveActiveBody();
    }
    if (!ptsProvider.isValidMethod(sm)) {
      continue;
    }

    // We first gather all the memory access expressions
    for (Iterator<Unit> stmts = sm.getActiveBody().getUnits().iterator(); stmts.hasNext();) {
      Stmt st = (Stmt) stmts.next();

      if (!(st instanceof AssignStmt)) {
        continue;
      }

      AssignStmt a = (AssignStmt) st;
      final Value lValue = a.getLeftOp();
      final Value rValue = a.getRightOp();

      InstanceFieldRef ifr = null;

      if (lValue instanceof InstanceFieldRef) {
        // Def statement
        ifr = (InstanceFieldRef) lValue;
      } else if (rValue instanceof InstanceFieldRef) {
        // Use statement
        ifr = (InstanceFieldRef) rValue;
      }

      if (ifr != null) {
        final SootField field = ifr.getField();
        LocalVarNode vn = ptsProvider.findLocalVarNode((Local) ifr.getBase());
        if (vn == null) {
          continue;
        }
        IVarAbstraction pn = ptsProvider.findInternalNode(vn);
        if (pn == null) {
          continue;
        }

        pn = pn.getRepresentative();
        if (!pn.hasPTResult()) {
          continue;
        }

        // Spark: charge the access to every alloc.field the base may reach.
        vn.getP2Set().forall(new P2SetVisitor() {

          @Override
          public void visit(Node n) {
            IVarAbstraction padf = ptsProvider.findAndInsertInstanceField((AllocNode) n, field);
            AllocDotField adf = (AllocDotField) padf.getWrappedNode();
            int[] defUseUnit = defUseCounterForSpark.get(adf);
            if (defUseUnit == null) {
              defUseUnit = new int[2];
              defUseCounterForSpark.put(adf, defUseUnit);
            }
            if (lValue instanceof InstanceFieldRef) {
              defUseUnit[0]++;
            } else {
              defUseUnit[1]++;
            }
          }
        });

        // Geom
        Set<AllocNode> objsSet = pn.get_all_points_to_objects();
        for (AllocNode obj : objsSet) {
          /*
           * We will create a lot of instance fields. Because in points-to analysis, we concern only the reference type
           * fields. But here, we concern all the fields read write including the primitive type fields.
           */
          IVarAbstraction padf = ptsProvider.findAndInsertInstanceField(obj, field);
          int[] defUseUnit = defUseCounterForGeom.get(padf);
          if (defUseUnit == null) {
            defUseUnit = new int[2];
            defUseCounterForGeom.put(padf, defUseUnit);
          }
          if (lValue instanceof InstanceFieldRef) {
            defUseUnit[0]++;
          } else {
            defUseUnit[1]++;
          }
        }
      }
    }
  }

  // Every def can reach every use of the same location: defs * uses edges.
  for (int[] defUseUnit : defUseCounterForSpark.values()) {
    evalRes.n_spark_du_pairs += ((long) defUseUnit[0]) * defUseUnit[1];
  }

  for (int[] defUseUnit : defUseCounterForGeom.values()) {
    evalRes.n_geom_du_pairs += ((long) defUseUnit[0]) * defUseUnit[1];
  }

  Date end = new Date();

  ptsProvider.ps.println();
  ptsProvider.ps.println("-----------> Heap Def Use Graph Evaluation <------------");
  ptsProvider.ps.println("The edges in the heap def-use graph is (by Geom): " + evalRes.n_geom_du_pairs);
  ptsProvider.ps.println("The edges in the heap def-use graph is (by Spark): " + evalRes.n_spark_du_pairs);
  ptsProvider.ps.printf("Using time: %dms \n", end.getTime() - begin.getTime());
  ptsProvider.ps.println();
}
}
/**
 * Plain data container for the metrics collected by the evaluation passes.
 * The counters are written by the individual evaluation methods and printed afterwards.
 */
class EvalResults {
  // Basic metrics
  public int loc = 0; // lines-of-code counter (written by other evaluation code)
  public long total_geom_ins_pts = 0, total_spark_pts = 0; // summed points-to set sizes
  public double avg_geom_ins_pts = .0, avg_spark_pts = .0; // average points-to set sizes
  public int max_pts_geom = 0, max_pts_spark = 0; // largest points-to set observed
  public Histogram pts_size_bar_geom = null, pts_size_bar_spark = null; // size distributions

  // Call graph metrics
  public int n_callsites = 0, n_user_callsites = 0; // virtual callsites (all / app code)
  public int n_geom_call_edges = 0, n_geom_user_edges = 0; // call edges (all / app code)
  public int n_geom_solved_all = 0, n_geom_solved_app = 0; // single-target callsites found by geomPTA
  public Histogram total_call_edges = null; // 1-CFA target counts for unsolved callsites

  // Alias metrics
  public long n_alias_pairs = 0; // total pointer pairs tested
  public long n_hs_alias = 0, n_hi_alias = 0; // aliased pairs: heap-sensitive (geom) / insensitive (SPARK)

  // Static cast metrics
  public int total_casts = 0;
  public int geom_solved_casts = 0, spark_solved_casts = 0; // casts proven safe by each analysis

  // Heap def-use graph metrics
  public long n_geom_du_pairs = 0, n_spark_du_pairs = 0; // def-use edge counts per analysis
}
| 22,337
| 31.657895
| 125
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/geom/ptinsE/PtInsIntervalManager.java
|
package soot.jimple.spark.geom.ptinsE;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2012 Richard Xiao
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.jimple.spark.geom.dataRep.RectangleNode;
import soot.jimple.spark.geom.dataRep.SegmentNode;
import soot.jimple.spark.geom.geomPA.Constants;
import soot.jimple.spark.geom.geomPA.IFigureManager;
/**
 * The figure manager for the PtIns descriptors. The implementation is almost same to the HeapIns manager, please refer to
 * HeapInsIntervalManager for more detailed comments.
 *
 * @author xiao
 *
 */
public class PtInsIntervalManager extends IFigureManager {
  /** Number of figure lists maintained by this manager. */
  public static final int Divisions = 3;

  // Figure type codes. ALL_TO_ALL is a pseudo code: the universal figure is
  // stored in the ALL_TO_MANY list with I2 == 0 (see addNewFigure).
  public static final int ALL_TO_ALL = -1; // A special case
  public static final int ALL_TO_MANY = 0;
  public static final int MANY_TO_ALL = 1;
  public static final int ONE_TO_ONE = 2;

  int size[] = { 0, 0, 0 };
  SegmentNode header[] = { null, null, null };
  // True when at least one figure added since the last flush() is unprocessed.
  private boolean hasNewObject = false;

  public SegmentNode[] getFigures() {
    return header;
  }

  public int[] getSizes() {
    return size;
  }

  public boolean isThereUnprocessedFigures() {
    return hasNewObject;
  }

  /** Mark every pending figure processed. New figures sit at the list heads. */
  public void flush() {
    hasNewObject = false;

    for (int i = 0; i < Divisions; ++i) {
      SegmentNode p = header[i];
      while (p != null && p.is_new == true) {
        p.is_new = false;
        p = p.next;
      }
    }
  }

  /**
   * Insert a new figure of the given type code described by pnew.
   *
   * @return the inserted segment, or null when the figure is redundant
   */
  public SegmentNode addNewFigure(int code, RectangleNode pnew) {
    SegmentNode p;

    if (code == ALL_TO_ALL) {
      // Directly clean all the existing intervals
      if (header[0] != null && header[0].I2 == 0) {
        return null;
      }

      p = new SegmentNode();
      p.I1 = p.I2 = 0;
      p.L = Constants.MAX_CONTEXTS;

      // FIX: reclassify the universal figure as ALL_TO_MANY before the shared
      // bookkeeping below. Previously code stayed -1, so size[code]++ and
      // header[code] indexed the arrays with -1 and threw
      // ArrayIndexOutOfBoundsException (the HeapIns manager performs the same
      // reassignment).
      code = ALL_TO_MANY;

      for (int i = 0; i < Divisions; ++i) {
        size[i] = 0;
        header[i] = null;
      }
    } else {
      // Duplicate testing: reject figures already covered by existing ones.
      if (code == ALL_TO_MANY || code == ONE_TO_ONE) {
        p = header[ALL_TO_MANY];
        while (p != null) {
          if ((p.I2 <= pnew.I2) && (p.I2 + p.L >= pnew.I2 + pnew.L)) {
            return null;
          }
          p = p.next;
        }
      }

      if (code == MANY_TO_ALL || code == ONE_TO_ONE) {
        p = header[MANY_TO_ALL];
        while (p != null) {
          if ((p.I1 <= pnew.I1) && (p.I1 + p.L >= pnew.I1 + pnew.L)) {
            return null;
          }
          p = p.next;
        }
      }

      // Be careful of this!
      if (code == ONE_TO_ONE) {
        p = header[ONE_TO_ONE];
        while (p != null) {
          if (p.I1 - p.I2 == pnew.I1 - pnew.I2) {
            // On the same line
            if (p.I1 <= pnew.I1 && p.I1 + p.L >= pnew.I1 + pnew.L) {
              return null;
            }
          }
          p = p.next;
        }
      }

      // Insert the new interval immediately, and we delay the merging until necessary
      p = new SegmentNode(pnew);

      if (code == ALL_TO_MANY) {
        clean_garbage_all_to_many(p);
      } else if (code == MANY_TO_ALL) {
        clean_garbage_many_to_all(p);
      } else {
        clean_garbage_one_to_one(p);
      }
    }

    hasNewObject = true;
    size[code]++;
    p.next = header[code];
    header[code] = p;
    return p;
  }

  /** Collapse over-budget lists into a single coarser figure. */
  public void mergeFigures(int upperSize) {
    if (size[ONE_TO_ONE] > upperSize && header[ONE_TO_ONE].is_new == true) {
      // After the merging, we must propagate this interval, thus it has to be a new interval
      SegmentNode p = generate_all_to_many(header[ONE_TO_ONE]);

      clean_garbage_all_to_many(p);
      p.next = header[ALL_TO_MANY];
      header[ALL_TO_MANY] = p;
      header[ONE_TO_ONE] = null;
      size[ALL_TO_MANY]++;
      size[ONE_TO_ONE] = 0;
    }

    if (size[MANY_TO_ALL] > upperSize && header[MANY_TO_ALL].is_new == true) {
      header[MANY_TO_ALL] = generate_many_to_all(header[MANY_TO_ALL]);
      size[MANY_TO_ALL] = 1;
    }

    if (size[ALL_TO_MANY] > upperSize && header[ALL_TO_MANY].is_new == true) {
      header[ALL_TO_MANY] = generate_all_to_many(header[ALL_TO_MANY]);
      size[ALL_TO_MANY] = 1;
    }
  }

  /** Drop ONE_TO_ONE figures fully covered by a figure from another list. */
  public void removeUselessSegments() {
    int i;
    SegmentNode p, q, temp;

    p = header[ONE_TO_ONE];
    size[ONE_TO_ONE] = 0;
    q = null;

    while (p != null) {
      boolean contained = false;

      for (i = 0; i < 2; ++i) {
        temp = header[i];
        while (temp != null) {
          // I1 == 0 (resp. I2 == 0) means "all" on that axis.
          if (temp.I1 == 0 || ((temp.I1 <= p.I1) && (temp.I1 + temp.L >= p.I1 + p.L))) {
            if (temp.I2 == 0 || ((temp.I2 <= p.I2) && (temp.I2 + temp.L >= p.I2 + p.L))) {
              contained = true;
              break;
            }
          }
          temp = temp.next;
        }
      }

      temp = p.next;
      if (contained == false) {
        p.next = q;
        q = p;
        ++size[ONE_TO_ONE];
      }
      p = temp;
    }

    header[ONE_TO_ONE] = q;
  }

  /**
   * Merge all the context sensitive intervals. The result is in the form (p, q, 0, I, L).
   */
  private SegmentNode generate_all_to_many(SegmentNode mp) {
    long left, right, t;
    SegmentNode p;

    left = mp.I2;
    right = left + mp.L;
    p = mp.next;

    while (p != null) {
      if (p.I2 < left) {
        left = p.I2;
      }
      t = p.I2 + p.L;
      if (t > right) {
        right = t;
      }
      p = p.next;
    }

    // Reuse the head node as the merged figure.
    mp.I1 = 0;
    mp.I2 = left;
    mp.L = right - left;
    mp.next = null;

    return mp;
  }

  /**
   * The result is in the form: (p, q, I, 0, L)
   */
  private SegmentNode generate_many_to_all(SegmentNode mp) {
    long left, right, t;
    SegmentNode p;

    left = mp.I1;
    right = left + mp.L;
    p = mp.next;

    while (p != null) {
      if (p.I1 < left) {
        left = p.I1;
      }
      t = p.I1 + p.L;
      if (t > right) {
        right = t;
      }
      p = p.next;
    }

    // Note, left could be 0. In that case, the propagation along this edge becomes totally insensitive
    mp.I1 = left;
    mp.I2 = 0;
    mp.L = right - left;
    mp.next = null;

    return mp;
  }

  // Clean garbages in list that the information is already covered by mp
  // BTW, we do some simple concatenation
  private void clean_garbage_many_to_all(SegmentNode mp) {
    SegmentNode p, q, list;
    int num;
    long right, left;

    list = header[1];
    p = q = null;
    num = 0;
    left = mp.I1;
    right = left + mp.L;

    while (list != null) {
      if (list.I1 >= left) {
        if (list.I1 <= right) {
          if (list.I1 + list.L > right) {
            // We extend mp to the right
            right = list.I1 + list.L;
          }
          list = list.next;
          continue;
        }
      } else if (list.I1 + list.L >= left) {
        // We extend mp to the left
        left = list.I1;
        list = list.next;
        continue;
      }

      // No intersection, no overlap
      // Notice that, we have to preserve the order of the list
      // Because the unprocessed points-to tuples are headed at the list
      if (q == null) {
        p = q = list;
      } else {
        q.next = list;
        q = list;
      }

      ++num;
      list = list.next;
    }

    mp.I1 = left;
    mp.L = right - left;
    if (q != null) {
      q.next = null;
    }
    header[1] = p;
    size[1] = num;
  }

  private void clean_garbage_all_to_many(SegmentNode mp) {
    SegmentNode p, q, list;
    int num;
    long right, left;

    list = header[0];
    p = q = null;
    num = 0;
    left = mp.I2;
    right = mp.I2 + mp.L;

    while (list != null) {
      if (list.I2 >= left) {
        if (list.I2 <= right) {
          if (list.I2 + list.L > right) {
            // We extend mp to the right
            right = list.I2 + list.L;
          }
          list = list.next;
          continue;
        }
      } else if (list.I2 + list.L >= left) {
        // We extend mp to the left
        left = list.I2;
        list = list.next;
        continue;
      }

      // No intersection, no overlap
      // Notice that, we have to preserve the order of the list
      // Because the unprocessed points-to tuples are headed at the list
      if (q == null) {
        p = q = list;
      } else {
        q.next = list;
        q = list;
      }

      ++num;
      list = list.next;
    }

    mp.I2 = left;
    mp.L = right - left;
    if (q != null) {
      q.next = null;
    }
    header[0] = p;
    size[0] = num;
  }

  /*
   * Eliminate the redundant ONE_TO_ONE figures
   */
  private void clean_garbage_one_to_one(SegmentNode predator) {
    SegmentNode p, q, list;
    int num;

    list = header[ONE_TO_ONE];
    p = q = null;
    num = 0;

    while (list != null) {
      long L = list.L;
      // Same diagonal line and fully covered by the predator's span.
      if ((predator.I2 - predator.I1 == list.I2 - list.I1) && predator.I1 <= list.I1
          && (predator.I1 + predator.L >= list.I2 + L)) {
        // The checked figure is completely contained in the predator
        // So we ignore it
        ;
      } else {
        if (q == null) {
          p = q = list;
        } else {
          q.next = list;
          q = list;
        }
        ++num;
      }

      list = list.next;
    }

    if (q != null) {
      q.next = null;
    }
    header[ONE_TO_ONE] = p;
    size[ONE_TO_ONE] = num;
  }
}
| 9,868
| 22.895884
| 122
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/geom/ptinsE/PtInsNode.java
|
package soot.jimple.spark.geom.ptinsE;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2012 Richard Xiao
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.io.PrintStream;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.Vector;
import soot.Hierarchy;
import soot.RefType;
import soot.Scene;
import soot.SootClass;
import soot.SootMethod;
import soot.jimple.spark.geom.dataMgr.PtSensVisitor;
import soot.jimple.spark.geom.dataRep.PlainConstraint;
import soot.jimple.spark.geom.dataRep.RectangleNode;
import soot.jimple.spark.geom.dataRep.SegmentNode;
import soot.jimple.spark.geom.geomE.GeometricManager;
import soot.jimple.spark.geom.geomPA.Constants;
import soot.jimple.spark.geom.geomPA.GeomPointsTo;
import soot.jimple.spark.geom.geomPA.IVarAbstraction;
import soot.jimple.spark.geom.geomPA.IWorklist;
import soot.jimple.spark.geom.geomPA.Parameters;
import soot.jimple.spark.geom.heapinsE.HeapInsIntervalManager;
import soot.jimple.spark.pag.AllocNode;
import soot.jimple.spark.pag.LocalVarNode;
import soot.jimple.spark.pag.Node;
import soot.jimple.spark.pag.StringConstantNode;
import soot.jimple.spark.sets.P2SetVisitor;
/**
* This class defines a pointer variable in the PtIns encoding based points-to solver. Also, it is NOT recommended to use.
*
* @author xiao
*
*/
@Deprecated
public class PtInsNode extends IVarAbstraction {
// The targets of directed edges on the constraint graph
public Map<PtInsNode, PtInsIntervalManager> flowto;
// The objects this variable points to
public Map<AllocNode, PtInsIntervalManager> pt_objs;
// Newly added points-to tuple
public Map<AllocNode, PtInsIntervalManager> new_pts;
// store/load complex constraints
public Vector<PlainConstraint> complex_cons = null;

static {
  // Shared stub manager holding one universal (all-to-all) figure; it is
  // installed by injectPts() to represent context-insensitive SPARK results.
  stubManager = new PtInsIntervalManager();
  pres = new RectangleNode(0, 0, Constants.MAX_CONTEXTS, Constants.MAX_CONTEXTS);
  stubManager.addNewFigure(PtInsIntervalManager.ALL_TO_ALL, pres);
  // Sentinel manager marking objects that must never be propagated again.
  deadManager = new PtInsIntervalManager();
}
/** Wraps the given SPARK node into a PtIns pointer abstraction. */
public PtInsNode(Node thisVar) {
  me = thisVar;
}
@Override
public void deleteAll() {
  // Release every per-pointer structure so it can be garbage collected.
  complex_cons = null;
  new_pts = null;
  pt_objs = null;
  flowto = null;
}
@Override
public void reconstruct() {
  // Start from a clean slate: zeroed counter, no complex constraints,
  // and fresh empty containers.
  lrf_value = 0;
  complex_cons = null;
  new_pts = new HashMap<AllocNode, PtInsIntervalManager>();
  pt_objs = new HashMap<AllocNode, PtInsIntervalManager>();
  flowto = new HashMap<PtInsNode, PtInsIntervalManager>();
}
@Override
public void do_before_propagation() {
  // Keep the figure lists within budget before the next propagation round.
  // if ( complex_cons == null )
  do_pts_interval_merge();

  // if ( !(me instanceof LocalVarNode) )
  do_flow_edge_interval_merge();

  // This pointer filter, please read the comments at this line in file FullSensitiveNode.java
  Node wrappedNode = getWrappedNode();
  if (wrappedNode instanceof LocalVarNode && ((LocalVarNode) wrappedNode).isThisPtr()) {
    SootMethod func = ((LocalVarNode) wrappedNode).getMethod();
    if (!func.isConstructor()) {
      // We don't process the specialinvoke call edge
      SootClass defClass = func.getDeclaringClass();
      Hierarchy typeHierarchy = Scene.v().getActiveHierarchy();

      for (Iterator<AllocNode> it = new_pts.keySet().iterator(); it.hasNext();) {
        AllocNode obj = it.next();
        if (obj.getType() instanceof RefType) {
          SootClass sc = ((RefType) obj.getType()).getSootClass();
          if (defClass != sc) {
            try {
              // If virtual dispatch on obj's class resolves to a different
              // method, 'this' of func cannot point to obj: drop the tuple.
              SootMethod rt_func = typeHierarchy.resolveConcreteDispatch(sc, func);
              if (rt_func != func) {
                it.remove();
                // Also preclude it from propagation again
                pt_objs.put(obj, (PtInsIntervalManager) deadManager);
              }
            } catch (RuntimeException e) {
              // If the input program has a wrong type cast, resolveConcreteDispatch fails and it goes here
              // We simply ignore this error
            }
          }
        }
      }
    }
  }
}
/**
 * Remember to clean the is_new flag
 */
@Override
public void do_after_propagation() {
  // Mark all processed figures as old and reset the differential set.
  for (PtInsIntervalManager pim : pt_objs.values()) {
    pim.flush();
  }
  new_pts = new HashMap<AllocNode, PtInsIntervalManager>();
}
@Override
public int num_of_diff_objs() {
  // Queries on a merged (non-representative) pointer are answered by its
  // representative; -1 signals an uninitialized node.
  if (parent != this) {
    return getRepresentative().num_of_diff_objs();
  }
  return pt_objs == null ? -1 : pt_objs.size();
}
@Override
public int num_of_diff_edges() {
  // BUG FIX: merged pointers previously delegated to num_of_diff_objs(),
  // returning the points-to object count instead of the out-edge count.
  if (parent != this) {
    return getRepresentative().num_of_diff_edges();
  }

  if (flowto == null) {
    return -1;
  }

  return flowto.size();
}
@Override
public boolean add_points_to_3(AllocNode obj, long I1, long I2, long L) {
  // Describe the figure in the shared scratch rectangle, then classify it:
  // a zero interval start means "all contexts" on that axis.
  pres.I1 = I1;
  pres.I2 = I2;
  pres.L = L;

  int code = (I1 == 0)
      ? (I2 == 0 ? PtInsIntervalManager.ALL_TO_ALL : PtInsIntervalManager.ALL_TO_MANY)
      : (I2 == 0 ? PtInsIntervalManager.MANY_TO_ALL : PtInsIntervalManager.ONE_TO_ONE);

  return addPointsTo(code, obj);
}
@Override
public boolean add_points_to_4(AllocNode obj, long I1, long I2, long L1, long L2) {
  // Rectangle (two-length) figures are not supported by the PtIns encoding.
  return false;
}
@Override
public boolean add_simple_constraint_3(IVarAbstraction qv, long I1, long I2, long L) {
  // Fill the shared scratch rectangle, classify the figure (zero start means
  // "all contexts" on that axis), then record the flow edge towards qv.
  pres.I1 = I1;
  pres.I2 = I2;
  pres.L = L;

  int code = (I1 == 0)
      ? (I2 == 0 ? PtInsIntervalManager.ALL_TO_ALL : PtInsIntervalManager.ALL_TO_MANY)
      : (I2 == 0 ? PtInsIntervalManager.MANY_TO_ALL : PtInsIntervalManager.ONE_TO_ONE);

  return addFlowsTo(code, (PtInsNode) qv);
}
@Override
public boolean add_simple_constraint_4(IVarAbstraction qv, long I1, long I2, long L1, long L2) {
  // Rectangle (two-length) flow edges are not supported by the PtIns encoding.
  return false;
}
@Override
public void put_complex_constraint(PlainConstraint cons) {
  // Lazily allocate the container on first use, then append.
  Vector<PlainConstraint> list = complex_cons;
  if (list == null) {
    list = new Vector<PlainConstraint>();
    complex_cons = list;
  }
  list.add(cons);
}
/**
 * Discard all context sensitive tuples which are covered by insensitive ones
 */
@Override
public void drop_duplicates() {
  // Iterate the interval managers directly; the previous keySet() + get()
  // form performed a redundant hash lookup for every object.
  for (PtInsIntervalManager im : pt_objs.values()) {
    im.removeUselessSegments();
  }
}
/**
 * An efficient implementation of differential propagation.
 */
@Override
public void propagate(GeomPointsTo ptAnalyzer, IWorklist worklist) {
  int i, j;
  AllocNode obj;
  SegmentNode pts, pe, int_entry1[], int_entry2[];
  PtInsIntervalManager pim1, pim2;
  PtInsNode qn, objn;
  boolean added, has_new_edges;

  // Phase 1: materialize flow edges induced by field dereferences
  // (store/load complex constraints) on the newly discovered objects.
  if (complex_cons != null) {
    for (Map.Entry<AllocNode, PtInsIntervalManager> entry : new_pts.entrySet()) {
      obj = entry.getKey();
      int_entry1 = entry.getValue().getFigures();

      for (PlainConstraint pcons : complex_cons) {
        // Construct the two variables in assignment
        objn = (PtInsNode) ptAnalyzer.findAndInsertInstanceField(obj, pcons.f);
        if (objn == null) {
          // This combination of allocdotfield must be invalid
          // This expression p.f also renders that p cannot point to obj, so we remove it
          // We label this event and sweep the garbage later
          pt_objs.put(obj, (PtInsIntervalManager) deadManager);
          entry.setValue((PtInsIntervalManager) deadManager);
          break;
        }

        if (objn.willUpdate == false) {
          // This must be a store constraint
          // This object field is not need for computing
          // the points-to information of the seed pointers
          continue;
        }

        qn = (PtInsNode) pcons.otherSide;

        // NOTE(review): iterates with HeapInsIntervalManager.Divisions; both
        // managers use 3 divisions so behavior is identical, but
        // PtInsIntervalManager.Divisions would be the consistent constant.
        for (i = 0; i < HeapInsIntervalManager.Divisions; ++i) {
          pts = int_entry1[i];
          // Only the fresh (unprocessed) figures sit at the head of each list.
          while (pts != null && pts.is_new) {
            switch (pcons.type) {
              case Constants.STORE_CONS:
                // Store, qv -> pv.field
                // pts.I2 may be zero, pts.L may be less than zero
                if (qn.add_simple_constraint_3(objn, pcons.code == GeometricManager.ONE_TO_ONE ? pts.I1 : 0, pts.I2,
                    pts.L)) {
                  worklist.push(qn);
                }
                break;

              case Constants.LOAD_CONS:
                // Load, pv.field -> qv
                if (objn.add_simple_constraint_3(qn, pts.I2, pcons.code == GeometricManager.ONE_TO_ONE ? pts.I1 : 0,
                    pts.L)) {
                  worklist.push(objn);
                }
                break;
            }

            pts = pts.next;
          }
        }
      }
    }
  }

  // Phase 2: pair points-to figures with flow edges and push the objects to
  // the flow-to targets (the standard assignment inference rule).
  for (Map.Entry<PtInsNode, PtInsIntervalManager> entry1 : flowto.entrySet()) {
    // First, we get the flow-to intervals
    added = false;
    qn = entry1.getKey();
    pim1 = entry1.getValue();
    int_entry1 = pim1.getFigures();
    has_new_edges = pim1.isThereUnprocessedFigures();
    // With new edges we must pair against ALL objects; otherwise only the
    // newly added objects need processing (differential propagation).
    Map<AllocNode, PtInsIntervalManager> objs = (has_new_edges ? pt_objs : new_pts);

    for (Map.Entry<AllocNode, PtInsIntervalManager> entry2 : objs.entrySet()) {
      // Second, we get the points-to intervals
      obj = entry2.getKey();
      pim2 = entry2.getValue();
      if (pim2 == deadManager) {
        continue;
      }
      // Type filtering: obj must be assignable to the target pointer's type.
      if (!ptAnalyzer.castNeverFails(obj.getType(), qn.getWrappedNode().getType())) {
        continue;
      }

      int_entry2 = pim2.getFigures();

      // We pair up all the interval points-to tuples and interval flow edges
      for (i = 0; i < PtInsIntervalManager.Divisions; ++i) {
        pts = int_entry2[i];
        while (pts != null) {
          if (!has_new_edges && !pts.is_new) {
            break;
          }

          for (j = 0; j < PtInsIntervalManager.Divisions; ++j) {
            pe = int_entry1[j];
            while (pe != null) {
              // At least one side must be fresh, otherwise the pair was
              // already processed in an earlier round.
              if (pts.is_new || pe.is_new) {
                // Propagate this object
                if (add_new_points_to_tuple(pts, pe, obj, qn)) {
                  added = true;
                }
              } else {
                break;
              }

              pe = pe.next;
            }
          }

          pts = pts.next;
        }
      }
    }

    if (added) {
      worklist.push(qn);
    }

    // Now, we clean the new edges if necessary
    if (has_new_edges) {
      pim1.flush();
    }
  }
}
@Override
public int count_pts_intervals(AllocNode obj) {
  // Sum the lengths of all figure lists recorded for obj.
  int total = 0;
  for (SegmentNode head : find_points_to(obj)) {
    for (SegmentNode seg = head; seg != null; seg = seg.next) {
      ++total;
    }
  }
  return total;
}
@Override
public int count_flow_intervals(IVarAbstraction qv) {
  // Sum the lengths of all figure lists on the edge towards qv.
  int total = 0;
  for (SegmentNode head : find_flowto((PtInsNode) qv)) {
    for (SegmentNode seg = head; seg != null; seg = seg.next) {
      ++total;
    }
  }
  return total;
}
/**
 * Query if this pointer and qv could point to the same object under any contexts
 */
@Override
public boolean heap_sensitive_intersection(IVarAbstraction qv) {
  int i, j;
  PtInsNode qn;
  SegmentNode p, q, pt[], qt[];

  qn = (PtInsNode) qv;

  for (Iterator<AllocNode> it = pt_objs.keySet().iterator(); it.hasNext();) {
    AllocNode an = it.next();
    // String constants are skipped for the aliasing test.
    if (an instanceof StringConstantNode) {
      continue;
    }
    qt = qn.find_points_to(an);
    if (qt == null) {
      continue;
    }
    pt = find_points_to(an);

    // Pair every figure of this pointer with every figure of qv for the
    // common object and test whether the heap-context intervals overlap.
    for (i = 0; i < PtInsIntervalManager.Divisions; ++i) {
      p = pt[i];
      while (p != null) {
        for (j = 0; j < PtInsIntervalManager.Divisions; ++j) {
          q = qt[j];
          while (q != null) {
            if (quick_intersecting_test(p, q)) {
              return true;
            }
            q = q.next;
          }
        }
        p = p.next;
      }
    }
  }

  return false;
}
@Override
public Set<AllocNode> get_all_points_to_objects() {
  // A merged (non-representative) pointer delegates the query.
  return parent == this ? pt_objs.keySet() : getRepresentative().get_all_points_to_objects();
}
@Override
public void print_context_sensitive_points_to(PrintStream outPrintStream) {
  // Dump every (object, I1, I2, L) figure recorded for this pointer.
  for (AllocNode obj : pt_objs.keySet()) {
    SegmentNode[] int_entry = find_points_to(obj);
    if (int_entry == null) {
      continue;
    }
    for (SegmentNode head : int_entry) {
      for (SegmentNode p = head; p != null; p = p.next) {
        outPrintStream.println("(" + obj.toString() + ", " + p.I1 + ", " + p.I2 + ", " + p.L + ")");
      }
    }
  }
}
@Override
public boolean pointer_interval_points_to(long l, long r, AllocNode obj) {
  SegmentNode[] int_entry = find_points_to(obj);

  // Check all-to-many figures: they cover every pointer context.
  if (int_entry[PtInsIntervalManager.ALL_TO_MANY] != null) {
    return true;
  }

  // Consistency fix: iterate with this encoding's own Divisions constant
  // instead of HeapInsIntervalManager.Divisions (both are 3, so behavior
  // is unchanged).
  for (int i = 1; i < PtInsIntervalManager.Divisions; ++i) {
    SegmentNode p = int_entry[i];
    while (p != null) {
      long R = p.I1 + p.L;
      // Overlap test between the query interval [l, r) and [p.I1, R).
      if ((l <= p.I1 && p.I1 < r) || (p.I1 <= l && l < R)) {
        return true;
      }
      p = p.next;
    }
  }

  return false;
}
@Override
public void remove_points_to(AllocNode obj) {
  // Drop every figure recorded for obj on this pointer.
  pt_objs.remove(obj);
}
@Override
public void keepPointsToOnly() {
  // Retain only the points-to result; the propagation-only structures
  // (edges, differential set, complex constraints) are released.
  flowto = null;
  new_pts = null;
  complex_cons = null;
}
@Override
public int count_new_pts_intervals() {
  // Fresh figures are kept at the head of every list, so each walk stops
  // at the first already-processed segment.
  int total = 0;
  for (PtInsIntervalManager im : new_pts.values()) {
    for (SegmentNode head : im.getFigures()) {
      for (SegmentNode p = head; p != null && p.is_new; p = p.next) {
        ++total;
      }
    }
  }
  return total;
}
@Override
public void get_all_context_sensitive_objects(long l, long r, PtSensVisitor visitor) {
  // Merged pointers are answered through their representative.
  if (parent != this) {
    getRepresentative().get_all_context_sensitive_objects(l, r, visitor);
    return;
  }

  GeomPointsTo geomPTA = (GeomPointsTo) Scene.v().getPointsToAnalysis();

  for (Map.Entry<AllocNode, PtInsIntervalManager> entry : pt_objs.entrySet()) {
    AllocNode obj = entry.getKey();
    PtInsIntervalManager im = entry.getValue();
    SegmentNode[] int_entry = im.getFigures();

    // We first get the 1-CFA contexts for the object
    SootMethod sm = obj.getMethod();
    int sm_int = 0;
    long n_contexts = 1;
    if (sm != null) {
      sm_int = geomPTA.getIDFromSootMethod(sm);
      n_contexts = geomPTA.context_size[sm_int];
    }

    // We search for all the pointers falling in the range [1, r) that may point to this object
    for (int i = 0; i < PtInsIntervalManager.Divisions; ++i) {
      SegmentNode p = int_entry[i];
      while (p != null) {
        long R = p.I1 + p.L;
        long objL = -1, objR = -1;

        // Now we compute which context sensitive objects are pointed to by this pointer
        if (i == PtInsIntervalManager.ALL_TO_MANY) {
          // all-to-many figures
          objL = p.I2;
          objR = p.I2 + p.L;
        } else {
          // We compute the intersection of [l, r) with the pointer interval
          // [p.I1, R) and map it onto the object-context axis.
          if (l <= p.I1 && p.I1 < r) {
            if (i != PtInsIntervalManager.MANY_TO_ALL) {
              long d = r - p.I1;
              if (d > p.L) {
                d = p.L;
              }
              objL = p.I2;
              objR = objL + d;
            } else {
              // many-to-all: the object side spans every context.
              objL = 1;
              objR = 1 + n_contexts;
            }
          } else if (p.I1 <= l && l < R) {
            if (i != PtInsIntervalManager.MANY_TO_ALL) {
              long d = R - l;
              if (R > r) {
                d = r - l;
              }
              objL = p.I2 + l - p.I1;
              objR = objL + d;
            } else {
              objL = 1;
              objR = 1 + n_contexts;
            }
          }
        }

        // Now we test which context versions should this interval [objL, objR) maps to
        if (objL != -1 && objR != -1) {
          visitor.visit(obj, objL, objR, sm_int);
        }

        p = p.next;
      }
    }
  }
}
@Override
public void injectPts() {
  // Seed the geometric result from SPARK's context-insensitive sets: every
  // valid object is mapped to the shared universal stub manager.
  final GeomPointsTo geomPTA = (GeomPointsTo) Scene.v().getPointsToAnalysis();
  pt_objs = new HashMap<AllocNode, PtInsIntervalManager>();

  me.getP2Set().forall(new P2SetVisitor() {
    @Override
    public void visit(Node n) {
      if (geomPTA.isValidGeometricNode(n)) {
        pt_objs.put((AllocNode) n, (PtInsIntervalManager) stubManager);
      }
    }
  });

  new_pts = null;
}
@Override
public boolean isDeadObject(AllocNode obj) {
  // Dead objects are those whose manager was replaced by the shared
  // deadManager sentinel to preclude further propagation.
  return pt_objs.get(obj) == deadManager;
}
// ---------------------------------Private Functions----------------------------------------
private SegmentNode[] find_flowto(PtInsNode qv) {
  // Null when no flow edge towards qv was ever recorded.
  PtInsIntervalManager im = flowto.get(qv);
  return im == null ? null : im.getFigures();
}
private SegmentNode[] find_points_to(AllocNode obj) {
  // Null when obj was never recorded for this pointer.
  PtInsIntervalManager im = pt_objs.get(obj);
  return im == null ? null : im.getFigures();
}
/**
 * Merge the context sensitive tuples, and make a single insensitive tuple
 */
private void do_pts_interval_merge() {
  // Collapse over-budget points-to figure lists (budget from Parameters).
  for (PtInsIntervalManager im : pt_objs.values()) {
    im.mergeFigures(Parameters.max_pts_budget);
  }
}
private void do_flow_edge_interval_merge() {
for (PtInsIntervalManager im : flowto.values()) {
im.mergeFigures(Parameters.max_cons_budget);
}
}
private boolean addPointsTo(int code, AllocNode obj) {
PtInsIntervalManager im = pt_objs.get(obj);
if (im == null) {
im = new PtInsIntervalManager();
pt_objs.put(obj, im);
} else if (im == deadManager) {
// We preclude the propagation of this object
return false;
}
// pres has been filled properly before calling this method
if (im.addNewFigure(code, pres) != null) {
new_pts.put(obj, im);
return true;
}
return false;
}
private boolean addFlowsTo(int code, PtInsNode qv) {
PtInsIntervalManager im = flowto.get(qv);
if (im == null) {
im = new PtInsIntervalManager();
flowto.put(qv, im);
}
// pres has been filled properly before calling this method
return im.addNewFigure(code, pres) != null;
}
  // Implement the pointer assignment inference rules
  /**
   * Propagates one points-to tuple across one assignment edge: intersects the
   * pointer-context range of the points-to figure {@code pts} with the
   * source-side range of the flow edge {@code pe}, maps the intersection
   * through the edge, and installs the resulting figure for {@code obj} on the
   * target pointer {@code qn}. The result is staged in the shared {@code pres}
   * record before being handed to {@code qn.addPointsTo}.
   *
   * @return true iff qn's points-to information actually grew
   */
  private static boolean add_new_points_to_tuple(SegmentNode pts, SegmentNode pe, AllocNode obj, PtInsNode qn) {
    long interI, interJ;
    int code = 0;
    // Special Cases: an I1 of 0 encodes "all pointer contexts".
    if (pts.I1 == 0 || pe.I1 == 0) {
      // Make it pointer insensitive but heap sensitive
      pres.I1 = 0;
      pres.I2 = pts.I2;
      pres.L = pts.L;
      code = (pts.I2 == 0 ? PtInsIntervalManager.ALL_TO_ALL : PtInsIntervalManager.ALL_TO_MANY);
    } else {
      // Intersect [pts.I1, pts.I1+L) with [pe.I1, pe.I1+L):
      // The left-end is the larger one
      interI = pe.I1 < pts.I1 ? pts.I1 : pe.I1;
      // The right-end is the smaller one
      interJ = (pe.I1 + pe.L < pts.I1 + pts.L ? pe.I1 + pe.L : pts.I1 + pts.L);
      if (interI >= interJ) {
        // Empty intersection: the tuple and the edge share no context.
        return false;
      }
      // The intersection is non-empty; translate it to the target side.
      // An I2 of 0 encodes "all contexts" and stays 0 after translation.
      pres.I1 = (pe.I2 == 0 ? 0 : interI - pe.I1 + pe.I2);
      pres.I2 = (pts.I2 == 0 ? 0 : interI - pts.I1 + pts.I2);
      pres.L = interJ - interI;
      code = (pres.I2 == 0 ? PtInsIntervalManager.MANY_TO_ALL : PtInsIntervalManager.ONE_TO_ONE);
    }
    return qn.addPointsTo(code, obj);
  }
// We only test if their points-to objects intersected under context
// insensitive manner
private static boolean quick_intersecting_test(SegmentNode p, SegmentNode q) {
if (p.I2 >= q.I2) {
return p.I2 < q.I2 + q.L;
}
return q.I2 < p.I2 + p.L;
}
}
| 21,257
| 28.080711
| 122
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/geom/ptinsE/PtInsNodeGenerator.java
|
package soot.jimple.spark.geom.ptinsE;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2012 Richard Xiao
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.Iterator;
import soot.jimple.spark.geom.dataRep.CgEdge;
import soot.jimple.spark.geom.dataRep.PlainConstraint;
import soot.jimple.spark.geom.geomE.GeometricManager;
import soot.jimple.spark.geom.geomPA.Constants;
import soot.jimple.spark.geom.geomPA.DummyNode;
import soot.jimple.spark.geom.geomPA.GeomPointsTo;
import soot.jimple.spark.geom.geomPA.IEncodingBroker;
import soot.jimple.spark.geom.geomPA.IVarAbstraction;
import soot.jimple.spark.geom.heapinsE.HeapInsNode;
import soot.jimple.spark.pag.AllocNode;
import soot.jimple.spark.pag.FieldRefNode;
import soot.jimple.spark.pag.Node;
import soot.jimple.toolkits.callgraph.Edge;
/**
* Build the initial encoded pointer assignment graph with the PtIns encoding.
*
* @author xiao
*
*/
public class PtInsNodeGenerator extends IEncodingBroker {
  // Maps the 2-bit "globals involved" code (bit 1 = LHS is global, bit 0 = RHS
  // is global) to the geometric figure type used for load/store constraints.
  private static final int full_convertor[] = { GeometricManager.ONE_TO_ONE, GeometricManager.MANY_TO_MANY,
      GeometricManager.MANY_TO_MANY, GeometricManager.MANY_TO_MANY };

  /**
   * Translates the active SPARK constraints into the encoded flow graph used by
   * the geometric solver: NEW constraints seed points-to sets directly, ASSIGN
   * constraints become (possibly context-mapped) flow edges, and LOAD/STORE
   * constraints are parked on their base pointer for on-the-fly processing.
   */
  @Override
  public void initFlowGraph(GeomPointsTo ptAnalyzer) {
    int k;
    int n_legal_cons;
    int nf1, nf2;
    int code;
    CgEdge q;
    IVarAbstraction my_lhs, my_rhs;
    // Visit all the simple constraints
    n_legal_cons = 0;
    for (PlainConstraint cons : ptAnalyzer.constraints) {
      if (!cons.isActive) {
        continue;
      }
      // Work on the union-find representatives of both sides.
      my_lhs = cons.getLHS().getRepresentative();
      my_rhs = cons.getRHS().getRepresentative();
      nf1 = ptAnalyzer.getMethodIDFromPtr(my_lhs);
      nf2 = ptAnalyzer.getMethodIDFromPtr(my_rhs);
      // Test how many globals are in this constraint
      code = ((nf1 == Constants.SUPER_MAIN ? 1 : 0) << 1) | (nf2 == Constants.SUPER_MAIN ? 1 : 0);
      switch (cons.type) {
        case Constants.NEW_CONS:
          // We directly add the objects to the points-to set.
          // Globals (SUPER_MAIN) use the context-insensitive encoding (0).
          my_rhs.add_points_to_3((AllocNode) my_lhs.getWrappedNode(), nf2 == Constants.SUPER_MAIN ? 0 : 1,
              nf1 == Constants.SUPER_MAIN ? 0 : 1,
              nf2 == Constants.SUPER_MAIN ? ptAnalyzer.context_size[nf1] : ptAnalyzer.context_size[nf2]);
          // Enqueue to the worklist
          ptAnalyzer.getWorklist().push(my_rhs);
          break;
        case Constants.ASSIGN_CONS:
          // The core part of any context sensitive algorithms
          if (cons.interCallEdges != null) {
            // Inter-procedural assignment: one edge per (non-obsolete) call edge
            for (Iterator<Edge> it = cons.interCallEdges.iterator(); it.hasNext();) {
              Edge sEdge = it.next();
              q = ptAnalyzer.getInternalEdgeFromSootEdge(sEdge);
              if (q.is_obsoleted == true) {
                continue;
              }
              if (nf2 == q.t) {
                // Parameter passing
                // The receiver is a local, while the sender is perhaps not
                if (nf1 == Constants.SUPER_MAIN) {
                  my_lhs.add_simple_constraint_3(my_rhs, 0, q.map_offset, ptAnalyzer.max_context_size_block[q.s]);
                } else {
                  // nf1 == q.s
                  // We should treat the self recursive calls specially
                  if (q.s == q.t) {
                    my_lhs.add_simple_constraint_3(my_rhs, 1, 1, ptAnalyzer.context_size[nf1]);
                  } else {
                    // One mapped edge per context block of the caller.
                    for (k = 0; k < ptAnalyzer.block_num[nf1]; ++k) {
                      my_lhs.add_simple_constraint_3(my_rhs, k * ptAnalyzer.max_context_size_block[nf1] + 1, q.map_offset,
                          ptAnalyzer.max_context_size_block[nf1]);
                    }
                  }
                }
              } else {
                // nf2 == q.s
                // Return value
                // Both are locals
                if (q.s == q.t) {
                  my_lhs.add_simple_constraint_3(my_rhs, 1, 1, ptAnalyzer.context_size[nf2]);
                } else {
                  // One mapped edge per context block of the callee.
                  for (k = 0; k < ptAnalyzer.block_num[nf2]; ++k) {
                    my_lhs.add_simple_constraint_3(my_rhs, q.map_offset, k * ptAnalyzer.max_context_size_block[nf2] + 1,
                        ptAnalyzer.max_context_size_block[nf2]);
                  }
                }
              }
            }
          } else {
            // Intraprocedural
            // And, assignment involves global variable goes here. By
            // definition, global variables belong to SUPER_MAIN.
            // By the Jimple IR, not both sides are global variables
            my_lhs.add_simple_constraint_3(my_rhs, nf1 == Constants.SUPER_MAIN ? 0 : 1, nf2 == Constants.SUPER_MAIN ? 0 : 1,
                nf1 == Constants.SUPER_MAIN ? ptAnalyzer.context_size[nf2] : ptAnalyzer.context_size[nf1]);
          }
          break;
        case Constants.LOAD_CONS:
          // lhs is always a local
          // rhs = lhs.f
          cons.code = full_convertor[code];
          cons.otherSide = my_rhs;
          my_lhs.put_complex_constraint(cons);
          break;
        case Constants.STORE_CONS:
          // rhs is always a local
          // rhs.f = lhs
          cons.code = full_convertor[code];
          cons.otherSide = my_lhs;
          my_rhs.put_complex_constraint(cons);
          break;
        default:
          throw new RuntimeException("Invalid node type");
      }
      ++n_legal_cons;
    }
    ptAnalyzer.ps.printf("Only %d (%.1f%%) constraints are needed for this run.\n", n_legal_cons,
        ((double) n_legal_cons / ptAnalyzer.n_init_constraints) * 100);
  }

  /**
   * Creates the solver-side abstraction for a SPARK node; allocation and field
   * reference nodes only need a thin placeholder.
   *
   * NOTE(review): pointer nodes are wrapped in HeapInsNode (from the heapinsE
   * package) rather than this package's PtInsNode — presumably deliberate
   * reuse, but verify against the other *NodeGenerator implementations.
   */
  @Override
  public IVarAbstraction generateNode(Node vNode) {
    IVarAbstraction ret;
    if (vNode instanceof AllocNode || vNode instanceof FieldRefNode) {
      ret = new DummyNode(vNode);
    } else {
      ret = new HeapInsNode(vNode);
    }
    return ret;
  }

  /** Identifies the PtIns encoding scheme this broker implements. */
  @Override
  public String getSignature() {
    return Constants.ptinsE;
  }
}
| 6,659
| 34.614973
| 124
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/geom/utils/Histogram.java
|
package soot.jimple.spark.geom.utils;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2011 Richard Xiao
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.io.PrintStream;
/**
* A helper class for plotting the evaluation results in histogram form.
*
* @author xiao
*
*/
public class Histogram {
  // Bucket upper bounds, ascending. Bucket i holds samples <= limits[i]
  // (and > limits[i-1] for i > 0). Kept by reference, set once.
  private final int[] limits;
  // Total number of samples recorded so far.
  private int count = 0;
  // One slot per limit plus an overflow bucket at index limits.length for
  // samples greater than every limit. Java zero-initializes the array, so the
  // former explicit clearing loop was redundant and has been removed.
  private final int[] results;

  /**
   * Creates a histogram whose buckets are bounded above by the given limits.
   *
   * @param limits
   *          ascending bucket upper bounds
   */
  public Histogram(int[] limits) {
    this.limits = limits;
    this.results = new int[limits.length + 1];
  }

  /**
   * Prints each bucket's count and percentage to the given stream.
   */
  public void printResult(PrintStream output) {
    if (count == 0) {
      output.println("No samples are inserted, no output!");
      return;
    }
    output.println("Samples : " + count);
    for (int i = 0; i < results.length; i++) {
      if (i == 0) {
        output.print("<=" + limits[0] + ": " + results[i]);
      } else if (i == results.length - 1) {
        output.print(">" + limits[limits.length - 1] + ": " + results[i]);
      } else {
        output.print(limits[i - 1] + "< x <=" + limits[i] + ": " + results[i]);
      }
      output.printf(", percentage = %.2f\n", (double) results[i] * 100 / count);
    }
  }

  /**
   * Prints a title line followed by the histogram contents.
   */
  public void printResult(PrintStream output, String title) {
    output.println(title);
    printResult(output);
  }

  /**
   * This function prints two histograms together for comparative reading. It requires the two histograms having the same
   * data separators.
   *
   * @param output
   *          destination stream
   * @param title
   *          heading printed first
   * @param other
   *          the histogram whose counts are shown in parentheses
   */
  public void printResult(PrintStream output, String title, Histogram other) {
    output.println(title);
    if (count == 0) {
      output.println("No samples are inserted, no output!");
      return;
    }
    output.println("Samples : " + count + " (" + other.count + ")");
    for (int i = 0; i < results.length; i++) {
      if (i == 0) {
        output.printf("<= %d: %d (%d)", limits[0], results[i], other.results[i]);
      } else if (i == results.length - 1) {
        output.printf("> %d: %d (%d)", limits[limits.length - 1], results[i], other.results[i]);
      } else {
        output.printf("%d < x <= %d: %d (%d)", limits[i - 1], limits[i], results[i], other.results[i]);
      }
      output.printf(", percentage = %.2f%% (%.2f%%) \n", (double) results[i] * 100 / count,
          (double) other.results[i] * 100 / other.count);
    }
    output.println();
  }

  /**
   * Records one sample in the first bucket whose limit it does not exceed, or
   * in the overflow bucket if it is larger than every limit.
   */
  public void addNumber(int num) {
    count++;
    for (int i = 0; i < limits.length; i++) {
      if (num <= limits[i]) {
        results[i]++;
        return;
      }
    }
    results[limits.length]++;
  }

  /**
   * Merge two histograms. Both must have been built with the same limits.
   *
   * @param other
   *          the histogram whose counts are added into this one
   */
  public void merge(Histogram other) {
    for (int i = 0; i <= limits.length; ++i) {
      results[i] += other.results[i];
    }
    count += other.count;
  }

  /** Returns the total number of recorded samples. */
  public int getTotalNumofSamples() {
    return count;
  }

  /**
   * Use the current distribution but scale the samples close to the user specified one.
   * The count is recomputed from the rounded bucket values, so it may differ
   * slightly from usrSamples. Must only be called after at least one sample
   * has been added (otherwise the ratio divides by zero).
   *
   * @param usrSamples
   *          the approximate desired total sample count
   */
  public void scaleToSamples(int usrSamples) {
    double ratio = (double) usrSamples / count;
    count = 0;
    for (int i = 0; i <= limits.length; ++i) {
      results[i] = (int) Math.round(results[i] * ratio);
      count += results[i];
    }
  }

  /**
   * Returns the count stored in bucket inx.
   * NOTE(review): indices >= limits.length return 0, so the overflow bucket
   * (results[limits.length]) is unreachable through this accessor — confirm
   * with existing callers before widening it.
   */
  public int getResult(int inx) {
    if (inx >= limits.length) {
      return 0;
    }
    return results[inx];
  }
}
| 4,179
| 23.444444
| 121
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/geom/utils/SootInfo.java
|
package soot.jimple.spark.geom.utils;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2011 - 2014 Richard Xiao
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.Iterator;
import soot.Scene;
import soot.jimple.Stmt;
import soot.jimple.toolkits.callgraph.CallGraph;
import soot.jimple.toolkits.callgraph.Edge;
/**
* It implements missing features in Soot components. All functions should be static.
*
* @author xiao
*
*/
public class SootInfo {
  /**
   * Counts the call edges leaving the given call site in the current call
   * graph. When {@code stopForMutiple} is set, counting stops as soon as more
   * than one edge has been seen (the caller only needs "0, 1, or many").
   *
   * @param callsite the invocation statement whose outgoing edges are counted
   * @param stopForMutiple whether to stop early after the second edge
   * @return the number of edges seen before stopping
   */
  public static int countCallEdgesForCallsite(Stmt callsite, boolean stopForMutiple) {
    final CallGraph cg = Scene.v().getCallGraph();
    int edges = 0;
    final Iterator<Edge> it = cg.edgesOutOf(callsite);
    while (it.hasNext()) {
      it.next();
      ++edges;
      if (stopForMutiple && edges > 1) {
        break;
      }
    }
    return edges;
  }
}
| 1,502
| 25.839286
| 86
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/geom/utils/ZArrayNumberer.java
|
package soot.jimple.spark.geom.utils;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2011 Richard Xiao
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import soot.util.IterableNumberer;
import soot.util.Numberable;
/**
* Similar to the ArrayNumberer class in soot. But, this class counts the objects from zero. And, we permit the deletion of
* objects from the array container. And most importantly, we permits the search for a particular object efficiently.
* Therefore, this class supports efficiently insert, lookup, deletion and traversal queries.
*
* @author xiao
*/
public class ZArrayNumberer<E extends Numberable> implements IterableNumberer<E>, Iterable<E> {
  // Hash index backing searchFor(): maps an object to the stored equal object.
  final Map<E, E> objContainer;
  // Dense id -> object table; removed objects leave null holes until reassign().
  Numberable[] numberToObj;
  // Next id to hand out; one past the highest id used since the last reassign().
  int lastNumber = 0;
  // Number of non-null slots, i.e. the logical size of the container.
  int filledCells = 0;

  public ZArrayNumberer() {
    // With default initialize size
    numberToObj = new Numberable[1023];
    objContainer = new HashMap<E, E>(1023);
  }

  public ZArrayNumberer(int initSize) {
    numberToObj = new Numberable[initSize];
    objContainer = new HashMap<E, E>(initSize);
  }

  /**
   * Assigns the next free id to o and indexes it; a no-op if o already occupies
   * the slot its number points to.
   */
  @Override
  public void add(E o) {
    // We check if this object is already put into the set
    if (o.getNumber() != -1 && numberToObj[o.getNumber()] == o) {
      return;
    }
    numberToObj[lastNumber] = o;
    o.setNumber(lastNumber);
    objContainer.put(o, o);
    ++lastNumber;
    ++filledCells;
    if (lastNumber >= numberToObj.length) {
      // Grow by doubling so insertion stays amortized O(1).
      Numberable[] newnto = new Numberable[numberToObj.length * 2];
      System.arraycopy(numberToObj, 0, newnto, 0, numberToObj.length);
      numberToObj = newnto;
    }
  }

  /**
   * Clear the reference to the objects to help the garbage collection
   */
  public void clear() {
    for (int i = 0; i < lastNumber; ++i) {
      numberToObj[i] = null;
    }
    lastNumber = 0;
    filledCells = 0;
    objContainer.clear();
  }

  /**
   * Input object o should be added to this container previously.
   * Returns the id assigned by add(), or -1 for null.
   */
  @Override
  public long get(E o) {
    if (o == null) {
      return -1;
    }
    return o.getNumber();
  }

  /** Returns the object stored under the given id, or null for an empty slot. */
  @Override
  public E get(long number) {
    @SuppressWarnings("unchecked")
    E ret = (E) numberToObj[(int) number];
    return ret;
  }

  /**
   * Input object o is not required to be an object added previously.
   * Returns the stored object equal to o, or null if no such object was added.
   */
  public E searchFor(E o) {
    return objContainer.get(o);
  }

  /**
   * Unlinks o from its slot and marks it unnumbered.
   * NOTE(review): o is not removed from objContainer, so searchFor() can still
   * return a removed object until clear() — confirm whether that is intended.
   */
  @Override
  public boolean remove(E o) {
    int id = o.getNumber();
    if (id < 0) {
      // Never added, or already removed.
      return false;
    }
    if (numberToObj[id] != o) {
      // The slot is occupied by a different object; o is not stored here.
      return false;
    }
    numberToObj[id] = null;
    o.setNumber(-1);
    --filledCells;
    return true;
  }

  /**
   * Return how many objects are in the container but not the capacity of the container.
   */
  @Override
  public int size() {
    return filledCells;
  }

  /**
   * The removed objects cause some empty slots. We shift the objects to the empty slots in order to ensure ids of the
   * objects are less than the filledCells.
   */
  public void reassign() {
    int i = 0;
    // Two-pointer compaction: i scans for holes from the front while j pulls
    // live objects from the back into them; ids are rewritten on the move.
    for (int j = lastNumber - 1; i < j; ++i) {
      if (numberToObj[i] != null) {
        continue;
      }
      while (j > i) {
        if (numberToObj[j] != null) {
          break;
        }
        --j;
      }
      if (i == j) {
        break;
      }
      numberToObj[i] = numberToObj[j];
      numberToObj[i].setNumber(i);
      numberToObj[j] = null;
    }
    lastNumber = i;
  }

  @Override
  public Iterator<E> iterator() {
    return new NumbererIterator();
  }

  final class NumbererIterator implements Iterator<E> {
    // Cursor into numberToObj; advanced past null holes by hasNext().
    int cur = 0;
    // Most recently returned element, used by remove().
    E lastElement = null;

    /**
     * We locate the next non-null item.
     */
    @Override
    public final boolean hasNext() {
      while (cur < lastNumber) {
        if (numberToObj[cur] != null) {
          break;
        }
        ++cur;
      }
      return cur < lastNumber;
    }

    /**
     * We move on until a none null pointer found. In this way, the clients don't need to be aware of the empty slots.
     * Note: null-skipping is done by hasNext(); next() itself reads the slot at
     * the cursor unconditionally, so hasNext() must be called first.
     */
    @Override
    public final E next() {
      @SuppressWarnings("unchecked")
      E temp = (E) numberToObj[cur++];
      lastElement = temp;
      return temp;
    }

    @Override
    public final void remove() {
      ZArrayNumberer.this.remove(lastElement);
    }
  }
}
| 5,043
| 22.792453
| 123
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/internal/ClientAccessibilityOracle.java
|
package soot.jimple.spark.internal;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2002 Ondrej Lhotak
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.SootField;
import soot.SootMethod;
/**
* The decision whether a libraries field or method is accessible for a client can be different for different analyses.
*
* This interface provides methods to define how this decision will be made.
*
* @author Florian Kuebler
*
*/
public interface ClientAccessibilityOracle {
  /**
   * Determines whether the method is accessible for a potential library user.
   *
   * @param method the library method in question
   * @return true if a client outside the library could invoke it
   */
  public boolean isAccessible(SootMethod method);
  /**
   * Determines whether the field is accessible for a potential library user.
   *
   * @param field the library field in question
   * @return true if a client outside the library could access it
   */
  public boolean isAccessible(SootField field);
}
| 1,455
| 28.714286
| 119
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/internal/CompleteAccessibility.java
|
package soot.jimple.spark.internal;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2002 Ondrej Lhotak
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.G;
import soot.Singletons;
import soot.SootField;
import soot.SootMethod;
/**
* Using this oracle one assumes, that a client of the target library can call every method and access every field.
*
* @author Florian Kuebler
*
*/
public class CompleteAccessibility implements ClientAccessibilityOracle {
  /** Singleton constructor; instances are managed by Soot's global registry. */
  public CompleteAccessibility(Singletons.Global g) {
  }
  /** Returns the singleton instance held by {@link G}. */
  public static CompleteAccessibility v() {
    return G.v().soot_jimple_spark_internal_CompleteAccessibility();
  }
  /** Every method is considered callable by a library client. */
  @Override
  public boolean isAccessible(SootMethod method) {
    return true;
  }
  /** Every field is considered accessible by a library client. */
  @Override
  public boolean isAccessible(SootField field) {
    return true;
  }
}
| 1,502
| 25.839286
| 115
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/internal/PublicAndProtectedAccessibility.java
|
package soot.jimple.spark.internal;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2002 Ondrej Lhotak
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.G;
import soot.Singletons;
import soot.SootField;
import soot.SootMethod;
/**
* Using this oracle one assumes, that a client of the target library can call every public or protected method and access
* every public or protected field.
*
* @author Florian Kuebler
*
*/
public class PublicAndProtectedAccessibility implements ClientAccessibilityOracle {
  /** Singleton constructor; instances are managed by Soot's global registry. */
  public PublicAndProtectedAccessibility(Singletons.Global g) {
  }
  /** Returns the singleton instance held by {@link G}. */
  public static PublicAndProtectedAccessibility v() {
    return G.v().soot_jimple_spark_internal_PublicAndProtectedAccessibility();
  }
  /** A method is client-accessible iff it is declared public or protected. */
  @Override
  public boolean isAccessible(SootMethod method) {
    return method.isPublic() || method.isProtected();
  }
  /** A field is client-accessible iff it is declared public or protected. */
  @Override
  public boolean isAccessible(SootField field) {
    return field.isPublic() || field.isProtected();
  }
}
| 1,656
| 28.070175
| 122
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/internal/SparkLibraryHelper.java
|
package soot.jimple.spark.internal;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2002 Ondrej Lhotak
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.AnySubType;
import soot.ArrayType;
import soot.RefType;
import soot.SootMethod;
import soot.Type;
import soot.TypeSwitch;
import soot.jimple.spark.pag.AllocNode;
import soot.jimple.spark.pag.ArrayElement;
import soot.jimple.spark.pag.FieldRefNode;
import soot.jimple.spark.pag.LocalVarNode;
import soot.jimple.spark.pag.Node;
import soot.jimple.spark.pag.PAG;
import soot.jimple.spark.pag.VarNode;
/**
* This {@link TypeSwitch} can be used to add library behavior to the PAG. It adds allocation nodes with {@link AnySubType}
* of the declared type to the target node.
*
* @author Florian Kuebler
*
*/
public class SparkLibraryHelper extends TypeSwitch {
  // All three collaborators are fixed at construction time, so they are final.
  private final PAG pag;
  private final Node node;
  private final SootMethod method;
  /**
   * The constructor for this {@link TypeSwitch}.
   *
   * @param pag
   *          the pointer assignment graph in that the new edges and nodes should be added into.
   * @param node
   *          the node of the value for which allocations should be made.
   * @param method
   *          the method in which the allocations should take place. This parameter can be null.
   */
  public SparkLibraryHelper(PAG pag, Node node, SootMethod method) {
    this.pag = pag;
    this.node = node;
    this.method = method;
  }
  /**
   * A new local will be created and connected to {@link SparkLibraryHelper#node} of type {@link RefType}. For this new local
   * an allocation edge to {@link AnySubType} of its declared type will be added.
   */
  @Override
  public void caseRefType(RefType t) {
    // var tmp;
    VarNode local = pag.makeLocalVarNode(new Object(), t, method);
    // new T();
    AllocNode alloc = pag.makeAllocNode(new Object(), AnySubType.v(t), method);
    // tmp = new T();
    pag.addAllocEdge(alloc, local);
    // x = tmp;
    pag.addEdge(local, node);
  }
  /**
   * A new local array will be created and connected to {@link SparkLibraryHelper#node} of type {@link ArrayType}. For this
   * new local an allocation edge to a new array of its declared type will be added. If the
   * {@link ArrayType#getElementType()} is still an array an allocation to a new array of this element type will be made and
   * stored until the element type is a {@link RefType}. If this is the case an allocation to {@link AnySubType} of
   * {@link ArrayType#baseType} will be made.
   */
  @Override
  public void caseArrayType(ArrayType type) {
    // Walk down one array dimension per iteration, chaining a synthetic local
    // and element store for each level.
    Node array = node;
    for (Type t = type; t instanceof ArrayType; t = ((ArrayType) t).getElementType()) {
      ArrayType at = (ArrayType) t;
      if (at.baseType instanceof RefType) {
        // var tmpArray;
        LocalVarNode localArray = pag.makeLocalVarNode(new Object(), t, method);
        // x = tmpArray;
        pag.addEdge(localArray, array);
        // new T[]
        AllocNode newArray = pag.makeAllocNode(new Object(), at, method);
        // tmpArray = new T[]
        pag.addEdge(newArray, localArray);
        // tmpArray[i]
        FieldRefNode arrayRef = pag.makeFieldRefNode(localArray, ArrayElement.v());
        // var tmp
        LocalVarNode local = pag.makeLocalVarNode(new Object(), at.getElementType(), method);
        // tmpArray[i] = tmp
        pag.addEdge(local, arrayRef);
        // x = tmp
        array = local;
        if (at.numDimensions == 1) {
          // Innermost dimension reached: allocate any subtype of the base type.
          // new T()
          AllocNode alloc = pag.makeAllocNode(new Object(), AnySubType.v((RefType) at.baseType), method);
          // tmp = new T()
          pag.addEdge(alloc, local);
        }
      }
    }
  }
}
| 4,369
| 30.89781
| 125
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/internal/SparkNativeHelper.java
|
package soot.jimple.spark.internal;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2002 Ondrej Lhotak
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.G;
import soot.RefType;
import soot.SootClass;
import soot.SootField;
import soot.SootMethod;
import soot.jimple.spark.pag.AllocNode;
import soot.jimple.spark.pag.ArrayElement;
import soot.jimple.spark.pag.FieldRefNode;
import soot.jimple.spark.pag.Node;
import soot.jimple.spark.pag.PAG;
import soot.jimple.spark.pag.VarNode;
import soot.jimple.toolkits.pointer.representations.AbstractObject;
import soot.jimple.toolkits.pointer.representations.ReferenceVariable;
import soot.jimple.toolkits.pointer.util.NativeHelper;
import soot.toolkits.scalar.Pair;
public class SparkNativeHelper extends NativeHelper {
  protected PAG pag;
  /**
   * Creates a helper that materializes native-method side effects as nodes and
   * edges of the given pointer assignment graph.
   */
  public SparkNativeHelper(PAG pag) {
    this.pag = pag;
  }
  /** lhs = rhs: a direct flow edge between two reference variables. */
  protected void assignImpl(ReferenceVariable lhs, ReferenceVariable rhs) {
    pag.addEdge((Node) rhs, (Node) lhs);
  }
  /** lhs = <abstract object>: allocate a site for obj and point lhs at it. */
  protected void assignObjectToImpl(ReferenceVariable lhs, AbstractObject obj) {
    AllocNode objNode = pag.makeAllocNode(new Pair<>("AbstractObject", obj.getType()), obj.getType(), null);
    VarNode var;
    if (lhs instanceof FieldRefNode) {
      // A field store cannot receive an allocation edge directly; route the
      // object through a synthetic global variable first.
      var = pag.makeGlobalVarNode(objNode, objNode.getType());
      pag.addEdge((Node) lhs, var);
    } else {
      var = (VarNode) lhs;
    }
    pag.addEdge(objNode, var);
  }
  /** Models "throw new <abstract object>" by feeding the PAG's throw node. */
  protected void throwExceptionImpl(AbstractObject obj) {
    AllocNode objNode = pag.makeAllocNode(new Pair<>("AbstractObject", obj.getType()), obj.getType(), null);
    pag.addEdge(objNode, pag.nodeFactory().caseThrow());
  }
  /** Returns a node standing for base[i]; field bases are funneled through a global. */
  protected ReferenceVariable arrayElementOfImpl(ReferenceVariable base) {
    VarNode l;
    if (base instanceof VarNode) {
      l = (VarNode) base;
    } else {
      FieldRefNode b = (FieldRefNode) base;
      l = pag.makeGlobalVarNode(b, b.getType());
      pag.addEdge(b, l);
    }
    return pag.makeFieldRefNode(l, ArrayElement.v());
  }
  /** clone() is modeled as aliasing: the clone shares the source's points-to set. */
  protected ReferenceVariable cloneObjectImpl(ReferenceVariable source) {
    return source;
  }
  /** Models Class.newInstance() on the given class variable. */
  protected ReferenceVariable newInstanceOfImpl(ReferenceVariable cls) {
    return pag.nodeFactory().caseNewInstance((VarNode) cls);
  }
  /** Returns the global node for the named static field. */
  protected ReferenceVariable staticFieldImpl(String className, String fieldName) {
    SootClass c = RefType.v(className).getSootClass();
    SootField f = c.getFieldByName(fieldName);
    return pag.makeGlobalVarNode(f, f.getType());
  }
  /** Returns a per-signature synthetic global used as an opaque field stand-in. */
  protected ReferenceVariable tempFieldImpl(String fieldsig) {
    return pag.makeGlobalVarNode(new Pair<>("tempField", fieldsig), RefType.v("java.lang.Object"));
  }
  /**
   * Returns a fresh synthetic global variable. Autoboxing replaces the
   * deprecated {@code new Integer(int)} constructor used previously.
   */
  protected ReferenceVariable tempVariableImpl() {
    return pag.makeGlobalVarNode(new Pair<>("TempVar", ++G.v().SparkNativeHelper_tempVar),
        RefType.v("java.lang.Object"));
  }
  /** Returns a fresh synthetic local variable scoped to the given method. */
  protected ReferenceVariable tempLocalVariableImpl(SootMethod method) {
    return pag.makeLocalVarNode(new Pair<>("TempVar", ++G.v().SparkNativeHelper_tempVar),
        RefType.v("java.lang.Object"), method);
  }
}
| 3,735
| 32.657658
| 106
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/internal/TypeManager.java
|
package soot.jimple.spark.internal;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2002 Ondrej Lhotak
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.function.Supplier;
import soot.AnySubType;
import soot.ArrayType;
import soot.FastHierarchy;
import soot.NullType;
import soot.RefLikeType;
import soot.RefType;
import soot.Scene;
import soot.SootClass;
import soot.Type;
import soot.TypeSwitch;
import soot.jimple.spark.pag.AllocNode;
import soot.jimple.spark.pag.Node;
import soot.jimple.spark.pag.PAG;
import soot.jimple.toolkits.typing.fast.WeakObjectType;
import soot.util.ArrayNumberer;
import soot.util.BitVector;
import soot.util.LargeNumberedMap;
import soot.util.queue.QueueReader;
/**
* A map of bit-vectors representing subtype relationships.
*
* @author Ondrej Lhotak
*
* @author Hamid A. Toussi (hamid2c@gmail.com): Making TypeManager faster by making type masks during a depth-first-traversal
* on the class hierarchy. First, type-masks of the leaves of Class Hierarchy are created and then the type mask of
* each type T is obtained by ORing type maks of Types sub-types and setting the bit-numbers associated with
* Allocation Nodes of type T. The type-mask of each interface is achieved by ORing the type-masks of its top-level
* concrete implementers. In fact, Reference types are visited in reversed-topological-order.
*/
public final class TypeManager {
  // Maps each class to the allocation sites whose type is exactly that class
  // (populated by initClass2allocs before the hierarchy traversal).
  private Map<SootClass, List<AllocNode>> class2allocs = new HashMap<SootClass, List<AllocNode>>(1024);
  // Allocation sites typed AnySubType; these conservatively belong to every leaf-class mask.
  private List<AllocNode> anySubtypeAllocs = new LinkedList<AllocNode>();

  // Frequently-consulted types, cached once in the constructor.
  protected final RefType rtObject;
  protected final RefType rtSerializable;
  protected final RefType rtCloneable;

  public TypeManager(PAG pag) {
    this.pag = pag;
    this.rtObject = RefType.v("java.lang.Object");
    this.rtSerializable = RefType.v("java.io.Serializable");
    this.rtCloneable = RefType.v("java.lang.Cloneable");
  }

  /**
   * Returns true if the given type's class (or, for arrays, the element type's class) has
   * not been resolved to at least HIERARCHY level, i.e. no reliable subtype information is
   * available for it.
   */
  public static boolean isUnresolved(Type type) {
    if (type instanceof ArrayType) {
      // An array is as resolved as its element type.
      ArrayType at = (ArrayType) type;
      type = at.getArrayElementType();
    }
    if (!(type instanceof RefType)) {
      // Non-reference types never need resolution.
      return false;
    }
    RefType rt = (RefType) type;
    if (!rt.hasSootClass()) {
      if (rt instanceof WeakObjectType) {
        // try to resolve sootClass one more time.
        SootClass c = Scene.v().forceResolve(rt.getClassName(), SootClass.HIERARCHY);
        if (c == null) {
          return true;
        } else {
          rt.setSootClass(c);
        }
      } else {
        return true;
      }
    }
    SootClass cl = rt.getSootClass();
    return cl.resolvingLevel() < SootClass.HIERARCHY;
  }

  /**
   * Returns the type mask for the given type: a BitVector indexed by allocation-node
   * number whose set bits mark the alloc nodes whose type can be stored into the given
   * type. Before answering, drains the alloc-node listener queue so masks reflect nodes
   * created after makeTypeMask() ran. Returns null only for a null type.
   */
  final public BitVector get(Type type) {
    if (type == null) {
      return null;
    }
    // Incorporate allocation nodes created since the masks were built.
    while (allocNodeListener.hasNext()) {
      AllocNode n = allocNodeListener.next();
      Type nt = n.getType();
      Iterable<Type> types;
      if (nt instanceof NullType || nt instanceof AnySubType) {
        // These match every type, so every mask must be updated.
        types = Scene.v().getTypeNumberer();
      } else {
        types = Scene.v().getOrMakeFastHierarchy().canStoreTypeList(nt);
      }
      for (final Type t : types) {
        if (!(t instanceof RefLikeType)) {
          continue;
        }
        if (t instanceof AnySubType) {
          continue;
        }
        if (isUnresolved(t)) {
          continue;
        }
        BitVector mask = typeMask.get(t);
        if (mask == null) {
          // No mask yet for t: build it from scratch over all alloc nodes.
          // That scan already covers n, hence the continue below.
          typeMask.put(t, mask = new BitVector());
          for (final AllocNode an : pag.getAllocNodeNumberer()) {
            if (castNeverFails(an.getType(), t)) {
              mask.set(an.getNumber());
            }
          }
          continue;
        }
        mask.set(n.getNumber());
      }
    }
    BitVector ret = (BitVector) typeMask.get(type);
    if (ret == null && fh != null) {
      // If we have a phantom class and have no type mask, we assume that
      // it is not cast-compatible to anything
      SootClass curClass = ((RefType) type).getSootClass();
      if (curClass.isPhantom()) {
        return new BitVector();
      } else {
        // Scan through the hierarchy. We might have a phantom class higher up
        while (curClass.hasSuperclass()) {
          curClass = curClass.getSuperclass();
          if (type instanceof RefType && curClass.isPhantom()) {
            return new BitVector();
          }
        }
        throw new RuntimeException("Type mask not found for type " + type);
      }
    }
    return ret;
  }

  /** Discards all cached type masks. */
  final public void clearTypeMask() {
    typeMask = null;
  }

  /**
   * (Re)builds the type masks for all resolved RefLikeTypes: class masks are computed
   * bottom-up from the hierarchy (a class mask is the OR of its subclasses' masks plus its
   * own allocation sites), interface masks are the OR of their implementers' masks, and the
   * remaining types (arrays, Object, Serializable, Cloneable) are handled by a direct scan
   * over all allocation nodes. Also (re)subscribes to the PAG's alloc-node listener so that
   * get() can patch masks incrementally afterwards.
   */
  final public void makeTypeMask() {
    RefType.v("java.lang.Class");
    typeMask = new LargeNumberedMap<Type, BitVector>(Scene.v().getTypeNumberer());
    if (fh == null) {
      // Without a hierarchy we cannot compute subtype information.
      return;
    }
    // **
    initClass2allocs();
    makeClassTypeMask(Scene.v().getSootClass("java.lang.Object"));
    BitVector visitedTypes = new BitVector();
    {
      // Record which types already received a mask via the Object-rooted traversal.
      Iterator<Type> it = typeMask.keyIterator();
      while (it.hasNext()) {
        Type t = it.next();
        visitedTypes.set(t.getNumber());
      }
    }
    // **
    ArrayNumberer<AllocNode> allocNodes = pag.getAllocNodeNumberer();
    for (Type t : Scene.v().getTypeNumberer()) {
      if (!(t instanceof RefLikeType)) {
        continue;
      }
      if (t instanceof AnySubType) {
        continue;
      }
      if (isUnresolved(t)) {
        continue;
      }
      // **
      if (t instanceof RefType && t != rtObject && t != rtSerializable && t != rtCloneable) {
        RefType rt = (RefType) t;
        SootClass sc = rt.getSootClass();
        if (sc.isInterface()) {
          makeMaskOfInterface(sc);
        }
        if (!visitedTypes.get(t.getNumber()) && !rt.getSootClass().isPhantom()) {
          makeClassTypeMask(rt.getSootClass());
        }
        continue;
      }
      // ** Fallback (arrays, Object, Serializable, Cloneable): scan every alloc node.
      BitVector mask = new BitVector(allocNodes.size());
      for (Node n : allocNodes) {
        if (castNeverFails(n.getType(), t)) {
          mask.set(n.getNumber());
        }
      }
      typeMask.put(t, mask);
    }
    allocNodeListener = pag.allocNodeListener();
  }

  private LargeNumberedMap<Type, BitVector> typeMask = null;

  /**
   * Returns true if a value of (runtime) type src can always be stored into a location of
   * declared type dst, i.e. the cast can never fail.
   */
  final public boolean castNeverFails(Type src, Type dst) {
    if (dst == null) {
      return true;
    } else if (dst == src) {
      return true;
    } else if (src == null) {
      return false;
    } else if (src instanceof NullType) {
      return true;
    } else if (src instanceof AnySubType) {
      return true;
    } else if (dst instanceof NullType) {
      return false;
    } else if (dst instanceof AnySubType) {
      throw new RuntimeException("oops src=" + src + " dst=" + dst);
    } else {
      FastHierarchy fh = getFastHierarchy();
      if (fh == null) {
        // No hierarchy available: be conservative and allow the store.
        return true;
      }
      return fh.canStoreType(src, dst);
    }
  }

  public void setFastHierarchy(Supplier<FastHierarchy> fh) {
    this.fh = fh;
  }

  public FastHierarchy getFastHierarchy() {
    return fh == null ? null : fh.get();
  }

  // Supplier so the (mutable) scene hierarchy is looked up lazily at each use.
  protected Supplier<FastHierarchy> fh = null;
  protected PAG pag;
  // Queue of allocation nodes created after makeTypeMask(); drained in get().
  protected QueueReader<AllocNode> allocNodeListener = null;

  // ** new methods
  /** Populates class2allocs / anySubtypeAllocs from all known allocation nodes. */
  private void initClass2allocs() {
    for (AllocNode an : pag.getAllocNodeNumberer()) {
      addAllocNode(an);
    }
  }

  /** Indexes one allocation node by its class, or as an AnySubType allocation. */
  final private void addAllocNode(final AllocNode alloc) {
    alloc.getType().apply(new TypeSwitch() {
      final public void caseRefType(RefType t) {
        SootClass cl = t.getSootClass();
        List<AllocNode> list;
        if ((list = class2allocs.get(cl)) == null) {
          list = new LinkedList<AllocNode>();
          class2allocs.put(cl, list);
        }
        list.add(alloc);
      }

      final public void caseAnySubType(AnySubType t) {
        anySubtypeAllocs.add(alloc);
      }
    });
  }

  /**
   * Recursively computes (and caches) the mask of a class: its own allocation sites OR-ed
   * with the masks of all its subclasses. Leaf classes additionally absorb the AnySubType
   * allocations.
   */
  final private BitVector makeClassTypeMask(SootClass clazz) {
    {
      BitVector cachedMask = typeMask.get(clazz.getType());
      if (cachedMask != null) {
        return cachedMask;
      }
    }
    int nBits = pag.getAllocNodeNumberer().size();
    final BitVector mask = new BitVector(nBits);
    List<AllocNode> allocs = null;
    if (clazz.isConcrete()) {
      allocs = class2allocs.get(clazz);
    }
    if (allocs != null) {
      for (AllocNode an : allocs) {
        mask.set(an.getNumber());
      }
    }
    Collection<SootClass> subclasses = fh.get().getSubclassesOf(clazz);
    // NOTE(review): identity comparison relies on FastHierarchy returning exactly
    // Collections.EMPTY_LIST for "no subclasses" -- confirm against FastHierarchy.
    if (subclasses == Collections.EMPTY_LIST) {
      for (AllocNode an : anySubtypeAllocs) {
        mask.set(an.getNumber());
      }
      typeMask.put(clazz.getType(), mask);
      return mask;
    }
    for (SootClass subcl : subclasses) {
      mask.or(makeClassTypeMask(subcl));
    }
    typeMask.put(clazz.getType(), mask);
    return mask;
  }

  /**
   * Computes (and caches) the mask of an interface as the OR of the masks of all its
   * implementers.
   */
  final private BitVector makeMaskOfInterface(SootClass interf) {
    if (!(interf.isInterface())) {
      throw new RuntimeException();
    }
    BitVector ret = new BitVector(pag.getAllocNodeNumberer().size());
    typeMask.put(interf.getType(), ret);
    Collection<SootClass> implementers = getFastHierarchy().getAllImplementersOfInterface(interf);
    for (SootClass impl : implementers) {
      BitVector other = typeMask.get(impl.getType());
      if (other == null) {
        other = makeClassTypeMask(impl);
      }
      ret.or(other);
    }
    // I think, the following can be eliminated. It is added to make
    // type-masks exactly the same as the original type-masks
    if (implementers.size() == 0) {
      for (AllocNode an : anySubtypeAllocs) {
        ret.set(an.getNumber());
      }
    }
    return ret;
  }
}
| 10,472
| 28.501408
| 125
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/ondemand/AllocAndContext.java
|
package soot.jimple.spark.ondemand;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2007 Manu Sridharan
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.jimple.spark.ondemand.genericutil.ImmutableStack;
import soot.jimple.spark.pag.AllocNode;
/**
 * An (allocation site, calling context) pair: the element type of context-sensitive
 * points-to query results. Immutable; equality is exact-class-based and considers both
 * components.
 */
public class AllocAndContext {

  /** The allocation site. */
  public final AllocNode alloc;

  /** The calling context (stack of call-site numbers) under which {@link #alloc} flows. */
  public final ImmutableStack<Integer> context;

  public AllocAndContext(AllocNode alloc, ImmutableStack<Integer> context) {
    this.alloc = alloc;
    this.context = context;
  }

  @Override
  public String toString() {
    return alloc + ", context " + context;
  }

  @Override
  public int hashCode() {
    final int PRIME = 31;
    int result = 1;
    result = PRIME * result + alloc.hashCode();
    result = PRIME * result + context.hashCode();
    return result;
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null) {
      return false;
    }
    // Exact class check keeps equals() symmetric for potential subclasses.
    if (getClass() != obj.getClass()) {
      return false;
    }
    final AllocAndContext other = (AllocAndContext) obj;
    if (!alloc.equals(other.alloc)) {
      return false;
    }
    if (!context.equals(other.context)) {
      return false;
    }
    return true;
  }
}
| 1,870
| 25.728571
| 76
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/ondemand/AllocAndContextSet.java
|
package soot.jimple.spark.ondemand;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2007 Manu Sridharan
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.HashSet;
import java.util.Set;
import soot.PointsToSet;
import soot.Type;
import soot.jimple.ClassConstant;
import soot.jimple.spark.ondemand.genericutil.ArraySet;
import soot.jimple.spark.ondemand.genericutil.ImmutableStack;
import soot.jimple.spark.pag.AllocNode;
import soot.jimple.spark.pag.ClassConstantNode;
import soot.jimple.spark.pag.Node;
import soot.jimple.spark.pag.StringConstantNode;
import soot.jimple.spark.sets.EqualsSupportingPointsToSet;
import soot.jimple.spark.sets.P2SetVisitor;
import soot.jimple.spark.sets.PointsToSetInternal;
public final class AllocAndContextSet extends ArraySet<AllocAndContext> implements EqualsSupportingPointsToSet {
public boolean hasNonEmptyIntersection(PointsToSet other) {
if (other instanceof AllocAndContextSet) {
return nonEmptyHelper((AllocAndContextSet) other);
} else if (other instanceof WrappedPointsToSet) {
return hasNonEmptyIntersection(((WrappedPointsToSet) other).getWrapped());
} else if (other instanceof PointsToSetInternal) {
return ((PointsToSetInternal) other).forall(new P2SetVisitor() {
@Override
public void visit(Node n) {
if (!returnValue) {
for (AllocAndContext allocAndContext : AllocAndContextSet.this) {
if (n.equals(allocAndContext.alloc)) {
returnValue = true;
break;
}
}
}
}
});
}
throw new UnsupportedOperationException("can't check intersection with set of type " + other.getClass());
}
private boolean nonEmptyHelper(AllocAndContextSet other) {
for (AllocAndContext otherAllocAndContext : other) {
for (AllocAndContext myAllocAndContext : this) {
if (otherAllocAndContext.alloc.equals(myAllocAndContext.alloc)) {
ImmutableStack<Integer> myContext = myAllocAndContext.context;
ImmutableStack<Integer> otherContext = otherAllocAndContext.context;
if (myContext.topMatches(otherContext) || otherContext.topMatches(myContext)) {
return true;
}
}
}
}
return false;
}
public Set<ClassConstant> possibleClassConstants() {
Set<ClassConstant> res = new HashSet<ClassConstant>();
for (AllocAndContext allocAndContext : this) {
AllocNode n = allocAndContext.alloc;
if (n instanceof ClassConstantNode) {
res.add(((ClassConstantNode) n).getClassConstant());
} else {
return null;
}
}
return res;
}
public Set<String> possibleStringConstants() {
Set<String> res = new HashSet<String>();
for (AllocAndContext allocAndContext : this) {
AllocNode n = allocAndContext.alloc;
if (n instanceof StringConstantNode) {
res.add(((StringConstantNode) n).getString());
} else {
return null;
}
}
return res;
}
public Set<Type> possibleTypes() {
Set res = new HashSet<Type>();
for (AllocAndContext allocAndContext : this) {
res.add(allocAndContext.alloc.getType());
}
return res;
}
/**
* Computes a hash code based on the contents of the points-to set. Note that hashCode() is not overwritten on purpose.
* This is because Spark relies on comparison by object identity.
*/
public int pointsToSetHashCode() {
final int PRIME = 31;
int result = 1;
for (AllocAndContext elem : this) {
result = PRIME * result + elem.hashCode();
}
return result;
}
/**
* Returns <code>true</code> if and only if other holds the same alloc nodes as this. Note that equals() is not overwritten
* on purpose. This is because Spark relies on comparison by object identity.
*/
public boolean pointsToSetEquals(Object other) {
if (this == other) {
return true;
}
if (!(other instanceof AllocAndContextSet)) {
return false;
}
AllocAndContextSet otherPts = (AllocAndContextSet) other;
// both sets are equal if they are supersets of each other
return superSetOf(otherPts, this) && superSetOf(this, otherPts);
}
/**
* Returns <code>true</code> if <code>onePts</code> is a (non-strict) superset of <code>otherPts</code>.
*/
private boolean superSetOf(AllocAndContextSet onePts, final AllocAndContextSet otherPts) {
return onePts.containsAll(otherPts);
}
}
| 5,179
| 32.636364
| 125
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/ondemand/CallSiteException.java
|
package soot.jimple.spark.ondemand;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2007 Manu Sridharan
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
/**
 * Unchecked marker exception used by the demand-driven points-to analysis.
 * NOTE(review): throw/catch sites are outside this file; the name suggests it signals a
 * failure while refining a call site -- confirm at usages.
 */
@SuppressWarnings("serial")
public class CallSiteException extends RuntimeException {
}
| 925
| 30.931034
| 71
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/ondemand/DemandCSPointsTo.java
|
package soot.jimple.spark.ondemand;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2007 Manu Sridharan
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import soot.AnySubType;
import soot.ArrayType;
import soot.Context;
import soot.Local;
import soot.PointsToAnalysis;
import soot.PointsToSet;
import soot.RefType;
import soot.Scene;
import soot.SootField;
import soot.SootMethod;
import soot.Type;
import soot.jimple.spark.ondemand.genericutil.ArraySet;
import soot.jimple.spark.ondemand.genericutil.HashSetMultiMap;
import soot.jimple.spark.ondemand.genericutil.ImmutableStack;
import soot.jimple.spark.ondemand.genericutil.Predicate;
import soot.jimple.spark.ondemand.genericutil.Propagator;
import soot.jimple.spark.ondemand.genericutil.Stack;
import soot.jimple.spark.ondemand.pautil.AssignEdge;
import soot.jimple.spark.ondemand.pautil.ContextSensitiveInfo;
import soot.jimple.spark.ondemand.pautil.OTFMethodSCCManager;
import soot.jimple.spark.ondemand.pautil.SootUtil;
import soot.jimple.spark.ondemand.pautil.SootUtil.FieldToEdgesMap;
import soot.jimple.spark.ondemand.pautil.ValidMatches;
import soot.jimple.spark.pag.AllocNode;
import soot.jimple.spark.pag.FieldRefNode;
import soot.jimple.spark.pag.GlobalVarNode;
import soot.jimple.spark.pag.LocalVarNode;
import soot.jimple.spark.pag.Node;
import soot.jimple.spark.pag.PAG;
import soot.jimple.spark.pag.SparkField;
import soot.jimple.spark.pag.VarNode;
import soot.jimple.spark.sets.EmptyPointsToSet;
import soot.jimple.spark.sets.EqualsSupportingPointsToSet;
import soot.jimple.spark.sets.HybridPointsToSet;
import soot.jimple.spark.sets.P2SetVisitor;
import soot.jimple.spark.sets.PointsToSetEqualsWrapper;
import soot.jimple.spark.sets.PointsToSetInternal;
import soot.jimple.toolkits.callgraph.VirtualCalls;
import soot.toolkits.scalar.Pair;
import soot.util.NumberedString;
/**
* Tries to find imprecision in points-to sets from a previously run analysis. Requires that all sub-results of previous
* analysis were cached.
*
* @author Manu Sridharan
*
*/
public final class DemandCSPointsTo implements PointsToAnalysis {
private static final Logger logger = LoggerFactory.getLogger(DemandCSPointsTo.class);
// Cache keyed by (alloc, context), mapping each queried target variable to the calling
// contexts already computed for it (see checkAllocAndContextCache).
@SuppressWarnings("serial")
protected static final class AllocAndContextCache extends HashMap<AllocAndContext, Map<VarNode, CallingContextSet>> {
}
/** A set of calling contexts (stacks of call-site numbers). */
protected static final class CallingContextSet extends ArraySet<ImmutableStack<Integer>> {
}
/** A (call-site number, calling context) pair. */
protected final static class CallSiteAndContext extends Pair<Integer, ImmutableStack<Integer>> {
  public CallSiteAndContext(Integer callSite, ImmutableStack<Integer> callingContext) {
    super(callSite, callingContext);
  }
}
/** Multi-map from a (call site, context) to the methods resolved as its targets. */
protected static final class CallSiteToTargetsMap extends HashSetMultiMap<CallSiteAndContext, SootMethod> {
}
/**
 * Callback used while traversing edges that flow into a queried variable; concrete
 * subclasses accumulate a result (retrieved via {@link #getResult()}).
 */
protected static abstract class IncomingEdgeHandler {

  /** Called when an allocation site flows directly into the variable being traversed. */
  public abstract void handleAlloc(AllocNode allocNode, VarAndContext origVarAndContext);

  // Presumably called for a field store/load ("match") edge whose base points-to sets
  // intersect -- confirm against the traversal code that invokes it.
  public abstract void handleMatchSrc(VarNode matchSrc, PointsToSetInternal intersection, VarNode loadBase,
      VarNode storeBase, VarAndContext origVarAndContext, SparkField field, boolean refine);

  abstract Object getResult();

  abstract void handleAssignSrc(VarAndContext newVarAndContext, VarAndContext origVarAndContext, AssignEdge assignEdge);

  /** Filter: whether the traversal should follow edges from this source at all. */
  abstract boolean shouldHandleSrc(VarNode src);

  // Subclasses may signal early termination of the traversal; default is to continue.
  boolean terminate() {
    return false;
  }
}
/**
 * A query key: a variable node paired with the calling context under which it is
 * considered. Equality is exact-class-based so instances never compare equal to instances
 * of the subclass {@link VarContextAndUp}, keeping equals() symmetric.
 */
protected static class VarAndContext {

  final ImmutableStack<Integer> context;

  final VarNode var;

  public VarAndContext(VarNode var, ImmutableStack<Integer> context) {
    assert var != null;
    assert context != null;
    this.var = var;
    this.context = context;
  }

  @Override
  public boolean equals(Object o) {
    // Exact class check (not instanceof); see class comment.
    if (o != null && o.getClass() == VarAndContext.class) {
      VarAndContext other = (VarAndContext) o;
      return var.equals(other.var) && context.equals(other.context);
    }
    return false;
  }

  @Override
  public int hashCode() {
    return var.hashCode() + context.hashCode();
  }

  @Override
  public String toString() {
    return var + " " + context;
  }
}
protected final static class VarContextAndUp extends VarAndContext {
final ImmutableStack<Integer> upContext;
public VarContextAndUp(VarNode var, ImmutableStack<Integer> context, ImmutableStack<Integer> upContext) {
super(var, context);
this.upContext = upContext;
}
public boolean equals(Object o) {
if (o != null && o.getClass() == VarContextAndUp.class) {
VarContextAndUp other = (VarContextAndUp) o;
return var.equals(other.var) && context.equals(other.context) && upContext.equals(other.upContext);
}
return false;
}
public int hashCode() {
return var.hashCode() + context.hashCode() + upContext.hashCode();
}
public String toString() {
return var + " " + context + " up " + upContext;
}
}
// Global debug switch for the analysis.
public static boolean DEBUG = false;
// Maximum nesting depth at which debugPrint still emits output.
protected static final int DEBUG_NESTING = 15;
// When != -1, restricts debugging (and caps execution) to this pass number.
protected static final int DEBUG_PASS = -1;
// Extra debugging of virtual-call handling.
protected static final boolean DEBUG_VIRT = DEBUG && true;
// Default budget: the total traversal budget is split evenly over this many passes.
protected static final int DEFAULT_MAX_PASSES = 10;
protected static final int DEFAULT_MAX_TRAVERSAL = 75000;
// By default, defer refinement until the points-to set is actually inspected.
protected static final boolean DEFAULT_LAZY = true;
/**
 * if <code>true</code>, refine the pre-computed call graph
 */
private boolean refineCallGraph = true;
// The empty calling context; root of every refinement query.
protected static final ImmutableStack<Integer> EMPTY_CALLSTACK = ImmutableStack.<Integer>emptyStack();
/**
 * Make a default analysis. Assumes Spark has already run.
 *
 * @return a demand-driven context-sensitive points-to analysis with the default
 *         traversal/pass budgets and default (lazy) evaluation
 */
public static DemandCSPointsTo makeDefault() {
  return makeWithBudget(DEFAULT_MAX_TRAVERSAL, DEFAULT_MAX_PASSES, DEFAULT_LAZY);
}
/**
 * Makes an analysis with explicit budgets, using Spark's existing PAG.
 *
 * @param maxTraversal total number of node traversals allowed per query, split evenly
 *          over the passes
 * @param maxPasses maximum number of refinement passes per query
 * @param lazy if true, defer refinement until the points-to set is actually inspected
 */
public static DemandCSPointsTo makeWithBudget(int maxTraversal, int maxPasses, boolean lazy) {
  PAG pag = (PAG) Scene.v().getPointsToAnalysis();
  ContextSensitiveInfo csInfo = new ContextSensitiveInfo(pag);
  return new DemandCSPointsTo(csInfo, pag, maxTraversal, maxPasses, lazy);
}
// ---- per-query caches and traversal state (reset by clearState()) ----
protected final AllocAndContextCache allocAndContextCache = new AllocAndContextCache();
// Stack of (call site, context) pairs currently being refined.
protected Stack<Pair<Integer, ImmutableStack<Integer>>> callGraphStack
    = new Stack<Pair<Integer, ImmutableStack<Integer>>>();
protected final CallSiteToTargetsMap callSiteToResolvedTargets = new CallSiteToTargetsMap();
protected HashMap<List<Object>, Set<SootMethod>> callTargetsArgCache = new HashMap<List<Object>, Set<SootMethod>>();
protected final Stack<VarAndContext> contextForAllocsStack = new Stack<VarAndContext>();
// Per (var, context): locations already examined and the (alloc, context) results found.
protected Map<VarAndContext, Pair<PointsToSetInternal, AllocAndContextSet>> contextsForAllocsCache
    = new HashMap<VarAndContext, Pair<PointsToSetInternal, AllocAndContextSet>>();
protected final ContextSensitiveInfo csInfo;
/**
 * if <code>true</code>, compute full points-to set for queried variable
 */
protected boolean doPointsTo;
// Heuristic deciding which field-match edges get refined; reset per query.
protected FieldCheckHeuristic fieldCheckHeuristic;
protected HeuristicType heuristicType;
// Field -> load/store edge indices, built lazily by init().
protected FieldToEdgesMap fieldToLoads;
protected FieldToEdgesMap fieldToStores;
protected final int maxNodesPerPass;
protected final int maxPasses;
// Debug/traversal bookkeeping.
protected int nesting = 0;
protected int numNodesTraversed;
protected int numPasses = 0;
protected final PAG pag;
// Accumulator for the current query's result when doPointsTo is set.
protected AllocAndContextSet pointsTo = null;
protected final Set<CallSiteAndContext> queriedCallSites = new HashSet<CallSiteAndContext>();
protected int recursionDepth = -1;
protected boolean refiningCallSite = false;
protected OTFMethodSCCManager sccManager;
protected Map<VarContextAndUp, Map<AllocAndContext, CallingContextSet>> upContextCache
    = new HashMap<VarContextAndUp, Map<AllocAndContext, CallingContextSet>>();
protected ValidMatches vMatches;
// Result caches, one per call-graph-refinement setting.
protected Map<Local, PointsToSet> reachingObjectsCache, reachingObjectsCacheNoCGRefinement;
protected boolean useCache;
private final boolean lazy;
/** Creates the analysis with the default traversal/pass budgets and default laziness. */
public DemandCSPointsTo(ContextSensitiveInfo csInfo, PAG pag) {
  this(csInfo, pag, DEFAULT_MAX_TRAVERSAL, DEFAULT_MAX_PASSES, DEFAULT_LAZY);
}
/**
 * Creates the analysis.
 *
 * @param maxTraversal total node-traversal budget per query, split evenly over the passes
 * @param maxPasses maximum number of refinement passes per query
 * @param lazy whether refinement is deferred until the set is actually inspected
 */
public DemandCSPointsTo(ContextSensitiveInfo csInfo, PAG pag, int maxTraversal, int maxPasses, boolean lazy) {
  this.csInfo = csInfo;
  this.pag = pag;
  this.maxPasses = maxPasses;
  this.lazy = lazy;
  // Split the total traversal budget evenly across refinement passes.
  this.maxNodesPerPass = maxTraversal / maxPasses;
  this.heuristicType = HeuristicType.INCR;
  // Separate result caches per call-graph-refinement setting.
  this.reachingObjectsCache = new HashMap<Local, PointsToSet>();
  this.reachingObjectsCacheNoCGRefinement = new HashMap<Local, PointsToSet>();
  this.useCache = true;
}
// Lazily builds the field-access indices and the valid store/load match relation;
// invoked on the first points-to query.
private void init() {
  this.fieldToStores = SootUtil.storesOnField(pag);
  this.fieldToLoads = SootUtil.loadsOnField(pag);
  this.vMatches = new ValidMatches(pag, fieldToStores);
}
/**
 * Returns the points-to set of l. In lazy mode, returns a wrapper around Spark's
 * context-insensitive set that triggers refinement only when it is actually needed;
 * otherwise refines eagerly.
 */
public PointsToSet reachingObjects(Local l) {
  if (lazy) {
    /*
     * create a lazy points-to set; this will not actually compute context information until we ask whether this points-to
     * set has a non-empty intersection with another points-to set and this intersection appears to be non-empty; when this
     * is the case then the points-to set will call doReachingObjects(..) to refine itself
     */
    return new LazyContextSensitivePointsToSet(l, new WrappedPointsToSet((PointsToSetInternal) pag.reachingObjects(l)),
        this);
  } else {
    return doReachingObjects(l);
  }
}
/**
 * Computes (and optionally caches) the possibly-refined points-to set of l; falls back to
 * Spark's set if refinement runs out of budget (see computeReachingObjects).
 */
public PointsToSet doReachingObjects(Local l) {
  // lazy initialization
  if (fieldToStores == null) {
    init();
  }
  PointsToSet result;
  Map<Local, PointsToSet> cache;
  if (refineCallGraph) { // we use different caches for different settings
    cache = reachingObjectsCache;
  } else {
    cache = reachingObjectsCacheNoCGRefinement;
  }
  result = cache.get(l);
  if (result == null) {
    result = computeReachingObjects(l);
    if (useCache) {
      cache.put(l, result);
    }
  }
  // Only evaluated with assertions enabled; recomputes the answer from scratch.
  assert consistentResult(l, result);
  return result;
}
/**
 * Returns <code>false</code> if an inconsistent computation occurred, i.e. if result differs from the result computed by
 * {@link #computeReachingObjects(Local)} on l. Used only inside an assert.
 */
private boolean consistentResult(Local l, PointsToSet result) {
  PointsToSet result2 = computeReachingObjects(l);
  if (!(result instanceof EqualsSupportingPointsToSet) || !(result2 instanceof EqualsSupportingPointsToSet)) {
    // cannot compare, assume everything is fine
    return true;
  }
  EqualsSupportingPointsToSet eq1 = (EqualsSupportingPointsToSet) result;
  EqualsSupportingPointsToSet eq2 = (EqualsSupportingPointsToSet) result2;
  // Wrappers delegate equality to pointsToSetEquals (Spark's equals is identity-based).
  return new PointsToSetEqualsWrapper(eq1).equals(new PointsToSetEqualsWrapper(eq2));
}
/**
 * Computes the possibly refined set of reaching objects for l. Falls back to Spark's
 * context-insensitive set (wrapped) when the variable is unknown or refinement aborts.
 */
protected PointsToSet computeReachingObjects(Local l) {
  VarNode v = pag.findLocalVarNode(l);
  if (v == null) {
    // no reaching objects
    return EmptyPointsToSet.v();
  }
  PointsToSet contextSensitiveResult = computeRefinedReachingObjects(v);
  if (contextSensitiveResult == null) {
    // had to abort; return Spark's points-to set in a wrapper
    return new WrappedPointsToSet(v.getP2Set());
  } else {
    return contextSensitiveResult;
  }
}
/**
 * Computes the refined set of reaching objects for l. Returns <code>null</code> if refinement failed
 * (i.e. no pass completed within the budget).
 */
protected PointsToSet computeRefinedReachingObjects(VarNode v) {
  // must reset the refinement heuristic for each query
  this.fieldCheckHeuristic = HeuristicType.getHeuristic(heuristicType, pag.getTypeManager(), getMaxPasses());
  doPointsTo = true;
  numPasses = 0;
  PointsToSet contextSensitiveResult = null;
  while (true) {
    numPasses++;
    if (DEBUG_PASS != -1 && numPasses > DEBUG_PASS) {
      break;
    }
    if (numPasses > maxPasses) {
      break;
    }
    if (DEBUG) {
      logger.debug("PASS " + numPasses);
      logger.debug("" + fieldCheckHeuristic);
    }
    // Each pass starts from a clean slate; only the heuristic carries state across passes.
    clearState();
    pointsTo = new AllocAndContextSet();
    try {
      refineP2Set(new VarAndContext(v, EMPTY_CALLSTACK), null);
      contextSensitiveResult = pointsTo;
    } catch (TerminateEarlyException e) {
      // Budget exceeded in this pass; keep the last successful result (if any).
      logger.debug(e.getMessage(), e);
    }
    if (!fieldCheckHeuristic.runNewPass()) {
      break;
    }
  }
  return contextSensitiveResult;
}
/**
 * Returns true if the given call edge connects two methods already known to lie in the
 * same strongly connected component of the on-the-fly call graph.
 */
protected boolean callEdgeInSCC(AssignEdge assignEdge) {
  assert assignEdge.isCallEdge();
  Node srcNode = assignEdge.getSrc();
  Node dstNode = assignEdge.getDst();
  // Only edges between locals carry method information; anything else cannot be intra-SCC.
  if (!(srcNode instanceof LocalVarNode) || !(dstNode instanceof LocalVarNode)) {
    return false;
  }
  SootMethod srcMethod = ((LocalVarNode) srcNode).getMethod();
  SootMethod dstMethod = ((LocalVarNode) dstNode).getMethod();
  return sccManager.inSameSCC(srcMethod, dstMethod);
}
/**
 * Looks up the cached up-contexts for (allocAndContext, targetVar). On a miss for the
 * (alloc, context) key, installs an empty per-variable table as a side effect. Returns
 * null when nothing is cached for this particular variable yet.
 */
protected CallingContextSet checkAllocAndContextCache(AllocAndContext allocAndContext, VarNode targetVar) {
  Map<VarNode, CallingContextSet> perVar = allocAndContextCache.get(allocAndContext);
  if (perVar == null) {
    // First query for this (alloc, context): prepare an empty table for future hits.
    allocAndContextCache.put(allocAndContext, new HashMap<VarNode, CallingContextSet>());
    return null;
  }
  // Table exists; may still miss for this particular variable (null in that case).
  return perVar.get(targetVar);
}
/**
 * Consults and updates the contexts-for-allocs cache for varAndContext: cached
 * (alloc, context) results whose alloc is in locs are copied into ret, and the method
 * returns the subset of locs not yet covered by the cache (the locations still to be
 * computed). On a cache miss, the full locs set is recorded and returned.
 */
protected PointsToSetInternal checkContextsForAllocsCache(VarAndContext varAndContext, AllocAndContextSet ret,
    PointsToSetInternal locs) {
  PointsToSetInternal retSet = null;
  if (contextsForAllocsCache.containsKey(varAndContext)) {
    // Reuse previously computed (alloc, context) pairs that match the requested locs.
    for (AllocAndContext allocAndContext : contextsForAllocsCache.get(varAndContext).getO2()) {
      if (locs.contains(allocAndContext.alloc)) {
        ret.add(allocAndContext);
      }
    }
    final PointsToSetInternal oldLocs = contextsForAllocsCache.get(varAndContext).getO1();
    final PointsToSetInternal tmpSet = HybridPointsToSet.getFactory().newSet(locs.getType(), pag);
    // tmpSet := locs \ oldLocs, i.e. the locations not yet examined for this key.
    locs.forall(new P2SetVisitor() {
      @Override
      public void visit(Node n) {
        if (!oldLocs.contains(n)) {
          tmpSet.add(n);
        }
      }
    });
    retSet = tmpSet;
    // Mark the new locations as examined.
    oldLocs.addAll(tmpSet, null);
  } else {
    // First query for this (var, context): record a snapshot of locs as examined.
    PointsToSetInternal storedSet = HybridPointsToSet.getFactory().newSet(locs.getType(), pag);
    storedSet.addAll(locs, null);
    contextsForAllocsCache.put(varAndContext,
        new Pair<PointsToSetInternal, AllocAndContextSet>(storedSet, new AllocAndContextSet()));
    retSet = locs;
  }
  return retSet;
}
/**
 * check the computed points-to set of a variable against some predicate
 *
 * @param v
 *          the variable
 * @param heuristic
 *          how to refine match edges
 * @param p2setPred
 *          the predicate on the points-to set
 * @return <code>false</code> if some pass successfully computes a points-to set for which
 *         p2setPred holds; <code>true</code> if the budget is exhausted first.
 *         NOTE(review): this polarity matches the code below but is the opposite of the
 *         original doc wording ("true if the p2setPred holds") -- confirm caller intent.
 */
protected boolean checkP2Set(VarNode v, HeuristicType heuristic, Predicate<Set<AllocAndContext>> p2setPred) {
  doPointsTo = true;
  // DEBUG = v.getNumber() == 150;
  this.fieldCheckHeuristic = HeuristicType.getHeuristic(heuristic, pag.getTypeManager(), getMaxPasses());
  numPasses = 0;
  while (true) {
    numPasses++;
    if (DEBUG_PASS != -1 && numPasses > DEBUG_PASS) {
      return true;
    }
    if (numPasses > maxPasses) {
      // Budget exhausted without the predicate ever holding.
      return true;
    }
    if (DEBUG) {
      logger.debug("PASS " + numPasses);
      logger.debug("" + fieldCheckHeuristic);
    }
    clearState();
    pointsTo = new AllocAndContextSet();
    boolean success = false;
    try {
      success = refineP2Set(new VarAndContext(v, EMPTY_CALLSTACK), null);
    } catch (TerminateEarlyException e) {
      success = false;
    }
    if (success) {
      if (p2setPred.test(pointsTo)) {
        return false;
      }
    } else {
      // Failed pass: give the heuristic a chance to refine more aggressively.
      if (!fieldCheckHeuristic.runNewPass()) {
        return true;
      }
    }
  }
}
// protected boolean upContextsSane(CallingContextSet ret, AllocAndContext
// allocAndContext, VarContextAndUp varContextAndUp) {
// for (ImmutableStack<Integer> context : ret) {
// ImmutableStack<Integer> fixedContext = fixUpContext(context,
// allocAndContext, varContextAndUp);
// if (!context.equals(fixedContext)) {
// return false;
// }
// }
// return true;
// }
//
// protected CallingContextSet fixAllUpContexts(CallingContextSet contexts,
// AllocAndContext allocAndContext, VarContextAndUp varContextAndUp) {
// if (DEBUG) {
// debugPrint("fixing up contexts");
// }
// CallingContextSet ret = new CallingContextSet();
// for (ImmutableStack<Integer> context : contexts) {
// ret.add(fixUpContext(context, allocAndContext, varContextAndUp));
// }
// return ret;
// }
//
// protected ImmutableStack<Integer> fixUpContext(ImmutableStack<Integer>
// context, AllocAndContext allocAndContext, VarContextAndUp
// varContextAndUp) {
//
// return null;
// }
/**
 * Looks up the cached calling contexts for (varContextAndUp, allocAndContext). On a miss
 * for the outer key, installs an empty inner table as a side effect. Returns null when
 * nothing is cached for this particular (alloc, context) yet.
 */
protected CallingContextSet checkUpContextCache(VarContextAndUp varContextAndUp, AllocAndContext allocAndContext) {
  Map<AllocAndContext, CallingContextSet> perAlloc = upContextCache.get(varContextAndUp);
  if (perAlloc == null) {
    // First query for this (var, context, up-context): prepare an empty inner table.
    upContextCache.put(varContextAndUp, new HashMap<AllocAndContext, CallingContextSet>());
    return null;
  }
  // Inner table exists; may still miss for this (alloc, context) pair (null in that case).
  return perAlloc.get(allocAndContext);
}
/** Resets all per-pass caches, stacks and counters before a new refinement pass. */
protected void clearState() {
  allocAndContextCache.clear();
  callGraphStack.clear();
  callSiteToResolvedTargets.clear();
  queriedCallSites.clear();
  contextsForAllocsCache.clear();
  contextForAllocsStack.clear();
  upContextCache.clear();
  callTargetsArgCache.clear();
  // SCC information is recomputed on the fly within each pass.
  sccManager = new OTFMethodSCCManager();
  numNodesTraversed = 0;
  nesting = 0;
  recursionDepth = -1;
}
/**
 * compute a flows-to set for an allocation site. for now, we use a simple refinement strategy; just refine as much as
 * possible, maintaining the smallest set of flows-to vars
 *
 * @param alloc the allocation site to trace forward
 * @param heuristic how to refine match edges
 * @return the smallest flows-to set found over all passes, or <code>null</code> if no pass
 *         completed within the budget
 */
protected Set<VarNode> computeFlowsTo(AllocNode alloc, HeuristicType heuristic) {
  this.fieldCheckHeuristic = HeuristicType.getHeuristic(heuristic, pag.getTypeManager(), getMaxPasses());
  numPasses = 0;
  Set<VarNode> smallest = null;
  while (true) {
    numPasses++;
    if (DEBUG_PASS != -1 && numPasses > DEBUG_PASS) {
      return smallest;
    }
    if (numPasses > maxPasses) {
      return smallest;
    }
    if (DEBUG) {
      logger.debug("PASS " + numPasses);
      logger.debug("" + fieldCheckHeuristic);
    }
    clearState();
    Set<VarNode> result = null;
    try {
      result = getFlowsToHelper(new AllocAndContext(alloc, EMPTY_CALLSTACK));
    } catch (TerminateEarlyException e) {
      // Pass exceeded its budget; keep the best result so far.
      logger.debug(e.getMessage(), e);
    }
    if (result != null) {
      // Keep the smallest (most refined) flows-to set seen so far.
      if (smallest == null || result.size() < smallest.size()) {
        smallest = result;
      }
    }
    if (!fieldCheckHeuristic.runNewPass()) {
      return smallest;
    }
  }
}
protected void debugPrint(String str) {
if (nesting <= DEBUG_NESTING) {
if (DEBUG_PASS == -1 || DEBUG_PASS == numPasses) {
logger.debug(":" + nesting + " " + str);
}
}
}
  /**
   * Writes a DOT graph showing one pointer-assignment path that explains why {@code badLoc} appears
   * in the points-to set of {@code v}. The search walks backwards over allocation, assignment
   * (including inter-procedural call edges), and load/store field-match edges, stopping at the
   * first complete path found. The graph is dumped to
   * {@code tmp/<filePrefix><varNumber>_<allocNumber>.dot}.
   *
   * @see AAA.summary.Refiner#dumpPathForBadLoc(soot.jimple.spark.pag.VarNode, soot.jimple.spark.pag.AllocNode)
   */
  protected void dumpPathForLoc(VarNode v, final AllocNode badLoc, String filePrefix) {
    final HashSet<VarNode> visited = new HashSet<VarNode>();
    final DotPointerGraph dotGraph = new DotPointerGraph();
    // Recursive DFS helper: returns true once a path from an allocation of badLoc to curNode has
    // been found and recorded in dotGraph.
    final class Helper {
      boolean handle(VarNode curNode) {
        assert curNode.getP2Set().contains(badLoc);
        visited.add(curNode);
        // direct allocation edges: does badLoc flow straight into curNode?
        Node[] newEdges = pag.allocInvLookup(curNode);
        for (int i = 0; i < newEdges.length; i++) {
          AllocNode alloc = (AllocNode) newEdges[i];
          if (alloc.equals(badLoc)) {
            dotGraph.addNew(alloc, curNode);
            return true;
          }
        }
        // assignment edges, both intra-procedural assigns and call parameter/return edges
        for (AssignEdge assignEdge : csInfo.getAssignEdges(curNode)) {
          VarNode other = assignEdge.getSrc();
          if (other.getP2Set().contains(badLoc) && !visited.contains(other) && handle(other)) {
            if (assignEdge.isCallEdge()) {
              dotGraph.addCall(other, curNode, assignEdge.getCallSite());
            } else {
              dotGraph.addAssign(other, curNode);
            }
            return true;
          }
        }
        // field loads: look for a matching store whose base may alias this load's base
        Node[] loadEdges = pag.loadInvLookup(curNode);
        for (int i = 0; i < loadEdges.length; i++) {
          FieldRefNode frNode = (FieldRefNode) loadEdges[i];
          SparkField field = frNode.getField();
          VarNode base = frNode.getBase();
          PointsToSetInternal baseP2Set = base.getP2Set();
          for (Pair<VarNode, VarNode> store : fieldToStores.get(field)) {
            if (store.getO2().getP2Set().hasNonEmptyIntersection(baseP2Set)) {
              VarNode matchSrc = store.getO1();
              if (matchSrc.getP2Set().contains(badLoc) && !visited.contains(matchSrc) && handle(matchSrc)) {
                dotGraph.addMatch(matchSrc, curNode);
                return true;
              }
            }
          }
        }
        return false;
      }
    }
    Helper h = new Helper();
    h.handle(v);
    // logger.debug(""+dotGraph.numEdges() + " edges on path");
    dotGraph.dump("tmp/" + filePrefix + v.getNumber() + "_" + badLoc.getNumber() + ".dot");
  }
  /**
   * Collects the assignment edges of {@code v} that are valid under the given calling context,
   * filtering out call edges that do not match the top of the context and (optionally) virtual-call
   * edges whose target method is ruled out by on-the-fly call-graph refinement.
   *
   * @param v the variable node whose edges are inspected
   * @param callingContext the current calling context (stack of call sites)
   * @param forward true to use the forward assign edges, false for the inverse (assign-bar) edges
   * @param refineVirtCalls whether to refine virtual call sites while filtering
   * @return the edges that survive filtering
   */
  protected Collection<AssignEdge> filterAssigns(final VarNode v, final ImmutableStack<Integer> callingContext,
      boolean forward, boolean refineVirtCalls) {
    Set<AssignEdge> assigns = forward ? csInfo.getAssignEdges(v) : csInfo.getAssignBarEdges(v);
    Collection<AssignEdge> realAssigns;
    // an "exit node" leaves the current method: a parameter node when going forward, a return
    // node when going backward
    boolean exitNode = forward ? SootUtil.isParamNode(v) : SootUtil.isRetNode(v);
    final boolean backward = !forward;
    if (exitNode && !callingContext.isEmpty()) {
      // with a non-empty context we may only leave through the call site on top of the stack,
      // or through any edge inside the same SCC (where contexts are collapsed)
      Integer topCallSite = callingContext.peek();
      realAssigns = new ArrayList<AssignEdge>();
      for (AssignEdge assignEdge : assigns) {
        assert (forward && assignEdge.isParamEdge()) || (backward && assignEdge.isReturnEdge()) : assignEdge;
        Integer assignEdgeCallSite = assignEdge.getCallSite();
        assert csInfo.getCallSiteTargets(assignEdgeCallSite).contains(((LocalVarNode) v).getMethod()) : assignEdge;
        if (topCallSite.equals(assignEdgeCallSite) || callEdgeInSCC(assignEdge)) {
          realAssigns.add(assignEdge);
        }
      }
      // assert realAssigns.size() == 1;
    } else {
      if (assigns.size() > 1) {
        realAssigns = new ArrayList<AssignEdge>();
        for (AssignEdge assignEdge : assigns) {
          // an edge "entering a call" descends into a callee: a return edge when forward,
          // a param edge when backward
          boolean enteringCall = forward ? assignEdge.isReturnEdge() : assignEdge.isParamEdge();
          if (enteringCall) {
            Integer callSite = assignEdge.getCallSite();
            if (csInfo.isVirtCall(callSite) && refineVirtCalls) {
              // only keep the edge if the refined call graph still dispatches to its method
              Set<SootMethod> targets = refineCallSite(assignEdge.getCallSite(), callingContext);
              LocalVarNode nodeInTargetMethod
                  = forward ? (LocalVarNode) assignEdge.getSrc() : (LocalVarNode) assignEdge.getDst();
              if (targets.contains(nodeInTargetMethod.getMethod())) {
                realAssigns.add(assignEdge);
              }
            } else {
              realAssigns.add(assignEdge);
            }
          } else {
            realAssigns.add(assignEdge);
          }
        }
      } else {
        realAssigns = assigns;
      }
    }
    return realAssigns;
  }
  /**
   * Computes, for each allocation site in {@code locs}, the calling contexts under which it may be
   * pointed to by {@code varAndContext}. This is the core backwards traversal of the refinement
   * algorithm; results are cached in {@code contextsForAllocsCache} unless the query participates
   * in a recursive cycle (tracked via {@code contextForAllocsStack} / {@code recursionDepth}).
   *
   * @param varAndContext the variable (plus calling context) being queried
   * @param locs the allocation sites of interest
   * @return the allocation sites from {@code locs} reachable from the variable, with their contexts
   * @throws TerminateEarlyException if recursion gets too deep or the node budget is exceeded
   */
  protected AllocAndContextSet findContextsForAllocs(final VarAndContext varAndContext, PointsToSetInternal locs) {
    if (contextForAllocsStack.contains(varAndContext)) {
      // recursion; check depth
      // we're fine for x = x.next
      int oldIndex = contextForAllocsStack.indexOf(varAndContext);
      if (oldIndex != contextForAllocsStack.size() - 1) {
        if (recursionDepth == -1) {
          recursionDepth = oldIndex + 1;
          if (DEBUG) {
            debugPrint("RECURSION depth = " + recursionDepth);
          }
        } else if (contextForAllocsStack.size() - oldIndex > 5) {
          // just give up
          throw new TerminateEarlyException();
        }
      }
    }
    contextForAllocsStack.push(varAndContext);
    final AllocAndContextSet ret = new AllocAndContextSet();
    // realLocs is the subset of locs for which no cached answer exists; cached answers are
    // already added to ret by the cache check
    final PointsToSetInternal realLocs = checkContextsForAllocsCache(varAndContext, ret, locs);
    if (realLocs.isEmpty()) {
      if (DEBUG) {
        debugPrint("cached result " + ret);
      }
      contextForAllocsStack.pop();
      return ret;
    }
    nesting++;
    if (DEBUG) {
      debugPrint("finding alloc contexts for " + varAndContext);
    }
    try {
      final Set<VarAndContext> marked = new HashSet<VarAndContext>();
      final Stack<VarAndContext> worklist = new Stack<VarAndContext>();
      final Propagator<VarAndContext> p = new Propagator<VarAndContext>(marked, worklist);
      p.prop(varAndContext);
      // handler invoked by processIncomingEdges for each incoming edge kind
      IncomingEdgeHandler edgeHandler = new IncomingEdgeHandler() {
        @Override
        public void handleAlloc(AllocNode allocNode, VarAndContext origVarAndContext) {
          // reached an allocation site; record it if it is one we are looking for
          if (realLocs.contains(allocNode)) {
            if (DEBUG) {
              debugPrint("found alloc " + allocNode);
            }
            ret.add(new AllocAndContext(allocNode, origVarAndContext.context));
          }
        }
        @Override
        public void handleMatchSrc(final VarNode matchSrc, PointsToSetInternal intersection, VarNode loadBase,
            VarNode storeBase, VarAndContext origVarAndContext, SparkField field, boolean refine) {
          if (DEBUG) {
            debugPrint("handling src " + matchSrc);
            debugPrint("intersection " + intersection);
          }
          // without refinement, follow the match edge context-insensitively
          if (!refine) {
            p.prop(new VarAndContext(matchSrc, EMPTY_CALLSTACK));
            return;
          }
          AllocAndContextSet allocContexts
              = findContextsForAllocs(new VarAndContext(loadBase, origVarAndContext.context), intersection);
          if (DEBUG) {
            debugPrint("alloc contexts " + allocContexts);
          }
          for (AllocAndContext allocAndContext : allocContexts) {
            if (DEBUG) {
              debugPrint("alloc and context " + allocAndContext);
            }
            CallingContextSet matchSrcContexts;
            if (fieldCheckHeuristic.validFromBothEnds(field)) {
              matchSrcContexts
                  = findUpContextsForVar(allocAndContext, new VarContextAndUp(storeBase, EMPTY_CALLSTACK, EMPTY_CALLSTACK));
            } else {
              matchSrcContexts = findVarContextsFromAlloc(allocAndContext, storeBase);
            }
            for (ImmutableStack<Integer> matchSrcContext : matchSrcContexts) {
              // ret
              // .add(new Pair<AllocNode,
              // ImmutableStack<Integer>>(
              // (AllocNode) n,
              // matchSrcContext));
              // ret.addAll(findContextsForAllocs(matchSrc,
              // matchSrcContext, locs));
              p.prop(new VarAndContext(matchSrc, matchSrcContext));
            }
          }
        }
        @Override
        Object getResult() {
          return ret;
        }
        @Override
        void handleAssignSrc(VarAndContext newVarAndContext, VarAndContext origVarAndContext, AssignEdge assignEdge) {
          p.prop(newVarAndContext);
        }
        @Override
        boolean shouldHandleSrc(VarNode src) {
          // only traverse sources that can actually point to one of the queried allocs
          return realLocs.hasNonEmptyIntersection(src.getP2Set());
        }
      };
      processIncomingEdges(edgeHandler, worklist);
      // update the cache
      if (recursionDepth != -1) {
        // if we're beyond recursion, don't cache anything
        if (contextForAllocsStack.size() > recursionDepth) {
          if (DEBUG) {
            debugPrint("REMOVING " + varAndContext);
            debugPrint(contextForAllocsStack.toString());
          }
          contextsForAllocsCache.remove(varAndContext);
        } else {
          assert contextForAllocsStack.size() == recursionDepth : recursionDepth + " " + contextForAllocsStack;
          recursionDepth = -1;
          if (contextsForAllocsCache.containsKey(varAndContext)) {
            contextsForAllocsCache.get(varAndContext).getO2().addAll(ret);
          } else {
            PointsToSetInternal storedSet = HybridPointsToSet.getFactory().newSet(locs.getType(), pag);
            storedSet.addAll(locs, null);
            contextsForAllocsCache.put(varAndContext, new Pair<PointsToSetInternal, AllocAndContextSet>(storedSet, ret));
          }
        }
      } else {
        if (contextsForAllocsCache.containsKey(varAndContext)) {
          contextsForAllocsCache.get(varAndContext).getO2().addAll(ret);
        } else {
          PointsToSetInternal storedSet = HybridPointsToSet.getFactory().newSet(locs.getType(), pag);
          storedSet.addAll(locs, null);
          contextsForAllocsCache.put(varAndContext, new Pair<PointsToSetInternal, AllocAndContextSet>(storedSet, ret));
        }
      }
      nesting--;
      return ret;
    } catch (CallSiteException e) {
      // call-graph refinement invalidated this query; drop the (possibly stale) cache entry
      contextsForAllocsCache.remove(varAndContext);
      throw e;
    } finally {
      contextForAllocsStack.pop();
    }
  }
  /**
   * Computes the calling contexts under which {@code varContextAndUp} may point to the given
   * allocation site (the "up" direction of the refinement algorithm). Results are cached in
   * {@code upContextCache}; the entry is dropped again if a {@link CallSiteException} aborts the
   * query.
   *
   * @param allocAndContext the allocation site (with context) being targeted
   * @param varContextAndUp the variable together with its context and accumulated up-context
   * @return the calling contexts under which the variable points to the allocation site
   */
  protected CallingContextSet findUpContextsForVar(AllocAndContext allocAndContext, VarContextAndUp varContextAndUp) {
    final AllocNode alloc = allocAndContext.alloc;
    final ImmutableStack<Integer> allocContext = allocAndContext.context;
    CallingContextSet tmpSet = checkUpContextCache(varContextAndUp, allocAndContext);
    if (tmpSet != null) {
      return tmpSet;
    }
    final CallingContextSet ret = new CallingContextSet();
    // install the (still empty) result in the cache up front; it is filled in below
    upContextCache.get(varContextAndUp).put(allocAndContext, ret);
    nesting++;
    if (DEBUG) {
      debugPrint("finding up context for " + varContextAndUp + " to " + alloc + " " + allocContext);
    }
    try {
      final Set<VarAndContext> marked = new HashSet<VarAndContext>();
      final Stack<VarAndContext> worklist = new Stack<VarAndContext>();
      final Propagator<VarAndContext> p = new Propagator<VarAndContext>(marked, worklist);
      p.prop(varContextAndUp);
      // handler invoked by processIncomingEdges for each incoming edge kind
      class UpContextEdgeHandler extends IncomingEdgeHandler {
        @Override
        public void handleAlloc(AllocNode allocNode, VarAndContext origVarAndContext) {
          VarContextAndUp contextAndUp = (VarContextAndUp) origVarAndContext;
          if (allocNode == alloc) {
            // combine the remaining alloc context with the reversed up-context, depending on
            // which of the two contexts is a prefix of the other
            if (allocContext.topMatches(contextAndUp.context)) {
              ImmutableStack<Integer> reverse = contextAndUp.upContext.reverse();
              ImmutableStack<Integer> toAdd = allocContext.popAll(contextAndUp.context).pushAll(reverse);
              if (DEBUG) {
                debugPrint("found up context " + toAdd);
              }
              ret.add(toAdd);
            } else if (contextAndUp.context.topMatches(allocContext)) {
              ImmutableStack<Integer> toAdd = contextAndUp.upContext.reverse();
              if (DEBUG) {
                debugPrint("found up context " + toAdd);
              }
              ret.add(toAdd);
            }
          }
        }
        @Override
        public void handleMatchSrc(VarNode matchSrc, PointsToSetInternal intersection, VarNode loadBase, VarNode storeBase,
            VarAndContext origVarAndContext, SparkField field, boolean refine) {
          VarContextAndUp contextAndUp = (VarContextAndUp) origVarAndContext;
          if (DEBUG) {
            debugPrint("CHECKING " + alloc);
          }
          // check whether the store side can reach the target allocation at all
          PointsToSetInternal tmp = HybridPointsToSet.getFactory().newSet(alloc.getType(), pag);
          tmp.add(alloc);
          AllocAndContextSet allocContexts = findContextsForAllocs(new VarAndContext(matchSrc, EMPTY_CALLSTACK), tmp);
          // Set allocContexts = Collections.singleton(new Object());
          if (!refine) {
            if (!allocContexts.isEmpty()) {
              ret.add(contextAndUp.upContext.reverse());
            }
          } else {
            if (!allocContexts.isEmpty()) {
              for (AllocAndContext t : allocContexts) {
                ImmutableStack<Integer> discoveredAllocContext = t.context;
                if (!allocContext.topMatches(discoveredAllocContext)) {
                  continue;
                }
                ImmutableStack<Integer> trueAllocContext = allocContext.popAll(discoveredAllocContext);
                AllocAndContextSet allocAndContexts
                    = findContextsForAllocs(new VarAndContext(storeBase, trueAllocContext), intersection);
                for (AllocAndContext allocAndContext : allocAndContexts) {
                  // if (DEBUG)
                  // logger.debug("alloc context "
                  // + newAllocContext);
                  // CallingContextSet upContexts;
                  if (fieldCheckHeuristic.validFromBothEnds(field)) {
                    ret.addAll(findUpContextsForVar(allocAndContext,
                        new VarContextAndUp(loadBase, contextAndUp.context, contextAndUp.upContext)));
                  } else {
                    CallingContextSet tmpContexts = findVarContextsFromAlloc(allocAndContext, loadBase);
                    // upContexts = new CallingContextSet();
                    for (ImmutableStack<Integer> tmpContext : tmpContexts) {
                      if (tmpContext.topMatches(contextAndUp.context)) {
                        ImmutableStack<Integer> reverse = contextAndUp.upContext.reverse();
                        ImmutableStack<Integer> toAdd = tmpContext.popAll(contextAndUp.context).pushAll(reverse);
                        ret.add(toAdd);
                      }
                    }
                  }
                }
              }
            }
          }
        }
        @Override
        Object getResult() {
          return ret;
        }
        @Override
        void handleAssignSrc(VarAndContext newVarAndContext, VarAndContext origVarAndContext, AssignEdge assignEdge) {
          VarContextAndUp contextAndUp = (VarContextAndUp) origVarAndContext;
          ImmutableStack<Integer> upContext = contextAndUp.upContext;
          ImmutableStack<Integer> newUpContext = upContext;
          // once the regular context is exhausted, parameter edges grow the up-context instead
          // (bounded by the maximum stack size)
          if (assignEdge.isParamEdge() && contextAndUp.context.isEmpty()) {
            if (upContext.size() < ImmutableStack.getMaxSize()) {
              newUpContext = pushWithRecursionCheck(upContext, assignEdge);
            }
            ;
          }
          p.prop(new VarContextAndUp(newVarAndContext.var, newVarAndContext.context, newUpContext));
        }
        @Override
        boolean shouldHandleSrc(VarNode src) {
          if (src instanceof GlobalVarNode) {
            // TODO properly handle case of global here; rare
            // but possible
            // reachedGlobal = true;
            // // for now, just give up
            throw new TerminateEarlyException();
          }
          return src.getP2Set().contains(alloc);
        }
      }
      ;
      UpContextEdgeHandler edgeHandler = new UpContextEdgeHandler();
      processIncomingEdges(edgeHandler, worklist);
      nesting--;
      // if (edgeHandler.reachedGlobal) {
      // return fixAllUpContexts(ret, allocAndContext, varContextAndUp);
      // } else {
      // assert upContextsSane(ret, allocAndContext, varContextAndUp);
      // return ret;
      // }
      return ret;
    } catch (CallSiteException e) {
      // call-graph refinement invalidated this query; drop the cached (incomplete) entry
      upContextCache.remove(varContextAndUp);
      throw e;
    }
  }
  /**
   * Traverses forward from an allocation site (with context) and collects the calling contexts
   * under which {@code targetVar} may point to it. Results are cached in
   * {@code allocAndContextCache}; the entry is dropped if a {@link CallSiteException} aborts the
   * query.
   *
   * @param allocAndContext the allocation site and its calling context
   * @param targetVar the variable whose contexts are sought
   * @return the calling contexts in which {@code targetVar} was reached from the allocation
   */
  protected CallingContextSet findVarContextsFromAlloc(AllocAndContext allocAndContext, VarNode targetVar) {
    CallingContextSet tmpSet = checkAllocAndContextCache(allocAndContext, targetVar);
    if (tmpSet != null) {
      return tmpSet;
    }
    CallingContextSet ret = new CallingContextSet();
    // install the (still empty) result in the cache up front; it is filled in below
    allocAndContextCache.get(allocAndContext).put(targetVar, ret);
    try {
      HashSet<VarAndContext> marked = new HashSet<VarAndContext>();
      Stack<VarAndContext> worklist = new Stack<VarAndContext>();
      Propagator<VarAndContext> p = new Propagator<VarAndContext>(marked, worklist);
      AllocNode alloc = allocAndContext.alloc;
      ImmutableStack<Integer> allocContext = allocAndContext.context;
      // seed the worklist with every variable directly assigned from the allocation
      Node[] newBarNodes = pag.allocLookup(alloc);
      for (int i = 0; i < newBarNodes.length; i++) {
        VarNode v = (VarNode) newBarNodes[i];
        p.prop(new VarAndContext(v, allocContext));
      }
      while (!worklist.isEmpty()) {
        incrementNodesTraversed();
        VarAndContext curVarAndContext = worklist.pop();
        if (DEBUG) {
          debugPrint("looking at " + curVarAndContext);
        }
        VarNode curVar = curVarAndContext.var;
        ImmutableStack<Integer> curContext = curVarAndContext.context;
        if (curVar == targetVar) {
          ret.add(curContext);
        }
        // assign
        Collection<AssignEdge> assignEdges = filterAssigns(curVar, curContext, false, true);
        for (AssignEdge assignEdge : assignEdges) {
          VarNode dst = assignEdge.getDst();
          ImmutableStack<Integer> newContext = curContext;
          if (assignEdge.isReturnEdge()) {
            // leaving a callee: pop the call site (or all recursive sites within an SCC)
            if (!curContext.isEmpty()) {
              if (!callEdgeInSCC(assignEdge)) {
                assert assignEdge.getCallSite().equals(curContext.peek()) : assignEdge + " " + curContext;
                newContext = curContext.pop();
              } else {
                newContext = popRecursiveCallSites(curContext);
              }
            }
          } else if (assignEdge.isParamEdge()) {
            if (DEBUG) {
              debugPrint("entering call site " + assignEdge.getCallSite());
            }
            // if (!isRecursive(curContext, assignEdge)) {
            // newContext = curContext.push(assignEdge
            // .getCallSite());
            // }
            newContext = pushWithRecursionCheck(curContext, assignEdge);
          }
          // with an empty context, a return edge through a virtual call must still pass the
          // refined call graph
          if (assignEdge.isReturnEdge() && curContext.isEmpty() && csInfo.isVirtCall(assignEdge.getCallSite())) {
            Set<SootMethod> targets = refineCallSite(assignEdge.getCallSite(), newContext);
            if (!targets.contains(((LocalVarNode) assignEdge.getDst()).getMethod())) {
              continue;
            }
          }
          // globals are context-insensitive
          if (dst instanceof GlobalVarNode) {
            newContext = EMPTY_CALLSTACK;
          }
          p.prop(new VarAndContext(dst, newContext));
        }
        // putfield_bars
        Set<VarNode> matchTargets = vMatches.vMatchLookup(curVar);
        Node[] pfTargets = pag.storeLookup(curVar);
        for (int i = 0; i < pfTargets.length; i++) {
          FieldRefNode frNode = (FieldRefNode) pfTargets[i];
          final VarNode storeBase = frNode.getBase();
          SparkField field = frNode.getField();
          // Pair<VarNode, FieldRefNode> putfield = new Pair<VarNode,
          // FieldRefNode>(curVar, frNode);
          for (Pair<VarNode, VarNode> load : fieldToLoads.get(field)) {
            final VarNode loadBase = load.getO2();
            final PointsToSetInternal loadBaseP2Set = loadBase.getP2Set();
            final PointsToSetInternal storeBaseP2Set = storeBase.getP2Set();
            final VarNode matchTgt = load.getO1();
            if (matchTargets.contains(matchTgt)) {
              if (DEBUG) {
                debugPrint("match source " + matchTgt);
              }
              PointsToSetInternal intersection = SootUtil.constructIntersection(storeBaseP2Set, loadBaseP2Set, pag);
              boolean checkField = fieldCheckHeuristic.validateMatchesForField(field);
              if (checkField) {
                // refine the match: only follow it under contexts where store and load base
                // actually share an object
                AllocAndContextSet sharedAllocContexts
                    = findContextsForAllocs(new VarAndContext(storeBase, curContext), intersection);
                for (AllocAndContext curAllocAndContext : sharedAllocContexts) {
                  CallingContextSet upContexts;
                  if (fieldCheckHeuristic.validFromBothEnds(field)) {
                    upContexts = findUpContextsForVar(curAllocAndContext,
                        new VarContextAndUp(loadBase, EMPTY_CALLSTACK, EMPTY_CALLSTACK));
                  } else {
                    upContexts = findVarContextsFromAlloc(curAllocAndContext, loadBase);
                  }
                  for (ImmutableStack<Integer> upContext : upContexts) {
                    p.prop(new VarAndContext(matchTgt, upContext));
                  }
                }
              } else {
                p.prop(new VarAndContext(matchTgt, EMPTY_CALLSTACK));
              }
              // h.handleMatchSrc(matchSrc, intersection,
              // storeBase,
              // loadBase, varAndContext, checkGetfield);
              // if (h.terminate())
              // return;
            }
          }
        }
      }
      return ret;
    } catch (CallSiteException e) {
      // call-graph refinement invalidated this query; drop the cached (incomplete) entry
      allocAndContextCache.remove(allocAndContext);
      throw e;
    }
  }
@SuppressWarnings("unchecked")
protected Set<SootMethod> getCallTargets(PointsToSetInternal p2Set, SootMethod callee, Type receiverType,
Set<SootMethod> possibleTargets) {
List<Object> args = Arrays.asList(p2Set, callee, receiverType, possibleTargets);
if (callTargetsArgCache.containsKey(args)) {
return callTargetsArgCache.get(args);
}
Set<Type> types = p2Set.possibleTypes();
Set<SootMethod> ret = new HashSet<SootMethod>();
for (Type type : types) {
ret.addAll(getCallTargetsForType(type, callee, receiverType, possibleTargets));
}
callTargetsArgCache.put(args, ret);
return ret;
}
protected Set<SootMethod> getCallTargetsForType(Type type, SootMethod callee, Type receiverType,
Set<SootMethod> possibleTargets) {
if (!pag.getTypeManager().castNeverFails(type, receiverType)) {
return Collections.<SootMethod>emptySet();
}
if (type instanceof AnySubType) {
AnySubType any = (AnySubType) type;
RefType refType = any.getBase();
if (pag.getTypeManager().getFastHierarchy().canStoreType(receiverType, refType)
|| pag.getTypeManager().getFastHierarchy().canStoreType(refType, receiverType)) {
return possibleTargets;
} else {
return Collections.<SootMethod>emptySet();
}
}
if (type instanceof ArrayType) {
// we'll invoke the java.lang.Object method in this
// case
// Assert.chk(varNodeType.toString().equals("java.lang.Object"));
type = Scene.v().getSootClass("java.lang.Object").getType();
}
RefType refType = (RefType) type;
SootMethod targetMethod = null;
targetMethod = VirtualCalls.v().resolveNonSpecial(refType, callee.makeRef());
return Collections.<SootMethod>singleton(targetMethod);
}
  /**
   * Performs one forward flows-to traversal from the given allocation site (with context),
   * returning every variable node the allocation may flow to. Mirrors the traversal in
   * {@code findVarContextsFromAlloc}, but collects all reached variables instead of contexts for
   * one target.
   *
   * @param allocAndContext the allocation site and its calling context
   * @return the set of variables the allocation may flow to
   * @throws TerminateEarlyException if the node budget is exceeded
   */
  protected Set<VarNode> getFlowsToHelper(AllocAndContext allocAndContext) {
    Set<VarNode> ret = new ArraySet<VarNode>();
    try {
      HashSet<VarAndContext> marked = new HashSet<VarAndContext>();
      Stack<VarAndContext> worklist = new Stack<VarAndContext>();
      Propagator<VarAndContext> p = new Propagator<VarAndContext>(marked, worklist);
      AllocNode alloc = allocAndContext.alloc;
      ImmutableStack<Integer> allocContext = allocAndContext.context;
      // seed the worklist with every variable directly assigned from the allocation
      Node[] newBarNodes = pag.allocLookup(alloc);
      for (int i = 0; i < newBarNodes.length; i++) {
        VarNode v = (VarNode) newBarNodes[i];
        ret.add(v);
        p.prop(new VarAndContext(v, allocContext));
      }
      while (!worklist.isEmpty()) {
        incrementNodesTraversed();
        VarAndContext curVarAndContext = worklist.pop();
        if (DEBUG) {
          debugPrint("looking at " + curVarAndContext);
        }
        VarNode curVar = curVarAndContext.var;
        ImmutableStack<Integer> curContext = curVarAndContext.context;
        ret.add(curVar);
        // assign
        Collection<AssignEdge> assignEdges = filterAssigns(curVar, curContext, false, true);
        for (AssignEdge assignEdge : assignEdges) {
          VarNode dst = assignEdge.getDst();
          ImmutableStack<Integer> newContext = curContext;
          if (assignEdge.isReturnEdge()) {
            // leaving a callee: pop the call site (or all recursive sites within an SCC)
            if (!curContext.isEmpty()) {
              if (!callEdgeInSCC(assignEdge)) {
                assert assignEdge.getCallSite().equals(curContext.peek()) : assignEdge + " " + curContext;
                newContext = curContext.pop();
              } else {
                newContext = popRecursiveCallSites(curContext);
              }
            }
          } else if (assignEdge.isParamEdge()) {
            if (DEBUG) {
              debugPrint("entering call site " + assignEdge.getCallSite());
            }
            // if (!isRecursive(curContext, assignEdge)) {
            // newContext = curContext.push(assignEdge
            // .getCallSite());
            // }
            newContext = pushWithRecursionCheck(curContext, assignEdge);
          }
          // with an empty context, a return edge through a virtual call must still pass the
          // refined call graph
          if (assignEdge.isReturnEdge() && curContext.isEmpty() && csInfo.isVirtCall(assignEdge.getCallSite())) {
            Set<SootMethod> targets = refineCallSite(assignEdge.getCallSite(), newContext);
            if (!targets.contains(((LocalVarNode) assignEdge.getDst()).getMethod())) {
              continue;
            }
          }
          // globals are context-insensitive
          if (dst instanceof GlobalVarNode) {
            newContext = EMPTY_CALLSTACK;
          }
          p.prop(new VarAndContext(dst, newContext));
        }
        // putfield_bars
        Set<VarNode> matchTargets = vMatches.vMatchLookup(curVar);
        Node[] pfTargets = pag.storeLookup(curVar);
        for (int i = 0; i < pfTargets.length; i++) {
          FieldRefNode frNode = (FieldRefNode) pfTargets[i];
          final VarNode storeBase = frNode.getBase();
          SparkField field = frNode.getField();
          // Pair<VarNode, FieldRefNode> putfield = new Pair<VarNode,
          // FieldRefNode>(curVar, frNode);
          for (Pair<VarNode, VarNode> load : fieldToLoads.get(field)) {
            final VarNode loadBase = load.getO2();
            final PointsToSetInternal loadBaseP2Set = loadBase.getP2Set();
            final PointsToSetInternal storeBaseP2Set = storeBase.getP2Set();
            final VarNode matchTgt = load.getO1();
            if (matchTargets.contains(matchTgt)) {
              if (DEBUG) {
                debugPrint("match source " + matchTgt);
              }
              PointsToSetInternal intersection = SootUtil.constructIntersection(storeBaseP2Set, loadBaseP2Set, pag);
              boolean checkField = fieldCheckHeuristic.validateMatchesForField(field);
              if (checkField) {
                // refine the match: only follow it under contexts where store and load base
                // actually share an object
                AllocAndContextSet sharedAllocContexts
                    = findContextsForAllocs(new VarAndContext(storeBase, curContext), intersection);
                for (AllocAndContext curAllocAndContext : sharedAllocContexts) {
                  CallingContextSet upContexts;
                  if (fieldCheckHeuristic.validFromBothEnds(field)) {
                    upContexts = findUpContextsForVar(curAllocAndContext,
                        new VarContextAndUp(loadBase, EMPTY_CALLSTACK, EMPTY_CALLSTACK));
                  } else {
                    upContexts = findVarContextsFromAlloc(curAllocAndContext, loadBase);
                  }
                  for (ImmutableStack<Integer> upContext : upContexts) {
                    p.prop(new VarAndContext(matchTgt, upContext));
                  }
                }
              } else {
                p.prop(new VarAndContext(matchTgt, EMPTY_CALLSTACK));
              }
              // h.handleMatchSrc(matchSrc, intersection,
              // storeBase,
              // loadBase, varAndContext, checkGetfield);
              // if (h.terminate())
              // return;
            }
          }
        }
      }
      return ret;
    } catch (CallSiteException e) {
      // call-graph refinement invalidated this query; drop the cached (incomplete) entry
      allocAndContextCache.remove(allocAndContext);
      throw e;
    }
  }
  /**
   * Returns the maximum number of refinement passes to attempt per query.
   */
  protected int getMaxPasses() {
    return maxPasses;
  }
protected void incrementNodesTraversed() {
numNodesTraversed++;
if (numNodesTraversed > maxNodesPerPass) {
throw new TerminateEarlyException();
}
}
  /**
   * Tests whether following {@code assignEdge} from {@code context} constitutes recursion. As a
   * side effect, when recursion is detected through a call site repeated in the context, all
   * invoking methods from that call site onward are collapsed into one SCC via {@code sccManager}.
   *
   * @return true if the edge is recursive (already within an SCC, or its call site occurs in the
   *         context)
   */
  @SuppressWarnings("unused")
  protected boolean isRecursive(ImmutableStack<Integer> context, AssignEdge assignEdge) {
    boolean sameSCCAlready = callEdgeInSCC(assignEdge);
    if (sameSCCAlready) {
      return true;
    }
    Integer callSite = assignEdge.getCallSite();
    if (context.contains(callSite)) {
      // found a cycle: collapse every method invoked from the repeated call site onwards
      Set<SootMethod> toBeCollapsed = new ArraySet<SootMethod>();
      int callSiteInd = 0;
      // advance to the first occurrence of the repeated call site in the context
      for (; callSiteInd < context.size() && !context.get(callSiteInd).equals(callSite); callSiteInd++) {
        ;
      }
      for (; callSiteInd < context.size(); callSiteInd++) {
        toBeCollapsed.add(csInfo.getInvokingMethod(context.get(callSiteInd)));
      }
      sccManager.makeSameSCC(toBeCollapsed);
      return true;
    }
    return false;
  }
protected boolean isRecursiveCallSite(Integer callSite) {
SootMethod invokingMethod = csInfo.getInvokingMethod(callSite);
SootMethod invokedMethod = csInfo.getInvokedMethod(callSite);
return sccManager.inSameSCC(invokingMethod, invokedMethod);
}
@SuppressWarnings("unused")
protected Set<VarNode> nodesPropagatedThrough(final VarNode source, final PointsToSetInternal allocs) {
final Set<VarNode> marked = new HashSet<VarNode>();
final Stack<VarNode> worklist = new Stack<VarNode>();
Propagator<VarNode> p = new Propagator<VarNode>(marked, worklist);
p.prop(source);
while (!worklist.isEmpty()) {
VarNode curNode = worklist.pop();
Node[] assignSources = pag.simpleInvLookup(curNode);
for (int i = 0; i < assignSources.length; i++) {
VarNode assignSrc = (VarNode) assignSources[i];
if (assignSrc.getP2Set().hasNonEmptyIntersection(allocs)) {
p.prop(assignSrc);
}
}
Set<VarNode> matchSources = vMatches.vMatchInvLookup(curNode);
for (VarNode matchSrc : matchSources) {
if (matchSrc.getP2Set().hasNonEmptyIntersection(allocs)) {
p.prop(matchSrc);
}
}
}
return marked;
}
protected ImmutableStack<Integer> popRecursiveCallSites(ImmutableStack<Integer> context) {
ImmutableStack<Integer> ret = context;
while (!ret.isEmpty() && isRecursiveCallSite(ret.peek())) {
ret = ret.pop();
}
return ret;
}
  /**
   * Drives a backwards worklist traversal over the pointer-assignment graph, dispatching each kind
   * of incoming edge (allocation, assignment/call, field-match) of every worklist entry to the
   * given handler until the worklist empties or the handler signals termination.
   *
   * @param h callbacks invoked per edge; also decides which sources are worth following
   * @param worklist pre-seeded worklist of variable/context pairs to process
   * @throws TerminateEarlyException if the node budget is exceeded
   */
  protected void processIncomingEdges(IncomingEdgeHandler h, Stack<VarAndContext> worklist) {
    while (!worklist.isEmpty()) {
      incrementNodesTraversed();
      VarAndContext varAndContext = worklist.pop();
      if (DEBUG) {
        debugPrint("looking at " + varAndContext);
      }
      VarNode v = varAndContext.var;
      ImmutableStack<Integer> callingContext = varAndContext.context;
      // incoming allocation edges
      Node[] newEdges = pag.allocInvLookup(v);
      for (int i = 0; i < newEdges.length; i++) {
        AllocNode allocNode = (AllocNode) newEdges[i];
        h.handleAlloc(allocNode, varAndContext);
        if (h.terminate()) {
          return;
        }
      }
      // incoming assignment edges (intra-procedural and call parameter/return)
      Collection<AssignEdge> assigns = filterAssigns(v, callingContext, true, true);
      for (AssignEdge assignEdge : assigns) {
        VarNode src = assignEdge.getSrc();
        // if (DEBUG) {
        // logger.debug("assign src " + src);
        // }
        if (h.shouldHandleSrc(src)) {
          ImmutableStack<Integer> newContext = callingContext;
          if (assignEdge.isParamEdge()) {
            // going backwards, a param edge leaves the callee: pop the call site
            // (or all recursive sites within an SCC)
            if (!callingContext.isEmpty()) {
              if (!callEdgeInSCC(assignEdge)) {
                assert assignEdge.getCallSite().equals(callingContext.peek()) : assignEdge + " " + callingContext;
                newContext = callingContext.pop();
              } else {
                newContext = popRecursiveCallSites(callingContext);
              }
            }
            // } else if (refiningCallSite) {
            // if (!fieldCheckHeuristic.aggressiveVirtCallRefine())
            // {
            // // throw new CallSiteException();
            // }
            // }
          } else if (assignEdge.isReturnEdge()) {
            if (DEBUG) {
              debugPrint("entering call site " + assignEdge.getCallSite());
            }
            // if (!isRecursive(callingContext, assignEdge)) {
            // newContext = callingContext.push(assignEdge
            // .getCallSite());
            // }
            newContext = pushWithRecursionCheck(callingContext, assignEdge);
          }
          if (assignEdge.isParamEdge()) {
            Integer callSite = assignEdge.getCallSite();
            // a virtual call must still pass the refined call graph
            if (csInfo.isVirtCall(callSite) && !weirdCall(callSite)) {
              Set<SootMethod> targets = refineCallSite(callSite, newContext);
              if (DEBUG) {
                debugPrint(targets.toString());
              }
              SootMethod targetMethod = ((LocalVarNode) assignEdge.getDst()).getMethod();
              if (!targets.contains(targetMethod)) {
                if (DEBUG) {
                  debugPrint("skipping call because of call graph");
                }
                continue;
              }
            }
          }
          // globals are context-insensitive
          if (src instanceof GlobalVarNode) {
            newContext = EMPTY_CALLSTACK;
          }
          h.handleAssignSrc(new VarAndContext(src, newContext), varAndContext, assignEdge);
          if (h.terminate()) {
            return;
          }
        }
      }
      // incoming field-load edges: pair each load with the stores on the same field whose base
      // may alias the load's base
      Set<VarNode> matchSources = vMatches.vMatchInvLookup(v);
      Node[] loads = pag.loadInvLookup(v);
      for (int i = 0; i < loads.length; i++) {
        FieldRefNode frNode = (FieldRefNode) loads[i];
        final VarNode loadBase = frNode.getBase();
        SparkField field = frNode.getField();
        // Pair<VarNode, FieldRefNode> getfield = new Pair<VarNode,
        // FieldRefNode>(v, frNode);
        for (Pair<VarNode, VarNode> store : fieldToStores.get(field)) {
          final VarNode storeBase = store.getO2();
          final PointsToSetInternal storeBaseP2Set = storeBase.getP2Set();
          final PointsToSetInternal loadBaseP2Set = loadBase.getP2Set();
          final VarNode matchSrc = store.getO1();
          if (matchSources.contains(matchSrc)) {
            if (h.shouldHandleSrc(matchSrc)) {
              if (DEBUG) {
                debugPrint("match source " + matchSrc);
              }
              PointsToSetInternal intersection = SootUtil.constructIntersection(storeBaseP2Set, loadBaseP2Set, pag);
              boolean checkGetfield = fieldCheckHeuristic.validateMatchesForField(field);
              h.handleMatchSrc(matchSrc, intersection, loadBase, storeBase, varAndContext, field, checkGetfield);
              if (h.terminate()) {
                return;
              }
            }
          }
        }
      }
    }
  }
  /**
   * Pushes {@code assignEdge}'s call site onto the context, unless doing so would create recursion:
   * if the edge lies in a call-graph SCC, or its call site already occurs in the context, recursive
   * call sites are popped instead of pushing.
   *
   * <p>
   * Note: when fresh recursion is detected through the context, the method currently gives up via
   * {@link TerminateEarlyException} (see the deliberate {@code if (true) throw} below); the SCC
   * collapsing code after it is unreachable and kept as a TODO.
   *
   * @return the new calling context
   */
  protected ImmutableStack<Integer> pushWithRecursionCheck(ImmutableStack<Integer> context, AssignEdge assignEdge) {
    boolean foundRecursion = callEdgeInSCC(assignEdge);
    if (!foundRecursion) {
      Integer callSite = assignEdge.getCallSite();
      if (context.contains(callSite)) {
        foundRecursion = true;
        if (DEBUG) {
          debugPrint("RECURSION!!!");
        }
        // TODO properly collapse recursive methods
        if (true) {
          throw new TerminateEarlyException();
        }
        Set<SootMethod> toBeCollapsed = new ArraySet<SootMethod>();
        int callSiteInd = 0;
        // advance to the first occurrence of the repeated call site in the context
        for (; callSiteInd < context.size() && !context.get(callSiteInd).equals(callSite); callSiteInd++) {
          ;
        }
        // int numToPop = 0;
        for (; callSiteInd < context.size(); callSiteInd++) {
          toBeCollapsed.add(csInfo.getInvokingMethod(context.get(callSiteInd)));
          // numToPop++;
        }
        sccManager.makeSameSCC(toBeCollapsed);
        // ImmutableStack<Integer> poppedContext = context;
        // for (int i = 0; i < numToPop; i++) {
        // poppedContext = poppedContext.pop();
        // }
        // if (DEBUG) {
        // debugPrint("new stack " + poppedContext);
        // }
        // return poppedContext;
      }
    }
    if (foundRecursion) {
      ImmutableStack<Integer> popped = popRecursiveCallSites(context);
      if (DEBUG) {
        debugPrint("popped stack " + popped);
      }
      return popped;
    } else {
      return context.push(assignEdge.getCallSite());
    }
  }
protected boolean refineAlias(VarNode v1, VarNode v2, PointsToSetInternal intersection, HeuristicType heuristic) {
if (refineAliasInternal(v1, v2, intersection, heuristic)) {
return true;
}
if (refineAliasInternal(v2, v1, intersection, heuristic)) {
return true;
}
return false;
}
  /**
   * One-directional alias refinement: runs up to {@code maxPasses} refinement passes, each trying
   * to show that no object in {@code intersection} can flow to both {@code v1} and {@code v2}.
   *
   * @return true if some pass proved the intersection empty; false if all passes failed or the
   *         pass budget was exhausted
   */
  protected boolean refineAliasInternal(VarNode v1, VarNode v2, PointsToSetInternal intersection, HeuristicType heuristic) {
    this.fieldCheckHeuristic = HeuristicType.getHeuristic(heuristic, pag.getTypeManager(), getMaxPasses());
    numPasses = 0;
    while (true) {
      numPasses++;
      if (DEBUG_PASS != -1 && numPasses > DEBUG_PASS) {
        return false;
      }
      if (numPasses > maxPasses) {
        return false;
      }
      if (DEBUG) {
        logger.debug("PASS " + numPasses);
        logger.debug("" + fieldCheckHeuristic);
      }
      clearState();
      boolean success = false;
      try {
        // find the contexts in which v1 may point into the intersection ...
        AllocAndContextSet allocAndContexts = findContextsForAllocs(new VarAndContext(v1, EMPTY_CALLSTACK), intersection);
        boolean emptyIntersection = true;
        // ... and check whether any of those objects can also reach v2
        for (AllocAndContext allocAndContext : allocAndContexts) {
          CallingContextSet upContexts
              = findUpContextsForVar(allocAndContext, new VarContextAndUp(v2, EMPTY_CALLSTACK, EMPTY_CALLSTACK));
          if (!upContexts.isEmpty()) {
            emptyIntersection = false;
            break;
          }
        }
        success = emptyIntersection;
      } catch (TerminateEarlyException e) {
        // pass budget exceeded or recursion too deep; try again with the next heuristic pass
        success = false;
      }
      if (success) {
        logger.debug("took " + numPasses + " passes");
        return true;
      } else {
        if (!fieldCheckHeuristic.runNewPass()) {
          return false;
        }
      }
    }
  }
  /**
   * On-demand refinement of a virtual call site: starting from the receiver
   * variable, traverses backwards over assignment, call and field-match edges to
   * narrow the set of target methods the call can dispatch to under
   * {@code origContext}. Results are memoized in {@code callSiteToResolvedTargets}
   * once a site/context pair has been fully processed.
   *
   * @param callSite
   *          id of the virtual call site being refined
   * @param origContext
   *          calling context under which the site is queried
   * @return the refined (possibly smaller) set of dispatch targets
   */
  protected Set<SootMethod> refineCallSite(Integer callSite, ImmutableStack<Integer> origContext) {
    CallSiteAndContext callSiteAndContext = new CallSiteAndContext(callSite, origContext);
    // already fully resolved earlier: reuse the memoized answer
    if (queriedCallSites.contains(callSiteAndContext)) {
      // if (DEBUG_VIRT) {
      // final SootMethod invokedMethod =
      // csInfo.getInvokedMethod(callSite);
      // final VarNode receiver =
      // csInfo.getReceiverForVirtCallSite(callSite);
      // debugPrint("call of " + invokedMethod + " on " + receiver + " "
      // + origContext + " goes to "
      // + callSiteToResolvedTargets.get(callSiteAndContext));
      // }
      return callSiteToResolvedTargets.get(callSiteAndContext);
    }
    // cycle in the refinement recursion: return nothing rather than loop forever
    if (callGraphStack.contains(callSiteAndContext)) {
      return Collections.<SootMethod>emptySet();
    } else {
      callGraphStack.push(callSiteAndContext);
    }
    final VarNode receiver = csInfo.getReceiverForVirtCallSite(callSite);
    final Type receiverType = receiver.getType();
    final SootMethod invokedMethod = csInfo.getInvokedMethod(callSite);
    final Set<SootMethod> allTargets = csInfo.getCallSiteTargets(callSite);
    // call-graph refinement disabled: fall back to the full target set
    if (!refineCallGraph) {
      callGraphStack.pop();
      return allTargets;
    }
    if (DEBUG_VIRT) {
      debugPrint("refining call to " + invokedMethod + " on " + receiver + " " + origContext);
    }
    final HashSet<VarAndContext> marked = new HashSet<VarAndContext>();
    final Stack<VarAndContext> worklist = new Stack<VarAndContext>();
    // local helper: push onto the worklist only the first time a pair is seen
    final class Helper {
      void prop(VarAndContext varAndContext) {
        if (marked.add(varAndContext)) {
          worklist.push(varAndContext);
        }
      }
    }
    ;
    final Helper h = new Helper();
    h.prop(new VarAndContext(receiver, origContext));
    // backwards worklist traversal from the receiver
    while (!worklist.isEmpty()) {
      incrementNodesTraversed();
      VarAndContext curVarAndContext = worklist.pop();
      if (DEBUG_VIRT) {
        debugPrint("virt looking at " + curVarAndContext);
      }
      VarNode curVar = curVarAndContext.var;
      ImmutableStack<Integer> curContext = curVarAndContext.context;
      // Set<SootMethod> curVarTargets = getCallTargets(curVar.getP2Set(),
      // methodSig, receiverType, allTargets);
      // if (curVarTargets.size() <= 1) {
      // for (SootMethod method : curVarTargets) {
      // callSiteToResolvedTargets.put(callSiteAndContext, method);
      // }
      // continue;
      // }
      // allocation sites flowing directly into curVar determine concrete targets
      Node[] newNodes = pag.allocInvLookup(curVar);
      for (int i = 0; i < newNodes.length; i++) {
        AllocNode allocNode = (AllocNode) newNodes[i];
        for (SootMethod method : getCallTargetsForType(allocNode.getType(), invokedMethod, receiverType, allTargets)) {
          callSiteToResolvedTargets.put(callSiteAndContext, method);
        }
      }
      // follow incoming assignment edges, adjusting the calling context
      Collection<AssignEdge> assigns = filterAssigns(curVar, curContext, true, true);
      for (AssignEdge assignEdge : assigns) {
        VarNode src = assignEdge.getSrc();
        ImmutableStack<Integer> newContext = curContext;
        if (assignEdge.isParamEdge()) {
          if (!curContext.isEmpty()) {
            if (!callEdgeInSCC(assignEdge)) {
              assert assignEdge.getCallSite().equals(curContext.peek());
              newContext = curContext.pop();
            } else {
              // recursive cycle: pop all call sites belonging to the SCC
              newContext = popRecursiveCallSites(curContext);
            }
          } else {
            // no context left to match the parameter edge: give up on this path
            // and conservatively admit all targets
            callSiteToResolvedTargets.putAll(callSiteAndContext, allTargets);
            // if (DEBUG) {
            // debugPrint("giving up on virt");
            // }
            continue;
          }
        } else if (assignEdge.isReturnEdge()) {
          // if (DEBUG)
          // logger.debug("entering call site "
          // + assignEdge.getCallSite());
          // if (!isRecursive(curContext, assignEdge)) {
          // newContext = curContext.push(assignEdge.getCallSite());
          // }
          newContext = pushWithRecursionCheck(curContext, assignEdge);
        } else if (src instanceof GlobalVarNode) {
          // globals are context-insensitive
          newContext = EMPTY_CALLSTACK;
        }
        h.prop(new VarAndContext(src, newContext));
      }
      // TODO respect heuristic
      // field loads: pair each load on curVar with the stores to the same field
      Set<VarNode> matchSources = vMatches.vMatchInvLookup(curVar);
      final boolean oneMatch = matchSources.size() == 1;
      Node[] loads = pag.loadInvLookup(curVar);
      for (int i = 0; i < loads.length; i++) {
        FieldRefNode frNode = (FieldRefNode) loads[i];
        final VarNode loadBase = frNode.getBase();
        SparkField field = frNode.getField();
        for (Pair<VarNode, VarNode> store : fieldToStores.get(field)) {
          final VarNode storeBase = store.getO2();
          final PointsToSetInternal storeBaseP2Set = storeBase.getP2Set();
          final PointsToSetInternal loadBaseP2Set = loadBase.getP2Set();
          final VarNode matchSrc = store.getO1();
          if (matchSources.contains(matchSrc)) {
            // optimize for common case of constructor init
            boolean skipMatch = false;
            if (oneMatch) {
              PointsToSetInternal matchSrcPTo = matchSrc.getP2Set();
              Set<SootMethod> matchSrcCallTargets = getCallTargets(matchSrcPTo, invokedMethod, receiverType, allTargets);
              if (matchSrcCallTargets.size() <= 1) {
                // the single match already pins down the targets; no refinement needed
                skipMatch = true;
                for (SootMethod method : matchSrcCallTargets) {
                  callSiteToResolvedTargets.put(callSiteAndContext, method);
                }
              }
            }
            if (!skipMatch) {
              final PointsToSetInternal intersection = SootUtil.constructIntersection(storeBaseP2Set, loadBaseP2Set, pag);
              AllocAndContextSet allocContexts = null;
              // save/restore analysis state around the nested query
              boolean oldRefining = refiningCallSite;
              int oldNesting = nesting;
              try {
                refiningCallSite = true;
                allocContexts = findContextsForAllocs(new VarAndContext(loadBase, curContext), intersection);
              } catch (CallSiteException e) {
                // nested refinement failed: conservatively admit all targets
                callSiteToResolvedTargets.putAll(callSiteAndContext, allTargets);
                continue;
              } finally {
                refiningCallSite = oldRefining;
                nesting = oldNesting;
              }
              for (AllocAndContext allocAndContext : allocContexts) {
                CallingContextSet matchSrcContexts;
                if (fieldCheckHeuristic.validFromBothEnds(field)) {
                  matchSrcContexts = findUpContextsForVar(allocAndContext,
                      new VarContextAndUp(storeBase, EMPTY_CALLSTACK, EMPTY_CALLSTACK));
                } else {
                  matchSrcContexts = findVarContextsFromAlloc(allocAndContext, storeBase);
                }
                for (ImmutableStack<Integer> matchSrcContext : matchSrcContexts) {
                  VarAndContext newVarAndContext = new VarAndContext(matchSrc, matchSrcContext);
                  h.prop(newVarAndContext);
                }
              }
            }
          }
        }
      }
    }
    if (DEBUG_VIRT) {
      debugPrint("call of " + invokedMethod + " on " + receiver + " " + origContext + " goes to "
          + callSiteToResolvedTargets.get(callSiteAndContext));
    }
    callGraphStack.pop();
    // mark this site/context as fully resolved so future queries hit the memo
    queriedCallSites.add(callSiteAndContext);
    return callSiteToResolvedTargets.get(callSiteAndContext);
  }
  /**
   * Core worklist traversal for points-to refinement: propagates backwards from
   * {@code varAndContext} over incoming edges and fails as soon as one of the
   * allocation sites in {@code badLocs} is reached. When {@code doPointsTo} is
   * set (and {@code pointsTo} is non-null), reached alloc/context pairs are
   * collected into {@code pointsTo} instead of being checked against badLocs.
   *
   * @param varAndContext
   *          the variable/context pair to refine
   * @param badLocs
   *          allocation sites that must not be reachable for success
   * @return true if none of {@code badLocs} was reached
   */
  protected boolean refineP2Set(VarAndContext varAndContext, final PointsToSetInternal badLocs) {
    nesting++;
    if (DEBUG) {
      debugPrint("refining " + varAndContext);
    }
    final Set<VarAndContext> marked = new HashSet<VarAndContext>();
    final Stack<VarAndContext> worklist = new Stack<VarAndContext>();
    final Propagator<VarAndContext> p = new Propagator<VarAndContext>(marked, worklist);
    p.prop(varAndContext);
    // the handler carries the success flag; processIncomingEdges drives it
    IncomingEdgeHandler edgeHandler = new IncomingEdgeHandler() {
      boolean success = true;
      @Override
      public void handleAlloc(AllocNode allocNode, VarAndContext origVarAndContext) {
        if (doPointsTo && pointsTo != null) {
          // points-to mode: record the allocation with its context
          pointsTo.add(new AllocAndContext(allocNode, origVarAndContext.context));
        } else {
          // refinement mode: reaching a bad location means failure
          if (badLocs.contains(allocNode)) {
            success = false;
          }
        }
      }
      @Override
      public void handleMatchSrc(VarNode matchSrc, PointsToSetInternal intersection, VarNode loadBase, VarNode storeBase,
          VarAndContext origVarAndContext, SparkField field, boolean refine) {
        // resolve the field match: find allocations of the load base, then the
        // contexts in which the store source can refer to those allocations
        AllocAndContextSet allocContexts
            = findContextsForAllocs(new VarAndContext(loadBase, origVarAndContext.context), intersection);
        for (AllocAndContext allocAndContext : allocContexts) {
          if (DEBUG) {
            debugPrint("alloc and context " + allocAndContext);
          }
          CallingContextSet matchSrcContexts;
          if (fieldCheckHeuristic.validFromBothEnds(field)) {
            matchSrcContexts
                = findUpContextsForVar(allocAndContext, new VarContextAndUp(storeBase, EMPTY_CALLSTACK, EMPTY_CALLSTACK));
          } else {
            matchSrcContexts = findVarContextsFromAlloc(allocAndContext, storeBase);
          }
          for (ImmutableStack<Integer> matchSrcContext : matchSrcContexts) {
            if (DEBUG) {
              debugPrint("match source context " + matchSrcContext);
            }
            VarAndContext newVarAndContext = new VarAndContext(matchSrc, matchSrcContext);
            p.prop(newVarAndContext);
          }
        }
      }
      Object getResult() {
        return Boolean.valueOf(success);
      }
      @Override
      void handleAssignSrc(VarAndContext newVarAndContext, VarAndContext origVarAndContext, AssignEdge assignEdge) {
        p.prop(newVarAndContext);
      }
      @Override
      boolean shouldHandleSrc(VarNode src) {
        if (doPointsTo) {
          // points-to mode must visit every source
          return true;
        } else {
          // refinement mode: only sources that might contribute a bad location
          return src.getP2Set().hasNonEmptyIntersection(badLocs);
        }
      }
      boolean terminate() {
        // stop the traversal as soon as failure is established
        return !success;
      }
    };
    processIncomingEdges(edgeHandler, worklist);
    nesting--;
    return (Boolean) edgeHandler.getResult();
  }
/*
* (non-Javadoc)
*
* @see AAA.summary.Refiner#refineP2Set(soot.jimple.spark.pag.VarNode, soot.jimple.spark.sets.PointsToSetInternal)
*/
protected boolean refineP2Set(VarNode v, PointsToSetInternal badLocs, HeuristicType heuristic) {
// logger.debug(""+badLocs);
this.doPointsTo = false;
this.fieldCheckHeuristic = HeuristicType.getHeuristic(heuristic, pag.getTypeManager(), getMaxPasses());
try {
numPasses = 0;
while (true) {
numPasses++;
if (DEBUG_PASS != -1 && numPasses > DEBUG_PASS) {
return false;
}
if (numPasses > maxPasses) {
return false;
}
if (DEBUG) {
logger.debug("PASS " + numPasses);
logger.debug("" + fieldCheckHeuristic);
}
clearState();
boolean success = false;
try {
success = refineP2Set(new VarAndContext(v, EMPTY_CALLSTACK), badLocs);
} catch (TerminateEarlyException e) {
success = false;
}
if (success) {
return true;
} else {
if (!fieldCheckHeuristic.runNewPass()) {
return false;
}
}
}
} finally {
}
}
protected boolean weirdCall(Integer callSite) {
SootMethod invokedMethod = csInfo.getInvokedMethod(callSite);
return SootUtil.isThreadStartMethod(invokedMethod) || SootUtil.isNewInstanceMethod(invokedMethod);
}
  /**
   * Context-sensitive points-to query for a local; currently not implemented.
   *
   * @throws UnsupportedOperationException
   *           always
   */
  public PointsToSet reachingObjects(Context c, Local l) {
    throw new UnsupportedOperationException();
  }
  /**
   * Context-sensitive query for a field of a local's targets; currently not implemented.
   *
   * @throws UnsupportedOperationException
   *           always
   */
  public PointsToSet reachingObjects(Context c, Local l, SootField f) {
    throw new UnsupportedOperationException();
  }
  /**
   * Query for a field of a local's targets; currently not implemented.
   *
   * @throws UnsupportedOperationException
   *           always
   */
  public PointsToSet reachingObjects(Local l, SootField f) {
    throw new UnsupportedOperationException();
  }
  /**
   * Field dereference over an arbitrary points-to set; currently not implemented.
   *
   * @throws UnsupportedOperationException
   *           always
   */
  public PointsToSet reachingObjects(PointsToSet s, SootField f) {
    throw new UnsupportedOperationException();
  }
  /**
   * Points-to query for a (static) field; currently not implemented.
   *
   * @throws UnsupportedOperationException
   *           always
   */
  public PointsToSet reachingObjects(SootField f) {
    throw new UnsupportedOperationException();
  }
  /**
   * Array-element dereference over a points-to set; currently not implemented.
   *
   * @throws UnsupportedOperationException
   *           always
   */
  public PointsToSet reachingObjectsOfArrayElement(PointsToSet s) {
    throw new UnsupportedOperationException();
  }
  /**
   * @return returns the (SPARK) pointer assignment graph
   */
  public PAG getPAG() {
    return pag;
  }
  /**
   * @return <code>true</code> if caching is enabled
   */
  public boolean usesCache() {
    return useCache;
  }
  /**
   * enables caching
   */
  public void enableCache() {
    useCache = true;
  }
  /**
   * disables caching
   */
  public void disableCache() {
    useCache = false;
  }
  /**
   * clears the cache (both the refined and unrefined query result caches)
   */
  public void clearCache() {
    reachingObjectsCache.clear();
    reachingObjectsCacheNoCGRefinement.clear();
  }
  /**
   * @return whether virtual call sites are refined during queries
   */
  public boolean isRefineCallGraph() {
    return refineCallGraph;
  }
  /**
   * enables or disables call-graph refinement during queries
   */
  public void setRefineCallGraph(boolean refineCallGraph) {
    this.refineCallGraph = refineCallGraph;
  }
  /**
   * @return the currently configured field-check heuristic type
   */
  public HeuristicType getHeuristicType() {
    return heuristicType;
  }
  /**
   * Sets the field-check heuristic type and clears the cache, since cached
   * results may depend on the previous heuristic.
   */
  public void setHeuristicType(HeuristicType heuristicType) {
    this.heuristicType = heuristicType;
    clearCache();
  }
}
| 73,591
| 36.055388
| 125
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/ondemand/DotPointerGraph.java
|
package soot.jimple.spark.ondemand;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2007 Manu Sridharan
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.PrintWriter;
import java.util.HashSet;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import soot.jimple.spark.ondemand.genericutil.Predicate;
import soot.jimple.spark.pag.AllocNode;
import soot.jimple.spark.pag.FieldRefNode;
import soot.jimple.spark.pag.Node;
import soot.jimple.spark.pag.PagToDotDumper;
import soot.jimple.spark.pag.VarNode;
/**
* you can just add edges and then dump them as a dot graph
*
* @author Manu Sridharan
*
*/
public class DotPointerGraph {
  private static final Logger logger = LoggerFactory.getLogger(DotPointerGraph.class);

  /** Rendered edge lines, each already formatted in dot syntax. */
  private final Set<String> edges = new HashSet<String>();

  /** Every node mentioned by at least one edge; used to emit node labels. */
  private final Set<Node> nodes = new HashSet<Node>();

  /** Adds an assignment edge (black, unlabeled), drawn from target to source. */
  public void addAssign(VarNode from, VarNode to) {
    addEdge(to, from, "", "black");
  }

  private void addEdge(Node from, Node to, String edgeLabel, String color) {
    nodes.add(from);
    nodes.add(to);
    addEdge(PagToDotDumper.makeNodeName(from), PagToDotDumper.makeNodeName(to), edgeLabel, color);
  }

  private void addEdge(String from, String to, String edgeLabel, String color) {
    // StringBuilder instead of StringBuffer: the buffer is method-local, so no
    // synchronization is needed.
    StringBuilder tmp = new StringBuilder();
    tmp.append(" ");
    tmp.append(from);
    tmp.append(" -> ");
    tmp.append(to);
    tmp.append(" [label=\"");
    tmp.append(edgeLabel);
    tmp.append("\", color=");
    tmp.append(color);
    tmp.append("];");
    edges.add(tmp.toString());
  }

  /** Adds an allocation edge (yellow, unlabeled). */
  public void addNew(AllocNode from, VarNode to) {
    addEdge(to, from, "", "yellow");
  }

  /** Adds a call edge (blue), labeled with the call-site id. */
  public void addCall(VarNode from, VarNode to, Integer callSite) {
    addEdge(to, from, callSite.toString(), "blue");
  }

  /** Adds a field-match edge (brown, unlabeled). */
  public void addMatch(VarNode from, VarNode to) {
    addEdge(to, from, "", "brown");
  }

  /** Adds a field-load edge (green), labeled with the field. */
  public void addLoad(FieldRefNode from, VarNode to) {
    addEdge(to, from.getBase(), from.getField().toString(), "green");
  }

  /** Adds a field-store edge (red), labeled with the field. */
  public void addStore(VarNode from, FieldRefNode to) {
    addEdge(to.getBase(), from, to.getField().toString(), "red");
  }

  /** @return the number of distinct edges added so far */
  public int numEdges() {
    return edges.size();
  }

  /**
   * Writes the accumulated graph to {@code filename} in dot format.
   *
   * Bug fix: the original logged a failed file open but then dereferenced the
   * still-null writer, causing a NullPointerException. We now log and return
   * without dumping; try-with-resources also guarantees the writer is closed.
   */
  public void dump(String filename) {
    try (PrintWriter pw = new PrintWriter(new FileOutputStream(filename))) {
      // pw.println("digraph G {\n\trankdir=LR;");
      pw.println("digraph G {");
      Predicate<Node> falsePred = new Predicate<Node>() {
        @Override
        public boolean test(Node obj_) {
          return false;
        }
      };
      for (Node node : nodes) {
        pw.println(PagToDotDumper.makeDotNodeLabel(node, falsePred));
      }
      for (String edge : edges) {
        pw.println(edge);
      }
      pw.println("}");
    } catch (FileNotFoundException e) {
      logger.error(e.getMessage(), e);
    }
  }
}
| 3,617
| 26.830769
| 98
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/ondemand/EverythingHeuristic.java
|
package soot.jimple.spark.ondemand;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2007 Manu Sridharan
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.jimple.spark.pag.SparkField;
public class EverythingHeuristic implements FieldCheckHeuristic {
public boolean runNewPass() {
return false;
}
public boolean validateMatchesForField(SparkField field) {
return true;
}
public boolean validFromBothEnds(SparkField field) {
return false;
}
}
| 1,163
| 26.714286
| 71
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/ondemand/FieldCheckHeuristic.java
|
package soot.jimple.spark.ondemand;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2007 Manu Sridharan
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.jimple.spark.pag.SparkField;
public interface FieldCheckHeuristic {
/**
* Update the heuristic for another pass of the algorithm.
*
* @return true if the heuristic will act differently on the next pass
*/
public boolean runNewPass();
public boolean validateMatchesForField(SparkField field);
public boolean validFromBothEnds(SparkField field);
}
| 1,219
| 29.5
| 72
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/ondemand/HeuristicType.java
|
package soot.jimple.spark.ondemand;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2007 Manu Sridharan
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.jimple.spark.internal.TypeManager;
/**
 * Enumerates the available field-check heuristics and acts as their factory.
 */
public enum HeuristicType {
  MANUAL, INCR, EVERY, MANUALINCR, NOTHING;

  /**
   * Instantiates the {@link FieldCheckHeuristic} corresponding to {@code type}.
   *
   * @param type the heuristic variant to create
   * @param tm type manager handed to the type-aware heuristics
   * @param maxPasses pass budget handed to the incremental heuristics
   * @return a fresh heuristic instance, or null for an unknown type
   */
  public static FieldCheckHeuristic getHeuristic(HeuristicType type, TypeManager tm, int maxPasses) {
    switch (type) {
      case MANUAL:
        return new ManualFieldCheckHeuristic();
      case INCR:
        return new InnerTypesIncrementalHeuristic(tm, maxPasses);
      case EVERY:
        return new EverythingHeuristic();
      case MANUALINCR:
        return new ManualAndInnerHeuristic(tm, maxPasses);
      case NOTHING:
        return new NothingHeuristic();
      default:
        return null;
    }
  }
}
| 1,596
| 27.517857
| 101
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/ondemand/IncrementalTypesHeuristic.java
|
package soot.jimple.spark.ondemand;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2007 Manu Sridharan
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.HashSet;
import java.util.Set;
import soot.RefType;
import soot.SootField;
import soot.jimple.spark.internal.TypeManager;
import soot.jimple.spark.ondemand.genericutil.Util;
import soot.jimple.spark.ondemand.pautil.SootUtil;
import soot.jimple.spark.ondemand.pautil.SootUtil.CallSiteAndContext;
import soot.jimple.spark.pag.ArrayElement;
import soot.jimple.spark.pag.SparkField;
/**
 * A {@link FieldCheckHeuristic} that starts out validating no fields and, on
 * each new pass, admits the declaring type of the first field rejected during
 * the previous pass.
 */
public class IncrementalTypesHeuristic implements FieldCheckHeuristic {
  private final TypeManager manager;
  // When true, fields declared in EXCLUDED_NAMES types are never validated.
  private static final boolean EXCLUDE_TYPES = false;
  private static final String[] EXCLUDED_NAMES = new String[] { "ca.mcgill.sable.soot.SootMethod" };
  // Declaring types whose fields are currently eligible for match validation.
  private Set<RefType> typesToCheck = new HashSet<RefType>();
  // Types with a recursive field; for these validFromBothEnds() answers false
  // (NOTE(review): presumably to avoid divergence on recursive structures — confirm).
  private Set<RefType> notBothEndsTypes = new HashSet<RefType>();
  // Declaring type of the first field rejected in the current pass; promoted to
  // typesToCheck when the next pass starts.
  private RefType newTypeOnQuery = null;
  /*
   * (non-Javadoc)
   *
   * @see AAA.algs.Heuristic#newQuery()
   */
  public boolean runNewPass() {
    // if (!aggressive && reachedAggressive) {
    // aggressive = true;
    // return true;
    // }
    if (newTypeOnQuery != null) {
      boolean added = typesToCheck.add(newTypeOnQuery);
      if (SootUtil.hasRecursiveField(newTypeOnQuery.getSootClass())) {
        notBothEndsTypes.add(newTypeOnQuery);
      }
      newTypeOnQuery = null;
      // returns false if the type was already known, i.e. nothing changed
      return added;
    }
    return false;
  }
  /*
   * (non-Javadoc)
   *
   * @see AAA.algs.Heuristic#validateMatchesForField(soot.jimple.spark.pag.SparkField)
   */
  public boolean validateMatchesForField(SparkField field) {
    // if (true) return true;
    // array elements are always validated
    if (field instanceof ArrayElement) {
      return true;
    }
    SootField sootField = (SootField) field;
    RefType declaringType = sootField.getDeclaringClass().getType();
    if (EXCLUDE_TYPES) {
      for (String typeName : EXCLUDED_NAMES) {
        if (Util.stringContains(declaringType.toString(), typeName)) {
          return false;
        }
      }
    }
    // accept if the declaring type is compatible with any admitted type
    for (RefType typeToCheck : typesToCheck) {
      if (manager.castNeverFails(declaringType, typeToCheck)) {
        return true;
      }
    }
    // remember only the FIRST rejected type per pass
    if (newTypeOnQuery == null) {
      newTypeOnQuery = declaringType;
      // System.err.println("adding type " + declaringType);
    }
    // System.err.println("false for " + field);
    return false;
  }
  public IncrementalTypesHeuristic(TypeManager manager) {
    super();
    this.manager = manager;
  }
  public String toString() {
    StringBuffer ret = new StringBuffer();
    ret.append("types ");
    ret.append(typesToCheck.toString());
    if (!notBothEndsTypes.isEmpty()) {
      ret.append(" not both ");
      ret.append(notBothEndsTypes.toString());
    }
    return ret.toString();
  }
  public boolean validFromBothEnds(SparkField field) {
    // fields of types marked "not both ends" must not be checked from both sides
    if (field instanceof SootField) {
      SootField sootField = (SootField) field;
      RefType declaringType = sootField.getDeclaringClass().getType();
      for (RefType type : notBothEndsTypes) {
        if (manager.castNeverFails(declaringType, type)) {
          return false;
        }
      }
    }
    return true;
  }
  public boolean refineVirtualCall(CallSiteAndContext callSiteAndContext) {
    // TODO make real heuristic
    return true;
  }
}
| 4,036
| 28.043165
| 100
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/ondemand/InnerTypesIncrementalHeuristic.java
|
package soot.jimple.spark.ondemand;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2007 Manu Sridharan
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.HashSet;
import java.util.Set;
import soot.RefType;
import soot.Scene;
import soot.SootField;
import soot.jimple.spark.internal.TypeManager;
import soot.jimple.spark.ondemand.genericutil.Util;
import soot.jimple.spark.ondemand.pautil.SootUtil;
import soot.jimple.spark.pag.ArrayElement;
import soot.jimple.spark.pag.SparkField;
/**
 * Incremental field-check heuristic that tracks declaring types at the
 * granularity of their TOP-LEVEL class (inner classes are normalized via
 * {@link Util#topLevelTypeString(String)}). After half the pass budget it
 * switches to a mode where no field may be checked from both ends.
 */
public class InnerTypesIncrementalHeuristic implements FieldCheckHeuristic {
  private final TypeManager manager;
  // Top-level declaring types currently eligible for match validation.
  private final Set<RefType> typesToCheck = new HashSet<RefType>();
  // Declaring type (as a String) of the first field rejected in the current pass.
  private String newTypeOnQuery = null;
  // Memoized both-ends decisions, keyed by declaring type.
  private final Set<RefType> bothEndsTypes = new HashSet<RefType>();
  private final Set<RefType> notBothEndsTypes = new HashSet<RefType>();
  private int numPasses = 0;
  // Passes spent in the first mode before switching to "all not both ends".
  private final int passesInDirection;
  private boolean allNotBothEnds = false;
  public InnerTypesIncrementalHeuristic(TypeManager manager, int maxPasses) {
    this.manager = manager;
    this.passesInDirection = maxPasses / 2;
  }
  public boolean runNewPass() {
    numPasses++;
    if (numPasses == passesInDirection) {
      // first-direction budget spent: change strategy
      return switchToNotBothEnds();
    } else {
      if (newTypeOnQuery != null) {
        // promote the rejected type (normalized to its top-level class)
        String topLevelTypeStr = Util.topLevelTypeString(newTypeOnQuery);
        boolean added;
        if (Scene.v().containsType(topLevelTypeStr)) {
          RefType refType = Scene.v().getRefType(topLevelTypeStr);
          added = typesToCheck.add(refType);
        } else {
          added = false;
        }
        newTypeOnQuery = null;
        return added;
      } else {
        // nothing new to admit: try the other strategy instead
        return switchToNotBothEnds();
      }
    }
  }
  // Switches (once) into the mode where validFromBothEnds() always answers
  // false; resets the pass counter and admitted types. Returns whether the
  // switch actually happened.
  private boolean switchToNotBothEnds() {
    if (!allNotBothEnds) {
      numPasses = 0;
      allNotBothEnds = true;
      newTypeOnQuery = null;
      typesToCheck.clear();
      return true;
    } else {
      return false;
    }
  }
  public boolean validateMatchesForField(SparkField field) {
    // array elements are always validated
    if (field instanceof ArrayElement) {
      return true;
    }
    SootField sootField = (SootField) field;
    RefType declaringType = sootField.getDeclaringClass().getType();
    String declaringTypeStr = declaringType.toString();
    String topLevel = Util.topLevelTypeString(declaringTypeStr);
    RefType refType;
    if (Scene.v().containsType(topLevel)) {
      refType = Scene.v().getRefType(topLevel);
    } else {
      refType = null;
    }
    // accept if the (top-level) declaring type matches any admitted type
    for (RefType checkedType : typesToCheck) {
      if (manager.castNeverFails(checkedType, refType)) {
        // System.err.println("validate " + declaringTypeStr);
        return true;
      }
    }
    // remember only the FIRST rejected type per pass
    if (newTypeOnQuery == null) {
      newTypeOnQuery = declaringTypeStr;
    }
    return false;
  }
  public boolean validFromBothEnds(SparkField field) {
    if (allNotBothEnds) {
      return false;
    }
    if (field instanceof ArrayElement) {
      return true;
    }
    SootField sootField = (SootField) field;
    RefType declaringType = sootField.getDeclaringClass().getType();
    // memoized decision: types with a recursive field are "not both ends"
    if (bothEndsTypes.contains(declaringType)) {
      return true;
    } else if (notBothEndsTypes.contains(declaringType)) {
      return false;
    } else {
      if (SootUtil.hasRecursiveField(declaringType.getSootClass())) {
        notBothEndsTypes.add(declaringType);
        return false;
      } else {
        bothEndsTypes.add(declaringType);
        return true;
      }
    }
  }
  @Override
  public String toString() {
    return typesToCheck.toString();
  }
}
| 4,263
| 27.61745
| 77
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/ondemand/LazyContextSensitivePointsToSet.java
|
package soot.jimple.spark.ondemand;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 1997 - 2018 Raja Vallée-Rai and others
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import java.util.Set;
import soot.Local;
import soot.PointsToSet;
import soot.Type;
import soot.jimple.ClassConstant;
import soot.jimple.spark.sets.EqualsSupportingPointsToSet;
/**
* This is a <i>lazy</i> points-to set that is potentially context sensitive. It is created by the {@link DemandCSPointsTo}
* analysis. The idea is that the points-to set is usually context-insensitive. However, when compared with another points-to
* set and the intersection of these points-to sets is non-empty, <i>then</i> context information is computed for this
* points-to set and also for the other one, if applicable. Then the test is repeated. Once context information is computed
* it is stored in this wrapper object so that it does not have to be computed again. Objects of this type should only be
* compared to other {@link LazyContextSensitivePointsToSet} objects using the equals method. Checking for non-empty
* intersection with points-to sets of other types should be possible but it is recommended to consistently use
* {@link LazyContextSensitivePointsToSet} nevertheless.
*
* @author Eric Bodden
*/
public class LazyContextSensitivePointsToSet implements EqualsSupportingPointsToSet {
  // Starts as the context-insensitive set; replaced by the context-sensitive
  // one the first time computeContextSensitiveInfo() runs.
  private EqualsSupportingPointsToSet delegate;
  private final DemandCSPointsTo demandCSPointsTo;
  // The local this set describes; used to (re)run the demand-driven query.
  private final Local local;
  private boolean isContextSensitive;
  /** @return whether the context-sensitive result has been computed yet */
  public boolean isContextSensitive() {
    return isContextSensitive;
  }
  public LazyContextSensitivePointsToSet(Local l, EqualsSupportingPointsToSet contextInsensitiveSet,
      DemandCSPointsTo demandCSPointsTo) {
    this.local = l;
    this.delegate = contextInsensitiveSet;
    this.demandCSPointsTo = demandCSPointsTo;
    this.isContextSensitive = false;
  }
  /**
   * Cheap check first: if the context-insensitive sets already have an empty
   * intersection the answer is false. Only on a non-empty intersection are both
   * sides upgraded to context-sensitive sets and the check repeated.
   */
  public boolean hasNonEmptyIntersection(PointsToSet other) {
    PointsToSet otherInner;
    if (other instanceof LazyContextSensitivePointsToSet) {
      otherInner = ((LazyContextSensitivePointsToSet) other).delegate;
    } else {
      otherInner = other;
    }
    if (delegate.hasNonEmptyIntersection(otherInner)) {
      if (other instanceof LazyContextSensitivePointsToSet) {
        ((LazyContextSensitivePointsToSet) other).computeContextSensitiveInfo();
        // re-read the delegate: it may have been replaced just above
        otherInner = ((LazyContextSensitivePointsToSet) other).delegate;
      }
      computeContextSensitiveInfo();
      return delegate.hasNonEmptyIntersection(otherInner);
    } else {
      return false;
    }
  }
  /**
   * Replaces the delegate with the context-sensitive result of the demand-driven
   * analysis; idempotent (the result is cached after the first call).
   */
  public void computeContextSensitiveInfo() {
    if (!isContextSensitive) {
      delegate = (EqualsSupportingPointsToSet) demandCSPointsTo.doReachingObjects(local);
      isContextSensitive = true;
    }
  }
  public boolean isEmpty() {
    return delegate.isEmpty();
  }
  public Set<ClassConstant> possibleClassConstants() {
    return delegate.possibleClassConstants();
  }
  public Set<String> possibleStringConstants() {
    return delegate.possibleStringConstants();
  }
  public Set<Type> possibleTypes() {
    return delegate.possibleTypes();
  }
  // Equality is delegated; only meaningful between two lazy sets whose
  // delegates are at the same (in)sensitivity level — see class javadoc.
  public boolean pointsToSetEquals(Object other) {
    if (!(other instanceof LazyContextSensitivePointsToSet)) {
      return false;
    }
    return ((LazyContextSensitivePointsToSet) other).delegate.equals(delegate);
  }
  public int pointsToSetHashCode() {
    return delegate.pointsToSetHashCode();
  }
  /** @return the current (context-insensitive or -sensitive) underlying set */
  public EqualsSupportingPointsToSet getDelegate() {
    return delegate;
  }
}
| 4,235
| 33.16129
| 125
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/ondemand/ManualAndInnerHeuristic.java
|
package soot.jimple.spark.ondemand;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2007 Manu Sridharan
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.jimple.spark.internal.TypeManager;
import soot.jimple.spark.pag.SparkField;
public class ManualAndInnerHeuristic implements FieldCheckHeuristic {
final ManualFieldCheckHeuristic manual = new ManualFieldCheckHeuristic();
final InnerTypesIncrementalHeuristic inner;
public ManualAndInnerHeuristic(TypeManager tm, int maxPasses) {
inner = new InnerTypesIncrementalHeuristic(tm, maxPasses);
}
public boolean runNewPass() {
return inner.runNewPass();
}
public boolean validateMatchesForField(SparkField field) {
return manual.validateMatchesForField(field) || inner.validateMatchesForField(field);
}
public boolean validFromBothEnds(SparkField field) {
return inner.validFromBothEnds(field);
}
}
| 1,582
| 30.66
| 89
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/ondemand/ManualFieldCheckHeuristic.java
|
package soot.jimple.spark.ondemand;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2007 Manu Sridharan
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.SootField;
import soot.jimple.spark.pag.ArrayElement;
import soot.jimple.spark.pag.SparkField;
/**
 * A hand-tuned {@link FieldCheckHeuristic}: only fields declared by classes on a
 * fixed whitelist ({@link #importantTypes}) are eligible for match validation, and
 * a second whitelist ({@link #notBothEndsTypes}) marks fields that must not be
 * refined from both ends. Intended "for hacking around with stuff".
 *
 * @author manu
 */
public class ManualFieldCheckHeuristic implements FieldCheckHeuristic {

  // After the single extra pass, every field is treated as not valid from both ends.
  private boolean allNotBothEnds = false;

  /** Grants exactly one additional pass; that pass disables both-ends validation. */
  public boolean runNewPass() {
    if (allNotBothEnds) {
      return false;
    }
    allNotBothEnds = true;
    return true;
  }

  // Declaring classes whose fields are worth validating.
  private static final String[] importantTypes = new String[] {
      // "ca.mcgill.sable.util.ArrayList",
      // "ca.mcgill.sable.util.ArrayList$ArrayIterator",
      // "ca.mcgill.sable.util.AbstractList$AbstractListIterator",
      /* "ca.mcgill.sable.util.VectorList", */ "java.util.Vector", "java.util.Hashtable", "java.util.Hashtable$Entry",
      "java.util.Hashtable$Enumerator", "java.util.LinkedList", "java.util.LinkedList$Entry", "java.util.AbstractList$Itr",
      // "ca.mcgill.sable.util.HashMap", "ca.mcgill.sable.util.LinkedList",
      // "ca.mcgill.sable.util.LinkedList$LinkedListIterator",
      // "ca.mcgill.sable.util.LinkedList$Node",
      /* "ca.mcgill.sable.soot.TrustingMonotonicArraySet", */ "java.util.Vector$1", "java.util.ArrayList", };

  // Declaring classes whose fields must never be refined from both ends.
  private static final String[] notBothEndsTypes = new String[] { "java.util.Hashtable$Entry",
      "java.util.LinkedList$Entry", /* "ca.mcgill.sable.util.LinkedList$Node" */ };

  /** Array elements always pass; other fields pass only if declared by a whitelisted class. */
  public boolean validateMatchesForField(SparkField field) {
    if (field instanceof ArrayElement) {
      return true;
    }
    String declaringType = ((SootField) field).getDeclaringClass().getType().toString();
    for (String typeName : importantTypes) {
      if (typeName.equals(declaringType)) {
        return true;
      }
    }
    return false;
  }

  /** Valid from both ends unless globally disabled or declared by a {@link #notBothEndsTypes} class. */
  public boolean validFromBothEnds(SparkField field) {
    if (allNotBothEnds) {
      return false;
    }
    if (!(field instanceof SootField)) {
      return true;
    }
    String declaringType = ((SootField) field).getDeclaringClass().getType().toString();
    for (String typeName : notBothEndsTypes) {
      if (typeName.equals(declaringType)) {
        return false;
      }
    }
    return true;
  }

  @Override
  public String toString() {
    return "Manual annotations";
  }
}
| 3,127
| 31.247423
| 123
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/ondemand/NothingHeuristic.java
|
package soot.jimple.spark.ondemand;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2007 Manu Sridharan
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
import soot.jimple.spark.pag.SparkField;
/**
 * The trivial {@link FieldCheckHeuristic}: it never requests another refinement
 * pass and rejects every field match, effectively disabling field-based
 * refinement altogether.
 */
public class NothingHeuristic implements FieldCheckHeuristic {
  // Never ask the driver for an additional pass.
  public boolean runNewPass() {
    return false;
  }
  // No field match is ever considered valid.
  public boolean validateMatchesForField(SparkField field) {
    return false;
  }
  // No field is treated as valid from both ends.
  public boolean validFromBothEnds(SparkField field) {
    return false;
  }
}
| 1,161
| 26.666667
| 71
|
java
|
soot
|
soot-master/src/main/java/soot/jimple/spark/ondemand/TerminateEarlyException.java
|
package soot.jimple.spark.ondemand;
/*-
* #%L
* Soot - a J*va Optimization Framework
* %%
* Copyright (C) 2007 Manu Sridharan
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
/**
 * Marker exception used to abandon an on-demand points-to query early.
 *
 * It carries no payload or message; it exists only to unwind the stack.
 * NOTE(review): the throw/catch sites are outside this file — presumably it is
 * raised when a query exceeds some analysis budget, letting the caller fall
 * back to a conservative answer. Confirm against the callers before relying
 * on that description.
 */
@SuppressWarnings("serial")
public class TerminateEarlyException extends RuntimeException {
}
| 1,114
| 30.857143
| 124
|
java
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.